Dataset columns (one record per function below): body (string, 26 to 98.2k characters), body_hash (int64), docstring (string, 1 to 16.8k characters), path (string, 5 to 230 characters), name (string, 1 to 96 characters), repository_name (string, 7 to 89 characters), lang (1 class: python), body_without_docstring (string, 20 to 98.2k characters).
def thread_task(tasks_queue: Queue, events_queue: Queue, schema: BaseSchema, checks: Iterable[Callable], settings: hypothesis.settings, auth: Optional[RawAuth], auth_type: Optional[str], headers: Optional[Dict[(str, Any)]], seed: Optional[int], results: TestResultSet, kwargs: Any) -> None: 'A single task, that threads do.\n\n Pretty similar to the default one-thread flow, but includes communication with the main thread via the events queue.\n ' prepared_auth = get_requests_auth(auth, auth_type) with get_session(prepared_auth, headers) as session: _run_task(network_test, tasks_queue, events_queue, schema, checks, settings, seed, results, session=session, **kwargs)
8,466,409,781,368,943,000
A single task, that threads do. Pretty similar to the default one-thread flow, but includes communication with the main thread via the events queue.
src/schemathesis/runner/__init__.py
thread_task
hlobit/schemathesis
python
def thread_task(tasks_queue: Queue, events_queue: Queue, schema: BaseSchema, checks: Iterable[Callable], settings: hypothesis.settings, auth: Optional[RawAuth], auth_type: Optional[str], headers: Optional[Dict[(str, Any)]], seed: Optional[int], results: TestResultSet, kwargs: Any) -> None: 'A single task, that threads do.\n\n Pretty similar to the default one-thread flow, but includes communication with the main thread via the events queue.\n ' prepared_auth = get_requests_auth(auth, auth_type) with get_session(prepared_auth, headers) as session: _run_task(network_test, tasks_queue, events_queue, schema, checks, settings, seed, results, session=session, **kwargs)
def stop_worker(thread_id: int) -> None: 'Raise an error in a thread so it is possible to asynchronously stop thread execution.' ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(thread_id), ctypes.py_object(SystemExit))
-599,246,663,343,350,700
Raise an error in a thread so it is possible to asynchronously stop thread execution.
src/schemathesis/runner/__init__.py
stop_worker
hlobit/schemathesis
python
def stop_worker(thread_id: int) -> None: ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(thread_id), ctypes.py_object(SystemExit))
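A minimal standalone sketch of what this ctypes call does (the worker function and timing below are illustrative, not from the repository): PyThreadState_SetAsyncExc schedules the exception to be raised in the target thread the next time it executes Python bytecode, so a thread blocked inside C code may not stop immediately.

import ctypes
import threading
import time

def worker():  # illustrative busy loop that exits once the async SystemExit arrives
    try:
        while True:
            time.sleep(0.01)
    except SystemExit:
        print('worker received SystemExit')

t = threading.Thread(target=worker)
t.start()
time.sleep(0.1)
# Same call as stop_worker above: raise SystemExit asynchronously in the worker thread.
ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(t.ident), ctypes.py_object(SystemExit))
t.join()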
def execute_from_schema(schema: BaseSchema, checks: Iterable[Callable], *, workers_num: int=1, hypothesis_options: Optional[Dict[(str, Any)]]=None, auth: Optional[RawAuth]=None, auth_type: Optional[str]=None, headers: Optional[Dict[(str, Any)]]=None, request_timeout: Optional[int]=None, seed: Optional[int]=None) -> Generator[(events.ExecutionEvent, None, None)]: 'Execute tests for the given schema.\n\n Provides the main testing loop and preparation step.\n ' runner: BaseRunner if (workers_num > 1): if schema.app: runner = ThreadPoolWSGIRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed, workers_num=workers_num) else: runner = ThreadPoolRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed, request_timeout=request_timeout) elif schema.app: runner = SingleThreadWSGIRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed) else: runner = SingleThreadRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed, request_timeout=request_timeout) (yield from runner.execute())
3,084,922,587,949,720,600
Execute tests for the given schema. Provides the main testing loop and preparation step.
src/schemathesis/runner/__init__.py
execute_from_schema
hlobit/schemathesis
python
def execute_from_schema(schema: BaseSchema, checks: Iterable[Callable], *, workers_num: int=1, hypothesis_options: Optional[Dict[(str, Any)]]=None, auth: Optional[RawAuth]=None, auth_type: Optional[str]=None, headers: Optional[Dict[(str, Any)]]=None, request_timeout: Optional[int]=None, seed: Optional[int]=None) -> Generator[(events.ExecutionEvent, None, None)]: 'Execute tests for the given schema.\n\n Provides the main testing loop and preparation step.\n ' runner: BaseRunner if (workers_num > 1): if schema.app: runner = ThreadPoolWSGIRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed, workers_num=workers_num) else: runner = ThreadPoolRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed, request_timeout=request_timeout) elif schema.app: runner = SingleThreadWSGIRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed) else: runner = SingleThreadRunner(schema=schema, checks=checks, hypothesis_settings=hypothesis_options, auth=auth, auth_type=auth_type, headers=headers, seed=seed, request_timeout=request_timeout) (yield from runner.execute())
def run_test(schema: BaseSchema, endpoint: Endpoint, test: Union[(Callable, InvalidSchema)], checks: Iterable[Callable], results: TestResultSet, **kwargs: Any) -> Generator[(events.ExecutionEvent, None, None)]: 'A single test run with all error handling needed.' result = TestResult(endpoint=endpoint) (yield events.BeforeExecution(results=results, schema=schema, endpoint=endpoint)) hypothesis_output: List[str] = [] try: if isinstance(test, InvalidSchema): status = Status.error result.add_error(test) else: with capture_hypothesis_output() as hypothesis_output: test(checks, result, **kwargs) status = Status.success except AssertionError: status = Status.failure except hypothesis.errors.Flaky: status = Status.error result.mark_errored() if result.checks: flaky_example = result.checks[(- 1)].example else: flaky_example = None result.add_error(hypothesis.errors.Flaky('Tests on this endpoint produce unreliable results: \nFalsified on the first call but did not on a subsequent one'), flaky_example) except hypothesis.errors.Unsatisfiable: status = Status.error result.add_error(hypothesis.errors.Unsatisfiable('Unable to satisfy schema parameters for this endpoint')) except KeyboardInterrupt: (yield events.Interrupted(results=results, schema=schema)) return except Exception as error: status = Status.error result.add_error(error) result.seed = (getattr(test, '_hypothesis_internal_use_seed', None) or getattr(test, '_hypothesis_internal_use_generated_seed', None)) results.append(result) (yield events.AfterExecution(results=results, schema=schema, endpoint=endpoint, status=status, hypothesis_output=hypothesis_output))
4,248,110,188,219,264,000
A single test run with all error handling needed.
src/schemathesis/runner/__init__.py
run_test
hlobit/schemathesis
python
def run_test(schema: BaseSchema, endpoint: Endpoint, test: Union[(Callable, InvalidSchema)], checks: Iterable[Callable], results: TestResultSet, **kwargs: Any) -> Generator[(events.ExecutionEvent, None, None)]: result = TestResult(endpoint=endpoint) (yield events.BeforeExecution(results=results, schema=schema, endpoint=endpoint)) hypothesis_output: List[str] = [] try: if isinstance(test, InvalidSchema): status = Status.error result.add_error(test) else: with capture_hypothesis_output() as hypothesis_output: test(checks, result, **kwargs) status = Status.success except AssertionError: status = Status.failure except hypothesis.errors.Flaky: status = Status.error result.mark_errored() if result.checks: flaky_example = result.checks[(- 1)].example else: flaky_example = None result.add_error(hypothesis.errors.Flaky('Tests on this endpoint produce unreliable results: \nFalsified on the first call but did not on a subsequent one'), flaky_example) except hypothesis.errors.Unsatisfiable: status = Status.error result.add_error(hypothesis.errors.Unsatisfiable('Unable to satisfy schema parameters for this endpoint')) except KeyboardInterrupt: (yield events.Interrupted(results=results, schema=schema)) return except Exception as error: status = Status.error result.add_error(error) result.seed = (getattr(test, '_hypothesis_internal_use_seed', None) or getattr(test, '_hypothesis_internal_use_generated_seed', None)) results.append(result) (yield events.AfterExecution(results=results, schema=schema, endpoint=endpoint, status=status, hypothesis_output=hypothesis_output))
def prepare(schema_uri: str, checks: Iterable[Callable]=DEFAULT_CHECKS, workers_num: int=1, api_options: Optional[Dict[(str, Any)]]=None, loader_options: Optional[Dict[(str, Any)]]=None, hypothesis_options: Optional[Dict[(str, Any)]]=None, loader: Callable=from_uri, seed: Optional[int]=None) -> Generator[(events.ExecutionEvent, None, None)]: 'Prepare a generator that will run test cases against the given API definition.' api_options = (api_options or {}) loader_options = (loader_options or {}) if ('base_url' not in loader_options): loader_options['base_url'] = get_base_url(schema_uri) schema = loader(schema_uri, **loader_options) return execute_from_schema(schema, checks, hypothesis_options=hypothesis_options, seed=seed, workers_num=workers_num, **api_options)
5,755,991,865,344,927,000
Prepare a generator that will run test cases against the given API definition.
src/schemathesis/runner/__init__.py
prepare
hlobit/schemathesis
python
def prepare(schema_uri: str, checks: Iterable[Callable]=DEFAULT_CHECKS, workers_num: int=1, api_options: Optional[Dict[(str, Any)]]=None, loader_options: Optional[Dict[(str, Any)]]=None, hypothesis_options: Optional[Dict[(str, Any)]]=None, loader: Callable=from_uri, seed: Optional[int]=None) -> Generator[(events.ExecutionEvent, None, None)]: api_options = (api_options or {}) loader_options = (loader_options or {}) if ('base_url' not in loader_options): loader_options['base_url'] = get_base_url(schema_uri) schema = loader(schema_uri, **loader_options) return execute_from_schema(schema, checks, hypothesis_options=hypothesis_options, seed=seed, workers_num=workers_num, **api_options)
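A sketch of how a caller might consume the generator returned by prepare; the schema URL is a placeholder, and the import path assumes the runner module shown in this file's path. The event attributes used below (endpoint, status, running_time) come from the run_test and execute bodies in this same file.

from schemathesis.runner import events, prepare

for event in prepare('http://127.0.0.1:8080/swagger.json', workers_num=2):  # placeholder URL
    if isinstance(event, events.AfterExecution):
        print(event.endpoint, event.status)
    elif isinstance(event, events.Finished):
        print('total running time:', event.running_time)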
def network_test(case: Case, checks: Iterable[Callable], result: TestResult, session: requests.Session, request_timeout: Optional[int]) -> None: 'A single test body that will be executed against the target.' timeout = prepare_timeout(request_timeout) response = case.call(session=session, timeout=timeout) _run_checks(case, checks, result, response)
788,102,240,383,414,400
A single test body that will be executed against the target.
src/schemathesis/runner/__init__.py
network_test
hlobit/schemathesis
python
def network_test(case: Case, checks: Iterable[Callable], result: TestResult, session: requests.Session, request_timeout: Optional[int]) -> None: timeout = prepare_timeout(request_timeout) response = case.call(session=session, timeout=timeout) _run_checks(case, checks, result, response)
def prepare_timeout(timeout: Optional[int]) -> Optional[float]: 'Request timeout is in milliseconds, but `requests` uses seconds' output: Optional[Union[(int, float)]] = timeout if (timeout is not None): output = (timeout / 1000) return output
2,219,370,182,692,796,400
Request timeout is in milliseconds, but `requests` uses seconds
src/schemathesis/runner/__init__.py
prepare_timeout
hlobit/schemathesis
python
def prepare_timeout(timeout: Optional[int]) -> Optional[float]: output: Optional[Union[(int, float)]] = timeout if (timeout is not None): output = (timeout / 1000) return output
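The millisecond-to-second conversion behaves like this (a quick illustration, not from the source):

assert prepare_timeout(None) is None   # no timeout configured
assert prepare_timeout(500) == 0.5     # 500 ms becomes 0.5 s, the unit `requests` expects
assert prepare_timeout(1000) == 1.0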
def execute(self) -> Generator[(events.ExecutionEvent, None, None)]: 'Common logic for all runners.' results = TestResultSet() initialized = events.Initialized(results=results, schema=self.schema, checks=self.checks, hypothesis_settings=self.hypothesis_settings) (yield initialized) (yield from self._execute(results)) (yield events.Finished(results=results, schema=self.schema, running_time=(time.time() - initialized.start_time)))
-4,920,484,788,315,777,000
Common logic for all runners.
src/schemathesis/runner/__init__.py
execute
hlobit/schemathesis
python
def execute(self) -> Generator[(events.ExecutionEvent, None, None)]: results = TestResultSet() initialized = events.Initialized(results=results, schema=self.schema, checks=self.checks, hypothesis_settings=self.hypothesis_settings) (yield initialized) (yield from self._execute(results)) (yield events.Finished(results=results, schema=self.schema, running_time=(time.time() - initialized.start_time)))
def _execute(self, results: TestResultSet) -> Generator[(events.ExecutionEvent, None, None)]: 'All events come from a queue where different workers push their events.' tasks_queue = self._get_tasks_queue() events_queue: Queue = Queue() workers = self._init_workers(tasks_queue, events_queue, results) def stop_workers() -> None: for worker in workers: ident = cast(int, worker.ident) stop_worker(ident) worker.join() is_finished = False try: while (not is_finished): time.sleep(0.001) is_finished = all(((not worker.is_alive()) for worker in workers)) while (not events_queue.empty()): event = events_queue.get() (yield event) if isinstance(event, events.Interrupted): raise ThreadInterrupted except ThreadInterrupted: stop_workers() except KeyboardInterrupt: stop_workers() (yield events.Interrupted(results=results, schema=self.schema))
-649,910,977,915,766,500
All events come from a queue where different workers push their events.
src/schemathesis/runner/__init__.py
_execute
hlobit/schemathesis
python
def _execute(self, results: TestResultSet) -> Generator[(events.ExecutionEvent, None, None)]: tasks_queue = self._get_tasks_queue() events_queue: Queue = Queue() workers = self._init_workers(tasks_queue, events_queue, results) def stop_workers() -> None: for worker in workers: ident = cast(int, worker.ident) stop_worker(ident) worker.join() is_finished = False try: while (not is_finished): time.sleep(0.001) is_finished = all(((not worker.is_alive()) for worker in workers)) while (not events_queue.empty()): event = events_queue.get() (yield event) if isinstance(event, events.Interrupted): raise ThreadInterrupted except ThreadInterrupted: stop_workers() except KeyboardInterrupt: stop_workers() (yield events.Interrupted(results=results, schema=self.schema))
def _get_tasks_queue(self) -> Queue: 'All endpoints are distributed among all workers via a queue.' tasks_queue: Queue = Queue() tasks_queue.queue.extend(self.schema.get_all_endpoints()) return tasks_queue
8,114,420,042,134,657,000
All endpoints are distributed among all workers via a queue.
src/schemathesis/runner/__init__.py
_get_tasks_queue
hlobit/schemathesis
python
def _get_tasks_queue(self) -> Queue: tasks_queue: Queue = Queue() tasks_queue.queue.extend(self.schema.get_all_endpoints()) return tasks_queue
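Extending queue.queue (the underlying deque) pre-fills the queue in one step instead of repeated put() calls; a standalone illustration with placeholder endpoint values. Note that bypassing put() also skips the unfinished-task bookkeeping used by Queue.join().

from queue import Queue

tasks_queue = Queue()
tasks_queue.queue.extend(['GET /users', 'POST /users', 'GET /items'])  # bulk pre-fill
print(tasks_queue.qsize())  # 3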
def _init_workers(self, tasks_queue: Queue, events_queue: Queue, results: TestResultSet) -> List[threading.Thread]: 'Initialize & start workers that will execute tests.' workers = [threading.Thread(target=self._get_task(), kwargs=self._get_worker_kwargs(tasks_queue, events_queue, results)) for _ in range(self.workers_num)] for worker in workers: worker.start() return workers
2,309,366,165,997,026,300
Initialize & start workers that will execute tests.
src/schemathesis/runner/__init__.py
_init_workers
hlobit/schemathesis
python
def _init_workers(self, tasks_queue: Queue, events_queue: Queue, results: TestResultSet) -> List[threading.Thread]: workers = [threading.Thread(target=self._get_task(), kwargs=self._get_worker_kwargs(tasks_queue, events_queue, results)) for _ in range(self.workers_num)] for worker in workers: worker.start() return workers
def __str__(self): ' print out some basic information about the BC object ' string = ('BCs: -x: %s +x: %s ' % (self.xlb, self.xrb)) return string
469,757,932,280,750,660
print out some basic information about the BC object
multigrid/patch1d.py
__str__
python-hydro/hydro_examples
python
def __str__(self): ' ' string = ('BCs: -x: %s +x: %s ' % (self.xlb, self.xrb)) return string
def __init__(self, nx, ng=1, xmin=0.0, xmax=1.0): '\n The class constructor function.\n\n The only data that we require is the number of points that\n make up the mesh.\n\n We optionally take the extrema of the domain, number of ghost\n cells (assume 1)\n ' self.nx = nx self.ng = ng self.qx = ((2 * ng) + nx) self.xmin = xmin self.xmax = xmax self.ilo = ng self.ihi = ((ng + nx) - 1) self.dx = ((xmax - xmin) / nx) self.xl = (((numpy.arange((nx + (2 * ng))) - ng) * self.dx) + xmin) self.xr = ((((numpy.arange((nx + (2 * ng))) + 1.0) - ng) * self.dx) + xmin) self.x = (0.5 * (self.xl + self.xr))
1,316,086,383,784,868,400
The class constructor function. The only data that we require is the number of points that make up the mesh. We optionally take the extrema of the domain, number of ghost cells (assume 1)
multigrid/patch1d.py
__init__
python-hydro/hydro_examples
python
def __init__(self, nx, ng=1, xmin=0.0, xmax=1.0): '\n The class constructor function.\n\n The only data that we require is the number of points that\n make up the mesh.\n\n We optionally take the extrema of the domain, number of ghost\n cells (assume 1)\n ' self.nx = nx self.ng = ng self.qx = ((2 * ng) + nx) self.xmin = xmin self.xmax = xmax self.ilo = ng self.ihi = ((ng + nx) - 1) self.dx = ((xmax - xmin) / nx) self.xl = (((numpy.arange((nx + (2 * ng))) - ng) * self.dx) + xmin) self.xr = ((((numpy.arange((nx + (2 * ng))) + 1.0) - ng) * self.dx) + xmin) self.x = (0.5 * (self.xl + self.xr))
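A worked example of the index bookkeeping above, computed by hand rather than taken from the source: with nx=4, ng=1, xmin=0.0, xmax=1.0 the constructor gives qx=6, ilo=1, ihi=4, dx=0.25, and the cell centers (ghost cells included) are:

import numpy

nx, ng, xmin, xmax = 4, 1, 0.0, 1.0
dx = (xmax - xmin) / nx                                  # 0.25
x = (numpy.arange(nx + 2 * ng) + 0.5 - ng) * dx + xmin   # same centers as 0.5*(xl + xr)
print(x)  # [-0.125  0.125  0.375  0.625  0.875  1.125]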
def __str__(self): ' print out some basic information about the grid object ' return '1-d grid: nx = {}, ng = {}'.format(self.nx, self.ng)
-7,226,611,515,420,033,000
print out some basic information about the grid object
multigrid/patch1d.py
__str__
python-hydro/hydro_examples
python
def __str__(self): ' ' return '1-d grid: nx = {}, ng = {}'.format(self.nx, self.ng)
def register_var(self, name, bc_object): '\n register a variable with CellCenterData1d object. Here we pass in a\n BCObject that describes the boundary conditions for that\n variable.\n ' if (self.initialized == 1): sys.exit('ERROR: grid already initialized') self.vars.append(name) self.nvar += 1 self.BCs[name] = bc_object
2,683,589,669,001,459,000
register a variable with CellCenterData1d object. Here we pass in a BCObject that describes the boundary conditions for that variable.
multigrid/patch1d.py
register_var
python-hydro/hydro_examples
python
def register_var(self, name, bc_object): '\n register a variable with CellCenterData1d object. Here we pass in a\n BCObject that describes the boundary conditions for that\n variable.\n ' if (self.initialized == 1): sys.exit('ERROR: grid already initialized') self.vars.append(name) self.nvar += 1 self.BCs[name] = bc_object
def create(self): '\n called after all the variables are registered and allocates\n the storage for the state data\n ' if (self.initialized == 1): sys.exit('ERROR: grid already initialized') self.data = numpy.zeros((self.nvar, self.grid.qx), dtype=self.dtype) self.initialized = 1
-7,108,092,911,557,650,000
called after all the variables are registered and allocates the storage for the state data
multigrid/patch1d.py
create
python-hydro/hydro_examples
python
def create(self): '\n called after all the variables are registered and allocates\n the storage for the state data\n ' if (self.initialized == 1): sys.exit('ERROR: grid already initialized') self.data = numpy.zeros((self.nvar, self.grid.qx), dtype=self.dtype) self.initialized = 1
def __str__(self): ' print out some basic information about the ccData2d object ' if (self.initialized == 0): mystr = 'CellCenterData1d object not yet initialized' return mystr mystr = (('cc data: nx = {}, ng = {}\n'.format(self.grid.nx, self.grid.ng) + ' nvars = {}\n'.format(self.nvar)) + 'variables: \n') ilo = self.grid.ilo ihi = self.grid.ihi for n in range(self.nvar): mystr += ('%16s: min: %15.10f max: %15.10f\n' % (self.vars[n], numpy.min(self.data[n, ilo:(ihi + 1)]), numpy.max(self.data[n, ilo:(ihi + 1)]))) mystr += ('%16s BCs: -x: %-12s +x: %-12s \n' % (' ', self.BCs[self.vars[n]].xlb, self.BCs[self.vars[n]].xrb)) return mystr
8,466,030,980,728,462,000
print out some basic information about the ccData2d object
multigrid/patch1d.py
__str__
python-hydro/hydro_examples
python
def __str__(self): ' ' if (self.initialized == 0): mystr = 'CellCenterData1d object not yet initialized' return mystr mystr = (('cc data: nx = {}, ng = {}\n'.format(self.grid.nx, self.grid.ng) + ' nvars = {}\n'.format(self.nvar)) + 'variables: \n') ilo = self.grid.ilo ihi = self.grid.ihi for n in range(self.nvar): mystr += ('%16s: min: %15.10f max: %15.10f\n' % (self.vars[n], numpy.min(self.data[n, ilo:(ihi + 1)]), numpy.max(self.data[n, ilo:(ihi + 1)]))) mystr += ('%16s BCs: -x: %-12s +x: %-12s \n' % (' ', self.BCs[self.vars[n]].xlb, self.BCs[self.vars[n]].xrb)) return mystr
def get_var(self, name): '\n return a data array the variable described by name. Any changes\n made to this are automatically reflected in the CellCenterData1d\n object.\n ' n = self.vars.index(name) return self.data[n, :]
-905,601,490,949,444,400
return a data array the variable described by name. Any changes made to this are automatically reflected in the CellCenterData1d object.
multigrid/patch1d.py
get_var
python-hydro/hydro_examples
python
def get_var(self, name): '\n return a data array the variable described by name. Any changes\n made to this are automatically reflected in the CellCenterData1d\n object.\n ' n = self.vars.index(name) return self.data[n, :]
def fill_BC_all(self): '\n fill boundary conditions on all variables\n ' for name in self.vars: self.fill_BC(name)
-5,289,033,638,672,699,000
fill boundary conditions on all variables
multigrid/patch1d.py
fill_BC_all
python-hydro/hydro_examples
python
def fill_BC_all(self): '\n \n ' for name in self.vars: self.fill_BC(name)
def fill_BC(self, name): '\n fill the boundary conditions. This operates on a single state\n variable at a time, to allow for maximum flexibility\n\n we do periodic, reflect-even, reflect-odd, and outflow\n\n each variable name has a corresponding bc_object stored in the\n ccData2d object -- we refer to this to figure out the action\n to take at each boundary.\n ' n = self.vars.index(name) if ((self.BCs[name].xlb == 'outflow') or (self.BCs[name].xlb == 'neumann')): for i in range(0, self.grid.ilo): self.data[(n, i)] = self.data[(n, self.grid.ilo)] elif (self.BCs[name].xlb == 'reflect-even'): for i in range(0, self.grid.ilo): self.data[(n, i)] = self.data[(n, (((2 * self.grid.ng) - i) - 1))] elif (self.BCs[name].xlb in ['reflect-odd', 'dirichlet']): for i in range(0, self.grid.ilo): self.data[(n, i)] = (- self.data[(n, (((2 * self.grid.ng) - i) - 1))]) elif (self.BCs[name].xlb == 'periodic'): for i in range(0, self.grid.ilo): self.data[(n, i)] = self.data[(n, (((self.grid.ihi - self.grid.ng) + i) + 1))] if ((self.BCs[name].xrb == 'outflow') or (self.BCs[name].xrb == 'neumann')): for i in range((self.grid.ihi + 1), (self.grid.nx + (2 * self.grid.ng))): self.data[(n, i)] = self.data[(n, self.grid.ihi)] elif (self.BCs[name].xrb == 'reflect-even'): for i in range(0, self.grid.ng): i_bnd = ((self.grid.ihi + 1) + i) i_src = (self.grid.ihi - i) self.data[(n, i_bnd)] = self.data[(n, i_src)] elif (self.BCs[name].xrb in ['reflect-odd', 'dirichlet']): for i in range(0, self.grid.ng): i_bnd = ((self.grid.ihi + 1) + i) i_src = (self.grid.ihi - i) self.data[(n, i_bnd)] = (- self.data[(n, i_src)]) elif (self.BCs[name].xrb == 'periodic'): for i in range((self.grid.ihi + 1), ((2 * self.grid.ng) + self.grid.nx)): self.data[(n, i)] = self.data[(n, (((i - self.grid.ihi) - 1) + self.grid.ng))]
-1,690,921,127,078,352,400
fill the boundary conditions. This operates on a single state variable at a time, to allow for maximum flexibility we do periodic, reflect-even, reflect-odd, and outflow each variable name has a corresponding bc_object stored in the ccData2d object -- we refer to this to figure out the action to take at each boundary.
multigrid/patch1d.py
fill_BC
python-hydro/hydro_examples
python
def fill_BC(self, name): '\n fill the boundary conditions. This operates on a single state\n variable at a time, to allow for maximum flexibility\n\n we do periodic, reflect-even, reflect-odd, and outflow\n\n each variable name has a corresponding bc_object stored in the\n ccData2d object -- we refer to this to figure out the action\n to take at each boundary.\n ' n = self.vars.index(name) if ((self.BCs[name].xlb == 'outflow') or (self.BCs[name].xlb == 'neumann')): for i in range(0, self.grid.ilo): self.data[(n, i)] = self.data[(n, self.grid.ilo)] elif (self.BCs[name].xlb == 'reflect-even'): for i in range(0, self.grid.ilo): self.data[(n, i)] = self.data[(n, (((2 * self.grid.ng) - i) - 1))] elif (self.BCs[name].xlb in ['reflect-odd', 'dirichlet']): for i in range(0, self.grid.ilo): self.data[(n, i)] = (- self.data[(n, (((2 * self.grid.ng) - i) - 1))]) elif (self.BCs[name].xlb == 'periodic'): for i in range(0, self.grid.ilo): self.data[(n, i)] = self.data[(n, (((self.grid.ihi - self.grid.ng) + i) + 1))] if ((self.BCs[name].xrb == 'outflow') or (self.BCs[name].xrb == 'neumann')): for i in range((self.grid.ihi + 1), (self.grid.nx + (2 * self.grid.ng))): self.data[(n, i)] = self.data[(n, self.grid.ihi)] elif (self.BCs[name].xrb == 'reflect-even'): for i in range(0, self.grid.ng): i_bnd = ((self.grid.ihi + 1) + i) i_src = (self.grid.ihi - i) self.data[(n, i_bnd)] = self.data[(n, i_src)] elif (self.BCs[name].xrb in ['reflect-odd', 'dirichlet']): for i in range(0, self.grid.ng): i_bnd = ((self.grid.ihi + 1) + i) i_src = (self.grid.ihi - i) self.data[(n, i_bnd)] = (- self.data[(n, i_src)]) elif (self.BCs[name].xrb == 'periodic'): for i in range((self.grid.ihi + 1), ((2 * self.grid.ng) + self.grid.nx)): self.data[(n, i)] = self.data[(n, (((i - self.grid.ihi) - 1) + self.grid.ng))]
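A hand-worked illustration of these ghost-cell fills (array values are invented): for nx=4, ng=1 with interior data [1, 2, 3, 4], the full array is [g, 1, 2, 3, 4, g]. 'outflow'/'neumann' copies the nearest interior value (left ghost 1, right ghost 4), 'reflect-even' mirrors it (also 1 and 4 with a single ghost cell), 'reflect-odd'/'dirichlet' mirrors and negates it (-1 and -4), and 'periodic' wraps around (left ghost 4, right ghost 1).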
def restrict(self, varname): '\n restrict the variable varname to a coarser grid (factor of 2\n coarser) and return an array with the resulting data (and same\n number of ghostcells)\n ' fG = self.grid fData = self.get_var(varname) ng_c = fG.ng nx_c = (fG.nx // 2) cData = numpy.zeros(((2 * ng_c) + nx_c), dtype=self.dtype) ilo_c = ng_c ihi_c = ((ng_c + nx_c) - 1) cData[ilo_c:(ihi_c + 1)] = (0.5 * (fData[fG.ilo:(fG.ihi + 1):2] + fData[(fG.ilo + 1):(fG.ihi + 1):2])) return cData
7,760,059,069,005,052,000
restrict the variable varname to a coarser grid (factor of 2 coarser) and return an array with the resulting data (and same number of ghostcells)
multigrid/patch1d.py
restrict
python-hydro/hydro_examples
python
def restrict(self, varname): '\n restrict the variable varname to a coarser grid (factor of 2\n coarser) and return an array with the resulting data (and same\n number of ghostcells)\n ' fG = self.grid fData = self.get_var(varname) ng_c = fG.ng nx_c = (fG.nx // 2) cData = numpy.zeros(((2 * ng_c) + nx_c), dtype=self.dtype) ilo_c = ng_c ihi_c = ((ng_c + nx_c) - 1) cData[ilo_c:(ihi_c + 1)] = (0.5 * (fData[fG.ilo:(fG.ihi + 1):2] + fData[(fG.ilo + 1):(fG.ihi + 1):2])) return cData
def prolong(self, varname): "\n prolong the data in the current (coarse) grid to a finer\n (factor of 2 finer) grid. Return an array with the resulting\n data (and same number of ghostcells).\n\n We will reconstruct the data in the zone from the\n zone-averaged variables using the centered-difference slopes\n\n (x)\n f(x,y) = m x/dx + <f>\n\n When averaged over the parent cell, this reproduces <f>.\n\n Each zone's reconstrution will be averaged over 2 children.\n\n | | | | |\n | <f> | --> | | |\n | | | 1 | 2 |\n +-----------+ +-----+-----+\n\n We will fill each of the finer resolution zones by filling all\n the 1's together, using a stride 2 into the fine array. Then\n the 2's, this allows us to operate in a vector\n fashion. All operations will use the same slopes for their\n respective parents.\n\n " cG = self.grid cData = self.get_var(varname) ng_f = cG.ng nx_f = (cG.nx * 2) fData = numpy.zeros(((2 * ng_f) + nx_f), dtype=self.dtype) ilo_f = ng_f ihi_f = ((ng_f + nx_f) - 1) m_x = cG.scratch_array() m_x[cG.ilo:(cG.ihi + 1)] = (0.5 * (cData[(cG.ilo + 1):(cG.ihi + 2)] - cData[(cG.ilo - 1):cG.ihi])) fData[ilo_f:(ihi_f + 1):2] = (cData[cG.ilo:(cG.ihi + 1)] - (0.25 * m_x[cG.ilo:(cG.ihi + 1)])) fData[(ilo_f + 1):(ihi_f + 1):2] = (cData[cG.ilo:(cG.ihi + 1)] + (0.25 * m_x[cG.ilo:(cG.ihi + 1)])) return fData
3,561,002,186,046,957,600
prolong the data in the current (coarse) grid to a finer (factor of 2 finer) grid. Return an array with the resulting data (and same number of ghostcells). We will reconstruct the data in the zone from the zone-averaged variables using the centered-difference slopes (x) f(x,y) = m x/dx + <f> When averaged over the parent cell, this reproduces <f>. Each zone's reconstrution will be averaged over 2 children. | | | | | | <f> | --> | | | | | | 1 | 2 | +-----------+ +-----+-----+ We will fill each of the finer resolution zones by filling all the 1's together, using a stride 2 into the fine array. Then the 2's, this allows us to operate in a vector fashion. All operations will use the same slopes for their respective parents.
multigrid/patch1d.py
prolong
python-hydro/hydro_examples
python
def prolong(self, varname): "\n prolong the data in the current (coarse) grid to a finer\n (factor of 2 finer) grid. Return an array with the resulting\n data (and same number of ghostcells).\n\n We will reconstruct the data in the zone from the\n zone-averaged variables using the centered-difference slopes\n\n (x)\n f(x,y) = m x/dx + <f>\n\n When averaged over the parent cell, this reproduces <f>.\n\n Each zone's reconstrution will be averaged over 2 children.\n\n | | | | |\n | <f> | --> | | |\n | | | 1 | 2 |\n +-----------+ +-----+-----+\n\n We will fill each of the finer resolution zones by filling all\n the 1's together, using a stride 2 into the fine array. Then\n the 2's, this allows us to operate in a vector\n fashion. All operations will use the same slopes for their\n respective parents.\n\n " cG = self.grid cData = self.get_var(varname) ng_f = cG.ng nx_f = (cG.nx * 2) fData = numpy.zeros(((2 * ng_f) + nx_f), dtype=self.dtype) ilo_f = ng_f ihi_f = ((ng_f + nx_f) - 1) m_x = cG.scratch_array() m_x[cG.ilo:(cG.ihi + 1)] = (0.5 * (cData[(cG.ilo + 1):(cG.ihi + 2)] - cData[(cG.ilo - 1):cG.ihi])) fData[ilo_f:(ihi_f + 1):2] = (cData[cG.ilo:(cG.ihi + 1)] - (0.25 * m_x[cG.ilo:(cG.ihi + 1)])) fData[(ilo_f + 1):(ihi_f + 1):2] = (cData[cG.ilo:(cG.ihi + 1)] + (0.25 * m_x[cG.ilo:(cG.ihi + 1)])) return fData
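A small numerical check of the restriction above, standalone and with invented data rather than the patch1d classes: each coarse value is the average of its two fine children, and because prolong reconstructs the children as the coarse average minus/plus 0.25*m_x, their mean is again the coarse average, so restricting a prolonged field returns the original coarse data.

import numpy

fine = numpy.array([1.0, 3.0, 5.0, 7.0])   # fine-grid cell averages, no ghosts
coarse = 0.5 * (fine[0::2] + fine[1::2])    # pairwise average, as in restrict()
print(coarse)                               # [2. 6.]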
def __init__(self, *, host: str='datastore.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: str=None, scopes: Sequence[str]=None, channel: grpc.Channel=None, api_mtls_endpoint: str=None, client_cert_source: Callable[([], Tuple[(bytes, bytes)])]=None, ssl_channel_credentials: grpc.ChannelCredentials=None, client_cert_source_for_mtls: Callable[([], Tuple[(bytes, bytes)])]=None, quota_project_id: Optional[str]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False) -> None: "Instantiate the transport.\n\n Args:\n host (Optional[str]):\n The hostname to connect to.\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n This argument is ignored if ``channel`` is provided.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional(Sequence[str])): A list of scopes. This argument is\n ignored if ``channel`` is provided.\n channel (Optional[grpc.Channel]): A ``Channel`` instance through\n which to make calls.\n api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.\n If provided, it overrides the ``host`` argument and tries to create\n a mutual TLS channel with client SSL credentials from\n ``client_cert_source`` or application default SSL credentials.\n client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):\n Deprecated. A callback to provide client SSL certificate bytes and\n private key bytes, both in PEM format. It is ignored if\n ``api_mtls_endpoint`` is None.\n ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials\n for the grpc channel. It is ignored if ``channel`` is provided.\n client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):\n A callback to provide client certificate bytes and private key bytes,\n both in PEM format. It is used to configure a mutual TLS channel. It is\n ignored if ``channel`` or ``ssl_channel_credentials`` is provided.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. 
If ``None``, then default info will be used.\n Generally, you only need to set this if you're developing\n your own client library.\n always_use_jwt_access (Optional[bool]): Whether self signed JWT should\n be used for service account credentials.\n\n Raises:\n google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport\n creation failed for any reason.\n google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``\n and ``credentials_file`` are passed.\n " self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[(str, Callable)] = {} if api_mtls_endpoint: warnings.warn('api_mtls_endpoint is deprecated', DeprecationWarning) if client_cert_source: warnings.warn('client_cert_source is deprecated', DeprecationWarning) if channel: credentials = False self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: host = api_mtls_endpoint if client_cert_source: (cert, key) = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials elif (client_cert_source_for_mtls and (not ssl_channel_credentials)): (cert, key) = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key) super().__init__(host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access) if (not self._grpc_channel): self._grpc_channel = type(self).create_channel(self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[('grpc.max_send_message_length', (- 1)), ('grpc.max_receive_message_length', (- 1))]) self._prep_wrapped_messages(client_info)
-5,410,718,539,390,989,000
Instantiate the transport. Args: host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if ``channel`` is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
__init__
LaudateCorpus1/python-datastore
python
def __init__(self, *, host: str='datastore.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: str=None, scopes: Sequence[str]=None, channel: grpc.Channel=None, api_mtls_endpoint: str=None, client_cert_source: Callable[([], Tuple[(bytes, bytes)])]=None, ssl_channel_credentials: grpc.ChannelCredentials=None, client_cert_source_for_mtls: Callable[([], Tuple[(bytes, bytes)])]=None, quota_project_id: Optional[str]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False) -> None: "Instantiate the transport.\n\n Args:\n host (Optional[str]):\n The hostname to connect to.\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n This argument is ignored if ``channel`` is provided.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional(Sequence[str])): A list of scopes. This argument is\n ignored if ``channel`` is provided.\n channel (Optional[grpc.Channel]): A ``Channel`` instance through\n which to make calls.\n api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.\n If provided, it overrides the ``host`` argument and tries to create\n a mutual TLS channel with client SSL credentials from\n ``client_cert_source`` or application default SSL credentials.\n client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):\n Deprecated. A callback to provide client SSL certificate bytes and\n private key bytes, both in PEM format. It is ignored if\n ``api_mtls_endpoint`` is None.\n ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials\n for the grpc channel. It is ignored if ``channel`` is provided.\n client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):\n A callback to provide client certificate bytes and private key bytes,\n both in PEM format. It is used to configure a mutual TLS channel. It is\n ignored if ``channel`` or ``ssl_channel_credentials`` is provided.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. 
If ``None``, then default info will be used.\n Generally, you only need to set this if you're developing\n your own client library.\n always_use_jwt_access (Optional[bool]): Whether self signed JWT should\n be used for service account credentials.\n\n Raises:\n google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport\n creation failed for any reason.\n google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``\n and ``credentials_file`` are passed.\n " self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[(str, Callable)] = {} if api_mtls_endpoint: warnings.warn('api_mtls_endpoint is deprecated', DeprecationWarning) if client_cert_source: warnings.warn('client_cert_source is deprecated', DeprecationWarning) if channel: credentials = False self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: host = api_mtls_endpoint if client_cert_source: (cert, key) = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials elif (client_cert_source_for_mtls and (not ssl_channel_credentials)): (cert, key) = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key) super().__init__(host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access) if (not self._grpc_channel): self._grpc_channel = type(self).create_channel(self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[('grpc.max_send_message_length', (- 1)), ('grpc.max_receive_message_length', (- 1))]) self._prep_wrapped_messages(client_info)
@classmethod def create_channel(cls, host: str='datastore.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: str=None, scopes: Optional[Sequence[str]]=None, quota_project_id: Optional[str]=None, **kwargs) -> grpc.Channel: 'Create and return a gRPC channel object.\n Args:\n host (Optional[str]): The host for the channel to use.\n credentials (Optional[~.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify this application to the service. If\n none are specified, the client will attempt to ascertain\n the credentials from the environment.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is mutually exclusive with credentials.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n kwargs (Optional[dict]): Keyword arguments, which are passed to the\n channel creation.\n Returns:\n grpc.Channel: A gRPC channel object.\n\n Raises:\n google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``\n and ``credentials_file`` are passed.\n ' return grpc_helpers.create_channel(host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs)
-3,592,603,047,120,177,700
Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. quota_project_id (Optional[str]): An optional project to use for billing and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. Raises: google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
create_channel
LaudateCorpus1/python-datastore
python
@classmethod def create_channel(cls, host: str='datastore.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: str=None, scopes: Optional[Sequence[str]]=None, quota_project_id: Optional[str]=None, **kwargs) -> grpc.Channel: 'Create and return a gRPC channel object.\n Args:\n host (Optional[str]): The host for the channel to use.\n credentials (Optional[~.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify this application to the service. If\n none are specified, the client will attempt to ascertain\n the credentials from the environment.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is mutually exclusive with credentials.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n kwargs (Optional[dict]): Keyword arguments, which are passed to the\n channel creation.\n Returns:\n grpc.Channel: A gRPC channel object.\n\n Raises:\n google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``\n and ``credentials_file`` are passed.\n ' return grpc_helpers.create_channel(host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs)
@property def grpc_channel(self) -> grpc.Channel: 'Return the channel designed to connect to this service.\n ' return self._grpc_channel
-1,956,682,971,687,930,400
Return the channel designed to connect to this service.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
grpc_channel
LaudateCorpus1/python-datastore
python
@property def grpc_channel(self) -> grpc.Channel: '\n ' return self._grpc_channel
@property def lookup(self) -> Callable[([datastore.LookupRequest], datastore.LookupResponse)]: 'Return a callable for the lookup method over gRPC.\n\n Looks up entities by key.\n\n Returns:\n Callable[[~.LookupRequest],\n ~.LookupResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('lookup' not in self._stubs): self._stubs['lookup'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/Lookup', request_serializer=datastore.LookupRequest.serialize, response_deserializer=datastore.LookupResponse.deserialize) return self._stubs['lookup']
2,886,046,299,462,822,400
Return a callable for the lookup method over gRPC. Looks up entities by key. Returns: Callable[[~.LookupRequest], ~.LookupResponse]: A function that, when called, will call the underlying RPC on the server.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
lookup
LaudateCorpus1/python-datastore
python
@property def lookup(self) -> Callable[([datastore.LookupRequest], datastore.LookupResponse)]: 'Return a callable for the lookup method over gRPC.\n\n Looks up entities by key.\n\n Returns:\n Callable[[~.LookupRequest],\n ~.LookupResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('lookup' not in self._stubs): self._stubs['lookup'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/Lookup', request_serializer=datastore.LookupRequest.serialize, response_deserializer=datastore.LookupResponse.deserialize) return self._stubs['lookup']
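lookup and the stub properties that follow all use the same lazy, cached-callable pattern; a generic standalone sketch (the address and method path are placeholders, the serializer arguments are omitted, and no server needs to be running because gRPC channels connect lazily):

import grpc

channel = grpc.insecure_channel('localhost:50051')   # placeholder address
stubs = {}

def get_lookup():
    # Create the unary-unary callable once and cache it, as the properties above do.
    if 'lookup' not in stubs:
        stubs['lookup'] = channel.unary_unary('/google.datastore.v1.Datastore/Lookup')
    return stubs['lookup']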
@property def run_query(self) -> Callable[([datastore.RunQueryRequest], datastore.RunQueryResponse)]: 'Return a callable for the run query method over gRPC.\n\n Queries for entities.\n\n Returns:\n Callable[[~.RunQueryRequest],\n ~.RunQueryResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('run_query' not in self._stubs): self._stubs['run_query'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/RunQuery', request_serializer=datastore.RunQueryRequest.serialize, response_deserializer=datastore.RunQueryResponse.deserialize) return self._stubs['run_query']
-1,087,091,090,104,806,400
Return a callable for the run query method over gRPC. Queries for entities. Returns: Callable[[~.RunQueryRequest], ~.RunQueryResponse]: A function that, when called, will call the underlying RPC on the server.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
run_query
LaudateCorpus1/python-datastore
python
@property def run_query(self) -> Callable[([datastore.RunQueryRequest], datastore.RunQueryResponse)]: 'Return a callable for the run query method over gRPC.\n\n Queries for entities.\n\n Returns:\n Callable[[~.RunQueryRequest],\n ~.RunQueryResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('run_query' not in self._stubs): self._stubs['run_query'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/RunQuery', request_serializer=datastore.RunQueryRequest.serialize, response_deserializer=datastore.RunQueryResponse.deserialize) return self._stubs['run_query']
@property def begin_transaction(self) -> Callable[([datastore.BeginTransactionRequest], datastore.BeginTransactionResponse)]: 'Return a callable for the begin transaction method over gRPC.\n\n Begins a new transaction.\n\n Returns:\n Callable[[~.BeginTransactionRequest],\n ~.BeginTransactionResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('begin_transaction' not in self._stubs): self._stubs['begin_transaction'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/BeginTransaction', request_serializer=datastore.BeginTransactionRequest.serialize, response_deserializer=datastore.BeginTransactionResponse.deserialize) return self._stubs['begin_transaction']
6,109,222,605,587,897,000
Return a callable for the begin transaction method over gRPC. Begins a new transaction. Returns: Callable[[~.BeginTransactionRequest], ~.BeginTransactionResponse]: A function that, when called, will call the underlying RPC on the server.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
begin_transaction
LaudateCorpus1/python-datastore
python
@property def begin_transaction(self) -> Callable[([datastore.BeginTransactionRequest], datastore.BeginTransactionResponse)]: 'Return a callable for the begin transaction method over gRPC.\n\n Begins a new transaction.\n\n Returns:\n Callable[[~.BeginTransactionRequest],\n ~.BeginTransactionResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('begin_transaction' not in self._stubs): self._stubs['begin_transaction'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/BeginTransaction', request_serializer=datastore.BeginTransactionRequest.serialize, response_deserializer=datastore.BeginTransactionResponse.deserialize) return self._stubs['begin_transaction']
@property def commit(self) -> Callable[([datastore.CommitRequest], datastore.CommitResponse)]: 'Return a callable for the commit method over gRPC.\n\n Commits a transaction, optionally creating, deleting\n or modifying some entities.\n\n Returns:\n Callable[[~.CommitRequest],\n ~.CommitResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('commit' not in self._stubs): self._stubs['commit'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/Commit', request_serializer=datastore.CommitRequest.serialize, response_deserializer=datastore.CommitResponse.deserialize) return self._stubs['commit']
4,309,621,428,806,862,000
Return a callable for the commit method over gRPC. Commits a transaction, optionally creating, deleting or modifying some entities. Returns: Callable[[~.CommitRequest], ~.CommitResponse]: A function that, when called, will call the underlying RPC on the server.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
commit
LaudateCorpus1/python-datastore
python
@property def commit(self) -> Callable[([datastore.CommitRequest], datastore.CommitResponse)]: 'Return a callable for the commit method over gRPC.\n\n Commits a transaction, optionally creating, deleting\n or modifying some entities.\n\n Returns:\n Callable[[~.CommitRequest],\n ~.CommitResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('commit' not in self._stubs): self._stubs['commit'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/Commit', request_serializer=datastore.CommitRequest.serialize, response_deserializer=datastore.CommitResponse.deserialize) return self._stubs['commit']
@property def rollback(self) -> Callable[([datastore.RollbackRequest], datastore.RollbackResponse)]: 'Return a callable for the rollback method over gRPC.\n\n Rolls back a transaction.\n\n Returns:\n Callable[[~.RollbackRequest],\n ~.RollbackResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('rollback' not in self._stubs): self._stubs['rollback'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/Rollback', request_serializer=datastore.RollbackRequest.serialize, response_deserializer=datastore.RollbackResponse.deserialize) return self._stubs['rollback']
8,620,746,177,282,813,000
Return a callable for the rollback method over gRPC. Rolls back a transaction. Returns: Callable[[~.RollbackRequest], ~.RollbackResponse]: A function that, when called, will call the underlying RPC on the server.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
rollback
LaudateCorpus1/python-datastore
python
@property def rollback(self) -> Callable[([datastore.RollbackRequest], datastore.RollbackResponse)]: 'Return a callable for the rollback method over gRPC.\n\n Rolls back a transaction.\n\n Returns:\n Callable[[~.RollbackRequest],\n ~.RollbackResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('rollback' not in self._stubs): self._stubs['rollback'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/Rollback', request_serializer=datastore.RollbackRequest.serialize, response_deserializer=datastore.RollbackResponse.deserialize) return self._stubs['rollback']
@property def allocate_ids(self) -> Callable[([datastore.AllocateIdsRequest], datastore.AllocateIdsResponse)]: 'Return a callable for the allocate ids method over gRPC.\n\n Allocates IDs for the given keys, which is useful for\n referencing an entity before it is inserted.\n\n Returns:\n Callable[[~.AllocateIdsRequest],\n ~.AllocateIdsResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('allocate_ids' not in self._stubs): self._stubs['allocate_ids'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/AllocateIds', request_serializer=datastore.AllocateIdsRequest.serialize, response_deserializer=datastore.AllocateIdsResponse.deserialize) return self._stubs['allocate_ids']
4,482,291,332,690,070,500
Return a callable for the allocate ids method over gRPC. Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. Returns: Callable[[~.AllocateIdsRequest], ~.AllocateIdsResponse]: A function that, when called, will call the underlying RPC on the server.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
allocate_ids
LaudateCorpus1/python-datastore
python
@property def allocate_ids(self) -> Callable[([datastore.AllocateIdsRequest], datastore.AllocateIdsResponse)]: 'Return a callable for the allocate ids method over gRPC.\n\n Allocates IDs for the given keys, which is useful for\n referencing an entity before it is inserted.\n\n Returns:\n Callable[[~.AllocateIdsRequest],\n ~.AllocateIdsResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n ' if ('allocate_ids' not in self._stubs): self._stubs['allocate_ids'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/AllocateIds', request_serializer=datastore.AllocateIdsRequest.serialize, response_deserializer=datastore.AllocateIdsResponse.deserialize) return self._stubs['allocate_ids']
@property def reserve_ids(self) -> Callable[([datastore.ReserveIdsRequest], datastore.ReserveIdsResponse)]: "Return a callable for the reserve ids method over gRPC.\n\n Prevents the supplied keys' IDs from being auto-\n llocated by Cloud Datastore.\n\n Returns:\n Callable[[~.ReserveIdsRequest],\n ~.ReserveIdsResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n " if ('reserve_ids' not in self._stubs): self._stubs['reserve_ids'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/ReserveIds', request_serializer=datastore.ReserveIdsRequest.serialize, response_deserializer=datastore.ReserveIdsResponse.deserialize) return self._stubs['reserve_ids']
5,990,443,636,854,899,000
Return a callable for the reserve ids method over gRPC. Prevents the supplied keys' IDs from being auto-allocated by Cloud Datastore. Returns: Callable[[~.ReserveIdsRequest], ~.ReserveIdsResponse]: A function that, when called, will call the underlying RPC on the server.
google/cloud/datastore_v1/services/datastore/transports/grpc.py
reserve_ids
LaudateCorpus1/python-datastore
python
@property def reserve_ids(self) -> Callable[([datastore.ReserveIdsRequest], datastore.ReserveIdsResponse)]: "Return a callable for the reserve ids method over gRPC.\n\n Prevents the supplied keys' IDs from being auto-\n llocated by Cloud Datastore.\n\n Returns:\n Callable[[~.ReserveIdsRequest],\n ~.ReserveIdsResponse]:\n A function that, when called, will call the underlying RPC\n on the server.\n " if ('reserve_ids' not in self._stubs): self._stubs['reserve_ids'] = self.grpc_channel.unary_unary('/google.datastore.v1.Datastore/ReserveIds', request_serializer=datastore.ReserveIdsRequest.serialize, response_deserializer=datastore.ReserveIdsResponse.deserialize) return self._stubs['reserve_ids']
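The commit, rollback, allocate_ids, and reserve_ids transport properties above all share the same lazy stub-caching pattern: build the unary-unary RPC callable on first access, cache it in _stubs, and return the cached callable afterwards. A minimal, self-contained sketch of that pattern follows; FakeChannel and TransportSketch are hypothetical stand-ins, not the real grpc or google-cloud-datastore classes.

class FakeChannel:
    # Stand-in for a grpc.Channel; a real channel returns an RPC callable.
    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
        return lambda request: f"called {method} with {request!r}"

class TransportSketch:
    def __init__(self, channel):
        self.grpc_channel = channel
        self._stubs = {}

    @property
    def reserve_ids(self):
        # Build the stub once and cache it so repeated property access
        # reuses the same underlying callable.
        if 'reserve_ids' not in self._stubs:
            self._stubs['reserve_ids'] = self.grpc_channel.unary_unary(
                '/google.datastore.v1.Datastore/ReserveIds')
        return self._stubs['reserve_ids']

transport = TransportSketch(FakeChannel())
print(transport.reserve_ids({'project_id': 'demo', 'keys': []}))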
def _get_sample(sample_info): " Get sample from SampleService\n sample_info - dict containing 'id' and 'version' of a sample\n " headers = {'Authorization': config()['ws_token']} params = {'id': sample_info['id']} if sample_info.get('version'): params['version'] = sample_info['version'] payload = {'method': 'SampleService.get_sample', 'id': '', 'params': [params], 'version': '1.1'} resp = requests.post(url=config()['sample_service_url'], headers=headers, data=json.dumps(payload)) if (not resp.ok): raise RuntimeError(f'Returned from sample service with status {resp.status_code} - {resp.text}') resp_json = resp.json() if resp_json.get('error'): raise RuntimeError(f"Error from SampleService - {resp_json['error']}") sample = resp_json['result'][0] return sample
-5,702,704,638,798,731,000
Get sample from SampleService sample_info - dict containing 'id' and 'version' of a sample
src/index_runner/es_indexers/sample_set.py
_get_sample
slebras/index_runner
python
def _get_sample(sample_info): " Get sample from SampleService\n sample_info - dict containing 'id' and 'version' of a sample\n " headers = {'Authorization': config()['ws_token']} params = {'id': sample_info['id']} if sample_info.get('version'): params['version'] = sample_info['version'] payload = {'method': 'SampleService.get_sample', 'id': '', 'params': [params], 'version': '1.1'} resp = requests.post(url=config()['sample_service_url'], headers=headers, data=json.dumps(payload)) if (not resp.ok): raise RuntimeError(f'Returned from sample service with status {resp.status_code} - {resp.text}') resp_json = resp.json() if resp_json.get('error'): raise RuntimeError(f"Error from SampleService - {resp_json['error']}") sample = resp_json['result'][0] return sample
def _flatten_meta(meta, prefix=None): ' Flattens metadata fields in a Sample object. Fields are concatenated into a\n single string field to save into an Elasticsearch index\n meta - Sample Metadata to be flattened\n prefix - (optional) prefix for the metadata values. default=None\n ' new_meta = {} for key in meta: if prefix: val = (prefix + ':') else: val = '' if ('value' in meta[key]): val += str(meta[key]['value']) if ('units' in meta[key]): val += (';' + str(meta[key]['units'])) new_meta[key] = val return new_meta
1,436,780,520,164,271,400
Flattens metadata fields in a Sample object. Fields are concatenated into a single string field to save into an Elasticsearch index meta - Sample Metadata to be flattened prefix - (optional) prefix for the metadata values. default=None
src/index_runner/es_indexers/sample_set.py
_flatten_meta
slebras/index_runner
python
def _flatten_meta(meta, prefix=None): ' Flattens metadata fields in a Sample object. Fields are concatenated into a\n single string field to save into an Elasticsearch index\n meta - Sample Metadata to be flattened\n prefix - (optional) prefix for the metadata values. default=None\n ' new_meta = {} for key in meta: if prefix: val = (prefix + ':') else: val = '' if ('value' in meta[key]): val += str(meta[key]['value']) if ('units' in meta[key]): val += (';' + str(meta[key]['units'])) new_meta[key] = val return new_meta
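A small worked example of the flattening rule above, using made-up sample metadata (the 'depth' and 'material' keys are illustrative, not from a real KBase sample); the helper simply mirrors the indexer's logic so the input/output pairing is visible.

def flatten_meta(meta, prefix=None):
    # Same concatenation rule as _flatten_meta above: "value;units", optionally prefixed.
    new_meta = {}
    for key in meta:
        val = (prefix + ':') if prefix else ''
        if 'value' in meta[key]:
            val += str(meta[key]['value'])
        if 'units' in meta[key]:
            val += ';' + str(meta[key]['units'])
        new_meta[key] = val
    return new_meta

meta = {'depth': {'value': 12.5, 'units': 'm'}, 'material': {'value': 'soil'}}
print(flatten_meta(meta))               # {'depth': '12.5;m', 'material': 'soil'}
print(flatten_meta(meta, prefix='A'))   # {'depth': 'A:12.5;m', 'material': 'A:soil'}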
def _combine_meta(meta, flattened_meta, idx): ' Combine newly flattened metadata with existing metadata. This Function is designed to keep the indexing\n of the different metadata fields consistent for each node within the sample node tree s.t. all the\n fields in index (idx) 0 will be from item 0 in the node tree. Empty string ("") entries are Empty and\n added simply so that the indexing of all fields line up.\n meta - existing metadata.\n flattened_meta - newly flattened metadata.\n idx - current index of ndoe_tree.\n ' for key in flattened_meta: if (key in meta): meta[key] += (['' for _ in range((idx - len(meta[key])))] + [flattened_meta[key]]) else: meta[key] = (['' for _ in range(idx)] + [flattened_meta[key]]) return meta
-8,150,693,350,067,738,000
Combine newly flattened metadata with existing metadata. This function is designed to keep the indexing of the different metadata fields consistent for each node within the sample node tree s.t. all the fields in index (idx) 0 will be from item 0 in the node tree. Empty string ("") entries are empty placeholders, added simply so that the indexing of all fields lines up. meta - existing metadata. flattened_meta - newly flattened metadata. idx - current index of node_tree.
src/index_runner/es_indexers/sample_set.py
_combine_meta
slebras/index_runner
python
def _combine_meta(meta, flattened_meta, idx): ' Combine newly flattened metadata with existing metadata. This Function is designed to keep the indexing\n of the different metadata fields consistent for each node within the sample node tree s.t. all the\n fields in index (idx) 0 will be from item 0 in the node tree. Empty string ("") entries are Empty and\n added simply so that the indexing of all fields line up.\n meta - existing metadata.\n flattened_meta - newly flattened metadata.\n idx - current index of ndoe_tree.\n ' for key in flattened_meta: if (key in meta): meta[key] += (['' for _ in range((idx - len(meta[key])))] + [flattened_meta[key]]) else: meta[key] = (['' for _ in range(idx)] + [flattened_meta[key]]) return meta
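To see how the padding keeps per-node metadata aligned, here is a short sketch that reuses the same combine logic on two hypothetical nodes, each carrying a different metadata key; position i in every list ends up referring to node i.

def combine_meta(meta, flattened_meta, idx):
    # Same padding rule as _combine_meta above.
    for key in flattened_meta:
        if key in meta:
            meta[key] += ['' for _ in range(idx - len(meta[key]))] + [flattened_meta[key]]
        else:
            meta[key] = ['' for _ in range(idx)] + [flattened_meta[key]]
    return meta

combined = {}
combined = combine_meta(combined, {'depth': '12.5;m'}, 0)   # node 0 has only 'depth'
combined = combine_meta(combined, {'material': 'soil'}, 1)  # node 1 has only 'material'
print(combined)  # {'depth': ['12.5;m'], 'material': ['', 'soil']}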
def index_sample_set(obj_data, ws_info, obj_data_v1): 'Indexer for KBaseSets.SampleSet object type' info = obj_data['info'] if (not obj_data.get('data')): raise Exception('no data in object') data = obj_data['data'] workspace_id = info[6] object_id = info[0] version = info[4] sample_set_id = f'{_NAMESPACE}::{workspace_id}:{object_id}' ver_sample_set_id = f'{_VER_NAMESPACE}::{workspace_id}:{object_id}:{version}' sample_set_index = {'_action': 'index', 'doc': {'description': data['description'], 'sample_ids': [s['id'] for s in data['samples']], 'sample_names': [s['name'] for s in data['samples']], 'sample_versions': [s['version'] for s in data['samples']]}, 'index': _SAMPLE_SET_INDEX_NAME, 'id': sample_set_id} (yield sample_set_index) ver_sample_set_index = dict(sample_set_index) ver_sample_set_index['index'] = _VER_SAMPLE_SET_INDEX_NAME ver_sample_set_index['id'] = ver_sample_set_id (yield ver_sample_set_index) for samp in data['samples']: sample = _get_sample(samp) sample_id = f"{_SAMPLE_NAMESPACE}::{sample['id']}:{sample['version']}" if (len(sample['node_tree']) == 1): meta_controlled = _flatten_meta(sample['node_tree'][0]['meta_controlled']) meta_user = _flatten_meta(sample['node_tree'][0]['meta_user']) meta_controlled['node_id'] = sample['node_tree'][0]['id'] else: (meta_controlled, meta_user) = ({}, {}) for (idx, node) in enumerate(sample['node_tree']): meta_controlled = _combine_meta(meta_controlled, _flatten_meta(node['meta_controlled']), idx) meta_user = _combine_meta(meta_user, _flatten_meta(node['meta_user']), idx) meta_controlled['node_id'] = node['id'] sample_index = {'_action': 'index', 'doc': {'save_date': sample['save_date'], 'sample_version': sample['version'], 'name': sample['name'], 'parent_id': sample_set_id, **meta_user, **meta_controlled}, 'index': _SAMPLE_INDEX_NAME, 'id': sample_id} (yield sample_index)
4,770,756,739,175,246,000
Indexer for KBaseSets.SampleSet object type
src/index_runner/es_indexers/sample_set.py
index_sample_set
slebras/index_runner
python
def index_sample_set(obj_data, ws_info, obj_data_v1): info = obj_data['info'] if (not obj_data.get('data')): raise Exception('no data in object') data = obj_data['data'] workspace_id = info[6] object_id = info[0] version = info[4] sample_set_id = f'{_NAMESPACE}::{workspace_id}:{object_id}' ver_sample_set_id = f'{_VER_NAMESPACE}::{workspace_id}:{object_id}:{version}' sample_set_index = {'_action': 'index', 'doc': {'description': data['description'], 'sample_ids': [s['id'] for s in data['samples']], 'sample_names': [s['name'] for s in data['samples']], 'sample_versions': [s['version'] for s in data['samples']]}, 'index': _SAMPLE_SET_INDEX_NAME, 'id': sample_set_id} (yield sample_set_index) ver_sample_set_index = dict(sample_set_index) ver_sample_set_index['index'] = _VER_SAMPLE_SET_INDEX_NAME ver_sample_set_index['id'] = ver_sample_set_id (yield ver_sample_set_index) for samp in data['samples']: sample = _get_sample(samp) sample_id = f"{_SAMPLE_NAMESPACE}::{sample['id']}:{sample['version']}" if (len(sample['node_tree']) == 1): meta_controlled = _flatten_meta(sample['node_tree'][0]['meta_controlled']) meta_user = _flatten_meta(sample['node_tree'][0]['meta_user']) meta_controlled['node_id'] = sample['node_tree'][0]['id'] else: (meta_controlled, meta_user) = ({}, {}) for (idx, node) in enumerate(sample['node_tree']): meta_controlled = _combine_meta(meta_controlled, _flatten_meta(node['meta_controlled']), idx) meta_user = _combine_meta(meta_user, _flatten_meta(node['meta_user']), idx) meta_controlled['node_id'] = node['id'] sample_index = {'_action': 'index', 'doc': {'save_date': sample['save_date'], 'sample_version': sample['version'], 'name': sample['name'], 'parent_id': sample_set_id, **meta_user, **meta_controlled}, 'index': _SAMPLE_INDEX_NAME, 'id': sample_id} (yield sample_index)
def loginValid(func): '\n :desc 闭包函数校验是否登录\n :param func:\n :return:\n ' def inner(request, *args, **kwargs): email = request.COOKIES.get('user') s_email = request.session.get('user') if (email and s_email and (email == s_email)): user = LoginUser.objects.filter(email=email).first() if user: return func(request, *args, **kwargs) return HttpResponseRedirect('/Buyer/login/') return inner
4,984,156,748,269,809,000
:desc Closure decorator that checks whether the user is logged in :param func: :return:
Qshop/Buyer/views.py
loginValid
songdanlee/DjangoWorkSpace
python
def loginValid(func): '\n :desc 闭包函数校验是否登录\n :param func:\n :return:\n ' def inner(request, *args, **kwargs): email = request.COOKIES.get('user') s_email = request.session.get('user') if (email and s_email and (email == s_email)): user = LoginUser.objects.filter(email=email).first() if user: return func(request, *args, **kwargs) return HttpResponseRedirect('/Buyer/login/') return inner
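A hedged sketch of how a cookie/session-checking decorator like loginValid is typically applied to a Django view; login_required_sketch is a simplified hypothetical version (it skips the database lookup), and the cart_list view and its template are illustrative, not taken from the Qshop project.

from django.http import HttpResponseRedirect
from django.shortcuts import render

def login_required_sketch(func):
    def inner(request, *args, **kwargs):
        # Compare the email stored in the cookie with the one in the session.
        email = request.COOKIES.get('user')
        s_email = request.session.get('user')
        if email and s_email and email == s_email:
            return func(request, *args, **kwargs)
        return HttpResponseRedirect('/Buyer/login/')
    return inner

@login_required_sketch
def cart_list(request):
    # Only reached when the check passes; otherwise the decorator redirects.
    return render(request, 'buyer/cart.html')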
@loginValid def pay_order(request): '\n get请求 商品详情页购买单个商品。传入商品id,数量。\n post请求 购物车购买多个商品。\n ' if (request.method == 'GET'): num = request.GET.get('num') id = request.GET.get('id') if (num and id): num = int(num) id = int(id) order = PayOrder() order.order_number = str(time.time()).replace('.', '') order.order_date = datetime.datetime.now() order.order_user = LoginUser.objects.get(id=int(request.COOKIES.get('user_id'))) order.save() good = Goods.objects.get(id=id) order_info = OrderInfo() order_info.order_id = order order_info.goods_id = good.id order_info.goods_picture = good.goods_picture order_info.goods_name = good.goods_name order_info.goods_count = num order_info.goods_price = good.goods_price order_info.goods_total_price = round((good.goods_price * num), 3) order_info.store_id = good.goods_store order_info.order_status = 0 order_info.save() order.order_total = order_info.goods_total_price order.save() elif (request.method == 'POST'): request_data = [] data = request.POST data_item = request.POST.items() for (key, value) in data_item: if key.startswith('check_'): id = int(key.split('_', 1)[1]) num = int(data.get(('count_' + str(id)))) request_data.append((id, num)) if request_data: order = PayOrder() order.order_number = str(time.time()).replace('.', '') order.order_date = datetime.datetime.now() order.order_user = LoginUser.objects.get(id=int(request.COOKIES.get('user_id'))) order.order_total = 0.0 order.goods_number = 0 order.save() for (id, num) in request_data: good = Goods.objects.get(id=id) order_info = OrderInfo() order_info.order_id = order order_info.goods_id = good.id order_info.goods_picture = good.goods_picture order_info.goods_name = good.goods_name order_info.goods_count = num order_info.goods_price = good.goods_price order_info.goods_total_price = round((good.goods_price * num), 3) order_info.store_id = good.goods_store order_info.order_status = 0 order_info.save() order.order_total += order_info.goods_total_price order.goods_number += 1 order.save() return render(request, 'buyer/place_order.html', locals())
9,212,065,408,716,652,000
GET request: buy a single product from the product detail page; the product id and quantity are passed in. POST request: buy multiple products from the shopping cart.
Qshop/Buyer/views.py
pay_order
songdanlee/DjangoWorkSpace
python
@loginValid def pay_order(request): '\n get请求 商品详情页购买单个商品。传入商品id,数量。\n post请求 购物车购买多个商品。\n ' if (request.method == 'GET'): num = request.GET.get('num') id = request.GET.get('id') if (num and id): num = int(num) id = int(id) order = PayOrder() order.order_number = str(time.time()).replace('.', '') order.order_date = datetime.datetime.now() order.order_user = LoginUser.objects.get(id=int(request.COOKIES.get('user_id'))) order.save() good = Goods.objects.get(id=id) order_info = OrderInfo() order_info.order_id = order order_info.goods_id = good.id order_info.goods_picture = good.goods_picture order_info.goods_name = good.goods_name order_info.goods_count = num order_info.goods_price = good.goods_price order_info.goods_total_price = round((good.goods_price * num), 3) order_info.store_id = good.goods_store order_info.order_status = 0 order_info.save() order.order_total = order_info.goods_total_price order.save() elif (request.method == 'POST'): request_data = [] data = request.POST data_item = request.POST.items() for (key, value) in data_item: if key.startswith('check_'): id = int(key.split('_', 1)[1]) num = int(data.get(('count_' + str(id)))) request_data.append((id, num)) if request_data: order = PayOrder() order.order_number = str(time.time()).replace('.', '') order.order_date = datetime.datetime.now() order.order_user = LoginUser.objects.get(id=int(request.COOKIES.get('user_id'))) order.order_total = 0.0 order.goods_number = 0 order.save() for (id, num) in request_data: good = Goods.objects.get(id=id) order_info = OrderInfo() order_info.order_id = order order_info.goods_id = good.id order_info.goods_picture = good.goods_picture order_info.goods_name = good.goods_name order_info.goods_count = num order_info.goods_price = good.goods_price order_info.goods_total_price = round((good.goods_price * num), 3) order_info.store_id = good.goods_store order_info.order_status = 0 order_info.save() order.order_total += order_info.goods_total_price order.goods_number += 1 order.save() return render(request, 'buyer/place_order.html', locals())
@loginValid def alipayOrder(request): '\n 阿里支付,传入交易订单号,总金额\n ' order_number = request.GET.get('order_number') total = request.GET.get('total') alipay = AliPay(appid='2016101200667714', app_notify_url=None, app_private_key_string=alipay_private_key_string, alipay_public_key_string=alipay_public_key_string, sign_type='RSA2') order_string = alipay.api_alipay_trade_page_pay(out_trade_no=order_number, total_amount=str(total), subject='生鲜交易', return_url='http://127.0.0.1:8000/Buyer/pay_result/', notify_url='http://127.0.0.1:8000/Buyer/pay_result/') result = ('https://openapi.alipaydev.com/gateway.do?' + order_string) return HttpResponseRedirect(result)
-6,322,852,548,281,581,000
Alipay payment; takes the transaction order number and the total amount.
Qshop/Buyer/views.py
alipayOrder
songdanlee/DjangoWorkSpace
python
@loginValid def alipayOrder(request): '\n \n ' order_number = request.GET.get('order_number') total = request.GET.get('total') alipay = AliPay(appid='2016101200667714', app_notify_url=None, app_private_key_string=alipay_private_key_string, alipay_public_key_string=alipay_public_key_string, sign_type='RSA2') order_string = alipay.api_alipay_trade_page_pay(out_trade_no=order_number, total_amount=str(total), subject='生鲜交易', return_url='http://127.0.0.1:8000/Buyer/pay_result/', notify_url='http://127.0.0.1:8000/Buyer/pay_result/') result = ('https://openapi.alipaydev.com/gateway.do?' + order_string) return HttpResponseRedirect(result)
@loginValid def pay_result(request): '\n 支付结果页\n 如果有out_trade_no,支付成功,修改订单状态\n ' out_trade_no = request.GET.get('out_trade_no') if out_trade_no: payorder = PayOrder.objects.get(order_number=out_trade_no) payorder.orderinfo_set.all().update(order_status=1) return render(request, 'buyer/pay_result.html', locals())
-7,832,803,620,661,841,000
Payment result page. If out_trade_no is present, the payment succeeded and the order status is updated.
Qshop/Buyer/views.py
pay_result
songdanlee/DjangoWorkSpace
python
@loginValid def pay_result(request): '\n 支付结果页\n 如果有out_trade_no,支付成功,修改订单状态\n ' out_trade_no = request.GET.get('out_trade_no') if out_trade_no: payorder = PayOrder.objects.get(order_number=out_trade_no) payorder.orderinfo_set.all().update(order_status=1) return render(request, 'buyer/pay_result.html', locals())
@loginValid def add_cart(request): '\n 处理ajax 请求,添加商品到购物车 ,成功保存到数据库。\n 传入商品id,数量\n ' sendData = {'code': 200, 'data': ''} if (request.method == 'POST'): id = int(request.POST.get('goods_id')) count = int(request.POST.get('count', 1)) goods = Goods.objects.get(id=id) cart = Cart() cart.goods_name = goods.goods_name cart.goods_num = count cart.goods_price = goods.goods_price cart.goods_picture = goods.goods_picture cart.goods_total = round((goods.goods_price * count), 3) cart.goods_id = goods.id cart.cart_user = request.COOKIES.get('user_id') cart.save() sendData['data'] = '加入购物车成功' else: sendData['code'] = 500 sendData['data'] = '请求方式错误' return JsonResponse(sendData)
348,866,261,690,652,700
Handles an AJAX request that adds a product to the shopping cart; on success the item is saved to the database. Takes the product id and quantity.
Qshop/Buyer/views.py
add_cart
songdanlee/DjangoWorkSpace
python
@loginValid def add_cart(request): '\n 处理ajax 请求,添加商品到购物车 ,成功保存到数据库。\n 传入商品id,数量\n ' sendData = {'code': 200, 'data': ''} if (request.method == 'POST'): id = int(request.POST.get('goods_id')) count = int(request.POST.get('count', 1)) goods = Goods.objects.get(id=id) cart = Cart() cart.goods_name = goods.goods_name cart.goods_num = count cart.goods_price = goods.goods_price cart.goods_picture = goods.goods_picture cart.goods_total = round((goods.goods_price * count), 3) cart.goods_id = goods.id cart.cart_user = request.COOKIES.get('user_id') cart.save() sendData['data'] = '加入购物车成功' else: sendData['code'] = 500 sendData['data'] = '请求方式错误' return JsonResponse(sendData)
@databench.on def run(self): 'Run when button is pressed.' inside = 0 for draws in range(1, self.data['samples']): r1 = random.random() r2 = random.random() if (((r1 ** 2) + (r2 ** 2)) < 1.0): inside += 1 if ((draws % 1000) != 0): continue (yield self.emit('log', {'draws': draws, 'inside': inside})) p = (inside / draws) pi = {'estimate': (4.0 * p), 'uncertainty': ((4.0 * math.sqrt(((draws * p) * (1.0 - p)))) / draws)} (yield self.set_state(pi=pi)) (yield self.emit('log', {'action': 'done'}))
-4,587,078,849,969,560,000
Run when button is pressed.
databench/analyses_packaged/dummypi/analysis.py
run
phillipaug/Data-Analysis-General-repository
python
@databench.on def run(self): inside = 0 for draws in range(1, self.data['samples']): r1 = random.random() r2 = random.random() if (((r1 ** 2) + (r2 ** 2)) < 1.0): inside += 1 if ((draws % 1000) != 0): continue (yield self.emit('log', {'draws': draws, 'inside': inside})) p = (inside / draws) pi = {'estimate': (4.0 * p), 'uncertainty': ((4.0 * math.sqrt(((draws * p) * (1.0 - p)))) / draws)} (yield self.set_state(pi=pi)) (yield self.emit('log', {'action': 'done'}))
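The analysis above estimates pi from uniform draws in the unit square: the fraction p of points falling inside the quarter circle estimates pi/4, and the reported uncertainty is the binomial standard error scaled by 4. A framework-free sketch of the same calculation, independent of databench:

import math
import random

def estimate_pi(samples=100_000):
    # Count draws that land inside the unit quarter circle.
    inside = sum(1 for _ in range(samples)
                 if random.random() ** 2 + random.random() ** 2 < 1.0)
    p = inside / samples
    estimate = 4.0 * p
    # Same uncertainty formula as in the analysis: 4 * sqrt(n*p*(1-p)) / n.
    uncertainty = 4.0 * math.sqrt(samples * p * (1.0 - p)) / samples
    return estimate, uncertainty

print(estimate_pi())  # roughly (3.14, 0.005), up to sampling noise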
def generate_ui_test_task(dependencies, engine='Klar', device='ARM'): '\n :param str engine: Klar, Webview\n :param str device: ARM, X86\n :return: uiWebviewARMTestTaskId, uiWebviewARMTestTask\n ' if (engine is 'Klar'): engine = 'geckoview' assemble_engine = engine elif (engine is 'Webview'): engine = 'webview' assemble_engine = 'Focus' else: raise Exception('ERROR: unknown engine type --> Aborting!') task_name = '(Focus for Android) UI tests - {0} {1}'.format(engine, device) task_description = 'Run UI tests for {0} {1} build for Android.'.format(engine, device) build_dir = 'assemble{0}{1}Debug'.format(assemble_engine, device.capitalize()) build_dir_test = 'assemble{0}{1}DebugAndroidTest'.format(assemble_engine, device.capitalize()) print('BUILD_DIR: {0}'.format(build_dir)) print('BUILD_DIR_TEST: {0}'.format(build_dir_test)) device = device.lower() return (taskcluster.slugId(), generate_task(name=task_name, description=task_description, command=((((((('echo "--" > .adjust_token && ./gradlew --no-daemon clean ' + build_dir) + ' ') + build_dir_test) + ' && ./tools/taskcluster/google-firebase-testlab-login.sh && tools/taskcluster/execute-firebase-tests.sh ') + device) + ' ') + engine), dependencies=dependencies, scopes=['secrets:get:project/focus/firebase'], routes=['notify.irc-channel.#android-ci.on-any'], artifacts={'public': {'type': 'directory', 'path': '/opt/focus-android/test_artifacts', 'expires': taskcluster.stringDate(taskcluster.fromNow('1 week'))}}))
7,921,102,924,883,363,000
:param str engine: Klar, Webview :param str device: ARM, X86 :return: uiWebviewARMTestTaskId, uiWebviewARMTestTask
tools/taskcluster/schedule-master-build.py
generate_ui_test_task
kglazko/focus-android
python
def generate_ui_test_task(dependencies, engine='Klar', device='ARM'): '\n :param str engine: Klar, Webview\n :param str device: ARM, X86\n :return: uiWebviewARMTestTaskId, uiWebviewARMTestTask\n ' if (engine is 'Klar'): engine = 'geckoview' assemble_engine = engine elif (engine is 'Webview'): engine = 'webview' assemble_engine = 'Focus' else: raise Exception('ERROR: unknown engine type --> Aborting!') task_name = '(Focus for Android) UI tests - {0} {1}'.format(engine, device) task_description = 'Run UI tests for {0} {1} build for Android.'.format(engine, device) build_dir = 'assemble{0}{1}Debug'.format(assemble_engine, device.capitalize()) build_dir_test = 'assemble{0}{1}DebugAndroidTest'.format(assemble_engine, device.capitalize()) print('BUILD_DIR: {0}'.format(build_dir)) print('BUILD_DIR_TEST: {0}'.format(build_dir_test)) device = device.lower() return (taskcluster.slugId(), generate_task(name=task_name, description=task_description, command=((((((('echo "--" > .adjust_token && ./gradlew --no-daemon clean ' + build_dir) + ' ') + build_dir_test) + ' && ./tools/taskcluster/google-firebase-testlab-login.sh && tools/taskcluster/execute-firebase-tests.sh ') + device) + ' ') + engine), dependencies=dependencies, scopes=['secrets:get:project/focus/firebase'], routes=['notify.irc-channel.#android-ci.on-any'], artifacts={'public': {'type': 'directory', 'path': '/opt/focus-android/test_artifacts', 'expires': taskcluster.stringDate(taskcluster.fromNow('1 week'))}}))
def test_identity_expectation(self, device, shots, tol): 'Test that identity expectation value (i.e. the trace) is 1' theta = 0.432 phi = 0.123 dev = device(2) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.CNOT(wires=[0, 1])]) O = qml.Identity name = 'Identity' dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([1, 1]), **tol)
7,841,059,412,821,732,000
Test that identity expectation value (i.e. the trace) is 1
tests/test_expval.py
test_identity_expectation
wongwsvincent/pennylane-cirq
python
def test_identity_expectation(self, device, shots, tol): theta = 0.432 phi = 0.123 dev = device(2) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.CNOT(wires=[0, 1])]) O = qml.Identity name = 'Identity' dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([1, 1]), **tol)
def test_pauliz_expectation(self, device, shots, tol): 'Test that PauliZ expectation value is correct' theta = 0.432 phi = 0.123 dev = device(2) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.CNOT(wires=[0, 1])]) O = qml.PauliZ name = 'PauliZ' dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([np.cos(theta), (np.cos(theta) * np.cos(phi))]), **tol)
3,565,328,443,008,282,600
Test that PauliZ expectation value is correct
tests/test_expval.py
test_pauliz_expectation
wongwsvincent/pennylane-cirq
python
def test_pauliz_expectation(self, device, shots, tol): theta = 0.432 phi = 0.123 dev = device(2) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.CNOT(wires=[0, 1])]) O = qml.PauliZ name = 'PauliZ' dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([np.cos(theta), (np.cos(theta) * np.cos(phi))]), **tol)
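The values asserted in these expectation tests follow from plain state-vector algebra: after RX(theta) on wire 0, RX(phi) on wire 1, and a CNOT, <Z0> = cos(theta) and <Z1> = cos(theta)*cos(phi). A small NumPy check of that claim, independent of PennyLane or Cirq:

import numpy as np

theta, phi = 0.432, 0.123
I = np.eye(2)
X = np.array([[0, 1], [1, 0]])
Z = np.array([[1, 0], [0, -1]])
RX = lambda a: np.cos(a / 2) * I - 1j * np.sin(a / 2) * X
CNOT = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]])

# Prepare |00>, apply RX(theta) x RX(phi), then CNOT with wire 0 as control.
state = CNOT @ np.kron(RX(theta), RX(phi)) @ np.array([1, 0, 0, 0])
expval = lambda op: np.real(state.conj() @ op @ state)

print(np.isclose(expval(np.kron(Z, I)), np.cos(theta)))                # True
print(np.isclose(expval(np.kron(I, Z)), np.cos(theta) * np.cos(phi)))  # True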
def test_paulix_expectation(self, device, shots, tol): 'Test that PauliX expectation value is correct' theta = 0.432 phi = 0.123 dev = device(2) O = qml.PauliX with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(wires=[0], do_queue=False).diagonalizing_gates() + O(wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([(np.sin(theta) * np.sin(phi)), np.sin(phi)]), **tol)
5,108,788,294,049,689,000
Test that PauliX expectation value is correct
tests/test_expval.py
test_paulix_expectation
wongwsvincent/pennylane-cirq
python
def test_paulix_expectation(self, device, shots, tol): theta = 0.432 phi = 0.123 dev = device(2) O = qml.PauliX with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(wires=[0], do_queue=False).diagonalizing_gates() + O(wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([(np.sin(theta) * np.sin(phi)), np.sin(phi)]), **tol)
def test_pauliy_expectation(self, device, shots, tol): 'Test that PauliY expectation value is correct' theta = 0.432 phi = 0.123 dev = device(2) O = qml.PauliY with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(wires=[0], do_queue=False).diagonalizing_gates() + O(wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([0, ((- np.cos(theta)) * np.sin(phi))]), **tol)
4,112,659,539,980,843,500
Test that PauliY expectation value is correct
tests/test_expval.py
test_pauliy_expectation
wongwsvincent/pennylane-cirq
python
def test_pauliy_expectation(self, device, shots, tol): theta = 0.432 phi = 0.123 dev = device(2) O = qml.PauliY with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(wires=[0], do_queue=False).diagonalizing_gates() + O(wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) assert np.allclose(res, np.array([0, ((- np.cos(theta)) * np.sin(phi))]), **tol)
def test_hadamard_expectation(self, device, shots, tol): 'Test that Hadamard expectation value is correct' theta = 0.432 phi = 0.123 dev = device(2) O = qml.Hadamard with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(wires=[0], do_queue=False).diagonalizing_gates() + O(wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) expected = (np.array([((np.sin(theta) * np.sin(phi)) + np.cos(theta)), ((np.cos(theta) * np.cos(phi)) + np.sin(phi))]) / np.sqrt(2)) assert np.allclose(res, expected, **tol)
4,595,716,068,264,793,600
Test that Hadamard expectation value is correct
tests/test_expval.py
test_hadamard_expectation
wongwsvincent/pennylane-cirq
python
def test_hadamard_expectation(self, device, shots, tol): theta = 0.432 phi = 0.123 dev = device(2) O = qml.Hadamard with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(wires=[0], do_queue=False).diagonalizing_gates() + O(wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(wires=[0], do_queue=False), O(wires=[1], do_queue=False)] res = np.array([dev.expval(O(wires=[0], do_queue=False)), dev.expval(O(wires=[1], do_queue=False))]) expected = (np.array([((np.sin(theta) * np.sin(phi)) + np.cos(theta)), ((np.cos(theta) * np.cos(phi)) + np.sin(phi))]) / np.sqrt(2)) assert np.allclose(res, expected, **tol)
def test_hermitian_expectation(self, device, shots, tol): 'Test that arbitrary Hermitian expectation values are correct' theta = 0.432 phi = 0.123 dev = device(2) O = qml.Hermitian with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(A, wires=[0], do_queue=False).diagonalizing_gates() + O(A, wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(A, wires=[0], do_queue=False), O(A, wires=[1], do_queue=False)] res = np.array([dev.expval(O(A, wires=[0], do_queue=False)), dev.expval(O(A, wires=[1], do_queue=False))]) a = A[(0, 0)] re_b = A[(0, 1)].real d = A[(1, 1)] ev1 = ((((((a - d) * np.cos(theta)) + (((2 * re_b) * np.sin(theta)) * np.sin(phi))) + a) + d) / 2) ev2 = (((((((a - d) * np.cos(theta)) * np.cos(phi)) + ((2 * re_b) * np.sin(phi))) + a) + d) / 2) expected = np.array([ev1, ev2]) assert np.allclose(res, expected, **tol)
6,151,081,162,952,617,000
Test that arbitrary Hermitian expectation values are correct
tests/test_expval.py
test_hermitian_expectation
wongwsvincent/pennylane-cirq
python
def test_hermitian_expectation(self, device, shots, tol): theta = 0.432 phi = 0.123 dev = device(2) O = qml.Hermitian with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=(O(A, wires=[0], do_queue=False).diagonalizing_gates() + O(A, wires=[1], do_queue=False).diagonalizing_gates())) dev._obs_queue = [O(A, wires=[0], do_queue=False), O(A, wires=[1], do_queue=False)] res = np.array([dev.expval(O(A, wires=[0], do_queue=False)), dev.expval(O(A, wires=[1], do_queue=False))]) a = A[(0, 0)] re_b = A[(0, 1)].real d = A[(1, 1)] ev1 = ((((((a - d) * np.cos(theta)) + (((2 * re_b) * np.sin(theta)) * np.sin(phi))) + a) + d) / 2) ev2 = (((((((a - d) * np.cos(theta)) * np.cos(phi)) + ((2 * re_b) * np.sin(phi))) + a) + d) / 2) expected = np.array([ev1, ev2]) assert np.allclose(res, expected, **tol)
def test_multi_mode_hermitian_expectation(self, device, shots, tol): 'Test that arbitrary multi-mode Hermitian expectation values are correct' theta = 0.432 phi = 0.123 dev = device(2) O = qml.Hermitian with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=O(B, wires=[0, 1], do_queue=False).diagonalizing_gates()) dev._obs_queue = [O(B, wires=[0, 1], do_queue=False)] res = np.array([dev.expval(O(B, wires=[0, 1], do_queue=False))]) expected = (0.5 * ((((((6 * np.cos(theta)) * np.sin(phi)) - (np.sin(theta) * (((8 * np.sin(phi)) + (7 * np.cos(phi))) + 3))) - (2 * np.sin(phi))) - (6 * np.cos(phi))) - 6)) assert np.allclose(res, expected, **tol)
2,659,470,803,272,884,700
Test that arbitrary multi-mode Hermitian expectation values are correct
tests/test_expval.py
test_multi_mode_hermitian_expectation
wongwsvincent/pennylane-cirq
python
def test_multi_mode_hermitian_expectation(self, device, shots, tol): theta = 0.432 phi = 0.123 dev = device(2) O = qml.Hermitian with mimic_execution_for_expval(dev): dev.apply([qml.RY(theta, wires=[0]), qml.RY(phi, wires=[1]), qml.CNOT(wires=[0, 1])], rotations=O(B, wires=[0, 1], do_queue=False).diagonalizing_gates()) dev._obs_queue = [O(B, wires=[0, 1], do_queue=False)] res = np.array([dev.expval(O(B, wires=[0, 1], do_queue=False))]) expected = (0.5 * ((((((6 * np.cos(theta)) * np.sin(phi)) - (np.sin(theta) * (((8 * np.sin(phi)) + (7 * np.cos(phi))) + 3))) - (2 * np.sin(phi))) - (6 * np.cos(phi))) - 6)) assert np.allclose(res, expected, **tol)
def test_paulix_pauliy(self, device, shots, tol): 'Test that a tensor product involving PauliX and PauliY works correctly' theta = 0.432 phi = 0.123 varphi = (- 0.543) dev = device(3) obs = (qml.PauliX(wires=[0], do_queue=False) @ qml.PauliY(wires=[2], do_queue=False)) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.RX(varphi, wires=[2]), qml.CNOT(wires=[0, 1]), qml.CNOT(wires=[1, 2])], rotations=obs.diagonalizing_gates()) res = dev.expval(obs) expected = ((np.sin(theta) * np.sin(phi)) * np.sin(varphi)) assert np.allclose(res, expected, **tol)
3,162,494,482,914,384,000
Test that a tensor product involving PauliX and PauliY works correctly
tests/test_expval.py
test_paulix_pauliy
wongwsvincent/pennylane-cirq
python
def test_paulix_pauliy(self, device, shots, tol): theta = 0.432 phi = 0.123 varphi = (- 0.543) dev = device(3) obs = (qml.PauliX(wires=[0], do_queue=False) @ qml.PauliY(wires=[2], do_queue=False)) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.RX(varphi, wires=[2]), qml.CNOT(wires=[0, 1]), qml.CNOT(wires=[1, 2])], rotations=obs.diagonalizing_gates()) res = dev.expval(obs) expected = ((np.sin(theta) * np.sin(phi)) * np.sin(varphi)) assert np.allclose(res, expected, **tol)
def test_pauliz_hadamard(self, device, shots, tol): 'Test that a tensor product involving PauliZ and PauliY and hadamard works correctly' theta = 0.432 phi = 0.123 varphi = (- 0.543) dev = device(3) obs = ((qml.PauliZ(wires=[0], do_queue=False) @ qml.Hadamard(wires=[1], do_queue=False)) @ qml.PauliY(wires=[2], do_queue=False)) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.RX(varphi, wires=[2]), qml.CNOT(wires=[0, 1]), qml.CNOT(wires=[1, 2])], rotations=obs.diagonalizing_gates()) res = dev.expval(obs) expected = ((- ((np.cos(varphi) * np.sin(phi)) + (np.sin(varphi) * np.cos(theta)))) / np.sqrt(2)) assert np.allclose(res, expected, **tol)
-1,377,457,128,183,673,600
Test that a tensor product involving PauliZ, Hadamard, and PauliY works correctly
tests/test_expval.py
test_pauliz_hadamard
wongwsvincent/pennylane-cirq
python
def test_pauliz_hadamard(self, device, shots, tol): theta = 0.432 phi = 0.123 varphi = (- 0.543) dev = device(3) obs = ((qml.PauliZ(wires=[0], do_queue=False) @ qml.Hadamard(wires=[1], do_queue=False)) @ qml.PauliY(wires=[2], do_queue=False)) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.RX(varphi, wires=[2]), qml.CNOT(wires=[0, 1]), qml.CNOT(wires=[1, 2])], rotations=obs.diagonalizing_gates()) res = dev.expval(obs) expected = ((- ((np.cos(varphi) * np.sin(phi)) + (np.sin(varphi) * np.cos(theta)))) / np.sqrt(2)) assert np.allclose(res, expected, **tol)
def test_hermitian(self, device, shots, tol): 'Test that a tensor product involving qml.Hermitian works correctly' theta = 0.432 phi = 0.123 varphi = (- 0.543) dev = device(3) A = np.array([[(- 6), (2 + 1j), (- 3), ((- 5) + 2j)], [(2 - 1j), 0, (2 - 1j), ((- 5) + 4j)], [(- 3), (2 + 1j), 0, ((- 4) + 3j)], [((- 5) - 2j), ((- 5) - 4j), ((- 4) - 3j), (- 6)]]) obs = (qml.PauliZ(wires=[0], do_queue=False) @ qml.Hermitian(A, wires=[1, 2], do_queue=False)) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.RX(varphi, wires=[2]), qml.CNOT(wires=[0, 1]), qml.CNOT(wires=[1, 2])], rotations=obs.diagonalizing_gates()) res = dev.expval(obs) expected = (0.5 * ((((((- 6) * np.cos(theta)) * (np.cos(varphi) + 1)) - ((2 * np.sin(varphi)) * ((np.cos(theta) + np.sin(phi)) - (2 * np.cos(phi))))) + ((3 * np.cos(varphi)) * np.sin(phi))) + np.sin(phi))) assert np.allclose(res, expected, **tol)
2,487,172,467,202,695,000
Test that a tensor product involving qml.Hermitian works correctly
tests/test_expval.py
test_hermitian
wongwsvincent/pennylane-cirq
python
def test_hermitian(self, device, shots, tol): theta = 0.432 phi = 0.123 varphi = (- 0.543) dev = device(3) A = np.array([[(- 6), (2 + 1j), (- 3), ((- 5) + 2j)], [(2 - 1j), 0, (2 - 1j), ((- 5) + 4j)], [(- 3), (2 + 1j), 0, ((- 4) + 3j)], [((- 5) - 2j), ((- 5) - 4j), ((- 4) - 3j), (- 6)]]) obs = (qml.PauliZ(wires=[0], do_queue=False) @ qml.Hermitian(A, wires=[1, 2], do_queue=False)) with mimic_execution_for_expval(dev): dev.apply([qml.RX(theta, wires=[0]), qml.RX(phi, wires=[1]), qml.RX(varphi, wires=[2]), qml.CNOT(wires=[0, 1]), qml.CNOT(wires=[1, 2])], rotations=obs.diagonalizing_gates()) res = dev.expval(obs) expected = (0.5 * ((((((- 6) * np.cos(theta)) * (np.cos(varphi) + 1)) - ((2 * np.sin(varphi)) * ((np.cos(theta) + np.sin(phi)) - (2 * np.cos(phi))))) + ((3 * np.cos(varphi)) * np.sin(phi))) + np.sin(phi))) assert np.allclose(res, expected, **tol)
def main(sys_argv: typing.List[str]) -> None: 'PV simulator execution entry point.\n\n Parameters\n ----------\n sys_argv : list\n contains the list of arguments passed to the CLI during its execution. The first argument contains the\n executed script name.\n ' main_logger: typing.Optional[logging.Logger] = None try: must_exit_after_24h = os.getenv('MUST_EXIT_AFTER_24H', '0') must_exit_after_24h = (True if (must_exit_after_24h.isdecimal() and (int(must_exit_after_24h) == 1)) else False) main_logger = utils.initialize_loggers(current_dir_path) main_loop: MainLoop = MainLoop(constants.LOGGER_NAME, constants.RESULTS_LOGGER_NAME, current_dir_path, must_exit_after_24h, get_mq_receiver, get_pv_power_value_calculator, tests_modules_names_provider=get_test_modules_names) main_loop.handle_arguments(sys_argv) except KeyboardInterrupt: if (main_logger is not None): main_logger.exception('Required to abort:') else: import traceback traceback.print_exc() except Exception: if (main_logger is not None): main_logger.exception('Error:') else: import traceback traceback.print_exc()
795,831,813,431,906,000
PV simulator execution entry point. Parameters ---------- sys_argv : list contains the list of arguments passed to the CLI during its execution. The first argument contains the executed script name.
services/pv_simulator/main.py
main
reynierg/pv_simulator_challenge
python
def main(sys_argv: typing.List[str]) -> None: 'PV simulator execution entry point.\n\n Parameters\n ----------\n sys_argv : list\n contains the list of arguments passed to the CLI during its execution. The first argument contains the\n executed script name.\n ' main_logger: typing.Optional[logging.Logger] = None try: must_exit_after_24h = os.getenv('MUST_EXIT_AFTER_24H', '0') must_exit_after_24h = (True if (must_exit_after_24h.isdecimal() and (int(must_exit_after_24h) == 1)) else False) main_logger = utils.initialize_loggers(current_dir_path) main_loop: MainLoop = MainLoop(constants.LOGGER_NAME, constants.RESULTS_LOGGER_NAME, current_dir_path, must_exit_after_24h, get_mq_receiver, get_pv_power_value_calculator, tests_modules_names_provider=get_test_modules_names) main_loop.handle_arguments(sys_argv) except KeyboardInterrupt: if (main_logger is not None): main_logger.exception('Required to abort:') else: import traceback traceback.print_exc() except Exception: if (main_logger is not None): main_logger.exception('Error:') else: import traceback traceback.print_exc()
def render_message_template(message_template: List[dict], **kwargs): 'Renders the jinja data included in the template itself.' data = [] new_copy = copy.deepcopy(message_template) for d in new_copy: if d.get('status_mapping'): d['text'] = d['status_mapping'][kwargs['status']] if d.get('datetime'): d['datetime'] = Template(d['datetime']).render(**kwargs) d['text'] = Template(d['text']).render(**kwargs) d['title'] = Template(d['title']).render(**kwargs) if d.get('title_link'): d['title_link'] = Template(d['title_link']).render(**kwargs) if (d['title_link'] == 'None'): continue if (not d['title_link']): continue if d.get('button_text'): d['button_text'] = Template(d['button_text']).render(**kwargs) if d.get('button_value'): d['button_value'] = Template(d['button_value']).render(**kwargs) data.append(d) return data
6,656,896,164,718,373,000
Renders the jinja data included in the template itself.
src/dispatch/messaging.py
render_message_template
oliverzgy/dispatch
python
def render_message_template(message_template: List[dict], **kwargs): data = [] new_copy = copy.deepcopy(message_template) for d in new_copy: if d.get('status_mapping'): d['text'] = d['status_mapping'][kwargs['status']] if d.get('datetime'): d['datetime'] = Template(d['datetime']).render(**kwargs) d['text'] = Template(d['text']).render(**kwargs) d['title'] = Template(d['title']).render(**kwargs) if d.get('title_link'): d['title_link'] = Template(d['title_link']).render(**kwargs) if (d['title_link'] == 'None'): continue if (not d['title_link']): continue if d.get('button_text'): d['button_text'] = Template(d['button_text']).render(**kwargs) if d.get('button_value'): d['button_value'] = Template(d['button_value']).render(**kwargs) data.append(d) return data
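An illustrative sketch of the Jinja-based rendering idea used above, with a made-up message template; the 'title' and 'text' field names mirror the function, but the template content and the render_blocks helper are hypothetical.

from jinja2 import Template

message_template = [
    {'title': 'Incident {{ name }}', 'text': 'Status is now {{ status }}.'},
]

def render_blocks(template_list, **kwargs):
    rendered = []
    for block in template_list:
        block = dict(block)  # work on a copy, as the original does with deepcopy
        block['title'] = Template(block['title']).render(**kwargs)
        block['text'] = Template(block['text']).render(**kwargs)
        rendered.append(block)
    return rendered

print(render_blocks(message_template, name='DB outage', status='resolved'))
# [{'title': 'Incident DB outage', 'text': 'Status is now resolved.'}]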
def __init__(self, params=None): 'Initialization method.\n\n Args:\n params (dict): Contains key-value parameters to the meta-heuristics.\n\n ' logger.info('Overriding class: Optimizer -> QSA.') super(QSA, self).__init__() self.build(params) logger.info('Class overrided.')
-3,367,765,822,090,985,000
Initialization method. Args: params (dict): Contains key-value parameters to the meta-heuristics.
opytimizer/optimizers/social/qsa.py
__init__
anukaal/opytimizer
python
def __init__(self, params=None): 'Initialization method.\n\n Args:\n params (dict): Contains key-value parameters to the meta-heuristics.\n\n ' logger.info('Overriding class: Optimizer -> QSA.') super(QSA, self).__init__() self.build(params) logger.info('Class overrided.')
def _calculate_queue(self, n_agents, t_1, t_2, t_3): 'Calculates the number of agents that belongs to each queue.\n\n Args:\n n_agents (int): Number of agents.\n t_1 (float): Fitness value of first agent in the population.\n t_2 (float): Fitness value of second agent in the population.\n t_3 (float): Fitness value of third agent in the population.\n\n Returns:\n The number of agents in first, second and third queues.\n\n ' if (t_1 > c.EPSILON): n_1 = ((1 / t_1) / (((1 / t_1) + (1 / t_2)) + (1 / t_3))) n_2 = ((1 / t_2) / (((1 / t_1) + (1 / t_2)) + (1 / t_3))) n_3 = ((1 / t_3) / (((1 / t_1) + (1 / t_2)) + (1 / t_3))) else: n_1 = (1 / 3) n_2 = (1 / 3) n_3 = (1 / 3) q_1 = int((n_1 * n_agents)) q_2 = int((n_2 * n_agents)) q_3 = int((n_3 * n_agents)) return (q_1, q_2, q_3)
2,714,208,289,195,568,000
Calculates the number of agents that belongs to each queue. Args: n_agents (int): Number of agents. t_1 (float): Fitness value of first agent in the population. t_2 (float): Fitness value of second agent in the population. t_3 (float): Fitness value of third agent in the population. Returns: The number of agents in first, second and third queues.
opytimizer/optimizers/social/qsa.py
_calculate_queue
anukaal/opytimizer
python
def _calculate_queue(self, n_agents, t_1, t_2, t_3): 'Calculates the number of agents that belongs to each queue.\n\n Args:\n n_agents (int): Number of agents.\n t_1 (float): Fitness value of first agent in the population.\n t_2 (float): Fitness value of second agent in the population.\n t_3 (float): Fitness value of third agent in the population.\n\n Returns:\n The number of agents in first, second and third queues.\n\n ' if (t_1 > c.EPSILON): n_1 = ((1 / t_1) / (((1 / t_1) + (1 / t_2)) + (1 / t_3))) n_2 = ((1 / t_2) / (((1 / t_1) + (1 / t_2)) + (1 / t_3))) n_3 = ((1 / t_3) / (((1 / t_1) + (1 / t_2)) + (1 / t_3))) else: n_1 = (1 / 3) n_2 = (1 / 3) n_3 = (1 / 3) q_1 = int((n_1 * n_agents)) q_2 = int((n_2 * n_agents)) q_3 = int((n_3 * n_agents)) return (q_1, q_2, q_3)
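A worked numeric example of the queue-splitting rule above: the three best agents attract queue shares proportional to their inverse fitness, so a smaller (better) fitness value earns a longer queue. The EPSILON constant below is a stand-in for the library's c.EPSILON.

EPSILON = 1e-32  # assumed small constant, standing in for c.EPSILON

def calculate_queue(n_agents, t_1, t_2, t_3):
    if t_1 > EPSILON:
        total = 1 / t_1 + 1 / t_2 + 1 / t_3
        n_1, n_2, n_3 = (1 / t_1) / total, (1 / t_2) / total, (1 / t_3) / total
    else:
        n_1 = n_2 = n_3 = 1 / 3
    return int(n_1 * n_agents), int(n_2 * n_agents), int(n_3 * n_agents)

print(calculate_queue(30, 1.0, 2.0, 4.0))  # (17, 8, 4): the fittest agent gets the longest queue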
def _business_one(self, agents, function, beta): 'Performs the first business phase.\n\n Args:\n agents (list): List of agents.\n function (Function): A Function object that will be used as the objective function.\n beta (float): Range of fluctuation.\n\n ' agents.sort(key=(lambda x: x.fit)) (A_1, A_2, A_3) = (copy.deepcopy(agents[0]), copy.deepcopy(agents[1]), copy.deepcopy(agents[2])) (q_1, q_2, _) = self._calculate_queue(len(agents), A_1.fit, A_2.fit, A_3.fit) case = None for (i, agent) in enumerate(agents): a = copy.deepcopy(agent) if (i < q_1): if (i == 0): case = 1 A = copy.deepcopy(A_1) elif (q_1 <= i < (q_1 + q_2)): if (i == q_1): case = 1 A = copy.deepcopy(A_2) else: if (i == (q_1 + q_2)): case = 1 A = copy.deepcopy(A_3) alpha = r.generate_uniform_random_number((- 1), 1) E = r.generate_gamma_random_number(1, 0.5, (agent.n_variables, agent.n_dimensions)) if (case == 1): e = r.generate_gamma_random_number(1, 0.5, 1) F_1 = (((beta * alpha) * (E * np.fabs((A.position - a.position)))) + (e * (A.position - a.position))) a.position = (A.position + F_1) a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit) case = 1 else: case = 2 else: F_2 = ((beta * alpha) * (E * np.fabs((A.position - a.position)))) a.position += F_2 a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit) case = 2 else: case = 1
7,335,458,090,288,138,000
Performs the first business phase. Args: agents (list): List of agents. function (Function): A Function object that will be used as the objective function. beta (float): Range of fluctuation.
opytimizer/optimizers/social/qsa.py
_business_one
anukaal/opytimizer
python
def _business_one(self, agents, function, beta): 'Performs the first business phase.\n\n Args:\n agents (list): List of agents.\n function (Function): A Function object that will be used as the objective function.\n beta (float): Range of fluctuation.\n\n ' agents.sort(key=(lambda x: x.fit)) (A_1, A_2, A_3) = (copy.deepcopy(agents[0]), copy.deepcopy(agents[1]), copy.deepcopy(agents[2])) (q_1, q_2, _) = self._calculate_queue(len(agents), A_1.fit, A_2.fit, A_3.fit) case = None for (i, agent) in enumerate(agents): a = copy.deepcopy(agent) if (i < q_1): if (i == 0): case = 1 A = copy.deepcopy(A_1) elif (q_1 <= i < (q_1 + q_2)): if (i == q_1): case = 1 A = copy.deepcopy(A_2) else: if (i == (q_1 + q_2)): case = 1 A = copy.deepcopy(A_3) alpha = r.generate_uniform_random_number((- 1), 1) E = r.generate_gamma_random_number(1, 0.5, (agent.n_variables, agent.n_dimensions)) if (case == 1): e = r.generate_gamma_random_number(1, 0.5, 1) F_1 = (((beta * alpha) * (E * np.fabs((A.position - a.position)))) + (e * (A.position - a.position))) a.position = (A.position + F_1) a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit) case = 1 else: case = 2 else: F_2 = ((beta * alpha) * (E * np.fabs((A.position - a.position)))) a.position += F_2 a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit) case = 2 else: case = 1
def _business_two(self, agents, function): 'Performs the second business phase.\n\n Args:\n agents (list): List of agents.\n function (Function): A Function object that will be used as the objective function.\n\n ' agents.sort(key=(lambda x: x.fit)) (A_1, A_2, A_3) = (copy.deepcopy(agents[0]), copy.deepcopy(agents[1]), copy.deepcopy(agents[2])) (q_1, q_2, _) = self._calculate_queue(len(agents), A_1.fit, A_2.fit, A_3.fit) pr = [(i / len(agents)) for i in range(1, (len(agents) + 1))] cv = (A_1.fit / ((A_2.fit + A_3.fit) + c.EPSILON)) for (i, agent) in enumerate(agents): a = copy.deepcopy(agent) if (i < q_1): A = copy.deepcopy(A_1) elif (q_1 <= i < (q_1 + q_2)): A = copy.deepcopy(A_2) else: A = copy.deepcopy(A_3) r1 = r.generate_uniform_random_number() if (r1 < pr[i]): (A_1, A_2) = np.random.choice(agents, 2, replace=False) r2 = r.generate_uniform_random_number() e = r.generate_gamma_random_number(1, 0.5, 1) if (r2 < cv): F_1 = (e * (A_1.position - A_2.position)) a.position += F_1 else: F_2 = (e * (A.position - A_1.position)) a.position += F_2 a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit)
8,086,955,353,914,416,000
Performs the second business phase. Args: agents (list): List of agents. function (Function): A Function object that will be used as the objective function.
opytimizer/optimizers/social/qsa.py
_business_two
anukaal/opytimizer
python
def _business_two(self, agents, function): 'Performs the second business phase.\n\n Args:\n agents (list): List of agents.\n function (Function): A Function object that will be used as the objective function.\n\n ' agents.sort(key=(lambda x: x.fit)) (A_1, A_2, A_3) = (copy.deepcopy(agents[0]), copy.deepcopy(agents[1]), copy.deepcopy(agents[2])) (q_1, q_2, _) = self._calculate_queue(len(agents), A_1.fit, A_2.fit, A_3.fit) pr = [(i / len(agents)) for i in range(1, (len(agents) + 1))] cv = (A_1.fit / ((A_2.fit + A_3.fit) + c.EPSILON)) for (i, agent) in enumerate(agents): a = copy.deepcopy(agent) if (i < q_1): A = copy.deepcopy(A_1) elif (q_1 <= i < (q_1 + q_2)): A = copy.deepcopy(A_2) else: A = copy.deepcopy(A_3) r1 = r.generate_uniform_random_number() if (r1 < pr[i]): (A_1, A_2) = np.random.choice(agents, 2, replace=False) r2 = r.generate_uniform_random_number() e = r.generate_gamma_random_number(1, 0.5, 1) if (r2 < cv): F_1 = (e * (A_1.position - A_2.position)) a.position += F_1 else: F_2 = (e * (A.position - A_1.position)) a.position += F_2 a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit)
def _business_three(self, agents, function): 'Performs the third business phase.\n\n Args:\n agents (list): List of agents.\n function (Function): A Function object that will be used as the objective function.\n\n ' agents.sort(key=(lambda x: x.fit)) pr = [(i / len(agents)) for i in range(1, (len(agents) + 1))] for (i, agent) in enumerate(agents): a = copy.deepcopy(agent) for j in range(agent.n_variables): r1 = r.generate_uniform_random_number() if (r1 < pr[i]): (A_1, A_2) = np.random.choice(agents, 2, replace=False) e = r.generate_gamma_random_number(1, 0.5, 1) a.position[j] = (A_1.position[j] + (e * (A_2.position[j] - a.position[j]))) a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit)
-4,295,532,801,114,571,300
Performs the third business phase. Args: agents (list): List of agents. function (Function): A Function object that will be used as the objective function.
opytimizer/optimizers/social/qsa.py
_business_three
anukaal/opytimizer
python
def _business_three(self, agents, function): 'Performs the third business phase.\n\n Args:\n agents (list): List of agents.\n function (Function): A Function object that will be used as the objective function.\n\n ' agents.sort(key=(lambda x: x.fit)) pr = [(i / len(agents)) for i in range(1, (len(agents) + 1))] for (i, agent) in enumerate(agents): a = copy.deepcopy(agent) for j in range(agent.n_variables): r1 = r.generate_uniform_random_number() if (r1 < pr[i]): (A_1, A_2) = np.random.choice(agents, 2, replace=False) e = r.generate_gamma_random_number(1, 0.5, 1) a.position[j] = (A_1.position[j] + (e * (A_2.position[j] - a.position[j]))) a.fit = function(a.position) if (a.fit < agent.fit): agent.position = copy.deepcopy(a.position) agent.fit = copy.deepcopy(a.fit)
def update(self, space, function, iteration, n_iterations): 'Wraps Queue Search Algorithm over all agents and variables.\n\n Args:\n space (Space): Space containing agents and update-related information.\n function (Function): A Function object that will be used as the objective function.\n iteration (int): Current iteration.\n n_iterations (int): Maximum number of iterations.\n\n ' beta = np.exp((np.log((1 / (iteration + c.EPSILON))) * np.sqrt((iteration / n_iterations)))) self._business_one(space.agents, function, beta) self._business_two(space.agents, function) self._business_three(space.agents, function)
8,129,542,189,646,762,000
Wraps Queue Search Algorithm over all agents and variables. Args: space (Space): Space containing agents and update-related information. function (Function): A Function object that will be used as the objective function. iteration (int): Current iteration. n_iterations (int): Maximum number of iterations.
opytimizer/optimizers/social/qsa.py
update
anukaal/opytimizer
python
def update(self, space, function, iteration, n_iterations): 'Wraps Queue Search Algorithm over all agents and variables.\n\n Args:\n space (Space): Space containing agents and update-related information.\n function (Function): A Function object that will be used as the objective function.\n iteration (int): Current iteration.\n n_iterations (int): Maximum number of iterations.\n\n ' beta = np.exp((np.log((1 / (iteration + c.EPSILON))) * np.sqrt((iteration / n_iterations)))) self._business_one(space.agents, function, beta) self._business_two(space.agents, function) self._business_three(space.agents, function)
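The fluctuation range beta computed in update() starts near 1 in early iterations and decays toward 1/n_iterations as the iteration counter approaches the budget. A quick sketch of that schedule (EPSILON again stands in for c.EPSILON):

import numpy as np

EPSILON = 1e-32  # assumed small constant, standing in for c.EPSILON

def beta(iteration, n_iterations):
    # Same schedule as in update(): exp(log(1/t) * sqrt(t/T)).
    return np.exp(np.log(1 / (iteration + EPSILON)) * np.sqrt(iteration / n_iterations))

for it in (0, 1, 10, 50, 100):
    print(it, float(beta(it, 100)))  # decays from 1.0 down to about 0.01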
def __init__(self, **kwargs): '\n Initializes a new WorkRequestLogEntryCollection object with values from keyword arguments.\n The following keyword arguments are supported (corresponding to the getters/setters of this class):\n\n :param items:\n The value to assign to the items property of this WorkRequestLogEntryCollection.\n :type items: list[oci.network_load_balancer.models.WorkRequestLogEntry]\n\n ' self.swagger_types = {'items': 'list[WorkRequestLogEntry]'} self.attribute_map = {'items': 'items'} self._items = None
326,346,107,807,940,160
Initializes a new WorkRequestLogEntryCollection object with values from keyword arguments. The following keyword arguments are supported (corresponding to the getters/setters of this class): :param items: The value to assign to the items property of this WorkRequestLogEntryCollection. :type items: list[oci.network_load_balancer.models.WorkRequestLogEntry]
src/oci/network_load_balancer/models/work_request_log_entry_collection.py
__init__
LaudateCorpus1/oci-python-sdk
python
def __init__(self, **kwargs): '\n Initializes a new WorkRequestLogEntryCollection object with values from keyword arguments.\n The following keyword arguments are supported (corresponding to the getters/setters of this class):\n\n :param items:\n The value to assign to the items property of this WorkRequestLogEntryCollection.\n :type items: list[oci.network_load_balancer.models.WorkRequestLogEntry]\n\n ' self.swagger_types = {'items': 'list[WorkRequestLogEntry]'} self.attribute_map = {'items': 'items'} self._items = None
@property def items(self): '\n Gets the items of this WorkRequestLogEntryCollection.\n An array of WorkRequestLogEntry objects.\n\n\n :return: The items of this WorkRequestLogEntryCollection.\n :rtype: list[oci.network_load_balancer.models.WorkRequestLogEntry]\n ' return self._items
6,000,403,713,483,571,000
Gets the items of this WorkRequestLogEntryCollection. An array of WorkRequestLogEntry objects. :return: The items of this WorkRequestLogEntryCollection. :rtype: list[oci.network_load_balancer.models.WorkRequestLogEntry]
src/oci/network_load_balancer/models/work_request_log_entry_collection.py
items
LaudateCorpus1/oci-python-sdk
python
@property def items(self): '\n Gets the items of this WorkRequestLogEntryCollection.\n An array of WorkRequestLogEntry objects.\n\n\n :return: The items of this WorkRequestLogEntryCollection.\n :rtype: list[oci.network_load_balancer.models.WorkRequestLogEntry]\n ' return self._items
@items.setter def items(self, items): '\n Sets the items of this WorkRequestLogEntryCollection.\n An array of WorkRequestLogEntry objects.\n\n\n :param items: The items of this WorkRequestLogEntryCollection.\n :type: list[oci.network_load_balancer.models.WorkRequestLogEntry]\n ' self._items = items
-1,871,939,153,352,299,300
Sets the items of this WorkRequestLogEntryCollection. An array of WorkRequestLogEntry objects. :param items: The items of this WorkRequestLogEntryCollection. :type: list[oci.network_load_balancer.models.WorkRequestLogEntry]
src/oci/network_load_balancer/models/work_request_log_entry_collection.py
items
LaudateCorpus1/oci-python-sdk
python
@items.setter def items(self, items): '\n Sets the items of this WorkRequestLogEntryCollection.\n An array of WorkRequestLogEntry objects.\n\n\n :param items: The items of this WorkRequestLogEntryCollection.\n :type: list[oci.network_load_balancer.models.WorkRequestLogEntry]\n ' self._items = items
def twoSum(self, nums, target): '\n :type nums: List[int]\n :type target: int\n :rtype: List[int]\n ' lookup = dict(((v, i) for (i, v) in enumerate(nums))) return next((((i + 1), (lookup.get((target - v)) + 1)) for (i, v) in enumerate(nums) if (lookup.get((target - v), i) != i)), None)
-5,897,341,292,383,729,000
:type nums: List[int] :type target: int :rtype: List[int]
array/twosum.py
twoSum
mengyangbai/leetcode
python
def twoSum(self, nums, target): '\n :type nums: List[int]\n :type target: int\n :rtype: List[int]\n ' lookup = dict(((v, i) for (i, v) in enumerate(nums))) return next((((i + 1), (lookup.get((target - v)) + 1)) for (i, v) in enumerate(nums) if (lookup.get((target - v), i) != i)), None)
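The twoSum record above never touches self, so it can be exercised as a plain function; note that it returns 1-based indices as a tuple. A quick standalone check:

def two_sum(nums, target):
    # same logic as the record above, minus the class wrapper
    lookup = dict((v, i) for i, v in enumerate(nums))
    return next(((i + 1, lookup.get(target - v) + 1)
                 for i, v in enumerate(nums)
                 if lookup.get(target - v, i) != i), None)

print(two_sum([2, 7, 11, 15], 9))   # (1, 2)
print(two_sum([3, 3], 6))           # (1, 2): duplicates resolve because lookup keeps the later index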
def _parse_actions(actions): ' Actions come in as a combined list. This method separates the webhook actions into a\n separate collection and combines any number of email actions into a single email collection\n and a single value for `email_service_owners`. If any email action contains a True value\n for `send_to_service_owners` then it is assumed the entire value should be True. ' from azure.mgmt.monitor.models import RuleEmailAction, RuleWebhookAction actions = (actions or []) email_service_owners = None webhooks = [x for x in actions if isinstance(x, RuleWebhookAction)] custom_emails = set() for action in actions: if isinstance(action, RuleEmailAction): if action.send_to_service_owners: email_service_owners = True custom_emails = (custom_emails | set(action.custom_emails)) return (list(custom_emails), webhooks, email_service_owners)
3,501,492,025,227,609,600
Actions come in as a combined list. This method separates the webhook actions into a separate collection and combines any number of email actions into a single email collection and a single value for `email_service_owners`. If any email action contains a True value for `send_to_service_owners` then it is assumed the entire value should be True.
src/azure-cli/azure/cli/command_modules/monitor/operations/metric_alert.py
_parse_actions
21m57/azure-cli
python
def _parse_actions(actions): ' Actions come in as a combined list. This method separates the webhook actions into a\n separate collection and combines any number of email actions into a single email collection\n and a single value for `email_service_owners`. If any email action contains a True value\n for `send_to_service_owners` then it is assumed the entire value should be True. ' from azure.mgmt.monitor.models import RuleEmailAction, RuleWebhookAction actions = (actions or []) email_service_owners = None webhooks = [x for x in actions if isinstance(x, RuleWebhookAction)] custom_emails = set() for action in actions: if isinstance(action, RuleEmailAction): if action.send_to_service_owners: email_service_owners = True custom_emails = (custom_emails | set(action.custom_emails)) return (list(custom_emails), webhooks, email_service_owners)
def _parse_action_removals(actions): ' Separates the combined list of keys to remove into webhooks and emails. ' flattened = list({x for sublist in actions for x in sublist}) emails = [] webhooks = [] for item in flattened: if (item.startswith('http://') or item.startswith('https://')): webhooks.append(item) else: emails.append(item) return (emails, webhooks)
-5,889,282,027,629,094,000
Separates the combined list of keys to remove into webhooks and emails.
src/azure-cli/azure/cli/command_modules/monitor/operations/metric_alert.py
_parse_action_removals
21m57/azure-cli
python
def _parse_action_removals(actions): ' ' flattened = list({x for sublist in actions for x in sublist}) emails = [] webhooks = [] for item in flattened: if (item.startswith('http://') or item.startswith('https://')): webhooks.append(item) else: emails.append(item) return (emails, webhooks)
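An illustrative call for _parse_action_removals above, with made-up addresses: anything starting with http:// or https:// is routed to the webhook list, everything else is treated as an email.

actions = [['https://hooks.example.com/alert', 'ops@example.com'], ['oncall@example.com']]
emails, webhooks = _parse_action_removals(actions)
# emails   -> ['ops@example.com', 'oncall@example.com']  (order may vary; the set dedupes)
# webhooks -> ['https://hooks.example.com/alert']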
async def async_setup_entry(opp: OpenPeerPower, config_entry: ConfigEntry, async_add_entities) -> None: 'Set up discovered sensors.' devs = [] for dev in opp.data[AQUALINK_DOMAIN][DOMAIN]: devs.append(OppAqualinkSensor(dev)) async_add_entities(devs, True)
6,281,085,809,589,287,000
Set up discovered sensors.
openpeerpower/components/iaqualink/sensor.py
async_setup_entry
OpenPeerPower/core
python
async def async_setup_entry(opp: OpenPeerPower, config_entry: ConfigEntry, async_add_entities) -> None: devs = [] for dev in opp.data[AQUALINK_DOMAIN][DOMAIN]: devs.append(OppAqualinkSensor(dev)) async_add_entities(devs, True)
@property def name(self) -> str: 'Return the name of the sensor.' return self.dev.label
-2,087,878,669,653,740,500
Return the name of the sensor.
openpeerpower/components/iaqualink/sensor.py
name
OpenPeerPower/core
python
@property def name(self) -> str: return self.dev.label
@property def unit_of_measurement(self) -> (str | None): 'Return the measurement unit for the sensor.' if self.dev.name.endswith('_temp'): if (self.dev.system.temp_unit == 'F'): return TEMP_FAHRENHEIT return TEMP_CELSIUS return None
4,229,724,856,024,876,000
Return the measurement unit for the sensor.
openpeerpower/components/iaqualink/sensor.py
unit_of_measurement
OpenPeerPower/core
python
@property def unit_of_measurement(self) -> (str | None): if self.dev.name.endswith('_temp'): if (self.dev.system.temp_unit == 'F'): return TEMP_FAHRENHEIT return TEMP_CELSIUS return None
@property def state(self) -> (str | None): 'Return the state of the sensor.' if (self.dev.state == ''): return None try: state = int(self.dev.state) except ValueError: state = float(self.dev.state) return state
9,126,054,177,930,050,000
Return the state of the sensor.
openpeerpower/components/iaqualink/sensor.py
state
OpenPeerPower/core
python
@property def state(self) -> (str | None): if (self.dev.state == ''): return None try: state = int(self.dev.state) except ValueError: state = float(self.dev.state) return state
@property def device_class(self) -> (str | None): 'Return the class of the sensor.' if self.dev.name.endswith('_temp'): return DEVICE_CLASS_TEMPERATURE return None
-1,855,583,597,421,016,000
Return the class of the sensor.
openpeerpower/components/iaqualink/sensor.py
device_class
OpenPeerPower/core
python
@property def device_class(self) -> (str | None): if self.dev.name.endswith('_temp'): return DEVICE_CLASS_TEMPERATURE return None
def scanI2c(ip): '\n scans devices on i2c bus\n :return: list of hex string addresses present on i2c bus\n ' try: req_url = (('http://' + ip) + '/i2c/scan') resp = requests.get(url=req_url) return resp.content.decode('utf-8') except ValueError: print('i2c failed scan')
-6,240,809,939,133,776,000
scans devices on i2c bus :return: list of hex string addresses present on i2c bus
python/papaya_i2chttpinst.py
scanI2c
papaya-iot/papaya-examples
python
def scanI2c(ip): '\n scans devices on i2c bus\n :return: list of hex string addresses present on i2c bus\n ' try: req_url = (('http://' + ip) + '/i2c/scan') resp = requests.get(url=req_url) return resp.content.decode('utf-8') except ValueError: print('i2c failed scan')
def read(self, reg_addr, len_read): '\n read len_read bytes starting from register reg_addr\n :param reg_addr: (str) register address to read in hex\n :param len_read: (int) number of bytes to read\n :return: bytestring of data\n ' assert (len_read < 256), 'num of bytes to read cannot exceed 255' hex_reg_addr = enforce_hex(reg_addr) try: req_url = ('%sread/%s/%s/%d' % (self.url, self.dev_addr, hex_reg_addr, len_read)) resp = requests.get(url=req_url) return binascii.a2b_hex(resp.content) except ValueError: print('i2c failed read')
-222,386,349,693,098,020
read len_read bytes starting from register reg_addr :param reg_addr: (str) register address to read in hex :param len_read: (int) number of bytes to read :return: bytestring of data
python/papaya_i2chttpinst.py
read
papaya-iot/papaya-examples
python
def read(self, reg_addr, len_read): '\n read len_read bytes starting from register reg_addr\n :param reg_addr: (str) register address to read in hex\n :param len_read: (int) number of bytes to read\n :return: bytestring of data\n ' assert (len_read < 256), 'num of bytes to read cannot exceed 255' hex_reg_addr = enforce_hex(reg_addr) try: req_url = ('%sread/%s/%s/%d' % (self.url, self.dev_addr, hex_reg_addr, len_read)) resp = requests.get(url=req_url) return binascii.a2b_hex(resp.content) except ValueError: print('i2c failed read')
def write(self, reg_addr, data, len_data=0): "\n :param reg_addr: (str) register address to write to in hex\n :param data: (str or bytes) hex-encoded bytes, ie: '014ce8'\n :param len_data: (optional int) dummy variable to support code portability\n :return: None\n " hex_reg_addr = enforce_hex(reg_addr) if (type(data) == bytes): data = getencoder('hex')(data)[0].decode('ascii') try: req_url = ('%swrite/%s/%s/%s' % (self.url, self.dev_addr, hex_reg_addr, data)) requests.get(url=req_url) except ValueError: print(('i2c device 0x%s failed write' % self.dev_addr))
-2,559,223,647,912,131,600
:param reg_addr: (str) register address to write to in hex :param data: (str or bytes) hex-encoded bytes, ie: '014ce8' :param len_data: (optional int) dummy variable to support code portability :return: None
python/papaya_i2chttpinst.py
write
papaya-iot/papaya-examples
python
def write(self, reg_addr, data, len_data=0): "\n :param reg_addr: (str) register address to write to in hex\n :param data: (str or bytes) hex-encoded bytes, ie: '014ce8'\n :param len_data: (optional int) dummy variable to support code portability\n :return: None\n " hex_reg_addr = enforce_hex(reg_addr) if (type(data) == bytes): data = getencoder('hex')(data)[0].decode('ascii') try: req_url = ('%swrite/%s/%s/%s' % (self.url, self.dev_addr, hex_reg_addr, data)) requests.get(url=req_url) except ValueError: print(('i2c device 0x%s failed write' % self.dev_addr))
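The write() record above accepts either a hex string or raw bytes, hex-encoding bytes with codecs.getencoder before they go into the request URL, while read() decodes responses with binascii.a2b_hex. A small round-trip sketch of just that encoding step (no HTTP involved):

import binascii
from codecs import getencoder

payload = b'\x01\x4c\xe8'
hex_str = getencoder('hex')(payload)[0].decode('ascii')   # '014ce8'
assert binascii.a2b_hex(hex_str) == payload               # the inverse used when reading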
@abc.abstractmethod def __iter__(self) -> Iterator[Class]: 'Create an iterator for the class map values.'
2,161,965,101,768,955,100
Create an iterator for the class map values.
xsdata/codegen/mixins.py
__iter__
amal-khailtash/xsdata
python
@abc.abstractmethod def __iter__(self) -> Iterator[Class]:
@abc.abstractmethod def find(self, qname: str, condition: Callable=return_true) -> Optional[Class]: 'Search by qualified name for a specific class with an optional\n condition callable.'
-7,627,424,956,996,297,000
Search by qualified name for a specific class with an optional condition callable.
xsdata/codegen/mixins.py
find
amal-khailtash/xsdata
python
@abc.abstractmethod def find(self, qname: str, condition: Callable=return_true) -> Optional[Class]: 'Search by qualified name for a specific class with an optional\n condition callable.'
@abc.abstractmethod def find_inner(self, source: Class, qname: str) -> Class: 'Search by qualified name for a specific inner class or fail.'
-3,995,696,163,048,785,000
Search by qualified name for a specific inner class or fail.
xsdata/codegen/mixins.py
find_inner
amal-khailtash/xsdata
python
@abc.abstractmethod def find_inner(self, source: Class, qname: str) -> Class:
@abc.abstractmethod def add(self, item: Class): 'Add class item to the container.'
1,259,824,434,139,553,300
Add class item to the container.
xsdata/codegen/mixins.py
add
amal-khailtash/xsdata
python
@abc.abstractmethod def add(self, item: Class):
@abc.abstractmethod def extend(self, items: List[Class]): 'Add a list of classes to the container.'
5,647,266,450,418,243,000
Add a list of classes to the container.
xsdata/codegen/mixins.py
extend
amal-khailtash/xsdata
python
@abc.abstractmethod def extend(self, items: List[Class]):
@abc.abstractmethod def reset(self, item: Class, qname: str): 'Update the given class qualified name.'
-8,610,711,932,646,473,000
Update the given class qualified name.
xsdata/codegen/mixins.py
reset
amal-khailtash/xsdata
python
@abc.abstractmethod def reset(self, item: Class, qname: str):
@abc.abstractmethod def process(self, target: Class): 'Process the given target class.'
-781,199,706,564,020,600
Process the given target class.
xsdata/codegen/mixins.py
process
amal-khailtash/xsdata
python
@abc.abstractmethod def process(self, target: Class):
@abc.abstractmethod def run(self): 'Run the process for the whole container.'
8,029,094,563,809,355,000
Run the process for the whole container.
xsdata/codegen/mixins.py
run
amal-khailtash/xsdata
python
@abc.abstractmethod def run(self):
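The find/add/extend/reset contracts above belong to xsdata's abstract container mixin. A hypothetical in-memory toy (not the real ClassContainer) that only illustrates how the optional condition callable is meant to behave:

def return_true(_obj):
    # stand-in for the default predicate named in find() above
    return True

class ToyContainer:
    def __init__(self):
        self._items = []

    def add(self, item):
        self._items.append(item)

    def extend(self, items):
        self._items.extend(items)

    def find(self, qname, condition=return_true):
        # first item matching both the qualified name and the predicate, else None
        return next((c for c in self._items if c.qname == qname and condition(c)), None)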
def __init__(self, db): '\n :type db: datacube.index.postgres._api.PostgresDb\n ' self._db = db
8,569,476,321,902,646,000
:type db: datacube.index.postgres._api.PostgresDb
datacube/index/_datasets.py
__init__
cronosnull/agdc-v2
python
def __init__(self, db): '\n \n ' self._db = db
def add(self, definition, allow_table_lock=False): "\n :type definition: dict\n :param allow_table_lock:\n Allow an exclusive lock to be taken on the table while creating the indexes.\n This will halt other user's requests until completed.\n\n If false, creation will be slightly slower and cannot be done in a transaction.\n :rtype: datacube.model.MetadataType\n " MetadataType.validate(definition) name = definition['name'] existing = self._db.get_metadata_type_by_name(name) if existing: check_doc_unchanged(existing.definition, definition, 'Metadata Type {}'.format(name)) else: self._db.add_metadata_type(name=name, definition=definition, concurrently=(not allow_table_lock)) return self.get_by_name(name)
-8,154,873,849,997,148,000
:type definition: dict :param allow_table_lock: Allow an exclusive lock to be taken on the table while creating the indexes. This will halt other user's requests until completed. If false, creation will be slightly slower and cannot be done in a transaction. :rtype: datacube.model.MetadataType
datacube/index/_datasets.py
add
cronosnull/agdc-v2
python
def add(self, definition, allow_table_lock=False): "\n :type definition: dict\n :param allow_table_lock:\n Allow an exclusive lock to be taken on the table while creating the indexes.\n This will halt other user's requests until completed.\n\n If false, creation will be slightly slower and cannot be done in a transaction.\n :rtype: datacube.model.MetadataType\n " MetadataType.validate(definition) name = definition['name'] existing = self._db.get_metadata_type_by_name(name) if existing: check_doc_unchanged(existing.definition, definition, 'Metadata Type {}'.format(name)) else: self._db.add_metadata_type(name=name, definition=definition, concurrently=(not allow_table_lock)) return self.get_by_name(name)
@lru_cache() def get(self, id_): '\n :rtype: datacube.model.MetadataType\n ' return self._make(self._db.get_metadata_type(id_))
-2,829,958,382,661,361,700
:rtype: datacube.model.MetadataType
datacube/index/_datasets.py
get
cronosnull/agdc-v2
python
@lru_cache() def get(self, id_): '\n \n ' return self._make(self._db.get_metadata_type(id_))
@lru_cache() def get_by_name(self, name): '\n :rtype: datacube.model.MetadataType\n ' record = self._db.get_metadata_type_by_name(name) if (not record): return None return self._make(record)
966,241,435,974,297,600
:rtype: datacube.model.MetadataType
datacube/index/_datasets.py
get_by_name
cronosnull/agdc-v2
python
@lru_cache() def get_by_name(self, name): '\n \n ' record = self._db.get_metadata_type_by_name(name) if (not record): return None return self._make(record)
def check_field_indexes(self, allow_table_lock=False, rebuild_all=False): "\n Create or replace per-field indexes and views.\n :param allow_table_lock:\n Allow an exclusive lock to be taken on the table while creating the indexes.\n This will halt other user's requests until completed.\n\n If false, creation will be slightly slower and cannot be done in a transaction.\n " self._db.check_dynamic_fields(concurrently=(not allow_table_lock), rebuild_all=rebuild_all)
3,272,245,721,149,252,000
Create or replace per-field indexes and views. :param allow_table_lock: Allow an exclusive lock to be taken on the table while creating the indexes. This will halt other user's requests until completed. If false, creation will be slightly slower and cannot be done in a transaction.
datacube/index/_datasets.py
check_field_indexes
cronosnull/agdc-v2
python
def check_field_indexes(self, allow_table_lock=False, rebuild_all=False): "\n Create or replace per-field indexes and views.\n :param allow_table_lock:\n Allow an exclusive lock to be taken on the table while creating the indexes.\n This will halt other user's requests until completed.\n\n If false, creation will be slightly slower and cannot be done in a transaction.\n " self._db.check_dynamic_fields(concurrently=(not allow_table_lock), rebuild_all=rebuild_all)
def _make(self, query_row): '\n :rtype list[datacube.model.MetadataType]\n ' definition = query_row['definition'] dataset_ = definition['dataset'] return MetadataType(query_row['name'], dataset_, dataset_search_fields=self._db.get_dataset_fields(query_row), id_=query_row['id'])
895,167,996,513,292,200
:rtype list[datacube.model.MetadataType]
datacube/index/_datasets.py
_make
cronosnull/agdc-v2
python
def _make(self, query_row): '\n \n ' definition = query_row['definition'] dataset_ = definition['dataset'] return MetadataType(query_row['name'], dataset_, dataset_search_fields=self._db.get_dataset_fields(query_row), id_=query_row['id'])
def __init__(self, db, metadata_type_resource): '\n :type db: datacube.index.postgres._api.PostgresDb\n :type metadata_type_resource: MetadataTypeResource\n ' self._db = db self.metadata_type_resource = metadata_type_resource
-6,762,795,850,044,275,000
:type db: datacube.index.postgres._api.PostgresDb :type metadata_type_resource: MetadataTypeResource
datacube/index/_datasets.py
__init__
cronosnull/agdc-v2
python
def __init__(self, db, metadata_type_resource): '\n :type db: datacube.index.postgres._api.PostgresDb\n :type metadata_type_resource: MetadataTypeResource\n ' self._db = db self.metadata_type_resource = metadata_type_resource
def from_doc(self, definition): '\n Create a Product from its definitions\n\n :param dict definition: product definition document\n :rtype: datacube.model.DatasetType\n ' DatasetType.validate(definition) metadata_type = definition['metadata_type'] if isinstance(metadata_type, compat.string_types): metadata_type = self.metadata_type_resource.get_by_name(metadata_type) else: metadata_type = self.metadata_type_resource.add(metadata_type, allow_table_lock=False) if (not metadata_type): raise InvalidDocException(('Unknown metadata type: %r' % definition['metadata_type'])) return DatasetType(metadata_type, definition)
-8,327,443,222,895,981,000
Create a Product from its definitions :param dict definition: product definition document :rtype: datacube.model.DatasetType
datacube/index/_datasets.py
from_doc
cronosnull/agdc-v2
python
def from_doc(self, definition): '\n Create a Product from its definitions\n\n :param dict definition: product definition document\n :rtype: datacube.model.DatasetType\n ' DatasetType.validate(definition) metadata_type = definition['metadata_type'] if isinstance(metadata_type, compat.string_types): metadata_type = self.metadata_type_resource.get_by_name(metadata_type) else: metadata_type = self.metadata_type_resource.add(metadata_type, allow_table_lock=False) if (not metadata_type): raise InvalidDocException(('Unknown metadata type: %r' % definition['metadata_type'])) return DatasetType(metadata_type, definition)
def add(self, type_): '\n Add a Product\n\n :param datacube.model.DatasetType type_: Product to add\n :rtype: datacube.model.DatasetType\n ' DatasetType.validate(type_.definition) existing = self._db.get_dataset_type_by_name(type_.name) if existing: check_doc_unchanged(existing.definition, jsonify_document(type_.definition), 'Dataset type {}'.format(type_.name)) else: self._db.add_dataset_type(name=type_.name, metadata=type_.metadata_doc, metadata_type_id=type_.metadata_type.id, definition=type_.definition) return self.get_by_name(type_.name)
3,908,486,326,552,046,600
Add a Product :param datacube.model.DatasetType type_: Product to add :rtype: datacube.model.DatasetType
datacube/index/_datasets.py
add
cronosnull/agdc-v2
python
def add(self, type_): '\n Add a Product\n\n :param datacube.model.DatasetType type_: Product to add\n :rtype: datacube.model.DatasetType\n ' DatasetType.validate(type_.definition) existing = self._db.get_dataset_type_by_name(type_.name) if existing: check_doc_unchanged(existing.definition, jsonify_document(type_.definition), 'Dataset type {}'.format(type_.name)) else: self._db.add_dataset_type(name=type_.name, metadata=type_.metadata_doc, metadata_type_id=type_.metadata_type.id, definition=type_.definition) return self.get_by_name(type_.name)
def update(self, type_, allow_unsafe_updates=False): '\n Update a product. Unsafe changes will throw a ValueError by default.\n\n (An unsafe change is anything that may potentially make the product\n incompatible with existing datasets of that type)\n\n :param datacube.model.DatasetType type_: Product to add\n :param allow_unsafe_updates bool: Allow unsafe changes. Use with caution.\n :rtype: datacube.model.DatasetType\n ' DatasetType.validate(type_.definition) existing = self._db.get_dataset_type_by_name(type_.name) if (not existing): raise ValueError(('Unknown product %s, cannot update – did you intend to add it?' % type_.name)) def handle_unsafe(msg): if (not allow_unsafe_updates): raise ValueError(msg) else: _LOG.warning('Ignoring %s', msg) safe_keys_to_change = ('description', 'metadata') doc_changes = get_doc_changes(existing.definition, jsonify_document(type_.definition)) for (offset, old_value, new_value) in doc_changes: _LOG.info('Changing %s %s: %r -> %r', type_.name, '.'.join(offset), old_value, new_value) key_name = offset[0] if (key_name not in safe_keys_to_change): handle_unsafe(('Potentially unsafe update: changing %r of product definition.' % key_name)) if (key_name == 'metadata'): if (not contains(old_value, new_value, case_sensitive=True)): handle_unsafe('Unsafe update: new product match rules are not a superset of old ones.') if doc_changes: _LOG.info('Updating product %s', type_.name) self._db.update_dataset_type(name=type_.name, metadata=type_.metadata_doc, metadata_type_id=type_.metadata_type.id, definition=type_.definition) self.get_by_name.cache_clear() self.get.cache_clear() else: _LOG.info('No changes detected for product %s', type_.name)
-881,313,504,314,386,600
Update a product. Unsafe changes will throw a ValueError by default. (An unsafe change is anything that may potentially make the product incompatible with existing datasets of that type) :param datacube.model.DatasetType type_: Product to add :param allow_unsafe_updates bool: Allow unsafe changes. Use with caution. :rtype: datacube.model.DatasetType
datacube/index/_datasets.py
update
cronosnull/agdc-v2
python
def update(self, type_, allow_unsafe_updates=False): '\n Update a product. Unsafe changes will throw a ValueError by default.\n\n (An unsafe change is anything that may potentially make the product\n incompatible with existing datasets of that type)\n\n :param datacube.model.DatasetType type_: Product to add\n :param allow_unsafe_updates bool: Allow unsafe changes. Use with caution.\n :rtype: datacube.model.DatasetType\n ' DatasetType.validate(type_.definition) existing = self._db.get_dataset_type_by_name(type_.name) if (not existing): raise ValueError(('Unknown product %s, cannot update – did you intend to add it?' % type_.name)) def handle_unsafe(msg): if (not allow_unsafe_updates): raise ValueError(msg) else: _LOG.warning('Ignoring %s', msg) safe_keys_to_change = ('description', 'metadata') doc_changes = get_doc_changes(existing.definition, jsonify_document(type_.definition)) for (offset, old_value, new_value) in doc_changes: _LOG.info('Changing %s %s: %r -> %r', type_.name, '.'.join(offset), old_value, new_value) key_name = offset[0] if (key_name not in safe_keys_to_change): handle_unsafe(('Potentially unsafe update: changing %r of product definition.' % key_name)) if (key_name == 'metadata'): if (not contains(old_value, new_value, case_sensitive=True)): handle_unsafe('Unsafe update: new product match rules are not a superset of old ones.') if doc_changes: _LOG.info('Updating product %s', type_.name) self._db.update_dataset_type(name=type_.name, metadata=type_.metadata_doc, metadata_type_id=type_.metadata_type.id, definition=type_.definition) self.get_by_name.cache_clear() self.get.cache_clear() else: _LOG.info('No changes detected for product %s', type_.name)
def update_document(self, definition, allow_unsafe_update=False): '\n Update a Product using its definition\n\n :param dict definition: product definition document\n :rtype: datacube.model.DatasetType\n ' type_ = self.from_doc(definition) return self.update(type_, allow_unsafe_updates=allow_unsafe_update)
5,400,559,459,547,803,000
Update a Product using its definition :param dict definition: product definition document :rtype: datacube.model.DatasetType
datacube/index/_datasets.py
update_document
cronosnull/agdc-v2
python
def update_document(self, definition, allow_unsafe_update=False): '\n Update a Product using its definition\n\n :param dict definition: product definition document\n :rtype: datacube.model.DatasetType\n ' type_ = self.from_doc(definition) return self.update(type_, allow_unsafe_updates=allow_unsafe_update)
def add_document(self, definition): '\n Add a Product using its definition\n\n :param dict definition: product definition document\n :rtype: datacube.model.DatasetType\n ' type_ = self.from_doc(definition) return self.add(type_)
3,831,264,721,657,754,000
Add a Product using its definition :param dict definition: product definition document :rtype: datacube.model.DatasetType
datacube/index/_datasets.py
add_document
cronosnull/agdc-v2
python
def add_document(self, definition): '\n Add a Product using its definition\n\n :param dict definition: product definition document\n :rtype: datacube.model.DatasetType\n ' type_ = self.from_doc(definition) return self.add(type_)
@lru_cache() def get(self, id_): '\n Retrieve Product by id\n\n :param int id_: id of the Product\n :rtype: datacube.model.DatasetType\n ' return self._make(self._db.get_dataset_type(id_))
4,528,030,748,589,950,000
Retrieve Product by id :param int id_: id of the Product :rtype: datacube.model.DatasetType
datacube/index/_datasets.py
get
cronosnull/agdc-v2
python
@lru_cache() def get(self, id_): '\n Retrieve Product by id\n\n :param int id_: id of the Product\n :rtype: datacube.model.DatasetType\n ' return self._make(self._db.get_dataset_type(id_))
@lru_cache() def get_by_name(self, name): '\n Retrieve Product by name\n\n :param str name: name of the Product\n :rtype: datacube.model.DatasetType\n ' result = self._db.get_dataset_type_by_name(name) if (not result): return None return self._make(result)
7,114,874,661,913,099,000
Retrieve Product by name :param str name: name of the Product :rtype: datacube.model.DatasetType
datacube/index/_datasets.py
get_by_name
cronosnull/agdc-v2
python
@lru_cache() def get_by_name(self, name): '\n Retrieve Product by name\n\n :param str name: name of the Product\n :rtype: datacube.model.DatasetType\n ' result = self._db.get_dataset_type_by_name(name) if (not result): return None return self._make(result)
def get_with_fields(self, field_names): '\n Return dataset types that have all the given fields.\n\n :param tuple[str] field_names:\n :rtype: __generator[DatasetType]\n ' for type_ in self.get_all(): for name in field_names: if (name not in type_.metadata_type.dataset_fields): break else: (yield type_)
8,910,857,798,950,001,000
Return dataset types that have all the given fields. :param tuple[str] field_names: :rtype: __generator[DatasetType]
datacube/index/_datasets.py
get_with_fields
cronosnull/agdc-v2
python
def get_with_fields(self, field_names): '\n Return dataset types that have all the given fields.\n\n :param tuple[str] field_names:\n :rtype: __generator[DatasetType]\n ' for type_ in self.get_all(): for name in field_names: if (name not in type_.metadata_type.dataset_fields): break else: (yield type_)
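get_with_fields() above leans on Python's for/else: the else branch runs only when the inner loop finishes without hitting break, i.e. every requested field name was found. The same pattern in isolation, independent of datacube's models:

def having_all_keys(mappings, keys):
    # yield only the mappings that contain every key in `keys`
    for m in mappings:
        for k in keys:
            if k not in m:
                break
        else:
            yield m

rows = [{'lat': 1, 'lon': 2}, {'lat': 1}]
print(list(having_all_keys(rows, ('lat', 'lon'))))   # [{'lat': 1, 'lon': 2}]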
def search(self, **query): '\n Return dataset types that have all the given fields.\n\n :param dict query:\n :rtype: __generator[DatasetType]\n ' for (type_, q) in self.search_robust(**query): if (not q): (yield type_)
4,333,933,674,560,769,500
Return dataset types that have all the given fields. :param dict query: :rtype: __generator[DatasetType]
datacube/index/_datasets.py
search
cronosnull/agdc-v2
python
def search(self, **query): '\n Return dataset types that have all the given fields.\n\n :param dict query:\n :rtype: __generator[DatasetType]\n ' for (type_, q) in self.search_robust(**query): if (not q): (yield type_)
def search_robust(self, **query): '\n Return dataset types that match match-able fields and dict of remaining un-matchable fields.\n\n :param dict query:\n :rtype: __generator[(DatasetType, dict)]\n ' for type_ in self.get_all(): q = query.copy() if (q.pop('product', type_.name) != type_.name): continue if (q.pop('metadata_type', type_.metadata_type.name) != type_.metadata_type.name): continue for (key, value) in list(q.items()): try: exprs = fields.to_expressions(type_.metadata_type.dataset_fields.get, **{key: value}) except UnknownFieldError as e: break try: if all((expr.evaluate(type_.metadata_doc) for expr in exprs)): q.pop(key) else: break except (AttributeError, KeyError, ValueError) as e: continue else: (yield (type_, q))
-9,188,898,544,133,689,000
Return dataset types that match match-able fields and dict of remaining un-matchable fields. :param dict query: :rtype: __generator[(DatasetType, dict)]
datacube/index/_datasets.py
search_robust
cronosnull/agdc-v2
python
def search_robust(self, **query): '\n Return dataset types that match match-able fields and dict of remaining un-matchable fields.\n\n :param dict query:\n :rtype: __generator[(DatasetType, dict)]\n ' for type_ in self.get_all(): q = query.copy() if (q.pop('product', type_.name) != type_.name): continue if (q.pop('metadata_type', type_.metadata_type.name) != type_.metadata_type.name): continue for (key, value) in list(q.items()): try: exprs = fields.to_expressions(type_.metadata_type.dataset_fields.get, **{key: value}) except UnknownFieldError as e: break try: if all((expr.evaluate(type_.metadata_doc) for expr in exprs)): q.pop(key) else: break except (AttributeError, KeyError, ValueError) as e: continue else: (yield (type_, q))