index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
727,425 | d8s_lists.iterables | iterable_has_single_item | Return whether the iterable has a single item in it. | def iterable_has_single_item(iterable: Iterable[Any]) -> bool:
"""Return whether the iterable has a single item in it."""
iterable = deduplicate(iterable)
result = len(tuple(iterable)) == 1
return result
| (iterable: Iterable[Any]) -> bool |
727,426 | d8s_lists.iterables | iterable_has_single_type | Return whether or not the iterable has items of only one type. | def iterable_has_single_type(iterable: Iterable[Any]) -> bool:
"""Return whether or not the iterable has items of only one type."""
return len(tuple(deduplicate(types(iterable)))) == 1
| (iterable: Iterable[Any]) -> bool |
727,427 | d8s_lists.iterables | iterable_item_index | Find the given item in the iterable. Return -1 if the item is not found. | def iterable_item_index(iterable: Sequence, item: Any) -> int:
"""Find the given item in the iterable. Return -1 if the item is not found."""
try:
return iterable.index(item)
except ValueError:
return -1
| (iterable: Sequence, item: Any) -> int |
727,428 | d8s_lists.iterables | iterable_item_indexes | Find the given item in the iterable. Return -1 if the item is not found. | def iterable_item_indexes(iterable: Iterable[Any], item: Any) -> Iterator[int]:
"""Find the given item in the iterable. Return -1 if the item is not found."""
indexes = (index for index, value in enumerate(iterable) if value == item)
return indexes
| (iterable: Iterable[Any], item: Any) -> Iterator[int] |
727,429 | d8s_lists.iterables | iterable_item_of_types | Return True if the iterable has any item that is of the item_types. Otherwise, return False. | def iterable_item_of_types(iterable: Iterable[Any], item_types: Iterable[type]) -> bool:
"""Return True if the iterable has any item that is of the item_types. Otherwise, return False."""
for i in iterable:
if type(i) in item_types:
return True
return False
| (iterable: Iterable[Any], item_types: Iterable[type]) -> bool |
727,430 | d8s_lists.iterables | iterable_replace | Replace all instances of the old_value with the new_value in the given iterable. | def iterable_replace(iterable: Iterable[Any], old_value: Any, new_value: Any) -> Iterator[Any]:
"""Replace all instances of the old_value with the new_value in the given iterable."""
for value in iterable:
if value == old_value:
yield new_value
else:
yield value
| (iterable: Iterable[Any], old_value: Any, new_value: Any) -> Iterator[Any] |
727,431 | d8s_lists.iterables | iterable_sort_by_length | . | def iterable_sort_by_length(iterable: Iterable[Any], **kwargs) -> List[Any]:
"""."""
sorted_list = sorted(iterable, key=lambda x: len(x), **kwargs) # pylint: disable=W0108
return sorted_list
| (iterable: Iterable[Any], **kwargs) -> List[Any] |
727,433 | d8s_lists.iterables | iterables_are_same_length | Return whether or not the given iterables are the same lengths. | def iterables_are_same_length(a: Sized, b: Sized, *args: Sized, debug_failure: bool = False) -> bool:
"""Return whether or not the given iterables are the same lengths."""
consolidated_list = [a, b, *args]
lengths_1, lengths_2 = itertools.tee(map(len, consolidated_list))
result = iterable_has_single_item(lengths_1)
if debug_failure and not result:
list_length_breakdown = iterable_count(lengths_2)
minority_list_count = min(dict_values(list_length_breakdown))
for index, arg in enumerate(consolidated_list):
if list_length_breakdown[len(arg)] == minority_list_count:
print(f'Argument {index} is not the same length as the majority of the arguments')
return result
| (a: Sized, b: Sized, *args: Sized, debug_failure: bool = False) -> bool |
727,434 | d8s_lists.iterables | iterables_have_same_items | Return whether iterables have identical items (considering both identity and count). | def iterables_have_same_items(a: Sequence, b: Sequence, *args: Sequence) -> bool: # noqa: CCR001
"""Return whether iterables have identical items (considering both identity and count)."""
first_list = a
remaining_lists = [b, *args]
if iterables_are_same_length(a, *remaining_lists):
for item in first_list:
first_list_count = first_list.count(item)
item_counts = [list_.count(item) for list_ in remaining_lists]
same_count = item_counts[0] == first_list_count
if not iterable_has_single_item(item_counts) or not same_count:
return False
else:
return False
return True
| (a: Sequence, b: Sequence, *args: Sequence) -> bool |
727,436 | d8s_lists.iterables | longest | . | def longest(iterable: Iterable[Any]) -> Any:
"""."""
longest_item = max(iterable, key=len)
return longest_item
| (iterable: Iterable[Any]) -> Any |
727,437 | d8s_lists.iterables | nontruthy_items | Return an iterable with only elements of the given iterable which evaluate to False.
(see https://docs.python.org/3.9/library/stdtypes.html#truth-value-testing)
| def nontruthy_items(iterable: Iterable[Any]) -> Iterator[Any]:
"""Return an iterable with only elements of the given iterable which evaluate to False.
(see https://docs.python.org/3.9/library/stdtypes.html#truth-value-testing)
"""
return filter(lambda x: not x, iterable)
| (iterable: Iterable[Any]) -> Iterator[Any] |
727,438 | d8s_lists.iterables | run_length_encoding | Perform run-length encoding on the given array.
See https://en.wikipedia.org/wiki/Run-length_encoding for more details.
| def run_length_encoding(iterable: Iterable[Any]) -> Iterator[str]:
"""Perform run-length encoding on the given array.
See https://en.wikipedia.org/wiki/Run-length_encoding for more details.
"""
run_length_encodings = (f'{len(tuple(g))}{k}' for k, g in itertools.groupby(iterable))
return run_length_encodings
| (iterable: Iterable[Any]) -> Iterator[str] |
727,441 | d8s_lists.iterables | types | Return a set containing the types of all items in the list_arg. | def types(iterable: Iterable[Any]) -> Iterator[Type]:
"""Return a set containing the types of all items in the list_arg."""
return map(type, iterable)
| (iterable: Iterable[Any]) -> Iterator[Type] |
727,442 | datasette.filters | FilterArguments | null | class FilterArguments:
def __init__(
self, where_clauses, params=None, human_descriptions=None, extra_context=None
):
self.where_clauses = where_clauses
self.params = params or {}
self.human_descriptions = human_descriptions or []
self.extra_context = extra_context or {}
| (where_clauses, params=None, human_descriptions=None, extra_context=None) |
727,443 | datasette.filters | __init__ | null | def __init__(
self, where_clauses, params=None, human_descriptions=None, extra_context=None
):
self.where_clauses = where_clauses
self.params = params or {}
self.human_descriptions = human_descriptions or []
self.extra_context = extra_context or {}
| (self, where_clauses, params=None, human_descriptions=None, extra_context=None) |
727,444 | datasette_leaflet_freedraw | extra_body_script | null | @hookimpl
def extra_body_script(request, datasette, database, table):
async def inner():
has_geometry = False
if table:
has_geometry = bool(
await geometry_columns_for_table(datasette, database, table)
)
current_geojson = None
freedraw = request.args.get("_freedraw")
if freedraw:
try:
current_geojson = json.loads(freedraw)
except ValueError:
pass
return textwrap.dedent(
"""
window.datasette = window.datasette || {{}};
datasette.leaflet_freedraw = {{
FREEDRAW_URL: '{}',
show_for_table: {},
current_geojson: {}
}};
""".format(
datasette.urls.static_plugins(
"datasette-leaflet-freedraw", "leaflet-freedraw.esm.js"
),
"true" if has_geometry else "false",
htmlsafe_json_dumps(current_geojson),
)
)
return inner
| (request, datasette, database, table) |
727,445 | datasette_leaflet_freedraw | extra_js_urls | null | @hookimpl
def extra_js_urls(view_name, datasette):
if view_name in ("database", "table"):
return [
{
"url": datasette.urls.static_plugins(
"datasette-leaflet-freedraw", "datasette-leaflet-freedraw.js"
),
"module": True,
}
]
| (view_name, datasette) |
727,446 | datasette_leaflet_freedraw | filters_from_request | null | @hookimpl
def filters_from_request(request, database, table, datasette):
async def inner():
geometry_columns = await geometry_columns_for_table(datasette, database, table)
if not geometry_columns:
return
freedraw = request.args.get("_freedraw", "")
try:
geojson = json.loads(freedraw)
except ValueError:
return
# Just use the first geometry column
column, spatial_index_enabled = list(geometry_columns.items())[0]
where_clauses = [
"Intersects(GeomFromGeoJSON(:freedraw), [{}]) = 1".format(column)
]
params = {"freedraw": json.dumps(geojson)}
# Spatial index support, if possible
if spatial_index_enabled:
where_clauses.append(
textwrap.dedent(
"""
[{table}].rowid in (select rowid from SpatialIndex where f_table_name = :freedraw_table
and search_frame = GeomFromGeoJSON(:freedraw))
""".format(
table=table
)
).strip()
)
params["freedraw_table"] = table
return FilterArguments(
where_clauses,
params=params,
human_descriptions=["geometry intersects the specified map area"],
)
return inner
| (request, database, table, datasette) |
727,447 | datasette_leaflet_freedraw | geometry_columns_for_table | null | from datasette import hookimpl
from datasette.filters import FilterArguments
from datasette.utils import sqlite3
from jinja2.utils import htmlsafe_json_dumps
import json
import textwrap
async def geometry_columns_for_table(datasette, database, table):
# Returns [{"column": spatial_index_enabled (boolean)}]
sql = """
select f_geometry_column, spatial_index_enabled
from geometry_columns
where lower(f_table_name) = lower(:table)
"""
db = datasette.get_database(database)
try:
return {
r["f_geometry_column"]: bool(r["spatial_index_enabled"])
for r in await db.execute(sql, {"table": table})
}
except sqlite3.OperationalError:
return {}
| (datasette, database, table) |
727,448 | jinja2.utils | htmlsafe_json_dumps | Serialize an object to a string of JSON with :func:`json.dumps`,
then replace HTML-unsafe characters with Unicode escapes and mark
the result safe with :class:`~markupsafe.Markup`.
This is available in templates as the ``|tojson`` filter.
The following characters are escaped: ``<``, ``>``, ``&``, ``'``.
The returned string is safe to render in HTML documents and
``<script>`` tags. The exception is in HTML attributes that are
double quoted; either use single quotes or the ``|forceescape``
filter.
:param obj: The object to serialize to JSON.
:param dumps: The ``dumps`` function to use. Defaults to
``env.policies["json.dumps_function"]``, which defaults to
:func:`json.dumps`.
:param kwargs: Extra arguments to pass to ``dumps``. Merged onto
``env.policies["json.dumps_kwargs"]``.
.. versionchanged:: 3.0
The ``dumper`` parameter is renamed to ``dumps``.
.. versionadded:: 2.9
| def htmlsafe_json_dumps(
obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
) -> markupsafe.Markup:
"""Serialize an object to a string of JSON with :func:`json.dumps`,
then replace HTML-unsafe characters with Unicode escapes and mark
the result safe with :class:`~markupsafe.Markup`.
This is available in templates as the ``|tojson`` filter.
The following characters are escaped: ``<``, ``>``, ``&``, ``'``.
The returned string is safe to render in HTML documents and
``<script>`` tags. The exception is in HTML attributes that are
double quoted; either use single quotes or the ``|forceescape``
filter.
:param obj: The object to serialize to JSON.
:param dumps: The ``dumps`` function to use. Defaults to
``env.policies["json.dumps_function"]``, which defaults to
:func:`json.dumps`.
:param kwargs: Extra arguments to pass to ``dumps``. Merged onto
``env.policies["json.dumps_kwargs"]``.
.. versionchanged:: 3.0
The ``dumper`` parameter is renamed to ``dumps``.
.. versionadded:: 2.9
"""
if dumps is None:
dumps = json.dumps
return markupsafe.Markup(
dumps(obj, **kwargs)
.replace("<", "\\u003c")
.replace(">", "\\u003e")
.replace("&", "\\u0026")
.replace("'", "\\u0027")
)
| (obj: Any, dumps: Optional[Callable[..., str]] = None, **kwargs: Any) -> markupsafe.Markup |
727,453 | django_urr.extract | extract_urls |
Extract URLEntry objects from the given iterable
of Django URL pattern objects. If no iterable is given,
the patterns exposed by the root resolver are used, i.e.
all of the URLs routed in the project.
:param urlpatterns: Iterable of URLPattern objects
:return: Generator of `URLEntry` objects.
:rtype: list[URLEntry]
| def extract_urls(urlpatterns=None):
"""
Extract URLEntry objects from the given iterable
of Django URL pattern objects. If no iterable is given,
the patterns exposed by the root resolver are used, i.e.
all of the URLs routed in the project.
:param urlpatterns: Iterable of URLPattern objects
:return: Generator of `URLEntry` objects.
:rtype: list[URLEntry]
"""
if urlpatterns is None:
urlpatterns = urls.get_resolver(None).url_patterns
yield from _extract_urls(urlpatterns, [])
| (urlpatterns=None) |
727,454 | wxai_langchain.credentials | Credentials | null | class Credentials:
DEFAULT_API = "https://us-south.ml.cloud.ibm.com/ml/v1-beta/generation/text?version=2023-05-29"
def __init__(
self,
api_key: str,
project_id: str,
api_endpoint: str = DEFAULT_API,
):
"""
Instansiate the credentials object
Args:
api_key (str): The GENAI API Key
api_endpoint (str, optional): GENAI API Endpoint. Defaults to DEFAULT_API.
"""
if api_key is None:
raise ValueError("api_key must be provided")
self.api_key = api_key
if api_endpoint is None:
raise ValueError("api_endpoint must be provided")
self.api_endpoint = api_endpoint
if project_id is None:
raise ValueError("project_id must be provided")
self.project_id = project_id
# New dict for ibm-watson-machine-learning-sdk
self.wml_credentials = {
"apikey": self.api_key,
"url": self.api_endpoint
}
| (api_key: str, project_id: str, api_endpoint: str = 'https://us-south.ml.cloud.ibm.com/ml/v1-beta/generation/text?version=2023-05-29') |
727,455 | wxai_langchain.credentials | __init__ |
Instansiate the credentials object
Args:
api_key (str): The GENAI API Key
api_endpoint (str, optional): GENAI API Endpoint. Defaults to DEFAULT_API.
| def __init__(
self,
api_key: str,
project_id: str,
api_endpoint: str = DEFAULT_API,
):
"""
Instansiate the credentials object
Args:
api_key (str): The GENAI API Key
api_endpoint (str, optional): GENAI API Endpoint. Defaults to DEFAULT_API.
"""
if api_key is None:
raise ValueError("api_key must be provided")
self.api_key = api_key
if api_endpoint is None:
raise ValueError("api_endpoint must be provided")
self.api_endpoint = api_endpoint
if project_id is None:
raise ValueError("project_id must be provided")
self.project_id = project_id
# New dict for ibm-watson-machine-learning-sdk
self.wml_credentials = {
"apikey": self.api_key,
"url": self.api_endpoint
}
| (self, api_key: str, project_id: str, api_endpoint: str = 'https://us-south.ml.cloud.ibm.com/ml/v1-beta/generation/text?version=2023-05-29') |
727,458 | openpaygo.metrics_shared | AuthMethod | null | class AuthMethod(object):
SIMPLE_AUTH = 'sa'
TIMESTAMP_AUTH = 'ta'
COUNTER_AUTH = 'ca'
DATA_AUTH = 'da'
RECURSIVE_DATA_AUTH = 'ra'
| () |
727,459 | openpaygo.metrics_request | MetricsRequestHandler | null | class MetricsRequestHandler(object):
def __init__(self, serial_number, data_format=None, secret_key=None, auth_method=None):
self.secret_key = secret_key
self.auth_method = auth_method
self.request_dict = {
'serial_number': serial_number,
}
self.data_format = data_format
if self.data_format:
if self.data_format.get('id'):
self.request_dict['data_format_id'] = data_format.get('id')
else:
self.request_dict['data_format'] = data_format
self.data = {}
self.historical_data = {}
def set_request_count(self, request_count):
self.request_dict['request_count'] = request_count
def set_timestamp(self, timestamp):
self.request_dict['timestamp'] = timestamp
def set_data(self, data):
self.data = data
def set_historical_data(self, historical_data):
if not self.data_format.get('historical_data_interval'):
for time_step in historical_data:
if not time_step.get('timestamp'):
raise ValueError('Historical Data objects must have a time stamp if no historical_data_interval is defined.')
self.historical_data = historical_data
def get_simple_request_payload(self):
payload = self.get_simple_request_dict()
return OpenPAYGOMetricsShared.convert_to_metrics_json(payload)
def get_simple_request_dict(self):
simple_request = self.request_dict
simple_request['data'] = self.data
simple_request['historical_data'] = self.historical_data
# We prepare the auth
if self.auth_method:
simple_request['auth'] = OpenPAYGOMetricsShared.generate_request_signature_from_data(simple_request, self.auth_method, self.secret_key)
return simple_request
def get_condensed_request_payload(self):
payload = self.get_condensed_request_dict()
return OpenPAYGOMetricsShared.convert_to_metrics_json(payload)
def get_condensed_request_dict(self):
if not self.data_format:
raise ValueError('No Data Format provided for condensed request')
data_order = self.data_format.get('data_order')
if self.data and not data_order:
raise ValueError('Data Format does not contain data_order')
historical_data_order = self.data_format.get('historical_data_order')
if self.historical_data and not historical_data_order:
raise ValueError('Data Format does not contain historical_data_order')
condensed_request = copy.deepcopy(self.request_dict)
condensed_request['data'] = []
condensed_request['historical_data'] = []
# We add the data
data_copy = copy.deepcopy(self.data)
for var in data_order:
condensed_request['data'].append(data_copy.pop(var) if var in data_copy else None)
if len(data_copy) > 0:
raise ValueError('Additional variables not present in the data format: '+str(data_copy))
condensed_request['data'] = OpenPAYGOMetricsShared.remove_trailing_empty_elements(condensed_request['data'])
# We add the historical data
historical_data_copy = copy.deepcopy(self.historical_data)
for time_step in historical_data_copy:
time_step_data = []
for var in historical_data_order:
time_step_data.append(time_step.pop(var) if var in time_step else None)
if len(time_step) > 0:
raise ValueError('Additional variables not present in the historical data format: '+str(time_step))
time_step_data = OpenPAYGOMetricsShared.remove_trailing_empty_elements(time_step_data)
condensed_request['historical_data'].append(time_step_data)
# We prepare the auth
if self.auth_method:
condensed_request['auth'] = OpenPAYGOMetricsShared.generate_request_signature_from_data(condensed_request, self.auth_method, self.secret_key)
# We replace the key names by the condensed ones
condensed_request = OpenPAYGOMetricsShared.convert_dict_keys_to_condensed(condensed_request)
return condensed_request
| (serial_number, data_format=None, secret_key=None, auth_method=None) |
727,460 | openpaygo.metrics_request | __init__ | null | def __init__(self, serial_number, data_format=None, secret_key=None, auth_method=None):
self.secret_key = secret_key
self.auth_method = auth_method
self.request_dict = {
'serial_number': serial_number,
}
self.data_format = data_format
if self.data_format:
if self.data_format.get('id'):
self.request_dict['data_format_id'] = data_format.get('id')
else:
self.request_dict['data_format'] = data_format
self.data = {}
self.historical_data = {}
| (self, serial_number, data_format=None, secret_key=None, auth_method=None) |
727,461 | openpaygo.metrics_request | get_condensed_request_dict | null | def get_condensed_request_dict(self):
if not self.data_format:
raise ValueError('No Data Format provided for condensed request')
data_order = self.data_format.get('data_order')
if self.data and not data_order:
raise ValueError('Data Format does not contain data_order')
historical_data_order = self.data_format.get('historical_data_order')
if self.historical_data and not historical_data_order:
raise ValueError('Data Format does not contain historical_data_order')
condensed_request = copy.deepcopy(self.request_dict)
condensed_request['data'] = []
condensed_request['historical_data'] = []
# We add the data
data_copy = copy.deepcopy(self.data)
for var in data_order:
condensed_request['data'].append(data_copy.pop(var) if var in data_copy else None)
if len(data_copy) > 0:
raise ValueError('Additional variables not present in the data format: '+str(data_copy))
condensed_request['data'] = OpenPAYGOMetricsShared.remove_trailing_empty_elements(condensed_request['data'])
# We add the historical data
historical_data_copy = copy.deepcopy(self.historical_data)
for time_step in historical_data_copy:
time_step_data = []
for var in historical_data_order:
time_step_data.append(time_step.pop(var) if var in time_step else None)
if len(time_step) > 0:
raise ValueError('Additional variables not present in the historical data format: '+str(time_step))
time_step_data = OpenPAYGOMetricsShared.remove_trailing_empty_elements(time_step_data)
condensed_request['historical_data'].append(time_step_data)
# We prepare the auth
if self.auth_method:
condensed_request['auth'] = OpenPAYGOMetricsShared.generate_request_signature_from_data(condensed_request, self.auth_method, self.secret_key)
# We replace the key names by the condensed ones
condensed_request = OpenPAYGOMetricsShared.convert_dict_keys_to_condensed(condensed_request)
return condensed_request
| (self) |
727,462 | openpaygo.metrics_request | get_condensed_request_payload | null | def get_condensed_request_payload(self):
payload = self.get_condensed_request_dict()
return OpenPAYGOMetricsShared.convert_to_metrics_json(payload)
| (self) |
727,463 | openpaygo.metrics_request | get_simple_request_dict | null | def get_simple_request_dict(self):
simple_request = self.request_dict
simple_request['data'] = self.data
simple_request['historical_data'] = self.historical_data
# We prepare the auth
if self.auth_method:
simple_request['auth'] = OpenPAYGOMetricsShared.generate_request_signature_from_data(simple_request, self.auth_method, self.secret_key)
return simple_request
| (self) |
727,464 | openpaygo.metrics_request | get_simple_request_payload | null | def get_simple_request_payload(self):
payload = self.get_simple_request_dict()
return OpenPAYGOMetricsShared.convert_to_metrics_json(payload)
| (self) |
727,465 | openpaygo.metrics_request | set_data | null | def set_data(self, data):
self.data = data
| (self, data) |
727,466 | openpaygo.metrics_request | set_historical_data | null | def set_historical_data(self, historical_data):
if not self.data_format.get('historical_data_interval'):
for time_step in historical_data:
if not time_step.get('timestamp'):
raise ValueError('Historical Data objects must have a time stamp if no historical_data_interval is defined.')
self.historical_data = historical_data
| (self, historical_data) |
727,467 | openpaygo.metrics_request | set_request_count | null | def set_request_count(self, request_count):
self.request_dict['request_count'] = request_count
| (self, request_count) |
727,468 | openpaygo.metrics_request | set_timestamp | null | def set_timestamp(self, timestamp):
self.request_dict['timestamp'] = timestamp
| (self, timestamp) |
727,469 | openpaygo.metrics_response | MetricsResponseHandler | null | class MetricsResponseHandler(object):
def __init__(self, received_metrics, data_format=None, secret_key=None, last_request_count=None, last_request_timestamp=None):
self.received_metrics = received_metrics
self.request_dict = json.loads(received_metrics)
# We convert the base variable names to simple
self.request_dict = OpenPAYGOMetricsShared.convert_dict_keys_to_simple(self.request_dict)
# We add the reception timestamp if not timestamp was provided
self.request_timestamp = self.request_dict.get('timestamp')
if not self.request_dict.get('timestamp'):
self.timestamp = int(datetime.now().timestamp())
else:
self.timestamp = self.request_dict.get('timestamp')
self.response_dict = {}
self.secret_key = secret_key
self.data_format = data_format
self.last_request_count = last_request_count
self.last_request_timestamp = last_request_timestamp
if not self.data_format and self.request_dict.get('data_format'):
self.data_format = self.request_dict.get('data_format')
def get_device_serial(self):
return self.request_dict.get('serial_number')
def get_data_format_id(self):
return self.request_dict.get('data_format_id')
def data_format_available(self):
return self.data_format != None
def set_device_parameters(self, secret_key=None, data_format=None, last_request_count=None, last_request_timestamp=None):
if secret_key:
self.secret_key = secret_key
if data_format:
self.data_format = data_format
if last_request_count:
self.last_request_count = last_request_count
if last_request_timestamp:
self.last_request_timestamp = last_request_timestamp
def is_auth_valid(self):
auth_string = self.request_dict.get('auth', None)
if not auth_string:
return False
elif not self.secret_key:
raise ValueError('Secret key is required to check the auth.')
self.auth_method = auth_string[:2]
new_signature = OpenPAYGOMetricsShared.generate_request_signature_from_data(self.request_dict, self.auth_method, self.secret_key)
if auth_string == new_signature:
request_count = self.get_request_count()
if request_count and self.last_request_count and request_count <= self.last_request_count:
return False
timestamp = self.get_request_timestamp()
if timestamp and self.last_request_timestamp and timestamp <= self.last_request_timestamp:
return False
# Either the request count or timestamp is required
if request_count or timestamp:
return True
return False
def get_simple_metrics(self):
# We start the process by making a copy of the dict to work with
simple_dict = copy.deepcopy(self.request_dict)
simple_dict.pop('auth') if 'auth' in simple_dict else None # We remove the auth
# We process the data and replace it
simple_dict['data'] = self._get_simple_data()
# We process the historical data
simple_dict['historical_data'] = self._get_simple_historical_data()
# We fill in the timestamps for each time step
simple_dict['historical_data'] = self._fill_timestamp_in_historical_data(simple_dict['historical_data'])
return simple_dict
def get_data_timestamp(self):
return self.request_dict.get('data_collection_timestamp', self.timestamp)
def get_request_timestamp(self):
return self.request_timestamp
def get_request_count(self):
return self.request_dict.get('request_count')
def get_token_count(self):
data = self._get_simple_data()
return data.get('token_count')
def expects_token_answer(self):
return self.get_token_count() is not None
def add_tokens_to_answer(self, token_list):
self.response_dict['token_list'] = token_list
def expects_time_answer(self):
data = self._get_simple_data()
if data.get('active_until_timestamp_requested', False) or data.get('active_seconds_left_requested', False):
return True
return False
def add_time_to_answer(self, target_datetime):
data = self._get_simple_data()
if data.get('active_until_timestamp_requested', False):
target_timestamp = 0
if target_datetime:
if target_datetime.year > 1970:
target_timestamp = target_datetime.timestamp()
self.response_dict['active_until_timestamp'] = target_timestamp
elif data.get('active_seconds_left_requested', False):
seconds_left = (datetime.now() - target_datetime).total_seconds() if target_datetime else 0
self.response_dict['active_seconds_left'] = seconds_left if seconds_left > 0 else 0
else:
raise ValueError('No time requested')
def add_new_base_url_to_answer(self, new_base_url):
self.add_settings_to_answer({'base_url': new_base_url})
def add_settings_to_answer(self, settings_dict):
if not self.response_dict.get('settings'):
self.response_dict['settings'] = {}
self.response_dict['settings'].update(settings_dict)
def add_extra_data_to_answer(self, extra_data_dict):
if not self.response_dict.get('extra_data'):
self.response_dict['extra_data'] = {}
self.response_dict['extra_data'].update(extra_data_dict)
def get_answer_payload(self):
payload = self.get_answer_dict()
return OpenPAYGOMetricsShared.convert_to_metrics_json(payload)
def get_answer_dict(self):
# If there is not data format, we just return the full response
condensed_answer = copy.deepcopy(self.response_dict)
if self.secret_key:
condensed_answer['auth'] = OpenPAYGOMetricsShared.generate_response_signature_from_data(
serial_number=self.request_dict.get('serial_number'),
request_count=self.request_dict.get('request_count'),
data=condensed_answer,
timestamp=self.request_dict.get('timestamp'),
secret_key=self.secret_key
)
return OpenPAYGOMetricsShared.convert_dict_keys_to_condensed(condensed_answer)
def _get_simple_data(self):
data = copy.deepcopy(self.request_dict.get('data'))
# If no data or not condensed in list, we just return it
if not data:
return {}
if not isinstance(data, list):
return data
data_order = self.data_format.get('data_order')
if not data_order:
raise ValueError('Data Format does not contain data_order')
clean_data = {}
data_len = len(data)
for idx, var in enumerate(data_order):
clean_data[var] = data[idx] if idx < data_len else None
data = data[data_len:]
if len(data) > 0:
raise ValueError('Additional variables not present in the data format: '+str(data))
return OpenPAYGOMetricsShared.convert_dict_keys_to_simple(clean_data)
def _get_simple_historical_data(self):
historical_data = copy.deepcopy(self.request_dict.get('historical_data'))
if not historical_data:
return []
historical_data_order = self.data_format.get('historical_data_order')
clean_historical_data = []
for time_step in historical_data:
time_step_data = {}
if isinstance(time_step, list):
if not historical_data_order:
raise ValueError('Data Format does not contain historical_data_order')
timse_step_len = len(time_step)
for idx, var in enumerate(historical_data_order):
if idx < timse_step_len:
time_step_data[var] = time_step[idx]
time_step = time_step[timse_step_len:]
if len(time_step) > 0:
raise ValueError('Additional variables not present in the historical data format: '+str(time_step))
elif isinstance(time_step, dict):
for key in time_step:
if key.isdigit() and int(key) < len(historical_data_order):
time_step_data[historical_data_order[int(key)]] = time_step[key]
else:
time_step_data[key] = time_step[key]
else:
raise ValueError('Invalid historical data step type: '+str(time_step))
clean_historical_data.append(time_step_data)
return clean_historical_data
# Assign an absolute integer 'timestamp' to every historical step, walking
# forward from the data-collection timestamp. Priority per step:
# 1) 'relative_time' offset (consumed and removed), 2) an explicit
# 'timestamp', 3) the data format's fixed 'historical_data_interval'.
# Mutates and returns the same list.
def _fill_timestamp_in_historical_data(self, historical_data):
last_timestamp = datetime.fromtimestamp(self.get_data_timestamp())
for idx, time_step in enumerate(historical_data):
if time_step.get('relative_time') is not None:
last_timestamp = last_timestamp + timedelta(seconds=int(time_step.get('relative_time')))
historical_data[idx]['timestamp'] = int(last_timestamp.timestamp())
del historical_data[idx]['relative_time']
elif time_step.get('timestamp'):
last_timestamp = datetime.fromtimestamp(time_step.get('timestamp'))
else:
# No timing info: space steps by the fixed interval; the first step
# keeps the base timestamp.
if idx != 0:
last_timestamp = last_timestamp + timedelta(seconds=int(self.data_format.get('historical_data_interval')))
historical_data[idx]['timestamp'] = int(last_timestamp.timestamp())
return historical_data
| (received_metrics, data_format=None, secret_key=None, last_request_count=None, last_request_timestamp=None) |
727,470 | openpaygo.metrics_response | __init__ | null | def __init__(self, received_metrics, data_format=None, secret_key=None, last_request_count=None, last_request_timestamp=None):
# Parse a received metrics JSON payload and normalize its keys to the
# "simple" (long) naming. The data_format embedded in the request is used
# only when none is supplied by the caller.
self.received_metrics = received_metrics
self.request_dict = json.loads(received_metrics)
# Convert the condensed base variable names to their simple equivalents.
self.request_dict = OpenPAYGOMetricsShared.convert_dict_keys_to_simple(self.request_dict)
# Fall back to the reception time when the request carries no timestamp.
# NOTE(review): datetime.now() is naive local time — presumably UTC is
# intended here; confirm.
self.request_timestamp = self.request_dict.get('timestamp')
if not self.request_dict.get('timestamp'):
self.timestamp = int(datetime.now().timestamp())
else:
self.timestamp = self.request_dict.get('timestamp')
self.response_dict = {}
self.secret_key = secret_key
self.data_format = data_format
self.last_request_count = last_request_count
self.last_request_timestamp = last_request_timestamp
if not self.data_format and self.request_dict.get('data_format'):
self.data_format = self.request_dict.get('data_format')
| (self, received_metrics, data_format=None, secret_key=None, last_request_count=None, last_request_timestamp=None) |
727,471 | openpaygo.metrics_response | _fill_timestamp_in_historical_data | null | def _fill_timestamp_in_historical_data(self, historical_data):
# Assign an absolute integer 'timestamp' to every historical step, walking
# forward from the data-collection timestamp: 'relative_time' offsets are
# consumed, explicit timestamps reset the walk, otherwise the data format's
# fixed 'historical_data_interval' is applied. Mutates and returns the list.
last_timestamp = datetime.fromtimestamp(self.get_data_timestamp())
for idx, time_step in enumerate(historical_data):
if time_step.get('relative_time') is not None:
last_timestamp = last_timestamp + timedelta(seconds=int(time_step.get('relative_time')))
historical_data[idx]['timestamp'] = int(last_timestamp.timestamp())
del historical_data[idx]['relative_time']
elif time_step.get('timestamp'):
last_timestamp = datetime.fromtimestamp(time_step.get('timestamp'))
else:
# No timing info: the first step keeps the base timestamp, later steps
# advance by the fixed interval.
if idx != 0:
last_timestamp = last_timestamp + timedelta(seconds=int(self.data_format.get('historical_data_interval')))
historical_data[idx]['timestamp'] = int(last_timestamp.timestamp())
return historical_data
| (self, historical_data) |
727,472 | openpaygo.metrics_response | _get_simple_data | null | def _get_simple_data(self):
# Decode the request's 'data' field: condensed list payloads are mapped
# positionally through the data_format's 'data_order'; non-list data is
# returned unchanged.
data = copy.deepcopy(self.request_dict.get('data'))
# If there is no data, or it is not condensed into a list, return it as-is.
if not data:
return {}
if not isinstance(data, list):
return data
data_order = self.data_format.get('data_order')
if not data_order:
raise ValueError('Data Format does not contain data_order')
clean_data = {}
data_len = len(data)
# Missing trailing values are filled with None.
for idx, var in enumerate(data_order):
clean_data[var] = data[idx] if idx < data_len else None
# NOTE(review): data_len == len(data), so this slice is always empty and
# the check below can never fire; len(data_order) was probably intended.
data = data[data_len:]
if len(data) > 0:
raise ValueError('Additional variables not present in the data format: '+str(data))
return OpenPAYGOMetricsShared.convert_dict_keys_to_simple(clean_data)
| (self) |
727,473 | openpaygo.metrics_response | _get_simple_historical_data | null | def _get_simple_historical_data(self):
# Decode each historical time step: list steps map positionally through
# 'historical_data_order'; dict steps may use numeric string keys as
# indexes into that order, other keys pass through unchanged.
historical_data = copy.deepcopy(self.request_dict.get('historical_data'))
if not historical_data:
return []
historical_data_order = self.data_format.get('historical_data_order')
clean_historical_data = []
for time_step in historical_data:
time_step_data = {}
if isinstance(time_step, list):
if not historical_data_order:
raise ValueError('Data Format does not contain historical_data_order')
# (sic) 'timse_step_len' is a typo for 'time_step_len' in the stored code.
timse_step_len = len(time_step)
for idx, var in enumerate(historical_data_order):
if idx < timse_step_len:
time_step_data[var] = time_step[idx]
# NOTE(review): slicing by the list's own length always yields [], so the
# extra-variables check below is dead code — confirm intent.
time_step = time_step[timse_step_len:]
if len(time_step) > 0:
raise ValueError('Additional variables not present in the historical data format: '+str(time_step))
elif isinstance(time_step, dict):
for key in time_step:
if key.isdigit() and int(key) < len(historical_data_order):
time_step_data[historical_data_order[int(key)]] = time_step[key]
else:
time_step_data[key] = time_step[key]
else:
raise ValueError('Invalid historical data step type: '+str(time_step))
clean_historical_data.append(time_step_data)
return clean_historical_data
| (self) |
727,474 | openpaygo.metrics_response | add_extra_data_to_answer | null | def add_extra_data_to_answer(self, extra_data_dict):
# Merge arbitrary key/values into the response's 'extra_data' section.
if not self.response_dict.get('extra_data'):
self.response_dict['extra_data'] = {}
self.response_dict['extra_data'].update(extra_data_dict)
| (self, extra_data_dict) |
727,475 | openpaygo.metrics_response | add_new_base_url_to_answer | null | def add_new_base_url_to_answer(self, new_base_url):
# Convenience wrapper: point the device at a new server via the settings section.
self.add_settings_to_answer({'base_url': new_base_url})
| (self, new_base_url) |
727,476 | openpaygo.metrics_response | add_settings_to_answer | null | def add_settings_to_answer(self, settings_dict):
# Merge settings into the response's 'settings' section.
if not self.response_dict.get('settings'):
self.response_dict['settings'] = {}
self.response_dict['settings'].update(settings_dict)
| (self, settings_dict) |
727,477 | openpaygo.metrics_response | add_time_to_answer | null | def add_time_to_answer(self, target_datetime):
# Answer the device's time request either as an absolute 'active_until'
# timestamp or as seconds remaining, depending on which was requested.
# Raises ValueError when the request asked for neither.
data = self._get_simple_data()
if data.get('active_until_timestamp_requested', False):
target_timestamp = 0
if target_datetime:
# Years <= 1970 are treated as "not active" (timestamp stays 0).
if target_datetime.year > 1970:
target_timestamp = target_datetime.timestamp()
self.response_dict['active_until_timestamp'] = target_timestamp
elif data.get('active_seconds_left_requested', False):
# NOTE(review): (now - target) is negative while target is in the future,
# so future expiry always clamps to 0 here; (target - now) looks
# intended — confirm.
seconds_left = (datetime.now() - target_datetime).total_seconds() if target_datetime else 0
self.response_dict['active_seconds_left'] = seconds_left if seconds_left > 0 else 0
else:
raise ValueError('No time requested')
| (self, target_datetime) |
727,478 | openpaygo.metrics_response | add_tokens_to_answer | null | def add_tokens_to_answer(self, token_list):
# Attach the list of tokens for the device to consume.
self.response_dict['token_list'] = token_list
| (self, token_list) |
727,479 | openpaygo.metrics_response | data_format_available | null | def data_format_available(self):
# True when a data format was supplied by the caller or embedded in the request.
# NOTE(review): '!= None' — idiomatic Python is 'is not None'.
return self.data_format != None
| (self) |
727,480 | openpaygo.metrics_response | expects_time_answer | null | def expects_time_answer(self):
# The device asked for its activation time in one of the two supported forms.
data = self._get_simple_data()
if data.get('active_until_timestamp_requested', False) or data.get('active_seconds_left_requested', False):
return True
return False
| (self) |
727,481 | openpaygo.metrics_response | expects_token_answer | null | def expects_token_answer(self):
# A 'token_count' present in the data signals the device wants tokens back.
return self.get_token_count() is not None
| (self) |
727,482 | openpaygo.metrics_response | get_answer_dict | null | def get_answer_dict(self):
# Build the final condensed-key answer, signing it when a secret key is set.
condensed_answer = copy.deepcopy(self.response_dict)
if self.secret_key:
# The signature covers the serial number, request count, answer payload
# and request timestamp.
condensed_answer['auth'] = OpenPAYGOMetricsShared.generate_response_signature_from_data(
serial_number=self.request_dict.get('serial_number'),
request_count=self.request_dict.get('request_count'),
data=condensed_answer,
timestamp=self.request_dict.get('timestamp'),
secret_key=self.secret_key
)
return OpenPAYGOMetricsShared.convert_dict_keys_to_condensed(condensed_answer)
| (self) |
727,483 | openpaygo.metrics_response | get_answer_payload | null | def get_answer_payload(self):
# Serialized (metrics JSON) form of the answer dict.
payload = self.get_answer_dict()
return OpenPAYGOMetricsShared.convert_to_metrics_json(payload)
| (self) |
727,484 | openpaygo.metrics_response | get_data_format_id | null | def get_data_format_id(self):
# Identifier of a pre-registered data format, if the request references one.
return self.request_dict.get('data_format_id')
| (self) |
727,485 | openpaygo.metrics_response | get_data_timestamp | null | def get_data_timestamp(self):
# Collection time of the data; falls back to the request/reception timestamp.
return self.request_dict.get('data_collection_timestamp', self.timestamp)
| (self) |
727,486 | openpaygo.metrics_response | get_device_serial | null | def get_device_serial(self):
return self.request_dict.get('serial_number')
| (self) |
727,487 | openpaygo.metrics_response | get_request_count | null | def get_request_count(self):
return self.request_dict.get('request_count')
| (self) |
727,488 | openpaygo.metrics_response | get_request_timestamp | null | def get_request_timestamp(self):
# Timestamp as carried by the request itself (None when it had none).
return self.request_timestamp
| (self) |
727,489 | openpaygo.metrics_response | get_simple_metrics | null | def get_simple_metrics(self):
# Return the full request expanded to simple form: auth stripped, data and
# historical data decoded, and absolute timestamps filled in.
simple_dict = copy.deepcopy(self.request_dict)
simple_dict.pop('auth') if 'auth' in simple_dict else None # We remove the auth
# Decode the condensed data section.
simple_dict['data'] = self._get_simple_data()
# Decode the condensed historical data section.
simple_dict['historical_data'] = self._get_simple_historical_data()
# Fill in an absolute timestamp for each time step.
simple_dict['historical_data'] = self._fill_timestamp_in_historical_data(simple_dict['historical_data'])
return simple_dict
| (self) |
727,490 | openpaygo.metrics_response | get_token_count | null | def get_token_count(self):
# The device's current token count, or None when not reported.
data = self._get_simple_data()
return data.get('token_count')
| (self) |
727,491 | openpaygo.metrics_response | is_auth_valid | null | def is_auth_valid(self):
# Verify the request signature and basic replay protection: the signature
# must match, and the request count and/or timestamp must be strictly
# greater than the last seen values. Raises ValueError when no secret key
# is available to check against.
auth_string = self.request_dict.get('auth', None)
if not auth_string:
return False
elif not self.secret_key:
raise ValueError('Secret key is required to check the auth.')
# The first two characters of the auth string select the signing method.
self.auth_method = auth_string[:2]
new_signature = OpenPAYGOMetricsShared.generate_request_signature_from_data(self.request_dict, self.auth_method, self.auth_method, self.secret_key) if False else OpenPAYGOMetricsShared.generate_request_signature_from_data(self.request_dict, self.auth_method, self.secret_key)
if auth_string == new_signature:
# NOTE(review): '==' on signatures is not constant-time; hmac.compare_digest
# would avoid timing leaks — confirm whether relevant here.
request_count = self.get_request_count()
if request_count and self.last_request_count and request_count <= self.last_request_count:
return False
timestamp = self.get_request_timestamp()
if timestamp and self.last_request_timestamp and timestamp <= self.last_request_timestamp:
return False
# Either the request count or timestamp is required
if request_count or timestamp:
return True
return False
| (self) |
727,492 | openpaygo.metrics_response | set_device_parameters | null | def set_device_parameters(self, secret_key=None, data_format=None, last_request_count=None, last_request_timestamp=None):
# Late-bind per-device parameters after construction. Only truthy values
# are applied.
# NOTE(review): truthiness means a legitimate 0 for last_request_count /
# last_request_timestamp cannot be set; 'is not None' may be intended.
if secret_key:
self.secret_key = secret_key
if data_format:
self.data_format = data_format
if last_request_count:
self.last_request_count = last_request_count
if last_request_timestamp:
self.last_request_timestamp = last_request_timestamp
| (self, secret_key=None, data_format=None, last_request_count=None, last_request_timestamp=None) |
727,493 | openpaygo.token_decode | OpenPAYGOTokenDecoder | null | class OpenPAYGOTokenDecoder(object):
# Decoder for OpenPAYGO tokens: given a token string and the device secret,
# it searches forward from the last known counter value for a matching
# token, classifies its type, and tracks which counter values were used.
# Forward search windows (how far past the last count we will try).
MAX_TOKEN_JUMP = 64
MAX_TOKEN_JUMP_COUNTER_SYNC = 100
# How many older (already passed) counts may still be accepted for Add-Time.
MAX_UNUSED_OLDER_TOKENS = 8*2
@classmethod
def decode_token(cls, token, secret_key, count, used_counts=None, starting_code=None, value_divider=1, restricted_digit_set=False):
# Returns (value, token_type, count, updated_counts). The token length
# decides standard vs extended decoding; the digit-set flag changes the
# length thresholds (restricted tokens are longer).
secret_key = OpenPAYGOTokenShared.load_secret_key_from_hex(secret_key)
if not starting_code:
# We generate the starting code from the key if not provided
starting_code = OpenPAYGOTokenShared.generate_starting_code(secret_key)
if not restricted_digit_set:
if len(token) <= 9:
extended_token = False
elif len(token) <= 12:
extended_token = True
else:
raise ValueError("Token is too long")
elif restricted_digit_set:
if len(token) <= 15:
extended_token = False
elif len(token) <= 20:
extended_token = True
else:
raise ValueError("Token is too long")
token = int(token)
if not extended_token:
value, token_type, count, updated_counts = cls.get_activation_value_count_and_type_from_token(token, starting_code, secret_key, count, restricted_digit_set, used_counts)
else:
value, token_type, count, updated_counts = cls.get_activation_value_count_from_extended_token(token, starting_code, secret_key, count, restricted_digit_set, used_counts)
# Scale the raw encoded value back to user units.
if value and value_divider:
value = value / value_divider
return value, token_type, count, updated_counts
@classmethod
def get_activation_value_count_and_type_from_token(cls, token, starting_code, key, last_count,
restricted_digit_set=False, used_counts=None):
# Standard (9/15-digit) token search. Even counts encode Add-Time; odd
# counts encode Set-Time / Disable / Counter-Sync depending on the value.
if restricted_digit_set:
token = OpenPAYGOTokenShared.convert_from_4_digit_token(token)
valid_older_token = False
token_base = OpenPAYGOTokenShared.get_token_base(token) # We get the base of the token
current_code = OpenPAYGOTokenShared.put_base_in_token(starting_code, token_base) # We put it into the starting code
starting_code_base = OpenPAYGOTokenShared.get_token_base(starting_code) # We get the base of the starting code
value = cls._decode_base(starting_code_base, token_base) # If there is a match we get the value from the token
# We try all combination up until last_count + TOKEN_JUMP, or to the larger jump if syncing counter
# We could start directly the loop at the last count if we kept the token value for the last count
if value == OpenPAYGOTokenShared.COUNTER_SYNC_VALUE:
max_count_try = last_count + cls.MAX_TOKEN_JUMP_COUNTER_SYNC + 1
else:
max_count_try = last_count + cls.MAX_TOKEN_JUMP + 1
for count in range(0, max_count_try):
masked_token = OpenPAYGOTokenShared.put_base_in_token(current_code, token_base)
if count % 2:
if value == OpenPAYGOTokenShared.COUNTER_SYNC_VALUE:
this_type = TokenType.COUNTER_SYNC
elif value == OpenPAYGOTokenShared.PAYG_DISABLE_VALUE:
this_type = TokenType.DISABLE_PAYG
else:
this_type = TokenType.SET_TIME
else:
this_type = TokenType.ADD_TIME
if masked_token == token:
if cls._count_is_valid(count, last_count, value, this_type, used_counts):
updated_counts = cls.update_used_counts(used_counts, value, count, this_type)
return value, this_type, count, updated_counts
else:
# Matched, but the count was already consumed.
valid_older_token = True
current_code = OpenPAYGOTokenShared.generate_next_token(current_code, key) # If not we go to the next token
if valid_older_token:
return None, TokenType.ALREADY_USED, None, None
return None, TokenType.INVALID, None, None
@classmethod
def _count_is_valid(cls, count, last_count, value, type, used_counts):
# A count is valid when strictly newer than last_count, within the sync
# window for counter-sync tokens, or an unused older Add-Time count.
# NOTE(review): the older-token branch does 'count not in used_counts'
# without guarding used_counts against None — TypeError if None is passed;
# confirm callers always supply a list here.
if value == OpenPAYGOTokenShared.COUNTER_SYNC_VALUE:
if count > (last_count - cls.MAX_TOKEN_JUMP):
return True
elif count > last_count:
return True
elif cls.MAX_UNUSED_OLDER_TOKENS > 0:
if count > last_count - cls.MAX_UNUSED_OLDER_TOKENS:
if count not in used_counts and type == TokenType.ADD_TIME:
return True
return False
@classmethod
def update_used_counts(cls, past_used_counts, value, new_count, type):
# Recompute the sliding window of used counts after accepting new_count.
# Non-Add-Time tokens invalidate every older count in the window.
if not past_used_counts:
return None
highest_count = max(past_used_counts) if past_used_counts else 0
if new_count > highest_count:
highest_count = new_count
bottom_range = highest_count-cls.MAX_UNUSED_OLDER_TOKENS
used_counts = []
if type != TokenType.ADD_TIME or value == OpenPAYGOTokenShared.COUNTER_SYNC_VALUE or value == OpenPAYGOTokenShared.PAYG_DISABLE_VALUE:
# If it is not an Add-Time token, we mark all the past tokens as used in the range
for count in range(bottom_range, highest_count+1):
used_counts.append(count)
else:
# If it is an Add-Time token, we just mark the tokens actually used in the range
for count in range(bottom_range, highest_count+1):
if count == new_count or count in past_used_counts:
used_counts.append(count)
return used_counts
@classmethod
def _decode_base(cls, starting_code_base, token_base):
# Value is the base difference modulo 1000 (3-digit base).
decoded_value = token_base - starting_code_base
if decoded_value < 0:
return decoded_value + 1000
else:
return decoded_value
@classmethod
def get_activation_value_count_from_extended_token(cls, token, starting_code, key, last_count,
restricted_digit_set=False, used_counts=None):
# Extended (12/20-digit) token search; same scheme with a 6-digit base and
# only Add-Time / Set-Time types.
# NOTE(review): unlike the standard variant, 'valid_older_token' is never
# initialized before the loop, so an invalid extended token raises
# NameError at the 'if valid_older_token' check below — likely a bug.
if restricted_digit_set:
token = OpenPAYGOTokenSharedExtended.convert_from_4_digit_token(token)
token_base = OpenPAYGOTokenSharedExtended.get_token_base(token) # We get the base of the token
current_code = OpenPAYGOTokenSharedExtended.put_base_in_token(starting_code, token_base) # We put it into the starting code
starting_code_base = OpenPAYGOTokenSharedExtended.get_token_base(starting_code) # We get the base of the starting code
value = cls._decode_base_extended(starting_code_base, token_base) # If there is a match we get the value from the token
max_count_try = last_count + cls.MAX_TOKEN_JUMP + 1
for count in range(0, max_count_try):
masked_token = OpenPAYGOTokenSharedExtended.put_base_in_token(current_code, token_base)
if count % 2:
this_type = TokenType.SET_TIME
else:
this_type = TokenType.ADD_TIME
if masked_token == token:
if cls._count_is_valid(count, last_count, value, this_type, used_counts):
updated_counts = cls.update_used_counts(used_counts, value, count, this_type)
return value, this_type, count, updated_counts
else:
valid_older_token = True
current_code = OpenPAYGOTokenSharedExtended.generate_next_token(current_code, key) # If not we go to the next token
if valid_older_token:
return None, TokenType.ALREADY_USED, None, None
return None, TokenType.INVALID, None, None
@classmethod
def _decode_base_extended(cls, starting_code_base, token_base):
# Value is the base difference modulo 1000000 (6-digit base).
decoded_value = token_base - starting_code_base
if decoded_value < 0:
return decoded_value + 1000000
else:
return decoded_value
| () |
727,494 | openpaygo.token_encode | OpenPAYGOTokenEncoder | null | class OpenPAYGOTokenEncoder(object):
# Encoder for OpenPAYGO tokens: embeds a value into the token base, then
# chains the keyed token-generation function 'new_count' times. Returns
# (new_count, token_string); the caller must persist new_count.
@classmethod
def generate_token(cls, secret_key, count, value=None, token_type=TokenType.ADD_TIME, starting_code=None, value_divider=1, restricted_digit_set=False, extended_token=False):
secret_key = OpenPAYGOTokenShared.load_secret_key_from_hex(secret_key)
if not starting_code:
# We generate the starting code from the key if not provided
starting_code = OpenPAYGOTokenShared.generate_starting_code(secret_key)
if token_type in [TokenType.ADD_TIME, TokenType.SET_TIME]:
# Scale user units into the raw encoded value and bound-check it.
value = int(round(value * value_divider, 0))
if not extended_token:
max_value = OpenPAYGOTokenShared.MAX_ACTIVATION_VALUE
else:
max_value = OpenPAYGOTokenSharedExtended.MAX_ACTIVATION_VALUE
if value > max_value:
raise ValueError('The value provided is too high.')
elif value:
raise ValueError('A value is not allowed for this token type.')
else:
# Disable / Counter-Sync tokens carry fixed reserved values.
if token_type == TokenType.DISABLE_PAYG:
value = OpenPAYGOTokenShared.PAYG_DISABLE_VALUE
elif token_type == TokenType.COUNTER_SYNC:
value = OpenPAYGOTokenShared.COUNTER_SYNC_VALUE
else:
raise ValueError('The token type provided is not supported.')
if extended_token:
return cls.generate_extended_token(starting_code, secret_key, value, count, token_type, restricted_digit_set)
else:
return cls.generate_standard_token(starting_code, secret_key, value, count, token_type, restricted_digit_set)
@classmethod
def generate_standard_token(cls, starting_code=None, key=None, value=None, count=None,
mode=TokenType.ADD_TIME, restricted_digit_set=False):
# We get the first 3 digits with encoded value
starting_code_base = OpenPAYGOTokenShared.get_token_base(starting_code)
token_base = cls._encode_base(starting_code_base, value)
current_token = OpenPAYGOTokenShared.put_base_in_token(starting_code, token_base)
new_count = cls._get_new_count(count, mode)
# Chain the keyed generator new_count times from the starting code.
for xn in range(0, new_count):
current_token = OpenPAYGOTokenShared.generate_next_token(current_token, key)
final_token = OpenPAYGOTokenShared.put_base_in_token(current_token, token_base)
if restricted_digit_set:
final_token = OpenPAYGOTokenShared.convert_to_4_digit_token(final_token)
final_token = '{:015d}'.format(final_token)
else:
final_token = '{:09d}'.format(final_token)
return new_count, final_token
@classmethod
def _encode_base(cls, base, number):
# Add the value to the base modulo 1000 (3-digit base).
if number + base > 999:
return number + base - 1000
else:
return number + base
@classmethod
def generate_extended_token(cls, starting_code, key, value, count, mode=TokenType.ADD_TIME, restricted_digit_set=False):
# Same scheme as the standard token with a 6-digit base (12/20 digits).
starting_code_base = OpenPAYGOTokenSharedExtended.get_token_base(starting_code)
token_base = cls._encode_base_extended(starting_code_base, value)
current_token = OpenPAYGOTokenSharedExtended.put_base_in_token(starting_code, token_base)
new_count = cls._get_new_count(count, mode)
for xn in range(0, new_count):
current_token = OpenPAYGOTokenSharedExtended.generate_next_token(current_token, key)
final_token = OpenPAYGOTokenSharedExtended.put_base_in_token(current_token, token_base)
if restricted_digit_set:
final_token = OpenPAYGOTokenSharedExtended.convert_to_4_digit_token(final_token)
final_token = '{:020d}'.format(final_token)
else:
final_token = '{:012d}'.format(final_token)
return new_count, final_token
@classmethod
def _encode_base_extended(cls, base, number):
# Add the value to the base modulo 1000000 (6-digit base).
if number + base > 999999:
return number + base - 1000000
else:
return number + base
@classmethod
def _get_new_count(cls, count, mode):
# Advance the counter to the next count of the right parity: odd counts
# carry Set-Time / Disable / Counter-Sync, even counts carry Add-Time.
current_count_odd = count % 2
if mode in [TokenType.SET_TIME, TokenType.DISABLE_PAYG, TokenType.COUNTER_SYNC]:
if current_count_odd: # Odd numbers are for Set Time, Disable PAYG or Counter Sync
new_count = count+2
else:
new_count = count+1
else:
if current_count_odd: # Even numbers are for Add Time
new_count = count+1
else:
new_count = count+2
return new_count
| () |
727,495 | openpaygo.token_shared | TokenType | null | class TokenType(object):
# Enumeration of token kinds (plain int constants, not enum.Enum).
ADD_TIME = 1
SET_TIME = 2
DISABLE_PAYG = 3
COUNTER_SYNC = 4
# Decode outcomes rather than real token kinds:
INVALID = 10
ALREADY_USED = 11
| () |
727,496 | openpaygo | decode_token | null | def decode_token(**kwargs):
# Module-level convenience wrapper around the decoder classmethod.
return OpenPAYGOTokenDecoder.decode_token(**kwargs)
| (**kwargs) |
727,497 | openpaygo | generate_token | null | def generate_token(**kwargs):
# Module-level convenience wrapper around the encoder classmethod.
return OpenPAYGOTokenEncoder.generate_token(**kwargs)
| (**kwargs) |
727,505 | chiabip158 | PyBIP158 | null | from chiabip158 import PyBIP158
| null |
727,506 | stream_chat.client | StreamChat | null | class StreamChat(StreamChatInterface):
# Synchronous Stream Chat client built on a shared requests.Session with
# one automatic retry per scheme.
def __init__(
self, api_key: str, api_secret: str, timeout: float = 6.0, **options: Any
):
super().__init__(
api_key=api_key, api_secret=api_secret, timeout=timeout, **options
)
self.session = requests.Session()
self.session.mount("http://", requests.adapters.HTTPAdapter(max_retries=1))
self.session.mount("https://", requests.adapters.HTTPAdapter(max_retries=1))
def set_http_session(self, session: requests.Session) -> None:
"""
You can use your own `requests.Session` instance. This instance
will be used for underlying HTTP requests.
"""
self.session = session
def _parse_response(self, response: requests.Response) -> StreamResponse:
# Decode the JSON body (empty body -> {}) and convert HTTP errors into
# StreamAPIException.
try:
parsed_result = json.loads(response.text) if response.text else {}
except ValueError:
raise StreamAPIException(response.text, response.status_code)
# NOTE(review): 399 is not an error status; the conventional check is
# >= 400 — confirm whether this is intentional.
if response.status_code >= 399:
raise StreamAPIException(response.text, response.status_code)
return StreamResponse(
parsed_result, dict(response.headers), response.status_code
)
def _make_request(
self,
method: Callable[..., requests.Response],
relative_url: str,
params: Dict = None,
data: Any = None,
) -> StreamResponse:
# Central request builder: merges default query params, sets the JWT auth
# headers, and JSON-serializes the body for write verbs.
params = params or {}
data = data or {}
serialized = None
default_params = self.get_default_params()
default_params.update(params)
headers = get_default_header()
headers["Authorization"] = self.auth_token
headers["stream-auth-type"] = "jwt"
url = f"{self.base_url}/{relative_url}"
# Only write verbs carry a serialized body.
if method.__name__ in ["post", "put", "patch"]:
serialized = json.dumps(data)
response = method(
url,
data=serialized,
headers=headers,
params=default_params,
timeout=self.timeout,
)
return self._parse_response(response)
# Thin verb wrappers delegating to _make_request.
def put(
self, relative_url: str, params: Dict = None, data: Any = None
) -> StreamResponse:
return self._make_request(self.session.put, relative_url, params, data)
def post(
self, relative_url: str, params: Dict = None, data: Any = None
) -> StreamResponse:
return self._make_request(self.session.post, relative_url, params, data)
def get(self, relative_url: str, params: Dict = None) -> StreamResponse:
return self._make_request(self.session.get, relative_url, params, None)
def delete(self, relative_url: str, params: Dict = None) -> StreamResponse:
return self._make_request(self.session.delete, relative_url, params, None)
def patch(
self, relative_url: str, params: Dict = None, data: Any = None
) -> StreamResponse:
return self._make_request(self.session.patch, relative_url, params, data)
# --- App settings and user management endpoints ---
def update_app_settings(self, **settings: Any) -> StreamResponse:
return self.patch("app", data=settings)
def get_app_settings(self) -> StreamResponse:
return self.get("app")
def set_guest_user(self, guest_user: Dict) -> StreamResponse:
return self.post("guest", data=dict(user=guest_user))
def update_users(self, users: List[Dict]) -> StreamResponse:
# Deprecated alias for upsert_users.
warnings.warn(
"This method is deprecated. Use upsert_users instead.",
DeprecationWarning,
stacklevel=2,
)
return self.upsert_users(users)
def update_user(self, user: Dict) -> StreamResponse:
# Deprecated alias for upsert_user.
warnings.warn(
"This method is deprecated. Use upsert_user instead.",
DeprecationWarning,
stacklevel=2,
)
return self.upsert_user(user)
def upsert_users(self, users: List[Dict]) -> StreamResponse:
# Each user dict must carry an 'id'; the API takes them keyed by id.
return self.post("users", data={"users": {u["id"]: u for u in users}})
def upsert_user(self, user: Dict) -> StreamResponse:
return self.upsert_users([user])
def update_users_partial(self, updates: List[Dict]) -> StreamResponse:
return self.patch("users", data={"users": updates})
def update_user_partial(self, update: Dict) -> StreamResponse:
return self.update_users_partial([update])
def delete_user(self, user_id: str, **options: Any) -> StreamResponse:
return self.delete(f"users/{user_id}", options)
def delete_users(
self, user_ids: Iterable[str], delete_type: str, **options: Any
) -> StreamResponse:
return self.post(
"users/delete", data=dict(options, user=delete_type, user_ids=user_ids)
)
def restore_users(self, user_ids: Iterable[str]) -> StreamResponse:
return self.post("users/restore", data={"user_ids": user_ids})
def deactivate_user(self, user_id: str, **options: Any) -> StreamResponse:
return self.post(f"users/{user_id}/deactivate", data=options)
def reactivate_user(self, user_id: str, **options: Any) -> StreamResponse:
return self.post(f"users/{user_id}/reactivate", data=options)
def export_user(self, user_id: str, **options: Any) -> StreamResponse:
return self.get(f"users/{user_id}/export", options)
# --- Moderation endpoints (ban / flag / report) ---
def ban_user(self, target_id: str, **options: Any) -> StreamResponse:
data = {"target_user_id": target_id, **options}
return self.post("moderation/ban", data=data)
def shadow_ban(self, target_id: str, **options: Any) -> StreamResponse:
# A shadow ban is a regular ban with shadow=True.
return self.ban_user(target_id, shadow=True, **options)
def remove_shadow_ban(self, target_id: str, **options: Any) -> StreamResponse:
return self.unban_user(target_id, shadow=True, **options)
def unban_user(self, target_id: str, **options: Any) -> StreamResponse:
params = {"target_user_id": target_id, **options}
return self.delete("moderation/ban", params)
def query_banned_users(self, query_conditions: Dict) -> StreamResponse:
return self.get(
"query_banned_users", params={"payload": json.dumps(query_conditions)}
)
def run_message_action(self, message_id: str, data: Dict) -> StreamResponse:
return self.post(f"messages/{message_id}/action", data=data)
def flag_message(self, target_id: str, **options: Any) -> StreamResponse:
data = {"target_message_id": target_id, **options}
return self.post("moderation/flag", data=data)
def unflag_message(self, target_id: str, **options: Any) -> StreamResponse:
data = {"target_message_id": target_id, **options}
return self.post("moderation/unflag", data=data)
def query_message_flags(
self, filter_conditions: Dict, **options: Any
) -> StreamResponse:
params = {
**options,
"filter_conditions": filter_conditions,
}
return self.get(
"moderation/flags/message", params={"payload": json.dumps(params)}
)
def flag_user(self, target_id: str, **options: Any) -> StreamResponse:
data = {"target_user_id": target_id, **options}
return self.post("moderation/flag", data=data)
def unflag_user(self, target_id: str, **options: Any) -> StreamResponse:
data = {"target_user_id": target_id, **options}
return self.post("moderation/unflag", data=data)
def _query_flag_reports(self, **options: Any) -> StreamResponse:
"""
Note: Do not use this.
It is present for internal usage only.
This function can, and will, break and/or be removed at any point in time.
"""
data = {"filter_conditions": options}
return self.post("moderation/reports", data=data)
def _review_flag_report(
self, report_id: str, review_result: str, user_id: str, **details: Any
) -> StreamResponse:
"""
Note: Do not use this.
It is present for internal usage only.
This function can, and will, break and/or be removed at any point in time.
"""
data = {
"review_result": review_result,
"user_id": user_id,
"review_details": details,
}
return self.patch(f"moderation/reports/{report_id}", data=data)
def mute_user(self, target_id: str, user_id: str, **options: Any) -> StreamResponse:
data = {"target_id": target_id, "user_id": user_id, **options}
return self.post("moderation/mute", data=data)
def mute_users(
self, target_ids: List[str], user_id: str, **options: Any
) -> StreamResponse:
data = {"target_ids": target_ids, "user_id": user_id, **options}
return self.post("moderation/mute", data=data)
def unmute_user(self, target_id: str, user_id: str) -> StreamResponse:
data = {"target_id": target_id, "user_id": user_id}
return self.post("moderation/unmute", data=data)
def unmute_users(self, target_ids: List[str], user_id: str) -> StreamResponse:
data = {"target_ids": target_ids, "user_id": user_id}
return self.post("moderation/unmute", data=data)
def mark_all_read(self, user_id: str) -> StreamResponse:
return self.post("channels/read", data={"user": {"id": user_id}})
def translate_message(self, message_id: str, language: str) -> StreamResponse:
return self.post(
f"messages/{message_id}/translate", data={"language": language}
)
def commit_message(self, message_id: str) -> StreamResponse:
return self.post(f"messages/{message_id}/commit")
def pin_message(
self, message_id: str, user_id: str, expiration: int = None
) -> StreamResponse:
updates = {
"set": {
"pinned": True,
"pin_expires": expiration,
}
}
return self.update_message_partial(message_id, updates, user_id)
def unpin_message(self, message_id: str, user_id: str) -> StreamResponse:
updates = {
"set": {
"pinned": False,
}
}
return self.update_message_partial(message_id, updates, user_id)
def update_message(self, message: Dict) -> StreamResponse:
if message.get("id") is None:
raise ValueError("message must have an id")
return self.post(f"messages/{message['id']}", data={"message": message})
def update_message_partial(
self, message_id: str, updates: Dict, user_id: str, **options: Any
) -> StreamResponse:
data = updates.copy()
if user_id:
data["user"] = {"id": user_id}
data.update(options)
return self.put(f"messages/{message_id}", data=data)
def delete_message(self, message_id: str, **options: Any) -> StreamResponse:
return self.delete(f"messages/{message_id}", options)
def get_message(self, message_id: str, **options: Any) -> StreamResponse:
return self.get(f"messages/{message_id}", options)
def query_users(
self, filter_conditions: Dict, sort: List[Dict] = None, **options: Any
) -> StreamResponse:
params: Dict = options.copy()
params.update(
{"filter_conditions": filter_conditions, "sort": self.normalize_sort(sort)}
)
return self.get("users", params={"payload": json.dumps(params)})
def query_channels(
self, filter_conditions: Dict, sort: List[Dict] = None, **options: Any
) -> StreamResponse:
params: Dict[str, Any] = {"state": True, "watch": False, "presence": False}
params.update(options)
params.update(
{"filter_conditions": filter_conditions, "sort": self.normalize_sort(sort)}
)
return self.post("channels", data=params)
def create_channel_type(self, data: Dict) -> StreamResponse:
if "commands" not in data or not data["commands"]:
data["commands"] = ["all"]
return self.post("channeltypes", data=data)
def get_channel_type(self, channel_type: str) -> StreamResponse:
return self.get(f"channeltypes/{channel_type}")
def list_channel_types(self) -> StreamResponse:
return self.get("channeltypes")
def update_channel_type(self, channel_type: str, **settings: Any) -> StreamResponse:
return self.put(f"channeltypes/{channel_type}", data=settings)
def delete_channel_type(self, channel_type: str) -> StreamResponse:
return self.delete(f"channeltypes/{channel_type}")
def channel( # type: ignore
self, channel_type: str, channel_id: str = None, data: Dict = None
) -> Channel:
return Channel(self, channel_type, channel_id, data)
def delete_channels(self, cids: Iterable[str], **options: Any) -> StreamResponse:
return self.post("channels/delete", data=dict(options, cids=cids))
def list_commands(self) -> StreamResponse:
return self.get("commands")
def create_command(self, data: Dict) -> StreamResponse:
return self.post("commands", data=data)
def delete_command(self, name: str) -> StreamResponse:
return self.delete(f"commands/{name}")
def get_command(self, name: str) -> StreamResponse:
return self.get(f"commands/{name}")
def update_command(self, name: str, **settings: Any) -> StreamResponse:
return self.put(f"commands/{name}", data=settings)
def add_device(
    self,
    device_id: str,
    push_provider: str,
    user_id: str,
    push_provider_name: str = None,
) -> StreamResponse:
    """Register a push device for a user with the given provider."""
    payload = {
        "id": device_id,
        "push_provider": push_provider,
        "user_id": user_id,
        "push_provider_name": push_provider_name,
    }
    return self.post("devices", data=payload)
def delete_device(self, device_id: str, user_id: str) -> StreamResponse:
return self.delete("devices", {"id": device_id, "user_id": user_id})
def get_devices(self, user_id: str) -> StreamResponse:
return self.get("devices", {"user_id": user_id})
def get_rate_limits(
    self,
    server_side: bool = False,
    android: bool = False,
    ios: bool = False,
    web: bool = False,
    endpoints: Iterable[str] = None,
) -> StreamResponse:
    """Fetch rate-limit quotas, optionally narrowed by platform/endpoints."""
    # The API expects string "true" flags, only for the platforms requested.
    flags = {
        "server_side": server_side,
        "android": android,
        "ios": ios,
        "web": web,
    }
    params: Dict[str, Any] = {name: "true" for name, on in flags.items() if on}
    if endpoints:
        params["endpoints"] = ",".join(endpoints)
    return self.get("rate_limits", params)
def search(
    self,
    filter_conditions: Dict,
    query: Union[str, Dict],
    sort: List[Dict] = None,
    **options: Any,
) -> StreamResponse:
    """Full-text message search.

    Offset-based pagination cannot be combined with ``sort`` or ``next``.
    """
    if "offset" in options and (sort or "next" in options):
        raise ValueError("cannot use offset with sort or next parameters")
    payload = self.create_search_params(filter_conditions, query, sort, **options)
    return self.get("search", params={"payload": json.dumps(payload)})
def send_file(
    self, uri: str, url: str, name: str, user: Dict, content_type: str = None
) -> StreamResponse:
    """Upload a file on behalf of a user.

    A ``url`` without a scheme is treated as a local file path; otherwise
    the content is fetched over HTTP first, then re-uploaded to Stream.
    """
    headers = {
        "Authorization": self.auth_token,
        "stream-auth-type": "jwt",
        "X-Stream-Client": get_user_agent(),
    }
    if urlparse(url).scheme == "":
        # Local path: read straight from disk.
        with open(url, "rb") as f:
            content = f.read()
    else:
        # Remote resource: fetch and close the connection deterministically
        # (the original never closed the urlopen response).
        request = Request(url, headers={"User-Agent": "Mozilla/5.0"})
        with urlopen(request) as resp:
            content = resp.read()
    response = requests.post(
        f"{self.base_url}/{uri}",
        params=self.get_default_params(),
        data={"user": json.dumps(user)},
        files={"file": (name, content, content_type)},  # type: ignore
        headers=headers,
    )
    return self._parse_response(response)
def create_blocklist(
    self, name: str, words: Iterable[str], type: str = "word"
) -> StreamResponse:
    """Create a named blocklist of words (default type: word match)."""
    payload = {"name": name, "words": words, "type": type}
    return self.post("blocklists", data=payload)
def list_blocklists(self) -> StreamResponse:
return self.get("blocklists")
def get_blocklist(self, name: str) -> StreamResponse:
return self.get(f"blocklists/{name}")
def update_blocklist(self, name: str, words: Iterable[str]) -> StreamResponse:
return self.put(f"blocklists/{name}", data={"words": words})
def delete_blocklist(self, name: str) -> StreamResponse:
return self.delete(f"blocklists/{name}")
def check_push(self, push_data: Dict) -> StreamResponse:
return self.post("check_push", data=push_data)
def check_sqs(
    self, sqs_key: str = None, sqs_secret: str = None, sqs_url: str = None
) -> StreamResponse:
    """Validate SQS credentials/queue URL against the backend."""
    return self.post(
        "check_sqs",
        data={"sqs_key": sqs_key, "sqs_secret": sqs_secret, "sqs_url": sqs_url},
    )
def check_sns(
    self, sns_key: str = None, sns_secret: str = None, sns_topic_arn: str = None
) -> StreamResponse:
    """Validate SNS credentials/topic ARN against the backend."""
    return self.post(
        "check_sns",
        data={
            "sns_key": sns_key,
            "sns_secret": sns_secret,
            "sns_topic_arn": sns_topic_arn,
        },
    )
def get_permission(self, id: str) -> StreamResponse:
return self.get(f"permissions/{id}")
def create_permission(self, permission: Dict) -> StreamResponse:
return self.post("permissions", data=permission)
def update_permission(self, id: str, permission: Dict) -> StreamResponse:
return self.put(f"permissions/{id}", data=permission)
def delete_permission(self, id: str) -> StreamResponse:
return self.delete(f"permissions/{id}")
def list_permissions(self) -> StreamResponse:
return self.get("permissions")
def create_role(self, name: str) -> StreamResponse:
return self.post("roles", data={"name": name})
def delete_role(self, name: str) -> StreamResponse:
return self.delete(f"roles/{name}")
def list_roles(self) -> StreamResponse:
return self.get("roles")
def segment( # type: ignore
self,
segment_type: SegmentType,
segment_id: Optional[str] = None,
data: Optional[SegmentData] = None,
) -> Segment:
return Segment(
client=self, segment_type=segment_type, segment_id=segment_id, data=data
)
def create_segment(
    self,
    segment_type: SegmentType,
    segment_id: Optional[str] = None,
    data: Optional[SegmentData] = None,
) -> StreamResponse:
    """Create a segment; the id and extra data fields are optional."""
    payload: Dict[str, Any] = {"type": segment_type.value}
    if segment_id is not None:
        payload["id"] = segment_id
    if data is not None:
        payload.update(cast(dict, data))
    return self.post("segments", data=payload)
def get_segment(self, segment_id: str) -> StreamResponse:
return self.get(f"segments/{segment_id}")
def query_segments(
    self,
    filter_conditions: Optional[Dict] = None,
    sort: Optional[List[SortParam]] = None,
    options: Optional[QuerySegmentsOptions] = None,
) -> StreamResponse:
    """Query segments with optional filter, sort and pagination options."""
    payload: Dict[str, Any] = {}
    # Only include the keys the caller actually supplied.
    for key, value in (("filter", filter_conditions), ("sort", sort)):
        if value is not None:
            payload[key] = value
    if options is not None:
        payload.update(cast(dict, options))
    return self.post("segments/query", data=payload)
def update_segment(
self, segment_id: str, data: SegmentUpdatableFields
) -> StreamResponse:
return self.put(f"segments/{segment_id}", data=data)
def delete_segment(self, segment_id: str) -> StreamResponse:
return self.delete(f"segments/{segment_id}")
def segment_target_exists(self, segment_id: str, target_id: str) -> StreamResponse:
return self.get(f"segments/{segment_id}/target/{target_id}")
def add_segment_targets(
self, segment_id: str, target_ids: List[str]
) -> StreamResponse:
return self.post(
f"segments/{segment_id}/addtargets", data={"target_ids": target_ids}
)
def query_segment_targets(
self,
segment_id: str,
filter_conditions: Optional[Dict[str, Any]] = None,
sort: Optional[List[SortParam]] = None,
options: Optional[QuerySegmentTargetsOptions] = None,
) -> StreamResponse:
payload: Dict[str, Union[Dict[str, Any], List[SortParam]]] = {}
if filter_conditions is not None:
payload["filter"] = filter_conditions
if sort is not None:
payload["sort"] = sort
if options is not None:
payload.update(cast(dict, options))
return self.post(f"segments/{segment_id}/targets/query", data=payload)
def remove_segment_targets(
self, segment_id: str, target_ids: List[str]
) -> StreamResponse:
return self.post(
f"segments/{segment_id}/deletetargets", data={"target_ids": target_ids}
)
def campaign( # type: ignore
self, campaign_id: Optional[str] = None, data: Optional[CampaignData] = None
) -> Campaign:
return Campaign(client=self, campaign_id=campaign_id, data=data)
def create_campaign(
self, campaign_id: Optional[str] = None, data: CampaignData = None
) -> StreamResponse:
payload = {"id": campaign_id}
if data is not None:
payload.update(cast(dict, data))
return self.post("campaigns", data=payload)
def get_campaign(self, campaign_id: str) -> StreamResponse:
return self.get(f"campaigns/{campaign_id}")
def query_campaigns(
    self,
    filter_conditions: Optional[Dict[str, Any]] = None,
    sort: Optional[List[SortParam]] = None,
    options: Optional[QueryCampaignsOptions] = None,
) -> StreamResponse:
    """Query campaigns with optional filter, sort and pagination options."""
    payload: Dict[str, Any] = {}
    # Only include the keys the caller actually supplied.
    for key, value in (("filter", filter_conditions), ("sort", sort)):
        if value is not None:
            payload[key] = value
    if options is not None:
        payload.update(cast(dict, options))
    return self.post("campaigns/query", data=payload)
def update_campaign(self, campaign_id: str, data: CampaignData) -> StreamResponse:
return self.put(f"campaigns/{campaign_id}", data=data)
def delete_campaign(self, campaign_id: str, **options: Any) -> StreamResponse:
return self.delete(f"campaigns/{campaign_id}", options)
def start_campaign(
    self,
    campaign_id: str,
    scheduled_for: Optional[Union[str, datetime.datetime]] = None,
    stop_at: Optional[Union[str, datetime.datetime]] = None,
) -> StreamResponse:
    """Start a campaign, optionally scheduled and/or bounded in time.

    Datetime values are serialized to ISO-8601 strings.
    """
    payload = {}
    for key, value in (("scheduled_for", scheduled_for), ("stop_at", stop_at)):
        if value is None:
            continue
        if isinstance(value, datetime.datetime):
            value = value.isoformat()
        payload[key] = value
    return self.post(f"campaigns/{campaign_id}/start", data=payload)
def stop_campaign(self, campaign_id: str) -> StreamResponse:
return self.post(f"campaigns/{campaign_id}/stop")
def test_campaign(self, campaign_id: str, users: Iterable[str]) -> StreamResponse:
return self.post(f"campaigns/{campaign_id}/test", data={"users": users})
def revoke_tokens(self, since: Union[str, datetime.datetime]) -> StreamResponse:
if isinstance(since, datetime.datetime):
since = since.isoformat()
return self.update_app_settings(revoke_tokens_issued_before=since)
def revoke_user_token(
self, user_id: str, before: Union[str, datetime.datetime]
) -> StreamResponse:
return self.revoke_users_token([user_id], before)
def revoke_users_token(
    self, user_ids: Iterable[str], before: Union[str, datetime.datetime]
) -> StreamResponse:
    """Invalidate all tokens issued before ``before`` for the given users."""
    if isinstance(before, datetime.datetime):
        before = before.isoformat()
    updates = [
        {"id": user_id, "set": {"revoke_tokens_issued_before": before}}
        for user_id in user_ids
    ]
    return self.update_users_partial(updates)
def export_channel(
    self,
    channel_type: str,
    channel_id: str,
    messages_since: Union[str, datetime.datetime] = None,
    messages_until: Union[str, datetime.datetime] = None,
    **options: Any,
) -> StreamResponse:
    """Export one channel by delegating to the bulk export endpoint."""
    def _iso(value):
        # Accept either ISO strings (passed through) or datetime objects.
        if isinstance(value, datetime.datetime):
            return value.isoformat()
        return value

    spec = {
        "id": channel_id,
        "type": channel_type,
        "messages_since": _iso(messages_since),
        "messages_until": _iso(messages_until),
    }
    return self.export_channels([spec], **options)
def export_channels(
self, channels: Iterable[Dict], **options: Any
) -> StreamResponse:
return self.post("export_channels", data={"channels": channels, **options})
def get_export_channel_status(self, task_id: str) -> StreamResponse:
return self.get(f"export_channels/{task_id}")
def get_task(self, task_id: str) -> StreamResponse:
return self.get(f"tasks/{task_id}")
def send_user_custom_event(self, user_id: str, event: Dict) -> StreamResponse:
return self.post(f"users/{user_id}/event", data={"event": event})
def upsert_push_provider(self, push_provider_config: Dict) -> StreamResponse:
return self.post("push_providers", data={"push_provider": push_provider_config})
def delete_push_provider(self, provider_type: str, name: str) -> StreamResponse:
return self.delete(f"push_providers/{provider_type}/{name}")
def list_push_providers(self) -> StreamResponse:
return self.get("push_providers")
def create_import_url(self, filename: str) -> StreamResponse:
return self.post("import_urls", data={"filename": filename})
def create_import(
self, path: str, mode: Literal["insert", "upsert"] = "upsert"
) -> StreamResponse:
return self.post("imports", data={"path": path, "mode": mode})
def get_import(self, id: str) -> StreamResponse:
return self.get(f"imports/{id}")
def list_imports(self, options: Dict = None) -> StreamResponse:
return self.get("imports", params=options)
def unread_counts(self, user_id: str) -> StreamResponse:
return self.get("unread", params={"user_id": user_id})
def unread_counts_batch(self, user_ids: List[str]) -> StreamResponse:
return self.post("unread_batch", data={"user_ids": user_ids})
| (api_key: str, api_secret: str, timeout: float = 6.0, **options: Any) |
727,507 | stream_chat.client | __init__ | null | def __init__(
self, api_key: str, api_secret: str, timeout: float = 6.0, **options: Any
):
super().__init__(
api_key=api_key, api_secret=api_secret, timeout=timeout, **options
)
self.session = requests.Session()
self.session.mount("http://", requests.adapters.HTTPAdapter(max_retries=1))
self.session.mount("https://", requests.adapters.HTTPAdapter(max_retries=1))
| (self, api_key: str, api_secret: str, timeout: float = 6.0, **options: Any) |
727,508 | stream_chat.client | _make_request | null | def _make_request(
self,
method: Callable[..., requests.Response],
relative_url: str,
params: Dict = None,
data: Any = None,
) -> StreamResponse:
params = params or {}
data = data or {}
serialized = None
default_params = self.get_default_params()
default_params.update(params)
headers = get_default_header()
headers["Authorization"] = self.auth_token
headers["stream-auth-type"] = "jwt"
url = f"{self.base_url}/{relative_url}"
if method.__name__ in ["post", "put", "patch"]:
serialized = json.dumps(data)
response = method(
url,
data=serialized,
headers=headers,
params=default_params,
timeout=self.timeout,
)
return self._parse_response(response)
| (self, method: Callable[..., requests.models.Response], relative_url: str, params: Optional[Dict] = None, data: Optional[Any] = None) -> stream_chat.types.stream_response.StreamResponse |
727,509 | stream_chat.client | _parse_response | null | def _parse_response(self, response: requests.Response) -> StreamResponse:
try:
parsed_result = json.loads(response.text) if response.text else {}
except ValueError:
raise StreamAPIException(response.text, response.status_code)
if response.status_code >= 399:
raise StreamAPIException(response.text, response.status_code)
return StreamResponse(
parsed_result, dict(response.headers), response.status_code
)
| (self, response: requests.models.Response) -> stream_chat.types.stream_response.StreamResponse |
727,510 | stream_chat.client | _query_flag_reports |
Note: Do not use this.
It is present for internal usage only.
This function can, and will, break and/or be removed at any point in time.
| def _query_flag_reports(self, **options: Any) -> StreamResponse:
"""
Note: Do not use this.
It is present for internal usage only.
This function can, and will, break and/or be removed at any point in time.
"""
data = {"filter_conditions": options}
return self.post("moderation/reports", data=data)
| (self, **options: Any) -> stream_chat.types.stream_response.StreamResponse |
727,511 | stream_chat.client | _review_flag_report |
Note: Do not use this.
It is present for internal usage only.
This function can, and will, break and/or be removed at any point in time.
| def _review_flag_report(
self, report_id: str, review_result: str, user_id: str, **details: Any
) -> StreamResponse:
"""
Note: Do not use this.
It is present for internal usage only.
This function can, and will, break and/or be removed at any point in time.
"""
data = {
"review_result": review_result,
"user_id": user_id,
"review_details": details,
}
return self.patch(f"moderation/reports/{report_id}", data=data)
| (self, report_id: str, review_result: str, user_id: str, **details: Any) -> stream_chat.types.stream_response.StreamResponse |
727,512 | stream_chat.client | add_device | null | def add_device(
self,
device_id: str,
push_provider: str,
user_id: str,
push_provider_name: str = None,
) -> StreamResponse:
return self.post(
"devices",
data={
"id": device_id,
"push_provider": push_provider,
"user_id": user_id,
"push_provider_name": push_provider_name,
},
)
| (self, device_id: str, push_provider: str, user_id: str, push_provider_name: Optional[str] = None) -> stream_chat.types.stream_response.StreamResponse |
727,513 | stream_chat.client | add_segment_targets | null | def add_segment_targets(
self, segment_id: str, target_ids: List[str]
) -> StreamResponse:
return self.post(
f"segments/{segment_id}/addtargets", data={"target_ids": target_ids}
)
| (self, segment_id: str, target_ids: List[str]) -> stream_chat.types.stream_response.StreamResponse |
727,514 | stream_chat.client | ban_user | null | def ban_user(self, target_id: str, **options: Any) -> StreamResponse:
data = {"target_user_id": target_id, **options}
return self.post("moderation/ban", data=data)
| (self, target_id: str, **options: Any) -> stream_chat.types.stream_response.StreamResponse |
727,515 | stream_chat.client | campaign | null | def campaign( # type: ignore
self, campaign_id: Optional[str] = None, data: Optional[CampaignData] = None
) -> Campaign:
return Campaign(client=self, campaign_id=campaign_id, data=data)
| (self, campaign_id: Optional[str] = None, data: Optional[stream_chat.types.campaign.CampaignData] = None) -> stream_chat.campaign.Campaign |
727,516 | stream_chat.client | channel | null | def channel( # type: ignore
self, channel_type: str, channel_id: str = None, data: Dict = None
) -> Channel:
return Channel(self, channel_type, channel_id, data)
| (self, channel_type: str, channel_id: Optional[str] = None, data: Optional[Dict] = None) -> stream_chat.channel.Channel |
727,517 | stream_chat.client | check_push | null | def check_push(self, push_data: Dict) -> StreamResponse:
return self.post("check_push", data=push_data)
| (self, push_data: Dict) -> stream_chat.types.stream_response.StreamResponse |
727,518 | stream_chat.client | check_sns | null | def check_sns(
self, sns_key: str = None, sns_secret: str = None, sns_topic_arn: str = None
) -> StreamResponse:
data = {
"sns_key": sns_key,
"sns_secret": sns_secret,
"sns_topic_arn": sns_topic_arn,
}
return self.post("check_sns", data=data)
| (self, sns_key: Optional[str] = None, sns_secret: Optional[str] = None, sns_topic_arn: Optional[str] = None) -> stream_chat.types.stream_response.StreamResponse |
727,519 | stream_chat.client | check_sqs | null | def check_sqs(
self, sqs_key: str = None, sqs_secret: str = None, sqs_url: str = None
) -> StreamResponse:
data = {"sqs_key": sqs_key, "sqs_secret": sqs_secret, "sqs_url": sqs_url}
return self.post("check_sqs", data=data)
| (self, sqs_key: Optional[str] = None, sqs_secret: Optional[str] = None, sqs_url: Optional[str] = None) -> stream_chat.types.stream_response.StreamResponse |
727,520 | stream_chat.client | commit_message | null | def commit_message(self, message_id: str) -> StreamResponse:
return self.post(f"messages/{message_id}/commit")
| (self, message_id: str) -> stream_chat.types.stream_response.StreamResponse |
727,521 | stream_chat.client | create_blocklist | null | def create_blocklist(
self, name: str, words: Iterable[str], type: str = "word"
) -> StreamResponse:
return self.post(
"blocklists", data={"name": name, "words": words, "type": type}
)
| (self, name: str, words: Iterable[str], type: str = 'word') -> stream_chat.types.stream_response.StreamResponse |
727,522 | stream_chat.client | create_campaign | null | def create_campaign(
self, campaign_id: Optional[str] = None, data: CampaignData = None
) -> StreamResponse:
payload = {"id": campaign_id}
if data is not None:
payload.update(cast(dict, data))
return self.post("campaigns", data=payload)
| (self, campaign_id: Optional[str] = None, data: Optional[stream_chat.types.campaign.CampaignData] = None) -> stream_chat.types.stream_response.StreamResponse |
727,523 | stream_chat.client | create_channel_type | null | def create_channel_type(self, data: Dict) -> StreamResponse:
if "commands" not in data or not data["commands"]:
data["commands"] = ["all"]
return self.post("channeltypes", data=data)
| (self, data: Dict) -> stream_chat.types.stream_response.StreamResponse |
727,524 | stream_chat.client | create_command | null | def create_command(self, data: Dict) -> StreamResponse:
return self.post("commands", data=data)
| (self, data: Dict) -> stream_chat.types.stream_response.StreamResponse |
727,525 | stream_chat.client | create_import | null | def create_import(
self, path: str, mode: Literal["insert", "upsert"] = "upsert"
) -> StreamResponse:
return self.post("imports", data={"path": path, "mode": mode})
| (self, path: str, mode: Literal['insert', 'upsert'] = 'upsert') -> stream_chat.types.stream_response.StreamResponse |
727,526 | stream_chat.client | create_import_url | null | def create_import_url(self, filename: str) -> StreamResponse:
return self.post("import_urls", data={"filename": filename})
| (self, filename: str) -> stream_chat.types.stream_response.StreamResponse |
727,527 | stream_chat.client | create_permission | null | def create_permission(self, permission: Dict) -> StreamResponse:
return self.post("permissions", data=permission)
| (self, permission: Dict) -> stream_chat.types.stream_response.StreamResponse |
727,528 | stream_chat.client | create_role | null | def create_role(self, name: str) -> StreamResponse:
return self.post("roles", data={"name": name})
| (self, name: str) -> stream_chat.types.stream_response.StreamResponse |
727,529 | stream_chat.base.client | create_search_params | null | def create_search_params(
self,
filter_conditions: Dict,
query: Union[str, Dict],
sort: List[Dict] = None,
**options: Any,
) -> Dict[str, Any]:
params = options.copy()
if isinstance(query, str):
params.update({"query": query})
else:
params.update({"message_filter_conditions": query})
params.update({"filter_conditions": filter_conditions})
if sort:
params.update({"sort": self.normalize_sort(sort)})
return params
| (self, filter_conditions: Dict, query: Union[str, Dict], sort: Optional[List[Dict]] = None, **options: Any) -> Dict[str, Any] |
727,530 | stream_chat.client | create_segment | null | def create_segment(
self,
segment_type: SegmentType,
segment_id: Optional[str] = None,
data: Optional[SegmentData] = None,
) -> StreamResponse:
payload = {"type": segment_type.value}
if segment_id is not None:
payload["id"] = segment_id
if data is not None:
payload.update(cast(dict, data))
return self.post("segments", data=payload)
| (self, segment_type: stream_chat.types.segment.SegmentType, segment_id: Optional[str] = None, data: Optional[stream_chat.types.segment.SegmentData] = None) -> stream_chat.types.stream_response.StreamResponse |
727,531 | stream_chat.base.client | create_token |
Creates a JWT for a user.
Stream uses JWT (JSON Web Tokens) to authenticate chat users, enabling them to login.
Knowing whether a user is authorized to perform certain actions is managed
separately via a role based permissions system.
By default, user tokens are valid indefinitely. You can set an `exp`
or issued at (`iat`) claim as well.
| def create_token(
self, user_id: str, exp: int = None, iat: int = None, **claims: Any
) -> str:
"""
Creates a JWT for a user.
Stream uses JWT (JSON Web Tokens) to authenticate chat users, enabling them to login.
Knowing whether a user is authorized to perform certain actions is managed
separately via a role based permissions system.
By default, user tokens are valid indefinitely. You can set an `exp`
or issued at (`iat`) claim as well.
"""
payload: Dict[str, Any] = {**claims, "user_id": user_id}
if exp:
payload["exp"] = exp
if iat:
payload["iat"] = iat
return jwt.encode(payload, self.api_secret, algorithm="HS256")
| (self, user_id: str, exp: Optional[int] = None, iat: Optional[int] = None, **claims: Any) -> str |
727,532 | stream_chat.client | deactivate_user | null | def deactivate_user(self, user_id: str, **options: Any) -> StreamResponse:
return self.post(f"users/{user_id}/deactivate", data=options)
| (self, user_id: str, **options: Any) -> stream_chat.types.stream_response.StreamResponse |
727,533 | stream_chat.client | delete | null | def delete(self, relative_url: str, params: Dict = None) -> StreamResponse:
return self._make_request(self.session.delete, relative_url, params, None)
| (self, relative_url: str, params: Optional[Dict] = None) -> stream_chat.types.stream_response.StreamResponse |
727,534 | stream_chat.client | delete_blocklist | null | def delete_blocklist(self, name: str) -> StreamResponse:
return self.delete(f"blocklists/{name}")
| (self, name: str) -> stream_chat.types.stream_response.StreamResponse |
727,535 | stream_chat.client | delete_campaign | null | def delete_campaign(self, campaign_id: str, **options: Any) -> StreamResponse:
return self.delete(f"campaigns/{campaign_id}", options)
| (self, campaign_id: str, **options: Any) -> stream_chat.types.stream_response.StreamResponse |
727,536 | stream_chat.client | delete_channel_type | null | def delete_channel_type(self, channel_type: str) -> StreamResponse:
return self.delete(f"channeltypes/{channel_type}")
| (self, channel_type: str) -> stream_chat.types.stream_response.StreamResponse |
727,537 | stream_chat.client | delete_channels | null | def delete_channels(self, cids: Iterable[str], **options: Any) -> StreamResponse:
return self.post("channels/delete", data=dict(options, cids=cids))
| (self, cids: Iterable[str], **options: Any) -> stream_chat.types.stream_response.StreamResponse |
727,538 | stream_chat.client | delete_command | null | def delete_command(self, name: str) -> StreamResponse:
return self.delete(f"commands/{name}")
| (self, name: str) -> stream_chat.types.stream_response.StreamResponse |
727,539 | stream_chat.client | delete_device | null | def delete_device(self, device_id: str, user_id: str) -> StreamResponse:
return self.delete("devices", {"id": device_id, "user_id": user_id})
| (self, device_id: str, user_id: str) -> stream_chat.types.stream_response.StreamResponse |
727,540 | stream_chat.client | delete_message | null | def delete_message(self, message_id: str, **options: Any) -> StreamResponse:
return self.delete(f"messages/{message_id}", options)
| (self, message_id: str, **options: Any) -> stream_chat.types.stream_response.StreamResponse |
727,541 | stream_chat.client | delete_permission | null | def delete_permission(self, id: str) -> StreamResponse:
return self.delete(f"permissions/{id}")
| (self, id: str) -> stream_chat.types.stream_response.StreamResponse |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.