repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40-40) | content (stringlengths 10-680k) | apis (stringlengths 2-840k) |
---|---|---|---|---|
achoraev/SoftUni | PythonBasics/ExamPreparation/FamilyTrip.py | 0cc7db470a096cc33bbe0ca6bd90060b79120573 | budget = float(input())
nights = int(input())
price_night = float(input())
percent_extra = int(input())
if nights > 7:
price_night = price_night - (price_night * 0.05)
nights_cost = nights * price_night  # renamed from 'sum' to avoid shadowing the builtin
total_sum = nights_cost + (budget * percent_extra / 100)
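# Worked example (hypothetical input): budget=800, nights=8, price_night=24,
# percent_extra=25 -> discounted price_night=22.80, nights_cost=182.40,
# total_sum = 182.40 + 800 * 25 / 100 = 382.40, leaving 417.60 leva.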
if total_sum <= budget:
print(f"Ivanovi will be left with {(budget - total_sum):.2f} leva after vacation.")
else:
print(f"{(total_sum - budget):.2f} leva needed.") | [] |
csch0/SublimeText-TeX-Live-Package-Manager | tex_live_package_manager/progress.py | ab21bd49a945f611250613e9cb862a7703dc534f | import sublime, sublime_plugin
import threading
class ProcessQueueManager():
__shared = {}
items = []
thread = None
# Current item details
messages = None
function = None
callback = None
# Progress Bar preferences
i = 0
size = 8
add = 1
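# Borg pattern: every instance shares cls.__shared as its __dict__, so the
# queue, thread and progress state persist across repeated construction.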
def __new__(cls, *args, **kwargs):
inst = object.__new__(cls)
inst.__dict__ = cls.__shared
return inst
def queue(self, unique_id, function, messages, callback):
print(unique_id, function, messages, callback)
self.items += [{"function": function, "messages": messages, "callback": callback}]
if not self.thread or not self.thread.is_alive():
sublime.set_timeout(lambda: self.run(), 100)
def run(self):
# If thread available and running
if self.thread and self.thread.is_alive():
# Recall run
self.progress()
sublime.set_timeout(lambda: self.run(), 100)
# Stop if thread available, not running and no item is available
elif self.thread and not self.thread.is_alive() and not self.items:
sublime.status_message(self.messages[1])
# Callback
sublime.set_timeout(self.callback, 0)
# Reset progress details
self.i = 0
self.callback = None
self.function = None
self.messages = None
# If no thread available or not running
elif not self.thread or not self.thread.is_alive():
# Check for callback of old item
if self.callback:
sublime.set_timeout(self.callback, 0)
self.callback = None
# Queue available
if self.items:
item = self.items.pop(0)
self.callback = item["callback"]
self.function = item["function"]
self.messages = item["messages"]
# Start thread for current item
self.thread = HelperThread(self.function)
self.thread.start()
# Call run to start updating progress
sublime.set_timeout(lambda: self.run(), 100)
def progress(self):
# Calculate the number of cells on each side of the progress marker
before = self.i % self.size
after = self.size - (before + 1)
# Print the actual progress
sublime.status_message('%s [%s=%s]' % (self.messages[0], ' ' * before, ' ' * after))
# Invert increment if reached the end or start
if not after:
self.add = -1
elif not before:
self.add = 1
self.i += self.add
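# Example frames for size=8: "[=       ]" -> "[ =      ]" -> ... -> "[       =]",
# after which 'add' flips sign and the marker bounces back toward the start.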
class HelperThread(threading.Thread):
def __init__(self, function):
self.function = function if isinstance(function, list) else [function]
threading.Thread.__init__(self)
def run(self):
for function in self.function:
function()
def ProgressFunction(function, messages, callback):
# NOTE: legacy helper; 'ThreadThread' was undefined, HelperThread is the
# matching class here ('Progress' on the next line is also undefined in this module).
t = HelperThread(function)
t.start()
Progress(t, messages[0], messages[1], callback) | [((84, 2, 84, 86), 'sublime.status_message', 'sublime.status_message', ({(84, 25, 84, 85): "('%s [%s=%s]' % (self.messages[0], ' ' * before, ' ' * after))"}, {}), "('%s [%s=%s]' % (self.messages[0], ' ' * before, ' ' *\n after))", False, 'import sublime, sublime_plugin\n'), ((99, 2, 99, 33), 'threading.Thread.__init__', 'threading.Thread.__init__', ({(99, 28, 99, 32): 'self'}, {}), '(self)', False, 'import threading\n'), ((44, 4, 44, 44), 'sublime.status_message', 'sublime.status_message', ({(44, 27, 44, 43): 'self.messages[1]'}, {}), '(self.messages[1])', False, 'import sublime, sublime_plugin\n'), ((47, 4, 47, 41), 'sublime.set_timeout', 'sublime.set_timeout', ({(47, 24, 47, 37): 'self.callback', (47, 39, 47, 40): '(0)'}, {}), '(self.callback, 0)', False, 'import sublime, sublime_plugin\n'), ((60, 4, 60, 41), 'sublime.set_timeout', 'sublime.set_timeout', ({(60, 24, 60, 37): 'self.callback', (60, 39, 60, 40): '(0)'}, {}), '(self.callback, 0)', False, 'import sublime, sublime_plugin\n')] |
rscprof/moscow_routes_parser | moscow_routes_parser/t_mos_ru.py | 692627dd43d62f70e3e12a761897571c79a022a0 | import html
import json
import logging
import re
from abc import abstractmethod
from datetime import datetime, time
from typing import Optional
import requests
from moscow_routes_parser.model import Route, Timetable, Equipment, Timetable_builder
from moscow_routes_parser.model_impl import Timetable_builder_t_mos_ru
class parser_timetable:
""""Interface for parser"""
@abstractmethod
def parse(self, text: str) -> Timetable_builder:
pass
class parser_timetable_t_mos_ru(parser_timetable):
""""Parser for timetable from t.mos.ru implementation"""
def __init__(self, builder: Timetable_builder):
""""Initialize parser
:param builder: Builder for Timetable for route
"""
self.builder = lambda: builder
def parse(self, text: str) -> Timetable_builder:
"""Parse text from https://transport.mos.ru/ru/ajax/App/ScheduleController/getRoute (for format using
2022-Jan-11)
Since 12.01.2022 t.mos.ru drop data-services from results
Since 13.03.2022 added flag_has_another_direction
@param text: text to parse
@return Timetable for route
"""
result_stops = type(self.builder())()
# stops = re.finditer(r'data-stop="([^"]*?)".*?data-services="([^"]*?)".*?d-inline.*?>(.*?)<(.*?)</li>', text,
# re.M + re.S
# )
stops = re.finditer(r'data-stop="(.*?)".*?d-inline.*?>(.*?)<(.*?)</li>', text,
re.M + re.S
)
data_coords_iter = re.finditer(r'data-coords="(.*?)"', text,
re.M + re.S
)
data_coords_list = list(data_coords_iter)
if re.search(r'ic-change-a-b', text, re.M + re.S) is None:
result_stops.set_has_another_direction(False)
else:
result_stops.set_has_another_direction(True)
# if a timetable is present
if len(data_coords_list) > 0:
data_coords = data_coords_list[0].group(1)
data_coords = html.unescape(data_coords)
data_coords = json.loads(data_coords)['features']
data_coords = iter(map(lambda feature: feature['geometry']['coordinates'], data_coords))
else:
data_coords = []
for stop in stops:
name_stop = stop.group(2)
coords_stop = next(data_coords)
description = stop.group(3)
logger = logging.getLogger(__name__)
logger.info(name_stop)
hours = re.finditer(r'dt1.*?(\d\d):(.*?)</div>\s*</div>\s*</div>', description, re.M + re.S)
timetable_stop = result_stops.add_stop()
timetable_stop.set_name(name_stop)
timetable_stop.set_coords(coords_stop)
log_timetable = ""
for hour in hours:
num_hour = int(hour.group(1))
minutes_text = hour.group(2)
log_timetable += str(num_hour) + ": "
minutes = re.finditer(r'div10([^>]*)>\s*(\d\d)', minutes_text, re.M + re.S)
for minute in minutes:
num_minute = int(minute.group(2))
color_start = minute.group(1).find('color: ')
if color_start >= 0:
quote = minute.group(1).find('"', color_start)
min_color = minute.group(1)[color_start + 7:quote]
else:
min_color = None
if not (min_color is None):
log_timetable += "{}{}".format(num_minute, min_color) + " "
pass
else:
log_timetable += str(num_minute) + " "
pass
time_flight = time(num_hour, num_minute)
timetable_stop.add_item_timetable(time_flight, min_color)
logger.info(log_timetable)
return result_stops
class Parser_routes:
@abstractmethod
def parse(self, text: str) -> [Route]:
pass
class Parser_routes_t_mos_ru(Parser_routes):
def __init__(self):
self.count = None
def parse(self, text: str) -> [Route]:
""""Parses route info from transport.mos.ru (name, id, type)
:param text: text for parsing from t.mos.ru
:return list of Route
"""
count_result = re.finditer(r'data-count-pages="(\d+)"', text, re.M + re.S)
self.count = int(list(count_result)[0].group(1))
result = re.finditer(r'<a.*?href=.*?route/(.+?)".*?<div.*?ic[ ]([a-z-]+).*?</i>\s*(\S+?)\s*</div>', text,
re.M + re.S)
list_routes = []
for route in result:
num = route.group(1)
type_route = route.group(2)
if type_route.find('-bus') >= 0:
type_route = Equipment.bus()
elif type_route.find('tramway') >= 0:
type_route = Equipment.tramway()
elif type_route.find('trolleybus') >= 0:
type_route = Equipment.trolleybus()
else:
logging.getLogger(__name__).error("Unknown type route: {}".format(type_route))
type_route = None
name = route.group(3)
list_routes.append(Route(num, type_route, name))
return list_routes
def get_route(date: datetime.date, id_route_t_mos_ru: str, direction: int,
get_route_url: str = 'https://transport.mos.ru/ru/ajax/App/ScheduleController/getRoute',
parser: parser_timetable = parser_timetable_t_mos_ru(builder=Timetable_builder_t_mos_ru())
) -> Timetable:
"""Get timetable for route by date and direction
:param date: date of timetable for route
:param id_route_t_mos_ru: id of route from t.mos.ru
:param direction: direction for route (0 or 1)
:param get_route_url URL for requesting timetable
:param parser: parser for the timetable
:return timetable for route by date and direction
"""
logger = logging.getLogger(__name__)
try:
# strange problem with SSL Cert in package
response = requests.get(get_route_url,
params={
'mgt_schedule[isNight]': '',
'mgt_schedule[date]': date.strftime("%d.%m.%Y"),
'mgt_schedule[route]': id_route_t_mos_ru,
'mgt_schedule[direction]': direction,
},
headers={'X-Requested-With': 'XMLHttpRequest'}
)
if response.status_code == 200:
logger.info("Get route #{}".format(id_route_t_mos_ru))
route_info = parser.parse(response.text)
else:
logger.error("Error status: {}".format(response.status_code))
route_info = None
except requests.exceptions.RequestException as e:
logger.error("Error " + str(e))
route_info = None
if not (route_info is None):
result = route_info.set_id_route_t_mos_ru(id_route_t_mos_ru).set_direction(direction).set_date(date).build()
if len(result.get_stops()) == 0: # Error of loading timetable without exceptions
result = None
else:
result = None
return result
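# Usage sketch (route id is hypothetical; performs a live HTTP request):
# from datetime import date
# timetable = get_route(date.today(), '213', 0)
# if timetable is not None:
#     print(len(timetable.get_stops()))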
def get_list_routes(work_time: int, direction: int,
parser: Parser_routes = None,
get_routes_url: str = 'https://transport.mos.ru/ru/ajax/App/ScheduleController/getRoutesList'
) -> Optional[list[Route]]:
"""get list routes by work_time and direction from transport.mos.ru
:param parser: function to parse got string
:param get_routes_url: url for requesting routes
:param work_time: work day or not (1 or 0)
:param direction: direction for routes (0 or 1)
:return list of Route
"""
if parser is None:
parser = Parser_routes_t_mos_ru()
page = 1
result_routes = []
finish = False
count = None
logger = logging.getLogger(__name__)
while not finish:
finish = False
repeat = True
while repeat:
repeat = False
try:
# strange problem with SSL Cert in package
response = requests.get(get_routes_url,
params={
'mgt_schedule[search]': '',
'mgt_schedule[isNight]': '',
# 'mgt_schedule[filters]': '',
'mgt_schedule[work_time]': work_time,
'page': page,
'mgt_schedule[direction]': direction,
}
, headers={'X-Requested-With': 'XMLHttpRequest'}
# , headers={'Cookie': "_ym_d=1637468102; _ym_uid=1637468102592825648; mos_id=rBEAAmGaFNawBwAOHRgWAgA=; _ga=GA1.2.1733238845.1637487830; uxs_uid=147e2110-500d-11ec-a7cb-8bb8b12c3186; KFP_DID=ee285837-cd1f-0a9b-c8a2-9cef6a4ee333; _ym_isad=2; _ym_visorc=w"}
)
if response.status_code == 200:
logger.info("Get page #{}".format(page))
routes = parser.parse(response.text)
result_routes += routes
if count is None:
count = parser.count
if not routes:
finish = True
else:
logger.error("Error status: {}".format(response.status_code))
finish = True
page = page + 1
if page > count:
finish = True
except requests.exceptions.RequestException as e:
logger.error("Error " + str(e))
repeat = True
return result_routes
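# Usage sketch (performs live HTTP requests, paginating until done):
# routes = get_list_routes(work_time=1, direction=0)
# for route in routes:
#     print(route)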
| [((155, 13, 155, 40), 'logging.getLogger', ({(155, 31, 155, 39): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((203, 13, 203, 40), 'logging.getLogger', 'logging.getLogger', ({(203, 31, 203, 39): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((46, 16, 48, 29), 're.finditer', 're.finditer', ({(46, 28, 46, 79): '"data-stop="(.*?)".*?d-inline.*?>(.*?)<(.*?)</li>"', (46, 81, 46, 85): 'text', (47, 28, 47, 39): 're.M + re.S'}, {}), '(\'data-stop="(.*?)".*?d-inline.*?>(.*?)<(.*?)</li>\', text, re.M +\n re.S)', False, 'import re\n'), ((49, 27, 51, 40), 're.finditer', 're.finditer', ({(49, 39, 49, 61): '"data-coords="(.*?)\""', (49, 63, 49, 67): 'text', (50, 39, 50, 50): 're.M + re.S'}, {}), '(\'data-coords="(.*?)"\', text, re.M + re.S)', False, 'import re\n'), ((119, 23, 119, 82), 're.finditer', 're.finditer', ({(119, 35, 119, 62): '"data-count-pages="(\\d+)\""', (119, 64, 119, 68): 'text', (119, 70, 119, 81): 're.M + re.S'}, {}), '(\'data-count-pages="(\\\\d+)"\', text, re.M + re.S)', False, 'import re\n'), ((122, 17, 123, 41), 're.finditer', 're.finditer', ({(122, 29, 122, 106): '"<a.*?href=.*?route/(.+?)".*?<div.*?ic[ ]([a-z-]+).*?</i>\\s*(\\S+?)\\s*</div>"', (122, 108, 122, 112): 'text', (123, 29, 123, 40): 're.M + re.S'}, {}), '(\n \'<a.*?href=.*?route/(.+?)".*?<div.*?ic[ ]([a-z-]+).*?</i>\\\\s*(\\\\S+?)\\\\s*</div>\'\n , text, re.M + re.S)', False, 'import re\n'), ((53, 11, 53, 57), 're.search', 're.search', ({(53, 21, 53, 37): '"ic-change-a-b"', (53, 39, 53, 43): 'text', (53, 45, 53, 56): '(re.M + re.S)'}, {}), "('ic-change-a-b', text, re.M + re.S)", False, 'import re\n'), ((60, 26, 60, 52), 'html.unescape', 'html.unescape', ({(60, 40, 60, 51): 'data_coords'}, {}), '(data_coords)', False, 'import html\n'), ((70, 21, 70, 48), 'logging.getLogger', 'logging.getLogger', ({(70, 39, 70, 47): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((72, 20, 72, 104), 're.finditer', 're.finditer', ({(72, 32, 72, 77): '"dt1.*?(\\d\\d):(.*?)</div>\\s*</div>\\s*</div>"', (72, 79, 72, 90): 'description', (72, 92, 72, 103): 're.M + re.S'}, {}), "('dt1.*?(\\\\d\\\\d):(.*?)</div>\\\\s*</div>\\\\s*</div>', description, \n re.M + re.S)", False, 'import re\n'), ((145, 75, 145, 103), 'moscow_routes_parser.model_impl.Timetable_builder_t_mos_ru', 'Timetable_builder_t_mos_ru', ({}, {}), '()', False, 'from moscow_routes_parser.model_impl import Timetable_builder_t_mos_ru\n'), ((61, 26, 61, 49), 'json.loads', 'json.loads', ({(61, 37, 61, 48): 'data_coords'}, {}), '(data_coords)', False, 'import json\n'), ((81, 26, 81, 91), 're.finditer', 're.finditer', ({(81, 38, 81, 63): '"div10([^>]*)>\\s*(\\d\\d)"', (81, 65, 81, 77): 'minutes_text', (81, 79, 81, 90): 're.M + re.S'}, {}), "('div10([^>]*)>\\\\s*(\\\\d\\\\d)', minutes_text, re.M + re.S)", False, 'import re\n'), ((130, 29, 130, 44), 'moscow_routes_parser.model.Equipment.bus', 'Equipment.bus', ({}, {}), '()', False, 'from moscow_routes_parser.model import Route, Timetable, Equipment, Timetable_builder\n'), ((139, 31, 139, 59), 'moscow_routes_parser.model.Route', 'Route', ({(139, 37, 139, 40): 'num', (139, 42, 139, 52): 'type_route', (139, 54, 139, 58): 'name'}, {}), '(num, type_route, name)', False, 'from moscow_routes_parser.model import Route, Timetable, Equipment, Timetable_builder\n'), ((211, 27, 222, 41), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((96, 34, 96, 60), 'datetime.time', 'time', ({(96, 39, 96, 47): 'num_hour', (96, 49, 96, 59): 'num_minute'}, {}), '(num_hour, num_minute)', False, 'from datetime import datetime, time\n'), ((132, 29, 132, 48), 'moscow_routes_parser.model.Equipment.tramway', 'Equipment.tramway', ({}, {}), '()', False, 'from moscow_routes_parser.model import Route, Timetable, Equipment, Timetable_builder\n'), ((134, 29, 134, 51), 'moscow_routes_parser.model.Equipment.trolleybus', 'Equipment.trolleybus', ({}, {}), '()', False, 'from moscow_routes_parser.model import Route, Timetable, Equipment, Timetable_builder\n'), ((136, 16, 136, 43), 'logging.getLogger', 'logging.getLogger', ({(136, 34, 136, 42): '__name__'}, {}), '(__name__)', False, 'import logging\n')] |
spudmind/spud | web/api/get_summary_data.py | 86e44bca4efd3cd6358467e1511048698a45edbc | from web.api import BaseAPI
from utils import mongo
import json
class DataApi(BaseAPI):
def __init__(self):
BaseAPI.__init__(self)
self._db = mongo.MongoInterface()
self.query = {}
self.fields = {
"donation_count": "$influences.electoral_commission.donation_count",
"donor_count": '$influences.electoral_commission.donor_count',
"donation_total_int": "$influences.electoral_commission.donation_total_int",
"mp_interest_relationships": "$influences.register_of_interests.relationship_count",
"lord_interest_relationships": "$influences.register_of_interests.interest_relationships",
"remuneration_count": "$influences.register_of_interests.remuneration_count",
"remuneration_total_int": "$influences.register_of_interests.remuneration_total_int",
"lobbyists_hired": "$influences.lobby_registers.lobbyist_hired"
}
def request(self, **args):
node_type = args.get("type")
category = args.get("category")
field = args.get("field")
summary = {
"influencers": self._influencers_aggregate(category, field),
#"lobby_agencies": self._influencers_aggregate(),
"political_parties": self._party_aggregate(category, field),
"mps": self._mp_aggregate(category, field),
"lords": self._lord_aggregate(category, field)
}
return {"children": summary[node_type][category]}
def _influencers_aggregate(self, category, field):
_db_table = 'api_influencers'
response = {}
if category == "electoral_commission":
# get electoral commission data
ec_fields = ["donation_total_int", "donation_count"]
top_total, top_count = self._get_top(_db_table, ec_fields)
ec = {
"donation_total": self._format_top(top_total, "influencer"),
"donation_count": self._format_top(top_count, "influencer", monetary=False)
}
response["electoral_commission"] = ec[field]
if category == "register_of_interests":
# get register of interests data
reg_fields = [
"remuneration_total_int",
"mp_interest_relationships",
"remuneration_count"
]
top_total, top_relationships, top_count = self._get_top(_db_table, reg_fields)
reg = {
"remuneration_total": self._format_top(top_total, "influencer"),
"interest_relationships": self._format_top(
top_relationships, "influencer", monetary=False
),
"remuneration_count": self._format_top(
top_count, "influencer", monetary=False
)
}
response["register_of_interests"] = reg[field]
return response
def _party_aggregate(self, category, field):
_db_table = 'api_political_parties'
response = {}
if category == "political_parties":
ec_fields = ["donation_total_int", "donation_count"]
top_total, top_count = self._get_top(_db_table, ec_fields)
result = {
"donation_total": self._format_top(top_total, "party"),
"donation_count": self._format_top(top_count, "party", monetary=False)
}
response["electoral_commission"] = result[field]
return response
def _mp_aggregate(self, category, field):
_db_table = 'api_mps'
response = {}
if category == "electoral_commission":
# get electoral commission data
ec_fields = ["donation_total_int", "donor_count"]
top_total, top_count = self._get_top(_db_table, ec_fields)
ec = {
"donation_total": self._format_top(top_total, "mp"),
"donor_count": self._format_top(top_count, "mp", monetary=False)
}
response["electoral_commission"] = ec[field]
if category == "register_of_interests":
# get register of interests data
reg_fields = [
"remuneration_total_int",
"lord_interest_relationships",
"remuneration_count"
]
top_total, top_relationships, top_count = self._get_top(_db_table, reg_fields)
reg = {
"remuneration_total": self._format_top(top_total, "mp"),
"interest_relationships": self._format_top(
top_relationships, "mp", monetary=False
),
"remuneration_count": self._format_top(
top_count, "mp", monetary=False
)
}
response["register_of_interests"] = reg[field]
return response
def _lord_aggregate(self, category, field):
_db_table = 'api_lords'
response = {}
if category == "electoral_commission":
# get electoral commission data
ec_fields = ["donation_total_int", "donation_count"]
top_total, top_count = self._get_top(_db_table, ec_fields)
ec = {
"donation_total": self._format_top(top_total, "lord"),
"donation_count": self._format_top(top_count, "lord", monetary=False)
}
response["electoral_commission"] = ec[field]
if category == "register_of_interests":
# get register of interests data
reg_fields = ["lord_interest_relationships"]
top_relationships = self._get_top(_db_table, reg_fields)[0]
reg = {
"interest_relationships": self._format_top(
top_relationships, "lord", monetary=False
)
}
response["register_of_interests"] = reg[field]
return response
def _format_top(self, results, label, monetary=True):
updated = []
for entry in results:
new = {
"name": entry["_id"],
"details_url": self.named_entity_resources(
entry["_id"], label
)[0]
}
if monetary:
new["total_int"] = entry["total"]
new["total"] = self._format_number(entry["total"])
else:
new["total"] = entry["total"]
updated.append(new)
return updated
def _get_aggregate(self, table, field_list):
return [self._db.sum(table, field=self.fields[x]) for x in field_list]
def _get_top(self, table, field_list):
return [self._db.top(table, field=self.fields[x]) for x in field_list]
| [((8, 8, 8, 30), 'web.api.BaseAPI.__init__', 'BaseAPI.__init__', ({(8, 25, 8, 29): 'self'}, {}), '(self)', False, 'from web.api import BaseAPI\n'), ((9, 19, 9, 41), 'utils.mongo.MongoInterface', 'mongo.MongoInterface', ({}, {}), '()', False, 'from utils import mongo\n')] |
guillermomolina/neutron | neutron/common/ovn/utils.py | bd2933a2588d1e0b18790dd719ca1d89aa4a0c8d | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import inspect
import os
import re
import netaddr
from neutron_lib.api.definitions import external_net
from neutron_lib.api.definitions import extra_dhcp_opt as edo_ext
from neutron_lib.api.definitions import l3
from neutron_lib.api.definitions import port_security as psec
from neutron_lib.api.definitions import portbindings
from neutron_lib.api import validators
from neutron_lib import constants as const
from neutron_lib import context as n_context
from neutron_lib import exceptions as n_exc
from neutron_lib.plugins import directory
from neutron_lib.utils import net as n_utils
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import netutils
from oslo_utils import strutils
from ovsdbapp import constants as ovsdbapp_const
from neutron._i18n import _
from neutron.common.ovn import constants
from neutron.common.ovn import exceptions as ovn_exc
from neutron.db import models_v2
from neutron.objects import ports as ports_obj
LOG = log.getLogger(__name__)
CONF = cfg.CONF
DNS_RESOLVER_FILE = "/etc/resolv.conf"
AddrPairsDiff = collections.namedtuple(
'AddrPairsDiff', ['added', 'removed', 'changed'])
PortExtraDHCPValidation = collections.namedtuple(
'PortExtraDHCPValidation', ['failed', 'invalid_ipv4', 'invalid_ipv6'])
def ovn_name(id):
# The name of the OVN entry will be neutron-<UUID>
# This is due to the fact that the OVN application checks if the name
# is a UUID. If so then there will be no matches.
# We prefix the UUID to enable us to use the Neutron UUID when
# updating, deleting etc.
return "%s%s" % (constants.OVN_NAME_PREFIX, id)
def ovn_lrouter_port_name(id):
# The name of the OVN lrouter port entry will be lrp-<UUID>
# This is to distinguish with the name of the connected lswitch patch port,
# which is named with neutron port uuid, so that OVS patch ports are
# generated properly. The pairing patch port names will be:
# - patch-lrp-<UUID>-to-<UUID>
# - patch-<UUID>-to-lrp-<UUID>
# lrp stands for Logical Router Port
return constants.LRP_PREFIX + '%s' % id
def ovn_cr_lrouter_port_name(_id):
# The name of the OVN chassisredirect lrouter port entry will be
# cr-lrp-<UUID>
return 'cr-lrp-%s' % _id
def ovn_provnet_port_name(network_id):
# The name of OVN lswitch provider network port entry will be
# provnet-<Network-UUID>. The port is created for network having
# provider:physical_network attribute.
return constants.OVN_PROVNET_PORT_NAME_PREFIX + '%s' % network_id
def ovn_vhu_sockpath(sock_dir, port_id):
# Frame the socket path of a virtio socket
return os.path.join(
sock_dir,
# this parameter will become the virtio port name,
# so it should not exceed IFNAMSIZ(16).
(const.VHOST_USER_DEVICE_PREFIX + port_id)[:14])
def ovn_addrset_name(sg_id, ip_version):
# The name of the address set for the given security group id and ip
# version. The format is:
# as-<ip version>-<security group uuid>
# with all '-' replaced with '_'. This replacement is necessary
# because OVN doesn't support '-' in an address set name.
return ('as-%s-%s' % (ip_version, sg_id)).replace('-', '_')
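# e.g. ovn_addrset_name('a1b2-c3d4', 4) -> 'as_4_a1b2_c3d4'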
def ovn_pg_addrset_name(sg_id, ip_version):
# The name of the address set for the given security group id modelled as a
# Port Group and ip version. The format is:
# pg-<security group uuid>-<ip version>
# with all '-' replaced with '_'. This replacement is necessary
# because OVN doesn't support '-' in an address set name.
return ('pg-%s-%s' % (sg_id, ip_version)).replace('-', '_')
def ovn_port_group_name(sg_id):
# The name of the port group for the given security group id.
# The format is: pg-<security group uuid>.
return ('pg-%s' % sg_id).replace('-', '_')
def is_network_device_port(port):
return port.get('device_owner', '').startswith(
const.DEVICE_OWNER_PREFIXES)
def _is_dhcp_disabled(dhcp_opt):
return (dhcp_opt['opt_name'] == constants.DHCP_DISABLED_OPT and
dhcp_opt.get('opt_value', '').lower() == 'true')
def validate_port_extra_dhcp_opts(port):
"""Validate port's extra DHCP options.
:param port: A neutron port.
:returns: A PortExtraDHCPValidation object.
"""
invalid = {const.IP_VERSION_4: [], const.IP_VERSION_6: []}
failed = False
for edo in port.get(edo_ext.EXTRADHCPOPTS, []):
ip_version = edo['ip_version']
opt_name = edo['opt_name']
# If DHCP is disabled for this port via this special option,
# always succeed the validation
if _is_dhcp_disabled(edo):
failed = False
break
if opt_name not in constants.SUPPORTED_DHCP_OPTS_MAPPING[ip_version]:
invalid[ip_version].append(opt_name)
failed = True
return PortExtraDHCPValidation(
failed=failed,
invalid_ipv4=invalid[const.IP_VERSION_4] if failed else [],
invalid_ipv6=invalid[const.IP_VERSION_6] if failed else [])
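# Example (hypothetical port; assumes 'bootfile-name' is a supported v4 opt):
# port = {'extra_dhcp_opts': [{'ip_version': 4, 'opt_name': 'bootfile-name',
# 'opt_value': 'pxelinux.0'}]}
# validate_port_extra_dhcp_opts(port)
# -> PortExtraDHCPValidation(failed=False, invalid_ipv4=[], invalid_ipv6=[])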
def get_lsp_dhcp_opts(port, ip_version):
# Get dhcp options from Neutron port, for setting DHCP_Options row
# in OVN.
lsp_dhcp_disabled = False
lsp_dhcp_opts = {}
if is_network_device_port(port):
lsp_dhcp_disabled = True
else:
mapping = constants.SUPPORTED_DHCP_OPTS_MAPPING[ip_version]
for edo in port.get(edo_ext.EXTRADHCPOPTS, []):
if edo['ip_version'] != ip_version:
continue
if _is_dhcp_disabled(edo):
# OVN native DHCP is disabled on this port
lsp_dhcp_disabled = True
# Make sure return value behavior not depends on the order and
# content of the extra DHCP options for the port
lsp_dhcp_opts.clear()
break
if edo['opt_name'] not in mapping:
LOG.warning('The DHCP option %(opt_name)s on port %(port)s '
'is not supported by OVN, ignoring it',
{'opt_name': edo['opt_name'], 'port': port['id']})
continue
opt = mapping[edo['opt_name']]
lsp_dhcp_opts[opt] = edo['opt_value']
return (lsp_dhcp_disabled, lsp_dhcp_opts)
def is_lsp_trusted(port):
return n_utils.is_port_trusted(port) if port.get('device_owner') else False
def is_lsp_ignored(port):
# Since the floating IP port is not bound to any chassis, packets from vm
# destined to floating IP will be dropped. To overcome this, we do not
# create/update floating IP port in OVN.
return port.get('device_owner') in [const.DEVICE_OWNER_FLOATINGIP]
def get_lsp_security_groups(port, skip_trusted_port=True):
# In other agent link OVS, skipping trusted port is processed in security
# groups RPC. We haven't that step, so we do it here.
return [] if (skip_trusted_port and is_lsp_trusted(port)
) else port.get('security_groups', [])
def is_snat_enabled(router):
return router.get(l3.EXTERNAL_GW_INFO, {}).get('enable_snat', True)
def is_port_security_enabled(port):
return port.get(psec.PORTSECURITY)
def is_security_groups_enabled(port):
return port.get(constants.PORT_SECURITYGROUPS)
def validate_and_get_data_from_binding_profile(port):
if (constants.OVN_PORT_BINDING_PROFILE not in port or
not validators.is_attr_set(
port[constants.OVN_PORT_BINDING_PROFILE])):
return {}
param_set = {}
param_dict = {}
for param_set in constants.OVN_PORT_BINDING_PROFILE_PARAMS:
param_keys = param_set.keys()
for param_key in param_keys:
try:
param_dict[param_key] = (port[
constants.OVN_PORT_BINDING_PROFILE][param_key])
except KeyError:
pass
if len(param_dict) == 0:
continue
if len(param_dict) != len(param_keys):
msg = _('Invalid binding:profile. %s are all '
'required.') % param_keys
raise n_exc.InvalidInput(error_message=msg)
if (len(port[constants.OVN_PORT_BINDING_PROFILE]) != len(
param_keys)):
msg = _('Invalid binding:profile. too many parameters')
raise n_exc.InvalidInput(error_message=msg)
break
if not param_dict:
return {}
for param_key, param_type in param_set.items():
if param_type is None:
continue
param_value = param_dict[param_key]
if not isinstance(param_value, param_type):
msg = _('Invalid binding:profile. %(key)s %(value)s '
'value invalid type') % {'key': param_key,
'value': param_value}
raise n_exc.InvalidInput(error_message=msg)
# Make sure we can successfully look up the port indicated by
# parent_name. Just let it raise the right exception if there is a
# problem.
if 'parent_name' in param_set:
plugin = directory.get_plugin()
plugin.get_port(n_context.get_admin_context(),
param_dict['parent_name'])
if 'tag' in param_set:
tag = int(param_dict['tag'])
if tag < 0 or tag > 4095:
msg = _('Invalid binding:profile. tag "%s" must be '
'an integer between 0 and 4095, inclusive') % tag
raise n_exc.InvalidInput(error_message=msg)
return param_dict
def is_dhcp_options_ignored(subnet):
# Don't insert DHCP_Options entry for v6 subnet with 'SLAAC' as
# 'ipv6_address_mode', since DHCPv6 shouldn't work for this mode.
return (subnet['ip_version'] == const.IP_VERSION_6 and
subnet.get('ipv6_address_mode') == const.IPV6_SLAAC)
def get_ovn_ipv6_address_mode(address_mode):
return constants.OVN_IPV6_ADDRESS_MODES[address_mode]
def get_revision_number(resource, resource_type):
"""Get the resource's revision number based on its type."""
if resource_type in (constants.TYPE_NETWORKS,
constants.TYPE_PORTS,
constants.TYPE_SECURITY_GROUP_RULES,
constants.TYPE_ROUTERS,
constants.TYPE_ROUTER_PORTS,
constants.TYPE_SECURITY_GROUPS,
constants.TYPE_FLOATINGIPS, constants.TYPE_SUBNETS):
return resource['revision_number']
else:
raise ovn_exc.UnknownResourceType(resource_type=resource_type)
def remove_macs_from_lsp_addresses(addresses):
"""Remove the mac addreses from the Logical_Switch_Port addresses column.
:param addresses: The list of addresses from the Logical_Switch_Port.
Example: ["80:fa:5b:06:72:b7 158.36.44.22",
"ff:ff:ff:ff:ff:ff 10.0.0.2"]
:returns: A list of IP addresses (v4 and v6)
"""
ip_list = []
for addr in addresses:
ip_list.extend([x for x in addr.split() if
(netutils.is_valid_ipv4(x) or
netutils.is_valid_ipv6(x))])
return ip_list
def get_allowed_address_pairs_ip_addresses(port):
"""Return a list of IP addresses from port's allowed_address_pairs.
:param port: A neutron port
:returns: A list of IP addresses (v4 and v6)
"""
return [x['ip_address'] for x in port.get('allowed_address_pairs', [])
if 'ip_address' in x]
def get_allowed_address_pairs_ip_addresses_from_ovn_port(ovn_port):
"""Return a list of IP addresses from ovn port.
Return a list of IP addresses equivalent of Neutron's port
allowed_address_pairs column using the data in the OVN port.
:param ovn_port: A OVN port
:returns: A list of IP addresses (v4 and v6)
"""
addresses = remove_macs_from_lsp_addresses(ovn_port.addresses)
port_security = remove_macs_from_lsp_addresses(ovn_port.port_security)
return [x for x in port_security if x not in addresses]
def get_ovn_port_security_groups(ovn_port, skip_trusted_port=True):
info = {'security_groups': ovn_port.external_ids.get(
constants.OVN_SG_IDS_EXT_ID_KEY, '').split(),
'device_owner': ovn_port.external_ids.get(
constants.OVN_DEVICE_OWNER_EXT_ID_KEY, '')}
return get_lsp_security_groups(info, skip_trusted_port=skip_trusted_port)
def get_ovn_port_addresses(ovn_port):
addresses = remove_macs_from_lsp_addresses(ovn_port.addresses)
port_security = remove_macs_from_lsp_addresses(ovn_port.port_security)
return list(set(addresses + port_security))
def sort_ips_by_version(addresses):
ip_map = {'ip4': [], 'ip6': []}
for addr in addresses:
ip_version = netaddr.IPNetwork(addr).version
ip_map['ip%d' % ip_version].append(addr)
return ip_map
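# e.g. sort_ips_by_version(['10.0.0.5', '2001:db8::1'])
# -> {'ip4': ['10.0.0.5'], 'ip6': ['2001:db8::1']}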
def is_lsp_router_port(port):
return port.get('device_owner') in const.ROUTER_PORT_OWNERS
def get_lrouter_ext_gw_static_route(ovn_router):
return [route for route in getattr(ovn_router, 'static_routes', []) if
strutils.bool_from_string(getattr(
route, 'external_ids', {}).get(
constants.OVN_ROUTER_IS_EXT_GW, 'false'))]
def get_lrouter_snats(ovn_router):
return [n for n in getattr(ovn_router, 'nat', []) if n.type == 'snat']
def get_lrouter_non_gw_routes(ovn_router):
routes = []
for route in getattr(ovn_router, 'static_routes', []):
external_ids = getattr(route, 'external_ids', {})
if strutils.bool_from_string(
external_ids.get(constants.OVN_ROUTER_IS_EXT_GW, 'false')):
continue
routes.append({'destination': route.ip_prefix,
'nexthop': route.nexthop})
return routes
def is_ovn_l3(l3_plugin):
return hasattr(l3_plugin, '_ovn_client_inst')
def get_system_dns_resolvers(resolver_file=DNS_RESOLVER_FILE):
resolvers = []
if not os.path.exists(resolver_file):
return resolvers
with open(resolver_file, 'r') as rconf:
for line in rconf.readlines():
if not line.startswith('nameserver'):
continue
line = line.split('nameserver')[1].strip()
ipv4 = re.search(r'^(?:[0-9]{1,3}\.){3}[0-9]{1,3}', line)
if ipv4:
resolvers.append(ipv4.group(0))
return resolvers
def get_port_subnet_ids(port):
fixed_ips = list(port['fixed_ips'])
return [f['subnet_id'] for f in fixed_ips]
def get_method_class(method):
if not inspect.ismethod(method):
return
return method.__self__.__class__
def ovn_metadata_name(id_):
"""Return the OVN metadata name based on an id."""
return 'metadata-%s' % id_
def is_gateway_chassis_invalid(chassis_name, gw_chassis,
physnet, chassis_physnets):
"""Check if gateway chassis is invalid
@param chassis_name: gateway chassis name
@type chassis_name: string
@param gw_chassis: List of gateway chassis in the system
@type gw_chassis: []
@param physnet: physical network associated to chassis_name
@type physnet: string
@param chassis_physnets: Dictionary linking chassis with their physnets
@type chassis_physnets: {}
@return Boolean
"""
if chassis_name == constants.OVN_GATEWAY_INVALID_CHASSIS:
return True
elif chassis_name not in chassis_physnets:
return True
elif physnet and physnet not in chassis_physnets.get(chassis_name):
return True
elif gw_chassis and chassis_name not in gw_chassis:
return True
return False
def is_provider_network(network):
return network.get(external_net.EXTERNAL, False)
def is_neutron_dhcp_agent_port(port):
"""Check if the given DHCP port belongs to Neutron DHCP agents
DHCP ports whose device_id equals 'reserved_dhcp_port'
or starts with the word 'dhcp' belong to the Neutron DHCP agents.
"""
return (port['device_owner'] == const.DEVICE_OWNER_DHCP and
(port['device_id'] == const.DEVICE_ID_RESERVED_DHCP_PORT or
port['device_id'].startswith('dhcp')))
def compute_address_pairs_diff(ovn_port, neutron_port):
"""Compute the differences in the allowed_address_pairs field."""
ovn_ap = get_allowed_address_pairs_ip_addresses_from_ovn_port(
ovn_port)
neutron_ap = get_allowed_address_pairs_ip_addresses(neutron_port)
added = set(neutron_ap) - set(ovn_ap)
removed = set(ovn_ap) - set(neutron_ap)
return AddrPairsDiff(added, removed, changed=any(added or removed))
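# e.g. if OVN currently allows {'10.0.0.5'} and Neutron wants {'10.0.0.6'}:
# AddrPairsDiff(added={'10.0.0.6'}, removed={'10.0.0.5'}, changed=True)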
def get_ovn_cms_options(chassis):
"""Return the list of CMS options in a Chassis."""
return [opt.strip() for opt in chassis.external_ids.get(
constants.OVN_CMS_OPTIONS, '').split(',')]
def is_gateway_chassis(chassis):
"""Check if the given chassis is a gateway chassis"""
return constants.CMS_OPT_CHASSIS_AS_GW in get_ovn_cms_options(chassis)
def get_port_capabilities(port):
"""Return a list of port's capabilities"""
return port.get(portbindings.PROFILE, {}).get('capabilities', [])
def get_port_id_from_gwc_row(row):
"""Return a port_id from gwc row
The Gateway_Chassis row stores router port_id in
the row name attribute:
<prefix>-<port_id>_<chassis_id>
:param row: A Gateway_Chassis table row.
:returns: String containing router port_id.
"""
return constants.RE_PORT_FROM_GWC.search(row.name).group(2)
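# e.g. a row named 'lrp-<port uuid>_<chassis uuid>' yields '<port uuid>'
# (assuming the 'lrp-' prefix from ovn_lrouter_port_name above and that
# RE_PORT_FROM_GWC captures the port id in group 2, per the docstring)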
def get_chassis_availability_zones(chassis):
"""Return a list of availability zones from a given OVN Chassis."""
azs = set()
if not chassis:
return azs
opt_key = constants.CMS_OPT_AVAILABILITY_ZONES + '='
for opt in get_ovn_cms_options(chassis):
if not opt.startswith(opt_key):
continue
values = opt.split('=')[1]
azs = {az.strip() for az in values.split(':') if az.strip()}
break
return azs
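# Example: a chassis whose CMS options include
# 'availability-zones=az-0:az-1' yields {'az-0', 'az-1'}
# (assuming CMS_OPT_AVAILABILITY_ZONES == 'availability-zones')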
def get_chassis_in_azs(chassis_list, az_list):
"""Return a set of Chassis that belongs to the AZs.
Given a list of Chassis and a list of availability zones (AZs),
return a set of Chassis that belongs to one or more AZs.
:param chassis_list: A list of Chassis objects
:param az_list: A list of availability zones
:returns: A set of Chassis names
"""
chassis = set()
for ch in chassis_list:
chassis_azs = get_chassis_availability_zones(ch)
if chassis_azs.intersection(az_list):
chassis.add(ch.name)
return chassis
def get_gateway_chassis_without_azs(chassis_list):
"""Return a set of Chassis that does not belong to any AZs.
Filter a list of Chassis and return only the Chassis that does not
belong to any availability zones.
:param chassis_list: A list of Chassis objects
:returns: A set of Chassis names
"""
return {ch.name for ch in chassis_list if is_gateway_chassis(ch) and not
get_chassis_availability_zones(ch)}
def parse_ovn_lb_port_forwarding(ovn_rtr_lb_pfs):
"""Return a dictionary compatible with port forwarding from OVN lb."""
result = {}
for ovn_lb in ovn_rtr_lb_pfs:
ext_ids = ovn_lb.external_ids
fip_id = ext_ids.get(constants.OVN_FIP_EXT_ID_KEY)
protocol = (ovn_lb.protocol[0]
if ovn_lb.protocol else ovsdbapp_const.PROTO_TCP)
fip_dict = result.get(fip_id, {})
fip_dict_proto = fip_dict.get(protocol, set())
ovn_vips = ovn_lb.vips
for vip, ips in ovn_vips.items():
for ip in ips.split(','):
fip_dict_proto.add("{} {}".format(vip, ip))
fip_dict[protocol] = fip_dict_proto
result[fip_id] = fip_dict
return result
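# Result shape (values illustrative):
# {<fip_id>: {'tcp': {'172.24.4.5:2222 10.0.0.10:22'}}}
# i.e. per-FIP, per-protocol sets of "external_vip member_ip" strings.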
def get_network_name_from_datapath(datapath):
return datapath.external_ids['name'].replace('neutron-', '')
def is_port_external(port):
# This port is represented in OVN DB as lsp.type=external
capabilities = []
vnic_type = portbindings.VNIC_NORMAL
if isinstance(port, dict):
capabilities = get_port_capabilities(port)
vnic_type = port.get(portbindings.VNIC_TYPE,
portbindings.VNIC_NORMAL)
else:
if isinstance(port, models_v2.Port):
bindings = port.port_bindings
elif isinstance(port, ports_obj.Port):
bindings = port.bindings
else: # What else could be "port"?
bindings = []
if bindings:
profile = bindings[0].get('profile')
if profile:
# DB object, not OVO, stores the dict in JSON.
profile = (jsonutils.loads(profile) if isinstance(profile, str)
else profile)
capabilities = profile.get('capabilities', [])
vnic_type = bindings[0].get('vnic_type', portbindings.VNIC_NORMAL)
return (vnic_type in constants.EXTERNAL_PORT_TYPES and
constants.PORT_CAP_SWITCHDEV not in capabilities)
| [((43, 6, 43, 29), 'oslo_log.log.getLogger', 'log.getLogger', ({(43, 20, 43, 28): '__name__'}, {}), '(__name__)', False, 'from oslo_log import log\n'), ((49, 16, 50, 53), 'collections.namedtuple', 'collections.namedtuple', ({(50, 4, 50, 19): '"""AddrPairsDiff"""', (50, 21, 50, 52): "['added', 'removed', 'changed']"}, {}), "('AddrPairsDiff', ['added', 'removed', 'changed'])", False, 'import collections\n'), ((52, 26, 53, 74), 'collections.namedtuple', 'collections.namedtuple', ({(53, 4, 53, 29): '"""PortExtraDHCPValidation"""', (53, 31, 53, 73): "['failed', 'invalid_ipv4', 'invalid_ipv6']"}, {}), "('PortExtraDHCPValidation', ['failed', 'invalid_ipv4',\n 'invalid_ipv6'])", False, 'import collections\n'), ((91, 11, 95, 56), 'os.path.join', 'os.path.join', ({(92, 8, 92, 16): 'sock_dir', (95, 8, 95, 55): '(const.VHOST_USER_DEVICE_PREFIX + port_id)[:14]'}, {}), '(sock_dir, (const.VHOST_USER_DEVICE_PREFIX + port_id)[:14])', False, 'import os\n'), ((194, 11, 194, 40), 'neutron_lib.utils.net.is_port_trusted', 'n_utils.is_port_trusted', ({(194, 35, 194, 39): 'port'}, {}), '(port)', True, 'from neutron_lib.utils import net as n_utils\n'), ((267, 17, 267, 39), 'neutron_lib.plugins.directory.get_plugin', 'directory.get_plugin', ({}, {}), '()', False, 'from neutron_lib.plugins import directory\n'), ((303, 14, 303, 70), 'neutron.common.ovn.exceptions.UnknownResourceType', 'ovn_exc.UnknownResourceType', (), '', True, 'from neutron.common.ovn import exceptions as ovn_exc\n'), ((402, 11, 402, 40), 'os.path.exists', 'os.path.exists', ({(402, 26, 402, 39): 'resolver_file'}, {}), '(resolver_file)', False, 'import os\n'), ((423, 11, 423, 35), 'inspect.ismethod', 'inspect.ismethod', ({(423, 28, 423, 34): 'method'}, {}), '(method)', False, 'import inspect\n'), ((225, 16, 226, 57), 'neutron_lib.api.validators.is_attr_set', 'validators.is_attr_set', ({(226, 16, 226, 56): 'port[constants.OVN_PORT_BINDING_PROFILE]'}, {}), '(port[constants.OVN_PORT_BINDING_PROFILE])', False, 'from neutron_lib.api import validators\n'), ((243, 18, 243, 55), 'neutron_lib.exceptions.InvalidInput', 'n_exc.InvalidInput', (), '', True, 'from neutron_lib import exceptions as n_exc\n'), ((246, 18, 246, 67), 'neutron._i18n._', '_', ({(246, 20, 246, 66): '"""Invalid binding:profile. too many parameters"""'}, {}), "('Invalid binding:profile. too many parameters')", False, 'from neutron._i18n import _\n'), ((247, 18, 247, 55), 'neutron_lib.exceptions.InvalidInput', 'n_exc.InvalidInput', (), '', True, 'from neutron_lib import exceptions as n_exc\n'), ((261, 18, 261, 55), 'neutron_lib.exceptions.InvalidInput', 'n_exc.InvalidInput', (), '', True, 'from neutron_lib import exceptions as n_exc\n'), ((268, 24, 268, 53), 'neutron_lib.context.get_admin_context', 'n_context.get_admin_context', ({}, {}), '()', True, 'from neutron_lib import context as n_context\n'), ((276, 18, 276, 55), 'neutron_lib.exceptions.InvalidInput', 'n_exc.InvalidInput', (), '', True, 'from neutron_lib import exceptions as n_exc\n'), ((363, 21, 363, 44), 'netaddr.IPNetwork', 'netaddr.IPNetwork', ({(363, 39, 363, 43): 'addr'}, {}), '(addr)', False, 'import netaddr\n'), ((411, 19, 411, 69), 're.search', 're.search', ({(411, 29, 411, 62): '"""^(?:[0-9]{1,3}\\\\.){3}[0-9]{1,3}"""', (411, 64, 411, 68): 'line'}, {}), "('^(?:[0-9]{1,3}\\\\.){3}[0-9]{1,3}', line)", False, 'import re\n'), ((511, 11, 511, 54), 'neutron.common.ovn.constants.RE_PORT_FROM_GWC.search', 'constants.RE_PORT_FROM_GWC.search', ({(511, 45, 511, 53): 'row.name'}, {}), '(row.name)', False, 'from neutron.common.ovn import constants\n'), ((241, 18, 242, 32), 'neutron._i18n._', '_', ({(241, 20, 242, 31): '"""Invalid binding:profile. %s are all required."""'}, {}), "('Invalid binding:profile. %s are all required.')", False, 'from neutron._i18n import _\n'), ((258, 18, 259, 41), 'neutron._i18n._', '_', ({(258, 20, 259, 40): '"""Invalid binding:profile. %(key)s %(value)s value invalid type"""'}, {}), "('Invalid binding:profile. %(key)s %(value)s value invalid type')", False, 'from neutron._i18n import _\n'), ((274, 18, 275, 63), 'neutron._i18n._', '_', ({(274, 20, 275, 62): '"""Invalid binding:profile. tag "%s" must be an integer between 0 and 4095, inclusive"""'}, {}), '(\'Invalid binding:profile. tag "%s" must be an integer between 0 and 4095, inclusive\'\n )', False, 'from neutron._i18n import _\n'), ((605, 27, 605, 51), 'oslo_serialization.jsonutils.loads', 'jsonutils.loads', ({(605, 43, 605, 50): 'profile'}, {}), '(profile)', False, 'from oslo_serialization import jsonutils\n'), ((317, 24, 317, 49), 'oslo_utils.netutils.is_valid_ipv4', 'netutils.is_valid_ipv4', ({(317, 47, 317, 48): 'x'}, {}), '(x)', False, 'from oslo_utils import netutils\n'), ((318, 24, 318, 49), 'oslo_utils.netutils.is_valid_ipv6', 'netutils.is_valid_ipv6', ({(318, 47, 318, 48): 'x'}, {}), '(x)', False, 'from oslo_utils import netutils\n')] |
pjryan93/web3.py | ens/exceptions.py | e066452a7b0e78d6cb8a9462532d169de901ef99 | import idna
class AddressMismatch(ValueError):
'''
In order to set up reverse resolution correctly, the ENS name should first
point to the address. This exception is raised if the name does
not currently point to the address.
'''
pass
class InvalidName(idna.IDNAError):
'''
This exception is raised if the provided name does not meet
the syntax standards specified in `EIP 137 name syntax
<https://github.com/ethereum/EIPs/blob/master/EIPS/eip-137.md#name-syntax>`_.
For example: names may not start with a dot, or include a space.
'''
pass
class UnauthorizedError(Exception):
'''
Raised if the sending account is not the owner of the name
you are trying to modify. Make sure to set ``from`` in the
``transact`` keyword argument to the owner of the name.
'''
pass
class UnownedName(Exception):
'''
Raised if you are trying to modify a name that no one owns.
If working on a subdomain, make sure the subdomain gets created
first with :meth:`~ens.main.ENS.setup_address`.
'''
pass
class BidTooLow(ValueError):
'''
Raised if you bid less than the minimum amount
'''
pass
class InvalidBidHash(ValueError):
'''
Raised if you supply incorrect data to generate the bid hash.
'''
pass
class InvalidLabel(ValueError):
'''
Raised if you supply an invalid label
'''
pass
class OversizeTransaction(ValueError):
'''
Raised if a transaction you are trying to create would cost so
much gas that it could not fit in a block.
For example: when you try to start too many auctions at once.
'''
pass
class UnderfundedBid(ValueError):
'''
Raised if you send less wei with your bid than you declared
as your intent to bid.
'''
pass
| [] |
CaptainHorse/MAS-Additions | Submods/MAS Additions/MASM/scripts/midi_input.py | 5714aaf8cfa3c57432f6231795cbe1d75df46f74 | import mido
from socketer import MASM
inPort = None
doReadInput = False
def Start():
global inPort
try:
print(f"MIDI inputs: {mido.get_input_names()}")
inPort = mido.open_input()
print(f"MIDI input open: {inPort}")
except Exception as e:
inPort = None
print(f"Could not open MIDI input: {e}")
def Update():
global inPort
global doReadInput
if inPort is not None:
if doReadInput and MASM.hasDataBool("MIDI_STOP"):
doReadInput = False
elif not doReadInput and MASM.hasDataBool("MIDI_START"):
doReadInput = True
for msg in inPort.iter_pending():
if MASM.hasDataCheck("MIDI_KEYMAPKEY"):
bytes = msg.bytes()
if len(bytes) >= 3:
MASM.hasDataBool("MIDI_KEYMAPKEY")
MASM.sendData("MIDI_KEY", bytes[1])
elif doReadInput: # We want to clear old pending messages but not send them if input is disabled
bytes = msg.bytes()
if len(bytes) >= 3:
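# MIDI status byte 144 (0x90) is note-on; status 128 (0x80) or a
# note-on with velocity 0 means note-off.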
if bytes[0] == 144 and bytes[2] > 0:
MASM.sendData(f"MIDI_NOTE.{bytes[1]}", bytes[2])
elif bytes[0] == 128 or bytes[2] == 0:
MASM.sendData(f"MIDI_NOTE.{bytes[1]}", 0) | [((11, 11, 11, 28), 'mido.open_input', 'mido.open_input', ({}, {}), '()', False, 'import mido\n'), ((21, 21, 21, 50), 'socketer.MASM.hasDataBool', 'MASM.hasDataBool', ({(21, 38, 21, 49): '"""MIDI_STOP"""'}, {}), "('MIDI_STOP')", False, 'from socketer import MASM\n'), ((26, 6, 26, 41), 'socketer.MASM.hasDataCheck', 'MASM.hasDataCheck', ({(26, 24, 26, 40): '"""MIDI_KEYMAPKEY"""'}, {}), "('MIDI_KEYMAPKEY')", False, 'from socketer import MASM\n'), ((23, 27, 23, 57), 'socketer.MASM.hasDataBool', 'MASM.hasDataBool', ({(23, 44, 23, 56): '"""MIDI_START"""'}, {}), "('MIDI_START')", False, 'from socketer import MASM\n'), ((10, 24, 10, 46), 'mido.get_input_names', 'mido.get_input_names', ({}, {}), '()', False, 'import mido\n'), ((29, 5, 29, 39), 'socketer.MASM.hasDataBool', 'MASM.hasDataBool', ({(29, 22, 29, 38): '"""MIDI_KEYMAPKEY"""'}, {}), "('MIDI_KEYMAPKEY')", False, 'from socketer import MASM\n'), ((30, 5, 30, 40), 'socketer.MASM.sendData', 'MASM.sendData', ({(30, 19, 30, 29): '"""MIDI_KEY"""', (30, 31, 30, 39): 'bytes[1]'}, {}), "('MIDI_KEY', bytes[1])", False, 'from socketer import MASM\n'), ((35, 6, 35, 54), 'socketer.MASM.sendData', 'MASM.sendData', ({(35, 20, 35, 43): 'f"""MIDI_NOTE.{bytes[1]}"""', (35, 45, 35, 53): 'bytes[2]'}, {}), "(f'MIDI_NOTE.{bytes[1]}', bytes[2])", False, 'from socketer import MASM\n'), ((37, 6, 37, 47), 'socketer.MASM.sendData', 'MASM.sendData', ({(37, 20, 37, 43): 'f"""MIDI_NOTE.{bytes[1]}"""', (37, 45, 37, 46): '(0)'}, {}), "(f'MIDI_NOTE.{bytes[1]}', 0)", False, 'from socketer import MASM\n')] |
Matheus-Rangel/dash-carbon-components | dash_carbon_components/Column.py | e3f4aa4a8d649e2740db32677040f2548ef5da48 | # AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class Column(Component):
"""A Column component.
Row Column
Keyword arguments:
- children (list of a list of or a singular dash component, string or numbers | a list of or a singular dash component, string or number; optional): The children of the element
- style (dict; optional): The inline styles
- id (string; optional): The ID of this component, used to identify dash components
in callbacks. The ID needs to be unique across all of the
components in an app.
- className (string; default ''): The class of the element
- columnSizes (list of strings; optional): The size of the column with the display size, sm-4, lg-16 ...
- offsetSizes (list of strings; optional): The size of the offset with the display size, lg-2 ..."""
@_explicitize_args
def __init__(self, children=None, style=Component.UNDEFINED, id=Component.UNDEFINED, className=Component.UNDEFINED, columnSizes=Component.UNDEFINED, offsetSizes=Component.UNDEFINED, **kwargs):
self._prop_names = ['children', 'style', 'id', 'className', 'columnSizes', 'offsetSizes']
self._type = 'Column'
self._namespace = 'dash_carbon_components'
self._valid_wildcard_attributes = []
self.available_properties = ['children', 'style', 'id', 'className', 'columnSizes', 'offsetSizes']
self.available_wildcard_properties = []
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(Column, self).__init__(children=children, **args)
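# Usage sketch (sizes are hypothetical, following the docstring's format):
# Column(children=[], columnSizes=["sm-4", "lg-8"], offsetSizes=["lg-2"])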
| [] |
ddddhm1/LuWu | src/backend/schemas/vps.py | f9feaf10a6aca0dd31f250741a1c542ee5256633 | from typing import List
from typing import Optional
from typing import Union
from models.vps import VpsStatus
from schemas.base import APIModel
from schemas.base import BasePagination
from schemas.base import BaseSchema
from schemas.base import BaseSuccessfulResponseModel
class VpsSshKeySchema(APIModel):
name: str
public_key: str = None
private_key: str = None
isp_id: int
ssh_key_id: Optional[str]
date_created: Optional[str]
fingerprint: Optional[str]
class VpsSpecPlanSchema(APIModel):
name: str
plan_code: Union[str, int]
region_codes: List = None
bandwidth: float
ram: int
vcpu: int
disk: int
price_monthly: Union[float, int, str] = None
price_hourly: Union[float, int, str] = None
price_yearly: Union[float, int, str] = None
class VpsSpecRegionSchema(APIModel):
name: str
region_code: Union[str, int]
features: List[str] = None
plan_codes: List[Union[str, int]] = []
class VpsSpecOsSchema(APIModel):
name: str
os_code: Union[str, int]
region_codes: List[Union[str, int]] = []
plan_codes: List[Union[str, int]] = []
class VpsSpecSchema(APIModel):
region: List[VpsSpecRegionSchema] = []
plan: List[VpsSpecPlanSchema] = []
os: List[VpsSpecOsSchema] = []
class VpsSpecResponse(BaseSuccessfulResponseModel):
result: VpsSpecSchema
class VpsCreateSchema(APIModel):
hostname: str
isp_id: int
region_code: str
os_code: str
plan_code: str
ssh_keys: List[str] = []
status: int = VpsStatus.init
remark: str = None
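# Example payload (all values hypothetical):
# VpsCreateSchema(hostname='edge-01', isp_id=1, region_code='ewr',
# os_code='387', plan_code='vc2-1c-1gb')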
class VpsItemSchema(BaseSchema):
isp_id: int
ip: Union[int, str, None]
server_id: Optional[str]
hostname: str
os: Optional[str]
plan: Optional[str]
region: Optional[str]
status: int
status_name: str
status_msg: Optional[str]
isp_provider_name: str
class VpsItemResponse(BaseSuccessfulResponseModel):
result: VpsItemSchema
class VpsPaginationSchema(BasePagination):
items: Optional[List[VpsItemSchema]]
class VpsPaginationResponse(BaseSuccessfulResponseModel):
result: VpsPaginationSchema
class VpsSshKeyResponseSchema(BaseSuccessfulResponseModel):
result: List[VpsSshKeySchema]
| [] |
hari-sh/sigplot | main.py | cd2359d7c868e35ed1d976d7eb8ac35d2dcc7e81 | import sigplot as sp
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
matplotlib.rcParams['toolbar'] = 'None'
plt.style.use('dark_background')
fig = plt.figure()
# seed = np.linspace(3, 7, 1000)
# a = (np.sin(2 * np.pi * seed))
# b = (np.cos(2 * np.pi * seed))
# sp.correlate(fig, b, a, 300)
t = np.linspace(0, 1, 500)
b = (np.cos(2 * np.pi * t))
# x = np.concatenate([np.zeros(500), signal.sawtooth(2 * np.pi * 5 * t), np.zeros(500), np.ones(120), np.zeros(500)])
x = np.concatenate([np.zeros(500), np.ones(500), np.zeros(500)])
sp.fourier_series(fig, x, 100, 200, 200)
plt.show()
# WriteToVideo("twoPulse.mp4", anim);
| [((8, 0, 8, 32), 'matplotlib.pyplot.style.use', 'plt.style.use', ({(8, 14, 8, 31): '"""dark_background"""'}, {}), "('dark_background')", True, 'import matplotlib.pyplot as plt\n'), ((10, 6, 10, 18), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((17, 4, 17, 26), 'numpy.linspace', 'np.linspace', ({(17, 16, 17, 17): '0', (17, 19, 17, 20): '1', (17, 22, 17, 25): '500'}, {}), '(0, 1, 500)', True, 'import numpy as np\n'), ((18, 5, 18, 26), 'numpy.cos', 'np.cos', ({(18, 12, 18, 25): '2 * np.pi * t'}, {}), '(2 * np.pi * t)', True, 'import numpy as np\n'), ((21, 0, 21, 40), 'sigplot.fourier_series', 'sp.fourier_series', ({(21, 18, 21, 21): 'fig', (21, 23, 21, 24): 'x', (21, 26, 21, 29): '(100)', (21, 31, 21, 34): '(200)', (21, 36, 21, 39): '(200)'}, {}), '(fig, x, 100, 200, 200)', True, 'import sigplot as sp\n'), ((23, 0, 23, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((20, 20, 20, 33), 'numpy.zeros', 'np.zeros', ({(20, 29, 20, 32): '500'}, {}), '(500)', True, 'import numpy as np\n'), ((20, 35, 20, 47), 'numpy.ones', 'np.ones', ({(20, 43, 20, 46): '500'}, {}), '(500)', True, 'import numpy as np\n'), ((20, 49, 20, 62), 'numpy.zeros', 'np.zeros', ({(20, 58, 20, 61): '500'}, {}), '(500)', True, 'import numpy as np\n')] |
msyyc/autorest.python | test/vanilla/version-tolerant/Expected/AcceptanceTests/UrlVersionTolerant/urlversiontolerant/operations/_operations.py | 91aa86f51d5c43c10ead5d51ac102618d23e3a21 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer
from .._vendor import _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
def build_paths_get_boolean_true_request(**kwargs: Any) -> HttpRequest:
bool_path = kwargs.pop("bool_path", True) # type: bool
accept = "application/json"
# Construct URL
url = "/paths/bool/true/{boolPath}"
path_format_arguments = {
"boolPath": _SERIALIZER.url("bool_path", bool_path, "bool"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_boolean_false_request(**kwargs: Any) -> HttpRequest:
bool_path = kwargs.pop("bool_path", False) # type: bool
accept = "application/json"
# Construct URL
url = "/paths/bool/false/{boolPath}"
path_format_arguments = {
"boolPath": _SERIALIZER.url("bool_path", bool_path, "bool"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_int_one_million_request(**kwargs: Any) -> HttpRequest:
int_path = kwargs.pop("int_path", 1000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/int/1000000/{intPath}"
path_format_arguments = {
"intPath": _SERIALIZER.url("int_path", int_path, "int"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_int_negative_one_million_request(**kwargs: Any) -> HttpRequest:
int_path = kwargs.pop("int_path", -1000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/int/-1000000/{intPath}"
path_format_arguments = {
"intPath": _SERIALIZER.url("int_path", int_path, "int"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_path = kwargs.pop("long_path", 10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/long/10000000000/{longPath}"
path_format_arguments = {
"longPath": _SERIALIZER.url("long_path", long_path, "long"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_get_negative_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_path = kwargs.pop("long_path", -10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/paths/long/-10000000000/{longPath}"
path_format_arguments = {
"longPath": _SERIALIZER.url("long_path", long_path, "long"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_float_scientific_positive_request(**kwargs: Any) -> HttpRequest:
float_path = kwargs.pop("float_path", 103400000000000000000) # type: float
accept = "application/json"
# Construct URL
url = "/paths/float/1.034E+20/{floatPath}"
path_format_arguments = {
"floatPath": _SERIALIZER.url("float_path", float_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_float_scientific_negative_request(**kwargs: Any) -> HttpRequest:
float_path = kwargs.pop("float_path", -1.034e-20) # type: float
accept = "application/json"
# Construct URL
url = "/paths/float/-1.034E-20/{floatPath}"
path_format_arguments = {
"floatPath": _SERIALIZER.url("float_path", float_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_double_decimal_positive_request(**kwargs: Any) -> HttpRequest:
double_path = kwargs.pop("double_path", 9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/paths/double/9999999.999/{doublePath}"
path_format_arguments = {
"doublePath": _SERIALIZER.url("double_path", double_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_double_decimal_negative_request(**kwargs: Any) -> HttpRequest:
double_path = kwargs.pop("double_path", -9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/paths/double/-9999999.999/{doublePath}"
path_format_arguments = {
"doublePath": _SERIALIZER.url("double_path", double_path, "float"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_unicode_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "啊齄丂狛狜隣郎隣兀﨩") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/unicode/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_url_encoded_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "begin!*'();:@ &=+$,/?#[]end") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_url_non_encoded_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "begin!*'();:@&=+$,end") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/begin!*'();:@&=+$,end/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str", skip_quote=True),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_empty_request(**kwargs: Any) -> HttpRequest:
string_path = kwargs.pop("string_path", "") # type: str
accept = "application/json"
# Construct URL
url = "/paths/string/empty/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_string_null_request(string_path: str, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/string/null/{stringPath}"
path_format_arguments = {
"stringPath": _SERIALIZER.url("string_path", string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_enum_valid_request(enum_path: str, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/enum/green%20color/{enumPath}"
path_format_arguments = {
"enumPath": _SERIALIZER.url("enum_path", enum_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_enum_null_request(enum_path: str, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/string/null/{enumPath}"
path_format_arguments = {
"enumPath": _SERIALIZER.url("enum_path", enum_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_byte_multi_byte_request(byte_path: bytearray, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/byte/multibyte/{bytePath}"
path_format_arguments = {
"bytePath": _SERIALIZER.url("byte_path", byte_path, "bytearray"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_byte_empty_request(**kwargs: Any) -> HttpRequest:
byte_path = kwargs.pop("byte_path", bytearray("", encoding="utf-8")) # type: bytearray
accept = "application/json"
# Construct URL
url = "/paths/byte/empty/{bytePath}"
path_format_arguments = {
"bytePath": _SERIALIZER.url("byte_path", byte_path, "bytearray"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_byte_null_request(byte_path: bytearray, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/byte/null/{bytePath}"
path_format_arguments = {
"bytePath": _SERIALIZER.url("byte_path", byte_path, "bytearray"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_valid_request(**kwargs: Any) -> HttpRequest:
date_path = kwargs.pop("date_path", "2012-01-01") # type: datetime.date
accept = "application/json"
# Construct URL
url = "/paths/date/2012-01-01/{datePath}"
path_format_arguments = {
"datePath": _SERIALIZER.url("date_path", date_path, "date"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_null_request(date_path: datetime.date, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/date/null/{datePath}"
path_format_arguments = {
"datePath": _SERIALIZER.url("date_path", date_path, "date"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_time_valid_request(**kwargs: Any) -> HttpRequest:
date_time_path = kwargs.pop("date_time_path", "2012-01-01T01:01:01Z") # type: datetime.datetime
accept = "application/json"
# Construct URL
url = "/paths/datetime/2012-01-01T01%3A01%3A01Z/{dateTimePath}"
path_format_arguments = {
"dateTimePath": _SERIALIZER.url("date_time_path", date_time_path, "iso-8601"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_date_time_null_request(date_time_path: datetime.datetime, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/datetime/null/{dateTimePath}"
path_format_arguments = {
"dateTimePath": _SERIALIZER.url("date_time_path", date_time_path, "iso-8601"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_base64_url_request(base64_url_path: bytes, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/string/bG9yZW0/{base64UrlPath}"
path_format_arguments = {
"base64UrlPath": _SERIALIZER.url("base64_url_path", base64_url_path, "base64"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_array_csv_in_path_request(array_path: List[str], **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = (
"/paths/array/ArrayPath1%2cbegin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend%2c%2c/{arrayPath}"
)
path_format_arguments = {
"arrayPath": _SERIALIZER.url("array_path", array_path, "[str]", div=","),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paths_unix_time_url_request(unix_time_url_path: datetime.datetime, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/paths/int/1460505600/{unixTimeUrlPath}"
path_format_arguments = {
"unixTimeUrlPath": _SERIALIZER.url("unix_time_url_path", unix_time_url_path, "unix-time"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_queries_get_boolean_true_request(**kwargs: Any) -> HttpRequest:
bool_query = kwargs.pop("bool_query", True) # type: bool
accept = "application/json"
# Construct URL
url = "/queries/bool/true"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["boolQuery"] = _SERIALIZER.query("bool_query", bool_query, "bool")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_boolean_false_request(**kwargs: Any) -> HttpRequest:
bool_query = kwargs.pop("bool_query", False) # type: bool
accept = "application/json"
# Construct URL
url = "/queries/bool/false"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["boolQuery"] = _SERIALIZER.query("bool_query", bool_query, "bool")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_boolean_null_request(*, bool_query: Optional[bool] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/bool/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if bool_query is not None:
query_parameters["boolQuery"] = _SERIALIZER.query("bool_query", bool_query, "bool")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_int_one_million_request(**kwargs: Any) -> HttpRequest:
int_query = kwargs.pop("int_query", 1000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/int/1000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["intQuery"] = _SERIALIZER.query("int_query", int_query, "int")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_int_negative_one_million_request(**kwargs: Any) -> HttpRequest:
int_query = kwargs.pop("int_query", -1000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/int/-1000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["intQuery"] = _SERIALIZER.query("int_query", int_query, "int")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_int_null_request(*, int_query: Optional[int] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/int/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if int_query is not None:
query_parameters["intQuery"] = _SERIALIZER.query("int_query", int_query, "int")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_query = kwargs.pop("long_query", 10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/long/10000000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["longQuery"] = _SERIALIZER.query("long_query", long_query, "long")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_negative_ten_billion_request(**kwargs: Any) -> HttpRequest:
long_query = kwargs.pop("long_query", -10000000000) # type: int
accept = "application/json"
# Construct URL
url = "/queries/long/-10000000000"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["longQuery"] = _SERIALIZER.query("long_query", long_query, "long")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_get_long_null_request(*, long_query: Optional[int] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/long/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if long_query is not None:
query_parameters["longQuery"] = _SERIALIZER.query("long_query", long_query, "long")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_float_scientific_positive_request(**kwargs: Any) -> HttpRequest:
float_query = kwargs.pop("float_query", 103400000000000000000) # type: float
accept = "application/json"
# Construct URL
url = "/queries/float/1.034E+20"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["floatQuery"] = _SERIALIZER.query("float_query", float_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_float_scientific_negative_request(**kwargs: Any) -> HttpRequest:
float_query = kwargs.pop("float_query", -1.034e-20) # type: float
accept = "application/json"
# Construct URL
url = "/queries/float/-1.034E-20"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["floatQuery"] = _SERIALIZER.query("float_query", float_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_float_null_request(*, float_query: Optional[float] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/float/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if float_query is not None:
query_parameters["floatQuery"] = _SERIALIZER.query("float_query", float_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_double_decimal_positive_request(**kwargs: Any) -> HttpRequest:
double_query = kwargs.pop("double_query", 9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/queries/double/9999999.999"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["doubleQuery"] = _SERIALIZER.query("double_query", double_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_double_decimal_negative_request(**kwargs: Any) -> HttpRequest:
double_query = kwargs.pop("double_query", -9999999.999) # type: float
accept = "application/json"
# Construct URL
url = "/queries/double/-9999999.999"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["doubleQuery"] = _SERIALIZER.query("double_query", double_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_double_null_request(*, double_query: Optional[float] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/double/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if double_query is not None:
query_parameters["doubleQuery"] = _SERIALIZER.query("double_query", double_query, "float")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_unicode_request(**kwargs: Any) -> HttpRequest:
string_query = kwargs.pop("string_query", "啊齄丂狛狜隣郎隣兀﨩") # type: str
accept = "application/json"
# Construct URL
url = "/queries/string/unicode/"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_url_encoded_request(**kwargs: Any) -> HttpRequest:
string_query = kwargs.pop("string_query", "begin!*'();:@ &=+$,/?#[]end") # type: str
accept = "application/json"
# Construct URL
url = "/queries/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_empty_request(**kwargs: Any) -> HttpRequest:
string_query = kwargs.pop("string_query", "") # type: str
accept = "application/json"
# Construct URL
url = "/queries/string/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_string_null_request(*, string_query: Optional[str] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/string/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if string_query is not None:
query_parameters["stringQuery"] = _SERIALIZER.query("string_query", string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_enum_valid_request(*, enum_query: Optional[str] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/enum/green%20color"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if enum_query is not None:
query_parameters["enumQuery"] = _SERIALIZER.query("enum_query", enum_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_enum_null_request(*, enum_query: Optional[str] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/enum/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if enum_query is not None:
query_parameters["enumQuery"] = _SERIALIZER.query("enum_query", enum_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_byte_multi_byte_request(*, byte_query: Optional[bytearray] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/byte/multibyte"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if byte_query is not None:
query_parameters["byteQuery"] = _SERIALIZER.query("byte_query", byte_query, "bytearray")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_byte_empty_request(**kwargs: Any) -> HttpRequest:
byte_query = kwargs.pop("byte_query", bytearray("", encoding="utf-8")) # type: bytearray
accept = "application/json"
# Construct URL
url = "/queries/byte/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["byteQuery"] = _SERIALIZER.query("byte_query", byte_query, "bytearray")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_byte_null_request(*, byte_query: Optional[bytearray] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/byte/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if byte_query is not None:
query_parameters["byteQuery"] = _SERIALIZER.query("byte_query", byte_query, "bytearray")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_valid_request(**kwargs: Any) -> HttpRequest:
date_query = kwargs.pop("date_query", "2012-01-01") # type: datetime.date
accept = "application/json"
# Construct URL
url = "/queries/date/2012-01-01"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["dateQuery"] = _SERIALIZER.query("date_query", date_query, "date")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_null_request(*, date_query: Optional[datetime.date] = None, **kwargs: Any) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/date/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if date_query is not None:
query_parameters["dateQuery"] = _SERIALIZER.query("date_query", date_query, "date")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_time_valid_request(**kwargs: Any) -> HttpRequest:
date_time_query = kwargs.pop("date_time_query", "2012-01-01T01:01:01Z") # type: datetime.datetime
accept = "application/json"
# Construct URL
url = "/queries/datetime/2012-01-01T01%3A01%3A01Z"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters["dateTimeQuery"] = _SERIALIZER.query("date_time_query", date_time_query, "iso-8601")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_date_time_null_request(
*, date_time_query: Optional[datetime.datetime] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/datetime/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if date_time_query is not None:
query_parameters["dateTimeQuery"] = _SERIALIZER.query("date_time_query", date_time_query, "iso-8601")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_csv_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/csv/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_csv_null_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/csv/string/null"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_csv_empty_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/csv/string/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_no_collection_format_empty_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/none/string/empty"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=",")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_ssv_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/ssv/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=" ")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_tsv_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/tsv/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div=" ")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_queries_array_string_pipes_valid_request(
*, array_query: Optional[List[str]] = None, **kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/queries/array/pipes/string/valid"
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if array_query is not None:
query_parameters["arrayQuery"] = _SERIALIZER.query("array_query", array_query, "[str]", div="|")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_path_items_get_all_with_values_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/pathItemStringQuery/localStringQuery"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_path_items_get_global_query_null_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/localStringQuery"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_path_items_get_global_and_local_query_null_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/null"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_path_items_get_local_path_item_query_null_request(
path_item_string_path: str,
global_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
global_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
accept = "application/json"
# Construct URL
url = "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/null/null"
path_format_arguments = {
"pathItemStringPath": _SERIALIZER.url("path_item_string_path", path_item_string_path, "str"),
"globalStringPath": _SERIALIZER.url("global_string_path", global_string_path, "str"),
"localStringPath": _SERIALIZER.url("local_string_path", local_string_path, "str"),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if path_item_string_query is not None:
query_parameters["pathItemStringQuery"] = _SERIALIZER.query(
"path_item_string_query", path_item_string_query, "str"
)
if global_string_query is not None:
query_parameters["globalStringQuery"] = _SERIALIZER.query("global_string_query", global_string_query, "str")
if local_string_query is not None:
query_parameters["localStringQuery"] = _SERIALIZER.query("local_string_query", local_string_query, "str")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
class PathsOperations(object):
"""PathsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_boolean_true(self, **kwargs: Any) -> None:
"""Get true Boolean value on path.
:keyword bool_path: true boolean value. The default value is True. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_path: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_path = kwargs.pop("bool_path", True) # type: bool
request = build_paths_get_boolean_true_request(
bool_path=bool_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_true.metadata = {"url": "/paths/bool/true/{boolPath}"} # type: ignore
@distributed_trace
def get_boolean_false(self, **kwargs: Any) -> None:
"""Get false Boolean value on path.
:keyword bool_path: false boolean value. The default value is False. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_path: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_path = kwargs.pop("bool_path", False) # type: bool
request = build_paths_get_boolean_false_request(
bool_path=bool_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_false.metadata = {"url": "/paths/bool/false/{boolPath}"} # type: ignore
@distributed_trace
def get_int_one_million(self, **kwargs: Any) -> None:
"""Get '1000000' integer value.
:keyword int_path: '1000000' integer value. The default value is 1000000. Note that overriding
this default value may result in unsupported behavior.
:paramtype int_path: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_path = kwargs.pop("int_path", 1000000) # type: int
request = build_paths_get_int_one_million_request(
int_path=int_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_one_million.metadata = {"url": "/paths/int/1000000/{intPath}"} # type: ignore
@distributed_trace
def get_int_negative_one_million(self, **kwargs: Any) -> None:
"""Get '-1000000' integer value.
:keyword int_path: '-1000000' integer value. The default value is -1000000. Note that
overriding this default value may result in unsupported behavior.
:paramtype int_path: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_path = kwargs.pop("int_path", -1000000) # type: int
request = build_paths_get_int_negative_one_million_request(
int_path=int_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_negative_one_million.metadata = {"url": "/paths/int/-1000000/{intPath}"} # type: ignore
@distributed_trace
def get_ten_billion(self, **kwargs: Any) -> None:
"""Get '10000000000' 64 bit integer value.
:keyword long_path: '10000000000' 64 bit integer value. The default value is 10000000000. Note
that overriding this default value may result in unsupported behavior.
:paramtype long_path: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_path = kwargs.pop("long_path", 10000000000) # type: int
request = build_paths_get_ten_billion_request(
long_path=long_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_ten_billion.metadata = {"url": "/paths/long/10000000000/{longPath}"} # type: ignore
@distributed_trace
def get_negative_ten_billion(self, **kwargs: Any) -> None:
"""Get '-10000000000' 64 bit integer value.
:keyword long_path: '-10000000000' 64 bit integer value. The default value is -10000000000.
Note that overriding this default value may result in unsupported behavior.
:paramtype long_path: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_path = kwargs.pop("long_path", -10000000000) # type: int
request = build_paths_get_negative_ten_billion_request(
long_path=long_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_negative_ten_billion.metadata = {"url": "/paths/long/-10000000000/{longPath}"} # type: ignore
@distributed_trace
def float_scientific_positive(self, **kwargs: Any) -> None:
"""Get '1.034E+20' numeric value.
        :keyword float_path: '1.034E+20' numeric value. The default value is 103400000000000000000. Note
that overriding this default value may result in unsupported behavior.
:paramtype float_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_path = kwargs.pop("float_path", 103400000000000000000) # type: float
request = build_paths_float_scientific_positive_request(
float_path=float_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_positive.metadata = {"url": "/paths/float/1.034E+20/{floatPath}"} # type: ignore
@distributed_trace
def float_scientific_negative(self, **kwargs: Any) -> None:
"""Get '-1.034E-20' numeric value.
        :keyword float_path: '-1.034E-20' numeric value. The default value is -1.034e-20. Note that
overriding this default value may result in unsupported behavior.
:paramtype float_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_path = kwargs.pop("float_path", -1.034e-20) # type: float
request = build_paths_float_scientific_negative_request(
float_path=float_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_negative.metadata = {"url": "/paths/float/-1.034E-20/{floatPath}"} # type: ignore
@distributed_trace
def double_decimal_positive(self, **kwargs: Any) -> None:
"""Get '9999999.999' numeric value.
        :keyword double_path: '9999999.999' numeric value. The default value is 9999999.999. Note that
overriding this default value may result in unsupported behavior.
:paramtype double_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_path = kwargs.pop("double_path", 9999999.999) # type: float
request = build_paths_double_decimal_positive_request(
double_path=double_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_positive.metadata = {"url": "/paths/double/9999999.999/{doublePath}"} # type: ignore
@distributed_trace
def double_decimal_negative(self, **kwargs: Any) -> None:
"""Get '-9999999.999' numeric value.
:keyword double_path: '-9999999.999' numeric value. The default value is -9999999.999. Note that
overriding this default value may result in unsupported behavior.
:paramtype double_path: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_path = kwargs.pop("double_path", -9999999.999) # type: float
request = build_paths_double_decimal_negative_request(
double_path=double_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_negative.metadata = {"url": "/paths/double/-9999999.999/{doublePath}"} # type: ignore
@distributed_trace
def string_unicode(self, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value.
:keyword string_path: '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value. The default value is "啊齄丂狛狜隣郎隣兀﨩".
Note that overriding this default value may result in unsupported behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "啊齄丂狛狜隣郎隣兀﨩") # type: str
request = build_paths_string_unicode_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_unicode.metadata = {"url": "/paths/string/unicode/{stringPath}"} # type: ignore
@distributed_trace
def string_url_encoded(self, **kwargs: Any) -> None:
"""Get 'begin!*'();:@ &=+$,/?#[]end.
:keyword string_path: 'begin!*'();:@ &=+$,/?#[]end' url encoded string value. The default value
is "begin!*'();:@ &=+$,/?#[]end". Note that overriding this default value may result in
unsupported behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "begin!*'();:@ &=+$,/?#[]end") # type: str
request = build_paths_string_url_encoded_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_url_encoded.metadata = {"url": "/paths/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend/{stringPath}"} # type: ignore
@distributed_trace
def string_url_non_encoded(self, **kwargs: Any) -> None:
"""Get 'begin!*'();:@&=+$,end.
https://tools.ietf.org/html/rfc3986#appendix-A 'path' accept any 'pchar' not encoded.
:keyword string_path: 'begin!*'();:@&=+$,end' url encoded string value. The default value is
"begin!*'();:@&=+$,end". Note that overriding this default value may result in unsupported
behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "begin!*'();:@&=+$,end") # type: str
request = build_paths_string_url_non_encoded_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_url_non_encoded.metadata = {"url": "/paths/string/begin!*'();:@&=+$,end/{stringPath}"} # type: ignore
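# Hedged sketch of the encoding distinction exercised by the two methods above
# (illustration only): urllib.parse.quote leaves RFC 3986 'pchar' characters
# alone when they are passed as safe, which is why "begin!*'();:@&=+$,end" can
# travel in a path segment unencoded while the bracketed variant cannot.
#
#     from urllib.parse import quote
#
#     pchar_safe = "!$&'()*+,;=:@"  # sub-delims plus ":" and "@"
#     raw = "begin!*'();:@&=+$,end"
#     assert quote(raw, safe=pchar_safe) == raw  # nothing needs encoding
#     assert quote("begin!*'();:@ &=+$,/?#[]end", safe=pchar_safe) != "begin!*'();:@ &=+$,/?#[]end"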
@distributed_trace
def string_empty(self, **kwargs: Any) -> None:
"""Get ''.
:keyword string_path: '' string value. The default value is "". Note that overriding this
default value may result in unsupported behavior.
:paramtype string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_path = kwargs.pop("string_path", "") # type: str
request = build_paths_string_empty_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_empty.metadata = {"url": "/paths/string/empty/{stringPath}"} # type: ignore
@distributed_trace
def string_null(self, string_path: str, **kwargs: Any) -> None:
"""Get null (should throw).
:param string_path: null string value.
:type string_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_string_null_request(
string_path=string_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_null.metadata = {"url": "/paths/string/null/{stringPath}"} # type: ignore
@distributed_trace
def enum_valid(self, enum_path: str, **kwargs: Any) -> None:
"""Get using uri with 'green color' in path parameter.
:param enum_path: send the value green. Possible values are: "red color", "green color", and
"blue color".
:type enum_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_enum_valid_request(
enum_path=enum_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_valid.metadata = {"url": "/paths/enum/green%20color/{enumPath}"} # type: ignore
@distributed_trace
def enum_null(self, enum_path: str, **kwargs: Any) -> None:
"""Get null (should throw on the client before the request is sent on wire).
:param enum_path: send null (should throw). Possible values are: "red color", "green color", and
"blue color".
:type enum_path: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_enum_null_request(
enum_path=enum_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_null.metadata = {"url": "/paths/string/null/{enumPath}"} # type: ignore
@distributed_trace
def byte_multi_byte(self, byte_path: bytearray, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:param byte_path: '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:type byte_path: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_byte_multi_byte_request(
byte_path=byte_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_multi_byte.metadata = {"url": "/paths/byte/multibyte/{bytePath}"} # type: ignore
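# Hedged sketch (illustration only): the multibyte test value is just the
# UTF-8 encoding of the string, handed over as a bytearray.
#
#     multi_byte = bytearray("啊齄丂狛狜隣郎隣兀﨩", encoding="utf-8")
#     # client.paths.byte_multi_byte(multi_byte)  # assumed client instance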
@distributed_trace
def byte_empty(self, **kwargs: Any) -> None:
"""Get '' as byte array.
:keyword byte_path: '' as byte array. The default value is bytearray("", encoding="utf-8").
Note that overriding this default value may result in unsupported behavior.
:paramtype byte_path: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
byte_path = kwargs.pop("byte_path", bytearray("", encoding="utf-8")) # type: bytearray
request = build_paths_byte_empty_request(
byte_path=byte_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_empty.metadata = {"url": "/paths/byte/empty/{bytePath}"} # type: ignore
@distributed_trace
def byte_null(self, byte_path: bytearray, **kwargs: Any) -> None:
"""Get null as byte array (should throw).
:param byte_path: null as byte array (should throw).
:type byte_path: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_byte_null_request(
byte_path=byte_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_null.metadata = {"url": "/paths/byte/null/{bytePath}"} # type: ignore
@distributed_trace
def date_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01' as date.
:keyword date_path: '2012-01-01' as date. The default value is "2012-01-01". Note that
overriding this default value may result in unsupported behavior.
:paramtype date_path: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_path = kwargs.pop("date_path", "2012-01-01") # type: datetime.date
request = build_paths_date_valid_request(
date_path=date_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_valid.metadata = {"url": "/paths/date/2012-01-01/{datePath}"} # type: ignore
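# Hedged sketch (illustration only): the wire format for the date default
# above is plain ISO 8601, so a datetime.date round-trips via isoformat().
#
#     import datetime
#     assert datetime.date(2012, 1, 1).isoformat() == "2012-01-01"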
@distributed_trace
def date_null(self, date_path: datetime.date, **kwargs: Any) -> None:
"""Get null as date - this should throw or be unusable on the client side, depending on date
representation.
:param date_path: null as date (should throw).
:type date_path: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_date_null_request(
date_path=date_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_null.metadata = {"url": "/paths/date/null/{datePath}"} # type: ignore
@distributed_trace
def date_time_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01T01:01:01Z' as date-time.
:keyword date_time_path: '2012-01-01T01:01:01Z' as date-time. The default value is
"2012-01-01T01:01:01Z". Note that overriding this default value may result in unsupported
behavior.
:paramtype date_time_path: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_time_path = kwargs.pop("date_time_path", "2012-01-01T01:01:01Z") # type: datetime.datetime
request = build_paths_date_time_valid_request(
date_time_path=date_time_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_valid.metadata = {"url": "/paths/datetime/2012-01-01T01%3A01%3A01Z/{dateTimePath}"} # type: ignore
@distributed_trace
def date_time_null(self, date_time_path: datetime.datetime, **kwargs: Any) -> None:
"""Get null as date-time, should be disallowed or throw depending on representation of date-time.
:param date_time_path: null as date-time.
:type date_time_path: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_date_time_null_request(
date_time_path=date_time_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [400]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_null.metadata = {"url": "/paths/datetime/null/{dateTimePath}"} # type: ignore
@distributed_trace
def base64_url(self, base64_url_path: bytes, **kwargs: Any) -> None:
"""Get 'lorem' encoded value as 'bG9yZW0' (base64url).
:param base64_url_path: base64url encoded value.
:type base64_url_path: bytes
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_base64_url_request(
base64_url_path=base64_url_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
base64_url.metadata = {"url": "/paths/string/bG9yZW0/{base64UrlPath}"} # type: ignore
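# Hedged sketch (illustration only): 'bG9yZW0' is the unpadded base64url
# encoding of b'lorem', reproducible with the standard library.
#
#     import base64
#     assert base64.urlsafe_b64encode(b"lorem").rstrip(b"=") == b"bG9yZW0"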
@distributed_trace
def array_csv_in_path(self, array_path: List[str], **kwargs: Any) -> None:
"""Get an array of string ['ArrayPath1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
csv-array format.
:param array_path: an array of string ['ArrayPath1', 'begin!*'();:@ &=+$,/?#[]end', null, '']
using the csv-array format.
:type array_path: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_array_csv_in_path_request(
array_path=array_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_csv_in_path.metadata = {"url": "/paths/array/ArrayPath1%2cbegin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend%2c%2c/{arrayPath}"} # type: ignore
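# Hedged sketch (illustration only): the csv-array format joins the elements
# with commas, with None and '' both collapsing to empty slots; the encoded
# URL in the metadata above is the percent-encoded form of this join.
#
#     values = ["ArrayPath1", "begin!*'();:@ &=+$,/?#[]end", None, ""]
#     csv = ",".join("" if v is None else v for v in values)
#     assert csv == "ArrayPath1,begin!*'();:@ &=+$,/?#[]end,,"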
@distributed_trace
def unix_time_url(self, unix_time_url_path: datetime.datetime, **kwargs: Any) -> None:
"""Get the date 2016-04-13 encoded value as '1460505600' (Unix time).
:param unix_time_url_path: Unix time encoded value.
:type unix_time_url_path: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_paths_unix_time_url_request(
unix_time_url_path=unix_time_url_path,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
unix_time_url.metadata = {"url": "/paths/int/1460505600/{unixTimeUrlPath}"} # type: ignore
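# Hedged sketch (illustration only): 1460505600 is midnight UTC on 2016-04-13
# expressed as Unix time.
#
#     import datetime
#     dt = datetime.datetime(2016, 4, 13, tzinfo=datetime.timezone.utc)
#     assert int(dt.timestamp()) == 1460505600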
class QueriesOperations(object):
"""QueriesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
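# Hedged usage sketch (illustration only; the client class name and query
# parameter name are assumptions): this operations group is not constructed
# directly; it is reached as an attribute of the service client, as the class
# docstring above states.
#
#     client = AutoRestUrlTestService(base_url="http://localhost:3000")
#     client.queries.get_boolean_true()  # roughly GET /queries/bool/true?boolQuery=true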
@distributed_trace
def get_boolean_true(self, **kwargs: Any) -> None:
"""Get true Boolean value on path.
:keyword bool_query: true boolean value. The default value is True. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_query: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_query = kwargs.pop("bool_query", True) # type: bool
request = build_queries_get_boolean_true_request(
bool_query=bool_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_true.metadata = {"url": "/queries/bool/true"} # type: ignore
@distributed_trace
def get_boolean_false(self, **kwargs: Any) -> None:
"""Get false Boolean value on path.
:keyword bool_query: false boolean value. The default value is False. Note that overriding this
default value may result in unsupported behavior.
:paramtype bool_query: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
bool_query = kwargs.pop("bool_query", False) # type: bool
request = build_queries_get_boolean_false_request(
bool_query=bool_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_false.metadata = {"url": "/queries/bool/false"} # type: ignore
@distributed_trace
def get_boolean_null(self, *, bool_query: Optional[bool] = None, **kwargs: Any) -> None:
"""Get null Boolean value on query (query string should be absent).
:keyword bool_query: null boolean value.
:paramtype bool_query: bool
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_get_boolean_null_request(
bool_query=bool_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_boolean_null.metadata = {"url": "/queries/bool/null"} # type: ignore
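# Hedged sketch (illustration only): for the *_null variants the optional
# keyword defaults to None, and None is expected to produce no query string
# at all rather than a literal "null".
#
#     client.queries.get_boolean_null()                 # no query string sent
#     client.queries.get_boolean_null(bool_query=None)  # same request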
@distributed_trace
def get_int_one_million(self, **kwargs: Any) -> None:
"""Get '1000000' integer value.
:keyword int_query: '1000000' integer value. The default value is 1000000. Note that overriding
this default value may result in unsupported behavior.
:paramtype int_query: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_query = kwargs.pop("int_query", 1000000) # type: int
request = build_queries_get_int_one_million_request(
int_query=int_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_one_million.metadata = {"url": "/queries/int/1000000"} # type: ignore
@distributed_trace
def get_int_negative_one_million(self, **kwargs: Any) -> None:
"""Get '-1000000' integer value.
:keyword int_query: '-1000000' integer value. The default value is -1000000. Note that
overriding this default value may result in unsupported behavior.
:paramtype int_query: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
int_query = kwargs.pop("int_query", -1000000) # type: int
request = build_queries_get_int_negative_one_million_request(
int_query=int_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_negative_one_million.metadata = {"url": "/queries/int/-1000000"} # type: ignore
@distributed_trace
def get_int_null(self, *, int_query: Optional[int] = None, **kwargs: Any) -> None:
"""Get null integer value (no query parameter).
:keyword int_query: null integer value.
:paramtype int_query: int
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_get_int_null_request(
int_query=int_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_int_null.metadata = {"url": "/queries/int/null"} # type: ignore
@distributed_trace
def get_ten_billion(self, **kwargs: Any) -> None:
"""Get '10000000000' 64 bit integer value.
:keyword long_query: '10000000000' 64 bit integer value. The default value is 10000000000. Note
that overriding this default value may result in unsupported behavior.
:paramtype long_query: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_query = kwargs.pop("long_query", 10000000000) # type: int
request = build_queries_get_ten_billion_request(
long_query=long_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_ten_billion.metadata = {"url": "/queries/long/10000000000"} # type: ignore
@distributed_trace
def get_negative_ten_billion(self, **kwargs: Any) -> None:
"""Get '-10000000000' 64 bit integer value.
:keyword long_query: '-10000000000' 64 bit integer value. The default value is -10000000000.
Note that overriding this default value may result in unsupported behavior.
:paramtype long_query: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
long_query = kwargs.pop("long_query", -10000000000) # type: int
request = build_queries_get_negative_ten_billion_request(
long_query=long_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_negative_ten_billion.metadata = {"url": "/queries/long/-10000000000"} # type: ignore
@distributed_trace
def get_long_null(self, *, long_query: Optional[int] = None, **kwargs: Any) -> None:
"""Get 'null 64 bit integer value (no query param in uri).
:keyword long_query: null 64 bit integer value.
:paramtype long_query: long
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_get_long_null_request(
long_query=long_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_long_null.metadata = {"url": "/queries/long/null"} # type: ignore
@distributed_trace
def float_scientific_positive(self, **kwargs: Any) -> None:
"""Get '1.034E+20' numeric value.
:keyword float_query: '1.034E+20' numeric value. The default value is 103400000000000000000.
Note that overriding this default value may result in unsupported behavior.
:paramtype float_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_query = kwargs.pop("float_query", 103400000000000000000) # type: float
request = build_queries_float_scientific_positive_request(
float_query=float_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_positive.metadata = {"url": "/queries/float/1.034E+20"} # type: ignore
@distributed_trace
def float_scientific_negative(self, **kwargs: Any) -> None:
"""Get '-1.034E-20' numeric value.
:keyword float_query: '-1.034E-20' numeric value. The default value is -1.034e-20. Note that
overriding this default value may result in unsupported behavior.
:paramtype float_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
float_query = kwargs.pop("float_query", -1.034e-20) # type: float
request = build_queries_float_scientific_negative_request(
float_query=float_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_scientific_negative.metadata = {"url": "/queries/float/-1.034E-20"} # type: ignore
@distributed_trace
def float_null(self, *, float_query: Optional[float] = None, **kwargs: Any) -> None:
"""Get null numeric value (no query parameter).
:keyword float_query: null numeric value.
:paramtype float_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_float_null_request(
float_query=float_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
float_null.metadata = {"url": "/queries/float/null"} # type: ignore
@distributed_trace
def double_decimal_positive(self, **kwargs: Any) -> None:
"""Get '9999999.999' numeric value.
:keyword double_query: '9999999.999' numeric value. The default value is 9999999.999. Note that
overriding this default value may result in unsupported behavior.
:paramtype double_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_query = kwargs.pop("double_query", 9999999.999) # type: float
request = build_queries_double_decimal_positive_request(
double_query=double_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_positive.metadata = {"url": "/queries/double/9999999.999"} # type: ignore
@distributed_trace
def double_decimal_negative(self, **kwargs: Any) -> None:
"""Get '-9999999.999' numeric value.
:keyword double_query: '-9999999.999' numeric value. The default value is -9999999.999. Note
that overriding this default value may result in unsupported behavior.
:paramtype double_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
double_query = kwargs.pop("double_query", -9999999.999) # type: float
request = build_queries_double_decimal_negative_request(
double_query=double_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_decimal_negative.metadata = {"url": "/queries/double/-9999999.999"} # type: ignore
@distributed_trace
def double_null(self, *, double_query: Optional[float] = None, **kwargs: Any) -> None:
"""Get null numeric value (no query parameter).
:keyword double_query: null numeric value.
:paramtype double_query: float
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_double_null_request(
double_query=double_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
double_null.metadata = {"url": "/queries/double/null"} # type: ignore
@distributed_trace
def string_unicode(self, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value.
:keyword string_query: '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value. The default value is "啊齄丂狛狜隣郎隣兀﨩".
Note that overriding this default value may result in unsupported behavior.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_query = kwargs.pop("string_query", "啊齄丂狛狜隣郎隣兀﨩") # type: str
request = build_queries_string_unicode_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_unicode.metadata = {"url": "/queries/string/unicode/"} # type: ignore
@distributed_trace
def string_url_encoded(self, **kwargs: Any) -> None:
"""Get 'begin!*'();:@ &=+$,/?#[]end.
:keyword string_query: 'begin!*'();:@ &=+$,/?#[]end' url encoded string value. The default
value is "begin!*'();:@ &=+$,/?#[]end". Note that overriding this default value may result in
unsupported behavior.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_query = kwargs.pop("string_query", "begin!*'();:@ &=+$,/?#[]end") # type: str
request = build_queries_string_url_encoded_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_url_encoded.metadata = {"url": "/queries/string/begin%21%2A%27%28%29%3B%3A%40%20%26%3D%2B%24%2C%2F%3F%23%5B%5Dend"} # type: ignore
@distributed_trace
def string_empty(self, **kwargs: Any) -> None:
"""Get ''.
:keyword string_query: '' string value. The default value is "". Note that overriding this
default value may result in unsupported behavior.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
string_query = kwargs.pop("string_query", "") # type: str
request = build_queries_string_empty_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_empty.metadata = {"url": "/queries/string/empty"} # type: ignore
@distributed_trace
def string_null(self, *, string_query: Optional[str] = None, **kwargs: Any) -> None:
"""Get null (no query parameter in url).
:keyword string_query: null string value.
:paramtype string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_string_null_request(
string_query=string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
string_null.metadata = {"url": "/queries/string/null"} # type: ignore
@distributed_trace
def enum_valid(self, *, enum_query: Optional[str] = None, **kwargs: Any) -> None:
"""Get using uri with query parameter 'green color'.
:keyword enum_query: 'green color' enum value. Possible values are: "red color", "green color",
and "blue color".
:paramtype enum_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_enum_valid_request(
enum_query=enum_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_valid.metadata = {"url": "/queries/enum/green%20color"} # type: ignore
@distributed_trace
def enum_null(self, *, enum_query: Optional[str] = None, **kwargs: Any) -> None:
"""Get null (no query parameter in url).
:keyword enum_query: null string value. Possible values are: "red color", "green color", and
"blue color".
:paramtype enum_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_enum_null_request(
enum_query=enum_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
enum_null.metadata = {"url": "/queries/enum/null"} # type: ignore
@distributed_trace
def byte_multi_byte(self, *, byte_query: Optional[bytearray] = None, **kwargs: Any) -> None:
"""Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:keyword byte_query: '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array.
:paramtype byte_query: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_byte_multi_byte_request(
byte_query=byte_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_multi_byte.metadata = {"url": "/queries/byte/multibyte"} # type: ignore
@distributed_trace
def byte_empty(self, **kwargs: Any) -> None:
"""Get '' as byte array.
:keyword byte_query: '' as byte array. The default value is bytearray("", encoding="utf-8").
Note that overriding this default value may result in unsupported behavior.
:paramtype byte_query: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
byte_query = kwargs.pop("byte_query", bytearray("", encoding="utf-8")) # type: bytearray
request = build_queries_byte_empty_request(
byte_query=byte_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_empty.metadata = {"url": "/queries/byte/empty"} # type: ignore
@distributed_trace
def byte_null(self, *, byte_query: Optional[bytearray] = None, **kwargs: Any) -> None:
"""Get null as byte array (no query parameters in uri).
:keyword byte_query: null as byte array (no query parameters in uri).
:paramtype byte_query: bytearray
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_byte_null_request(
byte_query=byte_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
byte_null.metadata = {"url": "/queries/byte/null"} # type: ignore
@distributed_trace
def date_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01' as date.
:keyword date_query: '2012-01-01' as date. The default value is "2012-01-01". Note that
overriding this default value may result in unsupported behavior.
:paramtype date_query: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_query = kwargs.pop("date_query", "2012-01-01") # type: datetime.date
request = build_queries_date_valid_request(
date_query=date_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_valid.metadata = {"url": "/queries/date/2012-01-01"} # type: ignore
@distributed_trace
def date_null(self, *, date_query: Optional[datetime.date] = None, **kwargs: Any) -> None:
"""Get null as date - this should result in no query parameters in uri.
:keyword date_query: null as date (no query parameters in uri).
:paramtype date_query: ~datetime.date
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_date_null_request(
date_query=date_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_null.metadata = {"url": "/queries/date/null"} # type: ignore
@distributed_trace
def date_time_valid(self, **kwargs: Any) -> None:
"""Get '2012-01-01T01:01:01Z' as date-time.
:keyword date_time_query: '2012-01-01T01:01:01Z' as date-time. The default value is
"2012-01-01T01:01:01Z". Note that overriding this default value may result in unsupported
behavior.
:paramtype date_time_query: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
date_time_query = kwargs.pop("date_time_query", "2012-01-01T01:01:01Z") # type: datetime.datetime
request = build_queries_date_time_valid_request(
date_time_query=date_time_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_valid.metadata = {"url": "/queries/datetime/2012-01-01T01%3A01%3A01Z"} # type: ignore
@distributed_trace
def date_time_null(self, *, date_time_query: Optional[datetime.datetime] = None, **kwargs: Any) -> None:
"""Get null as date-time, should result in no query parameters in uri.
:keyword date_time_query: null as date-time (no query parameters).
:paramtype date_time_query: ~datetime.datetime
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_date_time_null_request(
date_time_query=date_time_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
date_time_null.metadata = {"url": "/queries/datetime/null"} # type: ignore
@distributed_trace
def array_string_csv_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
csv-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end', null,
''] using the csv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_csv_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_csv_valid.metadata = {"url": "/queries/array/csv/string/valid"} # type: ignore
@distributed_trace
def array_string_csv_null(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get a null array of string using the csv-array format.
:keyword array_query: a null array of string using the csv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_csv_null_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_csv_null.metadata = {"url": "/queries/array/csv/string/null"} # type: ignore
@distributed_trace
def array_string_csv_empty(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an empty array [] of string using the csv-array format.
:keyword array_query: an empty array [] of string using the csv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_csv_empty_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_csv_empty.metadata = {"url": "/queries/array/csv/string/empty"} # type: ignore
@distributed_trace
def array_string_no_collection_format_empty(
self, *, array_query: Optional[List[str]] = None, **kwargs: Any
) -> None:
"""Array query has no defined collection format, should default to csv. Pass in ['hello', 'nihao',
'bonjour'] for the 'arrayQuery' parameter to the service.
:keyword array_query: Array-typed query parameter. Pass in ['hello', 'nihao', 'bonjour'].
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_no_collection_format_empty_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_no_collection_format_empty.metadata = {"url": "/queries/array/none/string/empty"} # type: ignore
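    # Illustrative note (not part of the generated code): with no
    # collectionFormat defined in the swagger, serialization falls back to
    # csv, so ["hello", "nihao", "bonjour"] goes on the wire as
    # ?arrayQuery=hello,nihao,bonjour.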
@distributed_trace
def array_string_ssv_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
ssv-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
''] using the ssv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_ssv_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_ssv_valid.metadata = {"url": "/queries/array/ssv/string/valid"} # type: ignore
@distributed_trace
def array_string_tsv_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
tsv-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
''] using the tsv-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_tsv_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_tsv_valid.metadata = {"url": "/queries/array/tsv/string/valid"} # type: ignore
@distributed_trace
def array_string_pipes_valid(self, *, array_query: Optional[List[str]] = None, **kwargs: Any) -> None:
"""Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
pipes-array format.
:keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
''] using the pipes-array format.
:paramtype array_query: list[str]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_queries_array_string_pipes_valid_request(
array_query=array_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
array_string_pipes_valid.metadata = {"url": "/queries/array/pipes/string/valid"} # type: ignore
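    # Illustrative summary (not part of the generated client): the methods
    # above differ only in the Swagger collectionFormat used to serialize
    # ``array_query``.  For values = ["hello", "nihao", "bonjour"], before
    # percent-encoding the query strings look like:
    #
    #     csv   -> ?arrayQuery=hello,nihao,bonjour
    #     ssv   -> ?arrayQuery=hello nihao bonjour
    #     tsv   -> ?arrayQuery=hello\tnihao\tbonjour
    #     pipes -> ?arrayQuery=hello|nihao|bonjour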
class PathItemsOperations(object):
"""PathItemsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get_all_with_values(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery='globalStringQuery',
pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: A string value 'pathItemStringQuery' that appears as a query
parameter.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain value 'localStringQuery'.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_all_with_values_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_all_with_values.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/pathItemStringQuery/localStringQuery"} # type: ignore
@distributed_trace
def get_global_query_null(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery=null,
pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery'.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: A string value 'pathItemStringQuery' that appears as a query
parameter.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain value 'localStringQuery'.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_global_query_null_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_global_query_null.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/localStringQuery"} # type: ignore
@distributed_trace
def get_global_and_local_query_null(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery=null,
pathItemStringQuery='pathItemStringQuery', localStringQuery=null.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: A string value 'pathItemStringQuery' that appears as a query
parameter.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain null value.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_global_and_local_query_null_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_global_and_local_query_null.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/null/pathItemStringQuery/null"} # type: ignore
@distributed_trace
def get_local_path_item_query_null(
self,
path_item_string_path: str,
local_string_path: str,
*,
path_item_string_query: Optional[str] = None,
local_string_query: Optional[str] = None,
**kwargs: Any
) -> None:
"""send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath',
localStringPath='localStringPath', globalStringQuery='globalStringQuery',
pathItemStringQuery=null, localStringQuery=null.
:param path_item_string_path: A string value 'pathItemStringPath' that appears in the path.
:type path_item_string_path: str
:param local_string_path: should contain value 'localStringPath'.
:type local_string_path: str
:keyword path_item_string_query: should contain value null.
:paramtype path_item_string_query: str
:keyword local_string_query: should contain value null.
:paramtype local_string_query: str
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_path_items_get_local_path_item_query_null_request(
path_item_string_path=path_item_string_path,
global_string_path=self._config.global_string_path,
local_string_path=local_string_path,
path_item_string_query=path_item_string_query,
global_string_query=self._config.global_string_query,
local_string_query=local_string_query,
)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)
if cls:
return cls(pipeline_response, None, {})
get_local_path_item_query_null.metadata = {"url": "/pathitem/nullable/globalStringPath/{globalStringPath}/pathItemStringPath/{pathItemStringPath}/localStringPath/{localStringPath}/globalStringQuery/null/null"} # type: ignore
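    # Hedged usage sketch.  The client class name and the ``path_items``
    # attribute are assumptions based on autorest's usual naming, not taken
    # from this file:
    #
    #     client = AutoRestUrlTestServiceClient(
    #         global_string_path="globalStringPath",
    #         global_string_query="globalStringQuery",
    #     )
    #     client.path_items.get_all_with_values(
    #         path_item_string_path="pathItemStringPath",
    #         local_string_path="localStringPath",
    #         path_item_string_query="pathItemStringQuery",
    #         local_string_query="localStringQuery",
    #     )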
| [((28, 4, 28, 16), 'typing.TypeVar', 'TypeVar', ({(28, 12, 28, 15): '"""T"""'}, {}), "('T')", False, 'from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar\n'), ((31, 14, 31, 26), 'msrest.Serializer', 'Serializer', ({}, {}), '()', False, 'from msrest import Serializer\n'), ((50, 11, 50, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((69, 11, 69, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((88, 11, 88, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((107, 11, 107, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((126, 11, 126, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((145, 11, 145, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((164, 11, 164, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((183, 11, 183, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((202, 11, 202, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((221, 11, 221, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((240, 11, 240, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((259, 11, 259, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((278, 11, 278, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((297, 11, 297, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((314, 11, 314, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((331, 11, 331, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((348, 11, 348, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((365, 11, 365, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((384, 11, 384, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((401, 11, 401, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((420, 11, 420, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((437, 11, 437, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((456, 11, 456, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((473, 11, 473, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((490, 11, 490, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((509, 11, 509, 82), 'azure.core.rest.HttpRequest', 
'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((526, 11, 526, 82), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((544, 11, 544, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((562, 11, 562, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((579, 11, 579, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((597, 11, 597, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((615, 11, 615, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((632, 11, 632, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((650, 11, 650, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((668, 11, 668, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((685, 11, 685, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((703, 11, 703, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((721, 11, 721, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((738, 11, 738, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((756, 11, 756, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((774, 11, 774, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((791, 11, 791, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((809, 11, 809, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((827, 11, 827, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((845, 11, 845, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((862, 11, 862, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((879, 11, 879, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((896, 11, 896, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((913, 11, 913, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((931, 11, 931, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((948, 11, 948, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((966, 11, 966, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((983, 11, 983, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import 
HttpRequest\n'), ((1001, 11, 1001, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1020, 11, 1020, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1039, 11, 1039, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1058, 11, 1058, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1077, 11, 1077, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1096, 11, 1096, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1115, 11, 1115, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1134, 11, 1134, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1153, 11, 1153, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1192, 11, 1192, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1231, 11, 1231, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1270, 11, 1270, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1309, 11, 1309, 107), 'azure.core.rest.HttpRequest', 'HttpRequest', (), '', False, 'from azure.core.rest import HttpRequest\n'), ((1356, 12, 1356, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1357, 18, 1357, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1390, 12, 1390, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1391, 18, 1391, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1424, 12, 1424, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1425, 18, 1425, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1458, 12, 1458, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1459, 18, 1459, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, 
ResourceNotFoundError, map_error\n'), ((1492, 12, 1492, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1493, 18, 1493, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1526, 12, 1526, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1527, 18, 1527, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1560, 12, 1560, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1561, 18, 1561, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1594, 12, 1594, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1595, 18, 1595, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1628, 12, 1628, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1629, 18, 1629, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1662, 12, 1662, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1663, 18, 1663, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1696, 12, 1696, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1697, 18, 1697, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1731, 12, 1731, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1732, 18, 
1732, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1768, 12, 1768, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1769, 18, 1769, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1802, 12, 1802, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1803, 18, 1803, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1833, 12, 1833, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1834, 18, 1834, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1865, 12, 1865, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1866, 18, 1866, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1897, 12, 1897, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1898, 18, 1898, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1928, 12, 1928, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1929, 18, 1929, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1962, 12, 1962, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1963, 18, 1963, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1993, 12, 1993, 95), 
'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((1994, 18, 1994, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2027, 12, 2027, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2028, 18, 2028, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2059, 12, 2059, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2060, 18, 2060, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2094, 12, 2094, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2095, 18, 2095, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2125, 12, 2125, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2126, 18, 2126, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2156, 12, 2156, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2157, 18, 2157, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2189, 12, 2189, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2190, 18, 2190, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2220, 12, 2220, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2221, 18, 2221, 54), 'azure.core.exceptions.HttpResponseError', 
'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2273, 12, 2273, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2274, 18, 2274, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2307, 12, 2307, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2308, 18, 2308, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2338, 12, 2338, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2339, 18, 2339, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2372, 12, 2372, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2373, 18, 2373, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2406, 12, 2406, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2407, 18, 2407, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2437, 12, 2437, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2438, 18, 2438, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2471, 12, 2471, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2472, 18, 2472, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2505, 12, 2505, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from 
azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2506, 18, 2506, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2536, 12, 2536, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2537, 18, 2537, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2570, 12, 2570, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2571, 18, 2571, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2604, 12, 2604, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2605, 18, 2605, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2635, 12, 2635, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2636, 18, 2636, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2669, 12, 2669, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2670, 18, 2670, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2703, 12, 2703, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2704, 18, 2704, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2734, 12, 2734, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2735, 18, 2735, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import 
ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2768, 12, 2768, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2769, 18, 2769, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2803, 12, 2803, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2804, 18, 2804, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2837, 12, 2837, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2838, 18, 2838, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2868, 12, 2868, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2869, 18, 2869, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2900, 12, 2900, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2901, 18, 2901, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2932, 12, 2932, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2933, 18, 2933, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2963, 12, 2963, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2964, 18, 2964, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2997, 12, 2997, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, 
ResourceExistsError, ResourceNotFoundError, map_error\n'), ((2998, 18, 2998, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3028, 12, 3028, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3029, 18, 3029, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3062, 12, 3062, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3063, 18, 3063, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3093, 12, 3093, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3094, 18, 3094, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3128, 12, 3128, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3129, 18, 3129, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3159, 12, 3159, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3160, 18, 3160, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3192, 12, 3192, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3193, 18, 3193, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3223, 12, 3223, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3224, 18, 3224, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, 
ResourceNotFoundError, map_error\n'), ((3254, 12, 3254, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3255, 18, 3255, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3288, 12, 3288, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3289, 18, 3289, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3321, 12, 3321, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3322, 18, 3322, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3354, 12, 3354, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3355, 18, 3355, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3387, 12, 3387, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3388, 18, 3388, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3459, 12, 3459, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3460, 18, 3460, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3512, 12, 3512, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3513, 18, 3513, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3565, 12, 3565, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3566, 18, 
3566, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3617, 12, 3617, 95), 'azure.core.exceptions.map_error', 'map_error', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n'), ((3618, 18, 3618, 54), 'azure.core.exceptions.HttpResponseError', 'HttpResponseError', (), '', False, 'from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error\n')] |
reddit/baseplate.py-upgrader | baseplate_py_upgrader/docker.py | 2e4b019de7c22e2d2467eba488867fe81d7d5fc1 | import logging
import re
from pathlib import Path
from typing import Match
logger = logging.getLogger(__name__)
IMAGE_RE = re.compile(
r"/baseplate-py:(?P<version>[0-9.]+(\.[0-9]+)?)-py(?P<python>[23]\.[0-9]+)-(?P<distro>(bionic|buster))(?P<repo>-artifactory)?(?P<dev>-dev)?"
)
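# Illustrative match (hypothetical image tag):
#   "/baseplate-py:1.5-py3.8-bionic-artifactory-dev"
# parses as version="1.5", python="3.8", distro="bionic",
# repo="-artifactory", dev="-dev".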
def upgrade_docker_image_references_in_file(target_series: str, filepath: Path) -> None:
major, minor = target_series.split(".")
if major == "0":
image_series = f"{major}.{minor}"
else:
image_series = f"{major}"
force_distro = None
force_dev = False
force_repo = None
if major == "2":
force_distro = "buster"
force_dev = True
force_repo = ""
def replace_docker_image_reference(m: Match[str]) -> str:
distro = force_distro or m["distro"]
repo = force_repo if force_repo is not None else m["repo"]
dev = "-dev" if force_dev else m["dev"]
return f"/baseplate-py:{image_series}-py{m['python']}-{distro}{repo or ''}{dev or ''}"
file_content = filepath.read_text()
    # Pattern.sub() takes (repl, string, count); flags such as re.MULTILINE
    # must be given to re.compile() instead.  Passing re.MULTILINE (== 8) as
    # the third argument would silently cap substitutions at 8, so it is dropped.
    changed = IMAGE_RE.sub(replace_docker_image_reference, file_content)
if file_content == changed:
return
with filepath.open("w") as f:
logger.info("Updated Docker image references in %s", filepath)
f.write(changed)
def upgrade_docker_image_references(target_series: str, root: Path) -> None:
for dockerfile in root.glob("**/Dockerfile*"):
upgrade_docker_image_references_in_file(target_series, dockerfile)
dronefile = root / ".drone.yml"
if dronefile.exists():
upgrade_docker_image_references_in_file(target_series, dronefile)
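# Minimal usage sketch (assumes the current directory is a repo checkout):
#
#     from pathlib import Path
#     upgrade_docker_image_references("2.0", Path("."))
#
# For a 2.x target this rewrites every Dockerfile* and .drone.yml to the
# "2" image series on buster "-dev" images, per the forcing logic above.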
| [((8, 9, 8, 36), 'logging.getLogger', 'logging.getLogger', ({(8, 27, 8, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((11, 11, 13, 1), 're.compile', 're.compile', ({(12, 4, 12, 144): '"""/baseplate-py:(?P<version>[0-9.]+(\\\\.[0-9]+)?)-py(?P<python>[23]\\\\.[0-9]+)-(?P<distro>(bionic|buster))(?P<repo>-artifactory)?(?P<dev>-dev)?"""'}, {}), "(\n '/baseplate-py:(?P<version>[0-9.]+(\\\\.[0-9]+)?)-py(?P<python>[23]\\\\.[0-9]+)-(?P<distro>(bionic|buster))(?P<repo>-artifactory)?(?P<dev>-dev)?'\n )", False, 'import re\n')] |
ArcherLuo233/election-s-prediction | model/swtz_ty.py | 9da72cb855f6d61f9cdec6e15f7ca832629ba51a | from sqlalchemy import Column, ForeignKey, Integer, String, Text
from model.base import Base
class SWTZ_TY(Base):
__tablename__ = 'swtz_ty'
    class_name = '商务团组-团员'  # "business delegation - delegation member"
foreign_key = 'swtz_id'
export_docx = False
export_handle_file = ['identity']
field = [
'id', 'nickname', 'job', 'id_card', 'phone', 'remark', 'identity'
]
combo_field = {
'identity': {
'exclude': False,
            'items': ['基层', '青年', '商界', '学界', '政界']  # grassroots, youth, business, academia, politics
}
}
template_start_row = 3
swtz_id = Column(Integer, ForeignKey('swtz.id'))
    nickname = Column(String(100), comment='姓名')  # name
    job = Column(String(100), comment='单位职务')  # employer and position
    id_card = Column(String(100), comment='身份证号')  # ID card number
    phone = Column(String(100), comment='联系电话')  # contact phone
    remark = Column(Text, comment='备注')  # remarks
    identity_ = Column('identity', String(100), comment='身份')  # identity
@property
def identity(self):
if self.identity_ is None:
return []
return self.identity_.split(' ')
@identity.setter
def identity(self, val):
if isinstance(val, list):
while '' in val:
val.remove('')
self.identity_ = ' '.join(val)
else:
self.identity_ = val
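# Minimal round-trip sketch for the identity property (illustrative only; no
# database access is needed, just the declarative model defined above):
def _identity_example():
    member = SWTZ_TY()
    member.identity = ['基层', '青年', '']  # blank entries are stripped
    assert member.identity_ == '基层 青年'
    assert member.identity == ['基层', '青年']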
| [((30, 13, 30, 43), 'sqlalchemy.Column', 'Column', (), '', False, 'from sqlalchemy import Column, ForeignKey, Integer, String, Text\n'), ((25, 30, 25, 51), 'sqlalchemy.ForeignKey', 'ForeignKey', ({(25, 41, 25, 50): '"""swtz.id"""'}, {}), "('swtz.id')", False, 'from sqlalchemy import Column, ForeignKey, Integer, String, Text\n'), ((26, 22, 26, 33), 'sqlalchemy.String', 'String', ({(26, 29, 26, 32): '100'}, {}), '(100)', False, 'from sqlalchemy import Column, ForeignKey, Integer, String, Text\n'), ((27, 17, 27, 28), 'sqlalchemy.String', 'String', ({(27, 24, 27, 27): '100'}, {}), '(100)', False, 'from sqlalchemy import Column, ForeignKey, Integer, String, Text\n'), ((28, 21, 28, 32), 'sqlalchemy.String', 'String', ({(28, 28, 28, 31): '100'}, {}), '(100)', False, 'from sqlalchemy import Column, ForeignKey, Integer, String, Text\n'), ((29, 19, 29, 30), 'sqlalchemy.String', 'String', ({(29, 26, 29, 29): '100'}, {}), '(100)', False, 'from sqlalchemy import Column, ForeignKey, Integer, String, Text\n'), ((31, 35, 31, 46), 'sqlalchemy.String', 'String', ({(31, 42, 31, 45): '100'}, {}), '(100)', False, 'from sqlalchemy import Column, ForeignKey, Integer, String, Text\n')] |
code-review-doctor/amy | amy/dashboard/tests/test_autoupdate_profile.py | 268c1a199510457891459f3ddd73fcce7fe2b974 | from django.urls import reverse
from consents.models import Consent, Term
from workshops.models import KnowledgeDomain, Person, Qualification
from workshops.tests.base import TestBase
class TestAutoUpdateProfile(TestBase):
def setUp(self):
self._setUpAirports()
self._setUpLessons()
self._setUpLanguages()
self.user = Person.objects.create_user(
username="user",
personal="",
family="",
email="[email protected]",
password="pass",
)
self.person_consent_required_terms(self.user)
Qualification.objects.create(person=self.user, lesson=self.git)
Qualification.objects.create(person=self.user, lesson=self.sql)
self.physics = KnowledgeDomain.objects.create(name="physics")
self.chemistry = KnowledgeDomain.objects.create(name="chemistry")
self.user.domains.add(self.physics)
self.user.languages.add(self.english)
self.user.languages.add(self.french)
self.client.login(username="user", password="pass")
def test_load_form(self):
rv = self.client.get(reverse("autoupdate_profile"))
self.assertEqual(rv.status_code, 200)
def test_update_profile(self):
term_slugs = [
"may-contact",
"may-publish-name",
"public-profile",
]
terms_by_term_slug = {
term.slug: term
for term in Term.objects.filter(slug__in=term_slugs)
.active()
.prefetch_active_options()
}
consent_data = {
f"consents-{slug}": terms_by_term_slug[slug].active_options[0].pk
for slug in term_slugs
}
data = {
"personal": "admin",
"middle": "",
"family": "Smith",
"email": "[email protected]",
"gender": Person.UNDISCLOSED,
"airport": self.airport_0_0.pk,
"github": "changed",
"twitter": "",
"url": "",
"username": "changed",
"affiliation": "",
"languages": [self.latin.pk, self.french.pk],
"domains": [self.chemistry.pk],
"lessons": [self.git.pk, self.matlab.pk],
"consents-person": self.user.pk,
**consent_data,
}
rv = self.client.post(reverse("autoupdate_profile"), data, follow=True)
self.assertEqual(rv.status_code, 200)
content = rv.content.decode("utf-8")
self.assertNotIn("Fix errors below", content)
self.user.refresh_from_db()
self.assertEqual(self.user.username, "user") # username is read-only
self.assertEqual(self.user.github, None) # github is read-only
self.assertEqual(self.user.family, "Smith")
self.assertEqual(set(self.user.lessons.all()), {self.git, self.matlab})
self.assertEqual(list(self.user.domains.all()), [self.chemistry])
self.assertEqual(set(self.user.languages.all()), {self.french, self.latin})
updated_consents_by_term_slug = {
consent.term.slug: consent
for consent in Consent.objects.filter(
term__slug__in=term_slugs, person=self.user
)
.active()
.select_related("term")
}
for slug in term_slugs:
self.assertEqual(
updated_consents_by_term_slug[slug].term_option.pk,
consent_data[f"consents-{slug}"],
)
| [((14, 20, 20, 9), 'workshops.models.Person.objects.create_user', 'Person.objects.create_user', (), '', False, 'from workshops.models import KnowledgeDomain, Person, Qualification\n'), ((24, 8, 24, 71), 'workshops.models.Qualification.objects.create', 'Qualification.objects.create', (), '', False, 'from workshops.models import KnowledgeDomain, Person, Qualification\n'), ((25, 8, 25, 71), 'workshops.models.Qualification.objects.create', 'Qualification.objects.create', (), '', False, 'from workshops.models import KnowledgeDomain, Person, Qualification\n'), ((27, 23, 27, 69), 'workshops.models.KnowledgeDomain.objects.create', 'KnowledgeDomain.objects.create', (), '', False, 'from workshops.models import KnowledgeDomain, Person, Qualification\n'), ((28, 25, 28, 73), 'workshops.models.KnowledgeDomain.objects.create', 'KnowledgeDomain.objects.create', (), '', False, 'from workshops.models import KnowledgeDomain, Person, Qualification\n'), ((37, 29, 37, 58), 'django.urls.reverse', 'reverse', ({(37, 37, 37, 57): '"""autoupdate_profile"""'}, {}), "('autoupdate_profile')", False, 'from django.urls import reverse\n'), ((75, 30, 75, 59), 'django.urls.reverse', 'reverse', ({(75, 38, 75, 58): '"""autoupdate_profile"""'}, {}), "('autoupdate_profile')", False, 'from django.urls import reverse\n'), ((48, 24, 48, 64), 'consents.models.Term.objects.filter', 'Term.objects.filter', (), '', False, 'from consents.models import Consent, Term\n'), ((90, 27, 92, 13), 'consents.models.Consent.objects.filter', 'Consent.objects.filter', (), '', False, 'from consents.models import Consent, Term\n')] |
kprokofi/animal-recognition-with-voice | bot/recognizer_bot/yolo/common/utils.py | e9e5235315255eb6e17df3dba616b2ed4c902c92 | import numpy as np
import time
import cv2
import colorsys
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Activation, ReLU, Multiply
# Custom objects from backbones package https://github.com/david8862/keras-YOLOv3-model-set/tree/master/common/backbones
def mish(x):
return x * K.tanh(K.softplus(x))
def hard_swish(x):
return Multiply()([Activation(hard_sigmoid)(x), x])
def hard_sigmoid(x):
return ReLU(6.)(x + 3.) * (1. / 6.)
def swish(x):
"""Swish activation function.
# Arguments
x: Input tensor.
# Returns
The Swish activation: `x * sigmoid(x)`.
# References
[Searching for Activation Functions](https://arxiv.org/abs/1710.05941)
"""
if K.backend() == 'tensorflow':
try:
# The native TF implementation has a more
# memory-efficient gradient implementation
return K.tf.nn.swish(x)
except AttributeError:
pass
return x * K.sigmoid(x)
def get_custom_objects():
'''
form up a custom_objects dict so that the customized
layer/function call could be correctly parsed when keras
.h5 model is loading or converting
'''
custom_objects_dict = {
'tf': tf,
'swish': swish,
'hard_sigmoid': hard_sigmoid,
'hard_swish': hard_swish,
'mish': mish
}
return custom_objects_dict
def get_multiscale_list():
input_shape_list = [(320, 320), (352, 352), (384, 384), (416, 416),
(448, 448), (480, 480), (512, 512), (544, 544), (576, 576), (608, 608)]
return input_shape_list
def resize_anchors(base_anchors, target_shape, base_shape=(416, 416)):
'''
original anchor size is clustered from COCO dataset
under input shape (416,416). We need to resize it to
our train input shape for better performance
'''
return np.around(base_anchors*target_shape[::-1]/base_shape[::-1])
def get_classes(classes_path):
'''loads the classes'''
with open(classes_path) as f:
class_names = f.readlines()
class_names = [c.strip() for c in class_names]
return class_names
def get_anchors(anchors_path):
'''loads the anchors from a file'''
with open(anchors_path) as f:
anchors = f.readline()
anchors = [float(x) for x in anchors.split(',')]
return np.array(anchors).reshape(-1, 2)
def get_colors(class_names):
# Generate colors for drawing bounding boxes.
hsv_tuples = [(x / len(class_names), 1., 1.)
for x in range(len(class_names))]
colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
colors = list(
map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)),
colors))
np.random.seed(10101) # Fixed seed for consistent colors across runs.
# Shuffle colors to decorrelate adjacent classes.
np.random.shuffle(colors)
np.random.seed(None) # Reset seed to default.
return colors
def get_dataset(annotation_file, shuffle=True):
with open(annotation_file) as f:
lines = f.readlines()
lines = [line.strip() for line in lines]
if shuffle:
np.random.seed(int(time.time()))
np.random.shuffle(lines)
# np.random.seed(None)
return lines
def draw_label(image, text, color, coords):
font = cv2.FONT_HERSHEY_PLAIN
font_scale = 1.
(text_width, text_height) = cv2.getTextSize(
text, font, fontScale=font_scale, thickness=1)[0]
padding = 5
rect_height = text_height + padding * 2
rect_width = text_width + padding * 2
(x, y) = coords
cv2.rectangle(image, (x, y), (x + rect_width,
y - rect_height), color, cv2.FILLED)
cv2.putText(image, text, (x + padding, y - text_height + padding), font,
fontScale=font_scale,
color=(255, 255, 255),
lineType=cv2.LINE_AA)
return image
def draw_boxes(image, boxes, classes, scores, class_names, colors, show_score=True):
if boxes is None or len(boxes) == 0:
return image
if classes is None or len(classes) == 0:
return image
for box, cls, score in zip(boxes, classes, scores):
xmin, ymin, xmax, ymax = map(int, box)
class_name = class_names[cls]
if show_score:
label = '{} {:.2f}'.format(class_name, score)
else:
label = '{}'.format(class_name)
#print(label, (xmin, ymin), (xmax, ymax))
# if no color info, use black(0,0,0)
if colors is None:
color = (0, 0, 0)
else:
color = colors[cls]
cv2.rectangle(image, (xmin, ymin), (xmax, ymax), color, 1, cv2.LINE_AA)
image = draw_label(image, label, color, (xmin, ymin))
return image
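# Illustrative sketch tying the helpers together; the class file path and the
# single hand-made detection are assumptions for demonstration purposes.
def _demo_draw(image):
    class_names = get_classes('configs/coco_classes.txt')  # hypothetical path
    colors = get_colors(class_names)
    boxes = np.array([[10, 20, 110, 220]])
    classes = np.array([0])
    scores = np.array([0.9])
    return draw_boxes(image, boxes, classes, scores, class_names, colors)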
| [((74, 11, 74, 70), 'numpy.around', 'np.around', ({(74, 21, 74, 69): '(base_anchors * target_shape[::-1] / base_shape[::-1])'}, {}), '(base_anchors * target_shape[::-1] / base_shape[::-1])', True, 'import numpy as np\n'), ((101, 4, 101, 25), 'numpy.random.seed', 'np.random.seed', ({(101, 19, 101, 24): '(10101)'}, {}), '(10101)', True, 'import numpy as np\n'), ((103, 4, 103, 29), 'numpy.random.shuffle', 'np.random.shuffle', ({(103, 22, 103, 28): 'colors'}, {}), '(colors)', True, 'import numpy as np\n'), ((104, 4, 104, 24), 'numpy.random.seed', 'np.random.seed', ({(104, 19, 104, 23): 'None'}, {}), '(None)', True, 'import numpy as np\n'), ((133, 4, 134, 54), 'cv2.rectangle', 'cv2.rectangle', ({(133, 18, 133, 23): 'image', (133, 25, 133, 31): '(x, y)', (133, 33, 134, 34): '(x + rect_width, y - rect_height)', (134, 36, 134, 41): 'color', (134, 43, 134, 53): 'cv2.FILLED'}, {}), '(image, (x, y), (x + rect_width, y - rect_height), color, cv2.\n FILLED)', False, 'import cv2\n'), ((135, 4, 138, 37), 'cv2.putText', 'cv2.putText', (), '', False, 'import cv2\n'), ((17, 11, 17, 21), 'tensorflow.keras.layers.Multiply', 'Multiply', ({}, {}), '()', False, 'from tensorflow.keras.layers import Activation, ReLU, Multiply\n'), ((33, 7, 33, 18), 'tensorflow.keras.backend.backend', 'K.backend', ({}, {}), '()', True, 'from tensorflow.keras import backend as K\n'), ((41, 15, 41, 27), 'tensorflow.keras.backend.sigmoid', 'K.sigmoid', ({(41, 25, 41, 26): 'x'}, {}), '(x)', True, 'from tensorflow.keras import backend as K\n'), ((115, 8, 115, 32), 'numpy.random.shuffle', 'np.random.shuffle', ({(115, 26, 115, 31): 'lines'}, {}), '(lines)', True, 'import numpy as np\n'), ((124, 32, 125, 54), 'cv2.getTextSize', 'cv2.getTextSize', (), '', False, 'import cv2\n'), ((164, 8, 164, 79), 'cv2.rectangle', 'cv2.rectangle', ({(164, 22, 164, 27): 'image', (164, 29, 164, 41): '(xmin, ymin)', (164, 43, 164, 55): '(xmax, ymax)', (164, 57, 164, 62): 'color', (164, 64, 164, 65): '(1)', (164, 67, 164, 78): 'cv2.LINE_AA'}, {}), '(image, (xmin, ymin), (xmax, ymax), color, 1, cv2.LINE_AA)', False, 'import cv2\n'), ((13, 22, 13, 35), 'tensorflow.keras.backend.softplus', 'K.softplus', ({(13, 33, 13, 34): 'x'}, {}), '(x)', True, 'from tensorflow.keras import backend as K\n'), ((21, 11, 21, 19), 'tensorflow.keras.layers.ReLU', 'ReLU', ({(21, 16, 21, 18): '(6.0)'}, {}), '(6.0)', False, 'from tensorflow.keras.layers import Activation, ReLU, Multiply\n'), ((37, 19, 37, 35), 'tensorflow.keras.backend.tf.nn.swish', 'K.tf.nn.swish', ({(37, 33, 37, 34): 'x'}, {}), '(x)', True, 'from tensorflow.keras import backend as K\n'), ((90, 11, 90, 28), 'numpy.array', 'np.array', ({(90, 20, 90, 27): 'anchors'}, {}), '(anchors)', True, 'import numpy as np\n'), ((17, 23, 17, 47), 'tensorflow.keras.layers.Activation', 'Activation', ({(17, 34, 17, 46): 'hard_sigmoid'}, {}), '(hard_sigmoid)', False, 'from tensorflow.keras.layers import Activation, ReLU, Multiply\n'), ((97, 32, 97, 55), 'colorsys.hsv_to_rgb', 'colorsys.hsv_to_rgb', ({(97, 52, 97, 54): '*x'}, {}), '(*x)', False, 'import colorsys\n'), ((114, 27, 114, 38), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n')] |
PhilippHafe/CarND-Capstone | ros/src/tl_detector/light_classification/tl_classifier.py | 9f933c817b11e7a093c3f2b07fad10710f7eb551 | from styx_msgs.msg import TrafficLight
import tensorflow as tf
import numpy as np
import datetime
class TLClassifier(object):
def __init__(self):
PATH_TO_CKPT = "light_classification/frozen_inference_graph.pb"
self.graph = tf.Graph()
self.threshold = 0.5
with self.graph.as_default():
od_graph_def = tf.GraphDef()
with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
serialized_graph = fid.read()
od_graph_def.ParseFromString(serialized_graph)
tf.import_graph_def(od_graph_def, name = '')
self.image_tensor = self.graph.get_tensor_by_name('image_tensor:0')
self.boxes = self.graph.get_tensor_by_name('detection_boxes:0')
self.scores = self.graph.get_tensor_by_name('detection_scores:0')
self.classes = self.graph.get_tensor_by_name('detection_classes:0')
self.num_detections = self.graph.get_tensor_by_name('num_detections:0')
self.sess = tf.Session(graph=self.graph)
def get_classification(self, image):
"""Determines the color of the traffic light in the image
Args:
image (cv::Mat): image containing the traffic light
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
"""
#TODO implement light color prediction
with self.graph.as_default():
image_np_expanded = np.expand_dims(image, axis=0)
start = datetime.datetime.now()
(boxes, scores, classes, num) = self.sess.run(
[self.boxes, self.scores, self.classes, self.num_detections],
feed_dict={self.image_tensor: image_np_expanded})
end = datetime.datetime.now()
dur = end - start
#print(dur.total_seconds())
boxes = np.squeeze(boxes)
classes = np.squeeze(classes).astype(np.int32)
scores = np.squeeze(scores)
if len(scores)>0 and np.max(scores)>self.threshold:
detected_class = int(classes[np.argmax(scores)])
else:
detected_class = 4
if detected_class == 1:
print('Classes: {}, Green. Detection Duration {}'.format(TrafficLight.GREEN,dur.total_seconds()))
return TrafficLight.GREEN
elif detected_class == 2:
            print('Classes: {}, Red. Detection Duration {}'.format(TrafficLight.RED,dur.total_seconds()))
return TrafficLight.RED
elif detected_class == 3:
            print('Classes: {}, Yellow. Detection Duration {}'.format(TrafficLight.YELLOW,dur.total_seconds()))
return TrafficLight.YELLOW
return TrafficLight.UNKNOWN
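# Illustrative smoke test (assumes the frozen graph referenced above exists
# and that a BGR sample image is available at the hypothetical path below):
if __name__ == '__main__':
    import cv2
    classifier = TLClassifier()
    frame = cv2.imread('test_images/red_light.jpg')
    print('Predicted state: {}'.format(classifier.get_classification(frame)))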
| [] |
ctralie/SiRPyGL | testGMDS.py | e06c317ed60321d492725e39fd8fcc0ce56ff4c0 | #Based off of http://wiki.wxpython.org/GLCanvas
#Lots of help from http://wiki.wxpython.org/Getting%20Started
from OpenGL.GL import *
from OpenGL.GLU import gluPerspective  # needed by repaint() below
import wx
from wx import glcanvas
from Primitives3D import *
from PolyMesh import *
from LaplacianMesh import *
from Geodesics import *
from PointCloud import *
from Cameras3D import *
from ICP import *
from sys import exit, argv
import random
import numpy as np
import scipy.io as sio
from pylab import cm
import os
import subprocess
import math
import time
from sklearn import manifold  # required by the MDS handlers below
from GMDS import *
DEFAULT_SIZE = wx.Size(1200, 800)
DEFAULT_POS = wx.Point(10, 10)
PRINCIPAL_AXES_SCALEFACTOR = 1
def saveImageGL(mvcanvas, filename):
view = glGetIntegerv(GL_VIEWPORT)
img = wx.EmptyImage(view[2], view[3] )
pixels = glReadPixels(0, 0, view[2], view[3], GL_RGB,
GL_UNSIGNED_BYTE)
img.SetData( pixels )
img = img.Mirror(False)
img.SaveFile(filename, wx.BITMAP_TYPE_PNG)
def saveImage(canvas, filename):
s = wx.ScreenDC()
w, h = canvas.size.Get()
b = wx.EmptyBitmap(w, h)
m = wx.MemoryDCFromDC(s)
m.SelectObject(b)
m.Blit(0, 0, w, h, s, 70, 0)
m.SelectObject(wx.NullBitmap)
b.SaveFile(filename, wx.BITMAP_TYPE_PNG)
class MeshViewerCanvas(glcanvas.GLCanvas):
def __init__(self, parent):
attribs = (glcanvas.WX_GL_RGBA, glcanvas.WX_GL_DOUBLEBUFFER, glcanvas.WX_GL_DEPTH_SIZE, 24)
glcanvas.GLCanvas.__init__(self, parent, -1, attribList = attribs)
self.context = glcanvas.GLContext(self)
self.parent = parent
#Camera state variables
self.size = self.GetClientSize()
#self.camera = MouseSphericalCamera(self.size.x, self.size.y)
self.camera = MousePolarCamera(self.size.width, self.size.height)
#Main state variables
self.MousePos = [0, 0]
self.initiallyResized = False
self.bbox = BBox3D()
self.unionbbox = BBox3D()
random.seed()
#Face mesh variables and manipulation variables
self.mesh1 = None
self.mesh1Dist = None
self.mesh1DistLoaded = False
self.mesh2 = None
self.mesh2DistLoaded = False
self.mesh2Dist = None
self.mesh3 = None
#Holds the transformations of the best iteration in ICP
self.transformations = []
self.savingMovie = False
self.movieIter = 0
self.displayMeshFaces = True
self.displayMeshEdges = False
self.displayMeshVertices = False
self.displayMeshNormals = False
self.displayPrincipalAxes = False
self.vertexColors = np.zeros(0)
self.cutPlane = None
self.displayCutPlane = False
self.GLinitialized = False
#GL-related events
wx.EVT_ERASE_BACKGROUND(self, self.processEraseBackgroundEvent)
wx.EVT_SIZE(self, self.processSizeEvent)
wx.EVT_PAINT(self, self.processPaintEvent)
#Mouse Events
wx.EVT_LEFT_DOWN(self, self.MouseDown)
wx.EVT_LEFT_UP(self, self.MouseUp)
wx.EVT_RIGHT_DOWN(self, self.MouseDown)
wx.EVT_RIGHT_UP(self, self.MouseUp)
wx.EVT_MIDDLE_DOWN(self, self.MouseDown)
wx.EVT_MIDDLE_UP(self, self.MouseUp)
wx.EVT_MOTION(self, self.MouseMotion)
#self.initGL()
def initPointCloud(self, pointCloud):
self.pointCloud = pointCloud
def centerOnMesh1(self, evt):
if not self.mesh1:
return
self.bbox = self.mesh1.getBBox()
self.camera.centerOnBBox(self.bbox, theta = -math.pi/2, phi = math.pi/2)
self.Refresh()
def centerOnMesh2(self, evt):
if not self.mesh2:
return
self.bbox = self.mesh2.getBBox()
self.camera.centerOnBBox(self.bbox, theta = -math.pi/2, phi = math.pi/2)
self.Refresh()
def centerOnBoth(self, evt):
if not self.mesh1 or not self.mesh2:
return
self.bbox = self.mesh1.getBBox()
self.bbox.Union(self.mesh2.getBBox())
self.camera.centerOnBBox(self.bbox, theta = -math.pi/2, phi = math.pi/2)
self.Refresh()
def MDSMesh1(self, evt):
if not self.mesh1:
print "ERROR: Mesh 1 not loaded yet"
return
if not self.mesh1DistLoaded:
print "ERROR: Mesh 1 distance matrix not loaded"
return
		mds = manifold.MDS(n_components=3, dissimilarity="precomputed", n_jobs=1)  # 3-D embedding; pos[:, 2] is used below
print "Doing MDS on mesh 1...."
pos = mds.fit(self.mesh1Dist).embedding_
print "Finished MDS on mesh 1"
for i in range(pos.shape[0]):
self.mesh1.vertices[i].pos = Point3D(pos[i, 0], pos[i, 1], pos[i, 2])
self.mesh1.needsDisplayUpdate = True
self.Refresh()
def MDSMesh2(self, evt):
if not self.mesh2:
print "ERROR: Mesh 2 not loaded yet"
return
if not self.mesh2DistLoaded:
print "ERROR: Mesh 2 distance matrix not loaded"
return
		mds = manifold.MDS(n_components=3, dissimilarity="precomputed", n_jobs=1)  # 3-D embedding; pos[:, 2] is used below
print "Doing MDS on mesh 2..."
pos = mds.fit(self.mesh2Dist).embedding_
print "Finished MDS on mesh 2"
for i in range(pos.shape[0]):
self.mesh2.vertices[i].pos = Point3D(pos[i, 0], pos[i, 1], pos[i, 2])
self.mesh2.needsDisplayUpdate = True
self.Refresh()
def doGMDS(self, evt):
if self.mesh1 and self.mesh2:
if not self.mesh1DistLoaded:
print "Mesh 1 distance not loaded"
return
if not self.mesh2DistLoaded:
print "Mesh 2 distance not loaded"
return
N = len(self.mesh1.vertices)
VX = np.zeros((N, 3))
for i in range(N):
V = self.mesh1.vertices[i].pos
VX[i, :] = np.array([V.x, V.y, V.z])
print "Doing GMDS..."
t, u = GMDSPointsToMesh(VX, self.mesh1Dist, self.mesh2, self.mesh2Dist)
print "Finished GMDS"
#Update the vertices based on the triangles where they landed
#and the barycentric coordinates of those triangles
for i in range(N):
Vs = [v.pos for v in self.mesh2.faces[int(t[i].flatten()[0])].getVertices()]
pos = Point3D(0, 0, 0)
for k in range(3):
pos = pos + u[i, k]*Vs[k]
self.mesh1.vertices[i].pos = pos
self.mesh1.needsDisplayUpdate = True
else:
print "ERROR: One or both meshes have not been loaded yet"
self.Refresh()
def displayMeshFacesCheckbox(self, evt):
self.displayMeshFaces = evt.Checked()
self.Refresh()
def displayMeshEdgesCheckbox(self, evt):
self.displayMeshEdges = evt.Checked()
self.Refresh()
def displayCutPlaneCheckbox(self, evt):
self.displayCutPlane = evt.Checked()
self.Refresh()
def displayMeshVerticesCheckbox(self, evt):
self.displayMeshVertices = evt.Checked()
self.Refresh()
def displayPrincipalAxesCheckbox(self, evt):
self.displayPrincipalAxes = evt.Checked()
self.Refresh()
def processEraseBackgroundEvent(self, event): pass #avoid flashing on MSW.
def processSizeEvent(self, event):
self.size = self.GetClientSize()
self.SetCurrent(self.context)
glViewport(0, 0, self.size.width, self.size.height)
if not self.initiallyResized:
#The canvas gets resized once on initialization so the camera needs
#to be updated accordingly at that point
self.camera = MousePolarCamera(self.size.width, self.size.height)
self.camera.centerOnBBox(self.bbox, math.pi/2, math.pi/2)
self.initiallyResized = True
def processPaintEvent(self, event):
dc = wx.PaintDC(self)
self.SetCurrent(self.context)
if not self.GLinitialized:
self.initGL()
self.GLinitialized = True
self.repaint()
def repaint(self):
#Set up projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
farDist = (self.camera.eye - self.bbox.getCenter()).Length()*2
#This is to make sure we can see on the inside
farDist = max(farDist, self.unionbbox.getDiagLength()*2)
nearDist = farDist/50.0
gluPerspective(180.0*self.camera.yfov/M_PI, float(self.size.x)/self.size.y, nearDist, farDist)
#Set up modelview matrix
self.camera.gotoCameraFrame()
glClearColor(0.0, 0.0, 0.0, 0.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
		glLightfv(GL_LIGHT0, GL_POSITION, [3.0, 4.0, 5.0, 0.0])
		glLightfv(GL_LIGHT1, GL_POSITION, [-3.0, -2.0, -3.0, 0.0])
		glEnable(GL_LIGHTING)
		glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE, [0.8, 0.8, 0.8, 1.0])
glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, [0.2, 0.2, 0.2, 1.0])
glMaterialfv(GL_FRONT_AND_BACK, GL_SHININESS, 64)
if self.mesh1:
self.mesh1.renderGL(True, True, False, False, None)
if self.mesh2:
self.mesh2.renderGL(self.displayMeshEdges, self.displayMeshVertices, self.displayMeshNormals, self.displayMeshFaces, None)
self.SwapBuffers()
def initGL(self):
glLightModelfv(GL_LIGHT_MODEL_AMBIENT, [0.2, 0.2, 0.2, 1.0])
glLightModeli(GL_LIGHT_MODEL_LOCAL_VIEWER, GL_TRUE)
glLightfv(GL_LIGHT0, GL_DIFFUSE, [1.0, 1.0, 1.0, 1.0])
glEnable(GL_LIGHT0)
glLightfv(GL_LIGHT1, GL_DIFFUSE, [0.5, 0.5, 0.5, 1.0])
glEnable(GL_LIGHT1)
glEnable(GL_NORMALIZE)
glEnable(GL_LIGHTING)
glEnable(GL_DEPTH_TEST)
def handleMouseStuff(self, x, y):
#Invert y from what the window manager says
y = self.size.height - y
self.MousePos = [x, y]
def MouseDown(self, evt):
x, y = evt.GetPosition()
self.CaptureMouse()
self.handleMouseStuff(x, y)
self.Refresh()
def MouseUp(self, evt):
x, y = evt.GetPosition()
self.handleMouseStuff(x, y)
self.ReleaseMouse()
self.Refresh()
def MouseMotion(self, evt):
x, y = evt.GetPosition()
[lastX, lastY] = self.MousePos
self.handleMouseStuff(x, y)
dX = self.MousePos[0] - lastX
dY = self.MousePos[1] - lastY
if evt.Dragging():
if evt.MiddleIsDown():
self.camera.translate(dX, dY)
elif evt.RightIsDown():
self.camera.zoom(-dY)#Want to zoom in as the mouse goes up
elif evt.LeftIsDown():
self.camera.orbitLeftRight(dX)
self.camera.orbitUpDown(dY)
self.Refresh()
class MeshViewerFrame(wx.Frame):
(ID_LOADDATASET1, ID_LOADDATASET2, ID_SAVEDATASET, ID_SAVESCREENSHOT) = (1, 2, 3, 4)
def __init__(self, parent, id, title, pos=DEFAULT_POS, size=DEFAULT_SIZE, style=wx.DEFAULT_FRAME_STYLE, name = 'GLWindow', mesh1 = None, mesh2 = None):
style = style | wx.NO_FULL_REPAINT_ON_RESIZE
super(MeshViewerFrame, self).__init__(parent, id, title, pos, size, style, name)
#Initialize the menu
self.CreateStatusBar()
self.size = size
self.pos = pos
print "MeshViewerFrameSize = %s, pos = %s"%(self.size, self.pos)
filemenu = wx.Menu()
menuOpenMesh1 = filemenu.Append(MeshViewerFrame.ID_LOADDATASET1, "&Load Mesh1","Load a polygon mesh")
self.Bind(wx.EVT_MENU, self.OnLoadMesh1, menuOpenMesh1)
menuOpenMesh2 = filemenu.Append(MeshViewerFrame.ID_LOADDATASET2, "&Load Mesh2","Load a polygon mesh")
self.Bind(wx.EVT_MENU, self.OnLoadMesh2, menuOpenMesh2)
menuSaveScreenshot = filemenu.Append(MeshViewerFrame.ID_SAVESCREENSHOT, "&Save Screenshot", "Save a screenshot of the GL Canvas")
self.Bind(wx.EVT_MENU, self.OnSaveScreenshot, menuSaveScreenshot)
menuExit = filemenu.Append(wx.ID_EXIT,"E&xit"," Terminate the program")
self.Bind(wx.EVT_MENU, self.OnExit, menuExit)
# Creating the menubar.
menuBar = wx.MenuBar()
menuBar.Append(filemenu,"&File") # Adding the "filemenu" to the MenuBar
self.SetMenuBar(menuBar) # Adding the MenuBar to the Frame content.
self.glcanvas = MeshViewerCanvas(self)
self.glcanvas.mesh1 = None
self.glcanvas.mesh2 = None
if mesh1:
(self.glcanvas.mesh1, self.glcanvas.mesh1Dist) = self.loadMesh(mesh1)
if self.glcanvas.mesh1Dist.shape[0] > 0:
self.glcanvas.mesh1DistLoaded = True
else:
self.glcanvas.mesh1DistLoaded = False
if mesh2:
(self.glcanvas.mesh2, self.glcanvas.mesh2Dist) = self.loadMesh(mesh2)
if self.glcanvas.mesh2Dist.shape[0] > 0:
self.glcanvas.mesh2DistLoaded = True
else:
self.glcanvas.mesh2DistLoaded = False
self.rightPanel = wx.BoxSizer(wx.VERTICAL)
#Buttons to go to a default view
viewPanel = wx.BoxSizer(wx.HORIZONTAL)
center1Button = wx.Button(self, -1, "Mesh1")
self.Bind(wx.EVT_BUTTON, self.glcanvas.centerOnMesh1, center1Button)
viewPanel.Add(center1Button, 0, wx.EXPAND)
center2Button = wx.Button(self, -1, "Mesh2")
self.Bind(wx.EVT_BUTTON, self.glcanvas.centerOnMesh2, center2Button)
viewPanel.Add(center2Button, 0, wx.EXPAND)
bothButton = wx.Button(self, -1, "Both")
self.Bind(wx.EVT_BUTTON, self.glcanvas.centerOnBoth, bothButton)
viewPanel.Add(bothButton, 0, wx.EXPAND)
self.rightPanel.Add(wx.StaticText(self, label="Views"), 0, wx.EXPAND)
self.rightPanel.Add(viewPanel, 0, wx.EXPAND)
#Buttons for MDS
MDSPanel = wx.BoxSizer(wx.HORIZONTAL)
MDS1Button = wx.Button(self, -1, "MDS Mesh1")
self.Bind(wx.EVT_BUTTON, self.glcanvas.MDSMesh1, MDS1Button)
MDSPanel.Add(MDS1Button, 0, wx.EXPAND)
MDS2Button = wx.Button(self, -1, "MDS Mesh2")
self.Bind(wx.EVT_BUTTON, self.glcanvas.MDSMesh2, MDS2Button)
MDSPanel.Add(MDS2Button, 0, wx.EXPAND)
self.rightPanel.Add(wx.StaticText(self, label="MDS on Meshes"), 0, wx.EXPAND)
self.rightPanel.Add(MDSPanel, 0, wx.EXPAND)
#Checkboxes for displaying data
self.displayMeshFacesCheckbox = wx.CheckBox(self, label = "Display Mesh Faces")
self.displayMeshFacesCheckbox.SetValue(True)
self.Bind(wx.EVT_CHECKBOX, self.glcanvas.displayMeshFacesCheckbox, self.displayMeshFacesCheckbox)
self.rightPanel.Add(self.displayMeshFacesCheckbox, 0, wx.EXPAND)
self.displayMeshEdgesCheckbox = wx.CheckBox(self, label = "Display Mesh Edges")
self.displayMeshEdgesCheckbox.SetValue(False)
self.Bind(wx.EVT_CHECKBOX, self.glcanvas.displayMeshEdgesCheckbox, self.displayMeshEdgesCheckbox)
self.rightPanel.Add(self.displayMeshEdgesCheckbox, 0, wx.EXPAND)
self.displayMeshVerticesCheckbox = wx.CheckBox(self, label = "Display Mesh Points")
self.displayMeshVerticesCheckbox.SetValue(False)
self.Bind(wx.EVT_CHECKBOX, self.glcanvas.displayMeshVerticesCheckbox, self.displayMeshVerticesCheckbox)
self.rightPanel.Add(self.displayMeshVerticesCheckbox)
#Button for doing ICP
GMDSButton = wx.Button(self, -1, "DO GMDS")
self.Bind(wx.EVT_BUTTON, self.glcanvas.doGMDS, GMDSButton)
self.rightPanel.Add(GMDSButton, 0, wx.EXPAND)
#Finally add the two main panels to the sizer
self.sizer = wx.BoxSizer(wx.HORIZONTAL)
self.sizer.Add(self.glcanvas, 2, wx.EXPAND)
self.sizer.Add(self.rightPanel, 0, wx.EXPAND)
self.SetSizer(self.sizer)
self.Layout()
self.Show()
def loadMesh(self, filepath):
print "Loading mesh %s..."%filepath
mesh = LaplacianMesh()
mesh.loadFile(filepath)
print "Finished loading mesh 1\n %s"%mesh
#Now try to load in the distance matrix
fileName, fileExtension = os.path.splitext(filepath)
matfile = sio.loadmat("%s.mat"%fileName)
D = np.array([])
if 'D' in matfile:
D = matfile['D']
else:
print "ERROR: No distance matrix found for mesh %s"%filepath
return (mesh, D)
def OnLoadMesh1(self, evt):
dlg = wx.FileDialog(self, "Choose a file", ".", "", "OBJ files (*.obj)|*.obj|OFF files (*.off)|*.off", wx.OPEN)
if dlg.ShowModal() == wx.ID_OK:
filename = dlg.GetFilename()
dirname = dlg.GetDirectory()
filepath = os.path.join(dirname, filename)
print dirname
(self.glcanvas.mesh1, self.glcanvas.mesh1Dist) = self.loadMesh(filepath)
self.glcanvas.bbox = self.glcanvas.mesh1.getBBox()
print "Mesh BBox: %s\n"%self.glcanvas.bbox
self.glcanvas.camera.centerOnBBox(self.glcanvas.bbox, theta = -math.pi/2, phi = math.pi/2)
#Now try to load in the distance matrix
if self.glcanvas.mesh1Dist.shape[0] > 0:
self.glcanvas.mesh1DistLoaded = True
self.glcanvas.Refresh()
dlg.Destroy()
return
def OnLoadMesh2(self, evt):
dlg = wx.FileDialog(self, "Choose a file", ".", "", "OBJ files (*.obj)|*.obj|OFF files (*.off)|*.off", wx.OPEN)
if dlg.ShowModal() == wx.ID_OK:
filename = dlg.GetFilename()
dirname = dlg.GetDirectory()
filepath = os.path.join(dirname, filename)
print dirname
(self.glcanvas.mesh2, self.glcanvas.mesh2Dist) = self.loadMesh(filepath)
self.glcanvas.bbox = self.glcanvas.mesh2.getBBox()
print "Mesh BBox: %s\n"%self.glcanvas.bbox
self.glcanvas.camera.centerOnBBox(self.glcanvas.bbox, theta = -math.pi/2, phi = math.pi/2)
#Now try to load in the distance matrix
if self.glcanvas.mesh2Dist.shape[0] > 0:
self.glcanvas.mesh2DistLoaded = True
self.glcanvas.Refresh()
dlg.Destroy()
return
def OnSaveScreenshot(self, evt):
dlg = wx.FileDialog(self, "Choose a file", ".", "", "*", wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
filename = dlg.GetFilename()
dirname = dlg.GetDirectory()
filepath = os.path.join(dirname, filename)
saveImageGL(self.glcanvas, filepath)
dlg.Destroy()
return
def OnExit(self, evt):
self.Close(True)
return
class MeshViewer(object):
def __init__(self, m1 = None, m2 = None):
app = wx.App()
frame = MeshViewerFrame(None, -1, 'MeshViewer', mesh1 = m1, mesh2 = m2)
frame.Show(True)
app.MainLoop()
app.Destroy()
if __name__ == '__main__':
m1 = None
m2 = None
if len(argv) >= 3:
m1 = argv[1]
m2 = argv[2]
viewer = MeshViewer(m1, m2)
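# Example invocation (assumes geodesic distance matrices mesh1.mat / mesh2.mat
# sit next to the mesh files, as loadMesh() expects):
#   python testGMDS.py mesh1.off mesh2.off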
| [] |
manpan-1/PySS | PySS/fem.py | 1e4b13de3b2aed13ecf9818f9084a2fedb295cf1 | import matplotlib.pyplot as plt
import numpy as np
import pickle
# import csv
# from collections import namedtuple
# from mpl_toolkits.mplot3d import Axes3D
# import matplotlib.animation as animation
# import matplotlib.colors as mc
class FEModel:
def __init__(self, name=None, hist_data=None):
self.name = name
self.hist_outs = hist_data
def tuple2dict(self, data):
"""
        Convert the load-displacement tuples exported from models into a list of dicts of 'Load'/'Disp' arrays.
"""
ld_data = []
for specimen in data:
sp_dict = dict()
load = []
disp = []
for action in specimen[0]:
load.append(action[1])
for action in specimen[1]:
disp.append(action[1])
sp_dict["Load"] = np.array(load)
sp_dict["Disp"] = -1 * np.array(disp)
            ld_data.append(sp_dict)
        return ld_data
def plot_history(self, x_axis, y_axis):
"""
        Plot one recorded history output against another (e.g. displacement vs. force).
"""
plt.figure()
plt.plot(self.hist_outs[x_axis], self.hist_outs[y_axis])
@classmethod
def from_hist_pkl(cls, filename):
"""
Creates an object and imports history output data.
"""
with open(filename, "rb") as fh:
history_data = pickle.load(fh)
return cls(name=filename, hist_data=history_data)
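# Illustrative usage sketch (the pickle path and the history-output keys are
# assumptions; the real keys depend on what the FE job exported):
def _plot_example():
    model = FEModel.from_hist_pkl('results/job1_hist.pkl')
    model.plot_history('U2', 'RF2')  # e.g. displacement vs reaction force
    plt.show()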
#
# class ParametricDB:
# def __init__(self, dimensions, responses):
# self.responses = responses
# self.dimensions = dimensions
#
# @classmethod
# def from_file(cls, filename):
# """
# Create from file.
#
# The file should be comma separated, first row titles, subsequent rows only numbers.
#
# Parameters
# ----------
# filename : str
# Relative path/filename.
#
# Return
# ------
# ParametricDB
#
# """
# # with open(filename, 'rU') as infile:
# # reader = csv.reader(infile)
# # n_dim = int(next(reader)[0].split()[0])
# # db = {c[0]: c[1:] for c in zip(*reader)}
#
# with open(filename, 'rU') as infile:
# reader = csv.reader(infile, delimiter=";")
# n_dim = int(next(reader)[0].split()[0])
# db = [c for c in zip(*reader)]
#
# all_responses = {i[0]: i[1:] for i in db[n_dim:]}
#
# dim_ticks = np.array([i[1:] for i in db[:n_dim]]).T
# dim_lengths = [len(set(dim_ticks[:, i])) for i in range(n_dim)]
# dim_names = [db[i][0] for i in range(n_dim)]
#
# # with open(filename, 'r') as infile:
# # all_lines = [[c.split(sep=":")[0]] + c.split(sep=":")[1].split(sep=",") for c in infile]
# # db = {c[0]: c[1:] for c in zip(*all_lines)}
#
# # for key in db.keys():
# # if len(key.split(",")) > 1:
# # n_dim = len(key.split(","))
# # dim_str = key
# # dim_ticks = np.array([c.split(sep=",") for c in db[dim_str]])
# # dim_lengths = [len(set(dim_ticks[:, i])) for i in range(n_dim)]
# # dim_names = dim_str.split(sep=",")
# full_list = {i[0]: i[1:][0] for i in zip(dim_names, dim_ticks.T)}
#
# # del db[dim_str]
#
# #df = pd.DataFrame(full_dict)
#
# Address = namedtuple("map", " ".join(dim_names))
# args = [tuple(sorted(set(dim_ticks[:, i]))) for i, j in enumerate(dim_names)]
# addressbook = Address(*args)
#
# mtx = {i: np.empty(dim_lengths) for i in all_responses.keys()}
# for response in all_responses.keys():
# for i, response_value in enumerate(all_responses[response]):
# current_idx = tuple(addressbook[idx].index(full_list[name][i]) for idx, name in enumerate(dim_names))
# mtx[response][current_idx] = response_value
# mtx[response].flags.writeable = False
#
# return cls(addressbook, mtx)
#
# def get_slice(self, slice_at, response):
# """
# Get a slice of the database.
#
# Parameters
# ----------
# slice_at : dict of int
# A dictionary of the keys to be sliced at the assigned values.
# response : str
# The name of the requested response to be sliced.
#
# """
#
# idx_arr = [0]*len(self.dimensions)
#
# for key in self.dimensions._fields:
# if key not in slice_at.keys():
# idx_arr[self.get_idx(key)] = slice(None, None)
# for name, value in zip(slice_at.keys(), slice_at.values()):
# idx_arr[self.get_idx(name)] = value
#
# return self.responses[response][idx_arr]
#
# def get_idx(self, attrname):
# """
# Get the index number of a parameter (dimension) in the database.
#
# Parameters
# ----------
# attrname : str
#
# """
# return(self.dimensions.index(self.dimensions.__getattribute__(attrname)))
#
# def contour_2d(self, slice_at, response, transpose=False, fig=None, sbplt=None):
# """
# Contour plot.
# :param slice_at:
# :return:
# """
# plt.rc('text', usetex=True)
# if fig is None:
# fig = plt.figure()
# if sbplt is None:
# ax = fig.add_subplot(111)
# else:
# ax = fig.add_subplot(sbplt)
# else:
# if sbplt is None:
# ax = fig.add_subplot(111)
# else:
# ax = fig.add_subplot(sbplt)
#
# axes = [key for key in self.dimensions._fields if key not in slice_at.keys()]
#
# if transpose:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[1])], self.dimensions[self.get_idx(axes[0])])
# Z = self.get_slice(slice_at, response).T
# x_label, y_label = axes[1], axes[0]
# else:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[0])], self.dimensions[self.get_idx(axes[1])])
# Z = self.get_slice(slice_at, response)
# x_label, y_label = axes[0], axes[1]
#
# ttl_values = [self.dimensions[self.get_idx(i)][slice_at[i]] for i in slice_at.keys()]
#
# # levels = np.arange(0, 2., 0.025)
# # sbplt = ax.contour(X.astype(np.float), Y.astype(np.float), Z.T, vmin=0.4, vmax=1., levels=levels, cmap=plt.cm.inferno)
# sbplt = ax.contour(X.astype(np.float), Y.astype(np.float), Z.T, cmap=plt.cm.gray_r)
# sbplt2 = ax.contourf(X.astype(np.float), Y.astype(np.float), Z.T, cmap=plt.cm.inferno)
# plt.clabel(sbplt, inline=1, fontsize=10)
# ttl = [i for i in zip(slice_at.keys(), ttl_values)]
# ttl = ", ".join(["=".join(i) for i in ttl])
# ax.set_title("$" + response + "$" + " for : " + "$" + ttl + "$")
# ax.set_xlabel("$"+x_label+"$")
# ax.set_ylabel("$"+y_label+"$")
#
# return fig
#
# def surf_3d(self, slice_at, response, transpose=False, fig=None, sbplt=None):
# """
# Surface plot.
# :param slice_at:
# :return:
# """
# #Convenient window dimensions
# # one subplot:
# # 2 side by side: Bbox(x0=0.0, y0=0.0, x1=6.79, y1=2.57)
# # azim elev = -160 30
# # 3 subplots side by side
# # 4 subplots: Bbox(x0=0.0, y0=0.0, x1=6.43, y1=5.14)
# #azim elev -160 30
# plt.rc('text', usetex=True)
# if fig is None:
# fig = plt.figure()
# if sbplt is None:
# ax = fig.add_subplot(111, projection='3d')
# else:
# ax = fig.add_subplot(sbplt, projection='3d')
# else:
# if sbplt is None:
# ax = fig.add_subplot(111, projection='3d')
# else:
# ax = fig.add_subplot(sbplt, projection='3d')
#
#
# axes = [key for key in self.dimensions._fields if key not in slice_at.keys()]
#
# if transpose:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[1])], self.dimensions[self.get_idx(axes[0])])
# Z = self.get_slice(slice_at, response).T
# x_label, y_label = axes[1], axes[0]
# else:
# X, Y = np.meshgrid(self.dimensions[self.get_idx(axes[0])], self.dimensions[self.get_idx(axes[1])])
# Z = self.get_slice(slice_at, response)
# x_label, y_label = axes[0], axes[1]
#
# ttl_values = [self.dimensions[self.get_idx(i)][slice_at[i]] for i in slice_at.keys()]
#
# sbplt = ax.plot_surface(X.astype(np.float), Y.astype(np.float), Z.T, cmap=plt.cm.inferno)
# # plt.clabel(sbplt, inline=1, fontsize=10)
# ttl = [i for i in zip(slice_at.keys(), ttl_values)]
# ttl = ", ".join(["=".join(i) for i in ttl])
# ax.set_title("$" + response + "$" + " for : " + "$" + ttl + "$")
# ax.set_xlabel("$"+x_label+"$")
# ax.set_ylabel("$"+y_label+"$")
#
# return fig
#
# def match_viewports(fig=None):
# if fig is None:
# fig = plt.gcf()
# fig.axes[1].view_init(azim=fig.axes[0].azim, elev=fig.axes[0].elev)
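# NOTE: main() below depends on the ParametricDB class that is commented out
# above; it will raise NameError until that class is restored.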
def main():
lambda01 = ParametricDB.from_file("data/fem/fem-results_lambda01.dat")
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcA, f_yield: 355 MPa, lambda_flex: 0.1")
lambda01.contour_2d({"plate_imp": 0, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[0, 0])
lambda01.contour_2d({"plate_imp": 1, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[0, 1])
lambda01.contour_2d({"plate_imp": 2, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[0, 2])
lambda01.contour_2d({"plate_imp": 3, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[1, 0])
lambda01.contour_2d({"plate_imp": 4, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[1, 1])
lambda01.contour_2d({"plate_imp": 5, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcB, f_yield: 355 MPa, lambda_flex: 0.1")
lambda01.contour_2d({"plate_imp": 0, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[0, 0])
lambda01.contour_2d({"plate_imp": 1, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[0, 1])
lambda01.contour_2d({"plate_imp": 2, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[0, 2])
lambda01.contour_2d({"plate_imp": 3, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[1, 0])
lambda01.contour_2d({"plate_imp": 4, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[1, 1])
lambda01.contour_2d({"plate_imp": 5, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcC, f_yield: 355 MPa, lambda_flex: 0.1")
lambda01.contour_2d({"plate_imp": 0, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[0, 0])
lambda01.contour_2d({"plate_imp": 1, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[0, 1])
lambda01.contour_2d({"plate_imp": 2, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[0, 2])
lambda01.contour_2d({"plate_imp": 3, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[1, 0])
lambda01.contour_2d({"plate_imp": 4, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[1, 1])
lambda01.contour_2d({"plate_imp": 5, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcA, f_yield: 700 MPa, lambda_flex: 0.1")
lambda01.contour_2d({"plate_imp": 0, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[0, 0])
lambda01.contour_2d({"plate_imp": 1, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[0, 1])
lambda01.contour_2d({"plate_imp": 2, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[0, 2])
lambda01.contour_2d({"plate_imp": 3, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[1, 0])
lambda01.contour_2d({"plate_imp": 4, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[1, 1])
lambda01.contour_2d({"plate_imp": 5, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcB, f_yield: 700 MPa, lambda_flex: 0.1")
lambda01.contour_2d({"plate_imp": 0, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[0, 0])
lambda01.contour_2d({"plate_imp": 1, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[0, 1])
lambda01.contour_2d({"plate_imp": 2, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[0, 2])
lambda01.contour_2d({"plate_imp": 3, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[1, 0])
lambda01.contour_2d({"plate_imp": 4, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[1, 1])
lambda01.contour_2d({"plate_imp": 5, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcC, f_yield: 700 MPa, lambda_flex: 0.1")
lambda01.contour_2d({"plate_imp": 0, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[0, 0])
lambda01.contour_2d({"plate_imp": 1, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[0, 1])
lambda01.contour_2d({"plate_imp": 2, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[0, 2])
lambda01.contour_2d({"plate_imp": 3, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[1, 0])
lambda01.contour_2d({"plate_imp": 4, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[1, 1])
lambda01.contour_2d({"plate_imp": 5, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[1, 2])
lambda02 = ParametricDB.from_file("data/fem/fem-results-lambda02.dat")
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcA, f_yield: 355 MPa, lambda_flex: 0.2")
lambda02.contour_2d({"plate_imp": 0, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[0, 0])
lambda02.contour_2d({"plate_imp": 1, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[0, 1])
lambda02.contour_2d({"plate_imp": 2, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[0, 2])
lambda02.contour_2d({"plate_imp": 3, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[1, 0])
lambda02.contour_2d({"plate_imp": 4, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[1, 1])
lambda02.contour_2d({"plate_imp": 5, "fab_class": 0, "f_yield": 0}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcB, f_yield: 355 MPa, lambda_flex: 0.2")
lambda02.contour_2d({"plate_imp": 0, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[0, 0])
lambda02.contour_2d({"plate_imp": 1, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[0, 1])
lambda02.contour_2d({"plate_imp": 2, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[0, 2])
lambda02.contour_2d({"plate_imp": 3, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[1, 0])
lambda02.contour_2d({"plate_imp": 4, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[1, 1])
lambda02.contour_2d({"plate_imp": 5, "fab_class": 1, "f_yield": 0}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcC, f_yield: 355 MPa, lambda_flex: 0.2")
lambda02.contour_2d({"plate_imp": 0, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[0, 0])
lambda02.contour_2d({"plate_imp": 1, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[0, 1])
lambda02.contour_2d({"plate_imp": 2, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[0, 2])
lambda02.contour_2d({"plate_imp": 3, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[1, 0])
lambda02.contour_2d({"plate_imp": 4, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[1, 1])
lambda02.contour_2d({"plate_imp": 5, "fab_class": 2, "f_yield": 0}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcA, f_yield: 700 MPa, lambda_flex: 0.2")
lambda02.contour_2d({"plate_imp": 0, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[0, 0])
lambda02.contour_2d({"plate_imp": 1, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[0, 1])
lambda02.contour_2d({"plate_imp": 2, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[0, 2])
lambda02.contour_2d({"plate_imp": 3, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[1, 0])
lambda02.contour_2d({"plate_imp": 4, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[1, 1])
lambda02.contour_2d({"plate_imp": 5, "fab_class": 0, "f_yield": 1}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcB, f_yield: 700 MPa, lambda_flex: 0.2")
lambda02.contour_2d({"plate_imp": 0, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[0, 0])
lambda02.contour_2d({"plate_imp": 1, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[0, 1])
lambda02.contour_2d({"plate_imp": 2, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[0, 2])
lambda02.contour_2d({"plate_imp": 3, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[1, 0])
lambda02.contour_2d({"plate_imp": 4, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[1, 1])
lambda02.contour_2d({"plate_imp": 5, "fab_class": 1, "f_yield": 1}, "lpf", ax=ax[1, 2])
fig, ax = plt.subplots(nrows=2, ncols=3)
fig.suptitle("fab_class: fcC, f_yield: 700 MPa, lambda_flex: 0.2")
lambda02.contour_2d({"plate_imp": 0, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[0, 0])
lambda02.contour_2d({"plate_imp": 1, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[0, 1])
lambda02.contour_2d({"plate_imp": 2, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[0, 2])
lambda02.contour_2d({"plate_imp": 3, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[1, 0])
lambda02.contour_2d({"plate_imp": 4, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[1, 1])
lambda02.contour_2d({"plate_imp": 5, "fab_class": 2, "f_yield": 1}, "lpf", ax=ax[1, 2])
return
| [((258, 14, 258, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((266, 14, 266, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((274, 14, 274, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((283, 14, 283, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((291, 14, 291, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((299, 14, 299, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((312, 14, 312, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((320, 14, 320, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((328, 14, 328, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((337, 14, 337, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((345, 14, 345, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((353, 14, 353, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((39, 8, 39, 20), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((40, 8, 40, 64), 'matplotlib.pyplot.plot', 'plt.plot', ({(40, 17, 40, 39): 'self.hist_outs[x_axis]', (40, 41, 40, 63): 'self.hist_outs[y_axis]'}, {}), '(self.hist_outs[x_axis], self.hist_outs[y_axis])', True, 'import matplotlib.pyplot as plt\n'), ((31, 30, 31, 44), 'numpy.array', 'np.array', ({(31, 39, 31, 43): 'load'}, {}), '(load)', True, 'import numpy as np\n'), ((48, 27, 48, 42), 'pickle.load', 'pickle.load', ({(48, 39, 48, 41): 'fh'}, {}), '(fh)', False, 'import pickle\n'), ((32, 35, 32, 49), 'numpy.array', 'np.array', ({(32, 44, 32, 48): 'disp'}, {}), '(disp)', True, 'import numpy as np\n')] |
gigamonkey/sheets | gigamonkeys/get.py | a89e76360ad9a35e44e5e352346eeccbe6952b1f | #!/usr/bin/env python
import json
import sys
from gigamonkeys.spreadsheets import spreadsheets
spreadsheet_id = sys.argv[1]
ranges = sys.argv[2:]
data = spreadsheets().get(spreadsheet_id, include_grid_data=bool(ranges), ranges=ranges)
json.dump(data, sys.stdout, indent=2)
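# Example invocation (the spreadsheet id and the range are placeholders):
#   python get.py 1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms 'Sheet1!A1:B10'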
| [((13, 0, 13, 37), 'json.dump', 'json.dump', (), '', False, 'import json\n'), ((11, 7, 11, 21), 'gigamonkeys.spreadsheets.spreadsheets', 'spreadsheets', ({}, {}), '()', False, 'from gigamonkeys.spreadsheets import spreadsheets\n')] |
mhmddpkts/Get-Turkish-Words-with-Web-Scraping | config.py | 6e344640f6dc512f03a9b59522876ce7b6339a86 |
root_URL = "https://tr.wiktionary.org/wiki/Vikis%C3%B6zl%C3%BCk:S%C3%B6zc%C3%BCk_listesi_"
filepath = "words.csv"
#letters=["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O",
#          "P","R","S","T","U","V","Y","Z"] ## the letters İ,Ç,Ö,Ş,Ü do not work correctly
letters=["C"] | [] |
praekelt/sow-generator | sow_generator/tasks.py | eb5dab3b3231688966254a1797ced7eec67b6e8a | from github3 import login
from github3.models import GitHubError
from celery import task
from celery.decorators import periodic_task
from celery.task.schedules import crontab
from sow_generator.models import Repository, AuthToken
def _sync_repository(obj):
dirty = False
token = AuthToken.objects.get(id=1).token
gh = login(token=token)
dc = gh.user()
org, name = obj.orgname
repo = gh.repository(org, name)
if repo is not None:
# Find RST or MD files. Markdown takes precedence.
for fieldname in ("readme", "sow"):
v = repo.contents("%s.rst" % fieldname.upper())
if v is not None:
setattr(obj, fieldname, v.decoded)
setattr(obj, "%s_format" % fieldname, "rst")
dirty = True
v = repo.contents("%s.md" % fieldname.upper())
if v is not None:
setattr(obj, fieldname, v.decoded)
setattr(obj, "%s_format" % fieldname, "md")
dirty = True
if dirty:
obj.save()
@task(max_retries=5)
def sync_repository(id):
obj = Repository.objects.get(id=id)
_sync_repository(obj)
@periodic_task(run_every=crontab(hour='*', minute='0', day_of_week='*'))
def sync_repositories():
"""Sync all repositories"""
for obj in Repository.objects.all():
_sync_repository(obj)
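# Illustrative call sites (assume a Celery worker is running and that the
# AuthToken row with id=1 holds a valid GitHub token):
def _example_triggers():
    repo = Repository.objects.first()
    sync_repository.delay(repo.id)  # asynchronous, via the task queue
    sync_repository(repo.id)        # synchronous, e.g. from a shell or test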
| [((35, 1, 35, 20), 'celery.task', 'task', (), '', False, 'from celery import task\n'), ((13, 9, 13, 27), 'github3.login', 'login', (), '', False, 'from github3 import login\n'), ((37, 10, 37, 39), 'sow_generator.models.Repository.objects.get', 'Repository.objects.get', (), '', False, 'from sow_generator.models import Repository, AuthToken\n'), ((44, 15, 44, 39), 'sow_generator.models.Repository.objects.all', 'Repository.objects.all', ({}, {}), '()', False, 'from sow_generator.models import Repository, AuthToken\n'), ((12, 12, 12, 39), 'sow_generator.models.AuthToken.objects.get', 'AuthToken.objects.get', (), '', False, 'from sow_generator.models import Repository, AuthToken\n'), ((41, 25, 41, 71), 'celery.task.schedules.crontab', 'crontab', (), '', False, 'from celery.task.schedules import crontab\n')] |
X-rayLaser/keras-auto-hwr | sources/wrappers.py | 67cfc0209045b1e211f0491b0199cb9d6811bfd0 | import numpy as np
from sources import BaseSource
from sources.base import BaseSourceWrapper
from sources.preloaded import PreLoadedSource
import json
class WordsSource(BaseSource):
def __init__(self, source):
self._source = source
def __len__(self):
return len(self._source)
    def _remove_apostrophes(self, seq):
        res = ''.join(seq.split("'"))
        res = ''.join(res.split('"'))
        return res
def _clean(self, seq):
s = ''
for ch in seq.strip():
if ch.isalpha():
s += ch
return s
def get_sequences(self):
for seq_in, transcription in self._source.get_sequences():
            transcription = self._remove_apostrophes(transcription)
words = [self._clean(word) for word in transcription.split(' ')]
yield seq_in, words
class LabelSource(BaseSource):
def __init__(self, source, mapping_table):
self._source = source
self._mapping_table = mapping_table
def __len__(self):
return len(self._source)
def get_sequences(self):
for seq_in, seq_out in self._source.get_sequences():
label_seq = [self._mapping_table.encode(ch) for ch in seq_out]
yield seq_in, label_seq
class CTCAdaptedSource(BaseSource):
def __init__(self, source, padding_value=0):
self._source = source
self._padding = padding_value
def __len__(self):
return len(self._source)
def get_sequences(self):
for seq_in, seq_out in self._source.get_sequences():
seqs_in_pad = list(seq_in)
while len(seqs_in_pad) <= 2 * len(seq_out) + 1:
n = len(seqs_in_pad[0])
seqs_in_pad.append([self._padding] * n)
yield seqs_in_pad, seq_out
class Normalizer:
def __init__(self):
self._mu = None
self._sd = None
@staticmethod
def from_json(path):
with open(path, 'r') as f:
s = f.read()
d = json.loads(s)
normalizer = Normalizer()
mu = np.array(d['mu'])
sd = np.array(d['sd'])
normalizer.set_mean(mu)
normalizer.set_deviation(sd)
return normalizer
def to_json(self, path):
d = {
'mu': np.array(self.mu).tolist(),
'sd': np.array(self.sd).tolist()
}
with open(path, 'w') as f:
f.write(json.dumps(d))
def set_mean(self, mu):
self._mu = mu
def set_deviation(self, sd):
self._sd = sd
@property
def mu(self):
return self._mu
@property
def sd(self):
return self._sd
def fit(self, X):
sequence = []
for x in X:
sequence.extend(x)
self._mu = np.mean(sequence, axis=0)
self._sd = np.std(sequence, axis=0)
def preprocess(self, X):
res = []
for x in X:
x_norm = (x - self._mu) / self._sd
# we do not want to normalize END-OF-STROKE flag which is last in the tuple
x_norm[:, -1] = np.array(x)[:, -1]
res.append(x_norm.tolist())
return res
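# Minimal fit/apply sketch for Normalizer (synthetic points; the last column
# is the end-of-stroke flag, which preprocess deliberately leaves unnormalized):
def _normalizer_example():
    X = [np.array([[0., 0., 0., 0.], [2., 4., 1., 1.]])]
    normalizer = Normalizer()
    normalizer.fit(X)
    return normalizer.preprocess(X)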
class OffsetPointsSource(BaseSource):
def __init__(self, source):
self._source = source
def __len__(self):
return len(self._source)
def get_sequences(self):
for strokes, transcription in self._source.get_sequences():
x0, y0, t0 = strokes[0].points[0]
new_seq = []
for stroke in strokes:
points = []
for x, y, t in stroke.points:
points.append((x - x0, y - y0, t - t0, 0))
points[-1] = points[-1][:-1] + (1,)
new_seq.extend(points)
yield new_seq, transcription
class NormalizedSource(BaseSource):
def __init__(self, source, normalizer):
self._source = source
self._normalizer = normalizer
def __len__(self):
return len(self._source)
def get_sequences(self):
for points, transcription in self._source.get_sequences():
norm = self._normalizer.preprocess([points])[0]
yield norm, transcription
class DenormalizedSource(BaseSource):
def __init__(self, source, normalizer):
self._source = source
self._normalizer = normalizer
def __len__(self):
return len(self._source)
def get_sequences(self):
mu = self._normalizer.mu
sd = self._normalizer.sd
for points, transcription in self._source.get_sequences():
denormalized = [(p * sd + mu).tolist() for p in points]
for i, p in enumerate(denormalized):
p[3] = points[i][3]
yield denormalized, transcription
class H5pySource(BaseSource):
def __init__(self, h5py_ds, random_order=True):
self._h5py = h5py_ds
self._random = random_order
def __len__(self):
return len(self._h5py)
def get_sequences(self):
return self._h5py.get_data(random_order=self._random)
class PreprocessedSource(BaseSourceWrapper):
def __init__(self, source, preprocessor):
super().__init__(source)
self._preprocessor = preprocessor
def get_sequences(self):
for xs, ys in self._source.get_sequences():
yield self._preprocessor.pre_process_example(xs, ys)
class ConstrainedSource(BaseSourceWrapper):
def __init__(self, source, num_lines):
super().__init__(source)
self._num_lines = num_lines
self._use_all = (num_lines == 0)
def get_sequences(self):
for j, (seq_in, seq_out) in enumerate(self._source.get_sequences()):
#print(j, seq_out)
if j % 500 == 0:
print('Fetched {} examples'.format(j))
if j >= self._num_lines and not self._use_all:
break
yield seq_in, seq_out
class PlainListSource(BaseSourceWrapper):
def get_sequences(self):
for strokes, t in self._source.get_sequences():
points = [stroke.points for stroke in strokes]
yield points, t
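# Typical composition of these wrappers (illustrative; `dataset` stands in for
# an h5py-backed container exposing the get_data() iterator H5pySource expects):
def _build_pipeline(dataset, normalizer, mapping_table):
    source = H5pySource(dataset)
    source = OffsetPointsSource(source)  # strokes -> offset point tuples
    source = NormalizedSource(source, normalizer)
    return LabelSource(source, mapping_table)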
| [((81, 12, 81, 25), 'json.loads', 'json.loads', ({(81, 23, 81, 24): 's'}, {}), '(s)', False, 'import json\n'), ((83, 13, 83, 30), 'numpy.array', 'np.array', ({(83, 22, 83, 29): "d['mu']"}, {}), "(d['mu'])", True, 'import numpy as np\n'), ((84, 13, 84, 30), 'numpy.array', 'np.array', ({(84, 22, 84, 29): "d['sd']"}, {}), "(d['sd'])", True, 'import numpy as np\n'), ((116, 19, 116, 44), 'numpy.mean', 'np.mean', (), '', True, 'import numpy as np\n'), ((117, 19, 117, 43), 'numpy.std', 'np.std', (), '', True, 'import numpy as np\n'), ((95, 20, 95, 33), 'json.dumps', 'json.dumps', ({(95, 31, 95, 32): 'd'}, {}), '(d)', False, 'import json\n'), ((125, 28, 125, 39), 'numpy.array', 'np.array', ({(125, 37, 125, 38): 'x'}, {}), '(x)', True, 'import numpy as np\n'), ((91, 18, 91, 35), 'numpy.array', 'np.array', ({(91, 27, 91, 34): 'self.mu'}, {}), '(self.mu)', True, 'import numpy as np\n'), ((92, 18, 92, 35), 'numpy.array', 'np.array', ({(92, 27, 92, 34): 'self.sd'}, {}), '(self.sd)', True, 'import numpy as np\n')] |
yufeiwang63/ROLL | multiworld/multiworld/core/image_env.py | aba0b4530934946eb9c41fbe5a0d6c27775596ff | import random
import copy  # show_obs() below uses copy.deepcopy
import cv2
import numpy as np
import warnings
from PIL import Image
from gym.spaces import Box, Dict
from multiworld.core.multitask_env import MultitaskEnv
from multiworld.core.wrapper_env import ProxyEnv
from multiworld.envs.env_util import concatenate_box_spaces
from multiworld.envs.env_util import get_stat_in_paths, create_stats_ordered_dict
class ImageEnv(ProxyEnv, MultitaskEnv):
def __init__(
self,
wrapped_env,
imsize=84,
init_camera=None,
transpose=False,
grayscale=False,
normalize=False,
reward_type='wrapped_env',
threshold=10,
image_length=None,
presampled_goals=None,
non_presampled_goal_img_is_garbage=False,
recompute_reward=True,
):
"""
:param wrapped_env:
:param imsize:
:param init_camera:
:param transpose:
:param grayscale:
:param normalize:
:param reward_type:
:param threshold:
:param image_length:
:param presampled_goals:
:param non_presampled_goal_img_is_garbage: Set this option to True if
you want to allow the code to work without presampled goals,
but where the underlying env doesn't support set_to_goal. As the name,
implies this will make it so that the goal image is garbage if you
don't provide pre-sampled goals. The main use case is if you want to
use an ImageEnv to pre-sample a bunch of goals.
"""
self.quick_init(locals())
super().__init__(wrapped_env)
self.wrapped_env.hide_goal_markers = True
self.imsize = imsize
self.init_camera = init_camera
self.transpose = transpose
self.grayscale = grayscale
self.normalize = normalize
self.recompute_reward = recompute_reward
self.non_presampled_goal_img_is_garbage = non_presampled_goal_img_is_garbage
if image_length is not None:
self.image_length = image_length
else:
if grayscale:
self.image_length = self.imsize * self.imsize
else:
self.image_length = 3 * self.imsize * self.imsize
self.channels = 1 if grayscale else 3
# This is torch format rather than PIL image
self.image_shape = (self.imsize, self.imsize)
# Flattened past image queue
# init camera
if init_camera is not None:
sim = self._wrapped_env.initialize_camera(init_camera)
# viewer = mujoco_py.MjRenderContextOffscreen(sim, device_id=-1)
# init_camera(viewer.cam)
# sim.add_render_context(viewer)
img_space = Box(0, 1, (self.image_length,), dtype=np.float32)
self._img_goal = img_space.sample() #has to be done for presampling
spaces = self.wrapped_env.observation_space.spaces.copy()
spaces['observation'] = img_space
spaces['desired_goal'] = img_space
spaces['achieved_goal'] = img_space
spaces['image_observation'] = img_space
spaces['image_desired_goal'] = img_space
spaces['image_achieved_goal'] = img_space
self.return_image_proprio = False
if 'proprio_observation' in spaces.keys():
self.return_image_proprio = True
spaces['image_proprio_observation'] = concatenate_box_spaces(
spaces['image_observation'],
spaces['proprio_observation']
)
spaces['image_proprio_desired_goal'] = concatenate_box_spaces(
spaces['image_desired_goal'],
spaces['proprio_desired_goal']
)
spaces['image_proprio_achieved_goal'] = concatenate_box_spaces(
spaces['image_achieved_goal'],
spaces['proprio_achieved_goal']
)
self.observation_space = Dict(spaces)
self.action_space = self.wrapped_env.action_space
self.reward_type = reward_type
self.threshold = threshold
self._presampled_goals = presampled_goals
if self._presampled_goals is None:
self.num_goals_presampled = 0
else:
self.num_goals_presampled = presampled_goals[random.choice(list(presampled_goals))].shape[0]
self._last_image = None
def step(self, action):
obs, reward, done, info = self.wrapped_env.step(action)
new_obs = self._update_obs(obs)
if self.recompute_reward:
reward = self.compute_reward(action, new_obs)
self._update_info(info, obs)
return new_obs, reward, done, info
def _update_info(self, info, obs):
achieved_goal = obs['image_achieved_goal']
desired_goal = self._img_goal
image_dist = np.linalg.norm(achieved_goal-desired_goal)
image_success = (image_dist<self.threshold).astype(float)-1
info['image_dist'] = image_dist
info['image_success'] = image_success
def reset(self):
obs = self.wrapped_env.reset()
if self.num_goals_presampled > 0:
goal = self.sample_goal()
self._img_goal = goal['image_desired_goal']
self.wrapped_env.set_goal(goal)
for key in goal:
obs[key] = goal[key]
elif self.non_presampled_goal_img_is_garbage:
# This is use mainly for debugging or pre-sampling goals.
self._img_goal = self._get_flat_img()
else:
env_state = self.wrapped_env.get_env_state()
self.wrapped_env.set_to_goal(self.wrapped_env.get_goal())
self._img_goal = self._get_flat_img()
self.wrapped_env.set_env_state(env_state)
return self._update_obs(obs)
def _get_obs(self):
return self._update_obs(self.wrapped_env._get_obs())
def _update_obs(self, obs):
img_obs = self._get_flat_img()
obs['image_observation'] = img_obs
obs['image_desired_goal'] = self._img_goal
obs['image_achieved_goal'] = img_obs
obs['observation'] = img_obs
obs['desired_goal'] = self._img_goal
obs['achieved_goal'] = img_obs
if self.return_image_proprio:
obs['image_proprio_observation'] = np.concatenate(
(obs['image_observation'], obs['proprio_observation'])
)
obs['image_proprio_desired_goal'] = np.concatenate(
(obs['image_desired_goal'], obs['proprio_desired_goal'])
)
obs['image_proprio_achieved_goal'] = np.concatenate(
(obs['image_achieved_goal'], obs['proprio_achieved_goal'])
)
return obs
def _get_flat_img(self):
image_obs = self._wrapped_env.get_image(
width=self.imsize,
height=self.imsize,
)
self._last_image = image_obs
if self.grayscale:
image_obs = Image.fromarray(image_obs).convert('L')
image_obs = np.array(image_obs)
if self.normalize:
image_obs = image_obs / 255.0
if self.transpose:
image_obs = image_obs.transpose()
assert image_obs.shape[0] == self.channels
return image_obs.flatten()
def render(self, mode='wrapped'):
if mode == 'wrapped':
self.wrapped_env.render()
elif mode == 'cv2':
if self._last_image is None:
self._last_image = self._wrapped_env.get_image(
width=self.imsize,
height=self.imsize,
)
cv2.imshow('ImageEnv', self._last_image)
cv2.waitKey(1)
else:
raise ValueError("Invalid render mode: {}".format(mode))
def show_obs(self, normalized_img_vec_, name='img'):
print(name)
normalized_img_vec = copy.deepcopy(normalized_img_vec_)
img = (normalized_img_vec * 255).astype(np.uint8)
img = img.reshape(3, self.imsize, self.imsize).transpose()
img = img[::-1, :, ::-1]
cv2.imshow(name, img)
cv2.waitKey()
"""
Multitask functions
"""
def get_goal(self):
goal = self.wrapped_env.get_goal()
goal['desired_goal'] = self._img_goal
goal['image_desired_goal'] = self._img_goal
return goal
def set_goal(self, goal):
''' Assume goal contains both image_desired_goal and any goals required for wrapped envs'''
self._img_goal = goal['image_desired_goal']
self.wrapped_env.set_goal(goal)
def sample_goals(self, batch_size):
if self.num_goals_presampled > 0:
idx = np.random.randint(0, self.num_goals_presampled, batch_size)
sampled_goals = {
k: v[idx] for k, v in self._presampled_goals.items()
}
return sampled_goals
if batch_size > 1:
warnings.warn("Sampling goal images is slow")
img_goals = np.zeros((batch_size, self.image_length))
goals = self.wrapped_env.sample_goals(batch_size)
pre_state = self.wrapped_env.get_env_state()
for i in range(batch_size):
goal = self.unbatchify_dict(goals, i)
self.wrapped_env.set_to_goal(goal)
img_goals[i, :] = self._get_flat_img()
self.wrapped_env.set_env_state(pre_state)
goals['desired_goal'] = img_goals
goals['image_desired_goal'] = img_goals
return goals
def compute_rewards(self, actions, obs):
achieved_goals = obs['achieved_goal']
desired_goals = obs['desired_goal']
dist = np.linalg.norm(achieved_goals - desired_goals, axis=1)
if self.reward_type=='image_distance':
return -dist
elif self.reward_type=='image_sparse':
return -(dist > self.threshold).astype(float)
elif self.reward_type=='wrapped_env':
return self.wrapped_env.compute_rewards(actions, obs)
else:
raise NotImplementedError()
def get_diagnostics(self, paths, **kwargs):
statistics = self.wrapped_env.get_diagnostics(paths, **kwargs)
for stat_name_in_paths in ["image_dist", "image_success"]:
stats = get_stat_in_paths(paths, 'env_infos', stat_name_in_paths)
statistics.update(create_stats_ordered_dict(
stat_name_in_paths,
stats,
always_show_all_stats=True,
))
final_stats = [s[-1] for s in stats]
statistics.update(create_stats_ordered_dict(
"Final " + stat_name_in_paths,
final_stats,
always_show_all_stats=True,
))
return statistics
def normalize_image(image, dtype=np.float64):
assert image.dtype == np.uint8
return dtype(image) / 255.0
def unormalize_image(image):
assert image.dtype != np.uint8
return np.uint8(image * 255.0)
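# A hedged round-trip sketch for the two helpers above; `img` is a
# hypothetical uint8 frame such as one returned by get_image():
#
#   img = np.zeros((84, 84, 3), dtype=np.uint8)
#   norm = normalize_image(img)        # float64 values in [0, 1]
#   restored = unormalize_image(norm)  # back to uint8
#   assert restored.dtype == np.uint8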
| [((286, 11, 286, 34), 'numpy.uint8', 'np.uint8', ({(286, 20, 286, 33): '(image * 255.0)'}, {}), '(image * 255.0)', True, 'import numpy as np\n'), ((79, 20, 79, 69), 'gym.spaces.Box', 'Box', (), '', False, 'from gym.spaces import Box, Dict\n'), ((105, 33, 105, 45), 'gym.spaces.Dict', 'Dict', ({(105, 38, 105, 44): 'spaces'}, {}), '(spaces)', False, 'from gym.spaces import Box, Dict\n'), ((127, 21, 127, 63), 'numpy.linalg.norm', 'np.linalg.norm', ({(127, 36, 127, 62): 'achieved_goal - desired_goal'}, {}), '(achieved_goal - desired_goal)', True, 'import numpy as np\n'), ((212, 8, 212, 29), 'cv2.imshow', 'cv2.imshow', ({(212, 19, 212, 23): 'name', (212, 25, 212, 28): 'img'}, {}), '(name, img)', False, 'import cv2\n'), ((213, 8, 213, 21), 'cv2.waitKey', 'cv2.waitKey', ({}, {}), '()', False, 'import cv2\n'), ((238, 20, 238, 61), 'numpy.zeros', 'np.zeros', ({(238, 29, 238, 60): '(batch_size, self.image_length)'}, {}), '((batch_size, self.image_length))', True, 'import numpy as np\n'), ((253, 15, 253, 69), 'numpy.linalg.norm', 'np.linalg.norm', (), '', True, 'import numpy as np\n'), ((92, 50, 95, 13), 'multiworld.envs.env_util.concatenate_box_spaces', 'concatenate_box_spaces', ({(93, 16, 93, 43): "spaces['image_observation']", (94, 16, 94, 45): "spaces['proprio_observation']"}, {}), "(spaces['image_observation'], spaces[\n 'proprio_observation'])", False, 'from multiworld.envs.env_util import concatenate_box_spaces\n'), ((96, 51, 99, 13), 'multiworld.envs.env_util.concatenate_box_spaces', 'concatenate_box_spaces', ({(97, 16, 97, 44): "spaces['image_desired_goal']", (98, 16, 98, 46): "spaces['proprio_desired_goal']"}, {}), "(spaces['image_desired_goal'], spaces[\n 'proprio_desired_goal'])", False, 'from multiworld.envs.env_util import concatenate_box_spaces\n'), ((100, 52, 103, 13), 'multiworld.envs.env_util.concatenate_box_spaces', 'concatenate_box_spaces', ({(101, 16, 101, 45): "spaces['image_achieved_goal']", (102, 16, 102, 47): "spaces['proprio_achieved_goal']"}, {}), "(spaces['image_achieved_goal'], spaces[\n 'proprio_achieved_goal'])", False, 'from multiworld.envs.env_util import concatenate_box_spaces\n'), ((164, 47, 166, 13), 'numpy.concatenate', 'np.concatenate', ({(165, 16, 165, 70): "(obs['image_observation'], obs['proprio_observation'])"}, {}), "((obs['image_observation'], obs['proprio_observation']))", True, 'import numpy as np\n'), ((167, 48, 169, 13), 'numpy.concatenate', 'np.concatenate', ({(168, 16, 168, 72): "(obs['image_desired_goal'], obs['proprio_desired_goal'])"}, {}), "((obs['image_desired_goal'], obs['proprio_desired_goal']))", True, 'import numpy as np\n'), ((170, 49, 172, 13), 'numpy.concatenate', 'np.concatenate', ({(171, 16, 171, 74): "(obs['image_achieved_goal'], obs['proprio_achieved_goal'])"}, {}), "((obs['image_achieved_goal'], obs['proprio_achieved_goal']))", True, 'import numpy as np\n'), ((184, 24, 184, 43), 'numpy.array', 'np.array', ({(184, 33, 184, 42): 'image_obs'}, {}), '(image_obs)', True, 'import numpy as np\n'), ((231, 18, 231, 77), 'numpy.random.randint', 'np.random.randint', ({(231, 36, 231, 37): '0', (231, 39, 231, 64): 'self.num_goals_presampled', (231, 66, 231, 76): 'batch_size'}, {}), '(0, self.num_goals_presampled, batch_size)', True, 'import numpy as np\n'), ((237, 12, 237, 57), 'warnings.warn', 'warnings.warn', ({(237, 26, 237, 56): '"""Sampling goal images is slow"""'}, {}), "('Sampling goal images is slow')", False, 'import warnings\n'), ((266, 20, 266, 77), 'multiworld.envs.env_util.get_stat_in_paths', 'get_stat_in_paths', ({(266, 38, 266, 
43): 'paths', (266, 45, 266, 56): '"""env_infos"""', (266, 58, 266, 76): 'stat_name_in_paths'}, {}), "(paths, 'env_infos', stat_name_in_paths)", False, 'from multiworld.envs.env_util import get_stat_in_paths, create_stats_ordered_dict\n'), ((201, 12, 201, 52), 'cv2.imshow', 'cv2.imshow', ({(201, 23, 201, 33): '"""ImageEnv"""', (201, 35, 201, 51): 'self._last_image'}, {}), "('ImageEnv', self._last_image)", False, 'import cv2\n'), ((202, 12, 202, 26), 'cv2.waitKey', 'cv2.waitKey', ({(202, 24, 202, 25): '(1)'}, {}), '(1)', False, 'import cv2\n'), ((267, 30, 271, 13), 'multiworld.envs.env_util.create_stats_ordered_dict', 'create_stats_ordered_dict', (), '', False, 'from multiworld.envs.env_util import get_stat_in_paths, create_stats_ordered_dict\n'), ((273, 30, 277, 13), 'multiworld.envs.env_util.create_stats_ordered_dict', 'create_stats_ordered_dict', (), '', False, 'from multiworld.envs.env_util import get_stat_in_paths, create_stats_ordered_dict\n'), ((183, 24, 183, 50), 'PIL.Image.fromarray', 'Image.fromarray', ({(183, 40, 183, 49): 'image_obs'}, {}), '(image_obs)', False, 'from PIL import Image\n')] |
huynguyen82/Modified-Kaldi-GStream-OnlineServer | sample_full_post_processor.py | e7429a5e44b9567b603523c0046fb42d8503a275 | #!/usr/bin/env python
import sys
import json
import logging
from math import exp
import requests as rq
import re
### For NLP post-processing
header={"Content-Type": "application/json"}
message='{"sample":"Hello bigdata"}'
api_url="http://192.168.1.197:11992/norm"
###
def NLP_process_output(pre_str):
try:
jmsg=json.loads(message)
jmsg['sample']=pre_str
r = rq.post(api_url,json=jmsg, headers=header)
results = json.loads(r.text)['result']
logging.info("NLP=%s" % results)
return results
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
logging.error("Failed to get NLP post-processing: %s : %s " % (exc_type, exc_value))
return pre_str
def post_process_json(str):
try:
event = json.loads(str)
if "result" in event:
if len(event["result"]["hypotheses"]) > 1:
likelihood1 = event["result"]["hypotheses"][0]["likelihood"]
likelihood2 = event["result"]["hypotheses"][1]["likelihood"]
confidence = likelihood1 - likelihood2
confidence = 1 - exp(-confidence)
else:
confidence = 1.0e+10
event["result"]["hypotheses"][0]["confidence"] = confidence
org_trans = event["result"]["hypotheses"][0]["transcript"]
logging.info("Recognized result=%s" % org_trans )
out_trans = NLP_process_output(org_trans) + '.'
            logging.info("Pass into function is %s" % out_trans)
event["result"]["hypotheses"][0]["transcript"] = out_trans
del event["result"]["hypotheses"][1:]
return json.dumps(event)
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
logging.error("Failed to process JSON result: %s : %s " % (exc_type, exc_value))
return str
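# Hedged worked example of the confidence formula above: with two hypotheses
# of likelihoods L1 = 5.0 and L2 = 3.0,
#   confidence = 1 - exp(-(L1 - L2)) = 1 - exp(-2.0) ~= 0.865
# while a single hypothesis gets the sentinel confidence 1.0e+10.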
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG, format="%(levelname)8s %(asctime)s %(message)s ")
lines = []
while True:
l = sys.stdin.readline()
if not l: break # EOF
if l.strip() == "":
if len(lines) > 0:
result_json = post_process_json("".join(lines))
print result_json
print
sys.stdout.flush()
lines = []
else:
lines.append(l)
if len(lines) > 0:
result_json = post_process_json("".join(lines))
print result_json
lines = []
| [] |
AlexMikhalev/cord19redisknowledgegraph | lang_detect_gears.py | a143415aca8d4a6db820dc7a25280045f421a665 | from langdetect import detect
def detect_language(x):
#detect language of the article
try:
lang=detect(x['value'])
except:
lang="empty"
execute('SET', 'lang_article:' + x['key'], lang)
if lang!='en':
execute('SADD','titles_to_delete', x['key'])
# GB and execute are builtins injected by the RedisGears runtime; this script
# is meant to be registered on a Redis server with the RedisGears module loaded.
gb = GB()
gb.foreach(detect_language)
gb.run('title:*') | [((6, 13, 6, 31), 'langdetect.detect', 'detect', ({(6, 20, 6, 30): "x['value']"}, {}), "(x['value'])", False, 'from langdetect import detect\n')] |
shanv82/core | daemon/core/coreobj.py | 70abb8cc1426ffceb53a03e84edc26f56f9ed4c0 | """
Defines the basic objects for CORE emulation: the PyCoreObj base class, along with PyCoreNode,
PyCoreNet, and PyCoreNetIf.
"""
import os
import shutil
import socket
import threading
from socket import AF_INET
from socket import AF_INET6
from core.data import NodeData, LinkData
from core.enumerations import LinkTypes
from core.misc import ipaddress
class Position(object):
"""
Helper class for Cartesian coordinate position
"""
def __init__(self, x=None, y=None, z=None):
"""
Creates a Position instance.
:param x: x position
:param y: y position
:param z: z position
:return:
"""
self.x = x
self.y = y
self.z = z
def set(self, x=None, y=None, z=None):
"""
Returns True if the position has actually changed.
:param float x: x position
:param float y: y position
:param float z: z position
:return: True if position changed, False otherwise
:rtype: bool
"""
if self.x == x and self.y == y and self.z == z:
return False
self.x = x
self.y = y
self.z = z
return True
def get(self):
"""
Retrieve x,y,z position.
:return: x,y,z position tuple
:rtype: tuple
"""
return self.x, self.y, self.z
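# A minimal, hedged usage sketch of Position:
#   p = Position(0, 0, 0)
#   p.set(x=10, y=20, z=0)   # returns True, the position changed
#   p.set(x=10, y=20, z=0)   # returns False, nothing changed
#   x, y, z = p.get()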
class PyCoreObj(object):
"""
Base class for CORE objects (nodes and networks)
"""
apitype = None
# TODO: appears start has no usage, verify and remove
def __init__(self, session, objid=None, name=None, start=True):
"""
Creates a PyCoreObj instance.
:param core.session.Session session: CORE session object
:param int objid: object id
:param str name: object name
:param bool start: start value
:return:
"""
self.session = session
if objid is None:
objid = session.get_object_id()
self.objid = objid
if name is None:
name = "o%s" % self.objid
self.name = name
self.type = None
self.server = None
self.services = None
# ifindex is key, PyCoreNetIf instance is value
self._netif = {}
self.ifindex = 0
self.canvas = None
self.icon = None
self.opaque = None
self.position = Position()
def startup(self):
"""
Each object implements its own startup method.
:return: nothing
"""
raise NotImplementedError
def shutdown(self):
"""
Each object implements its own shutdown method.
:return: nothing
"""
raise NotImplementedError
def setposition(self, x=None, y=None, z=None):
"""
Set the (x,y,z) position of the object.
:param float x: x position
:param float y: y position
:param float z: z position
:return: True if position changed, False otherwise
:rtype: bool
"""
return self.position.set(x=x, y=y, z=z)
def getposition(self):
"""
Return an (x,y,z) tuple representing this object's position.
:return: x,y,z position tuple
:rtype: tuple
"""
return self.position.get()
def ifname(self, ifindex):
"""
Retrieve interface name for index.
:param int ifindex: interface index
:return: interface name
:rtype: str
"""
return self._netif[ifindex].name
def netifs(self, sort=False):
"""
Retrieve network interfaces, sorted if desired.
:param bool sort: boolean used to determine if interfaces should be sorted
:return: network interfaces
:rtype: list
"""
if sort:
return map(lambda k: self._netif[k], sorted(self._netif.keys()))
else:
return self._netif.itervalues()
def numnetif(self):
"""
Return the attached interface count.
:return: number of network interfaces
:rtype: int
"""
return len(self._netif)
def getifindex(self, netif):
"""
Retrieve index for an interface.
:param PyCoreNetIf netif: interface to get index for
:return: interface index if found, -1 otherwise
:rtype: int
"""
for ifindex in self._netif:
if self._netif[ifindex] is netif:
return ifindex
return -1
def newifindex(self):
"""
Create a new interface index.
:return: interface index
:rtype: int
"""
while self.ifindex in self._netif:
self.ifindex += 1
ifindex = self.ifindex
self.ifindex += 1
return ifindex
def data(self, message_type, lat=None, lon=None, alt=None):
"""
Build a data object for this node.
:param message_type: purpose for the data object we are creating
:param str lat: latitude
:param str lon: longitude
:param str alt: altitude
:return: node data object
:rtype: core.data.NodeData
"""
if self.apitype is None:
return None
x, y, _ = self.getposition()
model = self.type
emulation_server = self.server
services = self.services
if services is not None:
services = "|".join([service.name for service in services])
node_data = NodeData(
message_type=message_type,
id=self.objid,
node_type=self.apitype,
name=self.name,
emulation_id=self.objid,
canvas=self.canvas,
icon=self.icon,
opaque=self.opaque,
x_position=x,
y_position=y,
latitude=lat,
longitude=lon,
altitude=alt,
model=model,
emulation_server=emulation_server,
services=services
)
return node_data
def all_link_data(self, flags):
"""
Build CORE Link data for this object. There is no default
method for PyCoreObjs as PyCoreNodes do not implement this but
PyCoreNets do.
:param flags: message flags
:return: list of link data
:rtype: core.data.LinkData
"""
return []
class PyCoreNode(PyCoreObj):
"""
Base class for CORE nodes.
"""
def __init__(self, session, objid=None, name=None, start=True):
"""
Create a PyCoreNode instance.
:param core.session.Session session: CORE session object
:param int objid: object id
:param str name: object name
:param bool start: boolean for starting
"""
super(PyCoreNode, self).__init__(session, objid, name, start=start)
self.services = []
self.nodedir = None
self.tmpnodedir = False
def addservice(self, service):
"""
Add a services to the service list.
:param core.service.CoreService service: service to add
:return: nothing
"""
if service is not None:
self.services.append(service)
def makenodedir(self):
"""
Create the node directory.
:return: nothing
"""
if self.nodedir is None:
self.nodedir = os.path.join(self.session.session_dir, self.name + ".conf")
os.makedirs(self.nodedir)
self.tmpnodedir = True
else:
self.tmpnodedir = False
def rmnodedir(self):
"""
Remove the node directory, unless preserve directory has been set.
:return: nothing
"""
preserve = self.session.options.get_config("preservedir") == "1"
if preserve:
return
if self.tmpnodedir:
shutil.rmtree(self.nodedir, ignore_errors=True)
def addnetif(self, netif, ifindex):
"""
Add network interface to node and set the network interface index if successful.
:param PyCoreNetIf netif: network interface to add
:param int ifindex: interface index
:return: nothing
"""
if ifindex in self._netif:
raise ValueError("ifindex %s already exists" % ifindex)
self._netif[ifindex] = netif
# TODO: this should have probably been set ahead, seems bad to me, check for failure and fix
netif.netindex = ifindex
def delnetif(self, ifindex):
"""
Delete a network interface
:param int ifindex: interface index to delete
:return: nothing
"""
if ifindex not in self._netif:
raise ValueError("ifindex %s does not exist" % ifindex)
netif = self._netif.pop(ifindex)
netif.shutdown()
del netif
# TODO: net parameter is not used, remove
def netif(self, ifindex, net=None):
"""
Retrieve network interface.
:param int ifindex: index of interface to retrieve
:param PyCoreNetIf net: network node
:return: network interface, or None if not found
:rtype: PyCoreNetIf
"""
if ifindex in self._netif:
return self._netif[ifindex]
else:
return None
def attachnet(self, ifindex, net):
"""
Attach a network.
:param int ifindex: interface of index to attach
:param PyCoreNetIf net: network to attach
:return:
"""
if ifindex not in self._netif:
raise ValueError("ifindex %s does not exist" % ifindex)
self._netif[ifindex].attachnet(net)
def detachnet(self, ifindex):
"""
Detach network interface.
:param int ifindex: interface index to detach
:return: nothing
"""
if ifindex not in self._netif:
raise ValueError("ifindex %s does not exist" % ifindex)
self._netif[ifindex].detachnet()
def setposition(self, x=None, y=None, z=None):
"""
Set position.
:param x: x position
:param y: y position
:param z: z position
:return: nothing
"""
changed = super(PyCoreNode, self).setposition(x, y, z)
if changed:
for netif in self.netifs(sort=True):
netif.setposition(x, y, z)
def commonnets(self, obj, want_ctrl=False):
"""
Given another node or net object, return common networks between
this node and that object. A list of tuples is returned, with each tuple
consisting of (network, interface1, interface2).
:param obj: object to get common network with
:param want_ctrl: flag set to determine if control network are wanted
:return: tuples of common networks
:rtype: list
"""
common = []
for netif1 in self.netifs():
if not want_ctrl and hasattr(netif1, "control"):
continue
for netif2 in obj.netifs():
if netif1.net == netif2.net:
common.append((netif1.net, netif1, netif2))
return common
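    # A hedged usage sketch of commonnets(); node_a and node_b are hypothetical
    # PyCoreNode instances already joined through some network:
    #   for net, my_if, their_if in node_a.commonnets(node_b):
    #       print net.name, my_if.name, their_if.name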
def check_cmd(self, args):
"""
Runs shell command on node.
:param list[str]|str args: command to run
:return: combined stdout and stderr
:rtype: str
:raises CoreCommandError: when a non-zero exit status occurs
"""
raise NotImplementedError
def cmd(self, args, wait=True):
"""
Runs shell command on node, with option to not wait for a result.
:param list[str]|str args: command to run
:param bool wait: wait for command to exit, defaults to True
:return: exit status for command
:rtype: int
"""
raise NotImplementedError
def cmd_output(self, args):
"""
Runs shell command on node and get exit status and output.
:param list[str]|str args: command to run
:return: exit status and combined stdout and stderr
:rtype: tuple[int, str]
"""
raise NotImplementedError
def termcmdstring(self, sh):
"""
Create a terminal command string.
:param str sh: shell to execute command in
:return: str
"""
raise NotImplementedError
class PyCoreNet(PyCoreObj):
"""
Base class for networks
"""
linktype = LinkTypes.WIRED.value
def __init__(self, session, objid, name, start=True):
"""
Create a PyCoreNet instance.
:param core.session.Session session: CORE session object
:param int objid: object id
:param str name: object name
:param bool start: should object start
"""
super(PyCoreNet, self).__init__(session, objid, name, start=start)
self._linked = {}
self._linked_lock = threading.Lock()
def startup(self):
"""
Each object implements its own startup method.
:return: nothing
"""
raise NotImplementedError
def shutdown(self):
"""
Each object implements its own shutdown method.
:return: nothing
"""
raise NotImplementedError
def attach(self, netif):
"""
Attach network interface.
:param PyCoreNetIf netif: network interface to attach
:return: nothing
"""
i = self.newifindex()
self._netif[i] = netif
netif.netifi = i
with self._linked_lock:
self._linked[netif] = {}
def detach(self, netif):
"""
Detach network interface.
:param PyCoreNetIf netif: network interface to detach
:return: nothing
"""
del self._netif[netif.netifi]
netif.netifi = None
with self._linked_lock:
del self._linked[netif]
def all_link_data(self, flags):
"""
Build link data objects for this network. Each link object describes a link
between this network and a node.
"""
all_links = []
# build a link message from this network node to each node having a
# connected interface
for netif in self.netifs(sort=True):
if not hasattr(netif, "node"):
continue
otherobj = netif.node
uni = False
if otherobj is None:
# two layer-2 switches/hubs linked together via linknet()
if not hasattr(netif, "othernet"):
continue
otherobj = netif.othernet
if otherobj.objid == self.objid:
continue
netif.swapparams('_params_up')
upstream_params = netif.getparams()
netif.swapparams('_params_up')
if netif.getparams() != upstream_params:
uni = True
unidirectional = 0
if uni:
unidirectional = 1
interface2_ip4 = None
interface2_ip4_mask = None
interface2_ip6 = None
interface2_ip6_mask = None
for address in netif.addrlist:
ip, _sep, mask = address.partition("/")
mask = int(mask)
if ipaddress.is_ipv4_address(ip):
family = AF_INET
ipl = socket.inet_pton(family, ip)
interface2_ip4 = ipaddress.IpAddress(af=family, address=ipl)
interface2_ip4_mask = mask
else:
family = AF_INET6
ipl = socket.inet_pton(family, ip)
interface2_ip6 = ipaddress.IpAddress(af=family, address=ipl)
interface2_ip6_mask = mask
link_data = LinkData(
message_type=flags,
node1_id=self.objid,
node2_id=otherobj.objid,
link_type=self.linktype,
unidirectional=unidirectional,
interface2_id=otherobj.getifindex(netif),
interface2_mac=netif.hwaddr,
interface2_ip4=interface2_ip4,
interface2_ip4_mask=interface2_ip4_mask,
interface2_ip6=interface2_ip6,
interface2_ip6_mask=interface2_ip6_mask,
delay=netif.getparam("delay"),
bandwidth=netif.getparam("bw"),
dup=netif.getparam("duplicate"),
jitter=netif.getparam("jitter")
)
all_links.append(link_data)
if not uni:
continue
netif.swapparams('_params_up')
link_data = LinkData(
message_type=0,
node1_id=otherobj.objid,
node2_id=self.objid,
unidirectional=1,
delay=netif.getparam("delay"),
bandwidth=netif.getparam("bw"),
dup=netif.getparam("duplicate"),
jitter=netif.getparam("jitter")
)
netif.swapparams('_params_up')
all_links.append(link_data)
return all_links
class PyCoreNetIf(object):
"""
Base class for network interfaces.
"""
def __init__(self, node, name, mtu):
"""
Creates a PyCoreNetIf instance.
:param core.coreobj.PyCoreNode node: node for interface
:param str name: interface name
:param mtu: mtu value
"""
self.node = node
self.name = name
if not isinstance(mtu, (int, long)):
raise ValueError
self.mtu = mtu
self.net = None
self._params = {}
self.addrlist = []
self.hwaddr = None
# placeholder position hook
self.poshook = lambda a, b, c, d: None
# used with EMANE
self.transport_type = None
# interface index on the network
self.netindex = None
# index used to find flow data
self.flow_id = None
def startup(self):
"""
Startup method for the interface.
:return: nothing
"""
pass
def shutdown(self):
"""
Shutdown method for the interface.
:return: nothing
"""
pass
def attachnet(self, net):
"""
Attach network.
:param core.coreobj.PyCoreNet net: network to attach
:return: nothing
"""
if self.net:
self.detachnet()
self.net = None
net.attach(self)
self.net = net
def detachnet(self):
"""
Detach from a network.
:return: nothing
"""
if self.net is not None:
self.net.detach(self)
def addaddr(self, addr):
"""
Add address.
:param str addr: address to add
:return: nothing
"""
self.addrlist.append(addr)
def deladdr(self, addr):
"""
Delete address.
:param str addr: address to delete
:return: nothing
"""
self.addrlist.remove(addr)
def sethwaddr(self, addr):
"""
Set hardware address.
:param core.misc.ipaddress.MacAddress addr: hardware address to set to.
:return: nothing
"""
self.hwaddr = addr
def getparam(self, key):
"""
        Retrieve a parameter value, or None if the parameter does not exist.
:param key: parameter to get value for
:return: parameter value
"""
return self._params.get(key)
def getparams(self):
"""
Return (key, value) pairs for parameters.
"""
parameters = []
for k in sorted(self._params.keys()):
parameters.append((k, self._params[k]))
return parameters
def setparam(self, key, value):
"""
Set a parameter value, returns True if the parameter has changed.
:param key: parameter name to set
:param value: parameter value
:return: True if parameter changed, False otherwise
"""
# treat None and 0 as unchanged values
current_value = self._params.get(key)
        if current_value == value or (current_value <= 0 and value <= 0):
return False
self._params[key] = value
return True
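    # Hedged example of the setparam() semantics above: None and non-positive
    # values both count as "unset", so on a freshly created interface
    #   netif.setparam("bw", 0)      # False -- unset -> 0 is not a change
    #   netif.setparam("bw", 1000)   # True  -- parameter now set
    #   netif.setparam("bw", 1000)   # False -- same value, unchanged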
def swapparams(self, name):
"""
Swap out parameters dict for name. If name does not exist,
        initialize it. This is for supporting separate upstream/downstream
parameters when two layer-2 nodes are linked together.
:param str name: name of parameter to swap
:return: nothing
"""
tmp = self._params
if not hasattr(self, name):
setattr(self, name, {})
self._params = getattr(self, name)
setattr(self, name, tmp)
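    # A hedged sketch mirroring how swapparams() is used in
    # PyCoreNet.all_link_data() to read the upstream parameter set:
    #   netif.swapparams('_params_up')   # swap in the upstream dict
    #   upstream = netif.getparams()
    #   netif.swapparams('_params_up')   # swap back to the downstream dict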
def setposition(self, x, y, z):
"""
Dispatch position hook handler.
:param x: x position
:param y: y position
:param z: z position
:return: nothing
"""
self.poshook(self, x, y, z)
| [((218, 20, 235, 9), 'core.data.NodeData', 'NodeData', (), '', False, 'from core.data import NodeData, LinkData\n'), ((466, 28, 466, 44), 'threading.Lock', 'threading.Lock', ({}, {}), '()', False, 'import threading\n'), ((288, 27, 288, 86), 'os.path.join', 'os.path.join', ({(288, 40, 288, 64): 'self.session.session_dir', (288, 66, 288, 85): "self.name + '.conf'"}, {}), "(self.session.session_dir, self.name + '.conf')", False, 'import os\n'), ((289, 12, 289, 37), 'os.makedirs', 'os.makedirs', ({(289, 24, 289, 36): 'self.nodedir'}, {}), '(self.nodedir)', False, 'import os\n'), ((305, 12, 305, 59), 'shutil.rmtree', 'shutil.rmtree', (), '', False, 'import shutil\n'), ((547, 19, 547, 48), 'core.misc.ipaddress.is_ipv4_address', 'ipaddress.is_ipv4_address', ({(547, 45, 547, 47): 'ip'}, {}), '(ip)', False, 'from core.misc import ipaddress\n'), ((549, 26, 549, 54), 'socket.inet_pton', 'socket.inet_pton', ({(549, 43, 549, 49): 'family', (549, 51, 549, 53): 'ip'}, {}), '(family, ip)', False, 'import socket\n'), ((550, 37, 550, 80), 'core.misc.ipaddress.IpAddress', 'ipaddress.IpAddress', (), '', False, 'from core.misc import ipaddress\n'), ((554, 26, 554, 54), 'socket.inet_pton', 'socket.inet_pton', ({(554, 43, 554, 49): 'family', (554, 51, 554, 53): 'ip'}, {}), '(family, ip)', False, 'import socket\n'), ((555, 37, 555, 80), 'core.misc.ipaddress.IpAddress', 'ipaddress.IpAddress', (), '', False, 'from core.misc import ipaddress\n')] |
wotsushi/competitive-programming | abc/128/b.py | 17ec8fd5e1c23aee626aee70b1c0da8d7f8b8c86 | # Input
N = int(input())
S, P = (
zip(*(
(s, int(p))
for s, p in (input().split() for _ in range(N))
)) if N else
((), ())
)
ans = '\n'.join(
str(i)
for _, _, i in sorted(
zip(
S,
P,
range(1, N + 1)
),
key=lambda t: (t[0], -t[1])
)
)
# Output
print(ans)
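# Hedged worked example: for the (hypothetical) input
#   2
#   khabarovsk 20
#   khabarovsk 40
# restaurants are sorted by city ascending, then score descending, so the
# printed indices are 2 then 1.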
| [] |
mmmds/WirelessDiscoverCrackScan | additional/hashcat_crack.py | 2eda9bd7c474d91ea08511a7322f5ba14d034f3d | # External cracking script, part of https://github.com/mmmds/WirelessDiscoverCrackScan
import datetime
import subprocess
import os
### CONFIGURATION
HASHCAT_DIR = "C:\\hashcat-5.1.0"
HASHCAT_EXE = "hashcat64.exe"
LOG_FILE = "crack_log.txt"
DICT_DIR = "./dicts"
def load_dict_list():
for r,d,f in os.walk(DICT_DIR):
return f
def parse_log():
r = {}
with open(LOG_FILE, "r") as f:
for line in f.readlines():
try:
a = line.split("/")
date = a[0]
dict_file = a[1].strip()
hash_file = a[2].split(".")[0].strip()
r[(hash_file, dict_file)] = date
except:
pass
return r
def append_log(file, dictionary):
text = "{}/{}/{}".format(str(datetime.datetime.now()), dictionary, file)
with open(LOG_FILE, "a") as f:
f.write("\n" + text)
def read_files():
result = ([],[])
files = os.listdir(".")
for f in files:
if f.endswith(".16800"):
result[0].append(f.split(".")[0])
elif f.endswith(".2500"):
result[1].append(f.split(".")[0])
return result
def process(files, t, logs, dicts):
for f in files:
for d in dicts:
if (f.split(".")[0], d) not in logs:
print("\n\n######## {} {}\n\n".format(f, d))
cwd = os.getcwd()
subprocess.Popen([HASHCAT_DIR+ "\\" + HASHCAT_EXE, "-m", t, "{}\\{}.{}".format(cwd,f, t), "{}\\{}\\{}".format(cwd,DICT_DIR, d)], cwd = HASHCAT_DIR).wait()
append_log(f, d)
else:
print("\n\n-----------{} {} in logs\n\n".format(f, d))
files = read_files()
logs = parse_log()
dicts = load_dict_list()
print(dicts)
print(files)
print(logs)
pmkid = files[0]
hs4 = files[1]
process(pmkid, "16800", logs, dicts)
process(hs4, "2500", logs, dicts)
| [((14, 17, 14, 34), 'os.walk', 'os.walk', ({(14, 25, 14, 33): 'DICT_DIR'}, {}), '(DICT_DIR)', False, 'import os\n'), ((38, 12, 38, 27), 'os.listdir', 'os.listdir', ({(38, 23, 38, 26): '"""."""'}, {}), "('.')", False, 'import os\n'), ((32, 33, 32, 56), 'datetime.datetime.now', 'datetime.datetime.now', ({}, {}), '()', False, 'import datetime\n'), ((51, 22, 51, 33), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n')] |
bennettdc/MCEdit-Unified | editortools/player.py | 90abfb170c65b877ac67193e717fa3a3ded635dd | """Copyright (c) 2010-2012 David Rio Vierra
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE."""
#-# Modified by D.C.-G. for translation purposes
from OpenGL import GL
import numpy
import os
from albow import TableView, TableColumn, Label, Button, Column, CheckBox, AttrRef, Row, ask, alert, input_text_buttons, TabPanel
from albow.table_view import TableRowView
from albow.translate import _
from config import config
from editortools.editortool import EditorTool
from editortools.tooloptions import ToolOptions
from glbackground import Panel
from glutils import DisplayList
from mceutils import loadPNGTexture, alertException, drawTerrainCuttingWire, drawCube
from operation import Operation
import pymclevel
from pymclevel.box import BoundingBox, FloatBox
from pymclevel import nbt
import logging
from player_cache import PlayerCache, ThreadRS
from nbtexplorer import loadFile, saveFile, NBTExplorerToolPanel
import pygame
log = logging.getLogger(__name__)
class PlayerRemoveOperation(Operation):
undoTag = None
def __init__(self, tool, player="Player (Single Player)"):
super(PlayerRemoveOperation, self).__init__(tool.editor, tool.editor.level)
self.tool = tool
self.player = player
self.level = self.tool.editor.level
self.canUndo = False
self.playercache = PlayerCache()
def perform(self, recordUndo=True):
if self.level.saving:
alert(_("Cannot perform action while saving is taking place"))
return
if self.player == "Player (Single Player)":
answer = ask(_("Are you sure you want to delete the default player?"), ["Yes", "Cancel"])
if answer == "Cancel":
return
self.player = "Player"
if recordUndo:
self.undoTag = self.level.getPlayerTag(self.player)
self.level.players.remove(self.player)
if self.tool.panel:
if self.player != "Player":
#self.tool.panel.players.remove(player_cache.getPlayerNameFromUUID(self.player))
#self.tool.panel.players.remove(self.playercache.getPlayerInfo(self.player)[0])
                pass  # the removal calls above are commented out; the panel is rebuilt below
else:
self.tool.panel.players.remove("Player (Single Player)")
while self.tool.panel.table.index >= len(self.tool.panel.players):
self.tool.panel.table.index -= 1
#if len(self.tool.panel.players) == 0:
# self.tool.hidePanel()
# self.tool.showPanel()
self.tool.hidePanel()
self.tool.showPanel()
self.tool.markerList.invalidate()
self.tool.movingPlayer = None
pos = self.tool.revPlayerPos[self.editor.level.dimNo][self.player]
del self.tool.playerPos[self.editor.level.dimNo][pos]
if self.player != "Player":
del self.tool.playerTexture[self.player]
else:
del self.level.root_tag["Data"]["Player"]
del self.tool.revPlayerPos[self.editor.level.dimNo][self.player]
self.canUndo = True
def undo(self):
if not (self.undoTag is None):
if self.player != "Player":
self.level.playerTagCache[self.level.getPlayerPath(self.player)] = self.undoTag
else:
self.level.root_tag["Data"]["Player"] = self.undoTag
self.level.players.append(self.player)
if self.tool.panel:
#if self.player != "Player":
# self.tool.panel.players.append(self.playercache.getPlayerInfo(self.player)[0])
#else:
# self.tool.panel.players.append("Player (Single Player)")
if "[No players]" in self.tool.panel.players:
self.tool.panel.players.remove("[No players]")
self.tool.hidePanel()
self.tool.showPanel()
self.tool.markerList.invalidate()
def redo(self):
self.perform()
class PlayerAddOperation(Operation):
playerTag = None
def __init__(self, tool):
super(PlayerAddOperation, self).__init__(tool.editor, tool.editor.level)
self.tool = tool
self.level = self.tool.editor.level
self.canUndo = False
self.playercache = PlayerCache()
def perform(self, recordUndo=True):
initial = ""
allowed_chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"
while True:
self.player = input_text_buttons("Enter a Player Name: ", 160, initial=initial, allowed_chars=allowed_chars)
if self.player is None:
return
elif len(self.player) > 16:
alert("Name too long. Maximum name length is 16.")
initial = self.player
elif len(self.player) < 1:
alert("Name too short. Minimum name length is 1.")
initial = self.player
else:
break
# print 1
data = self.playercache.getPlayerInfo(self.player)
if "<Unknown UUID>" not in data and "Server not ready" not in data:
self.uuid = data[0]
self.player = data[1]
else:
action = ask("Could not get {}'s UUID. Please make sure that you are connected to the internet and that the player \"{}\" exists.".format(self.player, self.player), ["Enter UUID manually", "Cancel"])
if action != "Enter UUID manually":
return
self.uuid = input_text_buttons("Enter a Player UUID: ", 160)
if not self.uuid:
return
# print 2
self.player = self.playercache.getPlayerInfo(self.uuid)
if self.player == self.uuid.replace("-", ""):
if ask("UUID was not found. Continue anyways?") == "Cancel":
return
# print "PlayerAddOperation.perform::self.uuid", self.uuid
if self.uuid in self.level.players:
alert("Player already exists in this World.")
return
self.playerTag = self.newPlayer()
#if self.tool.panel:
# self.tool.panel.players.append(self.player)
if self.level.oldPlayerFolderFormat:
self.level.playerTagCache[self.level.getPlayerPath(self.player)] = self.playerTag
self.level.players.append(self.player)
#if self.tool.panel:
#self.tool.panel.player_UUID[self.player] = self.player
else:
self.level.playerTagCache[self.level.getPlayerPath(self.uuid)] = self.playerTag
self.level.players.append(self.uuid)
if self.tool.panel:
self.tool.panel.player_UUID["UUID"].append(self.uuid)
self.tool.panel.player_UUID["Name"].append(self.player)
self.tool.playerPos[self.editor.level.dimNo][(0,0,0)] = self.uuid
self.tool.revPlayerPos[self.editor.level.dimNo][self.uuid] = (0,0,0)
# print 3
r = self.playercache.getPlayerSkin(self.uuid, force_download=False)
if not isinstance(r, (str, unicode)):
# print 'r 1', r
r = r.join()
# print 'r 2', r
self.tool.playerTexture[self.uuid] = loadPNGTexture(r)
self.tool.markerList.invalidate()
self.tool.recordMove = False
self.tool.movingPlayer = self.uuid
if self.tool.panel:
self.tool.hidePanel()
self.tool.showPanel()
self.canUndo = True
self.playerTag.save(self.level.getPlayerPath(self.uuid))
self.tool.nonSavedPlayers.append(self.level.getPlayerPath(self.uuid))
self.tool.inOtherDimension[self.editor.level.dimNo].append(self.uuid)
def newPlayer(self):
playerTag = nbt.TAG_Compound()
playerTag['Air'] = nbt.TAG_Short(300)
playerTag['AttackTime'] = nbt.TAG_Short(0)
playerTag['DeathTime'] = nbt.TAG_Short(0)
playerTag['Fire'] = nbt.TAG_Short(-20)
playerTag['Health'] = nbt.TAG_Short(20)
playerTag['HurtTime'] = nbt.TAG_Short(0)
playerTag['Score'] = nbt.TAG_Int(0)
playerTag['FallDistance'] = nbt.TAG_Float(0)
playerTag['OnGround'] = nbt.TAG_Byte(0)
playerTag['Dimension'] = nbt.TAG_Int(self.editor.level.dimNo)
playerTag["Inventory"] = nbt.TAG_List()
playerTag['Motion'] = nbt.TAG_List([nbt.TAG_Double(0) for i in xrange(3)])
spawn = self.level.playerSpawnPosition()
spawnX = spawn[0]
spawnZ = spawn[2]
blocks = [self.level.blockAt(spawnX, i, spawnZ) for i in xrange(self.level.Height)]
i = self.level.Height
done = False
for index, b in enumerate(reversed(blocks)):
if b != 0 and not done:
i = index
done = True
spawnY = self.level.Height - i
playerTag['Pos'] = nbt.TAG_List([nbt.TAG_Double([spawnX, spawnY, spawnZ][i]) for i in xrange(3)])
playerTag['Rotation'] = nbt.TAG_List([nbt.TAG_Float(0), nbt.TAG_Float(0)])
return playerTag
def undo(self):
self.level.players.remove(self.uuid)
self.tool.movingPlayer = None
if self.tool.panel:
#self.tool.panel.players.remove(self.player)
self.tool.panel.player_UUID["UUID"].remove(self.uuid)
self.tool.panel.player_UUID["Name"].remove(self.player)
self.tool.hidePanel()
self.tool.showPanel()
if self.tool.movingPlayer is None:
del self.tool.playerPos[self.tool.revPlayerPos[self.uuid]]
else:
del self.tool.playerPos[(0,0,0)]
del self.tool.revPlayerPos[self.uuid]
del self.tool.playerTexture[self.uuid]
os.remove(self.level.getPlayerPath(self.uuid))
if self.level.getPlayerPath(self.uuid) in self.tool.nonSavedPlayers:
self.tool.nonSavedPlayers.remove(self.level.getPlayerPath(self.uuid))
self.tool.markerList.invalidate()
def redo(self):
if not (self.playerTag is None):
self.level.playerTagCache[self.level.getPlayerPath(self.uuid)] = self.playerTag
self.level.players.append(self.uuid)
if self.tool.panel:
#self.tool.panel.players.append(self.uuid)
#self.tool.panel.player_UUID[self.player] = self.uuid
self.tool.panel.player_UUID["UUID"].append(self.uuid)
self.tool.panel.player_UUID["Name"].append(self.player)
# print 4
r = self.playercache.getPlayerSkin(self.uuid)
            if not isinstance(r, (str, unicode)):
                # str.join() requires an argument; only thread results are joined,
                # matching the guard used in PlayerAddOperation.perform() above
                r = r.join()
self.tool.playerTexture[self.uuid] = loadPNGTexture(r)
self.tool.playerPos[(0,0,0)] = self.uuid
self.tool.revPlayerPos[self.uuid] = (0,0,0)
self.playerTag.save(self.level.getPlayerPath(self.uuid))
self.tool.nonSavedPlayers.append(self.level.getPlayerPath(self.uuid))
self.tool.markerList.invalidate()
class PlayerMoveOperation(Operation):
undoPos = None
redoPos = None
def __init__(self, tool, pos, player="Player", yp=(None, None)):
super(PlayerMoveOperation, self).__init__(tool.editor, tool.editor.level)
self.tool = tool
self.canUndo = False
self.pos = pos
self.player = player
self.yp = yp
def perform(self, recordUndo=True):
if self.level.saving:
alert(_("Cannot perform action while saving is taking place"))
return
try:
level = self.tool.editor.level
try:
self.undoPos = level.getPlayerPosition(self.player)
self.undoDim = level.getPlayerDimension(self.player)
self.undoYP = level.getPlayerOrientation(self.player)
except Exception as e:
log.info(_("Couldn't get player position! ({0!r})").format(e))
yaw, pitch = self.yp
if yaw is not None and pitch is not None:
level.setPlayerOrientation((yaw, pitch), self.player)
level.setPlayerPosition(self.pos, self.player)
level.setPlayerDimension(level.dimNo, self.player)
self.tool.playerPos[tuple(self.pos)] = self.player
self.tool.revPlayerPos[self.player] = self.pos
self.tool.markerList.invalidate()
self.canUndo = True
except pymclevel.PlayerNotFound as e:
print "Player move failed: ", e
def undo(self):
if not (self.undoPos is None):
level = self.tool.editor.level
try:
self.redoPos = level.getPlayerPosition(self.player)
self.redoDim = level.getPlayerDimension(self.player)
self.redoYP = level.getPlayerOrientation(self.player)
except Exception as e:
log.info(_("Couldn't get player position! ({0!r})").format(e))
level.setPlayerPosition(self.undoPos, self.player)
level.setPlayerDimension(self.undoDim, self.player)
level.setPlayerOrientation(self.undoYP, self.player)
self.tool.markerList.invalidate()
def redo(self):
if not (self.redoPos is None):
level = self.tool.editor.level
try:
self.undoPos = level.getPlayerPosition(self.player)
self.undoDim = level.getPlayerDimension(self.player)
self.undoYP = level.getPlayerOrientation(self.player)
except Exception as e:
log.info(_("Couldn't get player position! ({0!r})").format(e))
level.setPlayerPosition(self.redoPos, self.player)
level.setPlayerDimension(self.redoDim, self.player)
level.setPlayerOrientation(self.redoYP, self.player)
self.tool.markerList.invalidate()
@staticmethod
def bufferSize():
return 20
class SpawnPositionInvalid(Exception):
pass
def okayAt63(level, pos):
"""blocks 63 or 64 must be occupied"""
# return level.blockAt(pos[0], 63, pos[2]) != 0 or level.blockAt(pos[0], 64, pos[2]) != 0
return True
def okayAboveSpawn(level, pos):
"""3 blocks above spawn must be open"""
return not any([level.blockAt(pos[0], pos[1] + i, pos[2]) for i in xrange(1, 4)])
def positionValid(level, pos):
try:
return okayAt63(level, pos) and okayAboveSpawn(level, pos)
except EnvironmentError:
return False
class PlayerSpawnMoveOperation(Operation):
undoPos = None
redoPos = None
def __init__(self, tool, pos):
super(PlayerSpawnMoveOperation, self).__init__(tool.editor, tool.editor.level)
self.tool, self.pos = tool, pos
self.canUndo = False
def perform(self, recordUndo=True):
if self.level.saving:
alert(_("Cannot perform action while saving is taking place"))
return
level = self.tool.editor.level
'''
if isinstance(level, pymclevel.MCInfdevOldLevel):
if not positionValid(level, self.pos):
if config.spawn.spawnProtection.get():
raise SpawnPositionInvalid(
"You cannot have two air blocks at Y=63 and Y=64 in your spawn point's column. Additionally, you cannot have a solid block in the three blocks above your spawn point. It's weird, I know.")
'''
self.undoPos = level.playerSpawnPosition()
level.setPlayerSpawnPosition(self.pos)
self.tool.markerList.invalidate()
self.canUndo = True
def undo(self):
if self.undoPos is not None:
level = self.tool.editor.level
self.redoPos = level.playerSpawnPosition()
level.setPlayerSpawnPosition(self.undoPos)
self.tool.markerList.invalidate()
def redo(self):
if self.redoPos is not None:
level = self.tool.editor.level
self.undoPos = level.playerSpawnPosition()
level.setPlayerSpawnPosition(self.redoPos)
self.tool.markerList.invalidate()
class PlayerPositionPanel(Panel):
def __init__(self, tool):
Panel.__init__(self, name='Panel.PlayerPositionPanel')
self.tool = tool
self.player_UUID = {"UUID": [], "Name": []}
self.level = tool.editor.level
self.playercache = PlayerCache()
# Add this instance to PlayerCache 'targets'. PlayerCache generated processes will call
# this instance 'update_player' method when they have finished their execution.
self.playercache.add_target(self.update_player)
if hasattr(self.level, 'players'):
players = self.level.players or ["[No players]"]
if not self.level.oldPlayerFolderFormat:
for player in players:
if player != "Player" and player != "[No players]":
if len(player) > 4 and player[4] == "-":
os.rename(os.path.join(self.level.worldFolder.getFolderPath("playerdata"), player+".dat"), os.path.join(self.level.worldFolder.getFolderPath("playerdata"), player.replace("-", "", 1)+".dat"))
player = player.replace("-", "", 1)
# print 5
data = self.playercache.getPlayerInfo(player, use_old_data=True)
#self.player_UUID[data[0]] = data[1]
self.player_UUID["UUID"].append(data[0])
self.player_UUID["Name"].append(data[1])
#self.player_UUID[player] = data
if "Player" in players:
#self.player_UUID["Player (Single Player)"] = "Player"
self.player_UUID["UUID"].append("Player")
self.player_UUID["Name"].append("Player (Single Player)")
if "[No players]" not in players:
self.player_names = sorted(self.player_UUID.values(), key=lambda x: False if x == "Player (Single Player)" else x)
else:
self.player_UUID["UUID"].append("[No players]")
self.player_UUID["Name"].append("[No players]")
else:
players = ["Player (Single Player)"]
self.players = players
if 'Player' in self.player_UUID['UUID'] and 'Player (Single Player)' in self.player_UUID['Name']:
self.player_UUID['UUID'].insert(0, self.player_UUID['UUID'].pop(self.player_UUID['UUID'].index('Player')))
self.player_UUID['Name'].insert(0, self.player_UUID['Name'].pop(self.player_UUID['Name'].index('Player (Single Player)')))
self.pages = TabPanel()
tab_height = self.pages.tab_height
max_height = tab_height + self.tool.editor.mainViewport.height - self.tool.editor.toolbar.height - self.tool.editor.subwidgets[0].height - self.pages.margin * 2
#-# Uncomment the following line to have a maximum height for this panel.
# max_height = min(max_height, 500)
self.editNBTDataButton = Button("Edit NBT", action=self.editNBTData, tooltipText="Open the NBT Explorer to edit player's attributes and inventory")
addButton = Button("Add", action=self.tool.addPlayer)
removeButton = Button("Remove", action=self.tool.removePlayer)
gotoButton = Button("Goto", action=self.tool.gotoPlayer)
gotoCameraButton = Button("Goto View", action=self.tool.gotoPlayerCamera)
moveButton = Button("Move", action=self.tool.movePlayer)
moveToCameraButton = Button("Align to Camera", action=self.tool.movePlayerToCamera)
reloadSkin = Button("Reload Skins", action=self.tool.reloadSkins, tooltipText="This pulls skins from the online server, so this may take a while")
btns = [self.editNBTDataButton]
if not isinstance(self.level, pymclevel.leveldbpocket.PocketLeveldbWorld):
btns.extend([addButton, removeButton])
btns.extend([gotoButton, gotoCameraButton, moveButton, moveToCameraButton, reloadSkin])
btns = Column(btns, margin=0, spacing=2)
h = max_height - btns.height - self.pages.margin * 2 - 2 - self.font.get_linesize() * 2
col = Label('')
def close():
self.pages.show_page(col)
self.nbttree = NBTExplorerToolPanel(self.tool.editor, nbtObject={}, height=max_height, \
close_text="Go Back", no_header=True, close_action=close,
load_text=None)
self.nbttree.shrink_wrap()
self.nbtpage = Column([self.nbttree])
self.nbtpage.shrink_wrap()
self.pages.add_page("NBT Data", self.nbtpage)
self.pages.set_rect(map(lambda x:x+self.margin, self.nbttree._rect))
tableview = TableView(nrows=(h - (self.font.get_linesize() * 2.5)) / self.font.get_linesize(),
header_height=self.font.get_linesize(),
columns=[TableColumn("Player Name(s):", (self.nbttree.width - (self.margin * 3)) / 3),
TableColumn("Player UUID(s):", (self.nbttree.width - (self.margin * 3)))],
)
tableview.index = 0
tableview.num_rows = lambda: len(self.player_UUID["UUID"])
tableview.row_data = lambda i: (self.player_UUID["Name"][i],self.player_UUID["UUID"][i])
tableview.row_is_selected = lambda x: x == tableview.index
tableview.zebra_color = (0, 0, 0, 48)
def selectTableRow(i, evt):
tableview.index = i
tableview.click_row = selectTableRow
def mouse_down(e):
if e.button == 1 and e.num_clicks > 1:
self.editNBTData()
TableRowView.mouse_down(tableview.rows, e)
tableview.rows.mouse_down = mouse_down
tableview.rows.tooltipText = "Double-click or use the button below to edit the NBT Data."
self.table = tableview
col.set_parent(None)
self.col = col = Column([tableview, btns], spacing=2)
self.pages.add_page("Players", col, 0)
self.pages.shrink_wrap()
self.pages.show_page(col)
self.add(self.pages)
self.shrink_wrap()
self.max_height = max_height
def editNBTData(self):
player = self.selectedPlayer
if player == 'Player (Single Player)':
alert("Not yet implemented.\nUse the NBT Explorer to edit this player.")
elif player == '[No players]':
return
else:
player = self.level.getPlayerTag(self.selectedPlayer)
if player is not None:
self.pages.remove_page(self.nbtpage)
def close():
self.pages.show_page(self.col)
self.nbttree = NBTExplorerToolPanel(self.tool.editor, nbtObject=player, fileName=None,
savePolicy=-1, dataKeyName=None,
height=self.max_height, no_header=True, close_text="Go Back",
close_action=close, load_text=None,
copy_data=False)
self.nbtpage = Column([self.nbttree,])
self.nbtpage.shrink_wrap()
self.pages.add_page("NBT Data", self.nbtpage)
self.pages.show_page(self.nbtpage)
else:
alert(_("Unable to load player %s" % self.selectedPlayer()))
@property
def selectedPlayer(self):
if not self.level.oldPlayerFolderFormat:
player = self.players[self.table.index]
if player != "Player (Single Player)" and player != "[No players]" and player != "~local_player":
return self.player_UUID["UUID"][self.table.index]
else:
return player
else:
return self.players[self.table.index]
def key_down(self, evt):
self.dispatch_key('key_down', evt)
def dispatch_key(self, name, evt):
if not hasattr(evt, 'key'):
return
if name == "key_down":
keyname = self.root.getKey(evt)
if self.pages.current_page == self.col:
if keyname == "Up" and self.table.index > 0:
self.table.index -= 1
self.table.rows.scroll_to_item(self.table.index)
elif keyname == "Down" and self.table.index < len(self.players) - 1:
self.table.index += 1
self.table.rows.scroll_to_item(self.table.index)
elif keyname == 'Page down':
self.table.index = min(len(self.players) - 1, self.table.index + self.table.rows.num_rows())
elif keyname == 'Page up':
self.table.index = max(0, self.table.index - self.table.rows.num_rows())
elif keyname == 'Return':
if self.selectedPlayer:
self.editNBTData()
if self.table.rows.cell_to_item_no(0, 0) + self.table.rows.num_rows() -1 > self.table.index or self.table.rows.cell_to_item_no(0, 0) + self.table.rows.num_rows() -1 < self.table.index:
self.table.rows.scroll_to_item(self.table.index)
elif self.pages.current_page == self.nbtpage:
self.nbttree.dispatch_key(name, evt)
def update_player(self, data):
if isinstance(data, tuple):
if data[0] in self.player_UUID['UUID']:
idx = self.player_UUID['UUID'].index(data[0])
self.player_UUID['UUID'][idx] = data[0]
self.player_UUID['Name'][idx] = data[1]
class PlayerPositionTool(EditorTool):
surfaceBuild = True
toolIconName = "player"
tooltipText = "Players"
movingPlayer = None
recordMove = True
def reloadTextures(self):
self.charTex = loadPNGTexture('char.png')
@alertException
def addPlayer(self):
op = PlayerAddOperation(self)
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
@alertException
def removePlayer(self):
player = self.panel.selectedPlayer
if player != "[No players]":
op = PlayerRemoveOperation(self, player)
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
@alertException
def movePlayer(self):
if self.panel.selectedPlayer != "[No players]":
self.movingPlayer = self.panel.selectedPlayer
if self.movingPlayer == "Player (Single Player)":
self.movingPlayer = "Player"
@alertException
def movePlayerToCamera(self):
player = self.panel.selectedPlayer
if player == "Player (Single Player)":
player = "Player"
if player != "[No players]":
pos = self.editor.mainViewport.cameraPosition
y = self.editor.mainViewport.yaw
p = self.editor.mainViewport.pitch
op = PlayerMoveOperation(self, pos, player, (y, p))
self.movingPlayer = None
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
def delete_skin(self, uuid):
del self.playerTexture[uuid]
self.playerTexture[uuid] = self.charTex
@alertException
def reloadSkins(self):
#result = ask("This pulls skins from the online server, so this may take a while", ["Ok", "Cancel"])
#if result == "Ok":
try:
for player in self.editor.level.players:
if player != "Player" and player in self.playerTexture.keys():
del self.playerTexture[player]
# print 6
r = self.playercache.getPlayerSkin(player, force_download=True, instance=self)
                    if not isinstance(r, (str, unicode)):
                        r = r.join()
self.playerTexture[player] = loadPNGTexture(r)
#self.markerList.call(self._drawToolMarkers)
        except Exception:
            raise Exception("Could not connect to the skins server, please check your Internet connection and try again.")
def gotoPlayerCamera(self):
player = self.panel.selectedPlayer
if player == "Player (Single Player)":
player = "Player"
try:
pos = self.editor.level.getPlayerPosition(player)
y, p = self.editor.level.getPlayerOrientation(player)
self.editor.gotoDimension(self.editor.level.getPlayerDimension(player))
self.editor.mainViewport.cameraPosition = pos
self.editor.mainViewport.yaw = y
self.editor.mainViewport.pitch = p
self.editor.mainViewport.stopMoving()
self.editor.mainViewport.invalidate()
except pymclevel.PlayerNotFound:
pass
def gotoPlayer(self):
player = self.panel.selectedPlayer
if player == "Player (Single Player)":
player = "Player"
try:
if self.editor.mainViewport.pitch < 0:
self.editor.mainViewport.pitch = -self.editor.mainViewport.pitch
self.editor.mainViewport.cameraVector = self.editor.mainViewport._cameraVector()
cv = self.editor.mainViewport.cameraVector
pos = self.editor.level.getPlayerPosition(player)
pos = map(lambda p, c: p - c * 5, pos, cv)
self.editor.gotoDimension(self.editor.level.getPlayerDimension(player))
self.editor.mainViewport.cameraPosition = pos
self.editor.mainViewport.stopMoving()
except pymclevel.PlayerNotFound:
pass
def __init__(self, *args):
EditorTool.__init__(self, *args)
self.reloadTextures()
self.nonSavedPlayers = []
textureVerticesHead = numpy.array(
(
# Backside of Head
24, 16, # Bottom Left
24, 8, # Top Left
32, 8, # Top Right
32, 16, # Bottom Right
# Front of Head
8, 16,
8, 8,
16, 8,
16, 16,
#
24, 0,
16, 0,
16, 8,
24, 8,
#
16, 0,
8, 0,
8, 8,
16, 8,
#
8, 8,
0, 8,
0, 16,
8, 16,
16, 16,
24, 16,
24, 8,
16, 8,
), dtype='f4')
textureVerticesHat = numpy.array(
(
56, 16,
56, 8,
64, 8,
64, 16,
48, 16,
48, 8,
40, 8,
40, 16,
56, 0,
48, 0,
48, 8,
56, 8,
48, 0,
40, 0,
40, 8,
48, 8,
40, 8,
32, 8,
32, 16,
40, 16,
48, 16,
56, 16,
56, 8,
48, 8,
), dtype='f4')
textureVerticesHead.shape = (24, 2)
textureVerticesHat.shape = (24, 2)
textureVerticesHead *= 4
textureVerticesHead[:, 1] *= 2
textureVerticesHat *= 4
textureVerticesHat[:, 1] *= 2
self.texVerts = (textureVerticesHead, textureVerticesHat)
self.playerPos = {0:{}, -1:{}, 1:{}}
self.playerTexture = {}
self.revPlayerPos = {0:{}, -1:{}, 1:{}}
self.inOtherDimension = {0: [], 1: [], -1: []}
self.playercache = PlayerCache()
self.markerList = DisplayList()
panel = None
def showPanel(self):
if not self.panel:
self.panel = PlayerPositionPanel(self)
self.panel.centery = (self.editor.mainViewport.height - self.editor.toolbar.height) / 2 + self.editor.subwidgets[0].height
self.panel.left = self.editor.left
self.editor.add(self.panel)
def hidePanel(self):
if self.panel and self.panel.parent:
self.panel.parent.remove(self.panel)
self.panel = None
def drawToolReticle(self):
if self.movingPlayer is None:
return
pos, direction = self.editor.blockFaceUnderCursor
dim = self.editor.level.getPlayerDimension(self.movingPlayer)
pos = (pos[0], pos[1] + 2, pos[2])
x, y, z = pos
# x,y,z=map(lambda p,d: p+d, pos, direction)
GL.glEnable(GL.GL_BLEND)
GL.glColor(1.0, 1.0, 1.0, 0.5)
self.drawCharacterHead(x + 0.5, y + 0.75, z + 0.5, self.revPlayerPos[dim][self.movingPlayer], dim)
GL.glDisable(GL.GL_BLEND)
GL.glEnable(GL.GL_DEPTH_TEST)
self.drawCharacterHead(x + 0.5, y + 0.75, z + 0.5, self.revPlayerPos[dim][self.movingPlayer], dim)
drawTerrainCuttingWire(BoundingBox((x, y, z), (1, 1, 1)))
drawTerrainCuttingWire(BoundingBox((x, y - 1, z), (1, 1, 1)))
#drawTerrainCuttingWire( BoundingBox((x,y-2,z), (1,1,1)) )
GL.glDisable(GL.GL_DEPTH_TEST)
markerLevel = None
def drawToolMarkers(self):
if not config.settings.drawPlayerHeads.get():
return
if self.markerLevel != self.editor.level:
self.markerList.invalidate()
self.markerLevel = self.editor.level
self.markerList.call(self._drawToolMarkers)
def _drawToolMarkers(self):
GL.glColor(1.0, 1.0, 1.0, 0.5)
GL.glEnable(GL.GL_DEPTH_TEST)
GL.glMatrixMode(GL.GL_MODELVIEW)
for player in self.editor.level.players:
try:
pos = self.editor.level.getPlayerPosition(player)
yaw, pitch = self.editor.level.getPlayerOrientation(player)
dim = self.editor.level.getPlayerDimension(player)
self.inOtherDimension[dim].append(player)
self.playerPos[dim][pos] = player
self.revPlayerPos[dim][player] = pos
if player != "Player" and config.settings.downloadPlayerSkins.get():
# print 7
r = self.playercache.getPlayerSkin(player, force_download=False)
if not isinstance(r, (str, unicode)):
r = r.join()
self.playerTexture[player] = loadPNGTexture(r)
else:
self.playerTexture[player] = self.charTex
if dim != self.editor.level.dimNo:
continue
x, y, z = pos
GL.glPushMatrix()
GL.glTranslate(x, y, z)
GL.glRotate(-yaw, 0, 1, 0)
GL.glRotate(pitch, 1, 0, 0)
GL.glColor(1, 1, 1, 1)
self.drawCharacterHead(0, 0, 0, (x,y,z), self.editor.level.dimNo)
GL.glPopMatrix()
# GL.glEnable(GL.GL_BLEND)
drawTerrainCuttingWire(FloatBox((x - .5, y - .5, z - .5), (1, 1, 1)),
c0=(0.3, 0.9, 0.7, 1.0),
c1=(0, 0, 0, 0),
)
#GL.glDisable(GL.GL_BLEND)
except Exception, e:
print "Exception in editortools.player.PlayerPositionTool._drawToolMarkers:", repr(e)
import traceback
print traceback.format_exc()
continue
GL.glDisable(GL.GL_DEPTH_TEST)
def drawCharacterHead(self, x, y, z, realCoords=None, dim=0):
GL.glEnable(GL.GL_CULL_FACE)
origin = (x - 0.25, y - 0.25, z - 0.25)
size = (0.5, 0.5, 0.5)
box = FloatBox(origin, size)
hat_origin = (x - 0.275, y - 0.275, z - 0.275)
hat_size = (0.55, 0.55, 0.55)
hat_box = FloatBox(hat_origin, hat_size)
if realCoords is not None and self.playerPos[dim][realCoords] != "Player" and config.settings.downloadPlayerSkins.get():
drawCube(box,
texture=self.playerTexture[self.playerPos[dim][realCoords]], textureVertices=self.texVerts[0])
GL.glEnable(GL.GL_BLEND)
GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)
drawCube(hat_box,
texture=self.playerTexture[self.playerPos[dim][realCoords]], textureVertices=self.texVerts[1])
GL.glDisable(GL.GL_BLEND)
else:
drawCube(box,
texture=self.charTex, textureVertices=self.texVerts[0])
GL.glDisable(GL.GL_CULL_FACE)
#@property
#def statusText(self):
# if not self.panel:
# return ""
# player = self.panel.selectedPlayer
# if player == "Player":
# return "Click to move the player"
#
# return _("Click to move the player \"{0}\"").format(player)
@alertException
def mouseDown(self, evt, pos, direction):
if self.movingPlayer is None:
return
pos = (pos[0] + 0.5, pos[1] + 2.75, pos[2] + 0.5)
op = PlayerMoveOperation(self, pos, self.movingPlayer)
self.movingPlayer = None
if self.recordMove:
self.editor.addOperation(op)
addingMoving = False
else:
self.editor.performWithRetry(op) #Prevent recording of Undo when adding player
self.recordMove = True
addingMoving = True
if op.canUndo and not addingMoving:
self.editor.addUnsavedEdit()
def keyDown(self, evt):
keyname = evt.dict.get('keyname', None) or self.editor.get_root().getKey(evt)
if not self.recordMove:
if not pygame.key.get_focused():
return
if keyname == "Escape":
self.recordMove = True
if self.panel and self.panel.__class__ == PlayerPositionPanel:
self.panel.key_down(evt)
def keyUp(self, evt):
pass
def levelChanged(self):
self.markerList.invalidate()
@alertException
def toolSelected(self):
self.showPanel()
self.movingPlayer = None
@alertException
def toolReselected(self):
if self.panel:
self.gotoPlayer()
class PlayerSpawnPositionOptions(ToolOptions):
def __init__(self, tool):
ToolOptions.__init__(self, name='Panel.PlayerSpawnPositionOptions')
self.tool = tool
self.spawnProtectionCheckBox = CheckBox(ref=AttrRef(tool, "spawnProtection"))
self.spawnProtectionLabel = Label("Spawn Position Safety")
self.spawnProtectionLabel.mouse_down = self.spawnProtectionCheckBox.mouse_down
tooltipText = "Minecraft will randomly move your spawn point if you try to respawn in a column where there are no blocks at Y=63 and Y=64. Only uncheck this box if Minecraft is changed."
self.spawnProtectionLabel.tooltipText = self.spawnProtectionCheckBox.tooltipText = tooltipText
row = Row((self.spawnProtectionCheckBox, self.spawnProtectionLabel))
col = Column((Label("Spawn Point Options"), row, Button("OK", action=self.dismiss)))
self.add(col)
self.shrink_wrap()
class PlayerSpawnPositionTool(PlayerPositionTool):
surfaceBuild = True
toolIconName = "playerspawn"
tooltipText = "Move Spawn Point\nRight-click for options"
def __init__(self, *args):
PlayerPositionTool.__init__(self, *args)
self.optionsPanel = PlayerSpawnPositionOptions(self)
def toolEnabled(self):
return self.editor.level.dimNo == 0
def showPanel(self):
self.panel = Panel(name='Panel.PlayerSpawnPositionTool')
button = Button("Goto Spawn", action=self.gotoSpawn)
self.panel.add(button)
self.panel.shrink_wrap()
self.panel.left = self.editor.left
self.panel.centery = self.editor.centery
self.editor.add(self.panel)
def gotoSpawn(self):
cv = self.editor.mainViewport.cameraVector
pos = self.editor.level.playerSpawnPosition()
pos = map(lambda p, c: p - c * 5, pos, cv)
self.editor.mainViewport.cameraPosition = pos
self.editor.mainViewport.stopMoving()
@property
def statusText(self):
return "Click to set the spawn position."
spawnProtection = config.spawn.spawnProtection.property()
def drawToolReticle(self):
pos, direction = self.editor.blockFaceUnderCursor
x, y, z = map(lambda p, d: p + d, pos, direction)
color = (1.0, 1.0, 1.0, 0.5)
if isinstance(self.editor.level, pymclevel.MCInfdevOldLevel) and self.spawnProtection:
if not positionValid(self.editor.level, (x, y, z)):
color = (1.0, 0.0, 0.0, 0.5)
GL.glColor(*color)
GL.glEnable(GL.GL_BLEND)
self.drawCage(x, y, z)
self.drawCharacterHead(x + 0.5, y + 0.5, z + 0.5)
GL.glDisable(GL.GL_BLEND)
GL.glEnable(GL.GL_DEPTH_TEST)
self.drawCage(x, y, z)
self.drawCharacterHead(x + 0.5, y + 0.5, z + 0.5)
color2 = map(lambda a: a * 0.4, color)
drawTerrainCuttingWire(BoundingBox((x, y, z), (1, 1, 1)), color2, color)
GL.glDisable(GL.GL_DEPTH_TEST)
def _drawToolMarkers(self):
x, y, z = self.editor.level.playerSpawnPosition()
GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)
GL.glEnable(GL.GL_BLEND)
color = config.selectionColors.black.get() + (0.35,)
GL.glColor(*color)
GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_LINE)
GL.glLineWidth(2.0)
drawCube(FloatBox((x, y, z), (1, 1, 1)))
GL.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_FILL)
drawCube(FloatBox((x, y, z), (1, 1, 1)))
GL.glDisable(GL.GL_BLEND)
GL.glEnable(GL.GL_DEPTH_TEST)
GL.glColor(1.0, 1.0, 1.0, 1.0)
self.drawCage(x, y, z)
self.drawCharacterHead(x + 0.5, y + 0.5 + 0.125 * numpy.sin(self.editor.frames * 0.05), z + 0.5)
GL.glDisable(GL.GL_DEPTH_TEST)
def drawCage(self, x, y, z):
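        # Block ID 52 is the mob spawner; its cage texture is reused here to
        # render the spawn-point marker.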
cageTexVerts = numpy.array(pymclevel.MCInfdevOldLevel.materials.blockTextures[52, 0])
pixelScale = 0.5 if self.editor.level.materials.name in ("Pocket", "Alpha") else 1.0
texSize = 16 * pixelScale
cageTexVerts = cageTexVerts.astype(float) * pixelScale
cageTexVerts = numpy.array(
[((tx, ty), (tx + texSize, ty), (tx + texSize, ty + texSize), (tx, ty + texSize)) for (tx, ty) in
cageTexVerts], dtype='float32')
GL.glEnable(GL.GL_ALPHA_TEST)
drawCube(BoundingBox((x, y, z), (1, 1, 1)), texture=pymclevel.alphaMaterials.terrainTexture,
textureVertices=cageTexVerts)
GL.glDisable(GL.GL_ALPHA_TEST)
@alertException
def mouseDown(self, evt, pos, direction):
pos = map(lambda p, d: p + d, pos, direction)
op = PlayerSpawnMoveOperation(self, pos)
try:
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
self.markerList.invalidate()
except SpawnPositionInvalid, e:
if "Okay" != ask(str(e), responses=["Okay", "Fix it for me!"]):
level = self.editor.level
status = ""
if not okayAt63(level, pos):
level.setBlockAt(pos[0], 63, pos[2], 1)
status += _("Block added at y=63.\n")
if 59 < pos[1] < 63:
pos[1] = 63
status += _("Spawn point moved upward to y=63.\n")
if not okayAboveSpawn(level, pos):
if pos[1] > 63 or pos[1] < 59:
lpos = (pos[0], pos[1] - 1, pos[2])
if level.blockAt(*pos) == 0 and level.blockAt(*lpos) != 0 and okayAboveSpawn(level, lpos):
pos = lpos
status += _("Spawn point shifted down by one block.\n")
if not okayAboveSpawn(level, pos):
for i in xrange(1, 4):
level.setBlockAt(pos[0], pos[1] + i, pos[2], 0)
status += _("Blocks above spawn point cleared.\n")
self.editor.invalidateChunks([(pos[0] // 16, pos[2] // 16)])
op = PlayerSpawnMoveOperation(self, pos)
try:
self.editor.addOperation(op)
if op.canUndo:
self.editor.addUnsavedEdit()
self.markerList.invalidate()
except SpawnPositionInvalid, e:
alert(str(e))
return
if len(status):
alert(_("Spawn point fixed. Changes: \n\n") + status)
@alertException
def toolReselected(self):
self.gotoSpawn()
| [] |
slimgroup/Devito-Examples | seismic/checkpointing/checkpoint.py | 449e1286a18ebc4172069372ba2bf3cd2ec99a2f | # The MIT License (MIT)
#
# Copyright (c) 2016, Imperial College, London
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from pyrevolve import Checkpoint, Operator
from devito import TimeFunction
from devito.tools import flatten
class CheckpointOperator(Operator):
"""Devito's concrete implementation of the ABC pyrevolve.Operator. This class wraps
devito.Operator so it conforms to the pyRevolve API. pyRevolve will call apply
    with arguments t_start and t_end. Devito calls these arguments time_m and
    time_M, so the following dict is used to perform the translation between
    the two naming conventions.
Parameters
----------
op : Operator
devito.Operator object that this object will wrap.
args : dict
If devito.Operator.apply() expects any arguments, they can be provided
here to be cached. Any calls to CheckpointOperator.apply() will
automatically include these cached arguments in the call to the
underlying devito.Operator.apply().
"""
t_arg_names = {'t_start': 'time_m', 't_end': 'time_M'}
def __init__(self, op, **kwargs):
self.op = op
self.args = kwargs
op_default_args = self.op._prepare_arguments(**kwargs)
self.start_offset = op_default_args[self.t_arg_names['t_start']]
def _prepare_args(self, t_start, t_end):
args = self.args.copy()
args[self.t_arg_names['t_start']] = t_start + self.start_offset
args[self.t_arg_names['t_end']] = t_end - 1 + self.start_offset
return args
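        # Illustrative (assuming a start_offset of 0): _prepare_args(3, 10)
        # yields {'time_m': 3, 'time_M': 9} -- pyRevolve's t_end is exclusive
        # while Devito's time_M is inclusive, hence the "- 1" above.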
def apply(self, t_start, t_end):
""" If the devito operator requires some extra arguments in the call to apply
they can be stored in the args property of this object so pyRevolve calls
pyRevolve.Operator.apply() without caring about these extra arguments while
this method passes them on correctly to devito.Operator
"""
# Build the arguments list to invoke the kernel function
args = self.op.arguments(**self._prepare_args(t_start, t_end))
# Invoke kernel function with args
arg_values = [args[p.name] for p in self.op.parameters]
self.op.cfunction(*arg_values)
class DevitoCheckpoint(Checkpoint):
"""Devito's concrete implementation of the Checkpoint abstract base class provided by
pyRevolve. Holds a list of symbol objects that hold data.
"""
def __init__(self, objects):
"""Intialise a checkpoint object. Upon initialisation, a checkpoint
stores only a reference to the objects that are passed into it."""
assert(all(isinstance(o, TimeFunction) for o in objects))
dtypes = set([o.dtype for o in objects])
assert(len(dtypes) == 1)
self._dtype = dtypes.pop()
self.objects = objects
@property
def dtype(self):
return self._dtype
def get_data(self, timestep):
data = flatten([get_symbol_data(s, timestep) for s in self.objects])
return data
def get_data_location(self, timestep):
return self.get_data(timestep)
@property
def size(self):
"""The memory consumption of the data contained in a checkpoint."""
return sum([int((o.size_allocated/(o.time_order+1))*o.time_order)
for o in self.objects])
def save(*args):
raise RuntimeError("Invalid method called. Did you check your version" +
" of pyrevolve?")
def load(*args):
raise RuntimeError("Invalid method called. Did you check your version" +
" of pyrevolve?")
def get_symbol_data(symbol, timestep):
timestep += symbol.time_order - 1
ptrs = []
for i in range(symbol.time_order):
# Use `._data`, instead of `.data`, as `.data` is a view of the DOMAIN
# data region which is non-contiguous in memory. The performance hit from
# dealing with non-contiguous memory is so big (introduces >1 copy), it's
        # better to checkpoint unnecessary stuff to get a contiguous chunk of memory.
ptr = symbol._data[timestep - i, :, :]
ptrs.append(ptr)
return ptrs
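# A minimal usage sketch (illustrative, not part of this module), assuming
# pyrevolve's Revolver API: wrap the forward and adjoint Devito operators,
# hand them to a Revolver together with a DevitoCheckpoint, and drive the
# forward/reverse sweeps. `op_fwd`, `op_rev`, `u` and `nt` are hypothetical
# stand-ins for the caller's operators, wavefield and number of timesteps.
#
#   from pyrevolve import Revolver
#   cp = DevitoCheckpoint([u])
#   wrap_fw = CheckpointOperator(op_fwd, u=u)
#   wrap_rev = CheckpointOperator(op_rev, u=u)
#   revolver = Revolver(cp, wrap_fw, wrap_rev, None, nt)
#   revolver.apply_forward()   # checkpointed forward sweep
#   revolver.apply_reverse()   # reverse sweep, recomputing from checkpoints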
| [] |
shyba/lbry | lbrynet/file_manager/EncryptedFileManager.py | ab3278c50a8b7b5a8e9486a1c52be3d5e0c18297 | """
Keep track of which LBRY Files are downloading and store their LBRY File specific metadata
"""
import logging
import os
from twisted.enterprise import adbapi
from twisted.internet import defer, task, reactor
from twisted.python.failure import Failure
from lbrynet.reflector.reupload import reflect_stream
from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet.lbry_file.StreamDescriptor import EncryptedFileStreamType
from lbrynet.cryptstream.client.CryptStreamDownloader import AlreadyStoppedError
from lbrynet.cryptstream.client.CryptStreamDownloader import CurrentlyStoppingError
from lbrynet.core.sqlite_helpers import rerun_if_locked
from lbrynet import conf
log = logging.getLogger(__name__)
def safe_start_looping_call(looping_call, seconds=3600):
if not looping_call.running:
looping_call.start(seconds)
def safe_stop_looping_call(looping_call):
if looping_call.running:
looping_call.stop()
class EncryptedFileManager(object):
"""Keeps track of currently opened LBRY Files, their options, and
their LBRY File specific metadata.
"""
def __init__(self, session, stream_info_manager, sd_identifier, download_directory=None):
self.session = session
self.stream_info_manager = stream_info_manager
# TODO: why is sd_identifier part of the file manager?
self.sd_identifier = sd_identifier
self.lbry_files = []
self.sql_db = None
if download_directory:
self.download_directory = download_directory
else:
self.download_directory = os.getcwd()
self.lbry_file_reflector = task.LoopingCall(self.reflect_lbry_files)
log.debug("Download directory for EncryptedFileManager: %s", str(self.download_directory))
@defer.inlineCallbacks
def setup(self):
yield self._open_db()
yield self._add_to_sd_identifier()
yield self._start_lbry_files()
if conf.settings['reflect_uploads']:
safe_start_looping_call(self.lbry_file_reflector)
def get_lbry_file_status(self, lbry_file):
return self._get_lbry_file_status(lbry_file.rowid)
def set_lbry_file_data_payment_rate(self, lbry_file, new_rate):
return self._set_lbry_file_payment_rate(lbry_file.rowid, new_rate)
def change_lbry_file_status(self, lbry_file, status):
log.debug("Changing status of %s to %s", lbry_file.stream_hash, status)
return self._change_file_status(lbry_file.rowid, status)
def get_lbry_file_status_reports(self):
ds = []
for lbry_file in self.lbry_files:
ds.append(lbry_file.status())
dl = defer.DeferredList(ds)
def filter_failures(status_reports):
return [status_report for success, status_report in status_reports if success is True]
dl.addCallback(filter_failures)
return dl
def save_sd_blob_hash_to_stream(self, stream_hash, sd_hash):
return self.stream_info_manager.save_sd_blob_hash_to_stream(stream_hash, sd_hash)
def _add_to_sd_identifier(self):
downloader_factory = ManagedEncryptedFileDownloaderFactory(self)
self.sd_identifier.add_stream_downloader_factory(
EncryptedFileStreamType, downloader_factory)
@defer.inlineCallbacks
def _check_stream_is_managed(self, stream_hash):
# check that all the streams in the stream_info_manager are also
# tracked by lbry_file_manager and fix any streams that aren't.
rowid = yield self._get_rowid_for_stream_hash(stream_hash)
if rowid is not None:
defer.returnValue(True)
rate = self.session.base_payment_rate_manager.min_blob_data_payment_rate
key, stream_name, file_name = yield self.stream_info_manager.get_stream_info(stream_hash)
log.warning("Trying to fix missing lbry file for %s", stream_name.decode('hex'))
yield self._save_lbry_file(stream_hash, rate)
@defer.inlineCallbacks
def _check_stream_info_manager(self):
def _iter_streams(stream_hashes):
for stream_hash in stream_hashes:
yield self._check_stream_is_managed(stream_hash)
stream_hashes = yield self.stream_info_manager.get_all_streams()
log.debug("Checking %s streams", len(stream_hashes))
yield defer.DeferredList(list(_iter_streams(stream_hashes)))
@defer.inlineCallbacks
def _start_lbry_files(self):
yield self._check_stream_info_manager()
files_and_options = yield self._get_all_lbry_files()
yield defer.DeferredList([
self._set_options_and_restore(rowid, stream_hash, options)
for rowid, stream_hash, options in files_and_options
])
log.info("Started %i lbry files", len(self.lbry_files))
@defer.inlineCallbacks
def _set_options_and_restore(self, rowid, stream_hash, options):
try:
b_prm = self.session.base_payment_rate_manager
payment_rate_manager = NegotiatedPaymentRateManager(
b_prm, self.session.blob_tracker)
downloader = yield self.start_lbry_file(
rowid, stream_hash, payment_rate_manager, blob_data_rate=options)
yield downloader.restore()
except Exception:
log.error('An error occurred while starting a lbry file (%s, %s, %s)',
rowid, stream_hash, options)
@defer.inlineCallbacks
def start_lbry_file(self, rowid, stream_hash,
payment_rate_manager, blob_data_rate=None,
download_directory=None, file_name=None):
if not download_directory:
download_directory = self.download_directory
payment_rate_manager.min_blob_data_payment_rate = blob_data_rate
lbry_file_downloader = ManagedEncryptedFileDownloader(
rowid,
stream_hash,
self.session.peer_finder,
self.session.rate_limiter,
self.session.blob_manager,
self.stream_info_manager,
self,
payment_rate_manager,
self.session.wallet,
download_directory,
file_name=file_name
)
yield lbry_file_downloader.set_stream_info()
self.lbry_files.append(lbry_file_downloader)
defer.returnValue(lbry_file_downloader)
@defer.inlineCallbacks
def _stop_lbry_file(self, lbry_file):
def wait_for_finished(lbry_file, count=2):
if count or lbry_file.saving_status is not False:
return task.deferLater(reactor, 1, self._stop_lbry_file, lbry_file, count=count - 1)
try:
yield lbry_file.stop(change_status=False)
self.lbry_files.remove(lbry_file)
except CurrentlyStoppingError:
yield wait_for_finished(lbry_file)
except AlreadyStoppedError:
pass
finally:
defer.returnValue(None)
def _stop_lbry_files(self):
log.info("Stopping %i lbry files", len(self.lbry_files))
lbry_files = self.lbry_files
for lbry_file in lbry_files:
yield self._stop_lbry_file(lbry_file)
@defer.inlineCallbacks
def add_lbry_file(self, stream_hash, payment_rate_manager, blob_data_rate=None,
download_directory=None, file_name=None):
rowid = yield self._save_lbry_file(stream_hash, blob_data_rate)
lbry_file = yield self.start_lbry_file(rowid, stream_hash, payment_rate_manager,
blob_data_rate, download_directory,
file_name)
defer.returnValue(lbry_file)
@defer.inlineCallbacks
def delete_lbry_file(self, lbry_file, delete_file=False):
if lbry_file not in self.lbry_files:
raise ValueError("Could not find that LBRY file")
def wait_for_finished(count=2):
if count <= 0 or lbry_file.saving_status is False:
return True
else:
return task.deferLater(reactor, 1, wait_for_finished, count=count - 1)
full_path = os.path.join(lbry_file.download_directory, lbry_file.file_name)
try:
yield lbry_file.stop()
except (AlreadyStoppedError, CurrentlyStoppingError):
yield wait_for_finished()
self.lbry_files.remove(lbry_file)
yield self._delete_lbry_file_options(lbry_file.rowid)
yield lbry_file.delete_data()
# TODO: delete this
# get count for stream hash returns the count of the lbry files with the stream hash
# in the lbry_file_options table, which will soon be removed.
stream_count = yield self.get_count_for_stream_hash(lbry_file.stream_hash)
if stream_count == 0:
yield self.stream_info_manager.delete_stream(lbry_file.stream_hash)
else:
msg = ("Can't delete stream info for %s, count is %i\n"
"The call that resulted in this warning will\n"
"be removed in the database refactor")
log.warning(msg, lbry_file.stream_hash, stream_count)
if delete_file and os.path.isfile(full_path):
os.remove(full_path)
defer.returnValue(True)
def toggle_lbry_file_running(self, lbry_file):
"""Toggle whether a stream reader is currently running"""
for l in self.lbry_files:
if l == lbry_file:
return l.toggle_running()
return defer.fail(Failure(ValueError("Could not find that LBRY file")))
def _reflect_lbry_files(self):
for lbry_file in self.lbry_files:
yield reflect_stream(lbry_file)
@defer.inlineCallbacks
def reflect_lbry_files(self):
yield defer.DeferredList(list(self._reflect_lbry_files()))
@defer.inlineCallbacks
def stop(self):
safe_stop_looping_call(self.lbry_file_reflector)
yield defer.DeferredList(list(self._stop_lbry_files()))
if self.sql_db:
yield self.sql_db.close()
self.sql_db = None
log.info("Stopped %s", self)
defer.returnValue(True)
def get_count_for_stream_hash(self, stream_hash):
return self._get_count_for_stream_hash(stream_hash)
######### database calls #########
def _open_db(self):
# check_same_thread=False is solely to quiet a spurious error that appears to be due
# to a bug in twisted, where the connection is closed by a different thread than the
# one that opened it. The individual connections in the pool are not used in multiple
# threads.
self.sql_db = adbapi.ConnectionPool(
"sqlite3",
os.path.join(self.session.db_dir, "lbryfile_info.db"),
check_same_thread=False
)
return self.sql_db.runQuery(
"create table if not exists lbry_file_options (" +
" blob_data_rate real, " +
" status text," +
" stream_hash text,"
" foreign key(stream_hash) references lbry_files(stream_hash)" +
")"
)
@rerun_if_locked
def _save_lbry_file(self, stream_hash, data_payment_rate):
def do_save(db_transaction):
row = (data_payment_rate, ManagedEncryptedFileDownloader.STATUS_STOPPED, stream_hash)
db_transaction.execute("insert into lbry_file_options values (?, ?, ?)", row)
return db_transaction.lastrowid
return self.sql_db.runInteraction(do_save)
@rerun_if_locked
def _delete_lbry_file_options(self, rowid):
return self.sql_db.runQuery("delete from lbry_file_options where rowid = ?",
(rowid,))
@rerun_if_locked
def _set_lbry_file_payment_rate(self, rowid, new_rate):
return self.sql_db.runQuery(
"update lbry_file_options set blob_data_rate = ? where rowid = ?",
(new_rate, rowid))
@rerun_if_locked
def _get_all_lbry_files(self):
d = self.sql_db.runQuery("select rowid, stream_hash, blob_data_rate from lbry_file_options")
return d
@rerun_if_locked
def _change_file_status(self, rowid, new_status):
return self.sql_db.runQuery("update lbry_file_options set status = ? where rowid = ?",
(new_status, rowid))
@rerun_if_locked
def _get_lbry_file_status(self, rowid):
d = self.sql_db.runQuery("select status from lbry_file_options where rowid = ?",
(rowid,))
d.addCallback(lambda r: (r[0][0] if len(r) else None))
return d
@rerun_if_locked
def _get_count_for_stream_hash(self, stream_hash):
d = self.sql_db.runQuery("select count(*) from lbry_file_options where stream_hash = ?",
(stream_hash,))
d.addCallback(lambda r: (r[0][0] if r else 0))
return d
@rerun_if_locked
def _get_rowid_for_stream_hash(self, stream_hash):
d = self.sql_db.runQuery("select rowid from lbry_file_options where stream_hash = ?",
(stream_hash,))
d.addCallback(lambda r: (r[0][0] if len(r) else None))
return d
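# Hedged usage sketch (illustrative only): the manager is constructed with an
# active session and stream info manager, set up once at startup and stopped
# at shutdown. All names mirror the constructor/coroutine signatures above.
#
#   manager = EncryptedFileManager(session, stream_info_manager, sd_identifier)
#   yield manager.setup()   # opens the DB, restores and starts known files
#   ...
#   yield manager.stop()    # stops files, the reflector loop and the DB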
| [((23, 6, 23, 33), 'logging.getLogger', 'logging.getLogger', ({(23, 24, 23, 32): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((53, 35, 53, 76), 'twisted.internet.task.LoopingCall', 'task.LoopingCall', ({(53, 52, 53, 75): 'self.reflect_lbry_files'}, {}), '(self.reflect_lbry_files)', False, 'from twisted.internet import defer, task, reactor\n'), ((80, 13, 80, 35), 'twisted.internet.defer.DeferredList', 'defer.DeferredList', ({(80, 32, 80, 34): 'ds'}, {}), '(ds)', False, 'from twisted.internet import defer, task, reactor\n'), ((92, 29, 92, 72), 'lbrynet.file_manager.EncryptedFileDownloader.ManagedEncryptedFileDownloaderFactory', 'ManagedEncryptedFileDownloaderFactory', ({(92, 67, 92, 71): 'self'}, {}), '(self)', False, 'from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory\n'), ((148, 31, 160, 9), 'lbrynet.file_manager.EncryptedFileDownloader.ManagedEncryptedFileDownloader', 'ManagedEncryptedFileDownloader', (), '', False, 'from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader\n'), ((163, 8, 163, 47), 'twisted.internet.defer.returnValue', 'defer.returnValue', ({(163, 26, 163, 46): 'lbry_file_downloader'}, {}), '(lbry_file_downloader)', False, 'from twisted.internet import defer, task, reactor\n'), ((193, 8, 193, 36), 'twisted.internet.defer.returnValue', 'defer.returnValue', ({(193, 26, 193, 35): 'lbry_file'}, {}), '(lbry_file)', False, 'from twisted.internet import defer, task, reactor\n'), ((206, 20, 206, 83), 'os.path.join', 'os.path.join', ({(206, 33, 206, 61): 'lbry_file.download_directory', (206, 63, 206, 82): 'lbry_file.file_name'}, {}), '(lbry_file.download_directory, lbry_file.file_name)', False, 'import os\n'), ((235, 8, 235, 31), 'twisted.internet.defer.returnValue', 'defer.returnValue', ({(235, 26, 235, 30): '(True)'}, {}), '(True)', False, 'from twisted.internet import defer, task, reactor\n'), ((260, 8, 260, 31), 'twisted.internet.defer.returnValue', 'defer.returnValue', ({(260, 26, 260, 30): '(True)'}, {}), '(True)', False, 'from twisted.internet import defer, task, reactor\n'), ((52, 38, 52, 49), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((102, 12, 102, 35), 'twisted.internet.defer.returnValue', 'defer.returnValue', ({(102, 30, 102, 34): '(True)'}, {}), '(True)', False, 'from twisted.internet import defer, task, reactor\n'), ((132, 35, 133, 49), 'lbrynet.core.PaymentRateManager.NegotiatedPaymentRateManager', 'NegotiatedPaymentRateManager', ({(133, 16, 133, 21): 'b_prm', (133, 23, 133, 48): 'self.session.blob_tracker'}, {}), '(b_prm, self.session.blob_tracker)', False, 'from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager\n'), ((178, 12, 178, 35), 'twisted.internet.defer.returnValue', 'defer.returnValue', ({(178, 30, 178, 34): 'None'}, {}), '(None)', False, 'from twisted.internet import defer, task, reactor\n'), ((232, 27, 232, 52), 'os.path.isfile', 'os.path.isfile', ({(232, 42, 232, 51): 'full_path'}, {}), '(full_path)', False, 'import os\n'), ((233, 12, 233, 32), 'os.remove', 'os.remove', ({(233, 22, 233, 31): 'full_path'}, {}), '(full_path)', False, 'import os\n'), ((274, 12, 274, 65), 'os.path.join', 'os.path.join', ({(274, 25, 274, 44): 'self.session.db_dir', (274, 46, 274, 64): '"""lbryfile_info.db"""'}, {}), "(self.session.db_dir, 'lbryfile_info.db')", False, 'import os\n'), ((169, 23, 169, 100), 'twisted.internet.task.deferLater', 'task.deferLater', (), '', False, 'from twisted.internet import defer, task, reactor\n'), ((204, 23, 204, 
86), 'twisted.internet.task.deferLater', 'task.deferLater', (), '', False, 'from twisted.internet import defer, task, reactor\n'), ((246, 18, 246, 43), 'lbrynet.reflector.reupload.reflect_stream', 'reflect_stream', ({(246, 33, 246, 42): 'lbry_file'}, {}), '(lbry_file)', False, 'from lbrynet.reflector.reupload import reflect_stream\n')] |
TomMinor/MayaPerforce | Perforce/AppUtils.py | 52182c7e5c3e91e41973d0c2abbda8880e809e49 | import os
import sys
import re
import logging
p4_logger = logging.getLogger("Perforce")
# Import app specific utilities, maya opens scenes differently than nuke etc
# Are we in maya or nuke?
if re.match( "maya", os.path.basename( sys.executable ), re.I ):
p4_logger.info("Configuring for Maya")
from MayaUtils import *
elif re.match( "nuke", os.path.basename( sys.executable ), re.I ):
p4_logger.info("Configuring for Nuke")
from NukeUtils import *
else:
p4_logger.warning("Couldn't find app configuration")
raise ImportError("No supported applications found that this plugin can interface with")
| [((6, 12, 6, 41), 'logging.getLogger', 'logging.getLogger', ({(6, 30, 6, 40): '"""Perforce"""'}, {}), "('Perforce')", False, 'import logging\n'), ((10, 21, 10, 55), 'os.path.basename', 'os.path.basename', ({(10, 39, 10, 53): 'sys.executable'}, {}), '(sys.executable)', False, 'import os\n'), ((13, 23, 13, 57), 'os.path.basename', 'os.path.basename', ({(13, 41, 13, 55): 'sys.executable'}, {}), '(sys.executable)', False, 'import os\n')] |
kourtneyshort/healthcare | fhir/immunizations_demo/models/trainer/model.py | 1d1e2375304ac99f43a8b6aee7374fcdf641eb6f | #!/usr/bin/python3
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""A simple logistics regression model for immunization prediction.
The following features are used in this model:
1. age of the patient
2. gender of the patient
3. country the patient is visiting
4. expected duration of stay
5. disease
We are predicting the probability of the patient getting a disease.
Note that this model is part of an end-to-end demo which shows how
to leverage the Google Cloud Healthcare APIs (FHIR APIs specifically)
to finish data analysis and machine learning tasks. This problem
itself is not a natural machine learning task.
"""
import tensorflow as tf
from functools import reduce
# Input data specific flags.
tf.flags.DEFINE_string("training_data", default=None,
help="Path to training data. This should be a GCS path.")
tf.flags.DEFINE_string("eval_data", default=None,
help="Path to evaluation data. This should be a GCS path.")
# Model specific flags. See more details here:
# https://www.tensorflow.org/api_docs/python/tf/estimator/LinearClassifier
tf.flags.DEFINE_string("model_dir", default=None,
help="Estimator model_dir.")
tf.flags.DEFINE_string("export_model_dir", default=None,
help="Folder to export trained model.")
tf.flags.DEFINE_integer("batch_size", default=96,
help="Mini-batch size for the training.")
tf.flags.DEFINE_integer("training_steps", default=1000,
help="Total number of training steps.")
tf.flags.DEFINE_integer("eval_steps", default=100,
help="Total number of evaluation steps.")
tf.flags.DEFINE_integer("n_classes", default=2,
help="Number of categories to classify to.")
# More advanced flags that controls the behavior of FTRL optimizer.
# See more details here:
# https://www.tensorflow.org/api_docs/python/tf/train/FtrlOptimizer
tf.flags.DEFINE_float("learning_rate", default=0.01,
help="Learning rate")
tf.flags.DEFINE_float("l1_regularization_strength", default=0.005,
help="L1 regularization strength for FTRL optimizer.")
tf.flags.DEFINE_float("l2_regularization_strength", default=0.001,
help="L2 regularization strength for FTRL optimizer.")
FLAGS = tf.flags.FLAGS
# Feature and label keys.
FEATURE_KEYS = ['age', 'gender', 'country', 'duration', 'disease']
LABEL_KEYS = ['risk']
DS_BUFFER_SIZE = 50000
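# The records consumed by build_input_fn below must already be serialized
# tf.train.Examples whose features and label are each a single int64. The
# helper below is a hedged sketch of how one such record could be built; it
# is not called anywhere in this module, and the integer encodings are
# assumed to come from the demo's preprocessing step.
def build_example_for_illustration(age, gender, country, duration, disease,
                                   risk):
  """Builds one serialized tf.train.Example for illustration purposes."""
  def int64_feature(value):
    return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
  values = zip(FEATURE_KEYS + LABEL_KEYS,
               [age, gender, country, duration, disease, risk])
  example = tf.train.Example(features=tf.train.Features(
      feature={key: int64_feature(value) for key, value in values}))
  return example.SerializeToString()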
def build_input_fn(filename):
"""Builds the input funciton for training/evaluation.
Args:
filename (string): The path of the file that contains features and
labels. This can be a Google Cloud Storage path (e.g. gs://...).
"""
def input_fn():
"""Input function to be used by the classifier."""
def parse(serialized_example):
"""Parses a single tensorflow example."""
def parse_feature(features, key):
features[key] = tf.FixedLenFeature([], tf.int64)
return features
data = tf.parse_single_example(serialized_example,
features=reduce(parse_feature, FEATURE_KEYS + LABEL_KEYS, {}))
features = [tf.convert_to_tensor(tf.cast(data[key], tf.int32))
for key in FEATURE_KEYS]
labels = [tf.convert_to_tensor(tf.cast(data[key], tf.int32))
for key in LABEL_KEYS]
return features, labels
dataset = tf.data.TFRecordDataset(filename, buffer_size=DS_BUFFER_SIZE)
dataset = dataset.map(parse).cache().repeat()
dataset = dataset.batch(FLAGS.batch_size)
features, labels = dataset.make_one_shot_iterator().get_next()
# Slice features into a dictionary which is expected by the classifier.
features = tf.transpose(features)
    def map_feature(d, idx):
      """Maps individual features into a dictionary."""
      d[FEATURE_KEYS[idx]] = tf.transpose(
        tf.nn.embedding_lookup(features, [idx]))
      return d
return reduce(map_feature, list(range(len(FEATURE_KEYS))), {}), labels
return input_fn
def build_serving_input_receiver_fn():
"""Builds a serving_input_receiver_fn which takes JSON as input."""
def serving_input_receiver_fn():
def add_input(inputs, feature):
inputs[feature] = tf.placeholder(shape=[None], dtype=tf.int32)
return inputs
inputs = reduce(add_input, FEATURE_KEYS, {})
return tf.estimator.export.ServingInputReceiver(inputs, inputs)
return serving_input_receiver_fn
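# Illustrative request shape for the exported SavedModel, assuming the
# receiver above: every feature arrives as a batch of int32 values, e.g.
#   {"age": [34], "gender": [1], "country": [42],
#    "duration": [14], "disease": [3]}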
def main(_):
# All features have been converted to integer representation beforehand.
feature_columns = [tf.feature_column.numeric_column(key=key, dtype=tf.int32)
for key in FEATURE_KEYS]
classifier = tf.estimator.LinearClassifier(
feature_columns=feature_columns,
model_dir=FLAGS.model_dir,
n_classes=FLAGS.n_classes,
optimizer=tf.train.FtrlOptimizer(
learning_rate=FLAGS.learning_rate,
l1_regularization_strength=FLAGS.l1_regularization_strength,
l2_regularization_strength=FLAGS.l2_regularization_strength),
config=tf.estimator.RunConfig(keep_checkpoint_max=1))
# Training.
classifier.train(
input_fn=build_input_fn(FLAGS.training_data),
steps=FLAGS.training_steps)
# Evaluation.
classifier.evaluate(
input_fn=build_input_fn(FLAGS.eval_data),
steps=FLAGS.eval_steps)
# Export SavedModel.
if FLAGS.export_model_dir is not None:
classifier.export_saved_model(
FLAGS.export_model_dir,
build_serving_input_receiver_fn())
if __name__ == '__main__':
# Set logging level to INFO.
tf.logging.set_verbosity(tf.logging.INFO)
tf.app.run()
| [((41, 0, 42, 59), 'tensorflow.flags.DEFINE_string', 'tf.flags.DEFINE_string', (), '', True, 'import tensorflow as tf\n'), ((43, 0, 44, 61), 'tensorflow.flags.DEFINE_string', 'tf.flags.DEFINE_string', (), '', True, 'import tensorflow as tf\n'), ((48, 0, 49, 30), 'tensorflow.flags.DEFINE_string', 'tf.flags.DEFINE_string', (), '', True, 'import tensorflow as tf\n'), ((50, 0, 51, 41), 'tensorflow.flags.DEFINE_string', 'tf.flags.DEFINE_string', (), '', True, 'import tensorflow as tf\n'), ((52, 0, 53, 43), 'tensorflow.flags.DEFINE_integer', 'tf.flags.DEFINE_integer', (), '', True, 'import tensorflow as tf\n'), ((54, 0, 55, 41), 'tensorflow.flags.DEFINE_integer', 'tf.flags.DEFINE_integer', (), '', True, 'import tensorflow as tf\n'), ((56, 0, 57, 43), 'tensorflow.flags.DEFINE_integer', 'tf.flags.DEFINE_integer', (), '', True, 'import tensorflow as tf\n'), ((58, 0, 59, 46), 'tensorflow.flags.DEFINE_integer', 'tf.flags.DEFINE_integer', (), '', True, 'import tensorflow as tf\n'), ((64, 0, 65, 23), 'tensorflow.flags.DEFINE_float', 'tf.flags.DEFINE_float', (), '', True, 'import tensorflow as tf\n'), ((66, 0, 67, 56), 'tensorflow.flags.DEFINE_float', 'tf.flags.DEFINE_float', (), '', True, 'import tensorflow as tf\n'), ((68, 0, 69, 56), 'tensorflow.flags.DEFINE_float', 'tf.flags.DEFINE_float', (), '', True, 'import tensorflow as tf\n'), ((169, 2, 169, 43), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', ({(169, 27, 169, 42): 'tf.logging.INFO'}, {}), '(tf.logging.INFO)', True, 'import tensorflow as tf\n'), ((170, 2, 170, 14), 'tensorflow.app.run', 'tf.app.run', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((106, 14, 106, 75), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (), '', True, 'import tensorflow as tf\n'), ((112, 15, 112, 37), 'tensorflow.transpose', 'tf.transpose', ({(112, 28, 112, 36): 'features'}, {}), '(features)', True, 'import tensorflow as tf\n'), ((131, 13, 131, 48), 'functools.reduce', 'reduce', ({(131, 20, 131, 29): 'add_input', (131, 31, 131, 43): 'FEATURE_KEYS', (131, 45, 131, 47): '{}'}, {}), '(add_input, FEATURE_KEYS, {})', False, 'from functools import reduce\n'), ((132, 11, 132, 67), 'tensorflow.estimator.export.ServingInputReceiver', 'tf.estimator.export.ServingInputReceiver', ({(132, 52, 132, 58): 'inputs', (132, 60, 132, 66): 'inputs'}, {}), '(inputs, inputs)', True, 'import tensorflow as tf\n'), ((138, 21, 138, 78), 'tensorflow.feature_column.numeric_column', 'tf.feature_column.numeric_column', (), '', True, 'import tensorflow as tf\n'), ((128, 24, 128, 68), 'tensorflow.placeholder', 'tf.placeholder', (), '', True, 'import tensorflow as tf\n'), ((145, 14, 148, 68), 'tensorflow.train.FtrlOptimizer', 'tf.train.FtrlOptimizer', (), '', True, 'import tensorflow as tf\n'), ((149, 11, 149, 56), 'tensorflow.estimator.RunConfig', 'tf.estimator.RunConfig', (), '', True, 'import tensorflow as tf\n'), ((94, 24, 94, 56), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', ({(94, 43, 94, 45): '[]', (94, 47, 94, 55): 'tf.int64'}, {}), '([], tf.int64)', True, 'import tensorflow as tf\n'), ((116, 8, 116, 47), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', ({(116, 31, 116, 39): 'features', (116, 41, 116, 46): '[idx]'}, {}), '(features, [idx])', True, 'import tensorflow as tf\n'), ((98, 17, 98, 69), 'functools.reduce', 'reduce', ({(98, 24, 98, 37): 'parse_feature', (98, 39, 98, 64): 'FEATURE_KEYS + LABEL_KEYS', (98, 66, 98, 68): '{}'}, {}), '(parse_feature, FEATURE_KEYS + LABEL_KEYS, {})', False, 'from functools import reduce\n'), ((100, 39, 
100, 67), 'tensorflow.cast', 'tf.cast', ({(100, 47, 100, 56): 'data[key]', (100, 58, 100, 66): 'tf.int32'}, {}), '(data[key], tf.int32)', True, 'import tensorflow as tf\n'), ((102, 37, 102, 65), 'tensorflow.cast', 'tf.cast', ({(102, 45, 102, 54): 'data[key]', (102, 56, 102, 64): 'tf.int32'}, {}), '(data[key], tf.int32)', True, 'import tensorflow as tf\n')] |
thecodeteam/heliosburn | heliosburn/django/hbproject/webui/models.py | 513f6335c9788948d82e5c9285d7869f3ff4cc10 | import json
import re
from django.conf import settings
import requests
from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, \
UnexpectedException, LocationHeaderNotFoundException, NotFoundException
def validate_response(response):
if 200 <= response.status_code < 300:
return True
return False
def status_code_to_exception(status_code):
if status_code == 400:
return BadRequestException()
if status_code == 401:
return UnauthorizedException()
if status_code == 404:
return NotFoundException()
if status_code >= 500:
return ServerErrorException()
if 300 <= status_code < 400:
return RedirectException()
return UnexpectedException()
def get_resource_id_or_raise_exception(resource_name, response):
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
location = response.headers.get('location')
    pattern = r'.+{}/(?P<id>\w+)'.format(resource_name)
p = re.compile(pattern)
m = p.match(location)
try:
resource_id = m.group('id')
return resource_id
except:
        raise UnexpectedException('Could not get the resource ID from the response.')
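# Illustrative: given a response whose Location header is
# 'http://host/api/session/5423', get_resource_id_or_raise_exception(
#     'session', response) returns '5423'; without a match it raises
# UnexpectedException.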
class Base(object):
def __init__(self, auth_token=None):
self.auth_token = auth_token
def get_url(self, extra=''):
return '{base_url}{endpoint}{extra}'.format(base_url=settings.API_BASE_URL,
endpoint=object.__getattribute__(self, '__endpoint__'),
extra=extra)
class Session(Base):
__endpoint__ = '/session/'
__resourcename__ = 'session'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
session = json.loads(response.text)
return session
def start(self, resource_id):
url = self.get_url(extra='{}/{}/'.format(resource_id, 'start'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def stop(self, resource_id):
url = self.get_url(extra='{}/{}/'.format(resource_id, 'stop'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
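# A hedged usage sketch of the client above (illustrative only, not part of
# the webui): the typical create/start/stop round trip for a session. The
# payload in `data` is whatever the backend expects.
def run_session_lifecycle_example(auth_token, data):
    session = Session(auth_token=auth_token)
    session_id = session.create(data)  # POST /session/, id from Location
    session.start(session_id)          # POST /session/<id>/start/
    session.stop(session_id)           # POST /session/<id>/stop/
    return session.get(session_id)     # GET /session/<id>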
class TestPlan(Base):
__endpoint__ = '/testplan/'
__resourcename__ = 'testplan'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
testplan = json.loads(response.text)
return testplan
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
testplans = json.loads(response.text)
return testplans
def delete(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.delete(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class Rule(Base):
__endpoint__ = '/testplan/{testplan_id}/rule/'
__resourcename__ = 'rule'
def __init__(self, testplan_id, auth_token=None):
self.auth_token = auth_token
self.__endpoint__ = self.__endpoint__.format(testplan_id=testplan_id)
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
rule = json.loads(response.text)
return rule
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
resource = json.loads(response.text)
return resource
class Recording(Base):
__endpoint__ = '/recording/'
__resourcename__ = 'recording'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
recording = json.loads(response.text)
return recording
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
recordings = json.loads(response.text)
return recordings
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def start(self, resource_id):
url = self.get_url(extra='{}/{}'.format(resource_id, 'start'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def stop(self, resource_id):
url = self.get_url(extra='{}/{}'.format(resource_id, 'stop'))
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class QoS(Base):
__endpoint__ = '/qos/'
__resourcename__ = 'qos'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
qos = json.loads(response.text)
return qos
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
qos = json.loads(response.text)
return qos
def delete(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.delete(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class ServerOverload(Base):
__endpoint__ = '/serveroverload/'
__resourcename__ = 'serveroverload'
def create(self, data):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.post(url, headers=headers, data=json.dumps(data))
resource_id = get_resource_id_or_raise_exception(self.__resourcename__, response)
return resource_id
def update(self, resource_id, data):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.put(url, headers=headers, data=json.dumps(data))
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
def get(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
profile = json.loads(response.text)
return profile
def get_all(self):
url = self.get_url()
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
profile = json.loads(response.text)
return profile
def delete(self, resource_id):
url = self.get_url(extra=str(resource_id))
headers = {'X-Auth-Token': self.auth_token}
response = requests.delete(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
class Logs(Base):
__endpoint__ = '/log/'
__resourcename__ = 'log'
def stats(self):
url = self.get_url(extra='stats')
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
stats = json.loads(response.text)
return stats
def get(self, start, length, component, levels, date_from, date_to, msg):
url = self.get_url(
extra='?start={}&limit={}&component={}&levels={}&from={}&to={}&msg={}'.format(start, length, component,
levels,
date_from, date_to, msg))
headers = {'X-Auth-Token': self.auth_token}
response = requests.get(url, headers=headers)
if not validate_response(response):
exception = status_code_to_exception(response.status_code)
exception.message = response.text
raise exception
logs = json.loads(response.text)
return logs | [((26, 11, 26, 32), 'webui.exceptions.UnexpectedException', 'UnexpectedException', ({}, {}), '()', False, 'from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, UnexpectedException, LocationHeaderNotFoundException, NotFoundException\n'), ((36, 8, 36, 27), 're.compile', 're.compile', ({(36, 19, 36, 26): 'pattern'}, {}), '(pattern)', False, 'import re\n'), ((17, 15, 17, 36), 'webui.exceptions.BadRequestException', 'BadRequestException', ({}, {}), '()', False, 'from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, UnexpectedException, LocationHeaderNotFoundException, NotFoundException\n'), ((19, 15, 19, 38), 'webui.exceptions.UnauthorizedException', 'UnauthorizedException', ({}, {}), '()', False, 'from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, UnexpectedException, LocationHeaderNotFoundException, NotFoundException\n'), ((21, 15, 21, 34), 'webui.exceptions.NotFoundException', 'NotFoundException', ({}, {}), '()', False, 'from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, UnexpectedException, LocationHeaderNotFoundException, NotFoundException\n'), ((23, 15, 23, 37), 'webui.exceptions.ServerErrorException', 'ServerErrorException', ({}, {}), '()', False, 'from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, UnexpectedException, LocationHeaderNotFoundException, NotFoundException\n'), ((25, 15, 25, 34), 'webui.exceptions.RedirectException', 'RedirectException', ({}, {}), '()', False, 'from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, UnexpectedException, LocationHeaderNotFoundException, NotFoundException\n'), ((69, 19, 69, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((74, 18, 74, 43), 'json.loads', 'json.loads', ({(74, 29, 74, 42): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((80, 19, 80, 54), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((89, 19, 89, 54), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((119, 19, 119, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((124, 19, 124, 44), 'json.loads', 'json.loads', ({(124, 30, 124, 43): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((130, 19, 130, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((135, 20, 135, 45), 'json.loads', 'json.loads', ({(135, 31, 135, 44): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((141, 19, 141, 56), 'requests.delete', 'requests.delete', (), '', False, 'import requests\n'), ((175, 19, 175, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((180, 15, 180, 40), 'json.loads', 'json.loads', ({(180, 26, 180, 39): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((186, 19, 186, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((191, 19, 191, 44), 'json.loads', 'json.loads', ({(191, 30, 191, 43): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((209, 19, 209, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((214, 20, 214, 45), 'json.loads', 'json.loads', ({(214, 31, 214, 44): 'response.text'}, {}), '(response.text)', False, 'import json\n'), 
((220, 19, 220, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((225, 21, 225, 46), 'json.loads', 'json.loads', ({(225, 32, 225, 45): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((240, 19, 240, 54), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((249, 19, 249, 54), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((279, 19, 279, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((284, 14, 284, 39), 'json.loads', 'json.loads', ({(284, 25, 284, 38): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((290, 19, 290, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((295, 14, 295, 39), 'json.loads', 'json.loads', ({(295, 25, 295, 38): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((301, 19, 301, 56), 'requests.delete', 'requests.delete', (), '', False, 'import requests\n'), ((331, 19, 331, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((336, 18, 336, 43), 'json.loads', 'json.loads', ({(336, 29, 336, 42): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((342, 19, 342, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((347, 18, 347, 43), 'json.loads', 'json.loads', ({(347, 29, 347, 42): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((353, 19, 353, 56), 'requests.delete', 'requests.delete', (), '', False, 'import requests\n'), ((367, 19, 367, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((372, 16, 372, 41), 'json.loads', 'json.loads', ({(372, 27, 372, 40): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((381, 19, 381, 53), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((386, 15, 386, 40), 'json.loads', 'json.loads', ({(386, 26, 386, 39): 'response.text'}, {}), '(response.text)', False, 'import json\n'), ((42, 15, 42, 86), 'webui.exceptions.UnexpectedException', 'UnexpectedException', ({(42, 35, 42, 85): '"""Could not get the resource ID from the response."""'}, {}), "('Could not get the resource ID from the response.')", False, 'from webui.exceptions import BadRequestException, UnauthorizedException, ServerErrorException, RedirectException, UnexpectedException, LocationHeaderNotFoundException, NotFoundException\n'), ((62, 60, 62, 76), 'json.dumps', 'json.dumps', ({(62, 71, 62, 75): 'data'}, {}), '(data)', False, 'import json\n'), ((103, 60, 103, 76), 'json.dumps', 'json.dumps', ({(103, 71, 103, 75): 'data'}, {}), '(data)', False, 'import json\n'), ((110, 59, 110, 75), 'json.dumps', 'json.dumps', ({(110, 70, 110, 74): 'data'}, {}), '(data)', False, 'import json\n'), ((159, 60, 159, 76), 'json.dumps', 'json.dumps', ({(159, 71, 159, 75): 'data'}, {}), '(data)', False, 'import json\n'), ((166, 59, 166, 75), 'json.dumps', 'json.dumps', ({(166, 70, 166, 74): 'data'}, {}), '(data)', False, 'import json\n'), ((202, 60, 202, 76), 'json.dumps', 'json.dumps', ({(202, 71, 202, 75): 'data'}, {}), '(data)', False, 'import json\n'), ((231, 59, 231, 75), 'json.dumps', 'json.dumps', ({(231, 70, 231, 74): 'data'}, {}), '(data)', False, 'import json\n'), ((263, 60, 263, 76), 'json.dumps', 'json.dumps', ({(263, 71, 263, 75): 'data'}, {}), '(data)', False, 'import json\n'), ((270, 59, 270, 75), 'json.dumps', 'json.dumps', ({(270, 70, 270, 74): 'data'}, {}), '(data)', False, 'import json\n'), ((315, 60, 315, 76), 'json.dumps', 'json.dumps', ({(315, 71, 315, 75): 'data'}, {}), 
'(data)', False, 'import json\n'), ((322, 59, 322, 75), 'json.dumps', 'json.dumps', ({(322, 70, 322, 74): 'data'}, {}), '(data)', False, 'import json\n')] |
sushilkar/ychaos | src/ychaos/core/verification/controller.py | 6801390f0faf553789e3384440a72a0705310738 | # Copyright 2021, Yahoo
# Licensed under the terms of the Apache 2.0 license. See the LICENSE file in the project root for terms
import time
from typing import Dict, List, Optional, Type
from pydantic import validate_arguments
from ...app_logger import AppLogger
from ...testplan import SystemState
from ...testplan.schema import TestPlan
from ...testplan.verification import VerificationConfig, VerificationType
from ...utils.hooks import EventHook
from ...utils.yaml import Dumper
from .data import VerificationData, VerificationStateData
from .plugins.BaseVerificationPlugin import BaseVerificationPlugin
from .plugins.HTTPRequestVerificationPlugin import (
HTTPRequestVerificationPlugin,
)
from .plugins.PythonModuleVerificationPlugin import (
PythonModuleVerificationPlugin,
)
from .plugins.SDv4VerificationPlugin import SDv4VerificationPlugin
# Enum value to corresponding Plugin Map
VERIFICATION_PLUGIN_MAP: Dict[str, Type[BaseVerificationPlugin]] = {
"python_module": PythonModuleVerificationPlugin,
"http_request": HTTPRequestVerificationPlugin,
"sdv4": SDv4VerificationPlugin,
}
class VerificationController(EventHook):
"""
Verification controller is used to run all the verification plugins configured in the testplan
    and assert that the system is in the state expected by the user. Extends the EventHook class,
    which defines the following event hooks.
## Valid Hooks
=== "on_start"
Hook that gets called when the verification execution is about to start.
No arguments are passed to the callable.
```python
def callable_hook(): ...
```
=== "on_each_plugin_start"
Hook that gets called when a particular plugin execution is about to start. `index` in the signature refers
to the position in the list
```python
def callable_hook(index: int, config: VerificationConfig): ...
```
References:
1. [VerificationConfig][ychaos.testplan.verification.VerificationConfig]
=== "on_each_plugin_end"
Hook that gets called when a particular plugin execution has ended. `index` in the signature refers to the
position in the list
```python
def callable_hook(index: int, config: VerificationConfig, state_data: VerificationStateData): ...
```
References:
1. [VerificationConfig][ychaos.testplan.verification.VerificationConfig]
2. [VerificationStateData][ychaos.core.verification.data.VerificationStateData]
=== "on_end"
Hook that gets called when the verification execution has ended. Each element in the list
        of booleans corresponds to the result of a plugin, where `True` indicates successful verification
        and `False` indicates a failure to verify the state
```python
def callable_hook(verify_list: List[bool]): ...
```
=== "on_plugin_not_found"
Hook that gets called when a plugin available in schema is not ready for usage/not implemented.
        This case is possible for plugins that are in the Beta/development phase.
```python
def callable_hook(index:int, plugin_type: VerificationType): ...
```
---
    Each of the hooks gets called on a certain event. The caller can register as many hooks as needed for a particular
    event by calling the `register_hook(event_name, hook_method)` method. All the hooks are executed sequentially. The
    best example of this is to register a hook that prints information on the CLI.
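    For instance, a minimal sketch of registering a hook (illustrative only; `testplan` is assumed to be a
    parsed `TestPlan` object and `state` a `SystemState` value):
    ```python
    controller = VerificationController(testplan, state, [])
    controller.register_hook("on_start", lambda: print("verification starting"))
    all_verified = controller.execute()
    ```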
"""
__hook_events__ = {
"on_start": EventHook.CallableType(),
"on_each_plugin_start": EventHook.CallableType(int, VerificationConfig),
"on_each_plugin_end": EventHook.CallableType(
int, VerificationConfig, VerificationStateData
),
"on_plugin_not_found": EventHook.CallableType(int, VerificationType),
"on_end": EventHook.CallableType(List[bool]),
}
@validate_arguments
def __init__(
self,
testplan: TestPlan,
current_state: SystemState,
verification_data: List[Dict[SystemState, Optional[VerificationStateData]]],
):
"""
Initialize a verification controller object.
Args:
testplan: A valid testplan object
current_state: The state in which the system is expected to be in
            verification_data (List[VerificationData]): The verification data, possibly from a previous run.
"""
super(VerificationController, self).__init__()
self.logger = AppLogger.get_logger(self.__class__.__name__)
self.logger.bind(event="controller")
self.testplan = testplan
self.current_state = current_state
if not verification_data:
verification_data = [
dict(),
] * len(self.testplan.verification)
elif len(verification_data) != len(self.testplan.verification):
raise ValueError("Data and verification config size mismatch")
self.verification_data = list()
for data in verification_data:
self.verification_data.append(VerificationData.parse_obj(data))
def execute(self) -> bool:
"""
Execute the Verification controller.
Returns:
            True if all the verification plugins pass, False otherwise
"""
# Call all the hooks that were registered for `verification_start`
# If there were no hooks registered, this will be no-op
self.execute_hooks("on_start")
_verify_list = list()
for index, (verification_plugin, data) in enumerate(
zip(self.testplan.verification, self.verification_data)
):
# Delay before verifying
time.sleep(verification_plugin.delay_before)
assert isinstance(verification_plugin.states, List) # For mypy
if self.current_state in verification_plugin.states:
self.logger.info(
msg=f"Starting {verification_plugin.type.value} verification"
)
plugin_class = VERIFICATION_PLUGIN_MAP.get(
verification_plugin.type.value, None
)
if plugin_class is None:
# This can happen when a new plugin is not implemented yet, but is
# available in the schema
self.execute_hooks(
"on_plugin_not_found", index, verification_plugin.type
)
continue
plugin = plugin_class(verification_plugin.config, data)
# Call all the hooks that were registered for `verification_plugin_start`.
self.execute_hooks("on_each_plugin_start", index, verification_plugin)
state_data = plugin.run_verification()
self.logger.info(
msg=f"Completed {verification_plugin.type.value} verification"
)
# Call all the hooks that were registered for `verification_plugin_end`.
self.execute_hooks(
"on_each_plugin_end", index, verification_plugin, state_data
)
data.replace_data(self.current_state, state_data)
if verification_plugin.strict:
_verify_list.append(state_data.rc == 0)
else:
data.add_data(self.current_state, None)
# Delay after verifying
time.sleep(verification_plugin.delay_after)
# Call all the hooks that were registered for `verification_end`.
self.execute_hooks("on_end", _verify_list)
return all(_verify_list)
def get_encoded_verification_data(self):
return [data.encoded_dict() for data in self.verification_data]
def dump_verification_json(self, fp):
import json
json.dump(self.get_encoded_verification_data(), fp=fp, indent=4)
def dump_verification_yaml(self, fp):
import yaml
yaml.dump(
self.get_encoded_verification_data(),
fp,
default_flow_style=False,
sort_keys=False,
Dumper=Dumper,
indent=4,
)
| [((155, 12, 155, 56), 'time.sleep', 'time.sleep', ({(155, 23, 155, 55): 'verification_plugin.delay_before'}, {}), '(verification_plugin.delay_before)', False, 'import time\n'), ((196, 12, 196, 55), 'time.sleep', 'time.sleep', ({(196, 23, 196, 54): 'verification_plugin.delay_after'}, {}), '(verification_plugin.delay_after)', False, 'import time\n')] |
binary-signal/vimeo-channel-downloader | tests/test_vimeodl.py | 7c2ded9d07b2b698f4e52558ba7dc327c2827b6c | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from vimeodl import __version__
from vimeodl.vimeo import VimeoLinkExtractor, VimeoDownloader
def test_version():
assert __version__ == '0.1.0'
def test_vimeo_link_extractor():
vm = VimeoLinkExtractor()
vm.extract()
| [((11, 9, 11, 29), 'vimeodl.vimeo.VimeoLinkExtractor', 'VimeoLinkExtractor', ({}, {}), '()', False, 'from vimeodl.vimeo import VimeoLinkExtractor, VimeoDownloader\n')] |
Yunusbcr/labgraph | labgraph/graphs/node_test_harness.py | a00ae7098b7b0e0eda8ce2e7e62dae86854616fb | #!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import asyncio
import functools
import inspect
from contextlib import contextmanager
from typing import (
Any,
AsyncIterable,
Awaitable,
Callable,
Generic,
Iterator,
List,
Mapping,
Optional,
Sequence,
Tuple,
Type,
TypeVar,
Union,
overload,
)
from ..messages.message import Message
from ..util.testing import get_event_loop
from .config import Config
from .method import AsyncPublisher
from .node import Node
from .state import State
from .topic import Topic
N = TypeVar("N", bound=Node) # Node type
T = TypeVar("T", bound=Tuple[Topic, Message]) # Type yielded by async functions
class NodeTestHarness(Generic[N]):
"""
Utility class for testing Labgraph nodes. This allows a user to test some behavior
of a node in an asyncio event loop, with the harness taking care of setting up and
cleaning up the node.
Args:
node_type: The type of node this harness will test.
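    Example:
        A minimal sketch, assuming a hypothetical node type `MyNode`:
            harness = NodeTestHarness(MyNode)
            with harness.get_node(config=None, state=None) as node:
                ...  # exercise the node's methods here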
"""
def __init__(self, node_type: Type[N]) -> None:
self.node_type: Type[N] = node_type
@contextmanager
def get_node(
self, config: Optional[Config] = None, state: Optional[State] = None
) -> Iterator[N]:
"""
Context manager to create, configure and yield a node of specified type.
Node is cleaned up when the context manager exits.
Args:
config: The configuration to set on the node, if provided.
state: The state to set on the Node, if provided.
"""
node = None
try:
node = self.node_type(config=config, state=state)
node.setup()
yield node
finally:
if node is not None:
node.cleanup()
@overload
def run_with_harness(
node_type: Type[N],
fn: Callable[[N], AsyncIterable[T]],
config: Optional[Config],
state: Optional[State],
max_num_results: Optional[int] = None,
) -> List[T]:
...
@overload
def run_with_harness(
node_type: Type[N],
fn: Callable[[N], Awaitable[T]],
config: Optional[Config],
state: Optional[State],
) -> T:
...
def run_with_harness(node_type, fn, config=None, state=None, max_num_results=None):
"""
Runs an async function on a new node of the provided type using `NodeTestHarness`.
Args:
node_type: The type of node to create.
fn:
The async function to run. An instance of a node typed `node_type` will be
provided to the function as an argument.
config: The configuration to set on the node, if provided.
state: The state to set on the node, if provided.
max_num_results:
If `fn` is an async generator, the maximum number of results it will generate.
If this is `None`, then the generator can produce an unbounded number of
results.
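    Example:
        A minimal sketch, assuming a hypothetical node type `MyNode` whose `publisher` method
        is an async generator yielding `(topic, message)` tuples:
            results = run_with_harness(
                MyNode, MyNode.publisher, config=None, state=None, max_num_results=10
            )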
"""
# Check whether the max_num_results argument was improperly provided
_check_max_num_results_arg(run_with_harness.__name__, fn, max_num_results)
test_harness = NodeTestHarness(node_type=node_type)
with test_harness.get_node(config=config, state=state) as node:
return run_async(fn, args=[node], max_num_results=max_num_results)
@overload
def run_async(
fn: Callable[..., Awaitable[T]],
args: Optional[Sequence[Any]] = None,
kwargs: Optional[Mapping[str, Any]] = None,
) -> T:
...
@overload
def run_async(
fn: Callable[..., AsyncIterable[T]],
args: Optional[Sequence[Any]] = None,
kwargs: Optional[Mapping[str, Any]] = None,
max_num_results: Optional[int] = None,
) -> List[T]:
...
def run_async(fn, args=None, kwargs=None, max_num_results=None):
"""
Runs an async function to completion. Uses the current thread's event loop. Blocks
until the async function has finished executing. Forwards all arguments after `fn`
to the async function.
Args:
fn: The async function to run.
args: Positional arguments to forward to the function.
kwargs: Keyword arguments to forward to the function.
max_num_results:
If `fn` is an async generator, the maximum number of results it will generate.
If this is `None`, then the generator can produce an unbounded number of
results.
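    Example:
        A minimal sketch with a trivial coroutine (illustrative only):
            async def add(a: int, b: int) -> int:
                return a + b
            result = run_async(add, args=(1, 2))  # result == 3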
"""
# Check whether the max_num_results argument was improperly provided
_check_max_num_results_arg(run_async.__name__, fn, max_num_results)
# Unwrap functools.partial so we can check whether it is async
if isinstance(fn, functools.partial):
test_fn = fn.func
else:
test_fn = fn
if inspect.isasyncgenfunction(test_fn):
return get_event_loop().run_until_complete(
_async_generator_to_list(
fn=fn,
args=args or [],
kwargs=kwargs or {},
max_num_results=max_num_results,
)
)
elif asyncio.iscoroutinefunction(test_fn):
return get_event_loop().run_until_complete(fn(*(args or []), **(kwargs or {})))
else:
raise TypeError(f"{run_async.__name__}: function '{fn}' is not async")
def _check_max_num_results_arg(
called_fn_name: str,
fn: Union[Callable[..., Awaitable[Any]], Callable[..., AsyncIterable[Any]]],
max_num_results: Optional[int] = None,
) -> None:
if not inspect.isasyncgenfunction(fn) and max_num_results is not None:
raise TypeError(
f"{called_fn_name}: function '{fn}' is not an async generator but "
"max_num_results was provided"
)
async def _async_generator_to_list(
fn: Callable[..., AsyncIterable[T]],
args: Sequence[Any],
kwargs: Mapping[str, Any],
max_num_results: Optional[int] = None,
) -> List[T]:
if max_num_results is not None and max_num_results < 0:
raise ValueError("max_num_results must be non-negative")
result = []
async for retval in fn(*args, **kwargs):
result.append(retval)
if max_num_results is not None and len(result) >= max_num_results:
return result
return result
| [((35, 4, 35, 28), 'typing.TypeVar', 'TypeVar', (), '', False, 'from typing import Any, AsyncIterable, Awaitable, Callable, Generic, Iterator, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar, Union, overload\n'), ((36, 4, 36, 45), 'typing.TypeVar', 'TypeVar', (), '', False, 'from typing import Any, AsyncIterable, Awaitable, Callable, Generic, Iterator, List, Mapping, Optional, Sequence, Tuple, Type, TypeVar, Union, overload\n'), ((161, 7, 161, 42), 'inspect.isasyncgenfunction', 'inspect.isasyncgenfunction', ({(161, 34, 161, 41): 'test_fn'}, {}), '(test_fn)', False, 'import inspect\n'), ((170, 9, 170, 45), 'asyncio.iscoroutinefunction', 'asyncio.iscoroutinefunction', ({(170, 37, 170, 44): 'test_fn'}, {}), '(test_fn)', False, 'import asyncio\n'), ((181, 11, 181, 41), 'inspect.isasyncgenfunction', 'inspect.isasyncgenfunction', ({(181, 38, 181, 40): 'fn'}, {}), '(fn)', False, 'import inspect\n')] |
edward70/2021Computing | pygamelearning/lrud.py | df8fb818480a6e23f2eac736744294871ec0e38c | import pygame
import sys
pygame.init()
clock = pygame.time.Clock()
screen = pygame.display.set_mode([500, 500])
gameOn = True
x1 = 0    # circle x position (moves horizontally)
y1 = 100  # circle y position (fixed)
x2 = 100  # rectangle x position (fixed)
y2 = 0    # rectangle y position (moves vertically)
while gameOn:  # main loop: handle events, update positions, redraw
screen.fill([255,255,255])
    for event in pygame.event.get():
        if event.type == pygame.QUIT:  # window close button pressed
            pygame.quit()
            sys.exit()
    # reverse the circle's horizontal direction at the screen edges
    if x1 == 500:
        moveRight = False
    elif x1 == 0:
        moveRight = True
    # reverse the rectangle's vertical direction at the screen edges
    if y2 == 500:
        moveDown = False
    elif y2 == 0:
        moveDown = True
    # move one pixel per frame in the current direction
    if moveRight:
        x1 = x1 + 1
    else:
        x1 = x1 - 1
    if moveDown:
        y2 = y2 + 1
    else:
        y2 = y2 - 1
pygame.draw.circle(screen, [0,0,0], [x1,y1], 10)
pygame.draw.rect(screen, [0,0,0], [x2,y2,30,30])
    clock.tick(100)  # cap the loop at 100 frames per second
    pygame.display.flip()  # show the newly drawn frame
pygame.quit()
| [((4, 0, 4, 13), 'pygame.init', 'pygame.init', ({}, {}), '()', False, 'import pygame\n'), ((5, 8, 5, 27), 'pygame.time.Clock', 'pygame.time.Clock', ({}, {}), '()', False, 'import pygame\n'), ((7, 9, 7, 44), 'pygame.display.set_mode', 'pygame.display.set_mode', ({(7, 33, 7, 43): '[500, 500]'}, {}), '([500, 500])', False, 'import pygame\n'), ((46, 0, 46, 13), 'pygame.quit', 'pygame.quit', ({}, {}), '()', False, 'import pygame\n'), ((16, 17, 16, 35), 'pygame.event.get', 'pygame.event.get', ({}, {}), '()', False, 'import pygame\n'), ((41, 4, 41, 52), 'pygame.draw.circle', 'pygame.draw.circle', ({(41, 23, 41, 29): 'screen', (41, 31, 41, 38): '[0, 0, 0]', (41, 40, 41, 47): '[x1, y1]', (41, 49, 41, 51): '(10)'}, {}), '(screen, [0, 0, 0], [x1, y1], 10)', False, 'import pygame\n'), ((42, 4, 42, 52), 'pygame.draw.rect', 'pygame.draw.rect', ({(42, 21, 42, 27): 'screen', (42, 29, 42, 36): '[0, 0, 0]', (42, 38, 42, 51): '[x2, y2, 30, 30]'}, {}), '(screen, [0, 0, 0], [x2, y2, 30, 30])', False, 'import pygame\n'), ((45, 4, 45, 25), 'pygame.display.flip', 'pygame.display.flip', ({}, {}), '()', False, 'import pygame\n'), ((18, 12, 18, 25), 'pygame.quit', 'pygame.quit', ({}, {}), '()', False, 'import pygame\n'), ((19, 12, 19, 22), 'sys.exit', 'sys.exit', ({}, {}), '()', False, 'import sys\n')] |
e93fem/PyTorchNLPBook | pytorch/xor/training_a_perceptron.py | c9ea9e0b3d1b8bba6a983b425c6c03dd79d3d6b0 | import numpy as np
import torch
import matplotlib.pyplot as plt
from torch import optim, nn
from pytorch.xor.multilayer_perceptron import MultilayerPerceptron
from pytorch.xor.utils import LABELS, get_toy_data, visualize_results, plot_intermediate_representations
input_size = 2
output_size = len(set(LABELS))
num_hidden_layers = 0
hidden_size = 2 # isn't ever used but we still set it
seed = 24
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
np.random.seed(seed)
mlp1 = MultilayerPerceptron(input_size=input_size,
hidden_size=hidden_size,
num_hidden_layers=num_hidden_layers,
output_size=output_size)
print(mlp1)
batch_size = 1000
x_data_static, y_truth_static = get_toy_data(batch_size)
fig, ax = plt.subplots(1, 1, figsize=(10,5))
visualize_results(mlp1, x_data_static, y_truth_static,
ax=ax, title='Initial Perceptron State', levels=[0.5])
plt.axis('off')
plt.savefig('images/perceptron_initial.png')
plt.show()
losses = []
batch_size = 10000
n_batches = 10
max_epochs = 10
loss_change = 1.0
last_loss = 10.0
change_threshold = 1e-3
epoch = 0
all_imagefiles = []
lr = 0.01
optimizer = optim.Adam(params=mlp1.parameters(), lr=lr)
cross_ent_loss = nn.CrossEntropyLoss()
def early_termination(loss_change, change_threshold, epoch, max_epochs):
terminate_for_loss_change = loss_change < change_threshold
terminate_for_epochs = epoch > max_epochs
    # loss-change-based early stopping is disabled; terminate on epoch count only
    return terminate_for_epochs
while not early_termination(loss_change, change_threshold, epoch, max_epochs):
for _ in range(n_batches):
# step 0: fetch the data
x_data, y_target = get_toy_data(batch_size)
# step 1: zero the gradients
mlp1.zero_grad()
# step 2: run the forward pass
y_pred = mlp1(x_data).squeeze()
# step 3: compute the loss
loss = cross_ent_loss(y_pred, y_target.long())
# step 4: compute the backward pass
loss.backward()
# step 5: have the optimizer take an optimization step
optimizer.step()
# auxillary: bookkeeping
loss_value = loss.item()
losses.append(loss_value)
loss_change = abs(last_loss - loss_value)
last_loss = loss_value
print("epoch: {}: loss_value: {}".format(epoch, loss_value))
fig, ax = plt.subplots(1, 1, figsize=(10, 5))
visualize_results(mlp1, x_data_static, y_truth_static, ax=ax, epoch=epoch,
title=f"{loss_value:0.2f}; {loss_change:0.4f}")
plt.axis('off')
epoch += 1
all_imagefiles.append(f'images/perceptron_epoch{epoch}_toylearning.png')
plt.savefig(all_imagefiles[-1])
_, ax = plt.subplots(1,1,figsize=(10,5))
visualize_results(mlp1, x_data_static, y_truth_static, epoch=None, levels=[0.5], ax=ax)
plt.axis('off')
plt.savefig('images/perceptron_final.png')
plot_intermediate_representations(mlp1,
"The Perceptron's Input and Intermediate Representation",
figsize=(9, 3))
plt.savefig("images/perceptron_intermediate.png")
plt.savefig("images/figure_4_5.pdf") | [((17, 0, 17, 23), 'torch.manual_seed', 'torch.manual_seed', ({(17, 18, 17, 22): 'seed'}, {}), '(seed)', False, 'import torch\n'), ((18, 0, 18, 32), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', ({(18, 27, 18, 31): 'seed'}, {}), '(seed)', False, 'import torch\n'), ((19, 0, 19, 20), 'numpy.random.seed', 'np.random.seed', ({(19, 15, 19, 19): 'seed'}, {}), '(seed)', True, 'import numpy as np\n'), ((21, 7, 24, 52), 'pytorch.xor.multilayer_perceptron.MultilayerPerceptron', 'MultilayerPerceptron', (), '', False, 'from pytorch.xor.multilayer_perceptron import MultilayerPerceptron\n'), ((28, 32, 28, 56), 'pytorch.xor.utils.get_toy_data', 'get_toy_data', ({(28, 45, 28, 55): 'batch_size'}, {}), '(batch_size)', False, 'from pytorch.xor.utils import LABELS, get_toy_data, visualize_results, plot_intermediate_representations\n'), ((29, 10, 29, 44), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((30, 0, 31, 72), 'pytorch.xor.utils.visualize_results', 'visualize_results', (), '', False, 'from pytorch.xor.utils import LABELS, get_toy_data, visualize_results, plot_intermediate_representations\n'), ((33, 0, 33, 15), 'matplotlib.pyplot.axis', 'plt.axis', ({(33, 9, 33, 14): '"""off"""'}, {}), "('off')", True, 'import matplotlib.pyplot as plt\n'), ((34, 0, 34, 44), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(34, 12, 34, 43): '"""images/perceptron_initial.png"""'}, {}), "('images/perceptron_initial.png')", True, 'import matplotlib.pyplot as plt\n'), ((35, 0, 35, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((50, 17, 50, 38), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ({}, {}), '()', False, 'from torch import optim, nn\n'), ((97, 8, 97, 40), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((98, 0, 98, 87), 'pytorch.xor.utils.visualize_results', 'visualize_results', (), '', False, 'from pytorch.xor.utils import LABELS, get_toy_data, visualize_results, plot_intermediate_representations\n'), ((99, 0, 99, 15), 'matplotlib.pyplot.axis', 'plt.axis', ({(99, 9, 99, 14): '"""off"""'}, {}), "('off')", True, 'import matplotlib.pyplot as plt\n'), ((100, 0, 100, 42), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(100, 12, 100, 41): '"""images/perceptron_final.png"""'}, {}), "('images/perceptron_final.png')", True, 'import matplotlib.pyplot as plt\n'), ((102, 0, 104, 49), 'pytorch.xor.utils.plot_intermediate_representations', 'plot_intermediate_representations', (), '', False, 'from pytorch.xor.utils import LABELS, get_toy_data, visualize_results, plot_intermediate_representations\n'), ((105, 0, 105, 49), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(105, 12, 105, 48): '"""images/perceptron_intermediate.png"""'}, {}), "('images/perceptron_intermediate.png')", True, 'import matplotlib.pyplot as plt\n'), ((106, 0, 106, 36), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(106, 12, 106, 35): '"""images/figure_4_5.pdf"""'}, {}), "('images/figure_4_5.pdf')", True, 'import matplotlib.pyplot as plt\n'), ((89, 14, 89, 49), 'matplotlib.pyplot.subplots', 'plt.subplots', (), '', True, 'import matplotlib.pyplot as plt\n'), ((90, 4, 91, 69), 'pytorch.xor.utils.visualize_results', 'visualize_results', (), '', False, 'from pytorch.xor.utils import LABELS, get_toy_data, visualize_results, plot_intermediate_representations\n'), ((92, 4, 92, 19), 'matplotlib.pyplot.axis', 'plt.axis', ({(92, 13, 92, 18): '"""off"""'}, {}), 
"('off')", True, 'import matplotlib.pyplot as plt\n'), ((95, 4, 95, 35), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(95, 16, 95, 34): 'all_imagefiles[-1]'}, {}), '(all_imagefiles[-1])', True, 'import matplotlib.pyplot as plt\n'), ((64, 27, 64, 51), 'pytorch.xor.utils.get_toy_data', 'get_toy_data', ({(64, 40, 64, 50): 'batch_size'}, {}), '(batch_size)', False, 'from pytorch.xor.utils import LABELS, get_toy_data, visualize_results, plot_intermediate_representations\n')] |
raccoongang/socraticqs2 | mysite/api/v0/tests.py | 06201005136ee139846f857dbb2f518736e441de | import json
import mock
from django.core.urlresolvers import reverse
from pymongo.errors import ServerSelectionTimeoutError
from analytics.models import CourseReport
from core.common.mongo import c_onboarding_status, _conn
from core.common import onboarding
from ct.models import UnitLesson, StudentError
from ctms.tests import MyTestCase
HEALTH_URL = reverse('api:v0:health-check')
def test_health_positive(client, db):
result = client.get(HEALTH_URL)
assert result.status_code == 200
assert 'ok' in json.loads(result.content)
def test_health_non_ok(client, db, mocker):
"""
Ping and Stats Mongo command return non ok results.
"""
do_health = mocker.patch('api.v0.views.do_health')
do_health.return_value = {}, {}
result = client.get(HEALTH_URL)
assert result.status_code == 503
def test_health_exception(client, db, mocker):
"""
Mongo query raises exception.
"""
do_health = mocker.patch('api.v0.views.do_health')
do_health.side_effect = ServerSelectionTimeoutError()
result = client.get(HEALTH_URL)
assert result.status_code == 503
class TestOnboardingStatus(MyTestCase):
namespace = 'api:v0:onboarding-status'
def setUp(self):
super(TestOnboardingStatus, self).setUp()
# # Hack: remove all test_ databases before test
# for db in _conn.connector.list_databases():
# if 'test_' in db.get('name') and:
# _conn.connector.drop_database(db.get('name'))
self.data = {
onboarding.USER_ID: self.user.id,
onboarding.STEP_1: False,
onboarding.STEP_2: False,
onboarding.STEP_3: False,
onboarding.STEP_4: False,
}
def test_put_valid_data(self):
data_to_update = {onboarding.STEP_2: True}
c_onboarding_status().remove()
c_onboarding_status().insert(self.data.copy())
ensure_saved = c_onboarding_status().find_one({onboarding.USER_ID: self.user.id}, {'_id': False})
self.assertEqual(ensure_saved, self.data)
self.assertEqual(self.client.login(username=self.username, password=self.password), True)
response = self.client.put(
reverse('api:v0:onboarding-status'),
data=json.dumps(data_to_update),
content_type="application/json"
)
data = self.data.copy()
self.assertEqual(response.status_code, 200)
data.update(data_to_update)
mongo_data = c_onboarding_status().find_one({onboarding.USER_ID: self.user.id}, {'_id': False})
self.assertEqual(mongo_data, data)
def test_put_invalid_keys(self):
data_to_update = {'invalid_key': True}
c_onboarding_status().remove()
c_onboarding_status().insert(self.data.copy())
ensure_saved = c_onboarding_status().find_one({onboarding.USER_ID: self.user.id}, {'_id': False})
self.assertEqual(ensure_saved, self.data)
response = self.client.put(
reverse('api:v0:onboarding-status'),
data=json.dumps(data_to_update),
content_type="application/json"
)
self.assertEqual(response.status_code, 400)
def test_wo_user_403(self):
c_onboarding_status().remove()
self.client.logout()
response = self.client.get(reverse(self.namespace))
self.assertEqual(response.status_code, 403)
def test_get_with_user_200(self):
c_onboarding_status().remove()
c_onboarding_status().insert(self.data.copy())
response = self.client.get(reverse(self.namespace))
expected_data = {
"done": True,
}
response_data = json.loads(response.content)['data']
for key in response_data.keys():
self.assertSetEqual(set(expected_data), set(response_data[key]))
class ApiAccessMixinTest(object):
def test_permissions_instructor_allowed(self):
response = self.client.get(reverse(self.namespace, kwargs={'course_id': self.course.id}))
self.assertEqual(response.status_code, 200)
def test_permissions_not_instructor_disallowed(self):
self.client.login(username=self.username2, password=self.password2)
response = self.client.get(reverse(self.namespace, kwargs={'course_id': self.course.id}))
self.assertEqual(response.status_code, 403)
def test_permissions_user_not_authenticated(self):
self.client.logout()
response = self.client.get(reverse(self.namespace, kwargs={'course_id': self.course.id}))
self.assertEqual(response.status_code, 403)
def test_course_doesnt_exist(self):
response = self.client.get(reverse(self.namespace, kwargs={'course_id': 100}))
self.assertEqual(response.status_code, 404)
class TestResponseViewSet(ApiAccessMixinTest, MyTestCase):
namespace = 'api:v0:responses'
def test_serializer_author_name(self):
response = self.client.get(reverse(self.namespace, kwargs={'course_id': self.course.id}))
self.assertEqual(
json.loads(response.content)[0].get('author_name'),
self.user.get_full_name() or self.user.username
)
class TestErrorViewSet(ApiAccessMixinTest, MyTestCase):
namespace = 'api:v0:errors'
def setUp(self):
super(TestErrorViewSet, self).setUp()
self.unit_lesson_error = UnitLesson(
unit=self.unit, order=0,
lesson=self.lesson, addedBy=self.user,
treeID=self.lesson.id
)
self.unit_lesson_error.save()
self.student_error = StudentError(
response=self.resp1,
errorModel=self.unit_lesson_error,
author=self.user
)
self.student_error.save()
def test_serializer_em_data(self):
response = self.client.get(reverse(self.namespace, kwargs={'course_id': self.course.id}))
fields_set = set([
'id', 'lesson_concept_id', 'lesson_concept_isAbort', 'lesson_concept_isFail', 'lesson_text', 'treeID'
])
em_data_set = set(json.loads(response.content)[0]['em_data'])
self.assertSetEqual(fields_set, em_data_set)
class TestGenReportView(MyTestCase):
namespace = 'api:v0:gen-report'
def test_missed_course_id(self):
response = self.client.get(reverse(self.namespace))
self.assertEqual(response.status_code, 400)
def test_course_doesnt_exist(self):
response = self.client.get(reverse(self.namespace), data={'course_id': 100})
self.assertEqual(response.status_code, 404)
def test_not_allowed(self):
self.client.login(username=self.username2, password=self.password2)
response = self.client.get(reverse(self.namespace), data={'course_id': self.course.id})
self.assertEqual(response.status_code, 403)
@mock.patch('api.v0.views.report.delay')
def test_report_generated(self, report):
response = self.client.get(reverse(self.namespace), data={'course_id': self.course.id})
self.assertEqual(response.status_code, 200)
report.assert_called_with(str(self.course.id), self.user.id)
class TestCourseReportViewSet(ApiAccessMixinTest, MyTestCase):
namespace = 'api:v0:reports'
def test_serializer_data(self):
report = CourseReport(
course=self.course
)
report.save()
response = self.client.get(reverse(self.namespace, kwargs={'course_id': self.course.id}))
fields_set = {'date', 'response_report'}
data_set = set(json.loads(response.content)[0])
self.assertSetEqual(fields_set, data_set)
class TestEchoDataView(MyTestCase):
namespace = 'api:v0:echo-data'
def test_echo_405(self):
get_response = self.client.get(reverse(self.namespace))
self.assertEqual(get_response.status_code, 405)
def test_echo_200(self):
post_response = self.client.post(reverse(self.namespace))
self.assertEqual(post_response.status_code, 200)
self.client.logout()
post_response = self.client.post(reverse(self.namespace))
self.assertEqual(post_response.status_code, 200)
| [((16, 13, 16, 43), 'django.core.urlresolvers.reverse', 'reverse', ({(16, 21, 16, 42): '"""api:v0:health-check"""'}, {}), "('api:v0:health-check')", False, 'from django.core.urlresolvers import reverse\n'), ((43, 28, 43, 57), 'pymongo.errors.ServerSelectionTimeoutError', 'ServerSelectionTimeoutError', ({}, {}), '()', False, 'from pymongo.errors import ServerSelectionTimeoutError\n'), ((213, 5, 213, 44), 'mock.patch', 'mock.patch', ({(213, 16, 213, 43): '"""api.v0.views.report.delay"""'}, {}), "('api.v0.views.report.delay')", False, 'import mock\n'), ((23, 19, 23, 45), 'json.loads', 'json.loads', ({(23, 30, 23, 44): 'result.content'}, {}), '(result.content)', False, 'import json\n'), ((174, 33, 178, 9), 'ct.models.UnitLesson', 'UnitLesson', (), '', False, 'from ct.models import UnitLesson, StudentError\n'), ((181, 29, 185, 9), 'ct.models.StudentError', 'StudentError', (), '', False, 'from ct.models import UnitLesson, StudentError\n'), ((225, 17, 227, 9), 'analytics.models.CourseReport', 'CourseReport', (), '', False, 'from analytics.models import CourseReport\n'), ((84, 12, 84, 47), 'django.core.urlresolvers.reverse', 'reverse', ({(84, 20, 84, 46): '"""api:v0:onboarding-status"""'}, {}), "('api:v0:onboarding-status')", False, 'from django.core.urlresolvers import reverse\n'), ((108, 12, 108, 47), 'django.core.urlresolvers.reverse', 'reverse', ({(108, 20, 108, 46): '"""api:v0:onboarding-status"""'}, {}), "('api:v0:onboarding-status')", False, 'from django.core.urlresolvers import reverse\n'), ((117, 35, 117, 58), 'django.core.urlresolvers.reverse', 'reverse', ({(117, 43, 117, 57): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((126, 35, 126, 58), 'django.core.urlresolvers.reverse', 'reverse', ({(126, 43, 126, 57): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((130, 24, 130, 52), 'json.loads', 'json.loads', ({(130, 35, 130, 51): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((138, 35, 138, 96), 'django.core.urlresolvers.reverse', 'reverse', (), '', False, 'from django.core.urlresolvers import reverse\n'), ((143, 35, 143, 96), 'django.core.urlresolvers.reverse', 'reverse', (), '', False, 'from django.core.urlresolvers import reverse\n'), ((148, 35, 148, 96), 'django.core.urlresolvers.reverse', 'reverse', (), '', False, 'from django.core.urlresolvers import reverse\n'), ((152, 35, 152, 85), 'django.core.urlresolvers.reverse', 'reverse', (), '', False, 'from django.core.urlresolvers import reverse\n'), ((161, 35, 161, 96), 'django.core.urlresolvers.reverse', 'reverse', (), '', False, 'from django.core.urlresolvers import reverse\n'), ((189, 35, 189, 96), 'django.core.urlresolvers.reverse', 'reverse', (), '', False, 'from django.core.urlresolvers import reverse\n'), ((201, 35, 201, 58), 'django.core.urlresolvers.reverse', 'reverse', ({(201, 43, 201, 57): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((205, 35, 205, 58), 'django.core.urlresolvers.reverse', 'reverse', ({(205, 43, 205, 57): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((210, 35, 210, 58), 'django.core.urlresolvers.reverse', 'reverse', ({(210, 43, 210, 57): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((215, 35, 215, 58), 'django.core.urlresolvers.reverse', 'reverse', ({(215, 43, 215, 57): 'self.namespace'}, {}), 
'(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((229, 35, 229, 96), 'django.core.urlresolvers.reverse', 'reverse', (), '', False, 'from django.core.urlresolvers import reverse\n'), ((240, 39, 240, 62), 'django.core.urlresolvers.reverse', 'reverse', ({(240, 47, 240, 61): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((245, 41, 245, 64), 'django.core.urlresolvers.reverse', 'reverse', ({(245, 49, 245, 63): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((250, 41, 250, 64), 'django.core.urlresolvers.reverse', 'reverse', ({(250, 49, 250, 63): 'self.namespace'}, {}), '(self.namespace)', False, 'from django.core.urlresolvers import reverse\n'), ((74, 8, 74, 29), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((76, 8, 76, 29), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((78, 23, 78, 44), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((85, 17, 85, 43), 'json.dumps', 'json.dumps', ({(85, 28, 85, 42): 'data_to_update'}, {}), '(data_to_update)', False, 'import json\n'), ((91, 21, 91, 42), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((99, 8, 99, 29), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((101, 8, 101, 29), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((103, 23, 103, 44), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((109, 17, 109, 43), 'json.dumps', 'json.dumps', ({(109, 28, 109, 42): 'data_to_update'}, {}), '(data_to_update)', False, 'import json\n'), ((115, 8, 115, 29), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((122, 8, 122, 29), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((124, 8, 124, 29), 'core.common.mongo.c_onboarding_status', 'c_onboarding_status', ({}, {}), '()', False, 'from core.common.mongo import c_onboarding_status, _conn\n'), ((231, 23, 231, 51), 'json.loads', 'json.loads', ({(231, 34, 231, 50): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((193, 26, 193, 54), 'json.loads', 'json.loads', ({(193, 37, 193, 53): 'response.content'}, {}), '(response.content)', False, 'import json\n'), ((163, 12, 163, 40), 'json.loads', 'json.loads', ({(163, 23, 163, 39): 'response.content'}, {}), '(response.content)', False, 'import json\n')] |
anthonymark33/Global-signbank | signbank/settings/base.py | ae61984a24f1cc0801d4621c81b882154ce99098 | # Django settings for signbank project.
import os
from signbank.settings.server_specific import *
from datetime import datetime
DEBUG = True
PROJECT_DIR = os.path.dirname(BASE_DIR)
MANAGERS = ADMINS
TIME_ZONE = 'Europe/Amsterdam'
LOCALE_PATHS = [BASE_DIR+'conf/locale']
# in the database, SITE_ID 1 is example.com
SITE_ID = 2
USE_I18N = True
USE_L10N = True
USE_TZ = True
MEDIA_ROOT = WRITABLE_FOLDER
MEDIA_URL = PREFIX_URL+'/media/'
MEDIA_MOBILE_URL = MEDIA_URL
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = PREFIX_URL
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = PREFIX_URL+'/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_DIR, "media"),
)
# STATICFILES_STORAGE = ( os.path.join(PROJECT_DIR, "static"), )
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '^g=q21r_nnmbz49d!vs*2gvpll-y9b@&t3k2r3c$*u&2la5!%s'
MIDDLEWARE_CLASSES = (
# 'debug_toolbar.middleware.DebugToolbarMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'signbank.pages.middleware.PageFallbackMiddleware',
# 'django_mobile.middleware.MobileDetectionMiddleware',
# 'django_mobile.middleware.SetFlavourMiddleware',
# 'debug_toolbar.middleware.DebugToolbarMiddleware',
'reversion.middleware.RevisionMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_DIR, 'templates/' + SIGNBANK_VERSION_CODE + '-templates'),
os.path.join(PROJECT_DIR, 'signbank/registration/templates/')],
'OPTIONS': {
'context_processors': [
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"signbank.context_processors.url",
"signbank.pages.context_processors.menu",
# "django_mobile.context_processors.flavour",
],
'loaders': [
# 'django_mobile.loader.Loader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
},
},
]
# add the Email backend to allow logins using email as username
AUTHENTICATION_BACKENDS = (
"signbank.registration.EmailBackend",
"django.contrib.auth.backends.ModelBackend",
'guardian.backends.ObjectPermissionBackend',
)
AUTH_PROFILE_MODULE = 'dictionary.UserProfile'
INTERNAL_IPS = ('127.0.0.1','131.174.132.138')
ROOT_URLCONF = 'signbank.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'signbank.wsgi.application'
INSTALLED_APPS = (
'modeltranslation',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.staticfiles',
'bootstrap3',
'django_summernote',
# 'django_select2',
# 'easy_select2',
'signbank.dictionary',
'signbank.feedback',
#'signbank.registration',
'signbank.pages',
'signbank.attachments',
'signbank.video',
'reversion',
#'django_mobile',
'tagging',
'guardian',
#'debug_toolbar'
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# turn on lots of logging or not
DO_LOGGING = False
LOG_FILENAME = "debug.log"
SOUTH_TESTS_MIGRATE = False
## Application settings for signbank
## Settings controlling page contents
# do we implement safe search for anonymous users?
# if True, any gloss that is tagged lexis:crude will be removed from
# search results for users who are not logged in
ANON_SAFE_SEARCH = False
# do we show the tag based search for anonymous users?
ANON_TAG_SEARCH = False
# do we display the previous/next links to signs, requires gloss.sn to be used consistently
SIGN_NAVIGATION = False
# which definition fields do we show and in what order?
DEFINITION_FIELDS = ['general', 'noun', 'verb', 'interact', 'deictic', 'modifier', 'question', 'augment', 'note']
HANDSHAPE_RESULT_FIELDS = ['machine_value', 'english_name', 'dutch_name', 'chinese_name',
'hsFingSel', 'hsFingConf', 'hsFingSel2', 'hsFingConf2', 'hsFingUnsel', 'hsSpread', 'hsAperture']
# location and URL for uploaded files
UPLOAD_ROOT = MEDIA_ROOT + "upload/"
UPLOAD_URL = MEDIA_URL + "upload/"
# Location for comment videos relative to MEDIA_ROOT
COMMENT_VIDEO_LOCATION = "comments"
# Location for videos associated with pages
PAGES_VIDEO_LOCATION = 'pages'
# location for upload of videos relative to MEDIA_ROOT
# videos are stored here prior to copying over to the main
# storage location
VIDEO_UPLOAD_LOCATION = "upload"
# path to store uploaded attachments relative to MEDIA_ROOT
ATTACHMENT_LOCATION = 'attachments'
# which fields from the Gloss model should be included in the quick update form on the sign view
QUICK_UPDATE_GLOSS_FIELDS = ['signlanguage', 'dialect']
# should we always require a login for viewing dictionary content
ALWAYS_REQUIRE_LOGIN = True
# do we allow people to register for the site
ALLOW_REGISTRATION = True
ACCOUNT_ACTIVATION_DAYS = 7
# show the number signs page or an under construction page?
SHOW_NUMBERSIGNS = True
LOGIN_URL = PREFIX_URL+'/accounts/login/'
LOGIN_REDIRECT_URL = PREFIX_URL+'/signs/recently_added/'
# location of ffmpeg, used to convert uploaded videos
# FFMPEG_PROGRAM = "/Applications/ffmpegX.app/Contents/Resources/ffmpeg"
FFMPEG_TIMEOUT = 60
FFMPEG_OPTIONS = ["-vcodec", "h264", "-an"]
# defines the aspect ratio for videos
VIDEO_ASPECT_RATIO = 3.0/4.0
# settings for django-tagging
FORCE_LOWERCASE_TAGS = False
PRIMARY_CSS = "css/"+SIGNBANK_VERSION_CODE+"/main.css"
import mimetypes
mimetypes.add_type("video/mp4", ".mov", True)
# a list of tags we're allowed to use
XALLOWED_TAGS = [ '',
'workflow:needs video',
'workflow:redo video',
'workflow:problematic',
'corpus:attested',
'lexis:doubtlex',
'phonology:alternating',
'phonology:dominant hand only',
'phonology:double handed',
'phonology:forearm rotation',
'phonology:handshape change',
'phonology:onehand',
'phonology:parallel',
'phonology:symmetrical',
'phonology:two handed',
]
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
EARLIEST_GLOSS_CREATION_DATE = datetime(2015,1,1)
SUPPORTED_CITATION_IMAGE_EXTENSIONS = ['.jpg','.jpeg','.png']
MAXIMUM_UPLOAD_SIZE = 5000000
MINIMUM_OVERLAP_BETWEEN_SIGNING_HANDS_IN_CNGT = 40
DISABLE_MOVING_THUMBNAILS_ABOVE_NR_OF_GLOSSES = 200
DATA_UPLOAD_MAX_NUMBER_FIELDS = None
DATA_UPLOAD_MAX_MEMORY_SIZE = None | [((8, 14, 8, 39), 'os.path.dirname', 'os.path.dirname', ({(8, 30, 8, 38): 'BASE_DIR'}, {}), '(BASE_DIR)', False, 'import os\n'), ((256, 0, 256, 45), 'mimetypes.add_type', 'mimetypes.add_type', ({(256, 19, 256, 30): '"""video/mp4"""', (256, 32, 256, 38): '""".mov"""', (256, 40, 256, 44): '(True)'}, {}), "('video/mp4', '.mov', True)", False, 'import mimetypes\n'), ((280, 31, 280, 49), 'datetime.datetime', 'datetime', ({(280, 40, 280, 44): '2015', (280, 45, 280, 46): '1', (280, 47, 280, 48): '1'}, {}), '(2015, 1, 1)', False, 'from datetime import datetime\n'), ((39, 4, 39, 38), 'os.path.join', 'os.path.join', ({(39, 17, 39, 28): 'PROJECT_DIR', (39, 30, 39, 37): '"""media"""'}, {}), "(PROJECT_DIR, 'media')", False, 'import os\n'), ((75, 17, 75, 95), 'os.path.join', 'os.path.join', ({(75, 30, 75, 41): 'PROJECT_DIR', (75, 43, 75, 94): "('templates/' + SIGNBANK_VERSION_CODE + '-templates')"}, {}), "(PROJECT_DIR, 'templates/' + SIGNBANK_VERSION_CODE + '-templates')", False, 'import os\n'), ((76, 17, 76, 78), 'os.path.join', 'os.path.join', ({(76, 30, 76, 41): 'PROJECT_DIR', (76, 43, 76, 77): '"""signbank/registration/templates/"""'}, {}), "(PROJECT_DIR, 'signbank/registration/templates/')", False, 'import os\n')] |
crombus/ocs-ci | ocs_ci/ocs/cluster.py | 20340365882bdd06ddb6cd65bbd7df0ba7e2c2d8 | """
A module for all rook functionalities and abstractions.
This module has rook-related classes and supporting functionality for working with a
rook cluster. It works under the assumption that an OCP cluster is already
functional and properly configured for interaction.
"""
import base64
import logging
import random
import re
import threading
import yaml
import time
import ocs_ci.ocs.resources.pod as pod
from ocs_ci.ocs.exceptions import UnexpectedBehaviour
from ocs_ci.ocs.resources import ocs, storage_cluster
import ocs_ci.ocs.constants as constant
from ocs_ci.ocs.resources.mcg import MCG
from ocs_ci.utility.retry import retry
from ocs_ci.utility.utils import (
TimeoutSampler,
run_cmd,
convert_device_size,
get_trim_mean,
)
from ocs_ci.ocs.utils import get_pod_name_by_pattern
from ocs_ci.framework import config
from ocs_ci.ocs import ocp, constants, exceptions
from ocs_ci.ocs.exceptions import PoolNotFound
from ocs_ci.ocs.resources.pvc import get_all_pvc_objs
logger = logging.getLogger(__name__)
class CephCluster(object):
"""
Handles all cluster related operations from ceph perspective
    This class is a depiction of a ceph cluster. It contains references to
    pod objects which represent ceph cluster entities.
Attributes:
        pods (list): A list of ceph cluster related pods
cluster_name (str): Name of ceph cluster
namespace (str): openshift Namespace where this cluster lives
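    Example:
        A minimal usage sketch (assumes a deployed cluster is reachable):
            ceph_cluster = CephCluster()
            ceph_cluster.cluster_health_check(timeout=300)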
"""
def __init__(self):
"""
        Cluster object initializer; this object needs to be initialized
        after cluster deployment. However, it's harmless to do so anywhere.
"""
# cluster_name is name of cluster in rook of type CephCluster
self.POD = ocp.OCP(kind="Pod", namespace=config.ENV_DATA["cluster_namespace"])
self.CEPHCLUSTER = ocp.OCP(
kind="CephCluster", namespace=config.ENV_DATA["cluster_namespace"]
)
self.CEPHFS = ocp.OCP(
kind="CephFilesystem", namespace=config.ENV_DATA["cluster_namespace"]
)
self.DEP = ocp.OCP(
kind="Deployment", namespace=config.ENV_DATA["cluster_namespace"]
)
self.cluster_resource_config = self.CEPHCLUSTER.get().get("items")[0]
try:
self.cephfs_config = self.CEPHFS.get().get("items")[0]
except IndexError as e:
logging.warning(e)
logging.warning("No CephFS found")
self.cephfs_config = None
self._cluster_name = self.cluster_resource_config.get("metadata").get("name")
self._namespace = self.cluster_resource_config.get("metadata").get("namespace")
# We are not invoking ocs.create() here
# assuming cluster creation is done somewhere after deployment
# So just load ocs with existing cluster details
self.cluster = ocs.OCS(**self.cluster_resource_config)
if self.cephfs_config:
self.cephfs = ocs.OCS(**self.cephfs_config)
else:
self.cephfs = None
self.mon_selector = constant.MON_APP_LABEL
self.mds_selector = constant.MDS_APP_LABEL
self.tool_selector = constant.TOOL_APP_LABEL
self.mgr_selector = constant.MGR_APP_LABEL
self.osd_selector = constant.OSD_APP_LABEL
self.noobaa_selector = constant.NOOBAA_APP_LABEL
self.noobaa_core_selector = constant.NOOBAA_CORE_POD_LABEL
self.mons = []
self._ceph_pods = []
self.mdss = []
self.mgrs = []
self.osds = []
self.noobaas = []
self.rgws = []
self.toolbox = None
self.mds_count = 0
self.mon_count = 0
self.mgr_count = 0
self.osd_count = 0
self.noobaa_count = 0
self.rgw_count = 0
self._mcg_obj = None
self.scan_cluster()
logging.info(f"Number of mons = {self.mon_count}")
logging.info(f"Number of mds = {self.mds_count}")
self.used_space = 0
@property
def mcg_obj(self):
if not self._mcg_obj:
self._mcg_obj = MCG()
return self._mcg_obj
@property
def cluster_name(self):
return self._cluster_name
@property
def namespace(self):
return self._namespace
@property
def pods(self):
return self._ceph_pods
def scan_cluster(self):
"""
Get accurate info on current state of pods
"""
self._ceph_pods = pod.get_all_pods(self._namespace)
# TODO: Workaround for BZ1748325:
mons = pod.get_mon_pods(self.mon_selector, self.namespace)
for mon in mons:
if mon.ocp.get_resource_status(mon.name) == constant.STATUS_RUNNING:
self.mons.append(mon)
# TODO: End of workaround for BZ1748325
self.mdss = pod.get_mds_pods(self.mds_selector, self.namespace)
self.mgrs = pod.get_mgr_pods(self.mgr_selector, self.namespace)
self.osds = pod.get_osd_pods(self.osd_selector, self.namespace)
self.noobaas = pod.get_noobaa_pods(self.noobaa_selector, self.namespace)
self.rgws = pod.get_rgw_pods()
self.toolbox = pod.get_ceph_tools_pod()
# set port attrib on mon pods
self.mons = list(map(self.set_port, self.mons))
self.cluster.reload()
if self.cephfs:
self.cephfs.reload()
else:
try:
self.cephfs_config = self.CEPHFS.get().get("items")[0]
self.cephfs = ocs.OCS(**self.cephfs_config)
self.cephfs.reload()
except IndexError as e:
logging.warning(e)
logging.warning("No CephFS found")
self.mon_count = len(self.mons)
self.mds_count = len(self.mdss)
self.mgr_count = len(self.mgrs)
self.osd_count = len(self.osds)
self.noobaa_count = len(self.noobaas)
self.rgw_count = len(self.rgws)
@staticmethod
def set_port(pod):
"""
Set port attribute on pod.
        The port attribute for a mon is required for secrets, and this attribute
        is not a member of the original pod class.
Args:
pod(Pod): Pod object without 'port' attribute
Returns:
pod(Pod): A modified pod object with 'port' attribute set
"""
container = pod.pod_data.get("spec").get("containers")
port = container[0]["ports"][0]["containerPort"]
# Dynamically added attribute 'port'
pod.port = port
logging.info(f"port={pod.port}")
return pod
def is_health_ok(self):
"""
Returns:
bool: True if "HEALTH_OK" else False
"""
self.cluster.reload()
return self.cluster.data["status"]["ceph"]["health"] == "HEALTH_OK"
def cluster_health_check(self, timeout=None):
"""
Check overall cluster health.
Relying on health reported by CephCluster.get()
Args:
timeout (int): in seconds. By default timeout value will be scaled
based on number of ceph pods in the cluster. This is just a
                crude number. It's been observed that as the number of pods
                increases, it takes more time for the cluster to reach HEALTH_OK.
Returns:
bool: True if "HEALTH_OK" else False
Raises:
CephHealthException: if cluster is not healthy
"""
# Scale timeout only if user hasn't passed any value
timeout = timeout or (10 * len(self.pods))
sample = TimeoutSampler(timeout=timeout, sleep=3, func=self.is_health_ok)
if not sample.wait_for_func_status(result=True):
raise exceptions.CephHealthException("Cluster health is NOT OK")
# This way of checking health of different cluster entities and
# raising only CephHealthException is not elegant.
# TODO: add an attribute in CephHealthException, called "reason"
# which should tell because of which exact cluster entity health
# is not ok ?
expected_mon_count = self.mon_count
expected_mds_count = self.mds_count
self.scan_cluster()
try:
self.mon_health_check(expected_mon_count)
except exceptions.MonCountException as e:
logger.error(e)
raise exceptions.CephHealthException("Cluster health is NOT OK")
try:
if not expected_mds_count:
pass
else:
self.mds_health_check(expected_mds_count)
except exceptions.MDSCountException as e:
logger.error(e)
raise exceptions.CephHealthException("Cluster health is NOT OK")
# TODO: OSD and MGR health check
logger.info("Cluster HEALTH_OK")
        # This scan is for reconciliation of the *_count attributes, because during
        # the first scan in this function some of the pods may not have been up,
        # which would have set the counts to lower numbers
self.scan_cluster()
# Check Noobaa health
self.wait_for_noobaa_health_ok()
def noobaa_health_check(self):
"""
Check Noobaa health
"""
if not self.mcg_obj.status:
raise exceptions.NoobaaHealthException("Cluster health is NOT OK")
def wait_for_noobaa_health_ok(self, tries=60, delay=5):
"""
Wait for Noobaa health to be OK
"""
return retry(
exceptions.NoobaaHealthException, tries=tries, delay=delay, backoff=1
)(self.noobaa_health_check)()
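# Note on the retry pattern above (illustrative): retry(...) returns a
# decorator, so retry(Exc, tries=60, delay=5, backoff=1)(func)() builds a
# retrying wrapper around `func` and invokes it immediately, re-calling
# noobaa_health_check every 5 seconds until it stops raising
# NoobaaHealthException or the 60 tries are exhausted.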
def mon_change_count(self, new_count):
"""
Change mon count in the cluster
Args:
new_count(int): Absolute number of mons required
"""
self.cluster.reload()
self.cluster.data["spec"]["mon"]["count"] = new_count
logger.info(self.cluster.data)
self.cluster.apply(**self.cluster.data)
self.mon_count = new_count
self.cluster_health_check()
logger.info(f"Mon count changed to {new_count}")
self.cluster.reload()
def mon_health_check(self, count):
"""
Mon health check based on pod count
Args:
count (int): Expected number of mon pods
Raises:
MonCountException: if mon pod count doesn't match
"""
timeout = 10 * len(self.pods)
logger.info(f"Expected MONs = {count}")
try:
assert self.POD.wait_for_resource(
condition="Running",
selector=self.mon_selector,
resource_count=count,
timeout=timeout,
sleep=3,
)
# TODO: Workaround for BZ1748325:
actual_mons = pod.get_mon_pods()
actual_running_mons = list()
for mon in actual_mons:
if mon.ocp.get_resource_status(mon.name) == constant.STATUS_RUNNING:
actual_running_mons.append(mon)
actual = len(actual_running_mons)
# TODO: End of workaround for BZ1748325
assert count == actual, f"Expected {count}, Got {actual}"
except exceptions.TimeoutExpiredError as e:
logger.error(e)
raise exceptions.MonCountException(
f"Failed to achieve desired Mon count" f" {count}"
)
def mds_change_count(self, new_count):
"""
Change mds count in the cluster
Args:
new_count(int): Absolute number of active mdss required
"""
self.cephfs.data["spec"]["metadataServer"]["activeCount"] = new_count
self.cephfs.apply(**self.cephfs.data)
logger.info(f"MDS active count changed to {new_count}")
if self.cephfs.data["spec"]["metadataServer"]["activeStandby"]:
expected = new_count * 2
else:
expected = new_count
self.mds_count = expected
self.cluster_health_check()
self.cephfs.reload()
def mds_health_check(self, count):
"""
MDS health check based on pod count
Args:
count (int): number of pods expected
Raises:
MDSCountException: if pod count doesn't match
"""
timeout = 10 * len(self.pods)
try:
assert self.POD.wait_for_resource(
condition="Running",
selector=self.mds_selector,
resource_count=count,
timeout=timeout,
sleep=3,
)
except AssertionError as e:
logger.error(e)
raise exceptions.MDSCountException(
f"Failed to achieve desired MDS count" f" {count}"
)
def get_admin_key(self):
"""
Returns:
adminkey (str): base64 encoded key
"""
return self.get_user_key("client.admin")
def set_noout(self):
"""
Set noout flag for maintenance
"""
self.toolbox.exec_cmd_on_pod("ceph osd set noout")
def unset_noout(self):
"""
Unset noout flag for peering
"""
self.toolbox.exec_cmd_on_pod("ceph osd unset noout")
def get_user_key(self, user):
"""
Args:
user (str): ceph username ex: client.user1
Returns:
key (str): base64 encoded user key
"""
out = self.toolbox.exec_cmd_on_pod(f"ceph auth get-key {user} --format json")
if "ENOENT" in out:
return False
key_base64 = base64.b64encode(out["key"].encode()).decode()
return key_base64
def create_user(self, username, caps):
"""
Create a ceph user in the cluster
Args:
username (str): ex client.user1
caps (str): ceph caps ex: mon 'allow r' osd 'allow rw'
Return:
return value of get_user_key()
"""
cmd = f"ceph auth add {username} {caps}"
# As of now the ceph auth command writes its output to stderr;
# this is yet to be handled
out = self.toolbox.exec_cmd_on_pod(cmd)
logging.info(type(out))
return self.get_user_key(username)
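# Example usage (illustrative sketch; the user name and caps below are
# hypothetical, not part of the original module):
#
#     cluster = CephCluster()
#     key = cluster.create_user("client.demo", "mon 'allow r' osd 'allow rw'")
#     assert key, "user creation or key retrieval failed"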
def get_mons_from_cluster(self):
"""
Getting the list of mons from the cluster
Returns:
available_mon (list): Returns the mons from the cluster
"""
ret = self.DEP.get(
resource_name="", out_yaml_format=False, selector="app=rook-ceph-mon"
)
available_mon = re.findall(r"[\w-]+mon-+[\w-]", ret)
return available_mon
def remove_mon_from_cluster(self):
"""
Removing the mon pod from deployment
Returns:
remove_mon(bool): True if removal of mon is successful, False otherwise
"""
mons = self.get_mons_from_cluster()
after_delete_mon_count = len(mons) - 1
random_mon = random.choice(mons)
remove_mon = self.DEP.delete(resource_name=random_mon)
assert self.POD.wait_for_resource(
condition=constant.STATUS_RUNNING,
resource_count=after_delete_mon_count,
selector="app=rook-ceph-mon",
)
logging.info(f"Removed the mon {random_mon} from the cluster")
return remove_mon
@retry(UnexpectedBehaviour, tries=20, delay=10, backoff=1)
def check_ceph_pool_used_space(self, cbp_name):
"""
Check for the used space of a pool in cluster
Returns:
used_in_gb (float): Amount of used space in pool (in GBs)
Raises:
UnexpectedBehaviour: If used size keeps varying in Ceph status
"""
ct_pod = pod.get_ceph_tools_pod()
rados_status = ct_pod.exec_ceph_cmd(ceph_cmd=f"rados df -p {cbp_name}")
assert rados_status is not None
used = rados_status["pools"][0]["size_bytes"]
used_in_gb = format(used / constants.GB, ".4f")
if self.used_space and self.used_space == used_in_gb:
return float(self.used_space)
self.used_space = used_in_gb
raise UnexpectedBehaviour("In Rados df, Used size is varying")
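# Note on the retry pattern above (illustrative): the method raises
# UnexpectedBehaviour until two consecutive samples of the pool's used size
# agree, and the @retry decorator keeps re-invoking it (up to 20 tries,
# 10 seconds apart), so callers effectively block until usage stabilizes.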
def get_ceph_health(self, detail=False):
"""
Exec `ceph health` cmd on tools pod and return the status of the ceph
cluster.
Args:
detail (bool): If True the 'ceph health detail' is executed
Returns:
str: Output of the ceph health command.
"""
ceph_health_cmd = "ceph health"
if detail:
ceph_health_cmd = f"{ceph_health_cmd} detail"
return self.toolbox.exec_cmd_on_pod(
ceph_health_cmd,
out_yaml_format=False,
)
def get_ceph_status(self, format=None):
"""
Exec `ceph status` cmd on tools pod and return its output.
Args:
format (str) : Format of the output (e.g. json-pretty, json, plain)
Returns:
str: Output of the ceph status command.
"""
cmd = "ceph status"
if format:
cmd += f" -f {format}"
return self.toolbox.exec_cmd_on_pod(cmd, out_yaml_format=False)
def get_ceph_capacity(self):
"""
The function gets the total amount of storage capacity of the ocs cluster.
The calculation is <Num of OSDs> * <OSD size> / <replica number>;
it does not take the currently used capacity into account.
Returns:
float: Total storage capacity in GiB (GiB is for development environment)
"""
storage_cluster_obj = storage_cluster.StorageCluster(
resource_name=config.ENV_DATA["storage_cluster_name"],
namespace=config.ENV_DATA["cluster_namespace"],
)
replica = int(
storage_cluster_obj.data["spec"]["storageDeviceSets"][0]["replica"]
)
ceph_pod = pod.get_ceph_tools_pod()
ceph_status = ceph_pod.exec_ceph_cmd(ceph_cmd="ceph df")
usable_capacity = (
int(ceph_status["stats"]["total_bytes"]) / replica / constant.GB
)
return usable_capacity
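# Worked example (hypothetical numbers): if ceph reports
# total_bytes == 6 * 1024 * constant.GB (6 TiB raw) and replica == 3,
# the usable capacity returned is 6144 / 3 = 2048 GiB.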
def get_ceph_cluster_iops(self):
"""
The function gets the IOPS from the ocs cluster
Returns:
Total IOPS in the cluster
"""
ceph_pod = pod.get_ceph_tools_pod()
ceph_status = ceph_pod.exec_ceph_cmd(ceph_cmd="ceph status")
read_ops = ceph_status["pgmap"]["read_op_per_sec"]
write_ops = ceph_status["pgmap"]["write_op_per_sec"]
cluster_iops = read_ops + write_ops
return cluster_iops
def get_iops_percentage(self, osd_size=2):
"""
The function calculates the IOPS percentage
of the cluster depending on number of osds in the cluster
Args:
osd_size (int): Size of 1 OSD in Ti
Returns:
IOPS percentage of the OCS cluster
"""
osd_count = count_cluster_osd()
iops_per_osd = osd_size * constants.IOPS_FOR_1TiB_OSD
iops_in_cluster = self.get_ceph_cluster_iops()
osd_iops_limit = iops_per_osd * osd_count
iops_percentage = (iops_in_cluster / osd_iops_limit) * 100
logging.info(f"The IOPS percentage of the cluster is {iops_percentage}%")
return iops_percentage
def get_cluster_throughput(self):
"""
Function to get the throughput of ocs cluster
Returns:
float: The write throughput of the cluster in MiB/s
"""
ceph_status = self.get_ceph_status()
for item in ceph_status.split("\n"):
if "client" in item:
throughput_data = item.strip("client: ").split(",")
throughput_data = throughput_data[:2]
# Converting all B/s and KiB/s to MiB/s
throughput = 0
for val in throughput_data:
throughput += [
float(re.findall(r"\d+", val)[0]) * constants.TP_CONVERSION[key]
for key in constants.TP_CONVERSION.keys()
if key in val
][0]
logger.info(
f"The {val[-2:].upper()} throughput is {throughput} MiB/s"
)
return throughput
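# Worked example (hypothetical status line): for
# "client:   853 KiB/s rd, 4.0 MiB/s wr, ..." the first two comma-separated
# fields are converted via constants.TP_CONVERSION, so the returned
# throughput is roughly 853/1024 + 4 ≈ 4.83 MiB/s.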
def get_throughput_percentage(self):
"""
Function to get throughput percentage of the ocs cluster
Returns:
Throughput percentage of the cluster
"""
throughput_of_cluster = self.get_cluster_throughput()
throughput_percentage = (
throughput_of_cluster / constants.THROUGHPUT_LIMIT_OSD
) * 100
logging.info(
f"The throughput percentage of the cluster is {throughput_percentage}%"
)
return throughput_percentage
def calc_trim_mean_throughput(self, samples=8):
"""
Calculate the cluster average throughput out of a few samples
Args:
samples (int): The number of samples to take
Returns:
float: The average cluster throughput
"""
throughput_vals = [self.get_cluster_throughput() for _ in range(samples)]
return round(get_trim_mean(throughput_vals), 3)
def get_rebalance_status(self):
"""
This function gets the rebalance status
Returns:
bool: True if rebalance is completed, False otherwise
"""
ceph_pod = pod.get_ceph_tools_pod()
ceph_status = ceph_pod.exec_ceph_cmd(ceph_cmd="ceph status")
ceph_health = ceph_pod.exec_ceph_cmd(ceph_cmd="ceph health")
total_pg_count = ceph_status["pgmap"]["num_pgs"]
pg_states = ceph_status["pgmap"]["pgs_by_state"]
logger.info(ceph_health)
logger.info(pg_states)
for states in pg_states:
return (
states["state_name"] == "active+clean"
and states["count"] == total_pg_count
)
def wait_for_rebalance(self, timeout=600):
"""
Wait for re-balance to complete
Args:
timeout (int): Time to wait for the completion of re-balance
Returns:
bool: True if rebalance completed, False otherwise
"""
try:
for rebalance in TimeoutSampler(
timeout=timeout, sleep=10, func=self.get_rebalance_status
):
if rebalance:
logging.info("Re-balance is completed")
return True
except exceptions.TimeoutExpiredError:
logger.error(
f"Data re-balance failed to complete within the given "
f"timeout of {timeout} seconds"
)
return False
def time_taken_to_complete_rebalance(self, timeout=600):
"""
This function calculates the time taken to complete
rebalance
Args:
timeout (int): Time to wait for the completion of rebalance
Returns:
int : Time taken in minutes for the completion of rebalance
"""
start_time = time.time()
assert self.wait_for_rebalance(timeout=timeout), (
f"Data re-balance failed to complete within the given "
f"timeout of {timeout} seconds"
)
time_taken = time.time() - start_time
return time_taken / 60
class CephHealthMonitor(threading.Thread):
"""
Context manager class for monitoring ceph health status of CephCluster.
If CephCluster will get to HEALTH_ERROR state it will save the ceph status
to health_error_status variable and will stop monitoring.
"""
def __init__(self, ceph_cluster, sleep=5):
"""
Constructor for ceph health status thread.
Args:
ceph_cluster (CephCluster): Reference to CephCluster object.
sleep (int): Number of seconds to sleep between health checks.
"""
self.ceph_cluster = ceph_cluster
self.sleep = sleep
self.health_error_status = None
self.health_monitor_enabled = False
self.latest_health_status = None
super(CephHealthMonitor, self).__init__()
def run(self):
self.health_monitor_enabled = True
while self.health_monitor_enabled and (not self.health_error_status):
time.sleep(self.sleep)
self.latest_health_status = self.ceph_cluster.get_ceph_health(detail=True)
if "HEALTH_ERROR" in self.latest_health_status:
self.health_error_status = self.ceph_cluster.get_ceph_status()
self.log_error_status()
def __enter__(self):
self.start()
def __exit__(self, exception_type, value, traceback):
"""
Exit method for context manager
Raises:
CephHealthException: If no other exception occurred during
execution of context manager and HEALTH_ERROR is detected
during the monitoring.
exception_type: In case of exception raised during processing of
the context manager.
"""
self.health_monitor_enabled = False
if self.health_error_status:
self.log_error_status()
if exception_type:
raise exception_type.with_traceback(value, traceback)
if self.health_error_status:
raise exceptions.CephHealthException(
f"During monitoring of Ceph health status hit HEALTH_ERROR: "
f"{self.health_error_status}"
)
return True
def log_error_status(self):
logger.error(
f"ERROR HEALTH STATUS DETECTED! " f"Status: {self.health_error_status}"
)
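# Example usage of the context manager (illustrative sketch;
# `disruptive_operation` below is a hypothetical callable):
#
#     ceph_cluster = CephCluster()
#     with CephHealthMonitor(ceph_cluster, sleep=10):
#         disruptive_operation()
#     # on exit, CephHealthException is raised if HEALTH_ERROR was observed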
def validate_ocs_pods_on_pvc(pods, pvc_names):
"""
Validate that each OCS pod has a PVC. This checks whether a PVC name,
e.g. rook-ceph-mon-a, is part of the pod name, e.g. rook-ceph-mon-a-56f67f5968-6j4px.
Args:
pods (list): OCS pod names
pvc_names (list): names of all PVCs
Raises:
AssertionError: If no PVC is found for one of the pods
"""
logger.info(f"Validating if each pod from: {pods} has PVC from {pvc_names}.")
for pod_name in pods:
found_pvc = ""
for pvc in pvc_names:
if pvc in pod_name:
found_pvc = pvc
if found_pvc:
logger.info(f"PVC {found_pvc} found for pod {pod_name}")
continue
assert found_pvc, f"No PVC found for pod: {pod_name}!"
def validate_cluster_on_pvc():
"""
Validate creation of PVCs for MON and OSD pods.
Also validate that those PVCs are attached to the OCS pods
Raises:
AssertionError: If PVC is not mounted on one or more OCS pods
"""
# Get the PVCs for selected label (MON/OSD)
ns = config.ENV_DATA["cluster_namespace"]
ocs_pvc_obj = get_all_pvc_objs(namespace=ns)
# Check all pvc's are in bound state
pvc_names = []
for pvc_obj in ocs_pvc_obj:
if pvc_obj.name.startswith(
constants.DEFAULT_DEVICESET_PVC_NAME
) or pvc_obj.name.startswith(constants.DEFAULT_MON_PVC_NAME):
assert (
pvc_obj.status == constants.STATUS_BOUND
), f"PVC {pvc_obj.name} is not Bound"
logger.info(f"PVC {pvc_obj.name} is in Bound state")
pvc_names.append(pvc_obj.name)
mon_pods = get_pod_name_by_pattern("rook-ceph-mon", ns)
if not config.DEPLOYMENT.get("local_storage"):
logger.info("Validating all mon pods have PVC")
validate_ocs_pods_on_pvc(mon_pods, pvc_names)
else:
logger.debug(
"Skipping validation if all mon pods have PVC because in LSO "
"deployment we don't have mon pods backed by PVC"
)
logger.info("Validating all osd pods have PVC")
osd_deviceset_pods = get_pod_name_by_pattern(
"rook-ceph-osd-prepare-ocs-deviceset", ns
)
validate_ocs_pods_on_pvc(osd_deviceset_pods, pvc_names)
osd_pods = get_pod_name_by_pattern("rook-ceph-osd", ns, filter="prepare")
for ceph_pod in mon_pods + osd_pods:
out = run_cmd(f"oc -n {ns} get pods {ceph_pod} -o yaml")
out_yaml = yaml.safe_load(out)
for vol in out_yaml["spec"]["volumes"]:
if vol.get("persistentVolumeClaim"):
claimName = vol.get("persistentVolumeClaim").get("claimName")
logger.info(f"{ceph_pod} backed by pvc {claimName}")
assert claimName in pvc_names, "Ceph Internal Volume not backed by PVC"
def count_cluster_osd():
"""
The function returns the number of cluster OSDs
Returns:
osd_count (int): number of OSD pods in current cluster
"""
storage_cluster_obj = storage_cluster.StorageCluster(
resource_name=config.ENV_DATA["storage_cluster_name"],
namespace=config.ENV_DATA["cluster_namespace"],
)
storage_cluster_obj.reload_data()
osd_count = int(
storage_cluster_obj.data["spec"]["storageDeviceSets"][0]["count"]
) * int(storage_cluster_obj.data["spec"]["storageDeviceSets"][0]["replica"])
return osd_count
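# Worked example (hypothetical spec): a storage cluster whose first
# storageDeviceSet has count == 2 and replica == 3 yields 2 * 3 = 6 OSDs.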
def validate_pdb_creation():
"""
Validate creation of PDBs for MON, MDS and OSD pods.
Raises:
AssertionError: If required PDBs were not created.
"""
pdb_obj = ocp.OCP(kind="PodDisruptionBudget")
item_list = pdb_obj.get().get("items")
pdb_list = [item["metadata"]["name"] for item in item_list]
osd_count = count_cluster_osd()
pdb_required = [constants.MDS_PDB, constants.MON_PDB]
for num in range(osd_count):
pdb_required.append(constants.OSD_PDB + str(num))
pdb_list.sort()
pdb_required.sort()
for required, given in zip(pdb_required, pdb_list):
assert required == given, f"{required} was not created"
logger.info(f"All required PDBs created: {pdb_required}")
def get_osd_utilization():
"""
Get osd utilization value
Returns:
osd_filled (dict): Dict of osd name and its used value
i.e {'osd.1': 15.276289408185841, 'osd.0': 15.276289408185841, 'osd.2': 15.276289408185841}
"""
osd_filled = {}
ceph_cmd = "ceph osd df"
ct_pod = pod.get_ceph_tools_pod()
output = ct_pod.exec_ceph_cmd(ceph_cmd=ceph_cmd)
for osd in output.get("nodes"):
osd_filled[osd["name"]] = osd["utilization"]
return osd_filled
def get_ceph_df_detail():
"""
Get ceph osd df detail
Returns:
dict: 'ceph df details' command output
"""
ceph_cmd = "ceph df detail"
ct_pod = pod.get_ceph_tools_pod()
return ct_pod.exec_ceph_cmd(ceph_cmd=ceph_cmd)
def validate_replica_data(pool_name, replica):
"""
Check if data is replica 2 or 3
Args:
replica (int): size of the replica (2 or 3)
pool_name (str): name of the pool to check replica
Returns:
bool: True if the replicated data size meets the replica config, False otherwise
"""
ceph_df_detail_output = get_ceph_df_detail()
pool_list = ceph_df_detail_output.get("pools")
for pool in pool_list:
if pool.get("name") == pool_name:
logger.info(f"{pool_name}")
stored = pool["stats"]["stored"]
byte_used = pool["stats"]["bytes_used"]
compress_bytes_used = pool["stats"]["compress_bytes_used"]
compress_under_bytes = pool["stats"]["compress_under_bytes"]
byte_used = byte_used + compress_under_bytes - compress_bytes_used
store_ratio = byte_used / stored
if (replica + 0.2) > store_ratio > (replica - 0.2):
logger.info(f"pool {pool_name} meets rep {replica} size")
return True
else:
logger.info(
f"pool {pool_name} does not meet rep {replica} size."
f" Store ratio is {store_ratio}"
)
return False
raise PoolNotFound(f"Pool {pool_name} not found on cluster")
def validate_compression(pool_name):
"""
Check if data was compressed
Args:
pool_name (str): name of the pool to check replica
Returns:
bool: True if compression works. False if not
"""
ceph_df_detail_output = get_ceph_df_detail()
pool_list = ceph_df_detail_output.get("pools")
for pool in pool_list:
if pool.get("name") == pool_name:
logger.info(f"{pool_name}")
byte_used = pool["stats"]["bytes_used"]
compress_bytes_used = pool["stats"]["compress_bytes_used"]
compress_under_bytes = pool["stats"]["compress_under_bytes"]
all_byte_used = byte_used + compress_under_bytes - compress_bytes_used
compression_ratio = byte_used / all_byte_used
logger.info(f"this is the comp_ratio {compression_ratio}")
if 0.6 < compression_ratio:
logger.info(
f"Compression ratio {compression_ratio} is " f"larger than 0.6"
)
return True
else:
logger.info(
f"Compression ratio {compression_ratio} is " f"smaller than 0.6"
)
return False
raise PoolNotFound(f"Pool {pool_name} not found on cluster")
def validate_osd_utilization(osd_used=80):
"""
Validates that each osd's utilization is at least the osd_used value
Args:
osd_used (int): osd used value
Returns:
bool: True if every osd's used value is equal to or greater than
osd_used. False otherwise.
"""
_rc = True
osd_filled = get_osd_utilization()
for osd, value in osd_filled.items():
if int(value) >= osd_used:
logger.info(f"{osd} used value {value}")
else:
_rc = False
logger.warning(f"{osd} used value {value}")
return _rc
def get_pgs_per_osd():
"""
Function to get ceph pg count per OSD
Returns:
osd_dict (dict): Dict of osd name and its used value
i.e {'osd.0': 136, 'osd.2': 136, 'osd.1': 136}
"""
osd_dict = {}
ceph_cmd = "ceph osd df"
ct_pod = pod.get_ceph_tools_pod()
output = ct_pod.exec_ceph_cmd(ceph_cmd=ceph_cmd)
for osd in output.get("nodes"):
osd_dict[osd["name"]] = osd["pgs"]
return osd_dict
def get_balancer_eval():
"""
Function to get ceph pg balancer eval value
Returns:
eval_out (float): Eval output of pg balancer
"""
ceph_cmd = "ceph balancer eval"
ct_pod = pod.get_ceph_tools_pod()
eval_out = ct_pod.exec_ceph_cmd(ceph_cmd=ceph_cmd).split(" ")
return float(eval_out[3])
def get_pg_balancer_status():
"""
Function to check whether the pg_balancer is active and its mode is upmap
Returns:
bool: True if active and mode is upmap, else False
"""
# Check whether the PG balancer is active
ceph_cmd = "ceph balancer status"
ct_pod = pod.get_ceph_tools_pod()
output = ct_pod.exec_ceph_cmd(ceph_cmd=ceph_cmd)
# Check 'mode' is 'upmap', based on suggestion from Ceph QE
# TODO: Revisit this if mode needs change.
if output["active"] and output["mode"] == "upmap":
logging.info("PG balancer is active and mode is upmap")
return True
else:
logging.error("PG balancer is not active")
return False
def validate_pg_balancer():
"""
Validate whether data is equally distributed across OSDs
Returns:
bool: True if each OSD's PG count is within 10 of the average, else False
"""
# Check OSD utilization when the pg balancer is active
# TODO: Revisit this if pg difference value needs change
# TODO: Revisit eval value if pg balancer mode changes from 'upmap'
if get_pg_balancer_status():
eval = get_balancer_eval()
osd_dict = get_pgs_per_osd()
osd_avg_pg_value = round(sum(osd_dict.values()) / len(osd_dict))
osd_pg_value_flag = True
for key, value in osd_dict.items():
diff = abs(value - osd_avg_pg_value)
if diff <= 10:
logging.info(f"{key} PG difference {diff} is acceptable")
else:
logging.error(f"{key} PG difference {diff} is not acceptable")
osd_pg_value_flag = False
if osd_pg_value_flag and eval <= 0.025:
logging.info(
f"Eval value is {eval} and per-OSD PG difference from "
f"the average is <=10, which is acceptable"
)
return True
else:
logging.error(
f"Eval value is {eval} or per-OSD PG difference from "
f"the average is >10, which is high and not acceptable"
)
return False
else:
logging.info("pg_balancer is not active")
return False
def get_percent_used_capacity():
"""
Function to calculate the percentage of used capacity in a cluster
Returns:
float: The percentage of the used capacity in the cluster
"""
ct_pod = pod.get_ceph_tools_pod()
output = ct_pod.exec_ceph_cmd(ceph_cmd="ceph df")
total_used = output.get("stats").get("total_used_raw_bytes")
total_avail = output.get("stats").get("total_bytes")
return 100.0 * total_used / total_avail
def get_osd_pods_memory_sum():
"""
Get the sum of memory of all OSD pods. This is used to determine the size
needed for a PVC so that, when IO runs over it, the OSD caches will be filled.
Returns:
int: The sum of the OSD pods memory in GB
"""
osd_pods = pod.get_osd_pods()
num_of_osd_pods = len(osd_pods)
osd_pod_mem_size_str = osd_pods[0].get_memory().get("osd")
osd_pod_mem_size = convert_device_size(
unformatted_size=osd_pod_mem_size_str, units_to_covert_to="GB"
)
return num_of_osd_pods * osd_pod_mem_size
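# Worked example (hypothetical values): with 3 OSD pods whose "osd"
# container memory converts to 5 GB each, the function returns 3 * 5 = 15.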
def get_child_nodes_osd_tree(node_id, osd_tree):
"""
This function finds the children of a node from the 'ceph osd tree' and returns them as list
Args:
node_id (int): the id of the node for which the children to be retrieved
osd_tree (dict): dictionary containing the output of 'ceph osd tree'
Returns:
list: of 'children' of a given node_id
"""
for i in range(len(osd_tree["nodes"])):
if osd_tree["nodes"][i]["id"] == node_id:
return osd_tree["nodes"][i]["children"]
def check_osds_in_hosts_osd_tree(hosts, osd_tree):
"""
Checks if osds are formed correctly after cluster expansion
Args:
hosts (list) : List of hosts
osd_tree (dict): output of the 'ceph osd tree' command
Returns:
bool : True if osd tree formatted correctly
"""
for each_host in hosts:
osd_in_each_host = get_child_nodes_osd_tree(each_host, osd_tree)
if len(osd_in_each_host) != 1:
logger.error(
"Error. ceph osd tree is NOT formed correctly after cluster expansion"
)
return False
logger.info("osd tree verification Passed")
return True
def check_osd_tree_1az_vmware(osd_tree, number_of_osds):
"""
Checks whether an OSD tree is created/modified correctly. This can be used as a verification step for
deployment and cluster expansion tests.
This function is specifically for ocs cluster created on 1 AZ VMWare setup
Args:
osd_tree (dict): Dictionary of the values which represent 'osd tree'.
number_of_osds (int): total number of osds in the cluster
Returns:
bool: True, if the ceph osd tree is formed correctly. Else False
"""
# in case of vmware, there will be only one zone as of now. The OSDs are arranged as follows:
# ID CLASS WEIGHT TYPE NAME STATUS REWEIGHT PRI-AFF
# -1 0.99326 root default
# -8 0.33109 rack rack0
# -7 0.33109 host ocs-deviceset-0-0-dktqc
# 1 hdd 0.33109 osd.1 up 1.00000 1.00000
# There will be 3 racks - rack0, rack1, rack2.
# When cluster expansion is successfully done, a host and an osd are added in each rack.
# The number of hosts will be equal to the number of osds the cluster has. Each rack can
# have multiple hosts but each host will have only one osd under it.
number_of_hosts_expected = int(number_of_osds / 3)
all_hosts = []
racks = osd_tree["nodes"][0]["children"]
for rack in racks:
hosts = get_child_nodes_osd_tree(rack, osd_tree)
if len(hosts) != number_of_hosts_expected:
logging.error(
f"Number of hosts under rack {rack} "
f"is not matching the expected ={number_of_hosts_expected} "
)
return False
else:
all_hosts.append(hosts)
all_hosts_flatten = [item for sublist in all_hosts for item in sublist]
return check_osds_in_hosts_osd_tree(all_hosts_flatten, osd_tree)
def check_osd_tree_3az_aws(osd_tree, number_of_osds):
"""
Checks whether an OSD tree is created/modified correctly. This can be used as a verification step for
deployment and cluster expansion tests.
This function is specifically for ocs cluster created on 3 AZ AWS config
Args:
osd_tree (dict): Dictionary of the values which represent 'osd tree'.
number_of_osds (int): total number of osds in the cluster
Returns:
bool: True, if the ceph osd tree is formed correctly. Else False
"""
all_hosts = []
region = osd_tree["nodes"][0]["children"]
zones = get_child_nodes_osd_tree(region[0], osd_tree)
for each_zone in zones:
hosts_in_each_zone = get_child_nodes_osd_tree(each_zone, osd_tree)
if len(hosts_in_each_zone) != number_of_osds / 3: # 3 is replica_factor
logger.error("number of hosts in zone is incorrect")
return False
else:
all_hosts.append(hosts_in_each_zone)
all_hosts_flatten = [item for sublist in all_hosts for item in sublist]
return check_osds_in_hosts_osd_tree(all_hosts_flatten, osd_tree)
def check_osd_tree_1az_aws(osd_tree, number_of_osds):
"""
Checks whether an OSD tree is created/modified correctly. This can be used as a verification step for
deployment and cluster expansion tests.
This function is specifically for ocs cluster created on 1 AZ AWS config
Args:
osd_tree (dict): Dictionary of the values which represent 'osd tree'.
number_of_osds (int): total number of osds in the cluster
Returns:
bool: True, if the ceph osd tree is formed correctly. Else False
"""
all_hosts = []
region = osd_tree["nodes"][0]["children"]
zones = get_child_nodes_osd_tree(region[0], osd_tree)
racks = get_child_nodes_osd_tree(zones[0], osd_tree)
logging.info(f"racks = {racks}")
if len(racks) != 3:
logging.error(f"Expected 3 racks but got {len(racks)}")
return False
for each_rack in racks:
hosts_in_each_rack = get_child_nodes_osd_tree(each_rack, osd_tree)
if len(hosts_in_each_rack) != number_of_osds / 3: # 3 is replica_factor
logging.error("number of hosts in rack is incorrect")
return False
else:
logging.info(f"adding host...{hosts_in_each_rack}")
all_hosts.append(hosts_in_each_rack)
all_hosts_flatten = [item for sublist in all_hosts for item in sublist]
return check_osds_in_hosts_osd_tree(all_hosts_flatten, osd_tree)
def check_osds_in_hosts_are_up(osd_tree):
"""
Check if all the OSDs are in status 'up'
Args:
osd_tree (dict): The ceph osd tree
Returns:
bool: True if all the OSDs are in status 'up'. Else False
"""
for n in osd_tree["nodes"]:
if n["type"] == "osd":
if n["status"] != "up":
logger.warning(f"osd with name {n['name']} is not up")
return False
return True
def check_ceph_osd_tree():
"""
Checks whether an OSD tree is created/modified correctly.
It is a summary of the previous functions: 'check_osd_tree_1az_vmware',
'check_osd_tree_3az_aws', 'check_osd_tree_1az_aws'.
Returns:
bool: True, if the ceph osd tree is formed correctly. Else False
"""
osd_pods = pod.get_osd_pods()
# 'ceph osd tree' should show the new osds under right nodes/hosts
# Verification is different for 3 AZ and 1 AZ configs
ct_pod = pod.get_ceph_tools_pod()
tree_output = ct_pod.exec_ceph_cmd(ceph_cmd="ceph osd tree")
if config.ENV_DATA["platform"].lower() == constants.VSPHERE_PLATFORM:
return check_osd_tree_1az_vmware(tree_output, len(osd_pods))
aws_number_of_zones = 3
if config.ENV_DATA["platform"].lower() == constants.AWS_PLATFORM:
# parse the osd tree. if it contains a node 'rack' then it's an
# AWS_1AZ cluster. Else, an AWS_3AZ cluster
for i in range(len(tree_output["nodes"])):
if tree_output["nodes"][i]["name"] in "rack0":
aws_number_of_zones = 1
if aws_number_of_zones == 1:
return check_osd_tree_1az_aws(tree_output, len(osd_pods))
else:
return check_osd_tree_3az_aws(tree_output, len(osd_pods))
def check_ceph_osd_tree_after_node_replacement():
"""
Check the ceph osd tree after the process of node replacement.
Returns:
bool: True if the ceph osd tree formation is correct,
and all the OSD's are up. Else False
"""
ct_pod = pod.get_ceph_tools_pod()
osd_tree = ct_pod.exec_ceph_cmd(ceph_cmd="ceph osd tree")
if not check_ceph_osd_tree():
logger.warning("Incorrect ceph osd tree formation found")
return False
if not check_osds_in_hosts_are_up(osd_tree):
logger.warning("Not all the osd's are in status 'up'")
return False
return True
def silence_ceph_osd_crash_warning(osd_pod_name):
"""
Silence the osd crash warning of a specific osd pod
Args:
osd_pod_name (str): The name of the osd pod which we need to
silence the crash warning
Returns:
bool: True if it found the osd crash with name 'osd_pod_name'. False otherwise
"""
ct_pod = pod.get_ceph_tools_pod()
new_crash_objects_list = ct_pod.exec_ceph_cmd(ceph_cmd="ceph crash ls-new")
for crash_obj in new_crash_objects_list:
if crash_obj.get("utsname_hostname") == osd_pod_name:
logger.info(f"Found osd crash with name {osd_pod_name}")
obj_crash_id = crash_obj.get("crash_id")
crash_info = ct_pod.exec_ceph_cmd(
ceph_cmd=f"ceph crash info {obj_crash_id}"
)
logger.info(f"ceph crash info: {crash_info}")
logger.info("silence the osd crash warning")
ct_pod.exec_ceph_cmd(ceph_cmd=f"ceph crash archive {obj_crash_id}")
return True
logger.info(
f"Didn't find osd crash with name {osd_pod_name} in ceph crash warnings"
)
return False
def wait_for_silence_ceph_osd_crash_warning(osd_pod_name, timeout=900):
"""
Wait for 'timeout' seconds to check for the ceph osd crash warning,
and silence it.
Args:
osd_pod_name (str): The name of the osd pod which we need to
silence the crash warning
timeout (int): time in seconds to wait for silence the osd crash warning
Returns:
bool: True if it found the osd crash with name 'osd_pod_name'. False otherwise
"""
try:
for silence_old_osd_crash_warning in TimeoutSampler(
timeout=timeout,
sleep=30,
func=silence_ceph_osd_crash_warning,
osd_pod_name=osd_pod_name,
):
if silence_old_osd_crash_warning:
return True
except exceptions.TimeoutExpiredError:
return False
class CephClusterExternal(CephCluster):
"""
Handle all external ceph cluster related functionalities
Assumption: Cephcluster Kind resource exists
"""
def __init__(self):
self.POD = ocp.OCP(kind="Pod", namespace=config.ENV_DATA["cluster_namespace"])
self.CEPHCLUSTER = ocp.OCP(
kind="CephCluster", namespace=config.ENV_DATA["cluster_namespace"]
)
self.wait_for_cluster_cr()
self._cluster_name = self.cluster_resource.get("metadata").get("name")
self._namespace = self.cluster_resource.get("metadata").get("namespace")
self.cluster = ocs.OCS(**self.cluster_resource)
self.wait_for_nooba_cr()
@property
def cluster_name(self):
return self._cluster_name
@property
def namespace(self):
return self._namespace
@retry(IndexError, 10, 3, 1)
def wait_for_cluster_cr(self):
"""
We have to wait for the cluster CR to appear, otherwise
we hit a list-index-out-of-range error.
"""
cluster_cr = self.CEPHCLUSTER.get()
self.cluster_resource = cluster_cr.get("items")[0]
@retry((IndexError, AttributeError, TypeError), 100, 3, 1)
def wait_for_nooba_cr(self):
self._mcg_obj = MCG()
def cluster_health_check(self, timeout=300):
"""
This would be a comprehensive cluster health check,
which includes checking pods and external ceph cluster health.
Raises:
CephHealthException: if cluster health is not OK
"""
sample = TimeoutSampler(timeout=timeout, sleep=3, func=self.is_health_ok)
if not sample.wait_for_func_status(result=True):
raise exceptions.CephHealthException("Cluster health is NOT OK")
self.wait_for_noobaa_health_ok()
self.validate_pvc()
def validate_pvc(self):
"""
Check whether all PVCs are in bound state
"""
ocs_pvc_obj = get_all_pvc_objs(namespace=self.namespace)
for pvc_obj in ocs_pvc_obj:
assert pvc_obj.status == constants.STATUS_BOUND, (
f"PVC {pvc_obj.name} is not Bound"
)
logger.info(f"PVC {pvc_obj.name} is in Bound state")
| [((35, 9, 35, 36), 'logging.getLogger', 'logging.getLogger', ({(35, 27, 35, 35): '__name__'}, {}), '(__name__)', False, 'import logging\n'), ((455, 5, 455, 62), 'ocs_ci.utility.retry.retry', 'retry', (), '', False, 'from ocs_ci.utility.retry import retry\n'), ((805, 18, 805, 48), 'ocs_ci.ocs.resources.pvc.get_all_pvc_objs', 'get_all_pvc_objs', (), '', False, 'from ocs_ci.ocs.resources.pvc import get_all_pvc_objs\n'), ((820, 15, 820, 59), 'ocs_ci.ocs.utils.get_pod_name_by_pattern', 'get_pod_name_by_pattern', ({(820, 39, 820, 54): '"""rook-ceph-mon"""', (820, 56, 820, 58): 'ns'}, {}), "('rook-ceph-mon', ns)", False, 'from ocs_ci.ocs.utils import get_pod_name_by_pattern\n'), ((830, 25, 832, 5), 'ocs_ci.ocs.utils.get_pod_name_by_pattern', 'get_pod_name_by_pattern', ({(831, 8, 831, 45): '"""rook-ceph-osd-prepare-ocs-deviceset"""', (831, 47, 831, 49): 'ns'}, {}), "('rook-ceph-osd-prepare-ocs-deviceset', ns)", False, 'from ocs_ci.ocs.utils import get_pod_name_by_pattern\n'), ((834, 15, 834, 77), 'ocs_ci.ocs.utils.get_pod_name_by_pattern', 'get_pod_name_by_pattern', (), '', False, 'from ocs_ci.ocs.utils import get_pod_name_by_pattern\n'), ((853, 26, 856, 5), 'ocs_ci.ocs.resources.storage_cluster.StorageCluster', 'storage_cluster.StorageCluster', (), '', False, 'from ocs_ci.ocs.resources import ocs, storage_cluster\n'), ((872, 14, 872, 49), 'ocs_ci.ocs.ocp.OCP', 'ocp.OCP', (), '', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((899, 13, 899, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((916, 13, 916, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((954, 10, 954, 64), 'ocs_ci.ocs.exceptions.PoolNotFound', 'PoolNotFound', ({(954, 23, 954, 63): 'f"""Pool {pool_name} not found on cluster"""'}, {}), "(f'Pool {pool_name} not found on cluster')", False, 'from ocs_ci.ocs.exceptions import PoolNotFound\n'), ((989, 10, 989, 64), 'ocs_ci.ocs.exceptions.PoolNotFound', 'PoolNotFound', ({(989, 23, 989, 63): 'f"""Pool {pool_name} not found on cluster"""'}, {}), "(f'Pool {pool_name} not found on cluster')", False, 'from ocs_ci.ocs.exceptions import PoolNotFound\n'), ((1027, 13, 1027, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1044, 13, 1044, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1059, 13, 1059, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1119, 13, 1119, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1135, 15, 1135, 33), 'ocs_ci.ocs.resources.pod.get_osd_pods', 'pod.get_osd_pods', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1138, 23, 1140, 5), 'ocs_ci.utility.utils.convert_device_size', 'convert_device_size', (), '', False, 'from ocs_ci.utility.utils import TimeoutSampler, run_cmd, convert_device_size, get_trim_mean\n'), ((1277, 4, 1277, 36), 'logging.info', 'logging.info', ({(1277, 17, 1277, 35): 'f"""racks = {racks}"""'}, {}), "(f'racks = {racks}')", False, 'import logging\n'), ((1323, 15, 1323, 33), 'ocs_ci.ocs.resources.pod.get_osd_pods', 'pod.get_osd_pods', ({}, 
{}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1326, 13, 1326, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1353, 13, 1353, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1377, 13, 1377, 37), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((1452, 5, 1452, 32), 'ocs_ci.utility.retry.retry', 'retry', ({(1452, 11, 1452, 21): 'IndexError', (1452, 23, 1452, 25): '(10)', (1452, 27, 1452, 28): '(3)', (1452, 30, 1452, 31): '(1)'}, {}), '(IndexError, 10, 3, 1)', False, 'from ocs_ci.utility.retry import retry\n'), ((1462, 5, 1462, 62), 'ocs_ci.utility.retry.retry', 'retry', ({(1462, 11, 1462, 50): '(IndexError, AttributeError, TypeError)', (1462, 52, 1462, 55): '(100)', (1462, 57, 1462, 58): '(3)', (1462, 60, 1462, 61): '(1)'}, {}), '((IndexError, AttributeError, TypeError), 100, 3, 1)', False, 'from ocs_ci.utility.retry import retry\n'), ((58, 19, 58, 86), 'ocs_ci.ocs.ocp.OCP', 'ocp.OCP', (), '', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((59, 27, 61, 9), 'ocs_ci.ocs.ocp.OCP', 'ocp.OCP', (), '', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((62, 22, 64, 9), 'ocs_ci.ocs.ocp.OCP', 'ocp.OCP', (), '', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((65, 19, 67, 9), 'ocs_ci.ocs.ocp.OCP', 'ocp.OCP', (), '', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((83, 23, 83, 62), 'ocs_ci.ocs.resources.ocs.OCS', 'ocs.OCS', ({}, {}), '(**self.cluster_resource_config)', False, 'from ocs_ci.ocs.resources import ocs, storage_cluster\n'), ((112, 8, 112, 58), 'logging.info', 'logging.info', ({(112, 21, 112, 57): 'f"""Number of mons = {self.mon_count}"""'}, {}), "(f'Number of mons = {self.mon_count}')", False, 'import logging\n'), ((113, 8, 113, 57), 'logging.info', 'logging.info', ({(113, 21, 113, 56): 'f"""Number of mds = {self.mds_count}"""'}, {}), "(f'Number of mds = {self.mds_count}')", False, 'import logging\n'), ((139, 26, 139, 59), 'ocs_ci.ocs.resources.pod.get_all_pods', 'pod.get_all_pods', ({(139, 43, 139, 58): 'self._namespace'}, {}), '(self._namespace)', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((141, 15, 141, 66), 'ocs_ci.ocs.resources.pod.get_mon_pods', 'pod.get_mon_pods', ({(141, 32, 141, 49): 'self.mon_selector', (141, 51, 141, 65): 'self.namespace'}, {}), '(self.mon_selector, self.namespace)', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((146, 20, 146, 71), 'ocs_ci.ocs.resources.pod.get_mds_pods', 'pod.get_mds_pods', ({(146, 37, 146, 54): 'self.mds_selector', (146, 56, 146, 70): 'self.namespace'}, {}), '(self.mds_selector, self.namespace)', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((147, 20, 147, 71), 'ocs_ci.ocs.resources.pod.get_mgr_pods', 'pod.get_mgr_pods', ({(147, 37, 147, 54): 'self.mgr_selector', (147, 56, 147, 70): 'self.namespace'}, {}), '(self.mgr_selector, self.namespace)', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((148, 20, 148, 71), 'ocs_ci.ocs.resources.pod.get_osd_pods', 'pod.get_osd_pods', ({(148, 37, 148, 54): 'self.osd_selector', (148, 56, 148, 70): 'self.namespace'}, {}), '(self.osd_selector, self.namespace)', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((149, 23, 149, 80), 'ocs_ci.ocs.resources.pod.get_noobaa_pods', 'pod.get_noobaa_pods', ({(149, 43, 149, 63): 
'self.noobaa_selector', (149, 65, 149, 79): 'self.namespace'}, {}), '(self.noobaa_selector, self.namespace)', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((150, 20, 150, 38), 'ocs_ci.ocs.resources.pod.get_rgw_pods', 'pod.get_rgw_pods', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((151, 23, 151, 47), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((191, 8, 191, 40), 'logging.info', 'logging.info', ({(191, 21, 191, 39): 'f"""port={pod.port}"""'}, {}), "(f'port={pod.port}')", False, 'import logging\n'), ((221, 17, 221, 81), 'ocs_ci.utility.utils.TimeoutSampler', 'TimeoutSampler', (), '', False, 'from ocs_ci.utility.utils import TimeoutSampler, run_cmd, convert_device_size, get_trim_mean\n'), ((433, 24, 433, 60), 're.findall', 're.findall', ({(433, 35, 433, 54): '"""[\\\\w-]+mon-+[\\\\w-]"""', (433, 56, 433, 59): 'ret'}, {}), "('[\\\\w-]+mon-+[\\\\w-]', ret)", False, 'import re\n'), ((445, 21, 445, 40), 'random.choice', 'random.choice', ({(445, 35, 445, 39): 'mons'}, {}), '(mons)', False, 'import random\n'), ((452, 8, 452, 70), 'logging.info', 'logging.info', ({(452, 21, 452, 69): 'f"""Removed the mon {random_mon} from the cluster"""'}, {}), "(f'Removed the mon {random_mon} from the cluster')", False, 'import logging\n'), ((466, 17, 466, 41), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((474, 14, 474, 70), 'ocs_ci.ocs.exceptions.UnexpectedBehaviour', 'UnexpectedBehaviour', ({(474, 34, 474, 69): '"""In Rados df, Used size is varying"""'}, {}), "('In Rados df, Used size is varying')", False, 'from ocs_ci.ocs.exceptions import UnexpectedBehaviour\n'), ((523, 30, 526, 9), 'ocs_ci.ocs.resources.storage_cluster.StorageCluster', 'storage_cluster.StorageCluster', (), '', False, 'from ocs_ci.ocs.resources import ocs, storage_cluster\n'), ((531, 19, 531, 43), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((548, 19, 548, 43), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((573, 8, 573, 81), 'logging.info', 'logging.info', ({(573, 21, 573, 80): 'f"""The IOPS percentage of the cluster is {iops_percentage}%"""'}, {}), "(f'The IOPS percentage of the cluster is {iops_percentage}%')", False, 'import logging\n'), ((615, 8, 617, 9), 'logging.info', 'logging.info', ({(616, 12, 616, 83): 'f"""The throughput percentage of the cluster is {throughput_percentage}%"""'}, {}), "(\n f'The throughput percentage of the cluster is {throughput_percentage}%')", False, 'import logging\n'), ((643, 19, 643, 43), 'ocs_ci.ocs.resources.pod.get_ceph_tools_pod', 'pod.get_ceph_tools_pod', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((693, 21, 693, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((821, 11, 821, 49), 'ocs_ci.framework.config.DEPLOYMENT.get', 'config.DEPLOYMENT.get', ({(821, 33, 821, 48): '"""local_storage"""'}, {}), "('local_storage')", False, 'from ocs_ci.framework import config\n'), ((836, 14, 836, 64), 'ocs_ci.utility.utils.run_cmd', 'run_cmd', ({(836, 22, 836, 63): 'f"""oc -n {ns} get pods {ceph_pod} -o yaml"""'}, {}), "(f'oc -n {ns} get pods {ceph_pod} -o yaml')", False, 'from ocs_ci.utility.utils import TimeoutSampler, run_cmd, convert_device_size, get_trim_mean\n'), 
((837, 19, 837, 38), 'yaml.safe_load', 'yaml.safe_load', ({(837, 34, 837, 37): 'out'}, {}), '(out)', False, 'import yaml\n'), ((1065, 8, 1065, 63), 'logging.info', 'logging.info', ({(1065, 21, 1065, 62): '"""PG balancer is active and mode is upmap"""'}, {}), "('PG balancer is active and mode is upmap')", False, 'import logging\n'), ((1068, 8, 1068, 50), 'logging.error', 'logging.error', ({(1068, 22, 1068, 49): '"""PG balancer is not active"""'}, {}), "('PG balancer is not active')", False, 'import logging\n'), ((1108, 8, 1108, 49), 'logging.info', 'logging.info', ({(1108, 21, 1108, 48): '"""pg_balancer is not active"""'}, {}), "('pg_balancer is not active')", False, 'import logging\n'), ((1413, 45, 1418, 9), 'ocs_ci.utility.utils.TimeoutSampler', 'TimeoutSampler', (), '', False, 'from ocs_ci.utility.utils import TimeoutSampler, run_cmd, convert_device_size, get_trim_mean\n'), ((1433, 19, 1433, 86), 'ocs_ci.ocs.ocp.OCP', 'ocp.OCP', (), '', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((1434, 27, 1436, 9), 'ocs_ci.ocs.ocp.OCP', 'ocp.OCP', (), '', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((1441, 23, 1441, 55), 'ocs_ci.ocs.resources.ocs.OCS', 'ocs.OCS', ({}, {}), '(**self.cluster_resource)', False, 'from ocs_ci.ocs.resources import ocs, storage_cluster\n'), ((1464, 24, 1464, 29), 'ocs_ci.ocs.resources.mcg.MCG', 'MCG', ({}, {}), '()', False, 'from ocs_ci.ocs.resources.mcg import MCG\n'), ((1472, 17, 1472, 81), 'ocs_ci.utility.utils.TimeoutSampler', 'TimeoutSampler', (), '', False, 'from ocs_ci.utility.utils import TimeoutSampler, run_cmd, convert_device_size, get_trim_mean\n'), ((1484, 22, 1484, 64), 'ocs_ci.ocs.resources.pvc.get_all_pvc_objs', 'get_all_pvc_objs', (), '', False, 'from ocs_ci.ocs.resources.pvc import get_all_pvc_objs\n'), ((85, 26, 85, 55), 'ocs_ci.ocs.resources.ocs.OCS', 'ocs.OCS', ({}, {}), '(**self.cephfs_config)', False, 'from ocs_ci.ocs.resources import ocs, storage_cluster\n'), ((120, 28, 120, 33), 'ocs_ci.ocs.resources.mcg.MCG', 'MCG', ({}, {}), '()', False, 'from ocs_ci.ocs.resources.mcg import MCG\n'), ((224, 18, 224, 76), 'ocs_ci.ocs.exceptions.CephHealthException', 'exceptions.CephHealthException', ({(224, 49, 224, 75): '"""Cluster health is NOT OK"""'}, {}), "('Cluster health is NOT OK')", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((265, 18, 265, 78), 'ocs_ci.ocs.exceptions.NoobaaHealthException', 'exceptions.NoobaaHealthException', ({(265, 51, 265, 77): '"""Cluster health is NOT OK"""'}, {}), "('Cluster health is NOT OK')", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((313, 26, 313, 44), 'ocs_ci.ocs.resources.pod.get_mon_pods', 'pod.get_mon_pods', ({}, {}), '()', True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((632, 21, 632, 51), 'ocs_ci.utility.utils.get_trim_mean', 'get_trim_mean', ({(632, 35, 632, 50): 'throughput_vals'}, {}), '(throughput_vals)', False, 'from ocs_ci.utility.utils import TimeoutSampler, run_cmd, convert_device_size, get_trim_mean\n'), ((668, 29, 670, 13), 'ocs_ci.utility.utils.TimeoutSampler', 'TimeoutSampler', (), '', False, 'from ocs_ci.utility.utils import TimeoutSampler, run_cmd, convert_device_size, get_trim_mean\n'), ((698, 21, 698, 32), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((729, 12, 729, 34), 'time.sleep', 'time.sleep', ({(729, 23, 729, 33): 'self.sleep'}, {}), '(self.sleep)', False, 'import time\n'), ((756, 18, 759, 13), 'ocs_ci.ocs.exceptions.CephHealthException', 'exceptions.CephHealthException', ({(757, 16, 758, 45): 
'f"""During monitoring of Ceph health status hit HEALTH_ERROR: {self.health_error_status}"""'}, {}), "(\n f'During monitoring of Ceph health status hit HEALTH_ERROR: {self.health_error_status}'\n )", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((1096, 12, 1099, 13), 'logging.info', 'logging.info', ({(1097, 16, 1098, 65): 'f"""Eval value is {eval} and pg distribution average difference is <=10 which is acceptable"""'}, {}), "(\n f'Eval value is {eval} and pg distribution average difference is <=10 which is acceptable'\n )", False, 'import logging\n'), ((1102, 12, 1105, 13), 'logging.error', 'logging.error', ({(1103, 16, 1104, 78): 'f"""Eval value is {eval} and pg distribution average difference is >=10 which is high and not acceptable"""'}, {}), "(\n f'Eval value is {eval} and pg distribution average difference is >=10 which is high and not acceptable'\n )", False, 'import logging\n'), ((1216, 12, 1219, 13), 'logging.error', 'logging.error', ({(1217, 16, 1218, 76): 'f"""Number of hosts under rack {rack} is not matching the expected ={number_of_hosts_expected} """'}, {}), "(\n f'Number of hosts under rack {rack} is not matching the expected ={number_of_hosts_expected} '\n )", False, 'import logging\n'), ((1283, 12, 1283, 65), 'logging.error', 'logging.error', ({(1283, 26, 1283, 64): '"""number of hosts in rack is incorrect"""'}, {}), "('number of hosts in rack is incorrect')", False, 'import logging\n'), ((1286, 12, 1286, 63), 'logging.info', 'logging.info', ({(1286, 25, 1286, 62): 'f"""adding host...{hosts_in_each_rack}"""'}, {}), "(f'adding host...{hosts_in_each_rack}')", False, 'import logging\n'), ((1474, 18, 1474, 76), 'ocs_ci.ocs.exceptions.CephHealthException', 'exceptions.CephHealthException', ({(1474, 49, 1474, 75): '"""Cluster health is NOT OK"""'}, {}), "('Cluster health is NOT OK')", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((73, 12, 73, 30), 'logging.warning', 'logging.warning', ({(73, 28, 73, 29): 'e'}, {}), '(e)', False, 'import logging\n'), ((74, 12, 74, 46), 'logging.warning', 'logging.warning', ({(74, 28, 74, 45): '"""No CephFS found"""'}, {}), "('No CephFS found')", False, 'import logging\n'), ((161, 30, 161, 59), 'ocs_ci.ocs.resources.ocs.OCS', 'ocs.OCS', ({}, {}), '(**self.cephfs_config)', False, 'from ocs_ci.ocs.resources import ocs, storage_cluster\n'), ((187, 20, 187, 44), 'ocs_ci.ocs.resources.pod.pod_data.get', 'pod.pod_data.get', ({(187, 37, 187, 43): '"""spec"""'}, {}), "('spec')", True, 'import ocs_ci.ocs.resources.pod as pod\n'), ((238, 18, 238, 76), 'ocs_ci.ocs.exceptions.CephHealthException', 'exceptions.CephHealthException', ({(238, 49, 238, 75): '"""Cluster health is NOT OK"""'}, {}), "('Cluster health is NOT OK')", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((247, 18, 247, 76), 'ocs_ci.ocs.exceptions.CephHealthException', 'exceptions.CephHealthException', ({(247, 49, 247, 75): '"""Cluster health is NOT OK"""'}, {}), "('Cluster health is NOT OK')", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((271, 15, 273, 9), 'ocs_ci.utility.retry.retry', 'retry', (), '', False, 'from ocs_ci.utility.retry import retry\n'), ((324, 18, 326, 13), 'ocs_ci.ocs.exceptions.MonCountException', 'exceptions.MonCountException', ({(325, 16, 325, 66): 'f"""Failed to achieve desired Mon count {count}"""'}, {}), "(f'Failed to achieve desired Mon count {count}')", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((367, 18, 369, 13), 'ocs_ci.ocs.exceptions.MDSCountException', 
'exceptions.MDSCountException', ({(368, 16, 368, 66): 'f"""Failed to achieve desired MDS count {count}"""'}, {}), "(f'Failed to achieve desired MDS count {count}')", False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((1091, 16, 1091, 73), 'logging.info', 'logging.info', ({(1091, 29, 1091, 72): 'f"""{key} PG difference {diff} is acceptable"""'}, {}), "(f'{key} PG difference {diff} is acceptable')", False, 'import logging\n'), ((1093, 16, 1093, 78), 'logging.error', 'logging.error', ({(1093, 30, 1093, 77): 'f"""{key} PG difference {diff} is not acceptable"""'}, {}), "(f'{key} PG difference {diff} is not acceptable')", False, 'import logging\n'), ((164, 16, 164, 34), 'logging.warning', 'logging.warning', ({(164, 32, 164, 33): 'e'}, {}), '(e)', False, 'import logging\n'), ((165, 16, 165, 50), 'logging.warning', 'logging.warning', ({(165, 32, 165, 49): '"""No CephFS found"""'}, {}), "('No CephFS found')", False, 'import logging\n'), ((672, 20, 672, 59), 'logging.info', 'logging.info', ({(672, 33, 672, 58): '"""Re-balance is completed"""'}, {}), "('Re-balance is completed')", False, 'import logging\n'), ((594, 35, 594, 65), 'ocs_ci.ocs.constants.TP_CONVERSION.keys', 'constants.TP_CONVERSION.keys', ({}, {}), '()', False, 'from ocs_ci.ocs import ocp, constants, exceptions\n'), ((593, 30, 593, 53), 're.findall', 're.findall', ({(593, 41, 593, 47): '"""\\\\d+"""', (593, 49, 593, 52): 'val'}, {}), "('\\\\d+', val)", False, 'import re\n')] |
techthiyanes/scenic | scenic/projects/baselines/detr/configs/detr_config.py | 05585b1189364e29d82413b9d4a50ffa8c246f0c | # pylint: disable=line-too-long
r"""Default configs for COCO detection using DETR.
"""
# pylint: enable=line-too-long
import copy
import ml_collections
_COCO_TRAIN_SIZE = 118287
NUM_EPOCHS = 300
def get_config():
"""Returns the configuration for COCO detection using DETR."""
config = ml_collections.ConfigDict()
config.experiment_name = 'coco_detection_detr'
# Dataset.
config.dataset_name = 'coco_detr_detection'
config.dataset_configs = ml_collections.ConfigDict()
config.dataset_configs.prefetch_to_device = 2
config.dataset_configs.shuffle_buffer_size = 10_000
config.dataset_configs.max_boxes = 99
config.data_dtype_str = 'float32'
# Model.
config.model_dtype_str = 'float32'
config.model_name = 'detr'
config.matcher = 'hungarian_cover_tpu'
config.hidden_dim = 256
config.num_queries = 100
config.query_emb_size = None # Same as hidden_size.
config.transformer_num_heads = 8
config.transformer_num_encoder_layers = 6
config.transformer_num_decoder_layers = 6
config.transformer_qkv_dim = 256
config.transformer_mlp_dim = 2048
config.transformer_normalize_before = False
config.backbone_num_filters = 64
config.backbone_num_layers = 50
config.dropout_rate = 0.
config.attention_dropout_rate = 0.1
# Loss.
config.aux_loss = True
config.bbox_loss_coef = 5.0
config.giou_loss_coef = 2.0
config.class_loss_coef = 1.0
config.eos_coef = 0.1
# Training.
config.trainer_name = 'detr_trainer'
config.optimizer = 'adam'
config.optimizer_configs = ml_collections.ConfigDict()
config.optimizer_configs.weight_decay = 1e-4
config.optimizer_configs.beta1 = 0.9
config.optimizer_configs.beta2 = 0.999
config.max_grad_norm = 0.1
config.num_training_epochs = NUM_EPOCHS
config.batch_size = 64
config.rng_seed = 0
decay_events = {500: 400}
# Learning rate.
steps_per_epoch = _COCO_TRAIN_SIZE // config.batch_size
config.lr_configs = ml_collections.ConfigDict()
config.lr_configs.learning_rate_schedule = 'compound'
config.lr_configs.factors = 'constant*piecewise_constant'
config.lr_configs.decay_events = [
decay_events.get(NUM_EPOCHS, NUM_EPOCHS * 2 // 3) * steps_per_epoch,
]
# Note: this is absolute (not relative):
config.lr_configs.decay_factors = [.1]
config.lr_configs.base_learning_rate = 1e-4
# Backbone training configs: optimizer and learning rate.
config.backbone_training = ml_collections.ConfigDict()
config.backbone_training.optimizer = copy.deepcopy(config.optimizer)
config.backbone_training.optimizer_configs = copy.deepcopy(
config.optimizer_configs)
config.backbone_training.lr_configs = copy.deepcopy(config.lr_configs)
config.backbone_training.lr_configs.base_learning_rate = 1e-5
# Pretrained_backbone.
config.load_pretrained_backbone = True
config.freeze_backbone_batch_stats = True
config.pretrained_backbone_configs = ml_collections.ConfigDict()
# Download pretrained ResNet50 checkpoints from here:
# https://github.com/google-research/scenic/tree/main/scenic/projects/baselines pylint: disable=line-too-long
config.pretrained_backbone_configs.checkpoint_path = 'path_to_checkpoint_of_resnet_50'
# Logging.
config.write_summary = True
config.xprof = True # Profile using xprof.
config.log_summary_steps = 50 # train summary steps
config.log_large_summary_steps = 1000 # Expensive summary operations freq
config.checkpoint = True # Do checkpointing.
config.checkpoint_steps = steps_per_epoch
config.debug_train = False # Debug mode during training.
config.debug_eval = False # Debug mode during eval.
return config
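# Example (illustrative): inspecting the returned config outside the
# Scenic trainer; `get_config` is the entry point this module exposes.
#
#     cfg = get_config()
#     print(cfg.model_name)                     # 'detr'
#     print(cfg.lr_configs.base_learning_rate)  # 0.0001
#     print(cfg.backbone_training.lr_configs.base_learning_rate)  # 1e-05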
| [((15, 11, 15, 38), 'ml_collections.ConfigDict', 'ml_collections.ConfigDict', ({}, {}), '()', False, 'import ml_collections\n'), ((20, 27, 20, 54), 'ml_collections.ConfigDict', 'ml_collections.ConfigDict', ({}, {}), '()', False, 'import ml_collections\n'), ((54, 29, 54, 56), 'ml_collections.ConfigDict', 'ml_collections.ConfigDict', ({}, {}), '()', False, 'import ml_collections\n'), ((67, 22, 67, 49), 'ml_collections.ConfigDict', 'ml_collections.ConfigDict', ({}, {}), '()', False, 'import ml_collections\n'), ((78, 29, 78, 56), 'ml_collections.ConfigDict', 'ml_collections.ConfigDict', ({}, {}), '()', False, 'import ml_collections\n'), ((79, 39, 79, 70), 'copy.deepcopy', 'copy.deepcopy', ({(79, 53, 79, 69): 'config.optimizer'}, {}), '(config.optimizer)', False, 'import copy\n'), ((80, 47, 81, 31), 'copy.deepcopy', 'copy.deepcopy', ({(81, 6, 81, 30): 'config.optimizer_configs'}, {}), '(config.optimizer_configs)', False, 'import copy\n'), ((82, 40, 82, 72), 'copy.deepcopy', 'copy.deepcopy', ({(82, 54, 82, 71): 'config.lr_configs'}, {}), '(config.lr_configs)', False, 'import copy\n'), ((88, 39, 88, 66), 'ml_collections.ConfigDict', 'ml_collections.ConfigDict', ({}, {}), '()', False, 'import ml_collections\n')] |
artembashlak/share-youtube-to-mail | tests/conftest.py | 347f72ed8846b85cae8e4f39896ab54e698a6de9 | import pytest
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
@pytest.fixture(scope="function")
def browser():
options = webdriver.ChromeOptions()
options.add_argument('ignore-certificate-errors')
options.add_argument("--headless")
options.add_argument('--no-sandbox')
options.add_argument('start-maximized')
options.add_argument('disable-infobars')
options.add_argument("--disable-extensions")
driver = webdriver.Chrome(ChromeDriverManager().install(), options=options)
yield driver
driver.quit()
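# Example test consuming this fixture (would live in a test module such as
# tests/test_smoke.py; the URL is illustrative):
#
# def test_page_title(browser):
#     browser.get('https://www.youtube.com')
#     assert 'YouTube' in browser.title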
| [((6, 1, 6, 33), 'pytest.fixture', 'pytest.fixture', (), '', False, 'import pytest\n'), ((8, 14, 8, 39), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ({}, {}), '()', False, 'from selenium import webdriver\n'), ((16, 30, 16, 51), 'webdriver_manager.chrome.ChromeDriverManager', 'ChromeDriverManager', ({}, {}), '()', False, 'from webdriver_manager.chrome import ChromeDriverManager\n')] |
lab-medvedeva/SCABFA-feature-selection | prepare_cicero_peaks.py | d5cd7568e667a75f75e753d9ab9dc645f3166902 | from scale.dataset import read_mtx
from argparse import ArgumentParser
import pandas as pd
import numpy as np
import os
def parse_args():
parser = ArgumentParser('Preparing raw peaks from cicero pipeline')
parser.add_argument('--dataset_path', help='Path to Scale dataset: count, feature, barcode folder')
parser.add_argument('--label_path', help='Path to cell labels')
parser.add_argument('--num_peaks_threshold', type=int, help='Num peaks to filter')
parser.add_argument('--output_path', help='Path to save peaks in bed folder')
parser.add_argument('--suffix', help='Suffix to path')
return parser.parse_args()
def main():
args = parse_args()
labels = pd.read_csv(args.label_path, sep='\t', header=None)
count, feature, barcode = read_mtx(args.dataset_path)
os.makedirs(args.output_path, exist_ok=True)
cell_types = labels[1].unique()
cell_barcodes = {}
for cell_type in cell_types:
cell_barcodes[cell_type] = list(labels[labels[1] == cell_type].index)
    for cell_type, barcodes in cell_barcodes.items():
        cell_by_feature = np.asarray(count[barcodes].sum(axis=0)).flatten()
feature_threshold = cell_by_feature[np.argsort(cell_by_feature)[-args.num_peaks_threshold]]
print(f'{cell_type}: {feature_threshold}')
filtered_features = (cell_by_feature > 0) & (cell_by_feature >= feature_threshold)
print(f'{cell_type}: filtered {np.sum(filtered_features)}')
output = pd.DataFrame(feature[filtered_features])
# print(cell_type, cell_by_feature[np.argsort(cell_by_feature)[-args.num_peaks_threshold:]][:10])
output['chr'] = output[0].apply(lambda x: x.split('_')[0])
output['start'] = output[0].apply(lambda x: x.split('_')[1])
output['end'] = output[0].apply(lambda x: x.split('_')[2])
output.drop(0, axis=1).to_csv(
os.path.join(args.output_path, f'{cell_type.replace(" ", "_").replace("/", "_")}_{args.suffix}.bed'),
header=None,
index=None,
sep='\t'
)
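# Example invocation (all paths and values are illustrative):
#   python prepare_cicero_peaks.py \
#       --dataset_path data/scale_counts \
#       --label_path data/cell_labels.tsv \
#       --num_peaks_threshold 20000 \
#       --output_path output/peaks \
#       --suffix cicero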
if __name__ == '__main__':
main()
| [((11, 13, 11, 71), 'argparse.ArgumentParser', 'ArgumentParser', ({(11, 28, 11, 70): '"""Preparing raw peaks from cicero pipeline"""'}, {}), "('Preparing raw peaks from cicero pipeline')", False, 'from argparse import ArgumentParser\n'), ((22, 13, 22, 64), 'pandas.read_csv', 'pd.read_csv', (), '', True, 'import pandas as pd\n'), ((23, 30, 23, 57), 'scale.dataset.read_mtx', 'read_mtx', ({(23, 39, 23, 56): 'args.dataset_path'}, {}), '(args.dataset_path)', False, 'from scale.dataset import read_mtx\n'), ((25, 4, 25, 48), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((37, 17, 37, 57), 'pandas.DataFrame', 'pd.DataFrame', ({(37, 30, 37, 56): 'feature[filtered_features]'}, {}), '(feature[filtered_features])', True, 'import pandas as pd\n'), ((33, 44, 33, 71), 'numpy.argsort', 'np.argsort', ({(33, 55, 33, 70): 'cell_by_feature'}, {}), '(cell_by_feature)', True, 'import numpy as np\n'), ((36, 39, 36, 64), 'numpy.sum', 'np.sum', ({(36, 46, 36, 63): 'filtered_features'}, {}), '(filtered_features)', True, 'import numpy as np\n')] |
CSIRT-MU/CRUSOE | crusoe_observe/neo4j-client/neo4jclient/CMSClient.py | 73e4ac0ced6c3ac46d24ac5c3feb01a1e88bd36b | from neo4jclient.AbsClient import AbstractClient
class CMSClient(AbstractClient):
def __init__(self, password, **kwargs):
super().__init__(password=password, **kwargs)
def get_domain_names(self):
"""
Gets all domain names from database.
:return: domain names in JSON-like form
"""
return self._run_query("MATCH(n:DomainName) RETURN n.domain_name AS domains")
def get_ips_and_domain_names(self):
"""
Gets all domain names with corresponding IPs from database.
:return: IPs and DomainNames in JSON-like form
"""
return self._run_query("MATCH(n:IP)-[:RESOLVES_TO]-(y:DomainName {tag: \'A/AAAA\'}) "
"RETURN { IP: n.address , Domain: y.domain_name } AS entry")
def create_cms_component(self, path):
"""
Create nodes and relationships for cms client.
-------------
        Query steps:
        1. Load the JSON file given by path (via apoc.load.json).
2. Create node of type [:SoftwareVersion, :IP] if not already exists.
3. Create node of type [:Host], relationship of type [:ON] with parameters [start,end] if not already exists.
Otherwise just update information about time on parameters [start,end].
4. Create node of type [:Node], relationship of type [:HAS_ASSIGNED].
5. Create relationship of type [:IS_A] between :Host and :Node if not already exists.
:param path: Path to the JSON with values
:return:
"""
path = f'file:///{path}'
query = "CALL apoc.load.json($path) " \
"YIELD value " \
"UNWIND value.data AS data " \
"UNWIND data.cpe as cpe " \
"WITH data.ip as ip_ad, cpe, value.time as theTime " \
"MERGE (ipadd:IP {address: ip_ad}) " \
"MERGE (softVersion:SoftwareVersion {version: cpe, tag: \'cms_client\'}) " \
"MERGE (ipadd)<-[:HAS_ASSIGNED]-(nod:Node) " \
"MERGE (nod)-[:IS_A]->(host:Host) " \
"MERGE (softVersion)-[r:ON]->(host) " \
"ON CREATE SET r.start = datetime(theTime),r.end = datetime(theTime) " \
"ON MATCH SET r.end = datetime(theTime)"
params = {'path': path}
self._run_query(query, **params)
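if __name__ == '__main__':
    # Minimal usage sketch -- the password is a placeholder; connection
    # details are handled by the parent AbstractClient.
    client = CMSClient(password='secret')
    print(client.get_domain_names())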
| [] |
jonasjucker/wildlife-telegram | location.py | 5fb548d3779782467247cf5d1e165d1c2349de30 | import time
from datetime import date,datetime
from astral import LocationInfo
from astral.sun import sun
class CamLocation:
def __init__(self,lat,lon,info,country,timezone):
self.info = LocationInfo(info, country, timezone, lat, lon)
def is_night(self):
s = sun(self.info.observer, date=date.today(),tzinfo=self.info.timezone)
sunrise = s["sunrise"].timestamp()
sunset = s["sunset"].timestamp()
time_now = datetime.now().timestamp()
if time_now > sunrise and time_now < sunset:
return False
else:
return True
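if __name__ == '__main__':
    # Quick check with illustrative coordinates (Bern, Switzerland).
    cam = CamLocation(46.95, 7.45, 'Bern', 'Switzerland', 'Europe/Zurich')
    print('night' if cam.is_night() else 'day')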
| [((8, 20, 8, 67), 'astral.LocationInfo', 'LocationInfo', ({(8, 33, 8, 37): 'info', (8, 39, 8, 46): 'country', (8, 48, 8, 56): 'timezone', (8, 58, 8, 61): 'lat', (8, 63, 8, 66): 'lon'}, {}), '(info, country, timezone, lat, lon)', False, 'from astral import LocationInfo\n'), ((11, 41, 11, 53), 'datetime.date.today', 'date.today', ({}, {}), '()', False, 'from datetime import date, datetime\n'), ((16, 19, 16, 33), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import date, datetime\n')] |
ch1huizong/learning | lang/py/cookbook/v2/source/cb2_20_9_exm_1.py | 632267634a9fd84a5f5116de09ff1e2681a6cc85 | class Skidoo(object):
''' a mapping which claims to contain all keys, each with a value
of 23; item setting and deletion are no-ops; you can also call
an instance with arbitrary positional args, result is 23. '''
__metaclass__ = MetaInterfaceChecker
__implements__ = IMinimalMapping, ICallable
def __getitem__(self, key): return 23
def __setitem__(self, key, value): pass
def __delitem__(self, key): pass
def __contains__(self, key): return True
def __call__(self, *args): return 23
sk = Skidoo()
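# MetaInterfaceChecker, IMinimalMapping and ICallable are defined earlier in
# this recipe (Python Cookbook 2nd ed., 20.9) and are not shown here.
print sk['anything'], sk(1, 2, 3)  # prints: 23 23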
| [] |
davidleonfdez/face2anime | face2anime/nb_utils.py | 896bf85a7aa28322cc9e9e586685db8cbbf39d89 | import importlib
__all__ = ['mount_gdrive']
def mount_gdrive() -> str:
"""Mount Google Drive storage of the current Google account and return the root path.
    Functionality is only available in the Google Colab environment; otherwise, a RuntimeError is raised.
"""
if (importlib.util.find_spec("google.colab") is None):
raise RuntimeError("Cannot mount Google Drive outside of Google Colab.")
from google.colab import drive
drive.mount('/content/gdrive', force_remount=True)
root_dir = "/content/gdrive/My Drive/"
return root_dir
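# Example (only meaningful inside Colab; the subdirectory is illustrative):
# root = mount_gdrive()
# ckpt_dir = root + 'face2anime/checkpoints'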
| [((16, 4, 16, 54), 'google.colab.drive.mount', 'drive.mount', (), '', False, 'from google.colab import drive\n'), ((12, 8, 12, 48), 'importlib.util.find_spec', 'importlib.util.find_spec', ({(12, 33, 12, 47): '"""google.colab"""'}, {}), "('google.colab')", False, 'import importlib\n')] |
waschag-tvk/pywaschedv | wasch/tests.py | 8f0428827c4c1c7e9462eaa94ba02290db1c340f | import datetime
from django.utils import timezone
from django.test import TestCase
from django.contrib.auth.models import (
User,
)
from wasch.models import (
Appointment,
WashUser,
WashParameters,
# not models:
AppointmentError,
StatusRights,
)
from wasch import tvkutils, payment
class WashUserTestCase(TestCase):
def test_god(self):
god, _ = WashUser.objects.get_or_create_god()
self.assertTrue(god.isActivated)
self.assertTrue(god.user.is_staff)
self.assertTrue(god.user.is_superuser)
group_names = (group.name for group in god.user.groups.all())
for expected_group in StatusRights(9).groups:
self.assertIn(expected_group, group_names)
class AppointmentTestCase(TestCase):
exampleUserName = 'waschexample'
examplePoorUserName = 'poor'
exampleTime = Appointment.manager.scheduled_appointment_times()[-1]
exampleTooOldTime = timezone.make_aware(datetime.datetime(1991, 12, 25))
exampleTooOldReference = 4481037
exampleMachine, exampleBrokenMachine, lastMachine = \
tvkutils.get_or_create_machines()[0]
def setUp(self):
tvkutils.setup()
self.exampleMachine.isAvailable = True # though this is default
self.exampleMachine.save()
self.exampleBrokenMachine.isAvailable = False
        self.exampleBrokenMachine.save()
WashUser.objects.create_enduser(self.exampleUserName, isActivated=True)
WashUser.objects.create_enduser(
self.examplePoorUserName, isActivated=False)
def _createExample(self):
user = User.objects.get(username=self.exampleUserName)
return Appointment.objects.create(
time=self.exampleTime, machine=self.exampleMachine, user=user,
wasUsed=False)
def test_create(self):
result = self._createExample()
self.assertEqual(result.time, self.exampleTime)
self.assertEqual(result.machine, self.exampleMachine)
self.assertEqual(result.user.username, self.exampleUserName)
self.assertTrue(Appointment.manager.appointment_exists(
result.time, result.machine))
self.assertFalse(Appointment.manager.bookable(
result.time, result.machine, result.user))
self.assertEqual(
Appointment.manager.why_not_bookable(
result.time, result.machine, result.user),
41, # Appointment taken
)
result.cancel()
self.assertTrue(Appointment.manager.bookable(
result.time, result.machine, result.user))
def test_bookable(self):
user = User.objects.get(username=self.exampleUserName)
poorUser = User.objects.get(username=self.examplePoorUserName)
god, _ = WashUser.objects.get_or_create_god()
self.assertEqual(
Appointment.manager.why_not_bookable(
self.exampleTime, self.exampleMachine, poorUser),
31, # User not active
)
self.assertTrue(Appointment.manager.bookable(
self.exampleTime, self.exampleMachine, user))
self.assertTrue(Appointment.manager.bookable(
self.exampleTime, self.exampleMachine, god.user))
self.assertEqual(
Appointment.manager.why_not_bookable(
self.exampleTooOldTime, self.exampleMachine, user),
11, # Unsupported time
)
unsavedTooOldAppointment = Appointment.from_reference(
self.exampleTooOldReference, user)
self.assertEqual(self.exampleTooOldReference, Appointment(
time=self.exampleTooOldTime, machine=self.exampleMachine,
user=user).reference)
self.assertEqual(unsavedTooOldAppointment.time, self.exampleTooOldTime)
self.assertEqual(unsavedTooOldAppointment.machine, self.exampleMachine)
self.assertEqual(
unsavedTooOldAppointment.user.username, self.exampleUserName)
self.assertEqual(
unsavedTooOldAppointment.reference, self.exampleTooOldReference)
self.assertEqual(
Appointment.manager.why_not_bookable(
self.exampleTime, self.exampleBrokenMachine, user),
21, # Machine out of service
)
def test_make_appointment(self):
user = User.objects.get(username=self.exampleUserName)
god, _ = WashUser.objects.get_or_create_god()
appointment = Appointment.manager.make_appointment(
self.exampleTime, self.exampleMachine, user)
reference = appointment.reference
self.assertEqual(
Appointment.manager.why_not_bookable(
self.exampleTime, self.exampleMachine, god.user),
41, # Appointment taken
)
with self.assertRaises(AppointmentError) as ae:
Appointment.manager.make_appointment(
self.exampleTime, self.exampleMachine, user)
self.assertEqual(ae.exception.reason, 41)
appointment.cancel()
self.assertEqual(
appointment,
Appointment.manager.filter_for_reference(reference).get())
WashParameters.objects.update_value('bonus-method', 'empty')
self.assertTrue(Appointment.manager.bookable(
self.exampleTime, self.exampleMachine, user))
with self.assertRaises(payment.PaymentError):
Appointment.manager.make_appointment(
self.exampleTime, self.exampleMachine, user)
def test_use(self):
user = User.objects.get(username=self.exampleUserName)
appointment = Appointment.manager.make_appointment(
self.exampleTime, self.exampleMachine, user)
appointment.use()
with self.assertRaises(AppointmentError) as ae:
appointment.use()
self.assertEqual(ae.exception.reason, 61) # Appointment already used
with self.assertRaises(AppointmentError) as ae:
appointment.rebook()
self.assertEqual(ae.exception.reason, 41) # Appointment taken
with self.assertRaises(AppointmentError) as ae:
appointment.cancel()
self.assertEqual(ae.exception.reason, 61) # Appointment already used
self.assertTrue(appointment.wasUsed)
| [((20, 17, 20, 53), 'wasch.models.WashUser.objects.get_or_create_god', 'WashUser.objects.get_or_create_god', ({}, {}), '()', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((32, 18, 32, 67), 'wasch.models.Appointment.manager.scheduled_appointment_times', 'Appointment.manager.scheduled_appointment_times', ({}, {}), '()', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((33, 44, 33, 75), 'datetime.datetime', 'datetime.datetime', ({(33, 62, 33, 66): '1991', (33, 68, 33, 70): '12', (33, 72, 33, 74): '25'}, {}), '(1991, 12, 25)', False, 'import datetime\n'), ((36, 8, 36, 41), 'wasch.tvkutils.get_or_create_machines', 'tvkutils.get_or_create_machines', ({}, {}), '()', False, 'from wasch import tvkutils, payment\n'), ((39, 8, 39, 24), 'wasch.tvkutils.setup', 'tvkutils.setup', ({}, {}), '()', False, 'from wasch import tvkutils, payment\n'), ((44, 8, 44, 79), 'wasch.models.WashUser.objects.create_enduser', 'WashUser.objects.create_enduser', (), '', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((45, 8, 46, 56), 'wasch.models.WashUser.objects.create_enduser', 'WashUser.objects.create_enduser', (), '', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((49, 15, 49, 62), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((50, 15, 52, 26), 'wasch.models.Appointment.objects.create', 'Appointment.objects.create', (), '', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((73, 15, 73, 62), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((74, 19, 74, 70), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((75, 17, 75, 53), 'wasch.models.WashUser.objects.get_or_create_god', 'WashUser.objects.get_or_create_god', ({}, {}), '()', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((90, 35, 91, 46), 'wasch.models.Appointment.from_reference', 'Appointment.from_reference', ({(91, 12, 91, 39): 'self.exampleTooOldReference', (91, 41, 91, 45): 'user'}, {}), '(self.exampleTooOldReference, user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((108, 15, 108, 62), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((109, 17, 109, 53), 'wasch.models.WashUser.objects.get_or_create_god', 'WashUser.objects.get_or_create_god', ({}, {}), '()', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((110, 22, 111, 56), 'wasch.models.Appointment.manager.make_appointment', 'Appointment.manager.make_appointment', ({(111, 12, 111, 28): 'self.exampleTime', (111, 30, 111, 49): 'self.exampleMachine', (111, 51, 111, 55): 'user'}, {}), '(self.exampleTime, self.exampleMachine,\n user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((126, 8, 126, 68), 'wasch.models.WashParameters.objects.update_value', 'WashParameters.objects.update_value', ({(126, 44, 126, 58): '"""bonus-method"""', 
(126, 60, 126, 67): '"""empty"""'}, {}), "('bonus-method', 'empty')", False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((134, 15, 134, 62), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', (), '', False, 'from django.contrib.auth.models import User\n'), ((135, 22, 136, 56), 'wasch.models.Appointment.manager.make_appointment', 'Appointment.manager.make_appointment', ({(136, 12, 136, 28): 'self.exampleTime', (136, 30, 136, 49): 'self.exampleMachine', (136, 51, 136, 55): 'user'}, {}), '(self.exampleTime, self.exampleMachine,\n user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((25, 30, 25, 45), 'wasch.models.StatusRights', 'StatusRights', ({(25, 43, 25, 44): '(9)'}, {}), '(9)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((59, 24, 60, 40), 'wasch.models.Appointment.manager.appointment_exists', 'Appointment.manager.appointment_exists', ({(60, 12, 60, 23): 'result.time', (60, 25, 60, 39): 'result.machine'}, {}), '(result.time, result.machine)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((61, 25, 62, 53), 'wasch.models.Appointment.manager.bookable', 'Appointment.manager.bookable', ({(62, 12, 62, 23): 'result.time', (62, 25, 62, 39): 'result.machine', (62, 41, 62, 52): 'result.user'}, {}), '(result.time, result.machine, result.user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((64, 12, 65, 57), 'wasch.models.Appointment.manager.why_not_bookable', 'Appointment.manager.why_not_bookable', ({(65, 16, 65, 27): 'result.time', (65, 29, 65, 43): 'result.machine', (65, 45, 65, 56): 'result.user'}, {}), '(result.time, result.machine, result.user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((69, 24, 70, 53), 'wasch.models.Appointment.manager.bookable', 'Appointment.manager.bookable', ({(70, 12, 70, 23): 'result.time', (70, 25, 70, 39): 'result.machine', (70, 41, 70, 52): 'result.user'}, {}), '(result.time, result.machine, result.user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((77, 12, 78, 64), 'wasch.models.Appointment.manager.why_not_bookable', 'Appointment.manager.why_not_bookable', ({(78, 16, 78, 32): 'self.exampleTime', (78, 34, 78, 53): 'self.exampleMachine', (78, 55, 78, 63): 'poorUser'}, {}), '(self.exampleTime, self.exampleMachine,\n poorUser)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((81, 24, 82, 56), 'wasch.models.Appointment.manager.bookable', 'Appointment.manager.bookable', ({(82, 12, 82, 28): 'self.exampleTime', (82, 30, 82, 49): 'self.exampleMachine', (82, 51, 82, 55): 'user'}, {}), '(self.exampleTime, self.exampleMachine, user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((83, 24, 84, 60), 'wasch.models.Appointment.manager.bookable', 'Appointment.manager.bookable', ({(84, 12, 84, 28): 'self.exampleTime', (84, 30, 84, 49): 'self.exampleMachine', (84, 51, 84, 59): 'god.user'}, {}), '(self.exampleTime, self.exampleMachine, god.user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((86, 12, 87, 66), 
'wasch.models.Appointment.manager.why_not_bookable', 'Appointment.manager.why_not_bookable', ({(87, 16, 87, 38): 'self.exampleTooOldTime', (87, 40, 87, 59): 'self.exampleMachine', (87, 61, 87, 65): 'user'}, {}), '(self.exampleTooOldTime, self.\n exampleMachine, user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((102, 12, 103, 66), 'wasch.models.Appointment.manager.why_not_bookable', 'Appointment.manager.why_not_bookable', ({(103, 16, 103, 32): 'self.exampleTime', (103, 34, 103, 59): 'self.exampleBrokenMachine', (103, 61, 103, 65): 'user'}, {}), '(self.exampleTime, self.\n exampleBrokenMachine, user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((114, 12, 115, 64), 'wasch.models.Appointment.manager.why_not_bookable', 'Appointment.manager.why_not_bookable', ({(115, 16, 115, 32): 'self.exampleTime', (115, 34, 115, 53): 'self.exampleMachine', (115, 55, 115, 63): 'god.user'}, {}), '(self.exampleTime, self.exampleMachine,\n god.user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((119, 12, 120, 60), 'wasch.models.Appointment.manager.make_appointment', 'Appointment.manager.make_appointment', ({(120, 16, 120, 32): 'self.exampleTime', (120, 34, 120, 53): 'self.exampleMachine', (120, 55, 120, 59): 'user'}, {}), '(self.exampleTime, self.exampleMachine,\n user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((127, 24, 128, 56), 'wasch.models.Appointment.manager.bookable', 'Appointment.manager.bookable', ({(128, 12, 128, 28): 'self.exampleTime', (128, 30, 128, 49): 'self.exampleMachine', (128, 51, 128, 55): 'user'}, {}), '(self.exampleTime, self.exampleMachine, user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((130, 12, 131, 60), 'wasch.models.Appointment.manager.make_appointment', 'Appointment.manager.make_appointment', ({(131, 16, 131, 32): 'self.exampleTime', (131, 34, 131, 53): 'self.exampleMachine', (131, 55, 131, 59): 'user'}, {}), '(self.exampleTime, self.exampleMachine,\n user)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((92, 54, 94, 22), 'wasch.models.Appointment', 'Appointment', (), '', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n'), ((125, 12, 125, 63), 'wasch.models.Appointment.manager.filter_for_reference', 'Appointment.manager.filter_for_reference', ({(125, 53, 125, 62): 'reference'}, {}), '(reference)', False, 'from wasch.models import Appointment, WashUser, WashParameters, AppointmentError, StatusRights\n')] |
1050669722/LeetCode-Answers | Python/problem1150.py | c8f4d1ccaac09cda63b60d75144335347b06dc81 | from typing import List
from collections import Counter
# class Solution:
# def isMajorityElement(self, nums: List[int], target: int) -> bool:
# d = Counter(nums)
# return d[target] > len(nums)//2
# class Solution:
# def isMajorityElement(self, nums: List[int], target: int) -> bool:
# ans = 0
# for num in nums:
# if num == target:
# ans += 1
#         return ans > len(nums)//2
class Solution:
def isMajorityElement(self, nums: List[int], target: int) -> bool:
if not nums:
return False
if len(nums) == 1:
return nums[0] == target
p, q = 0, len(nums)-1
while p < q:
if nums[p] > target:
return False
elif nums[p] < target:
p += 1
if nums[q] < target:
return False
elif nums[q] > target:
q -= 1
if nums[p] == nums[q] == target:
                return q - p + 1 > len(nums)//2
        return False
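# Quick sanity checks (input must be sorted, as LeetCode 1150 guarantees):
# Solution().isMajorityElement([2, 4, 5, 5, 5, 5, 5, 6, 6], 5)  # -> True
# Solution().isMajorityElement([10, 100, 101, 101], 101)        # -> False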
| [] |
arcadecoffee/advent-2021 | day17/module.py | 57d24cd6ba6e2b4d7e68ea492b955b73eaad7b6a | """
Advent of Code 2021 - Day 17
https://adventofcode.com/2021/day/17
"""
import re
from math import ceil, sqrt
from typing import List, Tuple
DAY = 17
FULL_INPUT_FILE = f'../inputs/day{DAY:02d}/input.full.txt'
TEST_INPUT_FILE = f'../inputs/day{DAY:02d}/input.test.txt'
def load_data(infile_path: str) -> Tuple[int, int, int, int]:
regex = r'target area: x=(-?\d*)\.\.(-?\d*), y=(-?\d*)\.\.(-?\d*)'
with open(infile_path, 'r', encoding='ascii') as infile:
x1, x2, y1, y2 = [int(i) for i in re.match(regex, infile.readline()).groups()]
return x1, x2, y1, y2
def maximum_altitude(y: int) -> int:
return int(y * -1 * (y * -1 - 1) / 2)
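# Why this works: a probe launched upward at vy passes y=0 on the way down at
# -(vy + 1), so the fastest safe launch into y1 (the bottom row, negative) is
# vy = -y1 - 1.  Its peak is the triangular number vy * (vy + 1) / 2, which is
# exactly int(y * -1 * (y * -1 - 1) / 2) evaluated at y = y1.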
def shot_good(x_velocity: int, y_velocity: int, x1: int, x2: int, y1: int, y2: int) -> bool:
x_position = y_position = 0
while x_position <= x2 and y_position >= y1:
if x_position >= x1 and y_position <= y2:
return True
x_position += x_velocity
y_position += y_velocity
x_velocity -= 1 if x_velocity > 0 else -1 if x_velocity < 0 else 0
y_velocity -= 1
return False
def count_good_shots(x1: int, x2: int, y1: int, y2: int) -> int:
    # Smallest vx that still reaches x1: solve vx * (vx + 1) / 2 >= x1 with the
    # quadratic formula.  Arcing vx beyond ~x2 / 2 overshoots by step two.
    x_min = ceil(sqrt(x1 * 8 + 1) / 2 - 1 / 2)
    x_max = round(x2 / 2) + 1
    # vy below y1 overshoots immediately; vy of |y1| or more overshoots on the
    # way back down (see maximum_altitude).
    y_min = y1
    y_max = y1 * -1
    arcing_good_shots = []
    for x in range(x_min, x_max):
        for y in range(y_min, y_max):
            if shot_good(x, y, x1, x2, y1, y2):
                arcing_good_shots.append((x, y))
    # Every velocity aimed straight into the box hits on the first step.
    direct_shot_count = (x2 + 1 - x1) * (y2 + 1 - y1)
return len(arcing_good_shots) + direct_shot_count
def part_1(infile_path: str) -> int:
target_area = load_data(infile_path)
return maximum_altitude(target_area[2])
def part_2(infile_path: str) -> int:
target_area = load_data(infile_path)
return count_good_shots(*target_area)
if __name__ == '__main__':
part1_answer = part_1(FULL_INPUT_FILE)
print(f'Part 1: {part1_answer}')
part2_answer = part_2(FULL_INPUT_FILE)
print(f'Part 2: {part2_answer}')
| [((40, 17, 40, 33), 'math.sqrt', 'sqrt', ({(40, 22, 40, 32): 'x1 * 8 + 1'}, {}), '(x1 * 8 + 1)', False, 'from math import ceil, sqrt\n')] |
stefano-bragaglia/DePYsible | src/main/python/depysible/domain/rete.py | 6b53ede459a10f5e24da89d3ebaa05f08ec7af12 | from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
Payload = Tuple[List['Literal'], 'Substitutions']
class Root:
    """Entry point of the Rete network: fans each ground literal out to the alfa nodes."""
def __init__(self):
self.children = set()
def notify(self, ground: 'Literal'):
for child in self.children:
child.notify(ground, {}, self)
class Alfa:
    """Filter node: remembers and forwards only literals that unify with its pattern."""
def __init__(self, pattern: 'Literal', parent: Root):
self.parent = parent
self.pattern = pattern
self.name = repr(pattern)
self.memory = []
self.children = set()
parent.children.add(self)
def notify(self, ground: 'Literal', subs: 'Substitutions', parent: Root):
subs = self.pattern.unifies(ground)
if subs is not None:
payload = ([ground], subs)
if payload not in self.memory:
self.memory.append(payload)
for child in self.children:
child.notify([ground], subs, self)
class Beta:
    """Join node: combines two parents' matches when their substitutions agree."""
def __init__(self, parent_1: Union[Alfa, 'Beta'], parent_2: Alfa):
self.parent_1 = parent_1
self.parent_2 = parent_2
self.name = '%s, %s' % (parent_1.name, parent_2.name)
self.memory = []
self.children = set()
parent_1.children.add(self)
parent_2.children.add(self)
def notify(self, ground: List['Literal'], subs: 'Substitutions', parent: Union[Alfa, 'Beta']):
if parent is self.parent_1:
for ground_2, subs_2 in self.parent_2.memory:
self._notify(ground, subs, ground_2, subs_2)
elif parent is self.parent_2:
for ground_1, subs_1 in self.parent_1.memory:
self._notify(ground_1, subs_1, ground, subs)
@staticmethod
def _unifies(subs_1: 'Substitutions', subs_2: 'Substitutions') -> Optional['Substitutions']:
for var in set(subs_1).intersection(subs_2):
if subs_1[var] != subs_2[var]:
return None
return {**subs_1, **subs_2}
def _notify(self, ground_1: List['Literal'], subs_1: 'Substitutions', ground_2: List['Literal'],
subs_2: 'Substitutions'):
subs = self._unifies(subs_1, subs_2)
if subs is not None:
ground = [*ground_1, *ground_2]
payload = (ground, subs)
if payload not in self.memory:
self.memory.append(payload)
for child in self.children:
child.notify(ground, subs, self)
class Leaf:
    """Terminal node: grounds a rule's head, queues it, and re-notifies the root."""
def __init__(self, rule: 'Rule', parent: Union[Alfa, Beta], root: Root, agenda: List):
self.parent = parent
self.rule = rule
self.name = repr(rule)
self.memory = []
self.root = root
self.agenda = agenda
parent.children.add(self)
def notify(self, ground: List['Literal'], subs: 'Substitutions', parent: Union[Alfa, 'Beta']):
from depysible.domain.definitions import Rule
payload = (ground, subs)
if payload not in self.memory:
self.memory.append(payload)
lit = self.rule.head.substitutes(subs)
# if self.rule.type is RuleType.STRICT:
# fact = Rule(lit, self.rule.type, [])
# if fact not in self.agenda:
# self.agenda.append(fact)
rule = Rule(lit, self.rule.type, ground)
if rule not in self.agenda:
self.agenda.append(rule)
self.root.notify(lit)
def fire_rules(program: 'Program') -> List['Rule']:
if program.is_ground():
return program
rules = []
table = {}
root = Root()
for rule in program.rules:
if rule.is_fact():
rules.append(rule)
else:
beta = None
for lit in rule.body:
name = repr(lit)
alfa = table.setdefault(name, Alfa(lit, root))
if beta is None:
beta = alfa
else:
name = '%s, %s' % (beta.name, alfa.name)
beta = table.setdefault(name, Beta(beta, alfa))
Leaf(rule, beta, root, rules)
for fact in program.get_facts():
root.notify(fact.head)
return rules
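# How fire_rules wires the network: one Alfa per distinct body literal, Beta
# joins chained left-to-right per rule, and a Leaf per rule; notifying the
# Root with each known fact then forward-chains every derivable ground rule
# into the returned list.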
| [((99, 19, 99, 52), 'depysible.domain.definitions.Rule', 'Rule', ({(99, 24, 99, 27): 'lit', (99, 29, 99, 43): 'self.rule.type', (99, 45, 99, 51): 'ground'}, {}), '(lit, self.rule.type, ground)', False, 'from depysible.domain.definitions import Rule\n')] |
jeffreyzli/pokerbot-2017 | pythonbot_1.0/GameData.py | df2aa31d6aaf0e3162d24ae5f4c2a918ab19831f | import HandRankings as Hand
from deuces.deuces import Card, Evaluator
class GameData:
    """Track match- and hand-level statistics for the bot and its opponent,
    updated from the engine's protocol messages (pre-split into data_list)."""
def __init__(self, name, opponent_name, stack_size, bb):
# match stats
self.name = name
self.opponent_name = opponent_name
self.starting_stack_size = int(stack_size)
self.num_hands = 0
self.num_wins = 0
self.num_flop = 0
self.big_blind = int(bb)
# self pre-flop stats
self.pfr = 0
self.vpip = 0
self.three_bet = 0
self.fold_big_bet = 0
# opponent pre-flop stats
self.opponent_pfr = 0
self.opponent_vpip = 0
self.opponent_three_bet = 0
self.opponent_fold_pfr = 0
self.opponent_fold_three_bet = 0
# self post-flop stats
self.aggression_factor = False
self.showdown = 0
self.c_bet = 0
self.showdown_win = 0
self.double_barrel = 0
self.discarded_card = None
# opponent post-flop stats
self.opponent_c_bet = 0
self.opponent_fold_c_bet = 0
self.opponent_double_barrel = 0
# current hand stats
self.button = True
self.current_pot_size = 0
self.current_hand = []
self.current_hand_strength = 0.0
self.hand_class = ''
self.hand_score = 0
self.current_game_state = ''
self.board_cards = []
self.last_actions = []
self.current_legal_actions = []
self.has_called = False
self.opponent_has_called = False
self.has_two_bet = False
self.opponent_has_two_bet = False
self.has_three_bet = False
self.opponent_has_three_bet = False
self.has_four_bet = False
self.opponent_has_four_bet = False
self.street_dict = {'0': 0, '3': 0, '4': 0, '5': 0}
self.discard = False
self.has_five_bet = False
self.has_bet_aggressively = False
self.time_bank = 0.0
self.opc = 0
def new_hand(self, data_list):
self.num_hands += 1
self.button = data_list[2]
if "true" in self.button:
self.button = True
else:
self.button = False
self.current_hand = [data_list[3], data_list[4]]
self.current_hand_strength = Hand.hand_win_odds(self.current_hand)
self.current_game_state = 'PREFLOP'
self.board_cards = []
self.last_actions = []
self.current_legal_actions = []
self.street_dict = {'0': 0, '3': 0, '4': 0, '5': 0}
self.has_two_bet = False
self.opponent_has_two_bet = False
self.has_three_bet = False
self.opponent_has_three_bet = False
self.has_four_bet = False
self.opponent_has_four_bet = False
self.has_bet_aggressively = False
self.aggression_factor = False
self.discarded_card = None
def get_action(self, data_list):
self.current_pot_size = int(data_list[1])
self.opc = self.starting_stack_size - self.current_pot_size
self.time_bank = float(data_list[-1])
num_board_cards = int(data_list[2])
self.street_dict[str(num_board_cards)] += 1
if self.current_game_state == 'PREFLOP':
if self.street_dict['3'] > 0 and self.street_dict['4'] == 0:
self.has_two_bet = False
self.opponent_has_two_bet = False
self.has_three_bet = False
self.opponent_has_three_bet = False
self.has_four_bet = False
self.opponent_has_four_bet = False
self.has_bet_aggressively = False
self.current_game_state = 'FLOPTURN'
self.num_flop += 1
elif self.current_game_state == 'FLOPTURN':
if self.street_dict['4'] > 0 and self.street_dict['5'] == 0:
self.has_two_bet = False
self.opponent_has_two_bet = False
self.has_three_bet = False
self.opponent_has_three_bet = False
self.has_four_bet = False
self.opponent_has_four_bet = False
self.has_bet_aggressively = False
self.current_game_state = 'TURNRIVER'
elif self.current_game_state == 'TURNRIVER':
if self.street_dict['5'] > 0:
self.has_two_bet = False
self.opponent_has_two_bet = False
self.has_three_bet = False
self.opponent_has_three_bet = False
self.has_four_bet = False
self.opponent_has_four_bet = False
self.has_bet_aggressively = False
self.current_game_state = 'POSTRIVER'
for i in range(num_board_cards):
board_card = data_list[3 + i]
if board_card not in self.board_cards:
self.board_cards.append(data_list[3 + i])
if num_board_cards > 0:
board_cards = []
for board_card in self.board_cards:
board_cards.append(Card.new(board_card))
hand = []
for card in self.current_hand:
hand.append(Card.new(card))
self.hand_score = Evaluator().evaluate(hand, board_cards)
self.hand_class = Evaluator().class_to_string(Evaluator().get_rank_class(self.hand_score))
index = 3 + num_board_cards
num_last_actions = int(data_list[index])
index += 1
current_last_actions = []
for i in range(num_last_actions):
current_last_actions.append(data_list[index + i])
self.last_actions.append(current_last_actions)
if self.discard:
for action in current_last_actions:
if 'DISCARD' in action and self.name in action:
old_card = action[8:10]
new_card = action[11:13]
self.current_hand[self.current_hand.index(old_card)] = new_card
self.current_hand_strength = Hand.hand_win_odds(self.current_hand)
self.discard = False
break
if self.current_game_state == 'PREFLOP':
if self.current_pot_size == 4:
if self.button:
self.vpip += 1
self.has_called = True
else:
self.opponent_vpip += 1
self.opponent_has_called = True
else:
for action in current_last_actions:
if 'RAISE' in action:
round_num = self.street_dict['0']
if round_num == 1:
self.opponent_pfr += 1
self.opponent_vpip += 1
self.opponent_has_two_bet = True
elif round_num == 2:
if self.button:
if self.name in action:
self.pfr += 1
self.vpip += 1
self.has_two_bet = True
else:
self.opponent_pfr += 1
self.opponent_vpip += 1
self.opponent_has_three_bet = True
else:
if self.name in action:
self.pfr += 1
self.vpip += 1
self.has_three_bet = True
else:
self.opponent_pfr += 1
self.opponent_vpip += 1
self.opponent_has_four_bet = True
elif round_num == 3:
if self.name in action:
self.pfr += 1
self.vpip += 1
elif 'CALL' in action:
if self.name in action:
self.vpip += 1
else:
self.opponent_vpip += 1
elif self.current_game_state == 'FLOPTURN':
round_num = self.street_dict['3']
if round_num == 1:
self.discard = True
elif round_num == 2:
for action in current_last_actions:
if 'BET' in action:
self.opponent_c_bet += 1
break
elif round_num == 3:
for action in current_last_actions:
if 'BET' in action:
if self.name in action:
self.c_bet += 1
else:
self.opponent_c_bet += 1
elif 'RAISE' in action:
if self.name in action:
self.has_two_bet = True
else:
if self.button:
self.opponent_has_three_bet = True
else:
self.opponent_has_two_bet = True
elif round_num == 4:
for action in current_last_actions:
if 'RAISE' in action:
if self.name in action:
if self.button:
self.has_four_bet = True
else:
self.has_three_bet = True
break
elif self.current_game_state == 'TURNRIVER':
round_num = self.street_dict['4']
if round_num == 1:
self.discard = True
for action in current_last_actions:
if 'BET' in action:
if self.name in action:
self.c_bet += 1
else:
self.opponent_c_bet += 1
break
elif round_num == 2:
for action in current_last_actions:
if 'BET' in action:
self.opponent_c_bet += 1
break
elif round_num == 3:
for action in current_last_actions:
if 'BET' in action:
if self.name in action:
self.c_bet += 1
else:
self.opponent_c_bet += 1
elif 'RAISE' in action:
if self.name in action:
self.has_two_bet = True
else:
if self.button:
self.opponent_has_three_bet = True
else:
self.opponent_has_two_bet = True
elif round_num == 4:
for action in current_last_actions:
if 'RAISE' in action:
if self.name in action:
if self.button:
self.has_four_bet = True
else:
self.has_three_bet = True
break
elif self.current_game_state == 'POSTRIVER':
round_num = self.street_dict['5']
if round_num == 1:
for action in current_last_actions:
if 'BET' in action:
if self.name in action:
self.double_barrel += 1
else:
self.opponent_double_barrel += 1
break
index += num_last_actions
num_legal_actions = int(data_list[index])
index += 1
self.current_legal_actions = []
for i in range(num_legal_actions):
self.current_legal_actions.append(data_list[index + i])
def legal_action(self, action):
for legal_action in self.current_legal_actions:
if action in legal_action:
if action == 'BET' or action == 'RAISE':
index = legal_action.index(':') + 1
sub = legal_action[index:]
index = sub.index(':')
return [int(sub[:index]), int(sub[index+1:])]
if action == 'CALL':
for last_action in self.last_actions[-1]:
if 'RAISE' in last_action and self.opponent_name in last_action:
sub = last_action[last_action.index(':')+1:]
return int(sub[:sub.index(':')])
return True
return None
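    # Example legal-action strings this parser expects (illustrative):
    #   'BET:2:200' -> legal_action('BET') returns [2, 200] (min and max)
    #   'CALL'      -> legal_action('CALL') returns the amount owed if the
    #                  opponent just raised, otherwise True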
def hand_over(self, data_list):
        # Mirror get_action: protocol fields arrive as strings, and the count
        # field sits just before the action entries.
        num_board_cards = int(data_list[3])
        index = 4 + num_board_cards
        num_last_actions = int(data_list[index])
        index += 1
        current_last_actions = []
        for i in range(num_last_actions):
            current_last_actions.append(data_list[index + i])
if self.current_game_state == 'PREFLOP':
for action in current_last_actions:
if 'FOLD' in action and self.opponent_name in action:
if self.button:
for last_action in self.last_actions[-1]:
if 'RAISE' in last_action and self.name in last_action:
self.opponent_fold_pfr += 1
if self.has_three_bet and not self.has_four_bet:
self.opponent_fold_three_bet += 1
self.num_wins += 1
else:
for last_action in current_last_actions:
if 'RAISE' in last_action and self.name in last_action:
self.opponent_fold_pfr += 1
if self.has_three_bet and not self.has_four_bet:
self.opponent_fold_three_bet += 1
self.num_wins += 1
elif self.current_game_state == 'FLOPTURN':
for action in current_last_actions:
if self.button:
if 'FOLD' in action and self.opponent_name in action:
for last_action in self.last_actions[-1]:
if 'BET' in last_action and self.name in last_action:
self.opponent_fold_c_bet += 1
self.num_wins += 1
else:
if 'FOLD' in action and self.opponent_name in action:
for last_action in current_last_actions:
if 'BET' in last_action and self.name in last_action:
self.opponent_fold_c_bet += 1
self.num_wins += 1
elif self.current_game_state == 'POSTRIVER':
for action in current_last_actions:
if 'WIN' in action:
if self.name in action:
self.num_wins += 1
for last_action in current_last_actions:
if 'SHOW' in last_action:
self.showdown += 1
self.showdown_win += 1
break
break
| [((71, 37, 71, 74), 'HandRankings.hand_win_odds', 'Hand.hand_win_odds', ({(71, 56, 71, 73): 'self.current_hand'}, {}), '(self.current_hand)', True, 'import HandRankings as Hand\n'), ((132, 35, 132, 55), 'deuces.deuces.Card.new', 'Card.new', ({(132, 44, 132, 54): 'board_card'}, {}), '(board_card)', False, 'from deuces.deuces import Card, Evaluator\n'), ((135, 28, 135, 42), 'deuces.deuces.Card.new', 'Card.new', ({(135, 37, 135, 41): 'card'}, {}), '(card)', False, 'from deuces.deuces import Card, Evaluator\n'), ((136, 30, 136, 41), 'deuces.deuces.Evaluator', 'Evaluator', ({}, {}), '()', False, 'from deuces.deuces import Card, Evaluator\n'), ((137, 30, 137, 41), 'deuces.deuces.Evaluator', 'Evaluator', ({}, {}), '()', False, 'from deuces.deuces import Card, Evaluator\n'), ((153, 49, 153, 86), 'HandRankings.hand_win_odds', 'Hand.hand_win_odds', ({(153, 68, 153, 85): 'self.current_hand'}, {}), '(self.current_hand)', True, 'import HandRankings as Hand\n'), ((137, 58, 137, 69), 'deuces.deuces.Evaluator', 'Evaluator', ({}, {}), '()', False, 'from deuces.deuces import Card, Evaluator\n')] |
zyayoung/share-todo | todo/models.py | 84813545f9aa3e89441c560e64e85bc799835d30 | from django.db import models
from django.contrib.auth.models import User
from django.utils import timezone
class Todo(models.Model):
time_add = models.DateTimeField(auto_now_add=True)
title = models.CharField(max_length=64)
detail = models.TextField(blank=True)
deadline = models.DateTimeField(blank=True)
user = models.ForeignKey(User, on_delete=models.CASCADE)
done = models.BooleanField(default=False)
def __str__(self):
return self.title
def seconds_left(self):
return (self.deadline - timezone.now()).total_seconds()
def state(self):
if self.done:
return 'Done'
elif self.seconds_left() > 0:
return 'Todo'
else:
return 'Exceeded'
class Meta:
ordering = ['deadline']
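# Shell usage sketch (user and timings are illustrative):
# from datetime import timedelta
# Todo.objects.create(title='Write report',
#                     deadline=timezone.now() + timedelta(days=1),
#                     user=User.objects.first())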
| [((7, 15, 7, 54), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((8, 12, 8, 43), 'django.db.models.CharField', 'models.CharField', (), '', False, 'from django.db import models\n'), ((9, 13, 9, 41), 'django.db.models.TextField', 'models.TextField', (), '', False, 'from django.db import models\n'), ((10, 15, 10, 47), 'django.db.models.DateTimeField', 'models.DateTimeField', (), '', False, 'from django.db import models\n'), ((11, 11, 11, 60), 'django.db.models.ForeignKey', 'models.ForeignKey', (), '', False, 'from django.db import models\n'), ((12, 11, 12, 45), 'django.db.models.BooleanField', 'models.BooleanField', (), '', False, 'from django.db import models\n'), ((18, 32, 18, 46), 'django.utils.timezone.now', 'timezone.now', ({}, {}), '()', False, 'from django.utils import timezone\n')] |
DrewLazzeriKitware/trame | examples/Tutorial/Example/app.py | fdc73f07f17d2601e1b1d3934d2d6326a3c0281e | import os
from trame import change, update_state
from trame.layouts import SinglePageWithDrawer
from trame.html import vtk, vuetify, widgets
from vtkmodules.vtkCommonDataModel import vtkDataObject
from vtkmodules.vtkFiltersCore import vtkContourFilter
from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader
from vtkmodules.vtkRenderingAnnotation import vtkCubeAxesActor
from vtkmodules.vtkRenderingCore import (
vtkActor,
vtkDataSetMapper,
vtkRenderer,
vtkRenderWindow,
vtkRenderWindowInteractor,
)
# Required for interactor factory initialization
from vtkmodules.vtkInteractionStyle import vtkInteractorStyleSwitch # noqa
# Required for remote rendering factory initialization, not necessary for
# local rendering, but doesn't hurt to include it
import vtkmodules.vtkRenderingOpenGL2 # noqa
CURRENT_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
# -----------------------------------------------------------------------------
# Constants
# -----------------------------------------------------------------------------
class Representation:
Points = 0
Wireframe = 1
Surface = 2
SurfaceWithEdges = 3
class LookupTable:
Rainbow = 0
Inverted_Rainbow = 1
Greyscale = 2
Inverted_Greyscale = 3
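# These integer values mirror the `value` fields of the dropdown items built
# in mesh_card() and contour_card() below.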
# -----------------------------------------------------------------------------
# VTK pipeline
# -----------------------------------------------------------------------------
renderer = vtkRenderer()
renderWindow = vtkRenderWindow()
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
renderWindowInteractor.GetInteractorStyle().SetCurrentStyleToTrackballCamera()
# Read Data
reader = vtkXMLUnstructuredGridReader()
reader.SetFileName(os.path.join(CURRENT_DIRECTORY, "../data/disk_out_ref.vtu"))
reader.Update()
# Extract Array/Field information
dataset_arrays = []
fields = [
(reader.GetOutput().GetPointData(), vtkDataObject.FIELD_ASSOCIATION_POINTS),
(reader.GetOutput().GetCellData(), vtkDataObject.FIELD_ASSOCIATION_CELLS),
]
for field in fields:
field_arrays, association = field
for i in range(field_arrays.GetNumberOfArrays()):
array = field_arrays.GetArray(i)
array_range = array.GetRange()
dataset_arrays.append(
{
"text": array.GetName(),
"value": i,
"range": list(array_range),
"type": association,
}
)
default_array = dataset_arrays[0]
default_min, default_max = default_array.get("range")
# Mesh
mesh_mapper = vtkDataSetMapper()
mesh_mapper.SetInputConnection(reader.GetOutputPort())
mesh_actor = vtkActor()
mesh_actor.SetMapper(mesh_mapper)
renderer.AddActor(mesh_actor)
# Mesh: Setup default representation to surface
mesh_actor.GetProperty().SetRepresentationToSurface()
mesh_actor.GetProperty().SetPointSize(1)
mesh_actor.GetProperty().EdgeVisibilityOff()
# Mesh: Apply rainbow color map
mesh_lut = mesh_mapper.GetLookupTable()
mesh_lut.SetHueRange(0.666, 0.0)
mesh_lut.SetSaturationRange(1.0, 1.0)
mesh_lut.SetValueRange(1.0, 1.0)
mesh_lut.Build()
# Mesh: Color by default array
mesh_mapper.SelectColorArray(default_array.get("text"))
mesh_mapper.GetLookupTable().SetRange(default_min, default_max)
if default_array.get("type") == vtkDataObject.FIELD_ASSOCIATION_POINTS:
mesh_mapper.SetScalarModeToUsePointFieldData()
else:
mesh_mapper.SetScalarModeToUseCellFieldData()
mesh_mapper.SetScalarVisibility(True)
mesh_mapper.SetUseLookupTableScalarRange(True)
# Contour
contour = vtkContourFilter()
contour.SetInputConnection(reader.GetOutputPort())
contour_mapper = vtkDataSetMapper()
contour_mapper.SetInputConnection(contour.GetOutputPort())
contour_actor = vtkActor()
contour_actor.SetMapper(contour_mapper)
renderer.AddActor(contour_actor)
# Contour: ContourBy default array
contour_value = 0.5 * (default_max + default_min)
contour.SetInputArrayToProcess(
0, 0, 0, default_array.get("type"), default_array.get("text")
)
contour.SetValue(0, contour_value)
# Contour: Setup default representation to surface
contour_actor.GetProperty().SetRepresentationToSurface()
contour_actor.GetProperty().SetPointSize(1)
contour_actor.GetProperty().EdgeVisibilityOff()
# Contour: Apply rainbow color map
contour_lut = contour_mapper.GetLookupTable()
contour_lut.SetHueRange(0.666, 0.0)
contour_lut.SetSaturationRange(1.0, 1.0)
contour_lut.SetValueRange(1.0, 1.0)
contour_lut.Build()
# Contour: Color by default array
contour_mapper.GetLookupTable().SetRange(default_min, default_max)
contour_mapper.SelectColorArray(default_array.get("text"))
if default_array.get("type") == vtkDataObject.FIELD_ASSOCIATION_POINTS:
contour_mapper.SetScalarModeToUsePointFieldData()
else:
contour_mapper.SetScalarModeToUseCellFieldData()
contour_mapper.SetScalarVisibility(True)
contour_mapper.SetUseLookupTableScalarRange(True)
# Cube Axes
cube_axes = vtkCubeAxesActor()
renderer.AddActor(cube_axes)
# Cube Axes: Boundaries, camera, and styling
cube_axes.SetBounds(mesh_actor.GetBounds())
cube_axes.SetCamera(renderer.GetActiveCamera())
cube_axes.SetXLabelFormat("%6.1f")
cube_axes.SetYLabelFormat("%6.1f")
cube_axes.SetZLabelFormat("%6.1f")
cube_axes.SetFlyModeToOuterEdges()
renderer.ResetCamera()
# -----------------------------------------------------------------------------
# trame Views
# -----------------------------------------------------------------------------
local_view = vtk.VtkLocalView(renderWindow)
remote_view = vtk.VtkRemoteView(renderWindow, interactive_ratio=(1,))
html_view = local_view
# -----------------------------------------------------------------------------
# Callbacks
# -----------------------------------------------------------------------------
def update_view(**kwargs):
html_view.update()
# -----------------------------------------------------------------------------
# Toolbar Callbacks
# -----------------------------------------------------------------------------
@change("cube_axes_visibility")
def update_cube_axes_visibility(cube_axes_visibility, **kwargs):
cube_axes.SetVisibility(cube_axes_visibility)
update_view()
@change("local_vs_remote")
def update_local_vs_remote(local_vs_remote, **kwargs):
# Switch html_view
global html_view
if local_vs_remote:
html_view = local_view
else:
html_view = remote_view
# Update layout
layout.content.children[0].children[0] = html_view
layout.flush_content()
# Update View
update_view()
# -----------------------------------------------------------------------------
# Representation Callbacks
# -----------------------------------------------------------------------------
def update_representation(actor, mode):
property = actor.GetProperty()
if mode == Representation.Points:
property.SetRepresentationToPoints()
property.SetPointSize(5)
property.EdgeVisibilityOff()
elif mode == Representation.Wireframe:
property.SetRepresentationToWireframe()
property.SetPointSize(1)
property.EdgeVisibilityOff()
elif mode == Representation.Surface:
property.SetRepresentationToSurface()
property.SetPointSize(1)
property.EdgeVisibilityOff()
elif mode == Representation.SurfaceWithEdges:
property.SetRepresentationToSurface()
property.SetPointSize(1)
property.EdgeVisibilityOn()
@change("mesh_representation")
def update_mesh_representation(mesh_representation, **kwargs):
update_representation(mesh_actor, mesh_representation)
update_view()
@change("contour_representation")
def update_contour_representation(contour_representation, **kwargs):
update_representation(contour_actor, contour_representation)
update_view()
# -----------------------------------------------------------------------------
# ColorBy Callbacks
# -----------------------------------------------------------------------------
def color_by_array(actor, array):
_min, _max = array.get("range")
mapper = actor.GetMapper()
mapper.SelectColorArray(array.get("text"))
mapper.GetLookupTable().SetRange(_min, _max)
if array.get("type") == vtkDataObject.FIELD_ASSOCIATION_POINTS:
mesh_mapper.SetScalarModeToUsePointFieldData()
else:
mesh_mapper.SetScalarModeToUseCellFieldData()
mapper.SetScalarModeToUsePointFieldData()
mapper.SetScalarVisibility(True)
mapper.SetUseLookupTableScalarRange(True)
@change("mesh_color_array_idx")
def update_mesh_color_by_name(mesh_color_array_idx, **kwargs):
array = dataset_arrays[mesh_color_array_idx]
color_by_array(mesh_actor, array)
update_view()
@change("contour_color_array_idx")
def update_contour_color_by_name(contour_color_array_idx, **kwargs):
array = dataset_arrays[contour_color_array_idx]
color_by_array(contour_actor, array)
update_view()
# -----------------------------------------------------------------------------
# ColorMap Callbacks
# -----------------------------------------------------------------------------
def use_preset(actor, preset):
lut = actor.GetMapper().GetLookupTable()
if preset == LookupTable.Rainbow:
lut.SetHueRange(0.666, 0.0)
lut.SetSaturationRange(1.0, 1.0)
lut.SetValueRange(1.0, 1.0)
elif preset == LookupTable.Inverted_Rainbow:
lut.SetHueRange(0.0, 0.666)
lut.SetSaturationRange(1.0, 1.0)
lut.SetValueRange(1.0, 1.0)
elif preset == LookupTable.Greyscale:
lut.SetHueRange(0.0, 0.0)
lut.SetSaturationRange(0.0, 0.0)
lut.SetValueRange(0.0, 1.0)
elif preset == LookupTable.Inverted_Greyscale:
lut.SetHueRange(0.0, 0.666)
lut.SetSaturationRange(0.0, 0.0)
lut.SetValueRange(1.0, 0.0)
lut.Build()
@change("mesh_color_preset")
def update_mesh_color_preset(mesh_color_preset, **kwargs):
use_preset(mesh_actor, mesh_color_preset)
update_view()
@change("contour_color_preset")
def update_contour_color_preset(contour_color_preset, **kwargs):
use_preset(contour_actor, contour_color_preset)
update_view()
# -----------------------------------------------------------------------------
# Opacity Callbacks
# -----------------------------------------------------------------------------
@change("mesh_opacity")
def update_mesh_opacity(mesh_opacity, **kwargs):
mesh_actor.GetProperty().SetOpacity(mesh_opacity)
update_view()
@change("contour_opacity")
def update_contour_opacity(contour_opacity, **kwargs):
contour_actor.GetProperty().SetOpacity(contour_opacity)
update_view()
# -----------------------------------------------------------------------------
# Contour Callbacks
# -----------------------------------------------------------------------------
@change("contour_by_array_idx")
def update_contour_by(contour_by_array_idx, **kwargs):
array = dataset_arrays[contour_by_array_idx]
contour_min, contour_max = array.get("range")
contour_step = 0.01 * (contour_max - contour_min)
contour_value = 0.5 * (contour_max + contour_min)
contour.SetInputArrayToProcess(0, 0, 0, array.get("type"), array.get("text"))
contour.SetValue(0, contour_value)
# Update UI
update_state("contour_min", contour_min)
update_state("contour_max", contour_max)
update_state("contour_value", contour_value)
update_state("contour_step", contour_step)
# Update View
update_view()
@change("contour_value")
def update_contour_value(contour_value, **kwargs):
contour.SetValue(0, float(contour_value))
update_view()
# -----------------------------------------------------------------------------
# Pipeline Widget Callbacks
# -----------------------------------------------------------------------------
# Selection Change
def actives_change(ids):
_id = ids[0]
if _id == "1": # Mesh
update_state("active_ui", "mesh")
elif _id == "2": # Contour
update_state("active_ui", "contour")
else:
update_state("active_ui", "nothing")
# Visibility Change
def visibility_change(event):
_id = event["id"]
_visibility = event["visible"]
if _id == "1": # Mesh
mesh_actor.SetVisibility(_visibility)
elif _id == "2": # Contour
contour_actor.SetVisibility(_visibility)
update_view()
# -----------------------------------------------------------------------------
# GUI Toolbar Buttons
# -----------------------------------------------------------------------------
def standard_buttons():
vuetify.VCheckbox(
v_model=("cube_axes_visibility", True),
on_icon="mdi-cube-outline",
off_icon="mdi-cube-off-outline",
classes="mx-1",
hide_details=True,
dense=True,
)
vuetify.VCheckbox(
v_model="$vuetify.theme.dark",
on_icon="mdi-lightbulb-off-outline",
off_icon="mdi-lightbulb-outline",
classes="mx-1",
hide_details=True,
dense=True,
)
vuetify.VCheckbox(
v_model=("local_vs_remote", True),
on_icon="mdi-lan-disconnect",
off_icon="mdi-lan-connect",
classes="mx-1",
hide_details=True,
dense=True,
)
with vuetify.VBtn(icon=True, click="$refs.view.resetCamera()"):
vuetify.VIcon("mdi-crop-free")
# -----------------------------------------------------------------------------
# GUI Pipelines Widget
# -----------------------------------------------------------------------------
def pipeline_widget():
widgets.GitTree(
sources=(
"pipeline",
[
{"id": "1", "parent": "0", "visible": 1, "name": "Mesh"},
{"id": "2", "parent": "1", "visible": 1, "name": "Contour"},
],
),
actives_change=(actives_change, "[$event]"),
visibility_change=(visibility_change, "[$event]"),
)
# -----------------------------------------------------------------------------
# GUI Cards
# -----------------------------------------------------------------------------
def ui_card(title, ui_name):
with vuetify.VCard(v_show=f"active_ui == '{ui_name}'"):
vuetify.VCardTitle(
title,
classes="grey lighten-1 py-1 grey--text text--darken-3",
style="user-select: none; cursor: pointer",
hide_details=True,
dense=True,
)
content = vuetify.VCardText(classes="py-2")
return content
def mesh_card():
with ui_card(title="Mesh", ui_name="mesh"):
vuetify.VSelect(
v_model=("mesh_representation", Representation.Surface),
items=(
"representations",
[
{"text": "Points", "value": 0},
{"text": "Wireframe", "value": 1},
{"text": "Surface", "value": 2},
{"text": "SurfaceWithEdges", "value": 3},
],
),
label="Representation",
hide_details=True,
dense=True,
outlined=True,
classes="pt-1",
)
with vuetify.VRow(classes="pt-2", dense=True):
with vuetify.VCol(cols="6"):
vuetify.VSelect(
label="Color by",
v_model=("mesh_color_array_idx", 0),
items=("array_list", dataset_arrays),
hide_details=True,
dense=True,
outlined=True,
classes="pt-1",
)
with vuetify.VCol(cols="6"):
vuetify.VSelect(
label="Colormap",
v_model=("mesh_color_preset", LookupTable.Rainbow),
items=(
"colormaps",
[
{"text": "Rainbow", "value": 0},
{"text": "Inv Rainbow", "value": 1},
{"text": "Greyscale", "value": 2},
{"text": "Inv Greyscale", "value": 3},
],
),
hide_details=True,
dense=True,
outlined=True,
classes="pt-1",
)
vuetify.VSlider(
v_model=("mesh_opacity", 1.0),
min=0,
max=1,
step=0.1,
label="Opacity",
classes="mt-1",
hide_details=True,
dense=True,
)
def contour_card():
with ui_card(title="Contour", ui_name="contour"):
vuetify.VSelect(
label="Contour by",
v_model=("contour_by_array_idx", 0),
items=("array_list", dataset_arrays),
hide_details=True,
dense=True,
outlined=True,
classes="pt-1",
)
vuetify.VSlider(
v_model=("contour_value", contour_value),
min=("contour_min", default_min),
max=("contour_max", default_max),
step=("contour_step", 0.01 * (default_max - default_min)),
label="Value",
classes="my-1",
hide_details=True,
dense=True,
)
vuetify.VSelect(
v_model=("contour_representation", Representation.Surface),
items=(
"representations",
[
{"text": "Points", "value": 0},
{"text": "Wireframe", "value": 1},
{"text": "Surface", "value": 2},
{"text": "SurfaceWithEdges", "value": 3},
],
),
label="Representation",
hide_details=True,
dense=True,
outlined=True,
classes="pt-1",
)
with vuetify.VRow(classes="pt-2", dense=True):
with vuetify.VCol(cols="6"):
vuetify.VSelect(
label="Color by",
v_model=("contour_color_array_idx", 0),
items=("array_list", dataset_arrays),
hide_details=True,
dense=True,
outlined=True,
classes="pt-1",
)
with vuetify.VCol(cols="6"):
vuetify.VSelect(
label="Colormap",
v_model=("contour_color_preset", LookupTable.Rainbow),
items=(
"colormaps",
[
{"text": "Rainbow", "value": 0},
{"text": "Inv Rainbow", "value": 1},
{"text": "Greyscale", "value": 2},
{"text": "Inv Greyscale", "value": 3},
],
),
hide_details=True,
dense=True,
outlined=True,
classes="pt-1",
)
vuetify.VSlider(
v_model=("contour_opacity", 1.0),
min=0,
max=1,
step=0.1,
label="Opacity",
classes="mt-1",
hide_details=True,
dense=True,
)
# -----------------------------------------------------------------------------
# GUI
# -----------------------------------------------------------------------------
layout = SinglePageWithDrawer("Viewer", on_ready=update_view)
layout.title.set_text("Viewer")
with layout.toolbar:
# toolbar components
vuetify.VSpacer()
vuetify.VDivider(vertical=True, classes="mx-2")
standard_buttons()
with layout.drawer as drawer:
# drawer components
drawer.width = 325
pipeline_widget()
vuetify.VDivider(classes="mb-2")
mesh_card()
contour_card()
with layout.content:
# content components
vuetify.VContainer(
fluid=True,
classes="pa-0 fill-height",
children=[html_view],
)
# State used to track the active UI card
layout.state = {
"active_ui": None,
}
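# active_ui starts as None so no card is shown until a pipeline node is
# selected (see the v_show binding in ui_card above).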
# -----------------------------------------------------------------------------
# Main
# -----------------------------------------------------------------------------
if __name__ == "__main__":
layout.start()
| [((49, 11, 49, 24), 'vtkmodules.vtkRenderingCore.vtkRenderer', 'vtkRenderer', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingCore import vtkActor, vtkDataSetMapper, vtkRenderer, vtkRenderWindow, vtkRenderWindowInteractor\n'), ((50, 15, 50, 32), 'vtkmodules.vtkRenderingCore.vtkRenderWindow', 'vtkRenderWindow', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingCore import vtkActor, vtkDataSetMapper, vtkRenderer, vtkRenderWindow, vtkRenderWindowInteractor\n'), ((53, 25, 53, 52), 'vtkmodules.vtkRenderingCore.vtkRenderWindowInteractor', 'vtkRenderWindowInteractor', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingCore import vtkActor, vtkDataSetMapper, vtkRenderer, vtkRenderWindow, vtkRenderWindowInteractor\n'), ((58, 9, 58, 39), 'vtkmodules.vtkIOXML.vtkXMLUnstructuredGridReader', 'vtkXMLUnstructuredGridReader', ({}, {}), '()', False, 'from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader\n'), ((85, 14, 85, 32), 'vtkmodules.vtkRenderingCore.vtkDataSetMapper', 'vtkDataSetMapper', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingCore import vtkActor, vtkDataSetMapper, vtkRenderer, vtkRenderWindow, vtkRenderWindowInteractor\n'), ((87, 13, 87, 23), 'vtkmodules.vtkRenderingCore.vtkActor', 'vtkActor', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingCore import vtkActor, vtkDataSetMapper, vtkRenderer, vtkRenderWindow, vtkRenderWindowInteractor\n'), ((114, 10, 114, 28), 'vtkmodules.vtkFiltersCore.vtkContourFilter', 'vtkContourFilter', ({}, {}), '()', False, 'from vtkmodules.vtkFiltersCore import vtkContourFilter\n'), ((116, 17, 116, 35), 'vtkmodules.vtkRenderingCore.vtkDataSetMapper', 'vtkDataSetMapper', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingCore import vtkActor, vtkDataSetMapper, vtkRenderer, vtkRenderWindow, vtkRenderWindowInteractor\n'), ((118, 16, 118, 26), 'vtkmodules.vtkRenderingCore.vtkActor', 'vtkActor', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingCore import vtkActor, vtkDataSetMapper, vtkRenderer, vtkRenderWindow, vtkRenderWindowInteractor\n'), ((152, 12, 152, 30), 'vtkmodules.vtkRenderingAnnotation.vtkCubeAxesActor', 'vtkCubeAxesActor', ({}, {}), '()', False, 'from vtkmodules.vtkRenderingAnnotation import vtkCubeAxesActor\n'), ((169, 13, 169, 43), 'trame.html.vtk.VtkLocalView', 'vtk.VtkLocalView', ({(169, 30, 169, 42): 'renderWindow'}, {}), '(renderWindow)', False, 'from trame.html import vtk, vuetify, widgets\n'), ((170, 14, 170, 69), 'trame.html.vtk.VtkRemoteView', 'vtk.VtkRemoteView', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((187, 1, 187, 31), 'trame.change', 'change', ({(187, 8, 187, 30): '"""cube_axes_visibility"""'}, {}), "('cube_axes_visibility')", False, 'from trame import change, update_state\n'), ((193, 1, 193, 26), 'trame.change', 'change', ({(193, 8, 193, 25): '"""local_vs_remote"""'}, {}), "('local_vs_remote')", False, 'from trame import change, update_state\n'), ((235, 1, 235, 30), 'trame.change', 'change', ({(235, 8, 235, 29): '"""mesh_representation"""'}, {}), "('mesh_representation')", False, 'from trame import change, update_state\n'), ((241, 1, 241, 33), 'trame.change', 'change', ({(241, 8, 241, 32): '"""contour_representation"""'}, {}), "('contour_representation')", False, 'from trame import change, update_state\n'), ((266, 1, 266, 31), 'trame.change', 'change', ({(266, 8, 266, 30): '"""mesh_color_array_idx"""'}, {}), "('mesh_color_array_idx')", False, 'from trame import change, update_state\n'), ((273, 1, 273, 34), 'trame.change', 'change', ({(273, 8, 273, 33): '"""contour_color_array_idx"""'}, 
{}), "('contour_color_array_idx')", False, 'from trame import change, update_state\n'), ((306, 1, 306, 28), 'trame.change', 'change', ({(306, 8, 306, 27): '"""mesh_color_preset"""'}, {}), "('mesh_color_preset')", False, 'from trame import change, update_state\n'), ((312, 1, 312, 31), 'trame.change', 'change', ({(312, 8, 312, 30): '"""contour_color_preset"""'}, {}), "('contour_color_preset')", False, 'from trame import change, update_state\n'), ((323, 1, 323, 23), 'trame.change', 'change', ({(323, 8, 323, 22): '"""mesh_opacity"""'}, {}), "('mesh_opacity')", False, 'from trame import change, update_state\n'), ((329, 1, 329, 26), 'trame.change', 'change', ({(329, 8, 329, 25): '"""contour_opacity"""'}, {}), "('contour_opacity')", False, 'from trame import change, update_state\n'), ((340, 1, 340, 31), 'trame.change', 'change', ({(340, 8, 340, 30): '"""contour_by_array_idx"""'}, {}), "('contour_by_array_idx')", False, 'from trame import change, update_state\n'), ((359, 1, 359, 24), 'trame.change', 'change', ({(359, 8, 359, 23): '"""contour_value"""'}, {}), "('contour_value')", False, 'from trame import change, update_state\n'), ((606, 9, 606, 61), 'trame.layouts.SinglePageWithDrawer', 'SinglePageWithDrawer', (), '', False, 'from trame.layouts import SinglePageWithDrawer\n'), ((26, 36, 26, 61), 'os.path.dirname', 'os.path.dirname', ({(26, 52, 26, 60): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((59, 19, 59, 78), 'os.path.join', 'os.path.join', ({(59, 32, 59, 49): 'CURRENT_DIRECTORY', (59, 51, 59, 77): '"""../data/disk_out_ref.vtu"""'}, {}), "(CURRENT_DIRECTORY, '../data/disk_out_ref.vtu')", False, 'import os\n'), ((350, 4, 350, 44), 'trame.update_state', 'update_state', ({(350, 17, 350, 30): '"""contour_min"""', (350, 32, 350, 43): 'contour_min'}, {}), "('contour_min', contour_min)", False, 'from trame import change, update_state\n'), ((351, 4, 351, 44), 'trame.update_state', 'update_state', ({(351, 17, 351, 30): '"""contour_max"""', (351, 32, 351, 43): 'contour_max'}, {}), "('contour_max', contour_max)", False, 'from trame import change, update_state\n'), ((352, 4, 352, 48), 'trame.update_state', 'update_state', ({(352, 17, 352, 32): '"""contour_value"""', (352, 34, 352, 47): 'contour_value'}, {}), "('contour_value', contour_value)", False, 'from trame import change, update_state\n'), ((353, 4, 353, 46), 'trame.update_state', 'update_state', ({(353, 17, 353, 31): '"""contour_step"""', (353, 33, 353, 45): 'contour_step'}, {}), "('contour_step', contour_step)", False, 'from trame import change, update_state\n'), ((398, 4, 405, 5), 'trame.html.vuetify.VCheckbox', 'vuetify.VCheckbox', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((406, 4, 413, 5), 'trame.html.vuetify.VCheckbox', 'vuetify.VCheckbox', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((414, 4, 421, 5), 'trame.html.vuetify.VCheckbox', 'vuetify.VCheckbox', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((432, 4, 442, 5), 'trame.html.widgets.GitTree', 'widgets.GitTree', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((611, 4, 611, 21), 'trame.html.vuetify.VSpacer', 'vuetify.VSpacer', ({}, {}), '()', False, 'from trame.html import vtk, vuetify, widgets\n'), ((612, 4, 612, 51), 'trame.html.vuetify.VDivider', 'vuetify.VDivider', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((619, 4, 619, 36), 'trame.html.vuetify.VDivider', 'vuetify.VDivider', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((625, 4, 629, 5), 
'trame.html.vuetify.VContainer', 'vuetify.VContainer', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((373, 8, 373, 41), 'trame.update_state', 'update_state', ({(373, 21, 373, 32): '"""active_ui"""', (373, 34, 373, 40): '"""mesh"""'}, {}), "('active_ui', 'mesh')", False, 'from trame import change, update_state\n'), ((422, 9, 422, 66), 'trame.html.vuetify.VBtn', 'vuetify.VBtn', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((423, 8, 423, 38), 'trame.html.vuetify.VIcon', 'vuetify.VIcon', ({(423, 22, 423, 37): '"""mdi-crop-free"""'}, {}), "('mdi-crop-free')", False, 'from trame.html import vtk, vuetify, widgets\n'), ((451, 9, 451, 58), 'trame.html.vuetify.VCard', 'vuetify.VCard', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((452, 8, 458, 9), 'trame.html.vuetify.VCardTitle', 'vuetify.VCardTitle', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((459, 18, 459, 51), 'trame.html.vuetify.VCardText', 'vuetify.VCardText', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((465, 8, 481, 9), 'trame.html.vuetify.VSelect', 'vuetify.VSelect', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((511, 8, 520, 9), 'trame.html.vuetify.VSlider', 'vuetify.VSlider', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((525, 8, 533, 9), 'trame.html.vuetify.VSelect', 'vuetify.VSelect', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((534, 8, 543, 9), 'trame.html.vuetify.VSlider', 'vuetify.VSlider', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((544, 8, 560, 9), 'trame.html.vuetify.VSelect', 'vuetify.VSelect', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((590, 8, 599, 9), 'trame.html.vuetify.VSlider', 'vuetify.VSlider', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((375, 8, 375, 44), 'trame.update_state', 'update_state', ({(375, 21, 375, 32): '"""active_ui"""', (375, 34, 375, 43): '"""contour"""'}, {}), "('active_ui', 'contour')", False, 'from trame import change, update_state\n'), ((377, 8, 377, 44), 'trame.update_state', 'update_state', ({(377, 21, 377, 32): '"""active_ui"""', (377, 34, 377, 43): '"""nothing"""'}, {}), "('active_ui', 'nothing')", False, 'from trame import change, update_state\n'), ((482, 13, 482, 53), 'trame.html.vuetify.VRow', 'vuetify.VRow', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((561, 13, 561, 53), 'trame.html.vuetify.VRow', 'vuetify.VRow', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((483, 17, 483, 39), 'trame.html.vuetify.VCol', 'vuetify.VCol', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((484, 16, 492, 17), 'trame.html.vuetify.VSelect', 'vuetify.VSelect', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((493, 17, 493, 39), 'trame.html.vuetify.VCol', 'vuetify.VCol', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((494, 16, 510, 17), 'trame.html.vuetify.VSelect', 'vuetify.VSelect', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((562, 17, 562, 39), 'trame.html.vuetify.VCol', 'vuetify.VCol', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((563, 16, 571, 17), 'trame.html.vuetify.VSelect', 'vuetify.VSelect', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((572, 17, 572, 39), 'trame.html.vuetify.VCol', 'vuetify.VCol', (), '', False, 'from trame.html import vtk, vuetify, widgets\n'), ((573, 16, 589, 17), 
'trame.html.vuetify.VSelect', 'vuetify.VSelect', (), '', False, 'from trame.html import vtk, vuetify, widgets\n')] |
AleBurzio11/webots_ros2 | webots_ros2_tutorials/webots_ros2_tutorials/master.py | 99fa4a1a9d467e4ba71eff17ddf4e82444c78938 | # Copyright 1996-2021 Soft_illusion.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import rclpy
from rclpy.node import Node
from std_msgs.msg import Float64
from geometry_msgs.msg import Twist
class LineFollower(Node):
def __init__(self):
super().__init__('linefollower_cmdvel')
# Subscribe Infra Red sensors
self.subs_right_ir = self.create_subscription(
Float64, 'right_IR', self.right_infrared_callback, 1)
self.subs_left_ir = self.create_subscription(
Float64, 'left_IR', self.left_infrared_callback, 1)
self.subs_mid_ir = self.create_subscription(
Float64, 'mid_IR', self.mid_infrared_callback, 1)
# Publish cmd vel
self.pubs_cmdvel = self.create_publisher(Twist, 'cmd_vel', 1)
# vehicle parameters
self.speed = 0.2
self.angle_correction = 0.01
# Initialize parameters
self.ground_right, self.ground_mid, self.ground_left = 0, 0, 0
self.delta = 0
self.cmd = Twist()
self.stop = False
self.count = 0
self.count_threshold = 10
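# debounce: the robot only stops after the line has been lost for 10
# consecutive sensor readings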
def lineFollowingModule(self):
# Constant velocity
self.cmd.linear.x = self.speed
# Correction parameters
self.delta = self.ground_right - self.ground_left
self.cmd.angular.z = self.angle_correction*self.delta
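# a simple proportional (P) controller: the right/left IR difference is the
# error term and angle_correction is the gain (0.01 appears hand-tuned)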
# Stop logic: halt if no sensor has seen the black line for several consecutive readings.
if self.ground_right > 500 and self.ground_left > 500 and self.ground_mid > 500:
self.count += 1
else:
self.count = 0
if self.count > self.count_threshold:
self.stop = True
if self.stop:
self.cmd.linear.x = 0.0
self.cmd.angular.z = 0.0
# Publish cmd vel
self.pubs_cmdvel.publish(self.cmd)
self.stop = False
# Call backs to update sensor reading variables
def right_infrared_callback(self, msg):
self.ground_right = msg.data
self.lineFollowingModule()
def left_infrared_callback(self, msg):
self.ground_left = msg.data
def mid_infrared_callback(self, msg):
self.ground_mid = msg.data
def main(args=None):
rclpy.init(args=args)
ls = LineFollower()
rclpy.spin(ls)
ls.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
| [((85, 4, 85, 25), 'rclpy.init', 'rclpy.init', (), '', False, 'import rclpy\n'), ((88, 4, 88, 18), 'rclpy.spin', 'rclpy.spin', ({(88, 15, 88, 17): 'ls'}, {}), '(ls)', False, 'import rclpy\n'), ((91, 4, 91, 20), 'rclpy.shutdown', 'rclpy.shutdown', ({}, {}), '()', False, 'import rclpy\n'), ((41, 19, 41, 26), 'geometry_msgs.msg.Twist', 'Twist', ({}, {}), '()', False, 'from geometry_msgs.msg import Twist\n')] |
arrowkato/pytest-CircleiCI | dev-template/src/mysql_connect_sample.py | 2f6a1460a48bf88547538cfc72880a9c86f9ec23 | import mysql.connector
from mysql.connector import errorcode
config = {
'user': 'user',
'password': 'password',
'host': 'mysql_container',
'database': 'sample_db',
'port': '3306',
}
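# NOTE: host 'mysql_container' suggests this sample is meant to run on the same
# Docker network as a MySQL container; adjust host/port for other environments.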
if __name__ == "__main__":
try:
conn = mysql.connector.connect(**config)
cursor = conn.cursor()
cursor.execute('select * from users')
for row in cursor.fetchall():
print("name:" + str(row[0]) + "" + "time_zone_id" + str(row[1]))
conn.close()
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err)
else:
conn.close()
| [] |
viniciusbonito/CeV-Python-Exercicios | Mundo 1/ex011.py | 6182421332f6f0c0a567c3e125fdc05736fa6281 | # create a program that asks for the dimensions of a wall, calculates its area and reports how many
# liters of paint would be needed to paint it, after asking for the coverage stated on the paint can
print('=' * 40)
print('{:^40}'.format('Assistente de pintura'))
print('=' * 40)
altura = float(input('Informe a altura da parede em metros: '))
largura = float(input('Informe a largura da parede em metros: '))
area = altura * largura
print('\nA área total da parede é de {:.2f}m²'.format(area))
litros = float(input('\nQuantos litros contém a lata de tinta escolhida? '))
rendlata = float(input('Qual o rendimento em metros informado na lata? '))
rendlitro = rendlata / litros
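# coverage per liter: a can that covers `rendlata` m² using `litros` liters
# covers rendlata / litros m² per liter; liters needed = area / rendlitro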
print('\nSe a lata possui {:.2f}L e rende {:.2f}m²'.format(litros, rendlata))
print('então o rendimento por litro é de {:.2f}m²'.format(rendlitro))
print('\nSerão necessário {:.2f}L para pintar toda a parede'.format(area / rendlitro)) | [] |
henrique-tavares/Coisas | Python/Mundo 3/ex088.py | f740518b1bedec5b0ea8c12ae07a2cac21eb51ae | from random import sample
from time import sleep
jogos = list()
print('-' * 20)
print(f'{"MEGA SENA":^20}')
print('-' * 20)
while True:
n = int(input("\nQuantos jogos você quer que eu sorteie? "))
if (n > 0):
break
print('\n[ERRO] Valor fora do intervalo')
print()
print('-=' * 3, end=' ')
print(f'SORTEANDO {n} JOGOS', end=' ')
print('-=' * 3)
for i in range(n):
jogos.append(sample(range(1,61), 6))
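# sample() draws 6 distinct numbers from 1..60, so a single game never
# repeats a number (matching Mega Sena rules)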
sleep(0.6)
print(f'Jogo {i+1}: {jogos[i]}')
print('-=' * 5, end=' ')
print('< BOA SORTE >', end=' ')
print('-=' * 3, end='\n\n') | [((28, 4, 28, 14), 'time.sleep', 'sleep', ({(28, 10, 28, 13): '(0.6)'}, {}), '(0.6)', False, 'from time import sleep\n')] |
django-roles-access/master | tests/test_utils.py | 066d0d6b99b986eacc736e6973b415cbb9172d46 | from importlib import import_module
from unittest import TestCase as UnitTestCase
from django.contrib.auth.models import Group
from django.core.management import BaseCommand
from django.conf import settings
from django.test import TestCase
from django.views.generic import TemplateView
try:
from unittest.mock import Mock, patch, MagicMock
except ImportError:
from mock import Mock, patch, MagicMock
from django_roles_access.decorator import access_by_role
from django_roles_access.mixin import RolesMixin
from django_roles_access.models import ViewAccess
from tests import views
from django_roles_access.utils import (walk_site_url, get_views_by_app,
view_access_analyzer,
get_view_analyze_report,
check_django_roles_is_used,
analyze_by_role, APP_NAME_FOR_NONE,
NOT_SECURED_DEFAULT, SECURED_DEFAULT,
PUBLIC_DEFAULT, NONE_TYPE_DEFAULT,
DISABLED_DEFAULT, OutputReport)
class MockRegex:
def __init__(self):
self.pattern = '^fake-regex-pattern/$'
class MockRegexResolver:
def __init__(self):
self.pattern = '^fake-resolver/'
class MockRegexResolverNested:
def __init__(self):
self.pattern = '^fake-nested-resolver/'
class MockPattern:
def __init__(self):
self.regex = MockRegex()
self.callback = 'fake-callback'
self.name = 'fake-view-name'
class MockResolver:
def __init__(self):
self.url_patterns = [MockPattern()]
self.regex = MockRegexResolver()
self.app_name = 'fake-app-name'
self.namespace = 'fake-namespace'
class MockResolverNested:
def __init__(self):
self.url_patterns = [MockResolver()]
self.regex = MockRegexResolverNested()
self.app_name = 'fake-app-name'
self.namespace = 'nested-namespace'
class MockPatternDjango2:
def __init__(self):
self.pattern = '^fake-pattern/'
self.callback = 'fake-callback'
self.name = 'fake-view-name'
class MockPatternDjango2None:
def __init__(self):
self.pattern = '^fake-pattern/'
self.callback = 'fake-callback'
self.name = 'fake-view-none'
class MockResolverDjango2:
def __init__(self):
self.pattern = '^fake-resolver/'
self.url_patterns = [MockPatternDjango2()]
self.app_name = 'fake-app-name'
self.namespace = 'fake-namespace'
class MockResolverDjango2None:
def __init__(self):
self.pattern = '^fake-resolver/'
self.url_patterns = [MockPatternDjango2None()]
self.app_name = None
self.namespace = None
class MockResolverDjango2None2:
def __init__(self):
self.pattern = '^fake-resolver/'
self.url_patterns = [MockResolverDjango2None()]
self.app_name = 'fake-app-name'
self.namespace = 'fake-namespace'
class MockResolverDjangoNested:
def __init__(self):
self.pattern = '^fake-nested-resolver/'
self.url_patterns = [MockResolverDjango2()]
self.app_name = 'fake-app-name'
self.namespace = 'nested-namespace'
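# The Mock* classes above mimic the two URL-pattern shapes walk_site_url must
# handle: Django 1.x exposes the route via a compiled regex (`.regex.pattern`),
# while Django 2.x exposes it directly as `.pattern`.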
class UnitTestWalkSiteURL(UnitTestCase):
def setUp(self):
self.pattern_1 = MockPattern()
self.data = [self.pattern_1]
def test_second_param_is_optional_return_a_list(self):
result = walk_site_url(self.data)
self.assertIsInstance(result, list)
def test_first_param_list_of_pattern_and_view(self):
result = walk_site_url(self.data)
self.assertEqual(result, [('fake-regex-pattern/', 'fake-callback',
'fake-view-name', None)])
def test_first_param_list_of_patterns_and_views(self):
pattern_2 = MockPattern()
pattern_2.regex.pattern = 'fake-regex-pattern-2/'
pattern_2.callback = 'fake-view-2'
result = walk_site_url([self.pattern_1, pattern_2])
self.assertEqual(result, [('fake-regex-pattern/', 'fake-callback',
'fake-view-name', None),
('fake-regex-pattern-2/', 'fake-view-2',
'fake-view-name', None)])
def test_param_list_with_pattern_and_resolver_django_1(self):
expected_result = [
('fake-regex-pattern/', 'fake-callback', 'fake-view-name', None),
('fake-resolver/fake-regex-pattern/',
'fake-callback', 'fake-namespace:fake-view-name', 'fake-app-name'
)]
resolver = MockResolver()
result = walk_site_url([self.pattern_1, resolver])
self.assertEqual(result, expected_result)
def test_param_list_with_pattern_and_nested_resolver_django_1(self):
expected_result = [
('fake-regex-pattern/', 'fake-callback', 'fake-view-name', None),
('fake-nested-resolver/fake-resolver/fake-regex-pattern/',
'fake-callback', 'nested-namespace:fake-namespace:fake-view-name',
'fake-app-name'
)
]
resolver = MockResolverNested()
result = walk_site_url([self.pattern_1, resolver])
self.assertEqual(result, expected_result)
def test_param_list_with_pattern_and_resolver_django_2(self):
expected_result = [
('fake-pattern/', 'fake-callback', 'fake-view-name', None),
('fake-resolver/fake-pattern/',
'fake-callback', 'fake-namespace:fake-view-name', 'fake-app-name'
)
]
resolver = MockResolverDjango2()
result = walk_site_url([MockPatternDjango2(), resolver])
self.assertEqual(result, expected_result)
def test_param_list_with_pattern_and_nested_resolver_django_2(self):
expected_result = [
('fake-pattern/', 'fake-callback', 'fake-view-name', None),
('fake-nested-resolver/fake-resolver/fake-pattern/',
'fake-callback', 'nested-namespace:fake-namespace:fake-view-name',
'fake-app-name'
)
]
result = walk_site_url([MockPatternDjango2(),
MockResolverDjangoNested()])
self.assertEqual(result, expected_result)
def test_param_list_with_resolver_get_app_name_and_view_name_django_1(self):
expected_result = [
('fake-resolver/fake-regex-pattern/',
'fake-callback', 'fake-namespace:fake-view-name', 'fake-app-name'
),
('fake-nested-resolver/fake-resolver/fake-regex-pattern/',
'fake-callback', 'nested-namespace:fake-namespace:fake-view-name',
'fake-app-name'
)
]
result = walk_site_url([MockResolver(), MockResolverNested()])
self.assertEqual(result, expected_result)
def test_param_list_with_resolver_get_app_name_and_view_name_django_2(self):
expected_result = [
('fake-resolver/fake-pattern/',
'fake-callback', 'fake-namespace:fake-view-name', 'fake-app-name'
),
('fake-nested-resolver/fake-resolver/fake-pattern/',
'fake-callback', 'nested-namespace:fake-namespace:fake-view-name',
'fake-app-name'
)
]
resolver = MockResolverDjango2()
nested_resolver = MockResolverDjangoNested()
result = walk_site_url([resolver, nested_resolver])
self.assertEqual(result, expected_result)
def test_when_url_namespace_is_None(self):
expected_result = [
('fake-resolver/fake-resolver/fake-pattern/',
'fake-callback', 'fake-view-none', None
)
]
resolver = MockResolverDjango2None2()
result = walk_site_url([resolver])
self.assertEqual(result, expected_result)
# def test_when_view_name_is_None(self):
# expected_result = [
# ('fake-resolver/fake-pattern/',
# 'fake-callback', 'fake-view-name', None
# )
# ]
# resolver = MockResolverDjango2None2()
# result = walk_site_url([resolver])
# print(result)
# self.assertEqual(result, expected_result)
class IntegratedTestWalkSiteURL(TestCase):
def setUp(self):
self.url = import_module(settings.ROOT_URLCONF).urlpatterns
def test_found_direct_access_view(self):
expected_result = ('direct_access_view/',
views.protected_view_by_role,
'direct_access_view', None)
result = walk_site_url(self.url)
self.assertIn(expected_result, result)
def test_found_included_view_without_namespace(self):
expected_result = ('role-included[135]/view_by_role/',
views.protected_view_by_role,
'django_roles_access:view_protected_by_role',
'django_roles_access')
result = walk_site_url(self.url)
self.assertIn(expected_result, result)
def test_found_included_view_with_namespace(self):
expected_result = ('role-included2/view_by_role/',
views.protected_view_by_role,
'app-ns2:view_protected_by_role',
'django_roles_access')
result = walk_site_url(self.url)
self.assertIn(expected_result, result)
def test_found_nested_access_view(self):
expected_result = ('nest1/nest2/view_by_role/',
views.protected_view_by_role,
'nest1_namespace:nest2_namespace:view_'
'protected_by_role',
'roles-app-name')
result = walk_site_url(self.url)
self.assertIn(expected_result, result)
class UnitTestGetViewsByApp(UnitTestCase):
"""
get_views_by_app receives the result of walk_site_url and must
return a dictionary whose keys are the installed applications.
"""
def setUp(self):
self.data = [('a', 'b', 'c', 'fake-app-1')]
@patch('django_roles_access.utils.settings')
def test_returns_a_dictionary(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2']
result = get_views_by_app(self.data)
self.assertIsInstance(result, dict)
@patch('django_roles_access.utils.settings')
def test_returns_a_dictionary_with_all_installed_apps(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2']
result = get_views_by_app(self.data)
assert 'fake-app-1' in result
assert 'fake-app-2' in result
@patch('django_roles_access.utils.settings')
def test_values_of_returned_dictionary_keys_are_lists(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2']
result = get_views_by_app(self.data)
self.assertIsInstance(result['fake-app-1'], list)
self.assertIsInstance(result['fake-app-2'], list)
@patch('django_roles_access.utils.settings')
def test_receive_list_of_tuples_with_4_element(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1']
result = get_views_by_app(self.data)
assert 'fake-app-1' in result
@patch('django_roles_access.utils.settings')
def test_raise_type_error_if_receive_list_of_tuples_with_3_element(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1']
data = [('a', 'b', 'c')]
with self.assertRaises(TypeError):
get_views_by_app(data)
@patch('django_roles_access.utils.settings')
def test_raise_type_error_if_receive_list_of_tuples_with_5_element(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1']
data = [('a', 'b', 'c', 'd', 'e')]
with self.assertRaises(TypeError):
get_views_by_app(data)
@patch('django_roles_access.utils.settings')
def test_received_data_is_ordered_and_returned_by_application(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2', None]
data = [('a', 'b', 'c', 'fake-app-1'), ('1', '2', '3', 'fake-app-2'),
('a1', 'b2', 'c3', None)]
expected_result = [('a', 'b', 'c')]
result = get_views_by_app(data)
self.assertEqual(expected_result, result['fake-app-1'])
@patch('django_roles_access.utils.settings')
def test_can_work_with_no_declared_application_name(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2', None]
data = [('a', 'b', 'c', 'fake-app-1'), ('1', '2', '3', 'fake-app-2'),
('a1', 'b2', 'c3', None)]
expected_result = [('a1', 'b2', 'c3')]
result = get_views_by_app(data)
self.assertEqual(expected_result, result[APP_NAME_FOR_NONE])
@patch('django_roles_access.utils.settings')
def test_if_application_is_not_in_installed_apps_will_not_be_in_dict(
self, mock_settings
):
mock_settings.INSTALLED_APPS = ['fake-app-1', 'fake-app-2', None]
result = get_views_by_app(self.data)
assert 'fake-app-3' not in result
class IntegratedTestGetViewsByApp(TestCase):
def setUp(self):
self.url = import_module(settings.ROOT_URLCONF).urlpatterns
def test_not_declared_app_are_recognized_as_undefined_app(self):
expected_result = ('direct_access_view/',
views.protected_view_by_role,
'direct_access_view')
result = get_views_by_app(walk_site_url(self.url))
self.assertIn(expected_result, result[APP_NAME_FOR_NONE])
def test_views_without_namespace_are_added_with_app_name_in_view_name(self):
expected_result = ('role-included[135]/view_by_role/',
views.protected_view_by_role,
'django_roles_access:view_protected_by_role')
result = get_views_by_app(walk_site_url(self.url))
self.assertIn(expected_result, result['django_roles_access'])
def test_view_with_namespace_are_added_with_correct_app_name(self):
expected_result = ('role-included2/view_by_role/',
views.protected_view_by_role,
'app-ns2:view_protected_by_role')
result = get_views_by_app(walk_site_url(self.url))
self.assertIn(expected_result, result['django_roles_access'])
def test_nested_namespace_are_added_with_correct_app_name(self):
expected_result = ('nest1/nest2/view_by_role/',
views.protected_view_by_role,
'nest1_namespace:nest2_namespace:view_'
'protected_by_role')
result = get_views_by_app(walk_site_url(self.url))
self.assertIn(expected_result, result['roles-app-name'])
class TestGetViewAnalyzeReport(UnitTestCase):
def test_report_for_no_application_type(self):
expected = u'\t' + NONE_TYPE_DEFAULT
result = get_view_analyze_report(None)
self.assertEqual(result, expected)
def test_report_for_application_type_NOT_SECURED(self):
expected = u'\t' + NOT_SECURED_DEFAULT
result = get_view_analyze_report('NOT_SECURED')
self.assertEqual(result, expected)
def test_report_for_application_type_DISABLED(self):
expected = u'\t' + DISABLED_DEFAULT
result = get_view_analyze_report('DISABLED')
self.assertEqual(result, expected)
def test_report_for_application_type_SECURED(self):
expected = u'\t' + SECURED_DEFAULT
result = get_view_analyze_report('SECURED')
self.assertEqual(result, expected)
def test_report_for_application_type_PUBLIC(self):
expected = u'\t' + PUBLIC_DEFAULT
result = get_view_analyze_report('PUBLIC')
self.assertEqual(result, expected)
class TestCheckDjangoRolesIsUsed(UnitTestCase):
def test_detect_view_is_decorated(self):
@access_by_role
def function():
pass
self.assertTrue(check_django_roles_is_used(function))
def test_detect_view_is_not_decorated(self):
def function():
pass
self.assertFalse(check_django_roles_is_used(function))
def test_detect_view_use_mixin(self):
class Aview(RolesMixin, TemplateView):
template_name = 'dummyTemplate.html'
self.assertTrue(check_django_roles_is_used(Aview))
def test_detect_view_not_use_mixin(self):
class Aview(TemplateView):
template_name = 'dummyTemplate.html'
self.assertFalse(check_django_roles_is_used(Aview))
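# Note: decorating a TestCase class with @patch (below) injects the mock as a
# trailing argument into every test method of that class.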
@patch('django_roles_access.utils.ViewAccess')
class UnitTestAnalyzeByRoleAccess(UnitTestCase):
def test_detect_access_is_by_role(
self, mock_view_access
):
expected = u'ERROR: No roles configured to access de view.'
mock_view_access.type = 'br'
mock_view_access.roles.count.return_value = 0
result = analyze_by_role(mock_view_access)
self.assertEqual(result, expected)
def test_detect_access_is_not_by_role(
self, mock_view_access
):
expected = u''
mock_view_access.type = 'pu'
result = analyze_by_role(mock_view_access)
self.assertEqual(result, expected)
def test_detect_access_is_not_by_role_with_roles(
self, mock_view_access
):
expected = u'Roles with access: role-1, role-2'
mock_view_access.type = 'br'
role_1 = Mock()
role_1.name = u'role-1'
role_2 = Mock()
role_2.name = u'role-2'
mock_view_access.roles.all.return_value = [role_1, role_2]
result = analyze_by_role(mock_view_access)
self.assertEqual(result, expected)
def test_detect_access_is_not_by_role_without_roles(
self, mock_view_access
):
expected = u'ERROR: No roles configured to access de view.'
mock_view_access.type = 'br'
mock_view_access.roles.count.return_value = 0
result = analyze_by_role(mock_view_access)
self.assertEqual(result, expected)
class IntegratedTestAnalyzeByRoleAccess(TestCase):
def test_detect_access_is_by_role(self):
expected = u'ERROR: No roles configured to access de view.'
view_access = ViewAccess.objects.create(view='any-name', type='br')
result = analyze_by_role(view_access)
self.assertEqual(result, expected)
def test_detect_access_is_not_by_role(self):
expected = u''
view_access = ViewAccess.objects.create(view='any-name', type='pu')
result = analyze_by_role(view_access)
self.assertEqual(result, expected)
def test_detect_access_is_by_role_with_roles(self):
expected = u'Roles with access: role-1, role-2'
view_access = ViewAccess.objects.create(view='any-name', type='br')
role_1, created = Group.objects.get_or_create(name='role-1')
role_2, created = Group.objects.get_or_create(name='role-2')
view_access.roles.add(role_1)
view_access.roles.add(role_2)
view_access.save()
result = analyze_by_role(view_access)
self.assertEqual(result, expected)
def test_detect_access_is_not_by_role_without_roles(self):
expected = u'ERROR: No roles configured to access de view.'
view_access = ViewAccess.objects.create(view='any-name', type='br')
result = analyze_by_role(view_access)
self.assertEqual(result, expected)
@patch('django_roles_access.utils.ViewAccess.objects')
class UnitTestViewAnalyzer(UnitTestCase):
def test_view_analyzer_return_a_report(
self, mock_objects
):
view_access = Mock()
view_access.type = 'pu'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
result = view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', 'fake-site-active')
try:
self.assertIsInstance(result, unicode)
except NameError:  # Python 3 has no `unicode` builtin
self.assertIsInstance(result, str)
def test_view_analyzer_search_view_access_for_the_view(
self, mock_objects
):
view_access = Mock()
view_access.type = 'pu'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', 'fake-site-active')
assert mock_objects.first.called
def test_view_analyzer_search_view_access_for_the_view_once(
self, mock_objects
):
view_access = Mock()
view_access.type = 'pu'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', 'fake-site-active')
self.assertEqual(mock_objects.filter.call_count, 1)
def test_view_analyzer_search_view_access_with_view_name(
self, mock_objects
):
view_access = Mock()
view_access.type = 'pu'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', 'fake-site-active')
mock_objects.filter.assert_called_once_with(view='fake-view-name')
def test_view_access_type_when_site_active_and_exists_view_access(
self, mock_objects
):
expected = u'View access is of type Public.'
view_access = Mock()
view_access.type = 'pu'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
result = view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', True)
self.assertEqual(result, expected)
@patch('django_roles_access.utils.analyze_by_role')
def test_view_access_type_by_role_call_analyze_by_role(
self, mock_analyze_by_role, mock_objects
):
view_access = Mock()
view_access.type = 'br'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', True)
assert mock_analyze_by_role.called
@patch('django_roles_access.utils.analyze_by_role')
def test_view_access_type_by_role_call_analyze_by_role_once(
self, mock_analyze_by_role, mock_objects
):
view_access = Mock()
view_access.type = 'br'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', True)
self.assertEqual(mock_analyze_by_role.call_count, 1)
@patch('django_roles_access.utils.analyze_by_role')
def test_view_access_type_by_role_call_analyze_by_role_with_view_access(
self, mock_analyze_by_role, mock_objects
):
view_access = Mock()
view_access.type = 'br'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
view_access_analyzer('fake-app-type', 'fake-callback',
'fake-view-name', True)
mock_analyze_by_role.assert_called_once_with(view_access)
def test_no_view_access_object_for_the_view_and_site_active_no_app_type(
self, mock_objects
):
expected = u'\t' + NONE_TYPE_DEFAULT
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer(None, 'fake-callback',
'fake-view-name', True)
self.assertEqual(result, expected)
def test_no_view_access_object_and_site_active_app_type_NOT_SECURED(
self, mock_objects
):
expected = u'\t' + NOT_SECURED_DEFAULT
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer('NOT_SECURED', 'fake-callback',
'fake-view-name', True)
self.assertEqual(result, expected)
def test_no_view_access_object_and_site_active_app_type_DISABLED(
self, mock_objects
):
expected = u'\t' + DISABLED_DEFAULT
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer('DISABLED', 'fake-callback',
'fake-view-name', True)
self.assertEqual(result, expected)
def test_no_view_access_object_and_site_active_app_type_SECURED(
self, mock_objects
):
expected = u'\t' + SECURED_DEFAULT
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer('SECURED', 'fake-callback',
'fake-view-name', True)
self.assertEqual(result, expected)
def test_no_view_access_object_and_site_active_app_type_PUBLIC(
self, mock_objects
):
expected = u'\t' + PUBLIC_DEFAULT
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer('PUBLIC', 'fake-callback',
'fake-view-name', True)
self.assertEqual(result, expected)
def test_middleware_not_used_view_access_object_exist_and_dr_tools_used(
self, mock_objects
):
expected = u'View access is of type Public.'
@access_by_role
def function():
pass
view_access = Mock()
view_access.type = 'pu'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
result = view_access_analyzer('fake-app-type', function,
'fake-view-name', False)
self.assertEqual(result, expected)
def test_middleware_not_used_view_access_object_exist_and_dr_tools_not_used(
self, mock_objects
):
expected = u'ERROR: View access object exist for the view, but no '
expected += u'Django role access tool is used: neither decorator, '
expected += u'mixin, or middleware.'
def function():
pass
view_access = Mock()
view_access.type = 'pu'
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = view_access
result = view_access_analyzer('fake-app-type', function,
'fake-view-name', False)
self.assertEqual(result, expected)
def test_middleware_not_used_dr_tools_are_used_no_view_access_object(
self, mock_objects
):
expected = u'\t' + PUBLIC_DEFAULT
@access_by_role
def function():
pass
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer('PUBLIC', function,
'fake-view-name', False)
self.assertEqual(result, expected)
def test_no_django_roles_tools_used_no_application_type(
self, mock_objects
):
expected = u'No Django roles access tool used. Access to view depends '
expected += u'on its implementation.'
def function():
pass
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer(None, function,
'fake-view-name', False)
self.assertEqual(result, expected)
def test_no_django_roles_tools_used_application_type(
self, mock_objects
):
expected = u'No Django roles access tool used. Access to view depends '
expected += u'on its implementation.'
def function():
pass
mock_objects.filter.return_value = mock_objects
mock_objects.first.return_value = None
result = view_access_analyzer('Authorized', function,
'fake-view-name', False)
self.assertEqual(result, expected)
class IntegratedTestViewAnalyzer(TestCase):
def test_with_middleware_SECURED_without_view_access_object(self):
expected = u'\t' + SECURED_DEFAULT
result = view_access_analyzer(
'SECURED', views.MiddlewareView.as_view,
'django_roles_access:middleware_view_class',
True)
self.assertEqual(expected, result)
def test_with_middleware_NOT_SECURED_with_view_access_object(self):
ViewAccess.objects.create(
view='django_roles_access:middleware_view_class',
type='br')
result = view_access_analyzer(
'NOT_SECURED', views.MiddlewareView.as_view,
'django_roles_access:middleware_view_class',
True)
self.assertEqual(result, u'\t' + NOT_SECURED_DEFAULT)
def test_with_middleware_DISABLED_with_view_access_object(self):
ViewAccess.objects.create(
view='django_roles_access:middleware_view_class',
type='pu')
result = view_access_analyzer(
'DISABLED', views.MiddlewareView.as_view,
'django_roles_access:middleware_view_class',
True)
self.assertEqual(result, u'\t' + DISABLED_DEFAULT)
def test_with_middleware_with_view_access_object(self):
expected = u'View access is of type By role.'
expected += u'ERROR: No roles configured to access de view.'
ViewAccess.objects.create(
view='django_roles_access:middleware_view_class',
type='br')
result = view_access_analyzer(
'SECURED', views.MiddlewareView.as_view,
'django_roles_access:middleware_view_class',
True)
self.assertEqual(result, expected)
def test_with_middleware_with_view_access_object_with_roles(self):
expected = u'View access is of type By role.'
expected += u'Roles with access: test1, test2'
g1, created = Group.objects.get_or_create(name='test1')
g2, created = Group.objects.get_or_create(name='test2')
view_access = ViewAccess.objects.create(
view='django_roles_access:middleware_view_class',
type='br')
view_access.roles.add(g1)
view_access.roles.add(g2)
view_access.save()
result = view_access_analyzer(
'SECURED', views.MiddlewareView.as_view,
'django_roles_access:middleware_view_class',
True)
self.assertEqual(result, expected)
def test_with_middleware_with_view_access_object_authorized(self):
expected = u'View access is of type Authorized.'
ViewAccess.objects.create(
view='django_roles_access:middleware_view_class',
type='au')
result = view_access_analyzer(
'SECURED', views.MiddlewareView.as_view,
'django_roles_access:middleware_view_class',
True)
self.assertEqual(result, expected)
def test_with_middleware_with_view_access_object_public(self):
expected = u'View access is of type Public.'
ViewAccess.objects.create(
view='django_roles_access:middleware_view_class',
type='pu')
result = view_access_analyzer(
'SECURED', views.MiddlewareView.as_view,
'django_roles_access:middleware_view_class',
True)
self.assertEqual(result, expected)
def test_without_middleware_with_view_access_object(self):
expected = u'View access is of type By role.'
expected += u'ERROR: No roles configured to access de view.'
ViewAccess.objects.create(
view='django_roles_access:view_protected_by_role',
type='br')
result = view_access_analyzer(
'SECURED', views.protected_view_by_role,
'django_roles_access:view_protected_by_role',
False)
self.assertEqual(result, expected)
def test_without_middleware_with_view_access_object_with_roles(self):
expected = u'View access is of type By role.'
expected += u'Roles with access: test1, test2'
g1, created = Group.objects.get_or_create(name='test1')
g2, created = Group.objects.get_or_create(name='test2')
view_access = ViewAccess.objects.create(
view='django_roles_access:view_protected_by_role',
type='br')
view_access.roles.add(g1)
view_access.roles.add(g2)
view_access.save()
result = view_access_analyzer(
'SECURED', views.protected_view_by_role,
'django_roles_access:view_protected_by_role',
False)
self.assertEqual(result, expected)
def test_without_middleware_with_view_access_object_authorized(self):
expected = u'View access is of type Authorized.'
ViewAccess.objects.create(
view='django_roles_access:view_protected_by_role',
type='au')
result = view_access_analyzer(
'SECURED', views.protected_view_by_role,
'django_roles_access:view_protected_by_role',
False)
self.assertEqual(result, expected)
def test_without_middleware_with_view_access_object_public(self):
expected = u'View access is of type Public.'
ViewAccess.objects.create(
view='django_roles_access:view_protected_by_role',
type='pu')
result = view_access_analyzer(
'SECURED', views.protected_view_by_role,
'django_roles_access:view_protected_by_role',
False)
self.assertEqual(result, expected)
def test_without_middleware_without_view_access_object_and_view_protected(
self
):
expected = u'\t' + SECURED_DEFAULT
result = view_access_analyzer(
'SECURED', views.protected_view_by_role,
'django_roles_access:view_protected_by_role',
False)
self.assertEqual(result, expected)
def test_without_middleware_no_view_access_object_and_view_protected_without_app(
self
):
expected = u'\t' + NONE_TYPE_DEFAULT
result = view_access_analyzer(
None, views.protected_view_by_role,
'django_roles_access:view_protected_by_role',
False)
self.assertEqual(result, expected)
def test_without_middleware_with_view_access_object_and_view_not_protected(
self
):
expected = u'ERROR: View access object exist for the view, '
expected += 'but no Django role access tool is used: neither '
expected += 'decorator, mixin, or middleware.'
ViewAccess.objects.create(
view='django_roles_access:middleware_view_func',
type='pu')
result = view_access_analyzer(
None, views.middleware_view,
'django_roles_access:middleware_view_func',
False)
self.assertEqual(result, expected)
class UnitTestOutputReport(UnitTestCase):
def setUp(self):
self.patch_mock_stdout = patch.object(BaseCommand(), 'stdout')
self.patch_mock_style = patch.object(BaseCommand(), 'style')
self.mock_stdout = self.patch_mock_stdout.start()
self.mock_style = self.patch_mock_style.start()
self._output = OutputReport(self.mock_stdout, self.mock_style)
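# BaseCommand instances expose `stdout` (an OutputWrapper) and `style` (the
# color palette); patching both lets the tests assert on calls without
# writing to the real console.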
def tearDown(self):
self.patch_mock_stdout.stop()
self.patch_mock_style.stop()
def test_initial_with_parameter(self):
assert self._output.stdout == self.mock_stdout
assert self._output.style == self.mock_style
def test_internal_attributes_are_initialize(self):
assert hasattr(self._output, '_row') and self._output._row == u''
assert hasattr(self._output, '_format') and self._output._format == \
'console'
def test_initial_without_parameter(self):
with self.assertRaises(TypeError):
OutputReport()
def test_default_output_format_is_correct_type(self):
assert self._output._format == 'console'
def test_set_format(self):
self._output.set_format('csv')
assert self._output._format == 'csv'
def test_add_to_row(self):
self._output.add_to_row('text')
self._output.add_to_row('other')
self.assertIn('text', self._output._row)
self.assertIn('other', self._output._row)
def test_write_method_write_to_stdout(self):
self._output.write(u'some text')
assert self.mock_stdout.write.called
def test_write_method_use_stdout_write_once(self):
self._output.write(u'some text')
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_write_method_use_SUCCESS_style_for_styling_output(self):
self._output.write(u'some text')
self.mock_stdout.write.assert_called_once_with(
self.mock_style.SUCCESS())
def test_write_method_use_SUCCESS_style_for_output(self):
self._output.write(u'some text')
assert self.mock_style.SUCCESS.called
def test_write_method_use_style_with_received_argument(self):
self._output.write(u'some text')
self.mock_style.SUCCESS.assert_called_once_with(u'some text')
def test_console_format_write_correct_header_to_stdout_with_SUCCESS_style(
self
):
expected = u'Start checking views access.\n'
expected += u'Start gathering information.'
self._output.write_header()
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
@patch('django_roles_access.utils.timezone')
def test_cvs_format_write_correct_header(
self, mock_timezone
):
mock_timezone.now.return_value = 'fake-date'
self._output.set_format('csv')
self._output.write_header()
self.mock_style.SUCCESS.assert_called_once_with(u'Reported: fake-date')
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_console_format_write_correct_middleware_status_and_end_of_header(
self
):
expected = u'Django roles access middleware is active: False.\n'
self._output.write_middleware_status(False)
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_console_format_write_correct_end_of_header(
self
):
expected = u'Finish gathering information.'
self._output.write_end_of_head()
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_cvs_format_write_correct_correct_middleware_status(
self
):
expected = u'Django roles access middleware is active: False.\n'
self._output.set_format('csv')
self._output.write_middleware_status(False)
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_cvs_format_write_correct_csv_columns(
self
):
expected = u'App Name,Type,View Name,Url,Status,Status description'
self._output.set_format('csv')
self._output.write_end_of_head()
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_console_format_process_app_data_to_stdout_with_SUCCESS_style(self):
app_name = u'fake-app-name'
app_type = u'fake-app-type'
view_list = ['fake-view']
expected = u'\tAnalyzing: {}\n'.format(app_name)
expected += u'\t\t{} is {} type.'.format(app_name, app_type)
self._output.process_application_data(app_name, app_type, view_list)
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_console_format_process_app_data_without_type(self):
app_name = u'fake-app-name'
app_type = None
view_list = ['fake-view']
expected = u'\tAnalyzing: {}\n'.format(app_name)
expected += u'\t\t{} has no type.'.format(app_name)
self._output.process_application_data(app_name, app_type, view_list)
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_console_format_process_app_data_without_views(self):
app_name = u'fake-app-name'
app_type = u'fake-app-type'
view_list = []
expected = u'\tAnalyzing: {}\n'.format(app_name)
expected += u'\t\t{} is {} type.'.format(app_name, app_type)
expected += u'\t\t{} does not have configured views.'.format(app_name)
self._output.process_application_data(app_name, app_type, view_list)
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_cvs_format_process_application_data_to_string(self):
app_name = u'fake-app-name'
app_type = u'fake-app-type'
view_list = ['fake-view-list']
expected = u'{},{},'.format(app_name, app_type)
self._output.set_format('csv')
self._output.process_application_data(app_name, app_type, view_list)
self.assertEqual(expected, self._output._row)
def test_cvs_format_process_application_data_without_type_to_string(self):
app_name = u'fake-app-name'
app_type = None
view_list = ['fake-view-list']
expected = u'{},no type,'.format(app_name)
self._output.set_format('csv')
self._output.process_application_data(app_name, app_type, view_list)
self.assertEqual(expected, self._output._row)
def test_cvs_format_process_application_data_without_views(self):
app_name = u'fake-app-name'
app_type = u'fake-app-type'
view_list = []
expected = u'{},{},,,,,'.format(app_name, app_type)
self._output.set_format('csv')
self._output.process_application_data(app_name, app_type, view_list)
self.assertEqual(expected, self._output._row)
def test_console_format_process_view_data_to_stdout_with_SUCCESS_style(
self
):
view_name = u'fake-view-name'
url = '/fake-url/'
expected = u'\n\t\tAnalysis for view: {}'.format(view_name)
expected += u'\n\t\tView url: {}'.format(url)
self._output.process_view_data(view_name, url)
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_cvs_format_process_view_data(self):
view_name = u'fake-view-name'
url = '/fake-url/'
expected = u'{},{}'.format(view_name, url)
self._output.set_format('csv')
self._output.process_view_data(view_name, url)
self.assertIn(expected, self._output._row)
# View_access_analyzer output.
def test_console_format_write_vaa_to_stdout(self):
self._output.write_view_access_analyzer(u'some text')
assert self.mock_stdout.write.called
def test_console_format_use_stdout_write_once_with_vaa(self):
self._output.write_view_access_analyzer(u'some text')
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_console_format_use_SUCCESS_style_for_styling_output_of_vaa(self):
self._output.write_view_access_analyzer(u'some text')
self.mock_stdout.write.assert_called_once_with(
self.mock_style.SUCCESS())
def test_console_format_use_SUCCESS_style_for_output_of_vaa(self):
self._output.write_view_access_analyzer(u'some text')
assert self.mock_style.SUCCESS.called
def test_console_format_use_style_with_vaa_result(self):
self._output.write_view_access_analyzer(u'some text')
self.mock_style.SUCCESS.assert_called_once_with(u'\t\tsome text')
def test_console_format_use_ERROR_style_for_output_if_error_in_vaa(self):
self._output.write_view_access_analyzer('ERROR: fake report')
assert self.mock_style.ERROR.called
def test_console_format_use_ERROR_style_with_the_error_in_vaa(self):
self._output.write_view_access_analyzer('ERROR: fake report')
self.mock_style.ERROR.assert_called_once_with('\t\t' +
'ERROR: fake report')
def test_console_format_use_WARNING_style_for_output_if_warning_in_vaa(self):
self._output.write_view_access_analyzer('WARNING: fake report')
assert self.mock_style.WARNING.called
def test_console_format_use_WARNING_style_with_the_warning_in_vaa(self):
self._output.write_view_access_analyzer('WARNING: fake report')
self.mock_style.WARNING.assert_called_once_with(
'\t\t' + 'WARNING: fake report')
def test_csv_format_write_view_access_analyzer_with_Normal_to_stdout(self):
self._output.add_to_row('fake-app,fake-type,fake-view,fake-url,')
self._output._format = 'csv'
self._output.write_view_access_analyzer(u'fake-report')
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_csv_format_write_view_access_analyzer_with_Normal_to_style(self):
self._output.add_to_row('fake-app,fake-type,fake-view,fake-url,')
expected = u'fake-app,fake-type,fake-view,fake-url,Normal,fake-report\n'
self._output._format = 'csv'
self._output.write_view_access_analyzer(u'fake-report')
self.mock_style.SUCCESS.assert_called_once_with(expected)
def test_csv_format_write_view_access_analyzer_with_WARNING_to_stdout(self):
self._output.add_to_row('fake-app,fake-type,fake-view,fake-url,')
self._output._format = 'csv'
self._output.write_view_access_analyzer('WARNING: fake-report')
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_csv_format_write_view_access_analyzer_with_WARNING_with_style(
self
):
self._output.add_to_row('fake-app,fake-type,fake-view,fake-url,')
expected = u'fake-app,fake-type,fake-view,fake-url,Warning,' \
u'fake-report\n'
self._output._format = 'csv'
self._output.write_view_access_analyzer('WARNING: fake-report')
self.mock_style.WARNING.assert_called_once_with(expected)
def test_csv_format_write_view_access_analyzer_with_ERROR_to_stdout(self):
self._output.add_to_row('fake-app,fake-type,fake-view,fake-url,')
self._output._format = 'csv'
self._output.write_view_access_analyzer('ERROR: fake-report')
self.assertEqual(self.mock_stdout.write.call_count, 1)
def test_csv_format_write_view_access_analyzer_with_ERROR_with_style(self):
self._output.add_to_row('fake-app,fake-type,fake-view,fake-url,')
expected = u'fake-app,fake-type,fake-view,fake-url,Error,fake-report\n'
self._output._format = 'csv'
self._output.write_view_access_analyzer('ERROR: fake-report')
self.mock_style.ERROR.assert_called_once_with(expected)
def test_csv_format_write_view_access_analyzer_reset_OutputFormater_row(
self
):
self._output.add_to_row('fake-app,fake-type,fake-view,fake-url,')
self._output._format = 'csv'
self._output.write_view_access_analyzer('fake-report')
self.assertEqual(self._output._row, u'fake-app,fake-type,')
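    # Application close and footer output.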
def test_console_format_close_application_data_to_stdout_with_SUCCESS_style(
self
):
expected = u'\tFinish analyzing fake-app-name.'
self._output.close_application_data('fake-app-name')
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
    def test_csv_format_close_application_data_to_string(self):
expected = u''
self._output.set_format('csv')
self._output.close_application_data('fake-app-name')
self.assertEqual(self._output._row, expected)
def test_console_format_write_footer_to_stdout_with_SUCCESS_style(self):
expected = u'End checking view access.'
self._output.write_footer()
self.mock_style.SUCCESS.assert_called_once_with(expected)
self.assertEqual(self.mock_stdout.write.call_count, 1)
    def test_csv_format_write_footer_to_string(self):
expected = u'\n'
self._output.set_format('csv')
self._output.write_footer()
self.assertEqual(self._output._row, expected)
jasondavis/FavoriteFiles | favorite_files.py | be088259ac36383399eebe85d8d5b35e235d25b0 | '''
Favorite Files
Licensed under MIT
Copyright (c) 2012 Isaac Muse <[email protected]>
'''
import sublime
import sublime_plugin
from os.path import join, exists, normpath
from favorites import Favorites
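# Global favorites store, persisted as JSON under Packages/User.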
Favs = Favorites(join(sublime.packages_path(), 'User', 'favorite_files_list.json'))
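# State for the per-project toggle refresh cycle: a dummy file inside the
# FavoriteFiles package is opened and saved so the post-save listener below
# can close it and re-run the toggle command.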
class Refresh:
dummy_file = normpath(join(sublime.packages_path(), 'FavoriteFiles', 'refresh.txt'))
on = False
class CleanOrphanedFavoritesCommand(sublime_plugin.WindowCommand):
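    # Drop favorite entries whose files no longer exist on disk.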
def run(self):
# Clean out all dead links
if not Favs.load(clean=True, win_id=self.window.id()):
Favs.load(force=True, clean=True, win_id=self.window.id())
class SelectFavoriteFileCommand(sublime_plugin.WindowCommand):
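    # Show favorites in the quick panel; a group entry can be opened whole or drilled into.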
def open_file(self, value, group=False):
if value >= 0:
active_group = self.window.active_group()
if value < self.num_files or (group and value < self.num_files + 1):
                # Open a global file, a file in a group, or all files in a group
names = []
if group:
if value == 0:
# Open all files in group
names = [self.files[x][1] for x in range(0, self.num_files)]
else:
# Open file in group
names.append(self.files[value - 1][1])
else:
# Open global file
names.append(self.files[value][1])
# Iterate through file list ensure they load in proper view index order
count = 0
for n in names:
if exists(n):
view = self.window.open_file(n)
if view != None:
if active_group >= 0:
self.window.set_view_index(view, active_group, count)
count += 1
else:
sublime.error_message("The following file does not exist:\n%s" % n)
else:
                # Descend into the selected group
value -= self.num_files
self.files = Favs.all_files(group_name=self.groups[value][0].replace("Group: ", "", 1))
self.num_files = len(self.files)
self.groups = []
self.num_groups = 0
# Show files in group
if self.num_files:
self.window.show_quick_panel(
["Open Group"] + self.files,
lambda x: self.open_file(x, group=True)
)
else:
sublime.error_message("No favorites found! Try adding some.")
def run(self):
if not Favs.load(win_id=self.window.id()):
self.files = Favs.all_files()
self.num_files = len(self.files)
self.groups = Favs.all_groups()
self.num_groups = len(self.groups)
if self.num_files + self.num_groups > 0:
self.window.show_quick_panel(
self.files + self.groups,
self.open_file
)
else:
sublime.error_message("No favorites found! Try adding some.")
class AddFavoriteFileCommand(sublime_plugin.WindowCommand):
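    # Add the current file, every open file, or every file in the active layout group, optionally under a group.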
def add(self, names, group_name=None):
disk_omit_count = 0
added = 0
# Iterate names and add them to group/global if not already added
for n in names:
if not Favs.exists(n, group_name=group_name):
if exists(n):
Favs.set(n, group_name=group_name)
added += 1
else:
# File does not exist on disk; cannot add
disk_omit_count += 1
if added:
# Save if files were added
Favs.save(True)
if disk_omit_count:
            # Alert that some files could not be added
            message = "1 file does not exist on disk!" if disk_omit_count == 1 else "%d files do not exist on disk!" % disk_omit_count
sublime.error_message(message)
def create_group(self, value):
repeat = False
if value == "":
# Require an actual name
sublime.error_message("Please provide a valid group name.")
repeat = True
elif Favs.exists(value, group=True):
# Do not allow duplicates
sublime.error_message("Group \"%s\" already exists.")
repeat = True
else:
# Add group
Favs.add_group(value)
self.add(self.name, value)
if repeat:
# Ask again if name was not sufficient
v = self.window.show_input_panel(
"Create Group: ",
"New Group",
self.create_group,
None,
None
)
v.run_command("select_all")
def select_group(self, value, replace=False):
if value >= 0:
group_name = self.groups[value][0].replace("Group: ", "", 1)
if replace:
# Start with empty group for "Replace Group" selection
Favs.add_group(group_name)
# Add favorites
self.add(self.name, group_name)
def show_groups(self, replace=False):
        # Show available groups
self.groups = Favs.all_groups()
self.window.show_quick_panel(
self.groups,
lambda x: self.select_group(x, replace=replace)
)
def group_answer(self, value):
if value >= 0:
if value == 0:
# No group; add file to favorites
self.add(self.name)
elif value == 1:
# Request new group name
v = self.window.show_input_panel(
"Create Group: ",
"New Group",
self.create_group,
None,
None
)
v.run_command("select_all")
elif value == 2:
# "Add to Group"
self.show_groups()
elif value == 3:
# "Replace Group"
self.show_groups(replace=True)
def group_prompt(self):
# Default options
self.group = ["No Group", "Create Group"]
if Favs.group_count() > 0:
# Options if groups already exit
self.group += ["Add to Group", "Replace Group"]
# Present group options
self.window.show_quick_panel(
self.group,
self.group_answer
)
def file_answer(self, value):
if value >= 0:
view = self.window.active_view()
if view != None:
if value == 0:
# Single file
name = view.file_name()
if name != None:
self.name.append(name)
self.group_prompt()
if value == 1:
# All files in window
views = self.window.views()
if len(views) > 0:
for v in views:
name = v.file_name()
if name != None:
self.name.append(name)
if len(self.name) > 0:
self.group_prompt()
if value == 2:
# All files in layout group
group, idx = self.window.get_view_index(view)
views = self.window.views_in_group(group)
if len(views) > 0:
for v in views:
name = v.file_name()
if name != None:
self.name.append(name)
if len(self.name) > 0:
self.group_prompt()
def file_prompt(self, view_code):
# Add current active file
options = ["Add Current File to Favorites"]
if view_code > 0:
# Add all files in window
options.append("Add All Files to Favorites")
if view_code > 1:
# Add all files in layout group
options.append("Add All Files to in Active Group to Favorites")
# Preset file options
self.window.show_quick_panel(
options,
self.file_answer
)
def run(self):
view = self.window.active_view()
self.name = []
if view != None:
view_code = 0
views = self.window.views()
# If there is more than one view open allow saving all views
            # TODO: Widget views probably show up here too, maybe look into excluding them
if len(views) > 1:
view_code = 1
# See if there is more than one group; if so allow saving of a specific group
if self.window.num_groups() > 1:
group, idx = self.window.get_view_index(view)
group_views = self.window.views_in_group(group)
if len(group_views) > 1:
view_code = 2
self.file_prompt(view_code)
else:
# Only single file open, proceed without file options
name = view.file_name()
if name != None:
self.name.append(name)
self.group_prompt()
class RemoveFavoriteFileCommand(sublime_plugin.WindowCommand):
def remove(self, value, group=False, group_name=None):
if value >= 0:
# Remove file from global, file from group list, or entire group
if value < self.num_files or (group and value < self.num_files + 1):
name = None
if group:
if group_name == None:
return
if value == 0:
# Remove group
Favs.remove_group(group_name)
Favs.save(True)
return
else:
# Remove group file
name = self.files[value - 1][1]
else:
# Remove global file
name = self.files[value][1]
# Remove file and save
Favs.remove(name, group_name=group_name)
Favs.save(True)
else:
                # Descend into group
value -= self.num_files
group_name = self.groups[value][0].replace("Group: ", "", 1)
self.files = Favs.all_files(group_name=group_name)
self.num_files = len(self.files)
self.groups = []
self.num_groups = 0
# Show group files
if self.num_files:
self.window.show_quick_panel(
["Remove Group"] + self.files,
lambda x: self.remove(x, group=True, group_name=group_name)
)
else:
sublime.error_message("No favorites found! Try adding some.")
def run(self):
if not Favs.load(win_id=self.window.id()):
# Present both files and groups for removal
self.files = Favs.all_files()
self.num_files = len(self.files)
self.groups = Favs.all_groups()
self.num_groups = len(self.groups)
# Show panel
if self.num_files + self.num_groups > 0:
self.window.show_quick_panel(
self.files + self.groups,
self.remove
)
else:
sublime.error_message("No favorites to remove!")
class FavoritesForceRefreshListenerCommand(sublime_plugin.EventListener):
def on_post_save(self, view):
if Refresh.on:
path = view.file_name()
if path != None:
if normpath(view.file_name()) == Refresh.dummy_file:
# Close refresh file if more than one view is open
if len(view.window().views()) > 1:
sublime.set_timeout(lambda: sublime.active_window().run_command("close_file"), 100)
# Attempt toggle again
sublime.set_timeout(lambda: sublime.active_window().run_command("toggle_per_project_favorites"), 1000)
class TogglePerProjectFavoritesCommand(sublime_plugin.WindowCommand):
def save(self, view):
if Refresh.on:
path = view.file_name()
if path != None:
if normpath(view.file_name()) == Refresh.dummy_file:
view.run_command('save')
def run(self):
refresh = True
win_id = self.window.id()
if Refresh.on:
Refresh.on = False
refresh = False
# Try and toggle back to global first
if not Favs.toggle_global(win_id):
return
# Try and toggle per project
if refresh:
view = self.window.open_file(Refresh.dummy_file)
if view != None:
Refresh.on = True
self.window.focus_view(view)
sublime.set_timeout(lambda: self.save(view), 100)
else:
sublime.error_message('Could not find a project file!')
else:
if Favs.toggle_per_projects(win_id):
sublime.error_message('Could not find a project file!')
else:
Favs.open(win_id=self.window.id())
def is_enabled(self):
return sublime.load_settings("favorite_files.sublime-settings").get("enable_per_projects", False)
| [((12, 22, 12, 45), 'sublime.packages_path', 'sublime.packages_path', ({}, {}), '()', False, 'import sublime\n'), ((16, 31, 16, 54), 'sublime.packages_path', 'sublime.packages_path', ({}, {}), '()', False, 'import sublime\n'), ((108, 12, 108, 42), 'sublime.error_message', 'sublime.error_message', ({(108, 34, 108, 41): 'message'}, {}), '(message)', False, 'import sublime\n'), ((114, 12, 114, 71), 'sublime.error_message', 'sublime.error_message', ({(114, 34, 114, 70): '"""Please provide a valid group name."""'}, {}), "('Please provide a valid group name.')", False, 'import sublime\n'), ((86, 16, 86, 77), 'sublime.error_message', 'sublime.error_message', ({(86, 38, 86, 76): '"""No favorites found! Try adding some."""'}, {}), "('No favorites found! Try adding some.')", False, 'import sublime\n'), ((96, 19, 96, 28), 'os.path.exists', 'exists', ({(96, 26, 96, 27): 'n'}, {}), '(n)', False, 'from os.path import join, exists, normpath\n'), ((118, 12, 118, 65), 'sublime.error_message', 'sublime.error_message', ({(118, 34, 118, 64): '"""Group "%s" already exists."""'}, {}), '(\'Group "%s" already exists.\')', False, 'import sublime\n'), ((318, 16, 318, 64), 'sublime.error_message', 'sublime.error_message', ({(318, 38, 318, 63): '"""No favorites to remove!"""'}, {}), "('No favorites to remove!')", False, 'import sublime\n'), ((362, 16, 362, 71), 'sublime.error_message', 'sublime.error_message', ({(362, 38, 362, 70): '"""Could not find a project file!"""'}, {}), "('Could not find a project file!')", False, 'import sublime\n'), ((365, 16, 365, 71), 'sublime.error_message', 'sublime.error_message', ({(365, 38, 365, 70): '"""Could not find a project file!"""'}, {}), "('Could not find a project file!')", False, 'import sublime\n'), ((370, 15, 370, 71), 'sublime.load_settings', 'sublime.load_settings', ({(370, 37, 370, 70): '"""favorite_files.sublime-settings"""'}, {}), "('favorite_files.sublime-settings')", False, 'import sublime\n'), ((48, 23, 48, 32), 'os.path.exists', 'exists', ({(48, 30, 48, 31): 'n'}, {}), '(n)', False, 'from os.path import join, exists, normpath\n'), ((72, 20, 72, 81), 'sublime.error_message', 'sublime.error_message', ({(72, 42, 72, 80): '"""No favorites found! Try adding some."""'}, {}), "('No favorites found! Try adding some.')", False, 'import sublime\n'), ((301, 20, 301, 81), 'sublime.error_message', 'sublime.error_message', ({(301, 42, 301, 80): '"""No favorites found! Try adding some."""'}, {}), "('No favorites found! Try adding some.')", False, 'import sublime\n'), ((56, 24, 56, 91), 'sublime.error_message', 'sublime.error_message', ({(56, 46, 56, 90): '("""The following file does not exist:\n%s""" % n)'}, {}), '("""The following file does not exist:\n%s""" % n)', False, 'import sublime\n'), ((331, 48, 331, 71), 'sublime.active_window', 'sublime.active_window', ({}, {}), '()', False, 'import sublime\n'), ((329, 52, 329, 75), 'sublime.active_window', 'sublime.active_window', ({}, {}), '()', False, 'import sublime\n')] |
ktok07b6/polyphony | tests/list/list03.py | 657c5c7440520db6b4985970bd50547407693ac4 | from polyphony import testbench
def list03(x, y, z):
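    # Write a[y] + z into a[x] through index variables, then return the stored value.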
a = [1, 2, 3]
r0 = x
r1 = y
a[r0] = a[r1] + z
return a[r0]
@testbench
def test():
    assert 4 == list03(0, 1, 2)
    assert 5 == list03(2, 1, 3)
test()
| [] |
paskausks/sc2cm | sc2clanman/views.py | 9c80e581933531496333d4a54c40174d4fb583a5 | #!/usr/bin/env python3
from collections import Counter
from django.conf import settings
from django.contrib.auth.decorators import login_required, permission_required
from django.db import models as dm
from django.shortcuts import get_object_or_404, render
from django.views.generic.list import BaseListView
from django.views.generic import TemplateView
from django.utils.decorators import method_decorator
from . import models, apps, sc2, mixins
class BaseView(TemplateView):
"""
A TemplateView subclass which adds the Opts object to context.
"""
current_model = 'clanmember'
def get_context_data(self, **kwargs):
ctx = super(BaseView, self).get_context_data(**kwargs)
# Get links so we can display links to admin.
class Opts(object):
app_label = 'sc2clanman'
model_name = self.current_model
ctx['opts'] = Opts()
ctx['is_authorized'] = self.request.user.is_superuser or self.request.user.is_staff
return ctx
class AuthenticatedView(BaseView):
"""
BaseView subclass with the login required decorator applied.
"""
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(AuthenticatedView, self).dispatch(*args, **kwargs)
class ListView(BaseListView, BaseView):
"""
Combines BaseView with capability to show a paginated object list
"""
pass
class MemberView(ListView):
""" Show the clanmembers in a list ordered by ladder score"""
template_name = 'sc2clanman/members.html'
# No ordering since it's done by the front-end
queryset = models.ClanMember.clanmembers.all()
def get_context_data(self, **kwargs):
ctx = super(MemberView, self).get_context_data(**kwargs)
ctx['last_member_update'] = models.SyncLog.objects.filter(
action=models.SyncLog.CLAN_MEMBER_SYNC,
success=True,
).order_by('-time')[0].time
ctx['last_detail_update'] = models.SyncLog.objects.filter(
action=models.SyncLog.CLAN_MEMBER_DETAIL_SYNC,
success=True
).order_by('-time')[0].time
# Calculate quick stats
# Game stats - aggregate and sum wins and losses
gp = self.queryset.aggregate(dm.Sum('wins'), dm.Sum('losses'))
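        # aggregate() returns a dict keyed by the "<field>__sum" convention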
ctx['total_games_played'] = gp['wins__sum'] + gp['losses__sum']
        # Annotate games played for each member
games_played = self.queryset.annotate(
games_played=dm.F('wins') + dm.F('losses')
).order_by('games_played')
ctx['least_games_played'] = games_played.filter(games_played__gt=0).first()
ctx['most_games_played'] = games_played.order_by('-games_played').first()
# Last game date
ctx['least_passionate'] = self.queryset.order_by('last_game').first()
# Most prominent league, country and race
league_breakdown = Counter(
self.queryset.exclude(score=models.ClanMember.SCORE_UNRANKED).values_list('league', flat=True)
).most_common()
ctx['league_breakdown'] = (
(sc2.League(l[0]), l[1]) for l in league_breakdown
)
ctx['country_breakdown'] = Counter(
self.queryset.exclude(country='').values_list('country', flat=True)
).most_common()
race_breakdown = Counter(
self.queryset.exclude(score=models.ClanMember.SCORE_UNRANKED).values_list('race', flat=True)
).most_common(4)
ctx['race_breakdown'] = (
(sc2.Race(r[0]), r[1]) for r in race_breakdown
)
ctx['version'] = apps.ClanManConfig.version_id
return ctx
class ClanWarView(BaseView):
template_name = 'sc2clanman/cw.html'
current_model = 'clanwar'
def get_context_data(self, **kwargs):
ctx = super(ClanWarView, self).get_context_data(**kwargs)
ctx['clanwars'] = models.ClanWar.objects.all()
return ctx
class ClanWarDetailView(BaseView):
template_name = 'sc2clanman/cwdetail.html'
current_model = 'clanwar'
def get_context_data(self, **kwargs):
ctx = super(ClanWarDetailView, self).get_context_data(**kwargs)
ctx['cw'] = get_object_or_404(models.ClanWar, id=kwargs.get('cw_id'))
ctx['clan_tag'] = settings.SC2_CLANMANAGER_CLAN_TAG
return ctx
| [((41, 5, 41, 37), 'django.utils.decorators.method_decorator', 'method_decorator', ({(41, 22, 41, 36): 'login_required'}, {}), '(login_required)', False, 'from django.utils.decorators import method_decorator\n'), ((74, 37, 74, 51), 'django.db.models.Sum', 'dm.Sum', ({(74, 44, 74, 50): '"""wins"""'}, {}), "('wins')", True, 'from django.db import models as dm\n'), ((74, 53, 74, 69), 'django.db.models.Sum', 'dm.Sum', ({(74, 60, 74, 68): '"""losses"""'}, {}), "('losses')", True, 'from django.db import models as dm\n'), ((79, 29, 79, 41), 'django.db.models.F', 'dm.F', ({(79, 34, 79, 40): '"""wins"""'}, {}), "('wins')", True, 'from django.db import models as dm\n'), ((79, 44, 79, 58), 'django.db.models.F', 'dm.F', ({(79, 49, 79, 57): '"""losses"""'}, {}), "('losses')", True, 'from django.db import models as dm\n')] |
parmentelat/manim | manimlib/mobject/functions.py | f05f94fbf51c70591bed3092587a5db0de439738 | from manimlib.constants import *
from manimlib.mobject.types.vectorized_mobject import VMobject
from manimlib.utils.config_ops import digest_config
from manimlib.utils.space_ops import get_norm
class ParametricCurve(VMobject):
CONFIG = {
"t_range": [0, 1, 0.1],
"min_samples": 10,
"epsilon": 1e-8,
# TODO, automatically figure out discontinuities
"discontinuities": [],
"smoothing": True,
}
def __init__(self, t_func, t_range=None, **kwargs):
digest_config(self, kwargs)
if t_range is not None:
self.t_range[:len(t_range)] = t_range
# To be backward compatible with all the scenes specifying t_min, t_max, step_size
self.t_range = [
kwargs.get("t_min", self.t_range[0]),
kwargs.get("t_max", self.t_range[1]),
kwargs.get("step_size", self.t_range[2]),
]
self.t_func = t_func
VMobject.__init__(self, **kwargs)
def get_point_from_function(self, t):
return self.t_func(t)
def init_points(self):
t_min, t_max, step = self.t_range
jumps = np.array(self.discontinuities)
jumps = jumps[(jumps > t_min) & (jumps < t_max)]
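        # Sample each continuous piece separately: interval endpoints stop
        # epsilon short of every discontinuity on both sides.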
boundary_times = [t_min, t_max, *(jumps - self.epsilon), *(jumps + self.epsilon)]
boundary_times.sort()
for t1, t2 in zip(boundary_times[0::2], boundary_times[1::2]):
t_range = [*np.arange(t1, t2, step), t2]
points = np.array([self.t_func(t) for t in t_range])
self.start_new_path(points[0])
self.add_points_as_corners(points[1:])
if self.smoothing:
self.make_smooth()
return self
class FunctionGraph(ParametricCurve):
CONFIG = {
"color": YELLOW,
"x_range": [-8, 8, 0.25],
}
def __init__(self, function, x_range=None, **kwargs):
digest_config(self, kwargs)
self.function = function
if x_range is not None:
self.x_range[:len(x_range)] = x_range
def parametric_function(t):
return [t, function(t), 0]
super().__init__(parametric_function, self.x_range, **kwargs)
def get_function(self):
return self.function
def get_point_from_function(self, x):
return self.t_func(x)
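

# Illustrative usage (assumed from the constructors above, not part of this file):
#     curve = ParametricCurve(lambda t: [np.cos(t), np.sin(t), 0], t_range=[0, TAU])
#     graph = FunctionGraph(lambda x: x ** 2, x_range=[-3, 3])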
| [((18, 8, 18, 35), 'manimlib.utils.config_ops.digest_config', 'digest_config', ({(18, 22, 18, 26): 'self', (18, 28, 18, 34): 'kwargs'}, {}), '(self, kwargs)', False, 'from manimlib.utils.config_ops import digest_config\n'), ((28, 8, 28, 41), 'manimlib.mobject.types.vectorized_mobject.VMobject.__init__', 'VMobject.__init__', ({(28, 26, 28, 30): 'self'}, {}), '(self, **kwargs)', False, 'from manimlib.mobject.types.vectorized_mobject import VMobject\n'), ((57, 8, 57, 35), 'manimlib.utils.config_ops.digest_config', 'digest_config', ({(57, 22, 57, 26): 'self', (57, 28, 57, 34): 'kwargs'}, {}), '(self, kwargs)', False, 'from manimlib.utils.config_ops import digest_config\n')] |
doudoudzj/ecsmate | lib/ecsmate/ecs.py | dda508a64ef9d6979dcc83377bb007d2a0acec30 | #-*- coding: utf-8 -*-
#
# Copyright (c) 2012, ECSMate development team
# All rights reserved.
#
# ECSMate is distributed under the terms of the (new) BSD License.
# The full license can be found in 'LICENSE.txt'.
"""ECS SDK
"""
import time
import hmac
import base64
import hashlib
import urllib
import json
import inspect
from random import random
class ECS(object):
def __init__(self, AccessKeyID, AccessKeySecret, gateway='https://ecs.aliyuncs.com'):
self.AccessKeyID = AccessKeyID
self.AccessKeySecret = AccessKeySecret
self.gateway = gateway
@classmethod
def _urlencode(self, string):
return urllib.quote(string, '~')
def _sign(self, params):
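        # Aliyun-style request signing: sort the parameters, percent-encode
        # each "key=value" pair, build the string-to-sign as
        # "GET&%2F&<encoded query>", HMAC-SHA1 it with the secret + '&',
        # and base64-encode the digest.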
paramstrings = []
for k, v in sorted(params.items()):
paramstrings.append('%s=%s' % (ECS._urlencode(k), ECS._urlencode(v)))
datastrings = [
ECS._urlencode('GET'),
ECS._urlencode('/'),
ECS._urlencode('&'.join(paramstrings)),
]
datastring = '&'.join(datastrings)
signature = hmac.new(self.AccessKeySecret+'&', datastring, hashlib.sha1).digest()
return base64.b64encode(signature)
def _http_get(self, params):
url = self.gateway + '/?'
sysparams = {
'Format': 'JSON',
'Version': '2012-09-13',
'AccessKeyID': self.AccessKeyID,
'SignatureMethod': 'HMAC-SHA1',
'Timestamp': time.strftime('%Y-%m-%dT%XZ'),
'SignatureVersion': '1.0',
'SignatureNonce': str(random()).replace('0.', ''),
}
params.update(sysparams)
params['Signature'] = self._sign(params)
params = urllib.urlencode(params)
url += params
f = urllib.urlopen(url)
data = f.read()
f.close()
return json.loads(data)
def _parse_response(self, apiname, response):
if response.has_key('Error'):
respdata = response['Error']
reqid = respdata['RequestID']
del respdata['RequestID']
return [False, respdata, reqid]
else:
respdata = response[apiname+'Response']
return [True, respdata[apiname+'Result'], respdata['ResponseMetadata']['RequestID']]
def _make_params(self, params):
params = dict((k, str(v)) for k, v in params.items() if k != 'self' and v != None)
params['Action'] = inspect.stack()[1][3]
return params
def _execute(self, params):
response = self._http_get(params)
return self._parse_response(params['Action'], response)
def CreateInstance(self, RegionCode, DiskSize, InstanceType, GroupCode, ImageCode,
MaxBandwidthIn=None, MaxBandwidthOut=None, InstanceName=None, HostName=None,
Password=None, ZoneCode=None):
params = self._make_params(locals())
return self._execute(params)
def StartInstance(self, InstanceName):
params = self._make_params(locals())
return self._execute(params)
def StopInstance(self, InstanceName, ForceStop=None):
params = self._make_params(locals())
return self._execute(params)
def RebootInstance(self, InstanceName, ForceStop=None):
params = self._make_params(locals())
return self._execute(params)
def ResetInstance(self, InstanceName, ImageCode=None, DiskType=None):
params = self._make_params(locals())
return self._execute(params)
def ResetPassword(self, InstanceName, NewPassword=None):
params = self._make_params(locals())
return self._execute(params)
def DeleteInstance(self, InstanceName):
params = self._make_params(locals())
return self._execute(params)
def DescribeInstanceStatus(self, RegionCode=None, ZoneCode=None, PageNumber=None, PageSize=None):
params = self._make_params(locals())
return self._execute(params)
def DescribeInstanceAttribute(self, InstanceName):
params = self._make_params(locals())
return self._execute(params)
def ModifyInstanceAttribute(self, InstanceName, InstanceType):
params = self._make_params(locals())
return self._execute(params)
def ModifyBandwidth(self, InstanceName, MaxBandwidthOut, MaxBandwidthIn):
params = self._make_params(locals())
return self._execute(params)
def ModifyHostName(self, InstanceName, HostName):
params = self._make_params(locals())
return self._execute(params)
def CreateDisk(self, InstanceName, Size, SnapshotCode=None):
params = self._make_params(locals())
return self._execute(params)
def DeleteDisk(self, InstanceName, DiskCode):
params = self._make_params(locals())
return self._execute(params)
def DescribeDisks(self, InstanceName):
params = self._make_params(locals())
return self._execute(params)
def DescribeImages(self, RegionCode=None, PageNumber=None, PageSize=None):
params = self._make_params(locals())
return self._execute(params)
def AllocateAddress(self, InstanceName):
params = self._make_params(locals())
return self._execute(params)
def ReleaseAddress(self, PublicIpAddress):
params = self._make_params(locals())
return self._execute(params)
def CreateSecurityGroup(self, GroupCode, RegionCode, Description):
params = self._make_params(locals())
return self._execute(params)
def AuthorizeSecurityGroup(self, GroupCode, RegionCode, IpProtocol, PortRange,
SourceGroupCode=None, SourceCidrIp=None, Policy=None, NicType=None, Priority=None):
params = self._make_params(locals())
return self._execute(params)
def DescribeSecurityGroupAttribute(self, GroupCode, RegionCode, NicType=None):
params = self._make_params(locals())
return self._execute(params)
def DescribeSecurityGroups(self, RegionCode, PageNumber=None, PageSize=None):
params = self._make_params(locals())
return self._execute(params)
def ModifySecurityGroupAttribute(self, RegionCode, GroupCode, Adjust):
params = self._make_params(locals())
return self._execute(params)
def RevokeSecurityGroup(self, GroupCode, RegionCode, IpProtocol, PortRange,
SourceGroupCode=None, SourceCidrIp=None, Policy=None, NicType=None):
params = self._make_params(locals())
return self._execute(params)
def DeleteSecurityGroup(self, GroupCode, RegionCode):
params = self._make_params(locals())
return self._execute(params)
def CreateSnapshot(self, InstanceName, DiskCode):
params = self._make_params(locals())
return self._execute(params)
def DeleteSnapshot(self, DiskCode, InstanceName, SnapshotCode):
params = self._make_params(locals())
return self._execute(params)
def CancelSnapshotRequest(self, InstanceName, SnapshotCode):
params = self._make_params(locals())
return self._execute(params)
def DescribeSnapshots(self, InstanceName, DiskCode):
params = self._make_params(locals())
return self._execute(params)
def DescribeSnapshotAttribute(self, RegionCode, SnapshotCode):
params = self._make_params(locals())
return self._execute(params)
def RollbackSnapshot(self, InstanceName, DiskCode, SnapshotCode):
params = self._make_params(locals())
return self._execute(params)
def DescribeRegions(self):
params = self._make_params(locals())
return self._execute(params)
def DescribeZones(self, RegionCode):
params = self._make_params(locals())
return self._execute(params)
if __name__ == '__main__':
import pprint
pp = pprint.PrettyPrinter(indent=4)
AccessKeyID = ''
AccessKeySecret = ''
ecs = ECS(AccessKeyID, AccessKeySecret)
if 0:
print '## Regions\n'
regions = ecs.DescribeRegions()[1]
pp.pprint(regions)
print
for region in regions['Regions']:
print '## Zones in %s\n' % region['RegionCode']
zones = ecs.DescribeZones(region['RegionCode'])
if not zones[0]:
pp.pprint(zones)
continue
zones = zones[1]
pp.pprint(zones)
print
for zone in zones['Zones']:
print '## Instances in %s\n' % zone['ZoneCode']
instances = ecs.DescribeInstanceStatus(region['RegionCode'], zone['ZoneCode'])[1]
pp.pprint(instances)
print
print
#pp.pprint(ecs.DescribeInstanceStatus(PageSize=10, PageNumber=1))
#pp.pprint(ecs.DescribeInstanceStatus('cn-hangzhou-dg-a01', 'cn-hangzhou-dg101-a'))
#pp.pprint(ecs.StartInstance('AY1209220917063704221'))
#pp.pprint(ecs.StopInstance('AY1209220917063704221'))
#pp.pprint(ecs.RebootInstance('AY1209220917063704221'))
#pp.pprint(ecs.DescribeInstanceAttribute('AY1209220917063704221'))
#pp.pprint(ecs.DescribeImages(PageSize=10, PageNumber=9))
#pp.pprint(ecs.DescribeDisks('AY1209220917063704221'))
#pp.pprint(ecs.DescribeSnapshots('AY1209220917063704221', '1006-60002839'))
| [] |
mail2nsrajesh/tacker | tacker/api/v1/resource.py | dce6690659836c2885f1cf8227c19be234f8fe25 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility methods for working with WSGI servers redux
"""
from oslo_log import log as logging
import webob.dec
from tacker._i18n import _
from tacker.api import api_common
from tacker import wsgi
LOG = logging.getLogger(__name__)
class Request(wsgi.Request):
pass
def Resource(controller, faults=None, deserializers=None, serializers=None):
"""API entity resource.
Represents an API entity resource and the associated serialization and
deserialization logic
"""
default_deserializers = {'application/json': wsgi.JSONDeserializer()}
default_serializers = {'application/json': wsgi.JSONDictSerializer()}
format_types = {'json': 'application/json'}
action_status = dict(create=201, delete=204)
default_deserializers.update(deserializers or {})
default_serializers.update(serializers or {})
deserializers = default_deserializers
serializers = default_serializers
faults = faults or {}
@webob.dec.wsgify(RequestClass=Request)
def resource(request):
route_args = request.environ.get('wsgiorg.routing_args')
if route_args:
args = route_args[1].copy()
else:
args = {}
# NOTE(jkoelker) by now the controller is already found, remove
# it from the args if it is in the matchdict
args.pop('controller', None)
fmt = args.pop('format', None)
action = args.pop('action', None)
content_type = format_types.get(fmt,
request.best_match_content_type())
language = request.best_match_language()
deserializer = deserializers.get(content_type)
serializer = serializers.get(content_type)
try:
if request.body:
args['body'] = deserializer.deserialize(request.body)['body']
method = getattr(controller, action)
result = method(request=request, **args)
except Exception as e:
mapped_exc = api_common.convert_exception_to_http_exc(e, faults,
language)
if hasattr(mapped_exc, 'code') and 400 <= mapped_exc.code < 500:
LOG.info(_('%(action)s failed (client error): %(exc)s'),
{'action': action, 'exc': mapped_exc})
else:
LOG.exception(
_('%(action)s failed: %(details)s'),
{
'action': action,
'details': extract_exc_details(e),
}
)
raise mapped_exc
status = action_status.get(action, 200)
body = serializer.serialize(result)
# NOTE(jkoelker) Comply with RFC2616 section 9.7
if status == 204:
content_type = ''
body = None
return webob.Response(request=request, status=status,
content_type=content_type,
body=body)
return resource
_NO_ARGS_MARKER = object()
def extract_exc_details(e):
for attr in ('_error_context_msg', '_error_context_args'):
if not hasattr(e, attr):
return _('No details.')
details = e._error_context_msg
args = e._error_context_args
if args is _NO_ARGS_MARKER:
return details
return details % args
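

# Sketch of typical wiring (illustrative; the controller and route names
# below are assumptions, not part of this module):
#   resource = Resource(WidgetController())
#   mapper.connect('/widgets', controller=resource, action='index')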
| [((27, 6, 27, 33), 'oslo_log.log.getLogger', 'logging.getLogger', ({(27, 24, 27, 32): '__name__'}, {}), '(__name__)', True, 'from oslo_log import log as logging\n'), ((40, 49, 40, 72), 'tacker.wsgi.JSONDeserializer', 'wsgi.JSONDeserializer', ({}, {}), '()', False, 'from tacker import wsgi\n'), ((41, 47, 41, 72), 'tacker.wsgi.JSONDictSerializer', 'wsgi.JSONDictSerializer', ({}, {}), '()', False, 'from tacker import wsgi\n'), ((79, 25, 80, 75), 'tacker.api.api_common.convert_exception_to_http_exc', 'api_common.convert_exception_to_http_exc', ({(79, 66, 79, 67): 'e', (79, 69, 79, 75): 'faults', (80, 66, 80, 74): 'language'}, {}), '(e, faults, language)', False, 'from tacker.api import api_common\n')] |
GitHubEmploy/akinator.py | akinator/utils.py | 67c688b0332f4caa72bacc8fbc8f95abfe2290c9 | """
MIT License
Copyright (c) 2019 NinjaSnail1080
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from .exceptions import InvalidAnswerError, InvalidLanguageError, AkiConnectionFailure, AkiTimedOut, AkiNoQuestions, AkiServerDown, AkiTechnicalError
import re
import json
def ans_to_id(ans):
"""Convert an input answer string into an Answer ID for Akinator"""
ans = str(ans).lower()
if ans == "yes" or ans == "y" or ans == "0":
return "0"
elif ans == "no" or ans == "n" or ans == "1":
return "1"
elif ans == "i" or ans == "idk" or ans == "i dont know" or ans == "i don't know" or ans == "2":
return "2"
elif ans == "probably" or ans == "p" or ans == "3":
return "3"
elif ans == "probably not" or ans == "pn" or ans == "4":
return "4"
else:
raise InvalidAnswerError("""
You put "{}", which is an invalid answer.
The answer must be one of these:
- "yes" OR "y" OR "0" for YES
- "no" OR "n" OR "1" for NO
- "i" OR "idk" OR "i dont know" OR "i don't know" OR "2" for I DON'T KNOW
- "probably" OR "p" OR "3" for PROBABLY
- "probably not" OR "pn" OR "4" for PROBABLY NOT
""".format(ans))
def get_lang_and_theme(lang=None):
"""Returns the language code and theme based on what is input"""
if lang is None or lang == "en" or lang == "english":
return {"lang": "en", "theme": "c"}
elif lang == "en_animals" or lang == "english_animals":
return {"lang": "en", "theme": "a"}
elif lang == "en_objects" or lang == "english_objects":
return {"lang": "en", "theme": "o"}
elif lang == "ar" or lang == "arabic":
return {"lang": "ar", "theme": "c"}
elif lang == "cn" or lang == "chinese":
return {"lang": "cn", "theme": "c"}
elif lang == "de" or lang == "german":
return {"lang": "de", "theme": "c"}
elif lang == "de_animals" or lang == "german_animals":
return {"lang": "de", "theme": "a"}
elif lang == "es" or lang == "spanish":
return {"lang": "es", "theme": "c"}
elif lang == "es_animals" or lang == "spanish_animals":
return {"lang": "es", "theme": "a"}
elif lang == "fr" or lang == "french":
return {"lang": "fr", "theme": "c"}
elif lang == "fr_animals" or lang == "french_animals":
return {"lang": "fr", "theme": "a"}
elif lang == "fr_objects" or lang == "french_objects":
return {"lang": "fr", "theme": "o"}
elif lang == "il" or lang == "hebrew":
return {"lang": "il", "theme": "c"}
elif lang == "it" or lang == "italian":
return {"lang": "it", "theme": "c"}
elif lang == "it_animals" or lang == "italian_animals":
return {"lang": "it", "theme": "a"}
elif lang == "jp" or lang == "japanese":
return {"lang": "jp", "theme": "c"}
elif lang == "jp_animals" or lang == "japanese_animals":
return {"lang": "jp", "theme": "a"}
elif lang == "kr" or lang == "korean":
return {"lang": "kr", "theme": "c"}
elif lang == "nl" or lang == "dutch":
return {"lang": "nl", "theme": "c"}
elif lang == "pl" or lang == "polish":
return {"lang": "pl", "theme": "c"}
elif lang == "pt" or lang == "portuguese":
return {"lang": "pt", "theme": "c"}
elif lang == "ru" or lang == "russian":
return {"lang": "ru", "theme": "c"}
elif lang == "tr" or lang == "turkish":
return {"lang": "tr", "theme": "c"}
else:
raise InvalidLanguageError("You put \"{}\", which is an invalid language.".format(lang))
def raise_connection_error(response):
"""Raise the proper error if the API failed to connect"""
if response == "KO - SERVER DOWN":
raise AkiServerDown("Akinator's servers are down in this region. Try again later or use a different language")
elif response == "KO - TECHNICAL ERROR":
raise AkiTechnicalError("Akinator's servers have had a technical error. Try again later or use a different language")
elif response == "KO - TIMEOUT":
raise AkiTimedOut("Your Akinator session has timed out")
elif response == "KO - ELEM LIST IS EMPTY" or response == "WARN - NO QUESTION":
raise AkiNoQuestions("\"Akinator.step\" reached 80. No more questions")
else:
        raise AkiConnectionFailure("An unknown error has occurred. Server response: {}".format(response))
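

# Quick sanity checks (illustrative only; not part of the original module):
if __name__ == "__main__":
    assert ans_to_id("probably not") == "4"
    assert get_lang_and_theme("de_animals") == {"lang": "de", "theme": "a"}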
| [] |
movinalot/ucs | ucs-python/create_ucs_sp_template.py | dc0d37784592d6d78f46efee40c86b6f7ac928b4 | """
create_ucs_sp_template.py
Purpose:
UCS Manager Create a UCS Service Profile Template
Author:
John McDonough ([email protected]) github: (@movinalot)
Cisco Systems, Inc.
"""
from ucsmsdk.ucshandle import UcsHandle
from ucsmsdk.mometa.ls.LsServer import LsServer
from ucsmsdk.mometa.org.OrgOrg import OrgOrg
HANDLE = UcsHandle(
"sandbox-ucsm1.cisco.com",
"admin",
"password"
)
HANDLE.login()
ORG_ORG = OrgOrg(
parent_mo_or_dn='org-root',
name="devnet",
)
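# modify_present=True lets add_mo update the object in place if it already
# exists, so the script is safe to re-run.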
HANDLE.add_mo(ORG_ORG, modify_present=True)
HANDLE.commit()
SP_TEMPLATE = LsServer(
parent_mo_or_dn='org-root/org-devnet',
name="devcore_template",
type="updating-template"
)
HANDLE.add_mo(SP_TEMPLATE, modify_present=True)
HANDLE.commit()
HANDLE.logout()
| [((15, 9, 19, 1), 'ucsmsdk.ucshandle.UcsHandle', 'UcsHandle', ({(16, 4, 16, 29): '"""sandbox-ucsm1.cisco.com"""', (17, 4, 17, 11): '"""admin"""', (18, 4, 18, 14): '"""password"""'}, {}), "('sandbox-ucsm1.cisco.com', 'admin', 'password')", False, 'from ucsmsdk.ucshandle import UcsHandle\n'), ((23, 10, 26, 1), 'ucsmsdk.mometa.org.OrgOrg.OrgOrg', 'OrgOrg', (), '', False, 'from ucsmsdk.mometa.org.OrgOrg import OrgOrg\n'), ((30, 14, 34, 1), 'ucsmsdk.mometa.ls.LsServer.LsServer', 'LsServer', (), '', False, 'from ucsmsdk.mometa.ls.LsServer import LsServer\n')] |
132nd-etcher/epab | epab/core/config.py | 5226d3a36580f8cc50cf5dcac426adb1350a2c9b | # coding=utf-8
"""
Handles EPAB's config file
"""
import logging
import pathlib
import elib_config
CHANGELOG_DISABLE = elib_config.ConfigValueBool(
'changelog', 'disable', description='Disable changelog building', default=False
)
CHANGELOG_FILE_PATH = elib_config.ConfigValuePath(
'changelog', 'file_path', description='Path to changelog file', default='CHANGELOG.md'
)
CHANGELOG_FILE_PATH.must_be_file()
TEST_RUNNER_OPTIONS = elib_config.ConfigValueString(
'test', 'runner_options', description='Additional options for test run', default=''
)
TEST_DURATION_COUNT = elib_config.ConfigValueInteger(
'test', 'duration_count', description='Amount of "slow" tests to show', default=10
)
TEST_DURATION_COUNT.set_limits(min_=0, max_=50)
TEST_TARGET = elib_config.ConfigValueString(
'test', 'target', description='Target of pytest', default='test'
)
TEST_COVERAGE_FAIL_UNDER = elib_config.ConfigValueInteger(
'test', 'coverage_fail_under', description='Minimal coverage to pass tests', default=20
)
TEST_COVERAGE_FAIL_UNDER.set_limits(min_=0, max_=100)
TEST_PYTEST_TIMEOUT = elib_config.ConfigValueInteger(
'test', 'timeout', description='Timeout in seconds for pytest runner', default=300
)
TEST_PYTEST_TIMEOUT.set_limits(min_=0, max_=3600)
LINT_LINE_LENGTH = elib_config.ConfigValueInteger(
'lint', 'line_length', description='Linter max line width', default=120
)
LINT_LINE_LENGTH.set_limits(min_=0, max_=500)
PACKAGE_NAME = elib_config.ConfigValueString(
'package_name', description='Package name'
)
FREEZE_ENTRY_POINT = elib_config.ConfigValueString(
'freeze', 'entry_point', description='Main entry point for pyinstaller', default=''
)
FREEZE_DATA_FILES = elib_config.ConfigValueList(
'freeze', 'data_files', description='PyInstaller data-files list', element_type=str, default=[]
)
DOC_REPO = elib_config.ConfigValueString(
'doc', 'repo', description='Documentation repository on Github', default=''
)
DOC_FOLDER = elib_config.ConfigValuePath(
'doc', 'folder', description='Local documentation directory', default='./doc'
)
DOC_FOLDER.must_be_dir()
QUIET = elib_config.ConfigValueBool(
'quiet', description='Less console output', default=False
)
VERBOSE = elib_config.ConfigValueBool(
'verbose', description='More console output', default=False
)
TEST_AV_RUNNER_OPTIONS = elib_config.ConfigValueString(
'appveyor', 'test_runner_options', description='Additional command line options for tests run on AV',
default='--long'
)
ARTIFACTS = elib_config.ConfigValueList(
'appveyor', 'artifacts', description='List of artifacts for Appveyor', element_type=str, default=[]
)
FLAKE8_EXCLUDE = elib_config.ConfigValueString(
'lint', 'flake8_exclude', description='List of comma separated files for flake8 to exclude', default=''
)
MYPY_ARGS = elib_config.ConfigValueString(
'lint', 'mypy_args', description='Additional MyPy arguments', default=''
)
QT_RES_SRC = elib_config.ConfigValueString(
'qt', 'res_src', description='Qt resource file (.qrc) location', default=''
)
QT_RES_TGT = elib_config.ConfigValueString(
'qt', 'res_tgt', description='Compiled Qt resource file (.py) target location', default=''
)
UPLOAD_TO_TWINE = elib_config.ConfigValueBool(
'twine', 'upload', description='Upload package to Twine after build',
default=True,
)
MAKE_GRAPH = elib_config.ConfigValueBool(
'graph', 'make',
description='Generate graphs using PyReverse',
default=True,
)
def setup_config(epab_version: str):
"""
Set up elib_config package
:param epab_version: installed version of EPAB as as string
"""
logger = logging.getLogger('EPAB')
logger.debug('setting up config')
elib_config.ELIBConfig.setup(
app_name='EPAB',
app_version=epab_version,
config_file_path='pyproject.toml',
config_sep_str='__',
root_path=['tool', 'epab']
)
elib_config.write_example_config('pyproject.toml.example')
if not pathlib.Path('pyproject.toml').exists():
raise FileNotFoundError('pyproject.toml')
elib_config.validate_config()
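

# Illustrative summary: setup_config('1.2.3') points elib_config at the
# [tool.epab] table of pyproject.toml, writes pyproject.toml.example, and
# raises FileNotFoundError if pyproject.toml is missing.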
| [((11, 20, 13, 1), 'elib_config.ConfigValueBool', (), '', False, 'import elib_config\n'), ((14, 22, 16, 1), 'elib_config.ConfigValuePath', (), '', False, 'import elib_config\n'), ((18, 22, 20, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((21, 22, 23, 1), 'elib_config.ConfigValueInteger', (), '', False, 'import elib_config\n'), ((26, 14, 28, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((30, 27, 32, 1), 'elib_config.ConfigValueInteger', (), '', False, 'import elib_config\n'), ((34, 22, 36, 1), 'elib_config.ConfigValueInteger', (), '', False, 'import elib_config\n'), ((39, 19, 41, 1), 'elib_config.ConfigValueInteger', (), '', False, 'import elib_config\n'), ((44, 15, 46, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((48, 21, 50, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((51, 20, 53, 1), 'elib_config.ConfigValueList', (), '', False, 'import elib_config\n'), ((55, 11, 57, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((58, 13, 60, 1), 'elib_config.ConfigValuePath', (), '', False, 'import elib_config\n'), ((63, 8, 65, 1), 'elib_config.ConfigValueBool', (), '', False, 'import elib_config\n'), ((66, 10, 68, 1), 'elib_config.ConfigValueBool', (), '', False, 'import elib_config\n'), ((70, 25, 73, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((74, 12, 76, 1), 'elib_config.ConfigValueList', (), '', False, 'import elib_config\n'), ((77, 17, 79, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((80, 12, 82, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((83, 13, 85, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((86, 13, 88, 1), 'elib_config.ConfigValueString', (), '', False, 'import elib_config\n'), ((89, 18, 92, 1), 'elib_config.ConfigValueBool', (), '', False, 'import elib_config\n'), ((93, 13, 97, 1), 'elib_config.ConfigValueBool', (), '', False, 'import elib_config\n'), ((106, 13, 106, 38), 'logging.getLogger', ({(106, 31, 106, 37): '"""EPAB"""'}, {}), "('EPAB')", False, 'import logging\n'), ((108, 4, 114, 5), 'elib_config.ELIBConfig.setup', (), '', False, 'import elib_config\n'), ((115, 4, 115, 62), 'elib_config.write_example_config', ({(115, 37, 115, 61): '"""pyproject.toml.example"""'}, {}), "('pyproject.toml.example')", False, 'import elib_config\n'), ((118, 4, 118, 33), 'elib_config.validate_config', ({}, {}), '()', False, 'import elib_config\n'), ((116, 11, 116, 41), 'pathlib.Path', ({(116, 24, 116, 40): '"""pyproject.toml"""'}, {}), "('pyproject.toml')", False, 'import pathlib\n')] |
Creativity-Hub/create_flask_app | create_flask_app.py | 4c4e2c7360c7773f6f5e3d2fd30e310777650f57 | import os
import argparse
def check_for_pkg(pkg):
try:
exec("import " + pkg)
except:
os.system("pip3 install --user " + pkg)
def create_flask_app(app='flask_app', threading=False, wsgiserver=False, unwanted_warnings=False, logging=False, further_logging=False, site_endpoints=None, endpoints=None, request_endpoints=None):
check_for_pkg('flask')
lines = ["from flask import Flask, send_from_directory","import codecs", "import os"]
params = {
'app': app,
'threading': threading,
'wsgiserver': wsgiserver,
'unwanted_warnings': unwanted_warnings,
'logging': logging,
'further_logging': further_logging,
'site_endpoints': site_endpoints,
'endpoints': endpoints,
'request_endpoints': request_endpoints
}
if __name__ == '__main__':
parser = argparse.ArgumentParser()
for param in params.keys():
if 'endpoints' in param:
parser.add_argument('-'+param[0].lower(), '--'+param.lower(), nargs='+', help='', required=False)
else:
parser.add_argument('-'+param[0].lower(), '--'+param.lower(), help='', required=False)
args = vars(parser.parse_args())
for param in args.keys():
		if 'request' in param and args[param] is not None and len(args[param]) % 3 != 0:
print('Request method endpoint format invalid, enter "Method" "Endpoint" "Parameter"')
if param == 'app':
if args[param] != None:
params[param] = args[param]
else:
params[param] = args[param]
index = "<!DOCTYPE html>\n<html>\n<head>\n\t<title>endpoint</title>\n\t<link href='static/style.css' rel='stylesheet'>\n</head>\n<body>\n\n<script src='static/script.js'></script>\n</body>\n</html>"
project = params['app']
if not os.path.exists(project):
os.mkdir(project)
if not os.path.exists(project+'/web'):
os.mkdir(project+'/web')
if not os.path.exists(project+'/static'):
os.mkdir(project+'/static')
os.system('touch '+project+'/static/style.css')
os.system('touch '+project+'/static/script.js')
indexFile = open(project+"/web/index.html","w+")
indexFile.write(index.replace('endpoint', project))
indexFile.close()
f = open(project+'/'+project+".py","w+")
headers = {
'threading': ["", "#Threading", "from threading import Thread"],
'wsgiserver': ["", "#WSGIServer", "from gevent.pywsgi import WSGIServer"],
'unwanted_warnings': ["", "#Disable Warnings", "import warnings", "warnings.filterwarnings('ignore')"],
'logging': ["", "#Logging", "import logging", "", "#Logging configuration set to debug on debug.log file", "logging.basicConfig(filename='debug.log',level=logging.DEBUG)", "logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')"],
'further_logging': ["", "#Disable unneeded dependencies logging", "werkzeugLog = logging.getLogger('werkzeug')", "werkzeugLog.disabled = True", "requestsLog = logging.getLogger('urllib3.connectionpool')", "requestsLog.disabled = True"],
}
for param in headers.keys():
if params[param]:
for line in headers[param]:
lines.append(line)
lines.append("\ndef run():")
if params['wsgiserver']:
check_for_pkg('gevent')
lines.append("\t#WSGIServer")
lines.append("\tWSGIServer(('', 8081), app).serve_forever()")
else:
lines.append("\tapp.run(host='0.0.0.0',port=8081)")
if params['threading']:
for line in ["", "#Thread", "def keep_alive():", "\tt = Thread(target=run)", "\tt.start()"]:
lines.append(line)
for line in ["", "app = Flask(__name__)", "", "@app.route('/')", "def main():", "\t#index.html", "\treturn codecs.open('web/index.html', 'r', 'utf-8').read()", "", "@app.route('/favicon.ico')", "def favicon():", "\treturn send_from_directory(os.path.join(app.root_path, 'static'),'favicon.ico', mimetype='image/vnd.microsoft.icon')"]:
lines.append(line)
site_endpoints = params['site_endpoints']
if site_endpoints is not None:
for ep in site_endpoints:
print('Endpoint: ' + ep)
tp = ["\[email protected]('/endpoint')", "def endpoint():", "\t#endpoint.html", "\treturn codecs.open('web/endpoint.html', 'r', 'utf-8').read()"]
for line in tp:
lines.append(line.replace('endpoint', ep))
epFile = open(project+"/web/endpoint.html".replace('endpoint', ep),"w+")
epFile.write(index.replace('endpoint', ep).replace('style.css', ep+'.css').replace('script.js', ep+'.js'))
epFile.close()
os.system('touch '+project+'/static/'+ep+'.css')
os.system('touch '+project+'/static/'+ep+'.js')
endpoints = params['endpoints']
if endpoints is not None:
for ep in endpoints:
print('Endpoint: ' + ep)
tp = ["\[email protected]('/endpoint')", "def endpoint():", "\t#endpoint.html", "\treturn endpoint_route"]
for line in tp:
lines.append(line.replace('endpoint', ep))
request_endpoints = params['request_endpoints']
	if request_endpoints is not None:
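		# Chunk the flat [method, endpoint, param, ...] list into triples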
request_endpoints = [request_endpoints[i * 3:(i + 1) * 3] for i in range((len(request_endpoints) + 3 - 1) // 3)]
for request_method, ep, request_param in request_endpoints:
print('Endpoint: ' + ep, '\nMethod: ' + request_method, '\nParameter: ' + request_param)
tp = ["\[email protected]('/"+ep+"/<"+request_param+">', methods=['"+request_method+"'])", "def "+ep+"("+request_param+"):", "\t#"+request_method+" method endpoint", "\treturn do_something("+request_param+")"]
for line in tp:
lines.append(line)
lines.append("\nif __name__ == '__main__':")
if params['wsgiserver']:
lines.append("\t#Run server forever")
lines.append("\tkeep_alive()")
else:
lines.append("\t#Run server")
lines.append("\trun()")
for line in lines:
f.write(line+'\n')
f.close()
	print('Created ' + project + ' app successfully.')
for param in params.keys():
if params[param] and param != 'app':
print(param, params[param])
os.system('open '+ project)
if __name__ == '__main__':
create_flask_app()
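
# CLI sketch (flags derive from the first letters of the parameter names
# above; the values shown are illustrative):
#   python create_flask_app.py -a myapp -s about contact -r GET items item_id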
| [((56, 1, 56, 48), 'os.system', 'os.system', ({(56, 11, 56, 47): "('touch ' + project + '/static/style.css')"}, {}), "('touch ' + project + '/static/style.css')", False, 'import os\n'), ((57, 1, 57, 48), 'os.system', 'os.system', ({(57, 11, 57, 47): "('touch ' + project + '/static/script.js')"}, {}), "('touch ' + project + '/static/script.js')", False, 'import os\n'), ((144, 1, 144, 28), 'os.system', 'os.system', ({(144, 11, 144, 27): "('open ' + project)"}, {}), "('open ' + project)", False, 'import os\n'), ((29, 11, 29, 36), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((50, 8, 50, 31), 'os.path.exists', 'os.path.exists', ({(50, 23, 50, 30): 'project'}, {}), '(project)', False, 'import os\n'), ((51, 2, 51, 19), 'os.mkdir', 'os.mkdir', ({(51, 11, 51, 18): 'project'}, {}), '(project)', False, 'import os\n'), ((52, 8, 52, 38), 'os.path.exists', 'os.path.exists', ({(52, 23, 52, 37): "(project + '/web')"}, {}), "(project + '/web')", False, 'import os\n'), ((53, 2, 53, 26), 'os.mkdir', 'os.mkdir', ({(53, 11, 53, 25): "(project + '/web')"}, {}), "(project + '/web')", False, 'import os\n'), ((54, 8, 54, 41), 'os.path.exists', 'os.path.exists', ({(54, 23, 54, 40): "(project + '/static')"}, {}), "(project + '/static')", False, 'import os\n'), ((55, 2, 55, 29), 'os.mkdir', 'os.mkdir', ({(55, 11, 55, 28): "(project + '/static')"}, {}), "(project + '/static')", False, 'import os\n'), ((8, 2, 8, 41), 'os.system', 'os.system', ({(8, 12, 8, 40): "('pip3 install --user ' + pkg)"}, {}), "('pip3 install --user ' + pkg)", False, 'import os\n'), ((106, 3, 106, 51), 'os.system', 'os.system', ({(106, 13, 106, 50): "('touch ' + project + '/static/' + ep + '.css')"}, {}), "('touch ' + project + '/static/' + ep + '.css')", False, 'import os\n'), ((107, 3, 107, 50), 'os.system', 'os.system', ({(107, 13, 107, 49): "('touch ' + project + '/static/' + ep + '.js')"}, {}), "('touch ' + project + '/static/' + ep + '.js')", False, 'import os\n')] |
Flared/flask-sqlalchemy | examples/flaskr/flaskr/__init__.py | e73abd51d957a4436bca6b5eadbf5d63771cf5ef | import os
import click
from flask import Flask
from flask.cli import with_appcontext
from flask_sqlalchemy import SQLAlchemy
__version__ = (1, 0, 0, "dev")
db = SQLAlchemy()
def create_app(test_config=None):
"""Create and configure an instance of the Flask application."""
app = Flask(__name__, instance_relative_config=True)
# some deploy systems set the database url in the environ
db_url = os.environ.get("DATABASE_URL")
if db_url is None:
# default to a sqlite database in the instance folder
db_url = "sqlite:///" + os.path.join(app.instance_path, "flaskr.sqlite")
# ensure the instance folder exists
os.makedirs(app.instance_path, exist_ok=True)
app.config.from_mapping(
# default secret that should be overridden in environ or config
SECRET_KEY=os.environ.get("SECRET_KEY", "dev"),
SQLALCHEMY_DATABASE_URI=db_url,
SQLALCHEMY_TRACK_MODIFICATIONS=False,
)
if test_config is None:
# load the instance config, if it exists, when not testing
app.config.from_pyfile("config.py", silent=True)
else:
# load the test config if passed in
app.config.update(test_config)
# initialize Flask-SQLAlchemy and the init-db command
db.init_app(app)
app.cli.add_command(init_db_command)
# apply the blueprints to the app
from flaskr import auth, blog
app.register_blueprint(auth.bp)
app.register_blueprint(blog.bp)
# make "index" point at "/", which is handled by "blog.index"
app.add_url_rule("/", endpoint="index")
return app
def init_db():
db.drop_all()
db.create_all()
@click.command("init-db")
@with_appcontext
def init_db_command():
"""Clear existing data and create new tables."""
init_db()
click.echo("Initialized the database.")
| [((10, 5, 10, 17), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ({}, {}), '()', False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((61, 1, 61, 25), 'click.command', 'click.command', ({(61, 15, 61, 24): '"""init-db"""'}, {}), "('init-db')", False, 'import click\n'), ((15, 10, 15, 56), 'flask.Flask', 'Flask', (), '', False, 'from flask import Flask\n'), ((18, 13, 18, 43), 'os.environ.get', 'os.environ.get', ({(18, 28, 18, 42): '"""DATABASE_URL"""'}, {}), "('DATABASE_URL')", False, 'import os\n'), ((66, 4, 66, 43), 'click.echo', 'click.echo', ({(66, 15, 66, 42): '"""Initialized the database."""'}, {}), "('Initialized the database.')", False, 'import click\n'), ((24, 8, 24, 53), 'os.makedirs', 'os.makedirs', (), '', False, 'import os\n'), ((22, 32, 22, 80), 'os.path.join', 'os.path.join', ({(22, 45, 22, 62): 'app.instance_path', (22, 64, 22, 79): '"""flaskr.sqlite"""'}, {}), "(app.instance_path, 'flaskr.sqlite')", False, 'import os\n'), ((28, 19, 28, 54), 'os.environ.get', 'os.environ.get', ({(28, 34, 28, 46): '"""SECRET_KEY"""', (28, 48, 28, 53): '"""dev"""'}, {}), "('SECRET_KEY', 'dev')", False, 'import os\n')] |
mcfx/trivm | simulator/cc.py | 5b77ea157c562cfbfe87f7e7d256fb9702f8ceec | import os, sys
fn = sys.argv[1]
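
# Two-stage build: compile the source to temporary assembly, then (only on
# success) assemble it into an output named by stripping the two-character
# extension from the input filename.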
if os.system('python compile.py %s __tmp.S' % fn) == 0:
os.system('python asm.py __tmp.S %s' % fn[:-2])
| [((5, 3, 5, 49), 'os.system', 'os.system', ({(5, 13, 5, 48): "('python compile.py %s __tmp.S' % fn)"}, {}), "('python compile.py %s __tmp.S' % fn)", False, 'import os, sys\n'), ((6, 4, 6, 51), 'os.system', 'os.system', ({(6, 14, 6, 50): "('python asm.py __tmp.S %s' % fn[:-2])"}, {}), "('python asm.py __tmp.S %s' % fn[:-2])", False, 'import os, sys\n')] |
ariadnepinheiro/Disease_Simulator | ad2/Actor.py | e875036f4b0485575327463a17f4282487350cb3 | #!/usr/bin/env python
# coding: UTF-8
#
# @package Actor
# @author Ariadne Pinheiro
# @date 26/08/2020
#
# Actor class, which is the base class for Disease objects.
#
##
class Actor:
# Holds the value of the next "free" id.
__ID = 0
##
# Construct a new Actor object.
# - Sets the initial values of its member variables.
# - Sets the unique ID for the object and initializes the reference to the World
# object to which this Actor object belongs to null.
# - The ID of the first Actor object is 0.
# - The ID gets incremented by one each time a new Actor object is created.
# - Sets the iteration counter to zero and initialize the location of the
# object to cell (0,0).
#
def __init__(self):
# X coordinate of this actor.
self.__locX = 0
# Y coordinate of this actor.
self.__locY = 0
# World this actor belongs to.
self.__world = None
# Unique identifier for this actor.
self.__actorID = Actor.__ID
Actor.__ID += 1
# Iteration counter.
self.__itCounter = 0
##
# Used for testing
# @return ActorID
#
def getID(self):
return self.__actorID
##
# Used for testing
# @return number of iterations
#
def Iteration(self):
return self.__itCounter
##
# Prints on screen in the format "Iteration <ID>: Actor <Actor ID>".
#
# The @f$<ID>@f$ is replaced by the current iteration number. @f$<Actor ID>@f$ is
# replaced by the unique ID of the Actor object that performs the act(self)
# method.
#
# For instance, the actor with ID 1 shows the following result on
# the output screen after its act(self) method has been called twice.
# <PRE>
# Iteration 0: Actor 1
# Iteration 1: Actor 1
# </PRE>
#
def act(self):
print("Iteration {}: Actor {}".format(self.__itCounter, self.__actorID))
self.__itCounter += 1
##
# Sets the cell coordinates of this object.
#
# @param x the column.
# @param y the row.
#
# @throws ValueError when x < 0 or x >= world width,
# @throws ValueError when y < 0 or y >= world height,
# @throws RuntimeError when the world is null.
#
def setLocation(self, x, y):
if self.__world is None:
raise RuntimeError
if (0 <= x < self.__world.getWidth()) and (0 <= y < self.__world.getHeight()):
self.__locX = x
self.__locY = y
else:
raise ValueError
##
# Sets the world this actor is into.
#
# @param world Reference to the World object this Actor object is added.
# @throws RuntimeError when world is null.
#
def addedToWorld(self, world):
if world is None:
raise RuntimeError
self.__world = world
##
# Gets the world this object in into.
#
# @return the world this object belongs to
#
def getWorld(self):
return self.__world
##
# Gets the X coordinate of the cell this actor object is into.
#
# @return the x coordinate of this Actor object.
#
def getX(self):
return self.__locX
##
# Gets the Y coordinate of the cell this actor object is into.
#
# @return the y coordinate of this Actor object.
#
def getY(self):
return self.__locY
##
# Return a string with this actor ID and position.
#
def __str__(self):
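        # Compatibility shim: the bytes-based branch succeeds under Python 2,
        # while Python 3 raises TypeError on the str concatenation and falls
        # through to the text-only branch below.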
try:
st = "ID = %d "u'\u2192 '.encode('utf-8') % self.getID()
st += 'position = (%d, %d)\n' % (self.getX(), self.getY())
except TypeError:
st = "ID = %d "u'\u2192 ' % self.getID()
st += 'position = (%d, %d)\n' % (self.getX(), self.getY())
return st
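

# Minimal smoke test (illustrative; the IDs shown assume these are the first
# Actor objects created in the process):
if __name__ == "__main__":
    a, b = Actor(), Actor()
    a.act()   # Iteration 0: Actor 0
    a.act()   # Iteration 1: Actor 0
    b.act()   # Iteration 0: Actor 1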
| [] |
smukk9/Python | conversions/decimal_to_binary.py | 5f4da5d616926dbe77ece828986b8d19c7d65cb5 | """Convert a Decimal Number to a Binary Number."""
def decimal_to_binary(num: int) -> str:
"""
Convert a Integer Decimal Number to a Binary Number as str.
>>> decimal_to_binary(0)
'0b0'
>>> decimal_to_binary(2)
'0b10'
>>> decimal_to_binary(7)
'0b111'
>>> decimal_to_binary(35)
'0b100011'
>>> # negatives work too
>>> decimal_to_binary(-2)
'-0b10'
>>> # other floats will error
>>> decimal_to_binary(16.16) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
TypeError: 'float' object cannot be interpreted as an integer
>>> # strings will error as well
>>> decimal_to_binary('0xfffff') # doctest: +ELLIPSIS
Traceback (most recent call last):
...
TypeError: 'str' object cannot be interpreted as an integer
"""
if type(num) == float:
raise TypeError("'float' object cannot be interpreted as an integer")
if type(num) == str:
raise TypeError("'str' object cannot be interpreted as an integer")
if num == 0:
return "0b0"
negative = False
if num < 0:
negative = True
num = -num
binary = []
while num > 0:
binary.insert(0, num % 2)
num >>= 1
if negative:
return "-0b" + "".join(str(e) for e in binary)
return "0b" + "".join(str(e) for e in binary)
if __name__ == "__main__":
import doctest
doctest.testmod()
| [((59, 4, 59, 21), 'doctest.testmod', 'doctest.testmod', ({}, {}), '()', False, 'import doctest\n')] |
xausky/hand-network | src/HandNetwork.py | e885003c5bb9157cd06dc3ea3aabddbb7162a0ab | #!/usr/bin/python3
#-*- coding: utf-8 -*-
import urllib.parse
import json
import base64
import requests
import logging
class Network():
LOGIN_URL = 'http://192.168.211.101/portal/pws?t=li'
BEAT_URL = 'http://192.168.211.101/portal/page/doHeartBeat.jsp'
    COMMON_HEADERS = {
'Accept-Language': 'en-US',
'Accept': 'text/html'
}
def __init__(self, username, password):
b64Password = base64.b64encode(bytes(password,'utf8'))
self.data = {'userName': username, 'userPwd': b64Password}
def login(self):
logging.info('login:%s'%(self.data))
response = requests.post(Network.LOGIN_URL, data=self.data,
            headers=Network.COMMON_HEADERS, timeout=3)
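        # Appending '==' pads a possibly-unpadded base64 payload; the decoder
        # tolerates surplus padding, so decoding succeeds either way.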
responseText = base64.b64decode(response.text + '==')
responseJson = urllib.parse.unquote(responseText.decode('utf8'))
jsonDict = json.loads(responseJson)
heartBeatCyc = jsonDict.get('heartBeatCyc')
if heartBeatCyc == None:
raise BaseException(responseJson)
        logging.info('login success: %s'%(responseJson))
self.heartBeatCyc = int(heartBeatCyc)
self.serialNo = jsonDict.get('serialNo')
return self.heartBeatCyc
def beat(self):
response = requests.post(Network.BEAT_URL, data={'serialNo': self.serialNo},
            headers=Network.COMMON_HEADERS, timeout=3)
        if response.text.find('v_failedTimes') == -1:
raise BaseException(response.text)
| [((21, 8, 21, 44), 'logging.info', 'logging.info', ({(21, 21, 21, 43): "('login:%s' % self.data)"}, {}), "('login:%s' % self.data)", False, 'import logging\n'), ((22, 19, 23, 52), 'requests.post', 'requests.post', (), '', False, 'import requests\n'), ((24, 23, 24, 61), 'base64.b64decode', 'base64.b64decode', ({(24, 40, 24, 60): "response.text + '=='"}, {}), "(response.text + '==')", False, 'import base64\n'), ((26, 19, 26, 43), 'json.loads', 'json.loads', ({(26, 30, 26, 42): 'responseJson'}, {}), '(responseJson)', False, 'import json\n'), ((30, 8, 30, 56), 'logging.info', 'logging.info', ({(30, 21, 30, 55): "('login seccuss: %s' % responseJson)"}, {}), "('login seccuss: %s' % responseJson)", False, 'import logging\n'), ((36, 19, 37, 52), 'requests.post', 'requests.post', (), '', False, 'import requests\n')] |
Fongeme/algorithms-keeper | algorithms_keeper/parser/rules/use_fstring.py | ea80d9342b4d2efd246a6bc409889ed780accf08 | import libcst as cst
import libcst.matchers as m
from fixit import CstLintRule
from fixit import InvalidTestCase as Invalid
from fixit import ValidTestCase as Valid
class UseFstringRule(CstLintRule):
MESSAGE: str = (
"As mentioned in the [Contributing Guidelines]"
+ "(https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md), "
+ "please do not use printf style formatting or `str.format()`. "
+ "Use [f-string](https://realpython.com/python-f-strings/) instead to be "
+ "more readable and efficient."
)
VALID = [
Valid("assigned='string'; f'testing {assigned}'"),
Valid("'simple string'"),
Valid("'concatenated' + 'string'"),
Valid("b'bytes %s' % 'string'.encode('utf-8')"),
]
INVALID = [
Invalid("'hello, {name}'.format(name='you')"),
Invalid("'hello, %s' % 'you'"),
Invalid("r'raw string value=%s' % val"),
]
def visit_Call(self, node: cst.Call) -> None:
if m.matches(
node,
m.Call(
func=m.Attribute(value=m.SimpleString(), attr=m.Name(value="format"))
),
):
self.report(node)
def visit_BinaryOperation(self, node: cst.BinaryOperation) -> None:
if (
m.matches(
node, m.BinaryOperation(left=m.SimpleString(), operator=m.Modulo())
)
# SimpleString can be bytes and fstring don't support bytes.
# https://www.python.org/dev/peps/pep-0498/#no-binary-f-strings
and isinstance(
cst.ensure_type(node.left, cst.SimpleString).evaluated_value, str
)
):
self.report(node)
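# For reference, this rule only reports; the manual rewrite it suggests turns
# the INVALID cases above into f-strings, e.g.:
#   "hello, {name}".format(name="you")  ->  name = "you"; f"hello, {name}"
#   "hello, %s" % "you"                 ->  f"hello, {'you'}"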
| [((19, 8, 19, 57), 'fixit.ValidTestCase', 'Valid', ({(19, 14, 19, 56): '"""assigned=\'string\'; f\'testing {assigned}\'"""'}, {}), '("assigned=\'string\'; f\'testing {assigned}\'")', True, 'from fixit import ValidTestCase as Valid\n'), ((20, 8, 20, 32), 'fixit.ValidTestCase', 'Valid', ({(20, 14, 20, 31): '"""\'simple string\'"""'}, {}), '("\'simple string\'")', True, 'from fixit import ValidTestCase as Valid\n'), ((21, 8, 21, 42), 'fixit.ValidTestCase', 'Valid', ({(21, 14, 21, 41): '"""\'concatenated\' + \'string\'"""'}, {}), '("\'concatenated\' + \'string\'")', True, 'from fixit import ValidTestCase as Valid\n'), ((22, 8, 22, 55), 'fixit.ValidTestCase', 'Valid', ({(22, 14, 22, 54): '"""b\'bytes %s\' % \'string\'.encode(\'utf-8\')"""'}, {}), '("b\'bytes %s\' % \'string\'.encode(\'utf-8\')")', True, 'from fixit import ValidTestCase as Valid\n'), ((26, 8, 26, 53), 'fixit.InvalidTestCase', 'Invalid', ({(26, 16, 26, 52): '"""\'hello, {name}\'.format(name=\'you\')"""'}, {}), '("\'hello, {name}\'.format(name=\'you\')")', True, 'from fixit import InvalidTestCase as Invalid\n'), ((27, 8, 27, 38), 'fixit.InvalidTestCase', 'Invalid', ({(27, 16, 27, 37): '"""\'hello, %s\' % \'you\'"""'}, {}), '("\'hello, %s\' % \'you\'")', True, 'from fixit import InvalidTestCase as Invalid\n'), ((28, 8, 28, 47), 'fixit.InvalidTestCase', 'Invalid', ({(28, 16, 28, 46): '"""r\'raw string value=%s\' % val"""'}, {}), '("r\'raw string value=%s\' % val")', True, 'from fixit import InvalidTestCase as Invalid\n'), ((48, 16, 48, 60), 'libcst.ensure_type', 'cst.ensure_type', ({(48, 32, 48, 41): 'node.left', (48, 43, 48, 59): 'cst.SimpleString'}, {}), '(node.left, cst.SimpleString)', True, 'import libcst as cst\n'), ((43, 45, 43, 61), 'libcst.matchers.SimpleString', 'm.SimpleString', ({}, {}), '()', True, 'import libcst.matchers as m\n'), ((43, 72, 43, 82), 'libcst.matchers.Modulo', 'm.Modulo', ({}, {}), '()', True, 'import libcst.matchers as m\n'), ((35, 39, 35, 55), 'libcst.matchers.SimpleString', 'm.SimpleString', ({}, {}), '()', True, 'import libcst.matchers as m\n'), ((35, 62, 35, 84), 'libcst.matchers.Name', 'm.Name', (), '', True, 'import libcst.matchers as m\n')] |
akashnd/bert-multitask-learning | bert_multitask_learning/model_fn.py | aee5be006ef6a3feadf0c751a6f9b42c24c3fd21 | # AUTOGENERATED! DO NOT EDIT! File to edit: source_nbs/13_model_fn.ipynb (unless otherwise specified).
__all__ = ['variable_summaries', 'filter_loss', 'BertMultiTaskBody', 'BertMultiTaskTop', 'BertMultiTask']
# Cell
from typing import Dict, Tuple
from inspect import signature
import tensorflow as tf
import transformers
from .modeling import MultiModalBertModel
from .params import BaseParams
from .top import (Classification, MultiLabelClassification, PreTrain,
Seq2Seq, SequenceLabel, MaskLM)
from .utils import get_embedding_table_from_model, get_transformer_main_model
def variable_summaries(var, name):
"""Attach a lot of summaries to a Tensor (for TensorBoard visualization)."""
with tf.compat.v1.name_scope(name):
mean = tf.reduce_mean(input_tensor=var)
tf.compat.v1.summary.scalar('mean', mean)
with tf.compat.v1.name_scope('stddev'):
stddev = tf.sqrt(tf.reduce_mean(
input_tensor=tf.square(var - mean)))
tf.compat.v1.summary.scalar('stddev', stddev)
tf.compat.v1.summary.scalar('max', tf.reduce_max(input_tensor=var))
tf.compat.v1.summary.scalar('min', tf.reduce_min(input_tensor=var))
tf.compat.v1.summary.histogram('histogram', var)
@tf.function
def filter_loss(loss, features, problem):
if tf.reduce_mean(input_tensor=features['%s_loss_multiplier' % problem]) == 0:
return_loss = 0.0
else:
return_loss = loss
return return_loss
class BertMultiTaskBody(tf.keras.Model):
"""Model to extract bert features and dispatch corresponding rows to each problem_chunk.
for each problem chunk, we extract corresponding features
and hidden features for that problem. The reason behind this
is to save computation for downstream processing.
For example, we have a batch of two instances and they're from
problem a and b respectively:
Input:
[{'input_ids': [1,2,3], 'a_loss_multiplier': 1, 'b_loss_multiplier': 0},
{'input_ids': [4,5,6], 'a_loss_multiplier': 0, 'b_loss_multiplier': 1}]
Output:
{
'a': {'input_ids': [1,2,3], 'a_loss_multiplier': 1, 'b_loss_multiplier': 0}
'b': {'input_ids': [4,5,6], 'a_loss_multiplier': 0, 'b_loss_multiplier': 1}
}
"""
def __init__(self, params: BaseParams, name='BertMultiTaskBody'):
super(BertMultiTaskBody, self).__init__(name=name)
self.params = params
self.bert = MultiModalBertModel(params=self.params)
if self.params.custom_pooled_hidden_size:
self.custom_pooled_layer = tf.keras.layers.Dense(
self.params.custom_pooled_hidden_size, activation=tf.keras.activations.selu)
else:
self.custom_pooled_layer = None
@tf.function
def get_features_for_problem(self, features, hidden_feature, problem, mode):
# get features with ind == 1
if mode == tf.estimator.ModeKeys.PREDICT:
feature_this_round = features
hidden_feature_this_round = hidden_feature
else:
multiplier_name = '%s_loss_multiplier' % problem
record_ind = tf.where(tf.cast(
tf.squeeze(features[multiplier_name]), tf.bool))
hidden_feature_this_round = {}
for hidden_feature_name in hidden_feature:
if hidden_feature_name != 'embed_table':
hidden_feature_this_round[hidden_feature_name] = tf.squeeze(tf.gather(
hidden_feature[hidden_feature_name], record_ind, axis=0
), axis=1)
hidden_feature_this_round[hidden_feature_name].set_shape(
hidden_feature[hidden_feature_name].shape.as_list())
else:
hidden_feature_this_round[hidden_feature_name] = hidden_feature[hidden_feature_name]
feature_this_round = {}
for features_name in features:
feature_this_round[features_name] = tf.gather_nd(
features[features_name],
record_ind)
return feature_this_round, hidden_feature_this_round
def call(self, inputs: Dict[str, tf.Tensor],
mode: str) -> Tuple[Dict[str, Dict[str, tf.Tensor]], Dict[str, Dict[str, tf.Tensor]]]:
_ = self.bert(inputs, mode == tf.estimator.ModeKeys.TRAIN)
# extract bert hidden features
inputs['model_input_mask'] = self.bert.get_input_mask()
inputs['model_token_type_ids'] = self.bert.get_token_type_ids()
hidden_feature = {}
for logit_type in ['seq', 'pooled', 'all', 'embed', 'embed_table']:
if logit_type == 'seq':
# tensor, [batch_size, seq_length, hidden_size]
hidden_feature[logit_type] = self.bert.get_sequence_output()
elif logit_type == 'pooled':
# tensor, [batch_size, hidden_size]
hidden_feature[logit_type] = self.bert.get_pooled_output()
if self.custom_pooled_layer:
hidden_feature[logit_type] = self.custom_pooled_layer(
hidden_feature[logit_type])
elif logit_type == 'all':
# list, num_hidden_layers * [batch_size, seq_length, hidden_size]
hidden_feature[logit_type] = self.bert.get_all_encoder_layers()
elif logit_type == 'embed':
# for res connection
hidden_feature[logit_type] = self.bert.get_embedding_output()
elif logit_type == 'embed_table':
hidden_feature[logit_type] = self.bert.get_embedding_table()
# for each problem chunk, we extract corresponding features
# and hidden features for that problem. The reason behind this
# is to save computation for downstream processing.
# For example, we have a batch of two instances and they're from
# problem a and b respectively:
# Input:
# [{'input_ids': [1,2,3], 'a_loss_multiplier': 1, 'b_loss_multiplier': 0},
# {'input_ids': [4,5,6], 'a_loss_multiplier': 0, 'b_loss_multiplier': 1}]
# Output:
# {
# 'a': {'input_ids': [1,2,3], 'a_loss_multiplier': 1, 'b_loss_multiplier': 0}
# 'b': {'input_ids': [4,5,6], 'a_loss_multiplier': 0, 'b_loss_multiplier': 1}
# }
features = inputs
return_feature = {}
return_hidden_feature = {}
for problem_dict in self.params.run_problem_list:
for problem in problem_dict:
if self.params.task_transformer:
# hidden_feature = task_tranformer_hidden_feature[problem]
raise NotImplementedError
if len(self.params.run_problem_list) > 1:
feature_this_round, hidden_feature_this_round = self.get_features_for_problem(
features, hidden_feature, problem, mode)
else:
feature_this_round, hidden_feature_this_round = features, hidden_feature
if self.params.label_transfer and self.params.grid_transformer:
raise ValueError(
'Label Transfer and grid transformer cannot be enabled in the same time.'
)
if self.params.grid_transformer:
raise NotImplementedError
return_hidden_feature[problem] = hidden_feature_this_round
return_feature[problem] = feature_this_round
return return_feature, return_hidden_feature
# Cell
class BertMultiTaskTop(tf.keras.Model):
"""Model to create top layer, aka classification layer, for each problem.
"""
def __init__(self, params: BaseParams, name='BertMultiTaskTop', input_embeddings: tf.Tensor = None):
super(BertMultiTaskTop, self).__init__(name=name)
self.params = params
problem_type_layer = {
'seq_tag': SequenceLabel,
'cls': Classification,
'seq2seq_tag': Seq2Seq,
'seq2seq_text': Seq2Seq,
'multi_cls': MultiLabelClassification,
'pretrain': PreTrain,
'masklm': MaskLM
}
problem_type_layer.update(self.params.top_layer)
self.top_layer_dict = {}
for problem_dict in self.params.run_problem_list:
for problem in problem_dict:
problem_type = self.params.problem_type[problem]
# some layers has different signatures, assign inputs accordingly
layer_signature_name = signature(
problem_type_layer[problem_type].__init__).parameters.keys()
inputs_kwargs = {
'params': self.params,
'problem_name': problem
}
for signature_name in layer_signature_name:
if signature_name == 'input_embeddings':
inputs_kwargs.update(
{signature_name: input_embeddings})
self.top_layer_dict[problem] = problem_type_layer[problem_type](
**inputs_kwargs)
def call(self,
inputs: Tuple[Dict[str, Dict[str, tf.Tensor]], Dict[str, Dict[str, tf.Tensor]]],
mode: str) -> Dict[str, tf.Tensor]:
features, hidden_feature = inputs
return_dict = {}
for problem_dict in self.params.run_problem_list:
for problem in problem_dict:
feature_this_round = features[problem]
hidden_feature_this_round = hidden_feature[problem]
problem_type = self.params.problem_type[problem]
# if pretrain, return pretrain logit
if problem_type == 'pretrain':
pretrain = self.top_layer_dict[problem]
return_dict[problem] = pretrain(
(feature_this_round, hidden_feature_this_round), mode)
return return_dict
if self.params.label_transfer and self.params.grid_transformer:
raise ValueError(
'Label Transfer and grid transformer cannot be enabled in the same time.'
)
with tf.name_scope(problem):
layer = self.top_layer_dict[problem]
return_dict[problem] = layer(
(feature_this_round, hidden_feature_this_round), mode)
if self.params.augument_mask_lm and mode == tf.estimator.ModeKeys.TRAIN:
raise NotImplementedError
# try:
# mask_lm_top = MaskLM(self.params)
# return_dict['augument_mask_lm'] = \
# mask_lm_top(features,
# hidden_feature, mode, 'dummy')
# except ValueError:
# pass
return return_dict
# Cell
class BertMultiTask(tf.keras.Model):
def __init__(self, params: BaseParams, name='BertMultiTask') -> None:
super(BertMultiTask, self).__init__(name=name)
self.params = params
# initialize body model, aka transformers
self.body = BertMultiTaskBody(params=self.params)
# mlm might need word embedding from bert
# build sub-model
_ = get_embedding_table_from_model(self.body.bert.bert_model)
main_model = get_transformer_main_model(self.body.bert.bert_model)
# input_embeddings = self.body.bert.bert_model.bert.embeddings
input_embeddings = main_model.embeddings
self.top = BertMultiTaskTop(
params=self.params, input_embeddings=input_embeddings)
def call(self, inputs, mode=tf.estimator.ModeKeys.TRAIN):
feature_per_problem, hidden_feature_per_problem = self.body(
inputs, mode)
pred_per_problem = self.top(
(feature_per_problem, hidden_feature_per_problem), mode)
return pred_per_problem
def compile(self):
super(BertMultiTask, self).compile()
logger = tf.get_logger()
logger.info('Initial lr: {}'.format(self.params.lr))
logger.info('Train steps: {}'.format(self.params.train_steps))
logger.info('Warmup steps: {}'.format(self.params.num_warmup_steps))
self.optimizer, self.lr_scheduler = transformers.optimization_tf.create_optimizer(
init_lr=self.params.lr,
num_train_steps=self.params.train_steps,
num_warmup_steps=self.params.num_warmup_steps,
weight_decay_rate=0.01
)
self.mean_acc = tf.keras.metrics.Mean(name='mean_acc')
def train_step(self, data):
with tf.GradientTape() as tape:
# Forward pass
_ = self(data, mode=tf.estimator.ModeKeys.TRAIN)
# gather losses from all problems
loss_dict = {'{}_loss'.format(problem_name): tf.reduce_sum(top_layer.losses) for problem_name,
top_layer in self.top.top_layer_dict.items()}
# metric_dict = {'{}_metric'.format(problem_name): tf.reduce_mean(top_layer.metrics) for problem_name,
# top_layer in self.top.top_layer_dict.items()}
metric_dict = {m.name: m.result() for m in self.metrics}
# Compute gradients
trainable_vars = self.trainable_variables
gradients = tape.gradient(self.losses, trainable_vars)
# Update weights
self.optimizer.apply_gradients(zip(gradients, trainable_vars))
self.mean_acc.update_state(
[v for n, v in metric_dict.items() if n != 'mean_acc'])
return_dict = metric_dict
return_dict.update(loss_dict)
return_dict[self.mean_acc.name] = self.mean_acc.result()
# Return a dict mapping metric names to current value.
# Note that it will include the loss (tracked in self.metrics).
return return_dict
def test_step(self, data):
"""The logic for one evaluation step.
This method can be overridden to support custom evaluation logic.
This method is called by `Model.make_test_function`.
        This function should contain the mathematical logic for one step of
evaluation.
This typically includes the forward pass, loss calculation, and metrics
updates.
Configuration details for *how* this logic is run (e.g. `tf.function` and
`tf.distribute.Strategy` settings), should be left to
`Model.make_test_function`, which can also be overridden.
Arguments:
data: A nested structure of `Tensor`s.
Returns:
A `dict` containing values that will be passed to
`tf.keras.callbacks.CallbackList.on_train_batch_end`. Typically, the
values of the `Model`'s metrics are returned.
"""
y_pred = self(data, mode=tf.estimator.ModeKeys.EVAL)
# Updates stateful loss metrics.
self.compiled_loss(
None, y_pred, None, regularization_losses=self.losses)
self.compiled_metrics.update_state(None, y_pred, None)
# get metrics to calculate mean
m_list = []
for metric in self.metrics:
if 'mean_acc' in metric.name:
continue
if 'acc' in metric.name:
m_list.append(metric.result())
if 'f1' in metric.name:
m_list.append(metric.result())
self.mean_acc.update_state(
m_list)
return {m.name: m.result() for m in self.metrics}
def predict_step(self, data):
return self(data, mode=tf.estimator.ModeKeys.PREDICT)
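if __name__ == "__main__":
    # Toy sketch (not part of the original module) of the row-dispatch idea
    # documented in BertMultiTaskBody: rows whose `<problem>_loss_multiplier`
    # equals 1 are gathered out of the batch for that problem's top layer.
    features = {
        'input_ids': tf.constant([[1, 2, 3], [4, 5, 6]]),
        'a_loss_multiplier': tf.constant([[1], [0]]),
    }
    record_ind = tf.where(
        tf.cast(tf.squeeze(features['a_loss_multiplier']), tf.bool))
    print(tf.gather_nd(features['input_ids'], record_ind).numpy())  # [[1 2 3]]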
| [((20, 9, 20, 38), 'tensorflow.compat.v1.name_scope', 'tf.compat.v1.name_scope', ({(20, 33, 20, 37): 'name'}, {}), '(name)', True, 'import tensorflow as tf\n'), ((21, 15, 21, 47), 'tensorflow.reduce_mean', 'tf.reduce_mean', (), '', True, 'import tensorflow as tf\n'), ((22, 8, 22, 49), 'tensorflow.compat.v1.summary.scalar', 'tf.compat.v1.summary.scalar', ({(22, 36, 22, 42): '"""mean"""', (22, 44, 22, 48): 'mean'}, {}), "('mean', mean)", True, 'import tensorflow as tf\n'), ((26, 8, 26, 53), 'tensorflow.compat.v1.summary.scalar', 'tf.compat.v1.summary.scalar', ({(26, 36, 26, 44): '"""stddev"""', (26, 46, 26, 52): 'stddev'}, {}), "('stddev', stddev)", True, 'import tensorflow as tf\n'), ((29, 8, 29, 56), 'tensorflow.compat.v1.summary.histogram', 'tf.compat.v1.summary.histogram', ({(29, 39, 29, 50): '"""histogram"""', (29, 52, 29, 55): 'var'}, {}), "('histogram', var)", True, 'import tensorflow as tf\n'), ((35, 7, 35, 76), 'tensorflow.reduce_mean', 'tf.reduce_mean', (), '', True, 'import tensorflow as tf\n'), ((280, 17, 280, 32), 'tensorflow.get_logger', 'tf.get_logger', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((284, 44, 289, 9), 'transformers.optimization_tf.create_optimizer', 'transformers.optimization_tf.create_optimizer', (), '', False, 'import transformers\n'), ((290, 24, 290, 62), 'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', (), '', True, 'import tensorflow as tf\n'), ((23, 13, 23, 46), 'tensorflow.compat.v1.name_scope', 'tf.compat.v1.name_scope', ({(23, 37, 23, 45): '"""stddev"""'}, {}), "('stddev')", True, 'import tensorflow as tf\n'), ((27, 43, 27, 74), 'tensorflow.reduce_max', 'tf.reduce_max', (), '', True, 'import tensorflow as tf\n'), ((28, 43, 28, 74), 'tensorflow.reduce_min', 'tf.reduce_min', (), '', True, 'import tensorflow as tf\n'), ((67, 39, 68, 92), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (), '', True, 'import tensorflow as tf\n'), ((294, 13, 294, 30), 'tensorflow.GradientTape', 'tf.GradientTape', ({}, {}), '()', True, 'import tensorflow as tf\n'), ((97, 52, 99, 31), 'tensorflow.gather_nd', 'tf.gather_nd', ({(98, 20, 98, 43): 'features[features_name]', (99, 20, 99, 30): 'record_ind'}, {}), '(features[features_name], record_ind)', True, 'import tensorflow as tf\n'), ((298, 57, 298, 88), 'tensorflow.reduce_sum', 'tf.reduce_sum', ({(298, 71, 298, 87): 'top_layer.losses'}, {}), '(top_layer.losses)', True, 'import tensorflow as tf\n'), ((82, 16, 82, 53), 'tensorflow.squeeze', 'tf.squeeze', ({(82, 27, 82, 52): 'features[multiplier_name]'}, {}), '(features[multiplier_name])', True, 'import tensorflow as tf\n'), ((235, 21, 235, 43), 'tensorflow.name_scope', 'tf.name_scope', ({(235, 35, 235, 42): 'problem'}, {}), '(problem)', True, 'import tensorflow as tf\n'), ((25, 29, 25, 50), 'tensorflow.square', 'tf.square', ({(25, 39, 25, 49): 'var - mean'}, {}), '(var - mean)', True, 'import tensorflow as tf\n'), ((87, 80, 89, 21), 'tensorflow.gather', 'tf.gather', (), '', True, 'import tensorflow as tf\n'), ((197, 39, 198, 62), 'inspect.signature', 'signature', ({(198, 20, 198, 61): 'problem_type_layer[problem_type].__init__'}, {}), '(problem_type_layer[problem_type].__init__)', False, 'from inspect import signature\n')] |
Jennyx18/SiMon | SiMon/visualization.py | 522432ff708954ac37050609cfd6f42dd96467e4 | import os
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import math
from datetime import datetime
from matplotlib.colors import ListedColormap, BoundaryNorm
from matplotlib.collections import LineCollection
from matplotlib import cm
from SiMon.simulation import Simulation
from SiMon.callback import Callback
from matplotlib.ticker import MaxNLocator
import time
class VisualizationCallback(Callback):
def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)
def run(self):
self.plot_progress()
def plot_progress(self):
"""
        Creates a graph showing the progress of the simulations. The number
        of simulations is taken from the container supplied in self.kwargs.
        :return:
"""
if 'container' in self.kwargs:
sim_inst_dict = self.kwargs['container'].sim_inst_dict
else:
return
num_sim = len(sim_inst_dict)
status = np.array([])
progresses = np.array([])
sim_idx = np.array([])
for i, sim_name in enumerate(sim_inst_dict):
sim = sim_inst_dict[sim_name]
sim_id = sim.id
if sim_id == 0:
continue # skip the root simulation instance, which is only a place holder
# only plot level=1 simulations
if sim.level > 1:
continue
s = sim.sim_get_status()
if sim.t_max > 0:
p = sim.t / sim.t_max
else:
p = 0.0
status = np.append(s, status)
progresses = np.append(p, progresses)
sim_idx = np.append(sim_id, sim_idx)
# Checks if num_sim has a square
if int(math.sqrt(num_sim) + 0.5) ** 2 == num_sim:
number = int(math.sqrt(num_sim))
y_num = num_sim // number
# If not square, find divisible number to get rectangle
else:
number = int(math.sqrt(num_sim))
while num_sim % number != 0:
number = number - 1
y_num = num_sim // number # Y-axis limit
# If prime number
if number == 1:
number = int(math.sqrt(num_sim)) + 1 # Make sure graph fits all num_sim
y_num = number
# 'Removes' extra white line if graph is too big
if (y_num * number) > num_sim and ((y_num - 1) * number) >= num_sim:
y_num = y_num - 1
x_sim = sim_idx % number
y_sim = sim_idx // number
plt.figure(1, figsize=(12, 12))
ax = plt.gca() # get the axis
ax.set_ylim(ax.get_ylim()[::-1]) # invert the axis
ax.xaxis.tick_top() # and move the X-Axis
ax.yaxis.set_ticks(np.arange(-0.5, y_num)) # set y-ticks
ax.yaxis.set_major_locator(MaxNLocator(integer=True)) # set to integers
ax.yaxis.tick_left() # remove right y-Ticks
symbols = ['o', 's', '>', '^', '*', 'x']
labels = ['NEW', 'STOP', 'RUN', 'STALL', 'DONE', 'ERROR']
for i, symbol in enumerate(symbols):
if (status == i).sum() == 0:
continue
else:
plt.scatter(
x_sim[status == i],
y_sim[status == i],
marker=symbol,
s=500,
c=progresses[status == i],
cmap=cm.RdYlBu,
vmin = 0., vmax = 1.,
label=labels[i])
for i in range(sim_idx.shape[0]):
plt.annotate(
text=str(sim_inst_dict[i].id),
xy=(x_sim[i], y_sim[i]),
color='black',
weight='bold',
size=15
)
plt.legend(
bbox_to_anchor=(0., -.15, 1., .102),
loc='lower center',
ncol=4,
mode="expand",
borderaxespad=0.,
borderpad=2,
labelspacing=3
)
plt.colorbar()
# # Save file with a new name
# if os.path.exists('progress.pdf'):
# plt.savefig('progress_{}.pdf'.format(int(time.time())))
# else:
# print('saving figure')
if 'plot_dir' in self.kwargs:
plot_dir = self.kwargs['plot_dir']
else:
plot_dir = os.getcwd()
if not os.path.isdir(plot_dir):
os.mkdir(plot_dir)
fn = datetime.now().strftime("%d_%m_%Y-%H_%M_%S")
if 'format' in self.kwargs:
fmt = self.kwargs['format']
else:
fmt = 'png'
fullpath = os.path.join(plot_dir, '%s.%s' % (fn, fmt))
print('Progress plot saved on %s' % fullpath)
plt.savefig(fullpath)
plt.close(1) | [((3, 0, 3, 21), 'matplotlib.use', 'matplotlib.use', ({(3, 15, 3, 20): '"""Agg"""'}, {}), "('Agg')", False, 'import matplotlib\n'), ((39, 17, 39, 29), 'numpy.array', 'np.array', ({(39, 26, 39, 28): '[]'}, {}), '([])', True, 'import numpy as np\n'), ((40, 21, 40, 33), 'numpy.array', 'np.array', ({(40, 30, 40, 32): '[]'}, {}), '([])', True, 'import numpy as np\n'), ((41, 18, 41, 30), 'numpy.array', 'np.array', ({(41, 27, 41, 29): '[]'}, {}), '([])', True, 'import numpy as np\n'), ((84, 8, 84, 39), 'matplotlib.pyplot.figure', 'plt.figure', (), '', True, 'import matplotlib.pyplot as plt\n'), ((85, 13, 85, 22), 'matplotlib.pyplot.gca', 'plt.gca', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((119, 8, 127, 9), 'matplotlib.pyplot.legend', 'plt.legend', (), '', True, 'import matplotlib.pyplot as plt\n'), ((129, 8, 129, 22), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((149, 19, 149, 62), 'os.path.join', 'os.path.join', ({(149, 32, 149, 40): 'plot_dir', (149, 42, 149, 61): "'%s.%s' % (fn, fmt)"}, {}), "(plot_dir, '%s.%s' % (fn, fmt))", False, 'import os\n'), ((151, 8, 151, 29), 'matplotlib.pyplot.savefig', 'plt.savefig', ({(151, 20, 151, 28): 'fullpath'}, {}), '(fullpath)', True, 'import matplotlib.pyplot as plt\n'), ((152, 8, 152, 20), 'matplotlib.pyplot.close', 'plt.close', ({(152, 18, 152, 19): '(1)'}, {}), '(1)', True, 'import matplotlib.pyplot as plt\n'), ((57, 21, 57, 41), 'numpy.append', 'np.append', ({(57, 31, 57, 32): 's', (57, 34, 57, 40): 'status'}, {}), '(s, status)', True, 'import numpy as np\n'), ((58, 25, 58, 49), 'numpy.append', 'np.append', ({(58, 35, 58, 36): 'p', (58, 38, 58, 48): 'progresses'}, {}), '(p, progresses)', True, 'import numpy as np\n'), ((59, 22, 59, 48), 'numpy.append', 'np.append', ({(59, 32, 59, 38): 'sim_id', (59, 40, 59, 47): 'sim_idx'}, {}), '(sim_id, sim_idx)', True, 'import numpy as np\n'), ((88, 27, 88, 49), 'numpy.arange', 'np.arange', ({(88, 37, 88, 41): '(-0.5)', (88, 43, 88, 48): 'y_num'}, {}), '(-0.5, y_num)', True, 'import numpy as np\n'), ((89, 35, 89, 60), 'matplotlib.ticker.MaxNLocator', 'MaxNLocator', (), '', False, 'from matplotlib.ticker import MaxNLocator\n'), ((139, 23, 139, 34), 'os.getcwd', 'os.getcwd', ({}, {}), '()', False, 'import os\n'), ((141, 15, 141, 38), 'os.path.isdir', 'os.path.isdir', ({(141, 29, 141, 37): 'plot_dir'}, {}), '(plot_dir)', False, 'import os\n'), ((142, 12, 142, 30), 'os.mkdir', 'os.mkdir', ({(142, 21, 142, 29): 'plot_dir'}, {}), '(plot_dir)', False, 'import os\n'), ((63, 25, 63, 43), 'math.sqrt', 'math.sqrt', ({(63, 35, 63, 42): 'num_sim'}, {}), '(num_sim)', False, 'import math\n'), ((68, 25, 68, 43), 'math.sqrt', 'math.sqrt', ({(68, 35, 68, 42): 'num_sim'}, {}), '(num_sim)', False, 'import math\n'), ((100, 16, 108, 36), 'matplotlib.pyplot.scatter', 'plt.scatter', (), '', True, 'import matplotlib.pyplot as plt\n'), ((144, 13, 144, 27), 'datetime.datetime.now', 'datetime.now', ({}, {}), '()', False, 'from datetime import datetime\n'), ((62, 15, 62, 33), 'math.sqrt', 'math.sqrt', ({(62, 25, 62, 32): 'num_sim'}, {}), '(num_sim)', False, 'import math\n'), ((75, 29, 75, 47), 'math.sqrt', 'math.sqrt', ({(75, 39, 75, 46): 'num_sim'}, {}), '(num_sim)', False, 'import math\n')] |
ChrisBarker-NOAA/tamoc | bin/psm/oil_jet.py | c797cbb6fee28d788b76d21cc5b0cc0df5444ba8 | """
Particle Size Models: Pure Oil Jet
===================================
Use the ``TAMOC`` `particle_size_models` module to simulate a laboratory
scale pure oil jet into water. This script demonstrates the typical steps
involved in using the `particle_size_models.PureJet` object, which requires
specification of all of the fluid properties of the jet.
"""
# S. Socolofsky, March 2020, Texas A&M University <[email protected]>.
from __future__ import (absolute_import, division, print_function)
from tamoc import seawater, particle_size_models
import numpy as np
import warnings
warnings.filterwarnings("ignore")
if __name__ == '__main__':
print('\n---------------------------------------------------------------')
print('Demonstration using the PureJet class in the')
print('particle_size_models module of TAMOC for the ')
print('experiments in the paper by Brandvik et al. (2013).')
print('\nComparisons are for the data reported in Table 3')
print('of the paper')
print('---------------------------------------------------------------')
# Simulate an experiment from Brandvik et al. (2013). Their data uses
# Oseberg oil, with the following reported properties
rho_oil = 839.3
mu_oil = 5.e-3
sigma = 15.5e-3
# We will simulate data from Table 3 in the Brandvik et al. (2013) paper.
# These experiments have a nozzle diameter of 1.5 mm
d0 = 0.0015
# They also used seawater (assumed salinity of 34.5 psu) and released the
# oil from a depth of about 6 m at a temperature of 13 deg C
T = 273.15 + 13.
S = 34.5
rho = seawater.density(T, S, 101325.)
P = 101325. + rho * 9.81 * 6.
rho = seawater.density(T, S, P)
mu = seawater.mu(T, S, P)
# With this information, we can initialize a
# `particle_size_models.PureJet` object
jet = particle_size_models.PureJet(rho_oil, mu_oil, sigma, rho, mu,
fp_type = 1)
# Brandvik et al. (2013) report the exit velocity at the nozzle. We
# need to convert this to a mass flow rate. The mass flow rate should
# always be reported within a numpy array, which allows for different
# mass fluxes for different pseudocomponents of the oil.
u_oil = 11.3
A_oil = np.pi * (d0 / 2.)**2
q_oil = u_oil * A_oil
md_oil = np.array([rho_oil * q_oil])
# To simulate the no-dispersant case, all of the oil properties in the
# jet object are currently correct. Hence, we may use:
jet.simulate(d0, md_oil)
# We compare the result to the measured data as follows:
    print('\nThe median droplet size for the no-dispersant experiment is:')
print(' Measured: %3.3d um' % 237)
print(' Modeled : %3.3d um\n' % (jet.get_d50() * 1.e6))
# When dispersant is added in sufficient quantities, the interfacial
# tension reduces and the droplet size gets smaller. At a dispersant
# to oil ratio of 50, sigma is:
sigma = 0.05e-3
# We can run this case by updating the properties of the jet object and
    # re-running the simulation
jet.update_properties(rho_oil, mu_oil, sigma, rho, mu, fp_type = 1)
jet.simulate(d0, md_oil)
# We compare the result to the measured data as follows:
    print('\nThe median droplet size for an experiment with a')
print('dispersant to oil ratio of 50 is:')
print(' Measured: %3.3d um' % 170)
print(' Modeled : %3.3d um\n' % (jet.get_d50() * 1.e6))
# We can also plot the size distribution
print('\nThe corresponding size distribution is plotted in Figure 1')
jet.get_distributions(15)
jet.plot_psd(1)
| [((19, 0, 19, 33), 'warnings.filterwarnings', 'warnings.filterwarnings', ({(19, 24, 19, 32): '"""ignore"""'}, {}), "('ignore')", False, 'import warnings\n'), ((45, 10, 45, 41), 'tamoc.seawater.density', 'seawater.density', ({(45, 27, 45, 28): 'T', (45, 30, 45, 31): 'S', (45, 33, 45, 40): '101325.0'}, {}), '(T, S, 101325.0)', False, 'from tamoc import seawater, particle_size_models\n'), ((47, 10, 47, 35), 'tamoc.seawater.density', 'seawater.density', ({(47, 27, 47, 28): 'T', (47, 30, 47, 31): 'S', (47, 33, 47, 34): 'P'}, {}), '(T, S, P)', False, 'from tamoc import seawater, particle_size_models\n'), ((48, 9, 48, 29), 'tamoc.seawater.mu', 'seawater.mu', ({(48, 21, 48, 22): 'T', (48, 24, 48, 25): 'S', (48, 27, 48, 28): 'P'}, {}), '(T, S, P)', False, 'from tamoc import seawater, particle_size_models\n'), ((52, 10, 53, 51), 'tamoc.particle_size_models.PureJet', 'particle_size_models.PureJet', (), '', False, 'from tamoc import seawater, particle_size_models\n'), ((62, 13, 62, 40), 'numpy.array', 'np.array', ({(62, 22, 62, 39): '[rho_oil * q_oil]'}, {}), '([rho_oil * q_oil])', True, 'import numpy as np\n')] |
sdss/tron | tron/Nubs/hal.py | 886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322 | import os.path
import tron.Misc
from tron import g, hub
from tron.Hub.Command.Encoders.ASCIICmdEncoder import ASCIICmdEncoder
from tron.Hub.Nub.SocketActorNub import SocketActorNub
from tron.Hub.Reply.Decoders.ASCIIReplyDecoder import ASCIIReplyDecoder
name = 'hal'
def start(poller):
cfg = tron.Misc.cfg.get(g.location, 'actors', doFlush=True)[name]
stop()
initCmds = ('ping', 'status', 'version')
safeCmdsList = ['ping', 'version', 'status']
safeCmds = r'^\s*({0})\s*$'.format('|'.join(safeCmdsList))
d = ASCIIReplyDecoder(cidFirst=True, debug=1)
e = ASCIICmdEncoder(sendCommander=True, useCID=False, debug=1)
nub = SocketActorNub(
poller,
cfg['host'],
cfg['port'],
name=name,
encoder=e,
decoder=d,
grabCID=True, # the actor spontaneously generates a line we can eat.
initCmds=initCmds,
safeCmds=safeCmds,
needsAuth=True,
logDir=os.path.join(g.logDir, name),
debug=3)
hub.addActor(nub)
def stop():
n = hub.findActor(name)
if n:
hub.dropActor(n)
del n
| [((23, 8, 23, 49), 'tron.Hub.Reply.Decoders.ASCIIReplyDecoder.ASCIIReplyDecoder', 'ASCIIReplyDecoder', (), '', False, 'from tron.Hub.Reply.Decoders.ASCIIReplyDecoder import ASCIIReplyDecoder\n'), ((24, 8, 24, 66), 'tron.Hub.Command.Encoders.ASCIICmdEncoder.ASCIICmdEncoder', 'ASCIICmdEncoder', (), '', False, 'from tron.Hub.Command.Encoders.ASCIICmdEncoder import ASCIICmdEncoder\n'), ((38, 4, 38, 21), 'tron.hub.addActor', 'hub.addActor', ({(38, 17, 38, 20): 'nub'}, {}), '(nub)', False, 'from tron import g, hub\n'), ((42, 8, 42, 27), 'tron.hub.findActor', 'hub.findActor', ({(42, 22, 42, 26): 'name'}, {}), '(name)', False, 'from tron import g, hub\n'), ((44, 8, 44, 24), 'tron.hub.dropActor', 'hub.dropActor', ({(44, 22, 44, 23): 'n'}, {}), '(n)', False, 'from tron import g, hub\n')] |
gramm/xsdata | tests/fixtures/defxmlschema/chapter15.py | 082c780757c6d76a5c31a6757276ef6912901ed2 | from dataclasses import dataclass, field
from decimal import Decimal
from typing import Optional
from xsdata.models.datatype import XmlDate
@dataclass
class SizeType:
value: Optional[int] = field(
default=None,
metadata={
"required": True,
}
)
system: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
}
)
@dataclass
class ShirtType:
description: Optional[str] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
comment: Optional[str] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
number: Optional[int] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
name: Optional[str] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
size: Optional[SizeType] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
"required": True,
}
)
id: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
"required": True,
}
)
version: Optional[Decimal] = field(
default=None,
metadata={
"type": "Attribute",
}
)
eff_date: Optional[XmlDate] = field(
default=None,
metadata={
"name": "effDate",
"type": "Attribute",
}
)
@dataclass
class Shirt(ShirtType):
class Meta:
name = "shirt"
| [((9, 27, 14, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((15, 28, 20, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((25, 33, 31, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((32, 29, 38, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((39, 28, 45, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((46, 26, 52, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((53, 31, 60, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((61, 24, 67, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((68, 33, 73, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n'), ((74, 34, 80, 5), 'dataclasses.field', 'field', (), '', False, 'from dataclasses import dataclass, field\n')] |
anubhavsinha98/oppia | extensions/domain.py | 9a64ea2e91d2f471ce22bd39da77b43dccd5b51f | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domain objects used within multiple extensions."""
from __future__ import absolute_import # pylint: disable=import-only-modules
import python_utils
class CustomizationArgSpec(python_utils.OBJECT):
"""Value object for a customization arg specification."""
def __init__(self, name, description, schema, default_value):
self.name = name
self.description = description
self.schema = schema
self.default_value = default_value
| [] |
LaudateCorpus1/oci-ansible-collection | plugins/modules/oci_database_management_object_privilege_facts.py | 2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7 | #!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_database_management_object_privilege_facts
short_description: Fetches details about one or multiple ObjectPrivilege resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple ObjectPrivilege resources in Oracle Cloud Infrastructure
- Gets the list of Object Privileges granted for the specified user.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
managed_database_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the Managed Database.
type: str
required: true
user_name:
description:
- The name of the user whose details are to be viewed.
type: str
required: true
name:
description:
- A filter to return only resources that match the entire name.
type: str
sort_by:
description:
- The field to sort information by. Only one sortOrder can be used. The default sort order
for 'NAME' is ascending. The 'NAME' sort order is case-sensitive.
type: str
choices:
- "NAME"
sort_order:
description:
- The option to sort information in ascending ('ASC') or descending ('DESC') order. Ascending order is the default order.
type: str
choices:
- "ASC"
- "DESC"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: List object_privileges
oci_database_management_object_privilege_facts:
# required
managed_database_id: "ocid1.manageddatabase.oc1..xxxxxxEXAMPLExxxxxx"
user_name: user_name_example
# optional
name: name_example
sort_by: NAME
sort_order: ASC
"""
RETURN = """
object_privileges:
description:
- List of ObjectPrivilege resources
returned: on success
type: complex
contains:
name:
description:
- The name of the privilege on the object.
returned: on success
type: str
sample: name_example
schema_type:
description:
- The type of the object.
returned: on success
type: str
sample: schema_type_example
owner:
description:
- The owner of the object.
returned: on success
type: str
sample: owner_example
grantor:
description:
                - The name of the user who performed the grant.
returned: on success
type: str
sample: grantor_example
hierarchy:
description:
- Indicates whether the privilege was granted with the HIERARCHY OPTION (YES) or not (NO)
returned: on success
type: str
sample: YES
object:
description:
- The name of the object. The object can be any object, including tables, packages, indexes, sequences, and so on.
returned: on success
type: str
sample: object_example
grant_option:
description:
- Indicates whether the privilege was granted with the GRANT OPTION (YES) or not (NO)
returned: on success
type: str
sample: YES
common:
description:
- "Indicates how the grant was made. Possible values:
YES if the role was granted commonly (CONTAINER=ALL was used)
NO if the role was granted locally (CONTAINER=ALL was not used)"
returned: on success
type: str
sample: YES
inherited:
description:
- Indicates whether the role grant was inherited from another container (YES) or not (NO)
returned: on success
type: str
sample: YES
sample: [{
"name": "name_example",
"schema_type": "schema_type_example",
"owner": "owner_example",
"grantor": "grantor_example",
"hierarchy": "YES",
"object": "object_example",
"grant_option": "YES",
"common": "YES",
"inherited": "YES"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.database_management import DbManagementClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class ObjectPrivilegeFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: list"""
def get_required_params_for_list(self):
return [
"managed_database_id",
"user_name",
]
def list_resources(self):
optional_list_method_params = [
"name",
"sort_by",
"sort_order",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
)
return oci_common_utils.list_all_resources(
self.client.list_object_privileges,
managed_database_id=self.module.params.get("managed_database_id"),
user_name=self.module.params.get("user_name"),
**optional_kwargs
)
ObjectPrivilegeFactsHelperCustom = get_custom_class("ObjectPrivilegeFactsHelperCustom")
class ResourceFactsHelper(
ObjectPrivilegeFactsHelperCustom, ObjectPrivilegeFactsHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(
managed_database_id=dict(type="str", required=True),
user_name=dict(type="str", required=True),
name=dict(type="str"),
sort_by=dict(type="str", choices=["NAME"]),
sort_order=dict(type="str", choices=["ASC", "DESC"]),
)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="object_privilege",
service_client_class=DbManagementClient,
namespace="database_management",
)
result = []
if resource_facts_helper.is_list():
result = resource_facts_helper.list()
else:
resource_facts_helper.fail()
module.exit_json(object_privileges=result)
if __name__ == "__main__":
main()
| [((194, 35, 194, 87), 'ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils.get_custom_class', 'get_custom_class', ({(194, 52, 194, 86): '"""ObjectPrivilegeFactsHelperCustom"""'}, {}), "('ObjectPrivilegeFactsHelperCustom')", False, 'from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import OCIResourceFactsHelperBase, get_custom_class\n'), ((204, 18, 204, 56), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_common_arg_spec', 'oci_common_utils.get_common_arg_spec', ({}, {}), '()', False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils\n'), ((215, 13, 215, 53), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', (), '', False, 'from ansible.module_utils.basic import AnsibleModule\n')] |
guilhermeleobas/rbc | rbc/externals/stdio.py | 4b568b91c6ce3ef7727fee001169302c3803c4fd | """https://en.cppreference.com/w/c/io
"""
from rbc import irutils
from llvmlite import ir
from rbc.targetinfo import TargetInfo
from numba.core import cgutils, extending
from numba.core import types as nb_types
from rbc.errors import NumbaTypeError # some errors are available for Numba >= 0.55
int32_t = ir.IntType(32)
def cg_fflush(builder):
int8_t = ir.IntType(8)
fflush_fnty = ir.FunctionType(int32_t, [int8_t.as_pointer()])
fflush_fn = irutils.get_or_insert_function(builder.module, fflush_fnty, name="fflush")
builder.call(fflush_fn, [int8_t.as_pointer()(None)])
@extending.intrinsic
def fflush(typingctx):
"""``fflush`` that can be called from Numba jit-decorated functions.
.. note::
``fflush`` is available only for CPU target.
"""
sig = nb_types.void(nb_types.void)
def codegen(context, builder, signature, args):
target_info = TargetInfo()
if target_info.is_cpu:
cg_fflush(builder)
return sig, codegen
@extending.intrinsic
def printf(typingctx, format_type, *args):
"""``printf`` that can be called from Numba jit-decorated functions.
.. note::
``printf`` is available only for CPU target.
"""
if isinstance(format_type, nb_types.StringLiteral):
sig = nb_types.void(format_type, nb_types.BaseTuple.from_types(args))
def codegen(context, builder, signature, args):
target_info = TargetInfo()
if target_info.is_cpu:
cgutils.printf(builder, format_type.literal_value, *args[1:])
cg_fflush(builder)
return sig, codegen
else:
raise NumbaTypeError(f"expected StringLiteral but got {type(format_type).__name__}")
| [((11, 10, 11, 24), 'llvmlite.ir.IntType', 'ir.IntType', ({(11, 21, 11, 23): '32'}, {}), '(32)', False, 'from llvmlite import ir\n'), ((15, 13, 15, 26), 'llvmlite.ir.IntType', 'ir.IntType', ({(15, 24, 15, 25): '8'}, {}), '(8)', False, 'from llvmlite import ir\n'), ((17, 16, 17, 90), 'rbc.irutils.get_or_insert_function', 'irutils.get_or_insert_function', (), '', False, 'from rbc import irutils\n'), ((29, 10, 29, 38), 'numba.core.types.void', 'nb_types.void', ({(29, 24, 29, 37): 'nb_types.void'}, {}), '(nb_types.void)', True, 'from numba.core import types as nb_types\n'), ((32, 22, 32, 34), 'rbc.targetinfo.TargetInfo', 'TargetInfo', ({}, {}), '()', False, 'from rbc.targetinfo import TargetInfo\n'), ((48, 41, 48, 76), 'numba.core.types.BaseTuple.from_types', 'nb_types.BaseTuple.from_types', ({(48, 71, 48, 75): 'args'}, {}), '(args)', True, 'from numba.core import types as nb_types\n'), ((51, 26, 51, 38), 'rbc.targetinfo.TargetInfo', 'TargetInfo', ({}, {}), '()', False, 'from rbc.targetinfo import TargetInfo\n'), ((53, 16, 53, 77), 'numba.core.cgutils.printf', 'cgutils.printf', ({(53, 31, 53, 38): 'builder', (53, 40, 53, 65): 'format_type.literal_value', (53, 67, 53, 76): '*args[1:]'}, {}), '(builder, format_type.literal_value, *args[1:])', False, 'from numba.core import cgutils, extending\n')] |
clach04/discoverhue | setup.py | 8f35cbc8ff9b5aab80b8be0443427058c1da51ed | from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert_file('README.md', 'rst', extra_args=())
except ImportError:
import codecs
long_description = codecs.open('README.md', encoding='utf-8').read()
long_description = '\n'.join(long_description.splitlines())
setup(
name='discoverhue',
description='Auto discovery of Hue bridges',
long_description=long_description,
version='1.0.2',
url='https://github.com/Overboard/discoverhue',
author='Overboard',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='philips hue',
packages=['discoverhue'],
install_requires=['httpfind'],
)
| [((12, 0, 37, 1), 'setuptools.setup', 'setup', (), '', False, 'from setuptools import setup\n'), ((5, 23, 5, 79), 'pypandoc.convert_file', 'pypandoc.convert_file', (), '', False, 'import pypandoc\n'), ((8, 23, 8, 65), 'codecs.open', 'codecs.open', (), '', False, 'import codecs\n')] |
dbonattoj/Real-Time-Voice-Cloning | utils/logmmse.py | 7ce361b0e900cb0fad4289884f526578ba276481 | # The MIT License (MIT)
#
# Copyright (c) 2015 braindead
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#
# This code was extracted from the logmmse package (https://pypi.org/project/logmmse/) and I
# simply modified the interface to meet my needs.
import numpy as np
import math
from scipy.special import expn
from collections import namedtuple
NoiseProfile = namedtuple("NoiseProfile", "sampling_rate window_size len1 len2 win n_fft noise_mu2")
def profile_noise(noise, sampling_rate, window_size=0):
"""
Creates a profile of the noise in a given waveform.
:param noise: a waveform containing noise ONLY, as a numpy array of floats or ints.
:param sampling_rate: the sampling rate of the audio
:param window_size: the size of the window the logmmse algorithm operates on. A default value
will be picked if left as 0.
:return: a NoiseProfile object
"""
noise, dtype = to_float(noise)
noise += np.finfo(np.float64).eps
if window_size == 0:
window_size = int(math.floor(0.02 * sampling_rate))
if window_size % 2 == 1:
window_size = window_size + 1
perc = 50
len1 = int(math.floor(window_size * perc / 100))
len2 = int(window_size - len1)
win = np.hanning(window_size)
win = win * len2 / np.sum(win)
n_fft = 2 * window_size
noise_mean = np.zeros(n_fft)
n_frames = len(noise) // window_size
for j in range(0, window_size * n_frames, window_size):
noise_mean += np.absolute(np.fft.fft(win * noise[j:j + window_size], n_fft, axis=0))
noise_mu2 = (noise_mean / n_frames) ** 2
return NoiseProfile(sampling_rate, window_size, len1, len2, win, n_fft, noise_mu2)
def denoise(wav, noise_profile: NoiseProfile, eta=0.15):
"""
Cleans the noise from a speech waveform given a noise profile. The waveform must have the
same sampling rate as the one used to create the noise profile.
:param wav: a speech waveform as a numpy array of floats or ints.
:param noise_profile: a NoiseProfile object that was created from a similar (or a segment of
the same) waveform.
:param eta: voice threshold for noise update. While the voice activation detection value is
below this threshold, the noise profile will be continuously updated throughout the audio.
Set to 0 to disable updating the noise profile.
:return: the clean wav as a numpy array of floats or ints of the same length.
"""
wav, dtype = to_float(wav)
wav += np.finfo(np.float64).eps
p = noise_profile
nframes = int(math.floor(len(wav) / p.len2) - math.floor(p.window_size / p.len2))
x_final = np.zeros(nframes * p.len2)
aa = 0.98
mu = 0.98
ksi_min = 10 ** (-25 / 10)
x_old = np.zeros(p.len1)
xk_prev = np.zeros(p.len1)
noise_mu2 = p.noise_mu2
for k in range(0, nframes * p.len2, p.len2):
insign = p.win * wav[k:k + p.window_size]
spec = np.fft.fft(insign, p.n_fft, axis=0)
sig = np.absolute(spec)
sig2 = sig ** 2
gammak = np.minimum(sig2 / noise_mu2, 40)
if xk_prev.all() == 0:
ksi = aa + (1 - aa) * np.maximum(gammak - 1, 0)
else:
ksi = aa * xk_prev / noise_mu2 + (1 - aa) * np.maximum(gammak - 1, 0)
ksi = np.maximum(ksi_min, ksi)
log_sigma_k = gammak * ksi/(1 + ksi) - np.log(1 + ksi)
vad_decision = np.sum(log_sigma_k) / p.window_size
if vad_decision < eta:
noise_mu2 = mu * noise_mu2 + (1 - mu) * sig2
a = ksi / (1 + ksi)
vk = a * gammak
ei_vk = 0.5 * expn(1, np.maximum(vk, 1e-8))
hw = a * np.exp(ei_vk)
sig = sig * hw
xk_prev = sig ** 2
xi_w = np.fft.ifft(hw * spec, p.n_fft, axis=0)
xi_w = np.real(xi_w)
x_final[k:k + p.len2] = x_old + xi_w[0:p.len1]
x_old = xi_w[p.len1:p.window_size]
output = from_float(x_final, dtype)
output = np.pad(output, (0, len(wav) - len(output)), mode="constant")
return output
def to_float(_input):
if _input.dtype == np.float64:
return _input, _input.dtype
elif _input.dtype == np.float32:
return _input.astype(np.float64), _input.dtype
elif _input.dtype == np.uint8:
return (_input - 128) / 128., _input.dtype
elif _input.dtype == np.int16:
return _input / 32768., _input.dtype
elif _input.dtype == np.int32:
return _input / 2147483648., _input.dtype
raise ValueError('Unsupported wave file format')
def from_float(_input, dtype):
if dtype == np.float64:
return _input, np.float64
elif dtype == np.float32:
return _input.astype(np.float32)
elif dtype == np.uint8:
return ((_input * 128) + 128).astype(np.uint8)
elif dtype == np.int16:
return (_input * 32768).astype(np.int16)
elif dtype == np.int32:
print(_input)
return (_input * 2147483648).astype(np.int32)
raise ValueError('Unsupported wave file format')
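if __name__ == "__main__":
    # Self-contained sketch (synthetic data, illustrative only): profile a
    # noise-only segment, then clean a noisy waveform at the same rate.
    rate = 16000
    rng = np.random.RandomState(0)
    noise_only = 0.05 * rng.randn(rate)  # 1 second of pure noise
    t = np.arange(2 * rate) / rate
    noisy = 0.5 * np.sin(2 * np.pi * 220 * t) + 0.05 * rng.randn(2 * rate)
    profile = profile_noise(noise_only, rate)
    clean = denoise(noisy, profile)
    print(clean.shape, clean.dtype)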
| [((33, 15, 33, 100), 'collections.namedtuple', 'namedtuple', ({(33, 26, 33, 40): '"""NoiseProfile"""', (33, 42, 33, 99): '"""sampling_rate window_size len1 len2 win n_fft noise_mu2"""'}, {}), "('NoiseProfile',\n 'sampling_rate window_size len1 len2 win n_fft noise_mu2')", False, 'from collections import namedtuple\n'), ((59, 10, 59, 33), 'numpy.hanning', 'np.hanning', ({(59, 21, 59, 32): 'window_size'}, {}), '(window_size)', True, 'import numpy as np\n'), ((63, 17, 63, 32), 'numpy.zeros', 'np.zeros', ({(63, 26, 63, 31): 'n_fft'}, {}), '(n_fft)', True, 'import numpy as np\n'), ((90, 14, 90, 40), 'numpy.zeros', 'np.zeros', ({(90, 23, 90, 39): 'nframes * p.len2'}, {}), '(nframes * p.len2)', True, 'import numpy as np\n'), ((96, 12, 96, 28), 'numpy.zeros', 'np.zeros', ({(96, 21, 96, 27): 'p.len1'}, {}), '(p.len1)', True, 'import numpy as np\n'), ((97, 14, 97, 30), 'numpy.zeros', 'np.zeros', ({(97, 23, 97, 29): 'p.len1'}, {}), '(p.len1)', True, 'import numpy as np\n'), ((47, 13, 47, 33), 'numpy.finfo', 'np.finfo', ({(47, 22, 47, 32): 'np.float64'}, {}), '(np.float64)', True, 'import numpy as np\n'), ((56, 15, 56, 51), 'math.floor', 'math.floor', ({(56, 26, 56, 50): 'window_size * perc / 100'}, {}), '(window_size * perc / 100)', False, 'import math\n'), ((60, 23, 60, 34), 'numpy.sum', 'np.sum', ({(60, 30, 60, 33): 'win'}, {}), '(win)', True, 'import numpy as np\n'), ((86, 11, 86, 31), 'numpy.finfo', 'np.finfo', ({(86, 20, 86, 30): 'np.float64'}, {}), '(np.float64)', True, 'import numpy as np\n'), ((102, 15, 102, 50), 'numpy.fft.fft', 'np.fft.fft', (), '', True, 'import numpy as np\n'), ((103, 14, 103, 31), 'numpy.absolute', 'np.absolute', ({(103, 26, 103, 30): 'spec'}, {}), '(spec)', True, 'import numpy as np\n'), ((106, 17, 106, 49), 'numpy.minimum', 'np.minimum', ({(106, 28, 106, 44): 'sig2 / noise_mu2', (106, 46, 106, 48): '40'}, {}), '(sig2 / noise_mu2, 40)', True, 'import numpy as np\n'), ((125, 15, 125, 54), 'numpy.fft.ifft', 'np.fft.ifft', (), '', True, 'import numpy as np\n'), ((126, 15, 126, 28), 'numpy.real', 'np.real', ({(126, 23, 126, 27): 'xi_w'}, {}), '(xi_w)', True, 'import numpy as np\n'), ((50, 26, 50, 58), 'math.floor', 'math.floor', ({(50, 37, 50, 57): '0.02 * sampling_rate'}, {}), '(0.02 * sampling_rate)', False, 'import math\n'), ((66, 34, 66, 91), 'numpy.fft.fft', 'np.fft.fft', (), '', True, 'import numpy as np\n'), ((89, 50, 89, 84), 'math.floor', 'math.floor', ({(89, 61, 89, 83): 'p.window_size / p.len2'}, {}), '(p.window_size / p.len2)', False, 'import math\n'), ((112, 18, 112, 42), 'numpy.maximum', 'np.maximum', ({(112, 29, 112, 36): 'ksi_min', (112, 38, 112, 41): 'ksi'}, {}), '(ksi_min, ksi)', True, 'import numpy as np\n'), ((114, 47, 114, 62), 'numpy.log', 'np.log', ({(114, 54, 114, 61): '(1 + ksi)'}, {}), '(1 + ksi)', True, 'import numpy as np\n'), ((115, 23, 115, 42), 'numpy.sum', 'np.sum', ({(115, 30, 115, 41): 'log_sigma_k'}, {}), '(log_sigma_k)', True, 'import numpy as np\n'), ((122, 17, 122, 30), 'numpy.exp', 'np.exp', ({(122, 24, 122, 29): 'ei_vk'}, {}), '(ei_vk)', True, 'import numpy as np\n'), ((121, 30, 121, 50), 'numpy.maximum', 'np.maximum', ({(121, 41, 121, 43): 'vk', (121, 45, 121, 49): '(1e-08)'}, {}), '(vk, 1e-08)', True, 'import numpy as np\n'), ((109, 34, 109, 59), 'numpy.maximum', 'np.maximum', ({(109, 45, 109, 55): '(gammak - 1)', (109, 57, 109, 58): '(0)'}, {}), '(gammak - 1, 0)', True, 'import numpy as np\n'), ((111, 56, 111, 81), 'numpy.maximum', 'np.maximum', ({(111, 67, 111, 77): '(gammak - 1)', (111, 79, 111, 80): '(0)'}, {}), '(gammak - 1, 
0)', True, 'import numpy as np\n')] |
6un9-h0-Dan/CIRTKit | lib/core/session.py | 58b8793ada69320ffdbdd4ecdc04a3bb2fa83c37 | # This file is part of Viper - https://github.com/viper-framework/viper
# See the file 'LICENSE' for copying permission.
import time
import datetime
from lib.common.out import *
from lib.common.objects import File
from lib.core.database import Database
from lib.core.investigation import __project__
class Session(object):
def __init__(self):
self.id = None
# This will be assigned with the File object of the file currently
# being analyzed.
self.file = None
# Timestamp of the creation of the session.
self.created_at = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')
# MISP event associated to the object
self.misp_event = None
class Sessions(object):
def __init__(self):
self.current = None
self.sessions = []
# Store the results of the last "find" command.
self.find = None
def close(self):
self.current = None
def is_set(self):
# Check if the session has been opened or not.
if self.current:
return True
else:
return False
def switch(self, session):
self.current = session
print_info("Switched to session #{0} on {1}".format(self.current.id, self.current.file.path))
def new(self, path=None, misp_event=None):
if path is None and misp_event is None:
print_error("You have to open a session on a path or on a misp event.")
return
if __project__.name:
pass
else:
print_error("You must open an investigation to store files")
return
session = Session()
total = len(self.sessions)
session.id = total + 1
if path is not None:
if self.is_set() and self.current.misp_event:
session.misp_event = self.current.misp_event
            # Open a session on the given file.
session.file = File(path)
            # Try to look up the file in the database. If it is already present
            # we get its file name and tags.
row = Database().find(key='sha256', value=session.file.sha256)
if row:
session.file.name = row[0].name
session.file.tags = ', '.join(tag.to_dict()['tag'] for tag in row[0].tag)
print_info("Session opened on {0}".format(path))
if misp_event is not None:
if self.is_set() and self.current.file:
session.file = self.current.file
refresh = False
if self.current is not None and self.current.misp_event is not None \
and self.current.misp_event.event_id == misp_event.event_id:
refresh = True
session.misp_event = misp_event
if refresh:
print_info("Session on MISP event {0} refreshed.".format(misp_event.event_id))
else:
print_info("Session opened on MISP event {0}.".format(misp_event.event_id))
if session.file is not None:
# Loop through all existing sessions and check whether there's another
# session open on the same file and delete it. This is to avoid
# duplicates in sessions.
# NOTE: in the future we might want to remove this if sessions have
            # unique attributes (for example, a history just for each of them).
for entry in self.sessions:
if entry.file is not None and entry.file.sha256 == session.file.sha256:
self.sessions.remove(entry)
# Add new session to the list.
self.sessions.append(session)
# Mark the new session as the current one.
self.current = session
__sessions__ = Sessions()
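# Illustrative usage sketch (the path below is hypothetical; assumes an open
# investigation so that files can be stored):
#
#   __sessions__.new(path="/tmp/sample.bin")    # open a session on a file
#   if __sessions__.is_set():
#       print(__sessions__.current.file.sha256)
#   __sessions__.close()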
| [((67, 27, 67, 37), 'lib.common.objects.File', 'File', ({(67, 32, 67, 36): 'path'}, {}), '(path)', False, 'from lib.common.objects import File\n'), ((20, 58, 20, 69), 'time.time', 'time.time', ({}, {}), '()', False, 'import time\n'), ((70, 18, 70, 28), 'lib.core.database.Database', 'Database', ({}, {}), '()', False, 'from lib.core.database import Database\n')] |
glibin/simple-report | src/simple_report/xls/document.py | 1e68b2fe568d6f7a7d9332d0e83b9a21661419e0 | #coding: utf-8
import xlrd
from simple_report.core.document_wrap import BaseDocument, SpreadsheetDocument
from simple_report.xls.workbook import Workbook
from simple_report.xls.output_options import XSL_OUTPUT_SETTINGS
class DocumentXLS(BaseDocument, SpreadsheetDocument):
"""
Обертка для отчетов в формате XLS
"""
def __init__(self, ffile, tags=None, **kwargs):
self.file = ffile
self._workbook = Workbook(ffile, **kwargs)
@property
def workbook(self):
"""
Получение рабочей книги
:result: рабочая книга
"""
return self._workbook
def build(self, dst):
"""
Сборка отчета
:param dst: путь до выходного файла
:result:
"""
self._workbook.build(dst)
def __setattr__(self, key, value):
if key in XSL_OUTPUT_SETTINGS:
setattr(self._workbook, key, value)
else:
super(DocumentXLS, self).__setattr__(key, value)
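# Illustrative usage sketch (file names are hypothetical):
#
#   doc = DocumentXLS("template.xls")
#   doc.build("report.xls")   # renders the workbook to the output path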
| [((16, 25, 16, 50), 'simple_report.xls.workbook.Workbook', 'Workbook', ({(16, 34, 16, 39): 'ffile'}, {}), '(ffile, **kwargs)', False, 'from simple_report.xls.workbook import Workbook\n')] |
zengchen1024/mindinsight | tests/ut/datavisual/common/test_error_handler.py | 228a448b46707e889efc1fb23502158e27ab56ca | # Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Function:
Test error handler.
Usage:
pytest tests/ut/datavisual
"""
from unittest.mock import patch
from werkzeug.exceptions import MethodNotAllowed, NotFound
from ...backend.datavisual.conftest import TRAIN_ROUTES
from ..mock import MockLogger
from ....utils.tools import get_url
from mindinsight.datavisual.processors import scalars_processor
from mindinsight.datavisual.processors.scalars_processor import ScalarsProcessor
class TestErrorHandler:
"""Test train visual api."""
@patch.object(ScalarsProcessor, 'get_metadata_list')
def test_handle_http_exception_error_not_found(self, mock_scalar_processor, client):
"""Test handle http exception error not found."""
scalars_processor.logger = MockLogger
text = 'Test Message'
# NotFound
def get_metadata_list(train_ids, tag):
raise NotFound("%s" % text)
mock_scalar_processor.side_effect = get_metadata_list
test_train_ids = "aa"
test_tag = "bb"
params = dict(train_ids=test_train_ids, tag=test_tag)
url = get_url(TRAIN_ROUTES['scalar_metadata'], params)
response = client.get(url)
assert response.status_code == 404
response = response.get_json()
assert response['error_code'] == '50545001'
assert response['error_msg'] == '404 Not Found.'
@patch.object(ScalarsProcessor, 'get_metadata_list')
def test_handle_http_exception_error_method_not_allowed(self, mock_scalar_processor, client):
"""Test handling http exception error method not allowed."""
scalars_processor.logger = MockLogger
text = 'Test Message'
# MethodNotAllowed
def get_metadata_list(train_ids, tag):
raise MethodNotAllowed("%s" % text)
mock_scalar_processor.side_effect = get_metadata_list
test_train_ids = "aa"
test_tag = "bb"
params = dict(train_ids=test_train_ids, tag=test_tag)
url = get_url(TRAIN_ROUTES['scalar_metadata'], params)
response = client.get(url)
assert response.status_code == 405
response = response.get_json()
assert response['error_code'] == '50545002'
assert response['error_msg'] == '405 Method Not Allowed.'
@patch.object(ScalarsProcessor, 'get_metadata_list')
def test_handle_http_exception_error_method_other_errors(self, mock_scalar_processor, client):
"""Test handling http exception error method other errors."""
scalars_processor.logger = MockLogger
text = 'Test Message'
# Other errors
def get_metadata_list(train_ids, tag):
raise KeyError("%s" % text)
mock_scalar_processor.side_effect = get_metadata_list
test_train_ids = "aa"
test_tag = "bb"
params = dict(train_ids=test_train_ids, tag=test_tag)
url = get_url(TRAIN_ROUTES['scalar_metadata'], params)
response = client.get(url)
assert response.status_code == 500
response = response.get_json()
assert response['error_code'] == '50540000'
assert response['error_msg'] == 'System error.'
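# Summary of the mapping exercised above: HTTP 404 -> error_code 50545001,
# HTTP 405 -> 50545002, and any unhandled exception -> 50540000 ("System error.").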
| [((35, 5, 35, 56), 'unittest.mock.patch.object', 'patch.object', ({(35, 18, 35, 34): 'ScalarsProcessor', (35, 36, 35, 55): '"""get_metadata_list"""'}, {}), "(ScalarsProcessor, 'get_metadata_list')", False, 'from unittest.mock import patch\n'), ((58, 5, 58, 56), 'unittest.mock.patch.object', 'patch.object', ({(58, 18, 58, 34): 'ScalarsProcessor', (58, 36, 58, 55): '"""get_metadata_list"""'}, {}), "(ScalarsProcessor, 'get_metadata_list')", False, 'from unittest.mock import patch\n'), ((81, 5, 81, 56), 'unittest.mock.patch.object', 'patch.object', ({(81, 18, 81, 34): 'ScalarsProcessor', (81, 36, 81, 55): '"""get_metadata_list"""'}, {}), "(ScalarsProcessor, 'get_metadata_list')", False, 'from unittest.mock import patch\n'), ((43, 18, 43, 39), 'werkzeug.exceptions.NotFound', 'NotFound', ({(43, 27, 43, 38): "('%s' % text)"}, {}), "('%s' % text)", False, 'from werkzeug.exceptions import MethodNotAllowed, NotFound\n'), ((66, 18, 66, 47), 'werkzeug.exceptions.MethodNotAllowed', 'MethodNotAllowed', ({(66, 35, 66, 46): "('%s' % text)"}, {}), "('%s' % text)", False, 'from werkzeug.exceptions import MethodNotAllowed, NotFound\n')] |
easyopsapis/easyops-api-python | pipeline_sdk/api/build/cancel_build_pb2.py | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: cancel_build.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='cancel_build.proto',
package='build',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x12\x63\x61ncel_build.proto\x12\x05\x62uild\x1a\x1bgoogle/protobuf/empty.proto\"!\n\rCancelRequest\x12\x10\n\x08\x62uild_id\x18\x01 \x01(\t\"o\n\x15\x43\x61ncelResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12$\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\x16.google.protobuf.Emptyb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_CANCELREQUEST = _descriptor.Descriptor(
name='CancelRequest',
full_name='build.CancelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='build_id', full_name='build.CancelRequest.build_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=58,
serialized_end=91,
)
_CANCELRESPONSEWRAPPER = _descriptor.Descriptor(
name='CancelResponseWrapper',
full_name='build.CancelResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='build.CancelResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='build.CancelResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='build.CancelResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='build.CancelResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=93,
serialized_end=204,
)
_CANCELRESPONSEWRAPPER.fields_by_name['data'].message_type = google_dot_protobuf_dot_empty__pb2._EMPTY
DESCRIPTOR.message_types_by_name['CancelRequest'] = _CANCELREQUEST
DESCRIPTOR.message_types_by_name['CancelResponseWrapper'] = _CANCELRESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CancelRequest = _reflection.GeneratedProtocolMessageType('CancelRequest', (_message.Message,), {
'DESCRIPTOR' : _CANCELREQUEST,
'__module__' : 'cancel_build_pb2'
# @@protoc_insertion_point(class_scope:build.CancelRequest)
})
_sym_db.RegisterMessage(CancelRequest)
CancelResponseWrapper = _reflection.GeneratedProtocolMessageType('CancelResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _CANCELRESPONSEWRAPPER,
'__module__' : 'cancel_build_pb2'
# @@protoc_insertion_point(class_scope:build.CancelResponseWrapper)
})
_sym_db.RegisterMessage(CancelResponseWrapper)
# @@protoc_insertion_point(module_scope)
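# Illustrative usage sketch (the build id below is hypothetical):
#
#   req = CancelRequest(build_id="build-123")
#   wire = req.SerializeToString()           # protobuf wire format
#   again = CancelRequest.FromString(wire)   # round-trips the message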
| [((13, 10, 13, 36), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ({}, {}), '()', True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((118, 16, 122, 4), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(118, 57, 118, 72): '"""CancelRequest"""', (118, 74, 118, 93): '(_message.Message,)', (118, 95, 122, 3): "{'DESCRIPTOR': _CANCELREQUEST, '__module__': 'cancel_build_pb2'}"}, {}), "('CancelRequest', (_message.Message\n ,), {'DESCRIPTOR': _CANCELREQUEST, '__module__': 'cancel_build_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((125, 24, 129, 4), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', ({(125, 65, 125, 88): '"""CancelResponseWrapper"""', (125, 90, 125, 109): '(_message.Message,)', (125, 111, 129, 3): "{'DESCRIPTOR': _CANCELRESPONSEWRAPPER, '__module__': 'cancel_build_pb2'}"}, {}), "('CancelResponseWrapper', (_message\n .Message,), {'DESCRIPTOR': _CANCELRESPONSEWRAPPER, '__module__':\n 'cancel_build_pb2'})", True, 'from google.protobuf import reflection as _reflection\n'), ((69, 4, 75, 47), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n'), ((90, 4, 96, 47), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', (), '', True, 'from google.protobuf import descriptor as _descriptor\n')] |
geochri/Intel_Edge_AI-Computer_Pointer_controller | src/.ipynb_checkpoints/headpose_model-checkpoint.py | 068947fa0cbe0c5d1b74e2c0eb69a85bbc439131 | '''
This is a sample class for a model. You may choose to use it as-is or make any changes to it.
This has been provided just to give you an idea of how to structure your model class.
'''
from openvino.inference_engine import IENetwork, IECore
import numpy as np
import os
import cv2
import sys
class Model_HeadPose:
'''
Class for the Head Pose Estimation Model.
'''
def __init__(self, model_name, device='CPU', extensions=None):
self.model_weights = model_name+'.bin'
self.model_structure = model_name+'.xml'
self.device = device
self.extensions = extensions
# self.check_model()
# try:
# self.input_name = next(iter(self.model.inputs))
# self.input_shape = self.model.inputs[self.input_name].shape
# self.output_name = next(iter(self.model.outputs))
# self.output_shape = self.model.outputs[self.output_name].shape
# print('Initialise.. completed.')
# except Exception as e:
# raise ValueError('Something is wrong with input and output values..')
def load_model(self):
'''
This method is for loading the model to the device specified by the user.
If your model requires any Plugins, this is where you can load them.
'''
try:
print('Model is loading...')
self.core = IECore()
self.net = self.core.read_network(model=self.model_structure,weights=self.model_weights)
supported = self.core.query_network(self.net, self.device)
not_supported = [layer for layer in self.net.layers.keys() if layer not in supported]
if len(not_supported) != 0 and self.device == 'CPU':
                print('Unsupported layers:', not_supported)
                if self.extensions is not None:
                    print('***Quick fix.\n ~CPU Extension added')
                    self.core.add_extension(self.extensions, self.device)
supported = self.core.query_network(self.net, self.device)
not_supported = [layer for layer in self.net.layers.keys() if layer not in supported]
if len(not_supported) == 0:
print('***Quick fix, Failed.')
else:
print('Check the extension path.')
self.net_exec = self.core.load_network(network=self.net, device_name=self.device)
except Exception as e:
            raise RuntimeError('Something is wrong.. ~debug load model~') from e
try:
self.input_name = next(iter(self.net.inputs))
self.input_shape = self.net.inputs[self.input_name].shape
self.output_name = next(iter(self.net.outputs))
self.output_shape = self.net.outputs[self.output_name].shape
print('Initialise.. completed.')
except Exception as e:
raise ValueError('Something is wrong with input and output values..')
def predict(self, image):
'''
This method is meant for running predictions on the input image.
'''
self.image = image
print('HeadPose predict..')
pre_image = self.preprocess_input(self.image)
input_name = self.input_name
input_dict = {input_name: pre_image}
# infer = self.net_exec.start_async(request_id=0, inputs=input_dict)
# status = infer.wait()
results = self.net_exec.infer(input_dict)
outputs = self.preprocess_output(results)
# if status == 0:
# results = infer.outputs[self.output_name]
# print(results)
# print(self.input_name)
# outputs = self.preprocess_output(results)
return outputs
def check_model(self):
'''
Check - initialise the model
'''
try:
self.model = IENetwork(self.model_structure, self.model_weights)
except Exception as e:
raise ValueError("Could not Initialise the network. Have you enterred the correct model path?")
def preprocess_input(self, image):
'''
An input image in [1xCxHxW] format.
B - batch size
C - number of channels
H - image height
W - image width
'''
image = cv2.resize(image, (self.input_shape[3], self.input_shape[2]))
image = image.transpose((2, 0, 1))
image = image.reshape(1, *image.shape)
return image
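    # Shape walk-through for preprocess_input (illustrative; assumes the IR
    # reports input_shape == [1, 3, 60, 60] — a hypothetical but typical value):
    #   frame (H, W, 3) --resize--> (60, 60, 3) --transpose--> (3, 60, 60)
    #   --reshape--> (1, 3, 60, 60), i.e. [BxCxHxW] as the network expects.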
def preprocess_output(self, outputs):
'''
Output layer names in Inference Engine format:
name: "angle_y_fc", shape: [1, 1] - Estimated yaw (in degrees).
name: "angle_p_fc", shape: [1, 1] - Estimated pitch (in degrees).
name: "angle_r_fc", shape: [1, 1] - Estimated roll (in degrees).
'''
object_list = []
print('PreOutput-headpose..')
# print(outputs)
object_list.append(outputs['angle_y_fc'].tolist()[0][0])
object_list.append(outputs['angle_p_fc'].tolist()[0][0])
object_list.append(outputs['angle_r_fc'].tolist()[0][0])
return object_list | [((103, 16, 103, 77), 'cv2.resize', 'cv2.resize', ({(103, 27, 103, 32): 'image', (103, 34, 103, 76): '(self.input_shape[3], self.input_shape[2])'}, {}), '(image, (self.input_shape[3], self.input_shape[2]))', False, 'import cv2\n'), ((37, 24, 37, 32), 'openvino.inference_engine.IECore', 'IECore', ({}, {}), '()', False, 'from openvino.inference_engine import IENetwork, IECore\n'), ((90, 25, 90, 76), 'openvino.inference_engine.IENetwork', 'IENetwork', ({(90, 35, 90, 55): 'self.model_structure', (90, 57, 90, 75): 'self.model_weights'}, {}), '(self.model_structure, self.model_weights)', False, 'from openvino.inference_engine import IENetwork, IECore\n')] |
HENNGE/minisaml | src/minisaml/internal/constants.py | d96aa5d294eee60521ad3c7084e8659b25935cee | NAMES_SAML2_PROTOCOL = "urn:oasis:names:tc:SAML:2.0:protocol"
NAMES_SAML2_ASSERTION = "urn:oasis:names:tc:SAML:2.0:assertion"
NAMEID_FORMAT_UNSPECIFIED = "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified"
BINDINGS_HTTP_POST = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
DATE_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
DATE_TIME_FORMAT_FRACTIONAL = "%Y-%m-%dT%H:%M:%S.%fZ"
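# Illustrative formatting sketch (the datetime value is hypothetical):
#   datetime(2020, 1, 2, 3, 4, 5).strftime(DATE_TIME_FORMAT) == "2020-01-02T03:04:05Z"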
| [] |
PK-100/Competitive_Programming | Contests/Snackdown19_Qualifier/CHEFPRMS.py | d0863feaaa99462b2999e85dcf115f7a6c08bb8d | import math
def square(n):
tmp=round(math.sqrt(n))
if tmp*tmp==n:
return False
else:
return True
def semprime(n):
ch = 0
if square(n)==False:
return False
for i in range(2, int(math.sqrt(n)) + 1):
while n%i==0:
n//=i
ch+=1
if ch >= 2:
break
if(n > 1):
ch += 1
return ch == 2
def check(n):
if semprime(n) == True:
return True
else:
return False
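# Worked examples (sketch; note square(n) returns True when n is NOT a perfect square):
#   semprime(10) -> True   (10 = 2*5, exactly two prime factors, square-free)
#   semprime(4)  -> False  (perfect squares are rejected up front)
#   semprime(12) -> False  (12 = 2*2*3 has three prime factors)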
for _ in range(int(input())):
n=int(input())
flag=0
for i in range(2,n//2+1):
if check(i)==True and check(n-i)==True:
#print(i,n-i,square(i),square(n-i),"Yes")
print("YES")
flag=1
break
if flag==0:
#print(i,n-i,square(i),square(n-i),"No")
print("NO")
| [((3, 14, 3, 26), 'math.sqrt', 'math.sqrt', ({(3, 24, 3, 25): 'n'}, {}), '(n)', False, 'import math\n'), ((12, 26, 12, 38), 'math.sqrt', 'math.sqrt', ({(12, 36, 12, 37): 'n'}, {}), '(n)', False, 'import math\n')] |
arokem/afq-deep-learning | setup.py | 61d7746f03914d63c56253d10d0f6a21e6c78e90 | from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='This repository hosts some work-in-progress experiments applying deep learning to predict age using tractometry data.',
author='Joanna Qiao',
license='BSD-3',
)
| [((5, 13, 5, 28), 'setuptools.find_packages', 'find_packages', ({}, {}), '()', False, 'from setuptools import find_packages, setup\n')] |
thiagodasilva/runway | make_base_container.py | a5455e885302df534fcfff0470881fbd2ad8eed5 | #!/usr/bin/env python3
import argparse
import os
import random
import requests
import sys
import tempfile
import uuid
from libs import colorprint
from libs.cli import run_command
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
# assume well-known lvm volume group on host
# ...later we'll figure out how to make this dynamic
VG_NAME = "swift-runway-vg01"
SWIFTSTACK_IMAGES_PREFIX = "ss-"
SWIFTSTACK_IMAGES_BASE_URL = \
"https://tellus.swiftstack.com/v1/AUTH_runway/lxd-images"
IMAGE_MANIFEST_OBJECT_NAME = "manifest.json"
UNIFIED_TARBALL_TYPE = "unified"
SPLIT_TARBALL_TYPE = "split"
TARBALL_TYPES = [UNIFIED_TARBALL_TYPE, SPLIT_TARBALL_TYPE]
def exit_with_error(error_text):
colorprint.error(error_text)
sys.exit(1)
def get_default_image(distro):
if distro.lower() == "rhel":
return "images:centos/7/amd64"
else:
return "ubuntu:16.04"
def is_swiftstack_hosted_image(base_image):
return base_image.lower().startswith(SWIFTSTACK_IMAGES_PREFIX)
def get_image_manifest(swift_container_name):
manifest_obj_url = "{}/{}/{}".format(SWIFTSTACK_IMAGES_BASE_URL,
swift_container_name,
IMAGE_MANIFEST_OBJECT_NAME)
try:
r = requests.get(manifest_obj_url)
r.raise_for_status()
return r.json()
except Exception as e:
raise Exception("Could not download container image manifest from '{}'."
"\n{}".format(manifest_obj_url, e))
def is_image_already_imported(fingerprint):
try:
run_command("lxc image info {} >/dev/null 2>&1".format(fingerprint),
shell=True)
except Exception:
return False
return True
def delete_image_with_alias(alias):
try:
run_command("lxc image delete {}".format(alias))
except Exception:
pass
def download_unified_image_file(manifest):
tarball_url = "{}/{}".format(SWIFTSTACK_IMAGES_BASE_URL,
manifest["tarball-object"])
try:
r = requests.get(tarball_url, stream=True)
r.raise_for_status()
with tempfile.NamedTemporaryFile(delete=False) as f:
file_path = f.name
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
except Exception as e:
print("Could not download file from '{}': {}".format(tarball_url, e))
return file_path
def import_unified_image(manifest, alias):
tarball_path = download_unified_image_file(manifest)
# There might be an older image with the same alias
delete_image_with_alias(alias)
run_command("lxc image import {} --alias {}".format(tarball_path, alias))
os.unlink(tarball_path)
def download_split_image_files(manifest):
metadata_tarball_url = "{}/{}".format(SWIFTSTACK_IMAGES_BASE_URL,
manifest["metadata-object"])
rootfs_tarball_url = "{}/{}".format(SWIFTSTACK_IMAGES_BASE_URL,
manifest["rootfs-object"])
file_paths = []
for url in [metadata_tarball_url, rootfs_tarball_url]:
try:
r = requests.get(url, stream=True)
r.raise_for_status()
with tempfile.NamedTemporaryFile(delete=False) as f:
file_paths.append(f.name)
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
except Exception as e:
print("Could not download file from '{}': {}".format(url, e))
return tuple(file_paths)
def import_split_image(manifest, alias):
metadata_tarball_path, rootfs_tarball_path = \
download_split_image_files(manifest)
# There might be an older image with the same alias
delete_image_with_alias(alias)
run_command("lxc image import {} {} --alias {}".format(
metadata_tarball_path, rootfs_tarball_path, alias))
os.unlink(metadata_tarball_path)
os.unlink(rootfs_tarball_path)
def import_image(manifest, alias):
'''
There are 2 possible image formats: unified and split. We support both.
For unified format, the manifest will look like this:
{
"tarball_type": "unified",
"fingerprint": "629d2c18b7bb0b52b80dfe62ae309937123d05b563ef057233e7802c9e18c018",
"tarball-object": "centos7.5/629d2c18b7bb0b52b80dfe62ae309937123d05b563ef057233e7802c9e18c018.tar.gz"
}
For split format, the manifest will look like this:
{
"tarball_type": "split",
"fingerprint": "22abbefe0c68943f264a7139c7a699a0b2adfbcf46fc661d2e89b1232301a5de",
"metadata-object": "centos7.5/meta-22abbefe0c68943f264a7139c7a699a0b2adfbcf46fc661d2e89b1232301a5de.tar.xz",
"rootfs-object": "centos7.5/22abbefe0c68943f264a7139c7a699a0b2adfbcf46fc661d2e89b1232301a5de.squashfs"
}
'''
if manifest["tarball_type"] not in TARBALL_TYPES:
raise Exception("Invalid tarball type: {}".format(
manifest["tarball_type"]))
elif manifest["tarball_type"] == UNIFIED_TARBALL_TYPE:
import_unified_image(manifest, alias)
elif manifest["tarball_type"] == SPLIT_TARBALL_TYPE:
import_split_image(manifest, alias)
else:
raise Exception("Tarball type '{}' is valid, but a method to import "
"it has not been implemented yet.")
def import_image_if_needed(base_image):
if not is_swiftstack_hosted_image(base_image):
raise Exception("{} is not an image hosted by "
"SwiftStack".format(base_image))
swift_container_name = base_image[len(SWIFTSTACK_IMAGES_PREFIX):]
manifest = get_image_manifest(swift_container_name)
if not is_image_already_imported(manifest["fingerprint"]):
print("Importing image '{}'...".format(base_image))
import_image(manifest, base_image)
else:
print("Image '{}' is already imported".format(base_image))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('distro', type=str, help='Container distro')
parser.add_argument('cname', metavar='containername', help='Container '
'name')
parser.add_argument('volsize', help='Volume size')
parser.add_argument('volcount', type=int, help='Volume count')
parser.add_argument('baseimage', nargs='?',
help='Base image. Defaults: \'images:centos/7/amd64\' '
'for RHEL distro, \'ubuntu:16.04\' otherwise')
args = parser.parse_args()
distro = args.distro
container_name = args.cname
base_image = args.baseimage
volume_size = args.volsize
volume_count = args.volcount
if is_swiftstack_hosted_image(distro):
import_image_if_needed(distro)
default_image = distro
else:
default_image = get_default_image(distro)
if base_image is None:
base_image = default_image
try:
# make a container profile that maps 8 block devices to the guest
rand_file_name = str(uuid.UUID(int=random.getrandbits(128)))
run_command("./make_lxc_profile.py {} {} {} {} > "
"/tmp/{}".format(container_name, VG_NAME, volume_size,
volume_count, rand_file_name),
cwd=SCRIPT_DIR, shell=True)
run_command("lxc profile create {}-profile".format(container_name))
run_command("cat /tmp/{} | lxc profile edit {}-profile".format(
rand_file_name, container_name), cwd=SCRIPT_DIR, shell=True)
# launch the new container
print("Trying to launch container from base image "
"{}".format(base_image))
run_command("lxc launch {} {} -p {}-profile || "
"lxc launch {} {} -p {}-profile".format(base_image,
container_name,
container_name,
default_image,
container_name,
container_name),
shell=True)
except Exception as e:
exit_with_error(str(e))
| [((14, 29, 14, 54), 'os.path.dirname', 'os.path.dirname', ({(14, 45, 14, 53): '__file__'}, {}), '(__file__)', False, 'import os\n'), ((28, 4, 28, 32), 'libs.colorprint.error', 'colorprint.error', ({(28, 21, 28, 31): 'error_text'}, {}), '(error_text)', False, 'from libs import colorprint\n'), ((29, 4, 29, 15), 'sys.exit', 'sys.exit', ({(29, 13, 29, 14): '(1)'}, {}), '(1)', False, 'import sys\n'), ((96, 4, 96, 27), 'os.unlink', 'os.unlink', ({(96, 14, 96, 26): 'tarball_path'}, {}), '(tarball_path)', False, 'import os\n'), ((128, 4, 128, 36), 'os.unlink', 'os.unlink', ({(128, 14, 128, 35): 'metadata_tarball_path'}, {}), '(metadata_tarball_path)', False, 'import os\n'), ((129, 4, 129, 34), 'os.unlink', 'os.unlink', ({(129, 14, 129, 33): 'rootfs_tarball_path'}, {}), '(rootfs_tarball_path)', False, 'import os\n'), ((181, 13, 181, 38), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ({}, {}), '()', False, 'import argparse\n'), ((49, 12, 49, 42), 'requests.get', 'requests.get', ({(49, 25, 49, 41): 'manifest_obj_url'}, {}), '(manifest_obj_url)', False, 'import requests\n'), ((78, 12, 78, 50), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((80, 13, 80, 54), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (), '', False, 'import tempfile\n'), ((108, 16, 108, 46), 'requests.get', 'requests.get', (), '', False, 'import requests\n'), ((110, 17, 110, 58), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (), '', False, 'import tempfile\n'), ((209, 43, 209, 66), 'random.getrandbits', 'random.getrandbits', ({(209, 62, 209, 65): '128'}, {}), '(128)', False, 'import random\n')] |
jfklima/prog_pratica | exercicios_antigos/ex_01.py | 72c795e3372e46f04ce0c92c05187aec651777cf | """Create a function that returns the min and max of a random numeric
sequence.
Only if statements, comparisons, recursion and functions of your own
authorship may be used.
Loops are also allowed.
The time and space complexity of the solution must be stated in the
docstring.
"""
from math import inf
def minimo_e_maximo(sequencia_numerica):
    ''' Returns the minimum and maximum of a random numeric sequence.
    Complexity:
        runtime: O(n)
        space: O(1) (three scalar variables)
'''
maximo = -inf # 1
minimo = +inf # 1
for elem in sequencia_numerica: # 1
if elem > maximo: # 2
maximo = elem # 1
if elem < minimo: # 2
minimo = elem # 2
return minimo, maximo # 1
def recursivo_minmax(sequencia_numerica):
def r_minimo(sequencia):
primeiro = sequencia[0]
if len(sequencia) == 1:
return primeiro
else:
menor = r_minimo(sequencia[1:])
return menor if menor < primeiro else primeiro
def r_maximo(sequencia):
primeiro = sequencia[0]
if len(sequencia) == 1:
return primeiro
else:
maior = r_maximo(sequencia[1:])
return maior if maior > primeiro else primeiro
return r_minimo(sequencia_numerica), r_maximo(sequencia_numerica)
def recursivo_minmax_1x(sequencia_numerica):
    # Single recursive pass that returns (min, max) together.
    primeiro = sequencia_numerica[0]
    if len(sequencia_numerica) == 1:
        return primeiro, primeiro
    else:
        menor, maior = recursivo_minmax_1x(sequencia_numerica[1:])
        return (primeiro if primeiro < menor else menor,
                primeiro if primeiro > maior else maior)
# print(minimo_e_maximo([1, 2, 3, 4]))
# print(minimo_e_maximo([1, 3, 10, 12, 44, 2, 24, 25]))
# print(minimo_e_maximo([88, 66, 10, 2, 8]))
print(recursivo_minmax([1, 2, 3, 4]))
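# Illustrative check for the completed single-pass version (expected: (1, 10)):
# print(recursivo_minmax_1x([3, 1, 10, 2]))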
| [] |
harisbal/dash-bootstrap-components | docs/demos/theme_explorer/util.py | d7c91c08e0821ccfd81330db912cde71ec57c171 | import dash_bootstrap_components as dbc
import dash_html_components as html
DBC_DOCS = (
"https://dash-bootstrap-components.opensource.faculty.ai/docs/components/"
)
def make_subheading(label, link):
slug = label.replace(" ", "")
heading = html.H2(
html.Span(
[
label,
html.A(
html.I(className="fas fa-book fa-xs ml-2"),
href=f"{DBC_DOCS}{link}",
target="_blank",
id=f"tooltip_target_{slug}",
),
],
),
)
return html.Div(
[
heading,
dbc.Tooltip(
f"See {label} documentation", target=f"tooltip_target_{slug}"
),
],
className="mt-3",
)
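# Illustrative usage sketch (label/link values are hypothetical):
#
#   subheading = make_subheading("Alerts", "alert/")
#   # -> H2 with a book icon linking to the dbc docs page, plus a hover tooltip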
| [((29, 12, 31, 13), 'dash_bootstrap_components.Tooltip', 'dbc.Tooltip', (), '', True, 'import dash_bootstrap_components as dbc\n'), ((17, 20, 17, 62), 'dash_html_components.I', 'html.I', (), '', True, 'import dash_html_components as html\n')] |
MathGaron/pytorch_toolbox | pytorch_toolbox/visualization/visdom_logger.py | 2afd13e50ba71dfce66467a4b070d9b922668502 | '''
The visualization class provides easy access to some of the Visdom functionalities.
It accepts as input a number that will be plotted over time or an image of type np.ndarray.
'''
from visdom import Visdom
import numpy as np
import numbers
class VisdomLogger:
items_iterator = {}
items_to_visualize = {}
windows = {}
vis = Visdom()
def check_availability(vis):
# check if the Visdom server is running. only once.
is_done = vis.text('visdom check')
if is_done is False:
raise RuntimeError('Visdom server is not running. Run the server first: python -m visdom.server')
else:
print('Visdom available at: %s:%s' % (vis.server, vis.port))
vis.close() # close visdom check
check_availability(vis)
@classmethod
def visualize(cls, item, name, **args):
"""
Visualize an item in a new window (if the parameter "name" is not on the list of previously given names) or
updates an existing window identified by "name"
:param item: Item to be visualized (a number or a numpy image).
:param name: String to identify the item.
:param args: dict containing options for visdom
"""
if name not in cls.items_to_visualize:
cls.new_item(item, name, **args)
else:
cls.update_item(item, name, **args)
cls.items_to_visualize[name] = item
@classmethod
def new_item(cls, item, name, **args):
if isinstance(item, numbers.Number):
cls.items_iterator[name] = 0
win = cls.vis.line(
X=np.array([cls.items_iterator[name]]),
Y=np.array([item]),
opts=dict(title=name)
)
cls.windows[name] = win
elif isinstance(item, np.ndarray):
win = cls.vis.image(
item,
opts=args,
)
cls.windows[name] = win
else:
print("type {} not supported for visualization".format(type(item)))
@classmethod
def update_item(cls, item, name, **args):
if isinstance(item, numbers.Number):
cls.vis.line(
                # to plot the number we need to give its position on the x axis,
                # hence we keep track of how many times we have updated this item
                # (stored in items_iterator)
X=np.array([cls.items_iterator[name]]),
Y=np.array([item]),
win=cls.windows[name],
update='append'
)
cls.items_iterator[name] += 1
elif isinstance(item, np.ndarray):
cls.vis.image(
item,
opts=args,
win=cls.windows[name]
)
else:
print("type {} not supported for visualization".format(type(item)))
| [((15, 10, 15, 18), 'visdom.Visdom', 'Visdom', ({}, {}), '()', False, 'from visdom import Visdom\n'), ((47, 18, 47, 54), 'numpy.array', 'np.array', ({(47, 27, 47, 53): '[cls.items_iterator[name]]'}, {}), '([cls.items_iterator[name]])', True, 'import numpy as np\n'), ((48, 18, 48, 34), 'numpy.array', 'np.array', ({(48, 27, 48, 33): '[item]'}, {}), '([item])', True, 'import numpy as np\n'), ((67, 18, 67, 54), 'numpy.array', 'np.array', ({(67, 27, 67, 53): '[cls.items_iterator[name]]'}, {}), '([cls.items_iterator[name]])', True, 'import numpy as np\n'), ((68, 18, 68, 34), 'numpy.array', 'np.array', ({(68, 27, 68, 33): '[item]'}, {}), '([item])', True, 'import numpy as np\n')] |
gyyang/olfaction_evolution | analytical/conditionnumber.py | 434baa85b91f450e1ab63c6b9eafb8d370f1df96 | """Analyze condition number of the network."""
import numpy as np
import matplotlib.pyplot as plt
# import model
def _get_sparse_mask(nx, ny, non, complex=False, nOR=50):
"""Generate a binary mask.
The mask will be of size (nx, ny)
For all the nx connections to each 1 of the ny units, only non connections are 1.
Args:
nx: int
ny: int
non: int, must not be larger than nx
Return:
mask: numpy array (nx, ny)
"""
mask = np.zeros((nx, ny))
if not complex:
mask[:non] = 1
for i in range(ny):
np.random.shuffle(mask[:, i]) # shuffling in-place
return mask.astype(np.float32)
def _get_cond(q, n_orn, n_pn, n_kc, n_kc_claw):
M = np.random.rand(n_orn, n_pn)
M_new = M * (1-q) + np.eye(n_orn) * q
# J = np.random.rand(N_PN, N_KC) / np.sqrt(N_PN + N_KC)
# J = np.random.randn(N_PN, N_KC) / np.sqrt(N_PN + N_KC)
J = np.random.rand(n_pn, n_kc)
mask = _get_sparse_mask(n_pn, n_kc, n_kc_claw) / n_kc_claw
J = J * mask
K = np.dot(M_new, J)
# cond = np.linalg.cond(K)
cond = np.linalg.norm(np.linalg.pinv(K)) * np.linalg.norm(K)
return cond
def get_logcond(q=1, n_orn=50, n_pn=50, n_kc=2500, n_kc_claw=7, n_rep=10):
conds = [_get_cond(q, n_orn, n_pn, n_kc, n_kc_claw) for i in range(n_rep)]
return np.mean(np.log10(conds))
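# Illustrative call sketch (arguments mirror the defaults above):
#
#   get_logcond(q=1, n_kc_claw=7)   # mean log10 condition number over n_rep draws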
def plot_cond_by_q(n_kc=2500):
qs = np.linspace(0, 1, 100)
conds = [get_logcond(q=q, n_kc=n_kc) for q in qs]
plt.figure()
plt.plot(qs, conds, 'o-')
plt.title('N_KC: ' + str(n_kc))
plt.xlabel('fraction diagonal')
plt.ylabel('log condition number')
# plt.savefig('figures/condvsfracdiag_nkc'+str(n_kc)+'.pdf', transparent=True)
def plot_cond_by_n_kc():
n_kcs = np.logspace(1, 4, 10).astype(int)
conds_q1 = np.array([get_logcond(n_kc=n_kc, q=1) for n_kc in n_kcs])
plt.figure()
plt.plot(np.log10(n_kcs), conds_q1, 'o-')
plt.xticks(np.log10(n_kcs), n_kcs)
plt.xlabel('N_KC')
n_kcs = np.logspace(1, 4, 10).astype(int)
conds_q0 = np.array([get_logcond(n_kc=n_kc, q=0) for n_kc in n_kcs])
plt.figure()
plt.plot(np.log10(n_kcs), conds_q0, 'o-')
plt.xticks(np.log10(n_kcs), n_kcs)
plt.xlabel('N_KC')
plt.figure()
plt.plot(np.log10(n_kcs), conds_q1 - conds_q0, 'o-')
plt.xticks(np.log10(n_kcs), n_kcs)
plt.ylabel('Log decrease in condition number')
plt.xlabel('N_KC')
n_kc_claws = np.arange(1, 50)
conds = np.array([get_logcond(n_kc_claw=n) for n in n_kc_claws])
plt.figure()
plt.plot(n_kc_claws, conds, 'o-')
plt.xticks(n_kc_claws)
plt.xlabel('N_KC_claw')
plt.show()
| [((91, 13, 91, 29), 'numpy.arange', 'np.arange', ({(91, 23, 91, 24): '1', (91, 26, 91, 28): '50'}, {}), '(1, 50)', True, 'import numpy as np\n'), ((94, 0, 94, 12), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((95, 0, 95, 33), 'matplotlib.pyplot.plot', 'plt.plot', ({(95, 9, 95, 19): 'n_kc_claws', (95, 21, 95, 26): 'conds', (95, 28, 95, 32): '"o-"'}, {}), "(n_kc_claws, conds, 'o-')", True, 'import matplotlib.pyplot as plt\n'), ((96, 0, 96, 22), 'matplotlib.pyplot.xticks', 'plt.xticks', ({(96, 11, 96, 21): 'n_kc_claws'}, {}), '(n_kc_claws)', True, 'import matplotlib.pyplot as plt\n'), ((97, 0, 97, 23), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(97, 11, 97, 22): '"N_KC_claw"'}, {}), "('N_KC_claw')", True, 'import matplotlib.pyplot as plt\n'), ((98, 0, 98, 10), 'matplotlib.pyplot.show', 'plt.show', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((22, 11, 22, 29), 'numpy.zeros', 'np.zeros', ({(22, 20, 22, 28): '(nx, ny)'}, {}), '((nx, ny))', True, 'import numpy as np\n'), ((31, 8, 31, 35), 'numpy.random.rand', 'np.random.rand', ({(31, 23, 31, 28): 'n_orn', (31, 30, 31, 34): 'n_pn'}, {}), '(n_orn, n_pn)', True, 'import numpy as np\n'), ((36, 8, 36, 34), 'numpy.random.rand', 'np.random.rand', ({(36, 23, 36, 27): 'n_pn', (36, 29, 36, 33): 'n_kc'}, {}), '(n_pn, n_kc)', True, 'import numpy as np\n'), ((40, 8, 40, 24), 'numpy.dot', 'np.dot', ({(40, 15, 40, 20): 'M_new', (40, 22, 40, 23): 'J'}, {}), '(M_new, J)', True, 'import numpy as np\n'), ((55, 9, 55, 31), 'numpy.linspace', 'np.linspace', ({(55, 21, 55, 22): '0', (55, 24, 55, 25): '1', (55, 27, 55, 30): '100'}, {}), '(0, 1, 100)', True, 'import numpy as np\n'), ((58, 4, 58, 16), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((59, 4, 59, 29), 'matplotlib.pyplot.plot', 'plt.plot', ({(59, 13, 59, 15): 'qs', (59, 17, 59, 22): 'conds', (59, 24, 59, 28): '"o-"'}, {}), "(qs, conds, 'o-')", True, 'import matplotlib.pyplot as plt\n'), ((61, 4, 61, 35), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(61, 15, 61, 34): '"fraction diagonal"'}, {}), "('fraction diagonal')", True, 'import matplotlib.pyplot as plt\n'), ((62, 4, 62, 38), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(62, 15, 62, 37): '"log condition number"'}, {}), "('log condition number')", True, 'import matplotlib.pyplot as plt\n'), ((70, 4, 70, 16), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((73, 4, 73, 22), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(73, 15, 73, 21): '"N_KC"'}, {}), "('N_KC')", True, 'import matplotlib.pyplot as plt\n'), ((79, 4, 79, 16), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((82, 4, 82, 22), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(82, 15, 82, 21): '"N_KC"'}, {}), "('N_KC')", True, 'import matplotlib.pyplot as plt\n'), ((84, 4, 84, 16), 'matplotlib.pyplot.figure', 'plt.figure', ({}, {}), '()', True, 'import matplotlib.pyplot as plt\n'), ((87, 4, 87, 50), 'matplotlib.pyplot.ylabel', 'plt.ylabel', ({(87, 15, 87, 49): '"Log decrease in condition number"'}, {}), "('Log decrease in condition number')", True, 'import matplotlib.pyplot as plt\n'), ((88, 4, 88, 22), 'matplotlib.pyplot.xlabel', 'plt.xlabel', ({(88, 15, 88, 21): '"N_KC"'}, {}), "('N_KC')", True, 'import matplotlib.pyplot as plt\n'), ((44, 47, 44, 64), 'numpy.linalg.norm', 'np.linalg.norm', ({(44, 62, 44, 63): 'K'}, {}), '(K)', True, 'import numpy as np\n'), ((50, 19, 50, 34), 'numpy.log10', 'np.log10', ({(50, 28, 50, 33): 'conds'}, {}), '(conds)', True, 'import numpy as np\n'), ((71, 13, 71, 28), 'numpy.log10', 'np.log10', ({(71, 22, 71, 27): 'n_kcs'}, {}), '(n_kcs)', True, 'import numpy as np\n'), ((72, 15, 72, 30), 'numpy.log10', 'np.log10', ({(72, 24, 72, 29): 'n_kcs'}, {}), '(n_kcs)', True, 'import numpy as np\n'), ((80, 13, 80, 28), 'numpy.log10', 'np.log10', ({(80, 22, 80, 27): 'n_kcs'}, {}), '(n_kcs)', True, 'import numpy as np\n'), ((81, 15, 81, 30), 'numpy.log10', 'np.log10', ({(81, 24, 81, 29): 'n_kcs'}, {}), '(n_kcs)', True, 'import numpy as np\n'), ((85, 13, 85, 28), 'numpy.log10', 'np.log10', ({(85, 22, 85, 27): 'n_kcs'}, {}), '(n_kcs)', True, 'import numpy as np\n'), ((86, 15, 86, 30), 'numpy.log10', 'np.log10', ({(86, 24, 86, 29): 'n_kcs'}, {}), '(n_kcs)', True, 'import numpy as np\n'), ((27, 12, 27, 41), 'numpy.random.shuffle', 'np.random.shuffle', ({(27, 30, 27, 40): 'mask[:, (i)]'}, {}), '(mask[:, (i)])', True, 'import numpy as np\n'), ((32, 24, 32, 37), 'numpy.eye', 'np.eye', ({(32, 31, 32, 36): 'n_orn'}, {}), '(n_orn)', True, 'import numpy as np\n'), ((44, 26, 44, 43), 'numpy.linalg.pinv', 'np.linalg.pinv', ({(44, 41, 44, 42): 'K'}, {}), '(K)', True, 'import numpy as np\n'), ((67, 12, 67, 33), 'numpy.logspace', 'np.logspace', ({(67, 24, 67, 25): '1', (67, 27, 67, 28): '4', (67, 30, 67, 32): '10'}, {}), '(1, 4, 10)', True, 'import numpy as np\n'), ((76, 12, 76, 33), 'numpy.logspace', 'np.logspace', ({(76, 24, 76, 25): '1', (76, 27, 76, 28): '4', (76, 30, 76, 32): '10'}, {}), '(1, 4, 10)', True, 'import numpy as np\n')] |