Dataset schema (column types and string-length statistics as reported by the dataset viewer):

column      type          stats
file_name   large_string  lengths 4 to 140
prefix      large_string  lengths 0 to 12.1k
suffix      large_string  lengths 0 to 12k
middle      large_string  lengths 0 to 7.51k
fim_type    large_string  4 classes
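Each record below pairs a source file with a fill-in-the-middle (FIM) split: the original file is always prefix + middle + suffix, in that order, and fim_type names the strategy used to pick the middle span (conditional_block, identifier_body, identifier_name, or random_line_split). The code for each record is shown reassembled in file order. A minimal sketch of consuming one record; the helper names, sentinel tokens, and the toy record here are illustrative, not part of the dataset:

def reconstruct_file(record):
    """The original file is prefix + middle + suffix, in that order."""
    return record["prefix"] + record["middle"] + record["suffix"]

def to_fim_prompt(record, pre="<|fim_prefix|>", suf="<|fim_suffix|>", mid="<|fim_middle|>"):
    """Common prefix-suffix-middle prompt layout; the sentinel tokens are
    placeholders -- real models define their own."""
    return pre + record["prefix"] + suf + record["suffix"] + mid

record = {"prefix": "def add(a, b):\n    ", "middle": "return a + b",
          "suffix": "\n", "fim_type": "identifier_body"}
assert reconstruct_file(record) == "def add(a, b):\n    return a + b\n"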
generator.py
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

"""Extracts OpenStack config option info from module(s)."""

from __future__ import print_function

import argparse
import imp
import os
import re
import socket
import sys
import textwrap

from oslo.config import cfg
import six
import stevedore.named

from marconi.openstack.common import gettextutils
from marconi.openstack.common import importutils

gettextutils.install('marconi')

STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
DICTOPT = "DictOpt"
MULTISTROPT = "MultiStrOpt"

OPT_TYPES = {
    STROPT: 'string value',
    BOOLOPT: 'boolean value',
    INTOPT: 'integer value',
    FLOATOPT: 'floating point value',
    LISTOPT: 'list value',
    DICTOPT: 'dict value',
    MULTISTROPT: 'multi valued',
}

OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
                                              FLOATOPT, LISTOPT, DICTOPT,
                                              MULTISTROPT]))

PY_EXT = ".py"
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       "../../../../"))
WORDWRAP_WIDTH = 60


def generate(argv):
    parser = argparse.ArgumentParser(
        description='generate sample configuration file',
    )
    parser.add_argument('-m', dest='modules', action='append')
    parser.add_argument('-l', dest='libraries', action='append')
    parser.add_argument('srcfiles', nargs='*')
    parsed_args = parser.parse_args(argv)

    mods_by_pkg = dict()
    for filepath in parsed_args.srcfiles:
        pkg_name = filepath.split(os.sep)[1]
        mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
                            os.path.basename(filepath).split('.')[0]])
        mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
    # NOTE(lzyeval): place top level modules before packages
    pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT))
    ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names)
    pkg_names.extend(ext_names)

    # opts_by_group is a mapping of group name to an options list
    # The options list is a list of (module, options) tuples
    opts_by_group = {'DEFAULT': []}

    if parsed_args.modules:
        for module_name in parsed_args.modules:
            module = _import_module(module_name)
            if module:
                for group, opts in _list_opts(module):
                    opts_by_group.setdefault(group, []).append((module_name,
                                                                opts))

    # Look for entry points defined in libraries (or applications) for
    # option discovery, and include their return values in the output.
    #
    # Each entry point should be a function returning an iterable
    # of pairs with the group name (or None for the default group)
    # and the list of Opt instances for that group.
    if parsed_args.libraries:
        loader = stevedore.named.NamedExtensionManager(
            'oslo.config.opts',
            names=list(set(parsed_args.libraries)),
            invoke_on_load=False,
        )
        for ext in loader:
            for group, opts in ext.plugin():
                opt_list = opts_by_group.setdefault(group or 'DEFAULT', [])
                opt_list.append((ext.name, opts))

    for pkg_name in pkg_names:
        mods = mods_by_pkg.get(pkg_name)
        mods.sort()
        for mod_str in mods:
            if mod_str.endswith('.__init__'):
                mod_str = mod_str[:mod_str.rfind(".")]
            mod_obj = _import_module(mod_str)
            if not mod_obj:
                raise RuntimeError("Unable to import module %s" % mod_str)
            for group, opts in _list_opts(mod_obj):
                opts_by_group.setdefault(group, []).append((mod_str, opts))

    print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
    for group in sorted(opts_by_group.keys()):
        print_group_opts(group, opts_by_group[group])


def _import_module(mod_str):
    try:
        if mod_str.startswith('bin.'):
            imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
            return sys.modules[mod_str[4:]]
        else:
            return importutils.import_module(mod_str)
    except Exception as e:
        sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
        return None


def _is_in_group(opt, group):
    "Check if opt is in group."
    for value in group._opts.values():
        # NOTE(llu): Temporary workaround for bug #1262148, wait until
        # newly released oslo.config support '==' operator.
        if not(value['opt'] != opt):
            return True
    return False


def _guess_groups(opt, mod_obj):
    # is it in the DEFAULT group?
    if _is_in_group(opt, cfg.CONF):
        return 'DEFAULT'

    # what other groups is it in?
    for value in cfg.CONF.values():
        if isinstance(value, cfg.CONF.GroupAttr):
            if _is_in_group(opt, value._group):
                return value._group.name

    raise RuntimeError(
        "Unable to find group for option %s, "
        "maybe it's defined twice in the same group?"
        % opt.name
    )


def _list_opts(obj):
    def is_opt(o):
        return (isinstance(o, cfg.Opt) and
                not isinstance(o, cfg.SubCommandOpt))

    opts = list()
    for attr_str in dir(obj):
        attr_obj = getattr(obj, attr_str)
        if is_opt(attr_obj):
            opts.append(attr_obj)
        elif (isinstance(attr_obj, list) and
              all(map(lambda x: is_opt(x), attr_obj))):
            opts.extend(attr_obj)

    ret = {}
    for opt in opts:
        ret.setdefault(_guess_groups(opt, obj), []).append(opt)
    return ret.items()


def print_group_opts(group, opts_by_module):
    print("[%s]" % group)
    print('')
    for mod, opts in opts_by_module:
        print('#')
        print('# Options defined in %s' % mod)
        print('#')
        print('')
        for opt in opts:
            _print_opt(opt)
        print('')


def _get_my_ip():
    try:
        csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        csock.connect(('8.8.8.8', 80))
        (addr, port) = csock.getsockname()
        csock.close()
        return addr
    except socket.error:
        return None


def _sanitize_default(name, value):
    """Set up a reasonably sensible default for pybasedir, my_ip and host."""
    if value.startswith(sys.prefix):
        # NOTE(jd) Don't use os.path.join, because it is likely to think the
        # second part is an absolute pathname and therefore drop the first
        # part.
        value = os.path.normpath("/usr/" + value[len(sys.prefix):])
    elif value.startswith(BASEDIR):
        return value.replace(BASEDIR, '/usr/lib/python/site-packages')
    elif BASEDIR in value:
        return value.replace(BASEDIR, '')
    elif value == _get_my_ip():
        return '10.0.0.1'
    elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
        return 'marconi'
    elif value.strip() != value:
        return '"%s"' % value
    return value


def _print_opt(opt):
    opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
    if not opt_help:
        sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
        opt_help = ""
    opt_type = None
    try:
        opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
    except (ValueError, AttributeError) as err:
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    opt_help = u'%s (%s)' % (opt_help, OPT_TYPES[opt_type])
    print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
    if opt.deprecated_opts:
        for deprecated_opt in opt.deprecated_opts:
            if deprecated_opt.name:
                deprecated_group = (deprecated_opt.group if
                                    deprecated_opt.group else "DEFAULT")
                print('# Deprecated group/name - [%s]/%s' %
                      (deprecated_group, deprecated_opt.name))
    try:
        if opt_default is None:
            print('#%s=<None>' % opt_name)
        elif opt_type == STROPT:
            assert(isinstance(opt_default, six.string_types))
            print('#%s=%s' % (opt_name,
                              _sanitize_default(opt_name, opt_default)))
        elif opt_type == BOOLOPT:
            assert(isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, str(opt_default).lower()))
        elif opt_type == INTOPT:
            assert(isinstance(opt_default, int) and
                   not isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, opt_default))
conditional_block
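The generator infers each option's documentation type by regex-matching the option class name out of str(type(opt)), then prints a commented sample value for it. A standalone check of just that matching step (standard library only; the hypothetical type_repr stands in for a real oslo.config option):

import re

STROPT, BOOLOPT, INTOPT = "StrOpt", "BoolOpt", "IntOpt"
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT]))

# str(type(opt)) for an oslo.config StrOpt looks roughly like this:
type_repr = "<class 'oslo.config.cfg.StrOpt'>"
assert OPTION_REGEX.search(type_repr).group(0) == "StrOpt"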
common.py
team_mapping = {
    "SY": "Sydney", "WB": "Western Bulldogs", "WC": "West Coast",
    "HW": "Hawthorn", "GE": "Geelong", "FR": "Fremantle",
    "RI": "Richmond", "CW": "Collingwood", "CA": "Carlton",
    "GW": "Greater Western Sydney", "AD": "Adelaide", "GC": "Gold Coast",
    "ES": "Essendon", "ME": "Melbourne", "NM": "North Melbourne",
    "PA": "Port Adelaide", "BL": "Brisbane Lions", "SK": "St Kilda"
}


def get_team_name(code):
    return team_mapping[code]


def get_team_code(full_name):
    for code, name in team_mapping.items():
        if name == full_name:
            return code
    return full_name


def get_match_description(response):
    match_container = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
    match_details = match_container.xpath(".//text()").extract()
    return {
        "round": match_details[1],
        "venue": match_details[3],
        "date": match_details[6],
        "attendance": match_details[8],
        "homeTeam": response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first(),
        "awayTeam": response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first(),
        "homeScore": int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first()),
        "awayScore": int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
    }


def get_match_urls(response):
    for match in response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract():
        yield response.urljoin(match)
identifier_body
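get_team_code is the inverse lookup of get_team_name and falls back to returning its input when no code matches. A quick usage check against the mapping above:

assert get_team_name("GE") == "Geelong"
assert get_team_code("Geelong") == "GE"
assert get_team_code("Fitzroy") == "Fitzroy"  # unknown names pass through unchanged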
main.rs
extern crate event_handler;
extern crate client;

use std::net::TcpListener;
use std::thread;

use client::*;
use event_handler::*;

fn main() {
    // Create the listen port that will handle client connections
    let listener = TcpListener::bind("127.0.0.1:9000").unwrap();
    println!("Start to listen, ready to accept");

    // Create the logic that will handle the various events
    let sample_event_handler = EventHandler::new();

    // Obtain the channel (Sender) used to deliver client-connection events
    let _tx = sample_event_handler.get_transmitter().unwrap();

    thread::spawn(move || {
        handle_event_handler(sample_event_handler);
    });

    for stream in listener.incoming() {
        match stream {
            Ok(new_stream) => {
                // When a new client connects, build the event and forward it
                // to the event-handling thread
                let new_client = Client::new(new_stream, Some(_tx.clone()));
                _tx.send(Signal::NewClient(new_client));
            }
            Err(_) => {
                println!("connection failed");
            }
        }
    }

    drop(listener);
}
conditional_block
gsinfo.py
# NOTE: this record starts mid-file; the import block below is reconstructed
# from usage and was not part of the captured prefix.
import json
import socket

import aiohttp
import discord
import valve.source.a2s
import valve.source.messages
from bs4 import BeautifulSoup
from discord.ext import commands

intervals = (
    ('weeks', 604800),   # 60 * 60 * 24 * 7
    ('days', 86400),     # 60 * 60 * 24
    ('hours', 3600),     # 60 * 60
    ('minutes', 60),
    ('seconds', 1),
)


def display_time(seconds, granularity=2):
    # Thanks economy.py
    result = []
    # And thanks http://stackoverflow.com/a/24542445
    for name, count in intervals:
        value = seconds // count
        if value:
            seconds -= value * count
            if value == 1:
                name = name.rstrip('s')
            result.append("{} {}".format(int(value), name))
    return ', '.join(result[:granularity])


class GSInfo:
    def __init__(self, bot):
        self.bot = bot

    async def _query_players(self, ip, port, msgtoedit):
        """Internal function for querying playerlists, keeps it from being piled into one function."""
        try:
            # ServerQuerier expects an (ip, port) address tuple
            server = valve.source.a2s.ServerQuerier((ip, port))
            info = server.info()
            do_players = True
            try:
                players = server.players()
            except valve.source.messages.BrokenMessageError:
                do_players = False
            # NOTE: `muteplayers` (and `validate_ip`, used further down) are
            # defined elsewhere in the cog and not shown in this record.
            if info["player_count"] > 0 and (do_players or not muteplayers):
                playerlist = discord.Embed(title="Players",
                                           description="Playerlist for {}".format(info["server_name"]),
                                           colour=0x6666ff)
                count = 0
                charlength = 0
                for player in sorted(players['players'], key=lambda p: p['score'], reverse=True):
                    # Start a fresh embed when the current one fills up
                    if (count == 16) or (charlength > 1300):
                        playerlist.set_footer(text="Non-ascii characters are replaced with '?'")
                        await self.bot.say(embed=playerlist)
                        count = 0
                        charlength = 0
                        playerlist = discord.Embed(title="Players (continued)",
                                                   description="Playerlist for {}".format(info["server_name"]),
                                                   colour=0x6666ff)
                    name = ''.join([i if ord(i) < 128 else '?' for i in player["name"]])
                    if player['name'] == "":
                        name = "_CONNECTING_"
                    value = "**Score**: {}\nConnected for {}".format(str(player['score']),
                                                                     display_time(player['duration']))
                    playerlist.add_field(name=name, value=value)
                    charlength += len(name) + len(value)
                    count += 1
                playerlist.set_footer(text="Non-ascii characters are replaced with '?'")
                await self.bot.say(embed=playerlist)
            elif do_players == False:
                await self.bot.say(":warning: **Warning!** `This gameserver is not reporting its playerlist.`\nThis is a feature in CS:GO and some other games. We cannot display the playerlist for this server.")
            elif info["player_count"] == 0:
                await self.bot.say(":no_entry: **Error!** Playercount is 0, there is nothing for me to list.")
            await self.bot.delete_message(msgtoedit)
            msg_return = "Join this server by clicking here --> steam://connect/" + ip + ":" + str(port)
            await self.bot.say(msg_return)
            return None
        except valve.source.a2s.NoResponseError:
            return ":no_entry: **Error!** `Request timed out, is there a server at that address?`"

    async def _query_server(self, ip, port, msgtoedit):
        """Internal function for querying servers, keeps it from being piled into one function."""
        try:
            server = valve.source.a2s.ServerQuerier((ip, port))
            info = server.info()
            do_players = True
            try:
                players = server.players()
            except valve.source.messages.BrokenMessageError:
                do_players = False
            vac_enabled = "No"
            if info["vac_enabled"] == 1:
                vac_enabled = "Yes"
            msg_server = "**{}**".format(info["server_name"])
            msg_server += "\n:video_game: **Game:** {}\n:map: **Map:** {}\n:shield: **VAC Secured:** {}\n:robot: **Bot Count:** {}\n:basketball_player: **Player Count:** {}/{}".format(
                info["game"], info["map"], vac_enabled, info["bot_count"],
                info["player_count"], info["max_players"])
            someurl = "http://api.steampowered.com/ISteamUserStats/GetSchemaForGame/v2/?key=079D7A41C1ECEF33960716B75A7B39D4&appid={}".format(info["app_id"])
            async with aiohttp.get(someurl) as response:
                soupObject = BeautifulSoup(await response.text(), "html.parser")
            try:
                gameschema = json.loads(soupObject.get_text())
                #gameschema = json.loads(response.read())
                if gameschema["game"]["gameName"] == info["game"]:
                    game = "Running **{}**".format(info["game"])
                else:
                    game = "Running _{}_ on **{}**".format(info["game"],
                                                           gameschema["game"]["gameName"])
            except Exception:
                game = "Running **{}**".format(info["game"])
            connection_url = "steam://connect/{}:{}".format(ip, str(port))
            playercount = "{}/{}".format(info["player_count"], info["max_players"])
            data = discord.Embed(description=game, color=0x00ff00)
            data.add_field(name="Current Map", value=info['map'])
            data.add_field(name="VAC Secured", value=vac_enabled)
            data.add_field(name="Player Count", value=playercount)
            data.add_field(name="Bot Count", value=str(info['bot_count']))
            data.add_field(name="Connect", value="steam://connect/{}:{}".format(ip, str(port)))
            #data.set_footer(text="Click here to connect to this server!")
            data.set_author(name=info["server_name"])
            playermsgs = []
            try:
                #await self.bot.edit_message(msgtoedit,"",embed=data)
                await self.bot.say(embed=data)
                await self.bot.delete_message(msgtoedit)
            except discord.HTTPException:
                await self.bot.edit_message(msgtoedit, msg_server)
            if do_players == False:
                await self.bot.say(":warning: **Warning!** `This gameserver is not reporting its playerlist.`\nThis is a feature in CS:GO and some other games. We cannot display the playerlist for this server.")
            return None
        except valve.source.a2s.NoResponseError:
            return ":no_entry: **Error!** `Request timed out, is there a server at that address?`"

    @commands.command(pass_context=True, no_pm=True)
    async def gsinfo(self, ctx, ip: str, port: int=27015):
        """Queries a source engine server for information.

        Arguments:
        <ip> | An IP address or URL that points to a source-engine server.

        This one uses a test for embedding playercount."""
        msgtoedit = await self.bot.say("_Working, please wait..._")
        ipport = ip.split(":")
        try:
            port = int(ipport[1])
        except IndexError:
            pass
        except ValueError:
            await self.bot.say(":warning: **Warning!** `Detected port looked funny and couldn't be turned into an integer, defaulting to 27015`")
            port = int(27015)
        else:
            ip = ipport[0]
        if port > 65535 or port < 1:
            msg = ":no_entry: **Error!** `Port out of range, expected a value between 1 and 65535 inclusive, got {}`".format(str(port))
        else:
            try:
                valid_ip = validate_ip(ip)
            except IndexError:
                valid_ip = False
            if valid_ip:
                msg = await self._query_server(ip, port, msgtoedit)
            else:
                try:
                    ip = socket.gethostbyname(ip)
                except socket.error:
                    msg = ":no_entry: **Error!** `Invalid IP address, invalid URL, or no IP address resolved from that URL. I'm not sure which.`"
                else:
                    msg = await self._query_server(ip, port, msgtoedit)
        if msg:
            await self.bot.edit_message(msgtoedit, msg)

    @commands.command(pass_context=True, no_pm=True)
    async def gsplayers(self, ctx, ip: str, port: int=27015):
        """Queries a source engine server for players.

        Arguments:
        <ip> | An IP address or URL that points to a source-engine server.

        This one uses a test for embedding playercount."""
        msgtoedit = await self.bot.say("_Working, please wait..._")
        ipport = ip.split(":")
        try:
            port = int(ipport[1])
        except IndexError:
            pass
        except ValueError:
            await self.bot.say(":warning: **Warning!** `Detected port looked funny and couldn't be turned into an integer, defaulting to 27015`")
            port = int(27015)
        else:
            ip = ipport[0]
        if port > 65535 or port < 1:
            msg = ":no_entry: **Error!** `Port out of range, expected a value between 1 and 65535 inclusive, got {}`".format(str(port))
        else:
            try:
                valid_ip = validate_ip(ip)
            except IndexError:
                valid_ip = False
            if valid_ip:
                msg = await self._query_players(ip, port, msgtoedit)
            else:
                try:
                    ip = socket.gethostbyname(ip)
                except socket.error:
                    msg = ":no_entry: **Error!** `Invalid IP address, invalid URL, or no IP address resolved from that URL. I'm not sure which.`"
                else:
                    msg = await self._query_players(ip, port, msgtoedit)
        if msg:
            await self.bot.edit_message(msgtoedit, msg)
random_line_split
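display_time greedily decomposes a number of seconds into at most `granularity` units, singularizing a unit name when its count is 1. Two worked examples (pure Python, independent of the Discord machinery):

assert display_time(3700) == "1 hour, 1 minute"  # 40 leftover seconds fall outside granularity=2
assert display_time(90061, granularity=3) == "1 day, 1 hour, 1 minute"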
GitIgnore.ts
import {TemplateInterface} from './../../TemplateInterface';
const html = require('common-tags').html;

/**
 * create the .gitignore file for the Project
 * @link [template](https://github.com/github/gitignore/blob/master/Node.gitignore)
 */
export class GitIgnore implements TemplateInterface {
  public publishName: string = '.gitignore';
  public name: string = 'gitignore';

  get template() {
    return (html`
      # Logs
      logs
      *.log
      npm-debug.log*

      # Runtime data
      pids
      *.pid
      *.seed

      # Directory for instrumented libs generated by jscoverage/JSCover
      lib-cov

      # Coverage directory used by tools like istanbul
      coverage

      # nyc test coverage
      .nyc_output

      # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
      .grunt

      # node-waf configuration
      .lock-wscript

      # Compiled binary addons (http://nodejs.org/api/addons.html)
      build/Release

      # Dependency directories
      node_modules
      jspm_packages

      # Optional npm cache directory
      .npm

      # Optional REPL history
      .node_repl_history
    `);
  }
}
identifier_name
trainer.py
# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The functions for computing gradient updates."""

from typing import Callable, NamedTuple, Sequence

import chex
import haiku as hk
import jax
import optax

from brave.datasets import datasets
from brave.models import embedding_model


class ModelUpdates(NamedTuple):
  params: hk.Params
  state: hk.State
  opt_state: optax.OptState
  scalars: embedding_model.Scalars


UpdateFn = Callable[
    [chex.PRNGKey, datasets.MiniBatch, hk.Params, hk.State, optax.OptState],
    ModelUpdates]


def build_update_fn(optimizer: optax.GradientTransformation,
                    loss_fn: embedding_model.LossFn) -> UpdateFn:
  """Returns a function for computing model updates.

  Args:
    optimizer: The optimizer to use e.g. the result of optax.sgd(...).
    loss_fn: An instance of the loss function, pmapped across all devices.

  Returns:
    A callable function that takes one step in the optimization problem using
    the gradients of the loss computed by the model loss function.
  """

  def update_fn(rng: chex.PRNGKey, minibatch: datasets.MiniBatch,
                params: hk.Params, state: hk.State,
                opt_state: optax.OptState) -> ModelUpdates:
    grad_fn = jax.grad(loss_fn, has_aux=True)
    grad, (state, scalars) = grad_fn(params, state, rng, minibatch)
    grad = jax.lax.pmean(grad, axis_name='i')
    scalars = jax.lax.pmean(scalars, axis_name='i')
    updates, opt_state = optimizer.update(grad, opt_state, params)
    params = optax.apply_updates(params, updates)
    return ModelUpdates(params, state, opt_state, scalars)

  return update_fn


def get_batch_dims(global_batch_size: int, device_count: int,
                   local_device_count: int) -> Sequence[int]:
  """Compute the batch dims for this host.

  The global_batch_size is the number of data samples that are optimized over
  in one step of the optimization. This value must be split up so that each
  individual device gets some share of the batch.

  When running with multiple devices, there may be multiple hosts, each with
  multiple local devices. Each host has a local copy of the program, and runs
  a local copy of the code. Each host must therefore use a batch size so that
  when all of the hosts run together, the total number of batched elements
  matches the global batch size. We do this by splitting up the global batch
  size evenly amongst all devices, and setting the batch size per host to the
  number of host devices times the device batch size.

  Args:
    global_batch_size: The target total batch size per optimization step.
    device_count: The total number of devices sharing computation per step.
    local_device_count: The number of devices available on the current host.

  Returns:
    The batch dimensions to use on the currently running host.
  """
  per_device_batch_size, remainder = divmod(global_batch_size, device_count)
  if remainder:
    # The original message reported local_device_count here; the split is
    # actually across the total device_count.
    raise ValueError(
        f'Cannot split batch of {global_batch_size} evenly across '
        f'{device_count} devices.')
  host_batch_dims = (local_device_count, per_device_batch_size)
  return host_batch_dims
identifier_name
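A sketch of how build_update_fn is typically wired into a pmapped training step; loss_fn and its construction are stand-ins here, not part of the file above:

# Hypothetical wiring, assuming loss_fn(params, state, rng, minibatch)
# returns (loss, (state, scalars)) as build_update_fn expects.
optimizer = optax.sgd(learning_rate=1e-3)
update_fn = build_update_fn(optimizer, loss_fn)
# axis_name must match the jax.lax.pmean calls inside update_fn.
pmapped_update = jax.pmap(update_fn, axis_name='i')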
trainer.py
# Copyright 2021 DeepMind Technologies Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """The functions for computing gradient updates.""" from typing import Callable, NamedTuple, Sequence import chex import haiku as hk import jax import optax from brave.datasets import datasets from brave.models import embedding_model class ModelUpdates(NamedTuple): params: hk.Params state: hk.State opt_state: optax.OptState scalars: embedding_model.Scalars UpdateFn = Callable[ [chex.PRNGKey, datasets.MiniBatch, hk.Params, hk.State, optax.OptState], ModelUpdates] def build_update_fn(optimizer: optax.GradientTransformation, loss_fn: embedding_model.LossFn) -> UpdateFn: """Returns a function for computing model updates. Args: optimizer: The optimizer to use e.g. the result of optax.sgd(...). loss_fn: An instance of the loss function, pmapped across all devices. Returns: A callable function that takes one step in the optimization problem using the gradients of the loss computed by the model loss function. """ def update_fn(rng: chex.PRNGKey, minibatch: datasets.MiniBatch, params: hk.Params, state: hk.State, opt_state: optax.OptState) -> ModelUpdates: grad_fn = jax.grad(loss_fn, has_aux=True) grad, (state, scalars) = grad_fn(params, state, rng, minibatch) grad = jax.lax.pmean(grad, axis_name='i') scalars = jax.lax.pmean(scalars, axis_name='i') updates, opt_state = optimizer.update(grad, opt_state, params) params = optax.apply_updates(params, updates) return ModelUpdates(params, state, opt_state, scalars)
return update_fn def get_batch_dims(global_batch_size: int, device_count: int, local_device_count: int) -> Sequence[int]: """Compute the batch dims for this host. The global_batch_size is the number of data samples that are optimized over in one step of the optimization. This value must be split up so that each individual device gets some share of the batch. When running with multiple devices, there may be multiple hosts, each with multiple local devices. Each host has a local copy of the program, and runs a local copy of the code. Each host must therefore use a batch size so that when all of the hosts run together, the total number of batched elements matches the global batch size. We do this by splitting up the global batch size evenly amongst all devices, and setting the batch size per host to the number of host devices times the device batch size. Args: global_batch_size: The target total batch size per optimization step. device_count: The total number of devices sharing computation per step. local_device_count: The number of devices available on the current host. Returns: The batch dimensions to use on the currently running host. """ per_device_batch_size, remainder = divmod(global_batch_size, device_count) if remainder: raise ValueError( f'Cannot split batch of {global_batch_size} evenly across {local_device_count} devices.' ) host_batch_dims = (local_device_count, per_device_batch_size) return host_batch_dims
random_line_split
trainer.py
# Copyright 2021 DeepMind Technologies Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """The functions for computing gradient updates.""" from typing import Callable, NamedTuple, Sequence import chex import haiku as hk import jax import optax from brave.datasets import datasets from brave.models import embedding_model class ModelUpdates(NamedTuple): params: hk.Params state: hk.State opt_state: optax.OptState scalars: embedding_model.Scalars UpdateFn = Callable[ [chex.PRNGKey, datasets.MiniBatch, hk.Params, hk.State, optax.OptState], ModelUpdates] def build_update_fn(optimizer: optax.GradientTransformation, loss_fn: embedding_model.LossFn) -> UpdateFn: """Returns a function for computing model updates. Args: optimizer: The optimizer to use e.g. the result of optax.sgd(...). loss_fn: An instance of the loss function, pmapped across all devices. Returns: A callable function that takes one step in the optimization problem using the gradients of the loss computed by the model loss function. """ def update_fn(rng: chex.PRNGKey, minibatch: datasets.MiniBatch, params: hk.Params, state: hk.State, opt_state: optax.OptState) -> ModelUpdates: grad_fn = jax.grad(loss_fn, has_aux=True) grad, (state, scalars) = grad_fn(params, state, rng, minibatch) grad = jax.lax.pmean(grad, axis_name='i') scalars = jax.lax.pmean(scalars, axis_name='i') updates, opt_state = optimizer.update(grad, opt_state, params) params = optax.apply_updates(params, updates) return ModelUpdates(params, state, opt_state, scalars) return update_fn def get_batch_dims(global_batch_size: int, device_count: int, local_device_count: int) -> Sequence[int]: """Compute the batch dims for this host. The global_batch_size is the number of data samples that are optimized over in one step of the optimization. This value must be split up so that each individual device gets some share of the batch. When running with multiple devices, there may be multiple hosts, each with multiple local devices. Each host has a local copy of the program, and runs a local copy of the code. Each host must therefore use a batch size so that when all of the hosts run together, the total number of batched elements matches the global batch size. We do this by splitting up the global batch size evenly amongst all devices, and setting the batch size per host to the number of host devices times the device batch size. Args: global_batch_size: The target total batch size per optimization step. device_count: The total number of devices sharing computation per step. local_device_count: The number of devices available on the current host. Returns: The batch dimensions to use on the currently running host. """ per_device_batch_size, remainder = divmod(global_batch_size, device_count) if remainder:
host_batch_dims = (local_device_count, per_device_batch_size) return host_batch_dims
raise ValueError( f'Cannot split batch of {global_batch_size} evenly across {local_device_count} devices.' )
conditional_block
trainer.py
# Copyright 2021 DeepMind Technologies Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """The functions for computing gradient updates.""" from typing import Callable, NamedTuple, Sequence import chex import haiku as hk import jax import optax from brave.datasets import datasets from brave.models import embedding_model class ModelUpdates(NamedTuple): params: hk.Params state: hk.State opt_state: optax.OptState scalars: embedding_model.Scalars UpdateFn = Callable[ [chex.PRNGKey, datasets.MiniBatch, hk.Params, hk.State, optax.OptState], ModelUpdates] def build_update_fn(optimizer: optax.GradientTransformation, loss_fn: embedding_model.LossFn) -> UpdateFn: """Returns a function for computing model updates. Args: optimizer: The optimizer to use e.g. the result of optax.sgd(...). loss_fn: An instance of the loss function, pmapped across all devices. Returns: A callable function that takes one step in the optimization problem using the gradients of the loss computed by the model loss function. """ def update_fn(rng: chex.PRNGKey, minibatch: datasets.MiniBatch, params: hk.Params, state: hk.State, opt_state: optax.OptState) -> ModelUpdates:
return update_fn def get_batch_dims(global_batch_size: int, device_count: int, local_device_count: int) -> Sequence[int]: """Compute the batch dims for this host. The global_batch_size is the number of data samples that are optimized over in one step of the optimization. This value must be split up so that each individual device gets some share of the batch. When running with multiple devices, there may be multiple hosts, each with multiple local devices. Each host has a local copy of the program, and runs a local copy of the code. Each host must therefore use a batch size so that when all of the hosts run together, the total number of batched elements matches the global batch size. We do this by splitting up the global batch size evenly amongst all devices, and setting the batch size per host to the number of host devices times the device batch size. Args: global_batch_size: The target total batch size per optimization step. device_count: The total number of devices sharing computation per step. local_device_count: The number of devices available on the current host. Returns: The batch dimensions to use on the currently running host. """ per_device_batch_size, remainder = divmod(global_batch_size, device_count) if remainder: raise ValueError( f'Cannot split batch of {global_batch_size} evenly across {device_count} devices.' ) host_batch_dims = (local_device_count, per_device_batch_size) return host_batch_dims
grad_fn = jax.grad(loss_fn, has_aux=True) grad, (state, scalars) = grad_fn(params, state, rng, minibatch) grad = jax.lax.pmean(grad, axis_name='i') scalars = jax.lax.pmean(scalars, axis_name='i') updates, opt_state = optimizer.update(grad, opt_state, params) params = optax.apply_updates(params, updates) return ModelUpdates(params, state, opt_state, scalars)
identifier_body
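Both trainer.py rows average gradients and scalars with `jax.lax.pmean(..., axis_name='i')`, which only works if the returned `update_fn` runs under a `jax.pmap` with the same axis name. A hedged sketch of that call pattern; `optimizer`, `loss_fn`, `params`, `state`, `opt_state`, and `batch` are stand-ins for objects built elsewhere in the training code and are not defined in the source shown here.

```python
import jax

# Assumption: this mirrors how build_update_fn is meant to be driven; the
# axis_name must match the jax.lax.pmean(..., axis_name='i') calls inside
# update_fn.
update_fn = build_update_fn(optimizer, loss_fn)
p_update_fn = jax.pmap(update_fn, axis_name='i')

# Every pmapped argument carries a leading axis of size
# jax.local_device_count(): the minibatch via get_batch_dims(...), and
# params/state/opt_state by prior replication across the local devices.
rngs = jax.random.split(jax.random.PRNGKey(0), jax.local_device_count())
params, state, opt_state, scalars = p_update_fn(
    rngs, batch, params, state, opt_state)
```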
core.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// except according to those terms. use rustc; use rustc::{driver, middle}; use syntax::ast; use syntax::diagnostic; use syntax::parse; use syntax; use std::os; use std::local_data; use visit_ast::RustdocVisitor; use clean; use clean::Clean; pub struct DocContext { crate: @ast::Crate, tycx: middle::ty::ctxt, sess: driver::session::Session } /// Parses, resolves, and typechecks the given crate fn get_ast_and_resolve(cpath: &Path, libs: ~[Path]) -> DocContext { use syntax::codemap::dummy_spanned; use rustc::driver::driver::*; let parsesess = parse::new_parse_sess(None); let input = file_input(cpath.clone()); let sessopts = @driver::session::options { binary: @"rustdoc", maybe_sysroot: Some(@os::self_exe_path().unwrap().pop()), addl_lib_search_paths: @mut libs, .. (*rustc::driver::session::basic_options()).clone() }; let diagnostic_handler = syntax::diagnostic::mk_handler(None); let span_diagnostic_handler = syntax::diagnostic::mk_span_handler(diagnostic_handler, parsesess.cm); let sess = driver::driver::build_session_(sessopts, parsesess.cm, @diagnostic::DefaultEmitter as @diagnostic::Emitter, span_diagnostic_handler); let mut cfg = build_configuration(sess); cfg.push(@dummy_spanned(ast::MetaWord(@"stage2"))); let mut crate = phase_1_parse_input(sess, cfg.clone(), &input); crate = phase_2_configure_and_expand(sess, cfg, crate); let analysis = phase_3_run_analysis_passes(sess, crate); debug!("crate: %?", crate); DocContext { crate: crate, tycx: analysis.ty_cx, sess: sess } } pub fn run_core (libs: ~[Path], path: &Path) -> clean::Crate { let ctxt = @get_ast_and_resolve(path, libs); debug!("defmap:"); for (k, v) in ctxt.tycx.def_map.iter() { debug!("%?: %?", k, v); } local_data::set(super::ctxtkey, ctxt); let v = @mut RustdocVisitor::new(); v.visit(ctxt.crate); v.clean() }
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed
random_line_split
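Each record in this dump pairs a file name with a `prefix`, a `suffix`, a held-out `middle`, and a task label (`random_line_split`, `identifier_name`, `identifier_body`, or `conditional_block`); for every task type the original file is recovered as `prefix + middle + suffix`. A small sketch of that reconstruction, using abbreviated stand-ins for the core.rs row above:

```python
# Reconstruct a source file from one fill-in-the-middle (FIM) record.
# Field values below are shortened stand-ins for the full fields above.
record = {
    'file_name': 'core.rs',
    'prefix': '// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n',
    'middle': '// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n',
    'suffix': '// except according to those terms.\n',
    'fim_type': 'random_line_split',
}


def reconstruct(record: dict) -> str:
    # The middle is what a completion model must predict given the
    # surrounding prefix and suffix.
    return record['prefix'] + record['middle'] + record['suffix']


print(reconstruct(record))
```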
core.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use rustc; use rustc::{driver, middle}; use syntax::ast; use syntax::diagnostic; use syntax::parse; use syntax; use std::os; use std::local_data; use visit_ast::RustdocVisitor; use clean; use clean::Clean; pub struct DocContext { crate: @ast::Crate, tycx: middle::ty::ctxt, sess: driver::session::Session } /// Parses, resolves, and typechecks the given crate fn get_ast_and_resolve(cpath: &Path, libs: ~[Path]) -> DocContext { use syntax::codemap::dummy_spanned; use rustc::driver::driver::*; let parsesess = parse::new_parse_sess(None); let input = file_input(cpath.clone()); let sessopts = @driver::session::options { binary: @"rustdoc", maybe_sysroot: Some(@os::self_exe_path().unwrap().pop()), addl_lib_search_paths: @mut libs, .. (*rustc::driver::session::basic_options()).clone() }; let diagnostic_handler = syntax::diagnostic::mk_handler(None); let span_diagnostic_handler = syntax::diagnostic::mk_span_handler(diagnostic_handler, parsesess.cm); let sess = driver::driver::build_session_(sessopts, parsesess.cm, @diagnostic::DefaultEmitter as @diagnostic::Emitter, span_diagnostic_handler); let mut cfg = build_configuration(sess); cfg.push(@dummy_spanned(ast::MetaWord(@"stage2"))); let mut crate = phase_1_parse_input(sess, cfg.clone(), &input); crate = phase_2_configure_and_expand(sess, cfg, crate); let analysis = phase_3_run_analysis_passes(sess, crate); debug!("crate: %?", crate); DocContext { crate: crate, tycx: analysis.ty_cx, sess: sess } } pub fn run_core (libs: ~[Path], path: &Path) -> clean::Crate
{ let ctxt = @get_ast_and_resolve(path, libs); debug!("defmap:"); for (k, v) in ctxt.tycx.def_map.iter() { debug!("%?: %?", k, v); } local_data::set(super::ctxtkey, ctxt); let v = @mut RustdocVisitor::new(); v.visit(ctxt.crate); v.clean() }
identifier_body
core.rs
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use rustc; use rustc::{driver, middle}; use syntax::ast; use syntax::diagnostic; use syntax::parse; use syntax; use std::os; use std::local_data; use visit_ast::RustdocVisitor; use clean; use clean::Clean; pub struct
{ crate: @ast::Crate, tycx: middle::ty::ctxt, sess: driver::session::Session } /// Parses, resolves, and typechecks the given crate fn get_ast_and_resolve(cpath: &Path, libs: ~[Path]) -> DocContext { use syntax::codemap::dummy_spanned; use rustc::driver::driver::*; let parsesess = parse::new_parse_sess(None); let input = file_input(cpath.clone()); let sessopts = @driver::session::options { binary: @"rustdoc", maybe_sysroot: Some(@os::self_exe_path().unwrap().pop()), addl_lib_search_paths: @mut libs, .. (*rustc::driver::session::basic_options()).clone() }; let diagnostic_handler = syntax::diagnostic::mk_handler(None); let span_diagnostic_handler = syntax::diagnostic::mk_span_handler(diagnostic_handler, parsesess.cm); let sess = driver::driver::build_session_(sessopts, parsesess.cm, @diagnostic::DefaultEmitter as @diagnostic::Emitter, span_diagnostic_handler); let mut cfg = build_configuration(sess); cfg.push(@dummy_spanned(ast::MetaWord(@"stage2"))); let mut crate = phase_1_parse_input(sess, cfg.clone(), &input); crate = phase_2_configure_and_expand(sess, cfg, crate); let analysis = phase_3_run_analysis_passes(sess, crate); debug!("crate: %?", crate); DocContext { crate: crate, tycx: analysis.ty_cx, sess: sess } } pub fn run_core (libs: ~[Path], path: &Path) -> clean::Crate { let ctxt = @get_ast_and_resolve(path, libs); debug!("defmap:"); for (k, v) in ctxt.tycx.def_map.iter() { debug!("%?: %?", k, v); } local_data::set(super::ctxtkey, ctxt); let v = @mut RustdocVisitor::new(); v.visit(ctxt.crate); v.clean() }
DocContext
identifier_name
dataload.py
#******************************************************************************* # Copyright (c) 2015 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #******************************************************************************/
import json from helpers.dbutils import CloudantDbUtils from helpers.acmeair_utils import AcmeAirUtils import conftest # get the cloudant credentials from pytest config file test_properties = conftest.test_properties() class DataLoader: """ Test data loader related functions """ def load_AcmeData(self, num_of_cust): """ Reset databases and use the AcmeAir database loader to populate initial customer, flight and airportmapping data. Does NOT generate user data like bookings. """ print ("num_of_cust: ", num_of_cust) acmeair = AcmeAirUtils() try: if acmeair.is_acmeair_running() != 0: raise RuntimeError(""" AcmeAir is already running which may cause unexpected results when resetting databases. Please shut down the app and try again. """) else: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() acmeair.start_acmeair() acmeair.load_data(num_of_cust) finally: acmeair.stop_acmeair() def remove_AcmeDb(self, num_of_cust): """ Drop all AcmeAir databases """ acmeair = AcmeAirUtils() if acmeair.is_acmeair_running() != 0: acmeair.stop_acmeair() cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases() def load_SpecCharValuePredicateData(self): """ Create booking data needed to test SpecCharValuePredicate """ try: acmeair = AcmeAirUtils() acmeair.start_acmeair() # book flights AA93 and AA330 flight1 = "AA93" flight2 = "AA330" # Step#1 - need to find the flights generated _id required for booking flight1_id = acmeair.get_flightId_by_number(flight1) print ("{} id = {}".format(flight1, flight1_id)) flight2_id = acmeair.get_flightId_by_number(flight2) print ("{} id = {}".format(flight2, flight2_id)) # Step#2 - add the booking acmeair.book_flights("[email protected]", flight1, flight2) finally: acmeair.stop_acmeair() if __name__ =='__main__': """ Utility to create test databases and load data """ import argparse parser = argparse.ArgumentParser(description="Utility to load AcmeAir data required for python spark-cloudant tests") group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-cleanup', action='store_true', help='Drop all test databases') group.add_argument('-load', help='Reset and Load databases with the given # of users. -load 0 to just recreate databases and indexes.', type=int) args = parser.parse_args() dataloader = DataLoader() if args.load is not None: if args.load == 0: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() else: dataloader.load_AcmeData(args.load) dataloader.load_SpecCharValuePredicateData() elif args.cleanup: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases()
import requests import sys import os
random_line_split
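Both `load_AcmeData` and `load_SpecCharValuePredicateData` follow the same discipline: start AcmeAir, do the work, and stop it in a `finally` block no matter what happened. A minimal sketch of the same guarantee expressed as a context manager; `AcmeAirUtils` comes from the file's own imports, while `running_acmeair` itself is an illustrative refactor, not part of the source.

```python
from contextlib import contextmanager

from helpers.acmeair_utils import AcmeAirUtils  # as imported in dataload.py


@contextmanager
def running_acmeair():
    # Equivalent of the try/finally pattern above: stop_acmeair() always runs.
    acmeair = AcmeAirUtils()
    acmeair.start_acmeair()
    try:
        yield acmeair
    finally:
        acmeair.stop_acmeair()


# Usage mirroring load_SpecCharValuePredicateData:
# with running_acmeair() as acmeair:
#     acmeair.book_flights('[email protected]', 'AA93', 'AA330')
```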
dataload.py
#******************************************************************************* # Copyright (c) 2015 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #******************************************************************************/ import requests import sys import os import json from helpers.dbutils import CloudantDbUtils from helpers.acmeair_utils import AcmeAirUtils import conftest # get the cloudant credentials from pytest config file test_properties = conftest.test_properties() class DataLoader: """ Test data loader related functions """ def load_AcmeData(self, num_of_cust): """ Reset databases and use the AcmeAir database loader to populate initial customer, flight and airportmapping data. Does NOT generate user data like bookings. """ print ("num_of_cust: ", num_of_cust) acmeair = AcmeAirUtils() try: if acmeair.is_acmeair_running() != 0: raise RuntimeError(""" AcmeAir is already running which may cause unexpected results when resetting databases. Please shut down the app and try again. """) else: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() acmeair.start_acmeair() acmeair.load_data(num_of_cust) finally: acmeair.stop_acmeair() def remove_AcmeDb(self, num_of_cust): """ Drop all AcmeAir databases """ acmeair = AcmeAirUtils() if acmeair.is_acmeair_running() != 0: acmeair.stop_acmeair() cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases() def
(self): """ Create booking data needed to test SpecCharValuePredicate """ try: acmeair = AcmeAirUtils() acmeair.start_acmeair() # book flights AA93 and AA330 flight1 = "AA93" flight2 = "AA330" # Step#1 - need to find the flights generated _id required for booking flight1_id = acmeair.get_flightId_by_number(flight1) print ("{} id = {}".format(flight1, flight1_id)) flight2_id = acmeair.get_flightId_by_number(flight2) print ("{} id = {}".format(flight2, flight2_id)) # Step#2 - add the booking acmeair.book_flights("[email protected]", flight1, flight2) finally: acmeair.stop_acmeair() if __name__ =='__main__': """ Utility to create test databases and load data """ import argparse parser = argparse.ArgumentParser(description="Utility to load AcmeAir data required for python spark-cloudant tests") group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-cleanup', action='store_true', help='Drop all test databases') group.add_argument('-load', help='Reset and Load databases with the given # of users. -load 0 to just recreate databases and indexes.', type=int) args = parser.parse_args() dataloader = DataLoader() if args.load is not None: if args.load == 0: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() else: dataloader.load_AcmeData(args.load) dataloader.load_SpecCharValuePredicateData() elif args.cleanup: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases()
load_SpecCharValuePredicateData
identifier_name
dataload.py
#******************************************************************************* # Copyright (c) 2015 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #******************************************************************************/ import requests import sys import os import json from helpers.dbutils import CloudantDbUtils from helpers.acmeair_utils import AcmeAirUtils import conftest # get the cloudant credentials from pytest config file test_properties = conftest.test_properties() class DataLoader: """ Test data loader related functions """ def load_AcmeData(self, num_of_cust): """ Reset databases and use the AcmeAir database loader to populate initial customer, flight and airportmapping data. Does NOT generate user data like bookings. """ print ("num_of_cust: ", num_of_cust) acmeair = AcmeAirUtils() try: if acmeair.is_acmeair_running() != 0: raise RuntimeError(""" AcmeAir is already running which may cause unexpected results when resetting databases. Please shut down the app and try again. """) else: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() acmeair.start_acmeair() acmeair.load_data(num_of_cust) finally: acmeair.stop_acmeair() def remove_AcmeDb(self, num_of_cust): """ Drop all AcmeAir databases """ acmeair = AcmeAirUtils() if acmeair.is_acmeair_running() != 0: acmeair.stop_acmeair() cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases() def load_SpecCharValuePredicateData(self): """ Create booking data needed to test SpecCharValuePredicate """ try: acmeair = AcmeAirUtils() acmeair.start_acmeair() # book flights AA93 and AA330 flight1 = "AA93" flight2 = "AA330" # Step#1 - need to find the flights generated _id required for booking flight1_id = acmeair.get_flightId_by_number(flight1) print ("{} id = {}".format(flight1, flight1_id)) flight2_id = acmeair.get_flightId_by_number(flight2) print ("{} id = {}".format(flight2, flight2_id)) # Step#2 - add the booking acmeair.book_flights("[email protected]", flight1, flight2) finally: acmeair.stop_acmeair() if __name__ =='__main__':
elif args.cleanup: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases()
""" Utility to create test databases and load data """ import argparse parser = argparse.ArgumentParser(description="Utility to load AcmeAir data required for python spark-cloudant tests") group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-cleanup', action='store_true', help='Drop all test databases') group.add_argument('-load', help='Reset and Load databases with the given # of users. -load 0 to just recreate databases and indexes.', type=int) args = parser.parse_args() dataloader = DataLoader() if args.load is not None: if args.load == 0: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() else: dataloader.load_AcmeData(args.load) dataloader.load_SpecCharValuePredicateData()
conditional_block
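The `__main__` block in these rows builds a required, mutually exclusive pair of flags, so exactly one of `-cleanup` or `-load` must be given. A trimmed, runnable sketch of that argparse pattern (help strings shortened):

```python
import argparse

parser = argparse.ArgumentParser(
    description='Load or drop the AcmeAir test databases')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-cleanup', action='store_true',
                   help='Drop all test databases')
group.add_argument('-load', type=int,
                   help='Reset and load databases with the given # of users')

print(parser.parse_args(['-load', '0']))   # Namespace(cleanup=False, load=0)
# parser.parse_args([]) or parser.parse_args(['-load', '0', '-cleanup'])
# would exit with an error: the group demands exactly one of the two flags.
```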
dataload.py
#******************************************************************************* # Copyright (c) 2015 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #******************************************************************************/ import requests import sys import os import json from helpers.dbutils import CloudantDbUtils from helpers.acmeair_utils import AcmeAirUtils import conftest # get the cloudant credentials from pytest config file test_properties = conftest.test_properties() class DataLoader: """ Test data loader related functions """ def load_AcmeData(self, num_of_cust): """ Reset databases and use the AcmeAir database loader to populate initial customer, flight and airportmapping data. Does NOT generate user data like bookings. """ print ("num_of_cust: ", num_of_cust) acmeair = AcmeAirUtils() try: if acmeair.is_acmeair_running() != 0: raise RuntimeError(""" AcmeAir is already running which may cause unexpected results when resetting databases. Please shut down the app and try again. """) else: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() acmeair.start_acmeair() acmeair.load_data(num_of_cust) finally: acmeair.stop_acmeair() def remove_AcmeDb(self, num_of_cust): """ Drop all AcmeAir databases """ acmeair = AcmeAirUtils() if acmeair.is_acmeair_running() != 0: acmeair.stop_acmeair() cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases() def load_SpecCharValuePredicateData(self):
finally: acmeair.stop_acmeair() if __name__ =='__main__': """ Utility to create test databases and load data """ import argparse parser = argparse.ArgumentParser(description="Utility to load AcmeAir data required for python spark-cloudant tests") group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-cleanup', action='store_true', help='Drop all test databases') group.add_argument('-load', help='Reset and Load databases with the given # of users. -load 0 to just recreate databases and indexes.', type=int) args = parser.parse_args() dataloader = DataLoader() if args.load is not None: if args.load == 0: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.reset_databases() else: dataloader.load_AcmeData(args.load) dataloader.load_SpecCharValuePredicateData() elif args.cleanup: cloudantUtils = CloudantDbUtils(test_properties) cloudantUtils.drop_all_databases()
""" Create booking data needed to test SpecCharValuePredicate """ try: acmeair = AcmeAirUtils() acmeair.start_acmeair() # book flights AA93 and AA330 flight1 = "AA93" flight2 = "AA330" # Step#1 - need to find the flights generated _id required for booking flight1_id = acmeair.get_flightId_by_number(flight1) print ("{} id = {}".format(flight1, flight1_id)) flight2_id = acmeair.get_flightId_by_number(flight2) print ("{} id = {}".format(flight2, flight2_id)) # Step#2 - add the boooking acmeair.book_flights("[email protected]", flight1, flight2)
identifier_body
main.controller.ts
import * as _ from 'lodash'; import { IAppService } from '../app.service'; export class ma
title: string; static $inject = ['$scope', '$state', 'appService', '$mdDialog']; constructor(private $scope, private $state: ng.ui.IStateService, private appService: IAppService, private $mdDialog: ng.material.IDialogService) { this.title = 'MAIN'; var clearEvt1 = appService.events.filter(x => x.target === 'websocket@open').subscribe(data => this.appService.send('login', {})); var clearEvt2 = appService.events.filter(x => x.target === 'websocket@close').subscribe(data => this.OnWSClose(data)); var clearEvt3 = appService.events.filter(x => x.target === 'websocket@error').subscribe(data => this.OnWSError(data)); var clearEvt4 = appService.events.filter(x => x.target === 'login').subscribe(data => { if (this.$state.current.name === 'login') return; if (data && !data.error) { this.appService.permissions = data.content.permissions; this.appService.username = data.content.username; } else { this.$state.go('login'); } }); this.appService.connect(); $scope.$on("$destroy", () => { clearEvt1.unsubscribe(); clearEvt2.unsubscribe(); clearEvt3.unsubscribe(); clearEvt4.unsubscribe(); }); } OnWSError(data) { console.error('error: ', data); } OnWSClose(data) { this.appService.permissions = ''; this.appService.username = ''; if (data.code === 3001) return; this.$mdDialog.show(this.$mdDialog.confirm() .cancel('No') .ok('Yes') .textContent('The server connection is closed. Do you want to open it back?') .clickOutsideToClose(true) .escapeToClose(true) //.focusOnOpen(false) //.onComplete(() => { // this.appService.connect(); ) .then(() => { this.appService.connect(); }); } }
inCtrl {
identifier_name
main.controller.ts
import * as _ from 'lodash'; import { IAppService } from '../app.service'; export class mainCtrl { title: string; static $inject = ['$scope', '$state', 'appService', '$mdDialog']; constructor(private $scope, private $state: ng.ui.IStateService, private appService: IAppService, private $mdDialog: ng.material.IDialogService) { this.title = 'MAIN'; var clearEvt1 = appService.events.filter(x => x.target === 'websocket@open').subscribe(data => this.appService.send('login', {})); var clearEvt2 = appService.events.filter(x => x.target === 'websocket@close').subscribe(data => this.OnWSClose(data)); var clearEvt3 = appService.events.filter(x => x.target === 'websocket@error').subscribe(data => this.OnWSError(data)); var clearEvt4 = appService.events.filter(x => x.target === 'login').subscribe(data => { if (this.$state.current.name === 'login') return; if (data && !data.error) { this.appService.permissions = data.content.permissions; this.appService.username = data.content.username; } else { this.$state.go('login'); } }); this.appService.connect(); $scope.$on("$destroy", () => { clearEvt1.unsubscribe(); clearEvt2.unsubscribe(); clearEvt3.unsubscribe(); clearEvt4.unsubscribe(); }); } OnWSError(data) { console.error('error: ', data); } OnWSClose(data) { this.appService.permissions = ''; this.appService.username = '';
.ok('Yes') .textContent('The server connection is closed. Do you want to open it back?') .clickOutsideToClose(true) .escapeToClose(true) //.focusOnOpen(false) //.onComplete(() => { // this.appService.connect(); ) .then(() => { this.appService.connect(); }); } }
if (data.code === 3001) return; this.$mdDialog.show(this.$mdDialog.confirm() .cancel('No')
random_line_split
main.controller.ts
import * as _ from 'lodash'; import { IAppService } from '../app.service'; export class mainCtrl { title: string; static $inject = ['$scope', '$state', 'appService', '$mdDialog']; constructor(private $scope, private $state: ng.ui.IStateService, private appService: IAppService, private $mdDialog: ng.material.IDialogService) {
clearEvt1.unsubscribe(); clearEvt2.unsubscribe(); clearEvt3.unsubscribe(); clearEvt4.unsubscribe(); }); } OnWSError(data) { console.error('error: ', data); } OnWSClose(data) { this.appService.permissions = ''; this.appService.username = ''; if (data.code === 3001) return; this.$mdDialog.show(this.$mdDialog.confirm() .cancel('No') .ok('Yes') .textContent('The server connection is closed. Do you want to open it back?') .clickOutsideToClose(true) .escapeToClose(true) //.focusOnOpen(false) //.onComplete(() => { // this.appService.connect(); ) .then(() => { this.appService.connect(); }); } }
this.title = 'MAIN'; var clearEvt1 = appService.events.filter(x => x.target === 'websocket@open').subscribe(data => this.appService.send('login', {})); var clearEvt2 = appService.events.filter(x => x.target === 'websocket@close').subscribe(data => this.OnWSClose(data)); var clearEvt3 = appService.events.filter(x => x.target === 'websocket@error').subscribe(data => this.OnWSError(data)); var clearEvt4 = appService.events.filter(x => x.target === 'login').subscribe(data => { if (this.$state.current.name === 'login') return; if (data && !data.error) { this.appService.permissions = data.content.permissions; this.appService.username = data.content.username; } else { this.$state.go('login'); } }); this.appService.connect(); $scope.$on("$destroy", () => {
identifier_body
getInspirationalQuoteOfDay.js
// // getInspirationalQuoteOfDay.js
// // created by Liv Erickson on 12/04/18 // Copyright 2018 High Fidelity, Inc. // Quotes provided by 'They Said So' - https://theysaidso.com/api // // Distributed under the Apache License, Version 2.0. // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // (function(){ var API_URL = "https://quotes.rest/qod?category=inspire"; var INTERVAL_FREQUENCY = 360000; var myEntityID, interval; var req = Script.require('./utils/request.js'); var InspirationalSign = function() {}; function checkForNewQuote() { getInspirationOfTheDay(); } InspirationalSign.prototype = { preload : function(entityID) { myEntityID = entityID; getInspirationOfTheDay(); interval = Script.setInterval(checkForNewQuote, INTERVAL_FREQUENCY); }, unload: function() { if (interval) { Script.clearInterval(interval); } } }; function getInspirationOfTheDay() { req.request(API_URL, function(error, data) { try { var quote = data.contents.quotes[0].quote; var author = data.contents.quotes[0].author; var string = quote + " - " + author; Entities.editEntity(myEntityID, {'text' : string}); } catch (e) { print("Error getting quote"); } }); } return new InspirationalSign(); });
random_line_split
getInspirationalQuoteOfDay.js
// // getInspirationalQuoteOfDay.js // // created by Liv Erickson on 12/04/18 // Copyright 2018 High Fidelity, Inc. // Quotes provided by 'They Said So' - https://theysaidso.com/api // // Distributed under the Apache License, Version 2.0. // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // (function(){ var API_URL = "https://quotes.rest/qod?category=inspire"; var INTERVAL_FREQUENCY = 360000; var myEntityID, interval; var req = Script.require('./utils/request.js'); var InspirationalSign = function() {}; function checkForNewQuote() { getInspirationOfTheDay(); } InspirationalSign.prototype = { preload : function(entityID) { myEntityID = entityID; getInspirationOfTheDay(); interval = Script.setInterval(checkForNewQuote, INTERVAL_FREQUENCY); }, unload: function() { if (interval) { Script.clearInterval(interval); } } }; function
() { req.request(API_URL, function(error, data) { try { var quote = data.contents.quotes[0].quote; var author = data.contents.quotes[0].author; var string = quote + " - " + author; Entities.editEntity(myEntityID, {'text' : string}); } catch (e) { print("Error getting quote"); } }); } return new InspirationalSign(); });
getInspirationOfTheDay
identifier_name
getInspirationalQuoteOfDay.js
// // getInspirationalQuoteOfDay.js // // created by Liv Erickson on 12/04/18 // Copyright 2018 High Fidelity, Inc. // Quotes provided by 'They Said So' - https://theysaidso.com/api // // Distributed under the Apache License, Version 2.0. // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // (function(){ var API_URL = "https://quotes.rest/qod?category=inspire"; var INTERVAL_FREQUENCY = 360000; var myEntityID, interval; var req = Script.require('./utils/request.js'); var InspirationalSign = function() {}; function checkForNewQuote() { getInspirationOfTheDay(); } InspirationalSign.prototype = { preload : function(entityID) { myEntityID = entityID; getInspirationOfTheDay(); interval = Script.setInterval(checkForNewQuote, INTERVAL_FREQUENCY); }, unload: function() { if (interval) { Script.clearInterval(interval); } } }; function getInspirationOfTheDay()
return new InspirationalSign(); });
{ req.request(API_URL, function(error, data) { try { var quote = data.contents.quotes[0].quote; var author = data.contents.quotes[0].author; var string = quote + " - " + author; Entities.editEntity(myEntityID, {'text' : string}); } catch (e) { print("Error getting quote"); } }); }
identifier_body
getInspirationalQuoteOfDay.js
// // getInspirationalQuoteOfDay.js // // created by Liv Erickson on 12/04/18 // Copyright 2018 High Fidelity, Inc. // Quotes provided by 'They Said So' - https://theysaidso.com/api // // Distributed under the Apache License, Version 2.0. // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html // (function(){ var API_URL = "https://quotes.rest/qod?category=inspire"; var INTERVAL_FREQUENCY = 360000; var myEntityID, interval; var req = Script.require('./utils/request.js'); var InspirationalSign = function() {}; function checkForNewQuote() { getInspirationOfTheDay(); } InspirationalSign.prototype = { preload : function(entityID) { myEntityID = entityID; getInspirationOfTheDay(); interval = Script.setInterval(checkForNewQuote, INTERVAL_FREQUENCY); }, unload: function() { if (interval)
} }; function getInspirationOfTheDay() { req.request(API_URL, function(error, data) { try { var quote = data.contents.quotes[0].quote; var author = data.contents.quotes[0].author; var string = quote + " - " + author; Entities.editEntity(myEntityID, {'text' : string}); } catch (e) { print("Error getting quote"); } }); } return new InspirationalSign(); });
{ Script.clearInterval(interval); }
conditional_block
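All four getInspirationalQuoteOfDay.js rows poll the They Said So endpoint on a timer and write "quote - author" into a text entity. A rough Python equivalent of the fetch-and-format step; the URL is the one in the script, while the `requests` usage and exception handling are assumptions of this sketch:

```python
import requests

API_URL = 'https://quotes.rest/qod?category=inspire'


def get_inspiration_of_the_day() -> str:
    # Mirrors the JS handler: take the first quote of the day and format it.
    try:
        data = requests.get(API_URL, timeout=10).json()
        quote = data['contents']['quotes'][0]
        return '{} - {}'.format(quote['quote'], quote['author'])
    except (requests.RequestException, KeyError, IndexError, ValueError):
        return 'Error getting quote'


if __name__ == '__main__':
    print(get_inspiration_of_the_day())
```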
mod.rs
use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::rc::Rc; use std::ops::DerefMut; use syntax::abi; use syntax::ast::TokenTree; use syntax::ast; use syntax::ast_util::empty_generics; use syntax::codemap::{Span, DUMMY_SP}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::quote::rt::{ToTokens, ExtParseUtils}; use syntax::parse::token::InternedString; use syntax::ptr::P; use node; mod mcu; mod os; pub mod meta_args; pub struct Builder { main_stmts: Vec<P<ast::Stmt>>, type_items: Vec<P<ast::Item>>, pt: Rc<node::PlatformTree>, } impl Builder { pub fn build(cx: &mut ExtCtxt, pt: Rc<node::PlatformTree>) -> Option<Builder> { let mut builder = Builder::new(pt.clone(), cx); if !pt.expect_subnodes(cx, &["mcu", "os", "drivers"]) { return None; } match pt.get_by_path("mcu") { Some(node) => mcu::attach(&mut builder, cx, node), None => (), // TODO(farcaller): should it actually fail? } match pt.get_by_path("os") { Some(node) => os::attach(&mut builder, cx, node), None => (), // TODO(farcaller): this should fail. } match pt.get_by_path("drivers") { Some(node) => ::drivers_pt::attach(&mut builder, cx, node), None => (), } for sub in pt.nodes().iter() { Builder::walk_mutate(&mut builder, cx, sub); } let base_node = pt.get_by_path("mcu").and_then(|mcu|{mcu.get_by_path("clock")}); match base_node { Some(node) => Builder::walk_materialize(&mut builder, cx, node), None => { cx.parse_sess().span_diagnostic.span_err(DUMMY_SP, "root node `mcu::clock` must be present"); } } Some(builder) } fn walk_mutate(builder: &mut Builder, cx: &mut ExtCtxt, node: &Rc<node::Node>) { let maybe_mut = node.mutator.get(); if maybe_mut.is_some() { maybe_mut.unwrap()(builder, cx, node.clone()); } for sub in node.subnodes().iter() { Builder::walk_mutate(builder, cx, sub); } } // FIXME(farcaller): verify that all nodes have been materialized fn walk_materialize(builder: &mut Builder, cx: &mut ExtCtxt, node: Rc<node::Node>) { let maybe_mat = node.materializer.get(); if maybe_mat.is_some() { maybe_mat.unwrap()(builder, cx, node.clone()); } let rev_depends = node.rev_depends_on.borrow(); for weak_sub in rev_depends.iter() { let sub = weak_sub.upgrade().unwrap(); let mut sub_deps = sub.depends_on.borrow_mut(); let deps = sub_deps.deref_mut(); let mut index = None; let mut i = 0usize; // FIXME: iter().position() for dep in deps.iter() { let strong_dep = dep.upgrade().unwrap(); if node == strong_dep { index = Some(i); break; } i = i + 1; } if index.is_none() { panic!("no index found"); } else { deps.remove(index.unwrap()); if deps.len() == 0 { Builder::walk_materialize(builder, cx, sub.clone()); } } } } pub fn new(pt: Rc<node::PlatformTree>, cx: &ExtCtxt) -> Builder { let use_zinc = cx.item_use_simple(DUMMY_SP, ast::Inherited, cx.path_ident( DUMMY_SP, cx.ident_of("zinc"))); Builder { main_stmts: vec!(), type_items: vec!(use_zinc), pt: pt, } } pub fn main_stmts(&self) -> Vec<P<ast::Stmt>> { self.main_stmts.clone() } pub fn pt(&self) -> Rc<node::PlatformTree> { self.pt.clone() } pub fn add_main_statement(&mut self, stmt: P<ast::Stmt>) { self.main_stmts.push(stmt); } pub fn 
add_type_item(&mut self, item: P<ast::Item>) { self.type_items.push(item); } fn emit_main(&self, cx: &ExtCtxt) -> P<ast::Item> { // init stack let init_stack_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_stack(); )); // init data let init_data_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_data(); )); let mut stmts = vec!(init_stack_stmt, init_data_stmt); for s in self.main_stmts.clone().into_iter() { stmts.push(s); } let body = cx.block(DUMMY_SP, stmts, None); let unused_variables = cx.meta_word(DUMMY_SP, InternedString::new("unused_variables")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(unused_variables)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); self.item_fn(cx, DUMMY_SP, "platformtree_main", &[allow_noncamel], body) } fn emit_start(&self, cx: &ExtCtxt) -> P<ast::Item> { quote_item!(cx, #[start] fn start(_: isize, _: *const *const u8) -> isize { unsafe { platformtree_main(); } 0 } ).unwrap() } fn emit_morestack(&self, cx: &ExtCtxt) -> P<ast::Item> { let stmt = cx.stmt_expr(quote_expr!(&*cx, core::intrinsics::abort() // or // zinc::os::task::morestack(); )); let empty_span = DUMMY_SP; let body = cx.block(empty_span, vec!(stmt), None); self.item_fn(cx, empty_span, "__morestack", &[], body) } pub fn emit_items(&self, cx: &ExtCtxt) -> Vec<P<ast::Item>> { let non_camel_case_types = cx.meta_word(DUMMY_SP, InternedString::new("non_camel_case_types")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(non_camel_case_types)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); let pt_mod_item = cx.item_mod(DUMMY_SP, DUMMY_SP, cx.ident_of("pt"), vec!(allow_noncamel), self.type_items.clone()); if self.type_items.len() > 1 { vec!(pt_mod_item, self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } else { vec!(self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } } fn item_fn(&self, cx: &ExtCtxt, span: Span, name: &str, local_attrs: &[ast::Attribute], body: P<ast::Block>) -> P<ast::Item> { let attr_no_mangle = cx.attribute(span, cx.meta_word( span, InternedString::new("no_mangle"))); let mut attrs = vec!(attr_no_mangle); for a in local_attrs { attrs.push(a.clone()); } P(ast::Item { ident: cx.ident_of(name), attrs: attrs, id: ast::DUMMY_NODE_ID, node: ast::ItemFn( cx.fn_decl(Vec::new(), cx.ty(DUMMY_SP, ast::Ty_::TyTup(Vec::new()))), ast::Unsafety::Unsafe, ast::Constness::NotConst, abi::Rust, // TODO(farcaller): should this be abi::C? empty_generics(), body), vis: ast::Public, span: span, }) } } pub struct TokenString(pub String); impl ToTokens for TokenString {
let &TokenString(ref s) = self; (cx as &ExtParseUtils).parse_tts(s.clone()) } } pub fn add_node_dependency(node: &Rc<node::Node>, dep: &Rc<node::Node>) { let
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
random_line_split
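`walk_materialize` in the mod.rs rows is a dependency-driven traversal: materialize a node, then visit each reverse dependent, remove the edge back to the materialized node, and recurse once a dependent's dependency list empties — in effect Kahn's topological ordering over the platform tree. A compact sketch of the same idea; the node names and the `materialize` callback are illustrative:

```python
# Materialize a node, then any dependent whose dependency set drains to
# empty, visiting nodes in topological order (compare walk_materialize).
def walk_materialize(node, depends_on, rev_depends_on, materialize):
    materialize(node)
    for sub in rev_depends_on.get(node, []):
        deps = depends_on[sub]
        deps.discard(node)      # the Rust version removes by index instead
        if not deps:            # all of sub's dependencies are materialized
            walk_materialize(sub, depends_on, rev_depends_on, materialize)


# Illustrative platform tree: timer and uart both depend on clock.
depends_on = {'clock': set(), 'timer': {'clock'}, 'uart': {'clock'}}
rev_depends_on = {'clock': ['timer', 'uart']}
walk_materialize('clock', depends_on, rev_depends_on, print)
# Prints: clock, timer, uart
```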
mod.rs
use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::rc::Rc; use std::ops::DerefMut; use syntax::abi; use syntax::ast::TokenTree; use syntax::ast; use syntax::ast_util::empty_generics; use syntax::codemap::{Span, DUMMY_SP}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::quote::rt::{ToTokens, ExtParseUtils}; use syntax::parse::token::InternedString; use syntax::ptr::P; use node; mod mcu; mod os; pub mod meta_args; pub struct Builder { main_stmts: Vec<P<ast::Stmt>>, type_items: Vec<P<ast::Item>>, pt: Rc<node::PlatformTree>, } impl Builder { pub fn build(cx: &mut ExtCtxt, pt: Rc<node::PlatformTree>) -> Option<Builder> { let mut builder = Builder::new(pt.clone(), cx); if !pt.expect_subnodes(cx, &["mcu", "os", "drivers"]) { return None; } match pt.get_by_path("mcu") { Some(node) => mcu::attach(&mut builder, cx, node), None => (), // TODO(farcaller): should it actually fail? } match pt.get_by_path("os") { Some(node) => os::attach(&mut builder, cx, node), None => (), // TODO(farcaller): this should fail. } match pt.get_by_path("drivers") { Some(node) => ::drivers_pt::attach(&mut builder, cx, node), None => (), } for sub in pt.nodes().iter() { Builder::walk_mutate(&mut builder, cx, sub); } let base_node = pt.get_by_path("mcu").and_then(|mcu|{mcu.get_by_path("clock")}); match base_node { Some(node) => Builder::walk_materialize(&mut builder, cx, node), None => { cx.parse_sess().span_diagnostic.span_err(DUMMY_SP, "root node `mcu::clock` must be present"); } } Some(builder) } fn walk_mutate(builder: &mut Builder, cx: &mut ExtCtxt, node: &Rc<node::Node>) { let maybe_mut = node.mutator.get(); if maybe_mut.is_some() { maybe_mut.unwrap()(builder, cx, node.clone()); } for sub in node.subnodes().iter() { Builder::walk_mutate(builder, cx, sub); } } // FIXME(farcaller): verify that all nodes have been materialized fn walk_materialize(builder: &mut Builder, cx: &mut ExtCtxt, node: Rc<node::Node>) { let maybe_mat = node.materializer.get(); if maybe_mat.is_some() { maybe_mat.unwrap()(builder, cx, node.clone()); } let rev_depends = node.rev_depends_on.borrow(); for weak_sub in rev_depends.iter() { let sub = weak_sub.upgrade().unwrap(); let mut sub_deps = sub.depends_on.borrow_mut(); let deps = sub_deps.deref_mut(); let mut index = None; let mut i = 0usize; // FIXME: iter().position() for dep in deps.iter() { let strong_dep = dep.upgrade().unwrap(); if node == strong_dep { index = Some(i); break; } i = i + 1; } if index.is_none() { panic!("no index found"); } else { deps.remove(index.unwrap()); if deps.len() == 0 { Builder::walk_materialize(builder, cx, sub.clone()); } } } } pub fn new(pt: Rc<node::PlatformTree>, cx: &ExtCtxt) -> Builder { let use_zinc = cx.item_use_simple(DUMMY_SP, ast::Inherited, cx.path_ident( DUMMY_SP, cx.ident_of("zinc"))); Builder { main_stmts: vec!(), type_items: vec!(use_zinc), pt: pt, } } pub fn main_stmts(&self) -> Vec<P<ast::Stmt>> { self.main_stmts.clone() } pub fn pt(&self) -> Rc<node::PlatformTree> { self.pt.clone() } pub fn add_main_statement(&mut self, stmt: P<ast::Stmt>)
pub fn add_type_item(&mut self, item: P<ast::Item>) { self.type_items.push(item); } fn emit_main(&self, cx: &ExtCtxt) -> P<ast::Item> { // init stack let init_stack_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_stack(); )); // init data let init_data_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_data(); )); let mut stmts = vec!(init_stack_stmt, init_data_stmt); for s in self.main_stmts.clone().into_iter() { stmts.push(s); } let body = cx.block(DUMMY_SP, stmts, None); let unused_variables = cx.meta_word(DUMMY_SP, InternedString::new("unused_variables")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(unused_variables)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); self.item_fn(cx, DUMMY_SP, "platformtree_main", &[allow_noncamel], body) } fn emit_start(&self, cx: &ExtCtxt) -> P<ast::Item> { quote_item!(cx, #[start] fn start(_: isize, _: *const *const u8) -> isize { unsafe { platformtree_main(); } 0 } ).unwrap() } fn emit_morestack(&self, cx: &ExtCtxt) -> P<ast::Item> { let stmt = cx.stmt_expr(quote_expr!(&*cx, core::intrinsics::abort() // or // zinc::os::task::morestack(); )); let empty_span = DUMMY_SP; let body = cx.block(empty_span, vec!(stmt), None); self.item_fn(cx, empty_span, "__morestack", &[], body) } pub fn emit_items(&self, cx: &ExtCtxt) -> Vec<P<ast::Item>> { let non_camel_case_types = cx.meta_word(DUMMY_SP, InternedString::new("non_camel_case_types")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(non_camel_case_types)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); let pt_mod_item = cx.item_mod(DUMMY_SP, DUMMY_SP, cx.ident_of("pt"), vec!(allow_noncamel), self.type_items.clone()); if self.type_items.len() > 1 { vec!(pt_mod_item, self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } else { vec!(self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } } fn item_fn(&self, cx: &ExtCtxt, span: Span, name: &str, local_attrs: &[ast::Attribute], body: P<ast::Block>) -> P<ast::Item> { let attr_no_mangle = cx.attribute(span, cx.meta_word( span, InternedString::new("no_mangle"))); let mut attrs = vec!(attr_no_mangle); for a in local_attrs { attrs.push(a.clone()); } P(ast::Item { ident: cx.ident_of(name), attrs: attrs, id: ast::DUMMY_NODE_ID, node: ast::ItemFn( cx.fn_decl(Vec::new(), cx.ty(DUMMY_SP, ast::Ty_::TyTup(Vec::new()))), ast::Unsafety::Unsafe, ast::Constness::NotConst, abi::Rust, // TODO(farcaller): should this be abi::C? empty_generics(), body), vis: ast::Public, span: span, }) } } pub struct TokenString(pub String); impl ToTokens for TokenString { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let &TokenString(ref s) = self; (cx as &ExtParseUtils).parse_tts(s.clone()) } } pub fn add_node_dependency(node: &Rc<node::Node>, dep: &Rc<node::Node>) {
{ self.main_stmts.push(stmt); }
identifier_body
mod.rs
use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::rc::Rc; use std::ops::DerefMut; use syntax::abi; use syntax::ast::TokenTree; use syntax::ast; use syntax::ast_util::empty_generics; use syntax::codemap::{Span, DUMMY_SP}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::quote::rt::{ToTokens, ExtParseUtils}; use syntax::parse::token::InternedString; use syntax::ptr::P; use node; mod mcu; mod os; pub mod meta_args; pub struct Builder { main_stmts: Vec<P<ast::Stmt>>, type_items: Vec<P<ast::Item>>, pt: Rc<node::PlatformTree>, } impl Builder { pub fn build(cx: &mut ExtCtxt, pt: Rc<node::PlatformTree>) -> Option<Builder> { let mut builder = Builder::new(pt.clone(), cx); if !pt.expect_subnodes(cx, &["mcu", "os", "drivers"]) { return None; } match pt.get_by_path("mcu") { Some(node) => mcu::attach(&mut builder, cx, node), None => (), // TODO(farcaller): should it actually fail? } match pt.get_by_path("os") { Some(node) => os::attach(&mut builder, cx, node), None => (), // TODO(farcaller): this should fail. } match pt.get_by_path("drivers") { Some(node) => ::drivers_pt::attach(&mut builder, cx, node), None => (), } for sub in pt.nodes().iter() { Builder::walk_mutate(&mut builder, cx, sub); } let base_node = pt.get_by_path("mcu").and_then(|mcu|{mcu.get_by_path("clock")}); match base_node { Some(node) => Builder::walk_materialize(&mut builder, cx, node), None =>
} Some(builder) } fn walk_mutate(builder: &mut Builder, cx: &mut ExtCtxt, node: &Rc<node::Node>) { let maybe_mut = node.mutator.get(); if maybe_mut.is_some() { maybe_mut.unwrap()(builder, cx, node.clone()); } for sub in node.subnodes().iter() { Builder::walk_mutate(builder, cx, sub); } } // FIXME(farcaller): verify that all nodes have been materialized fn walk_materialize(builder: &mut Builder, cx: &mut ExtCtxt, node: Rc<node::Node>) { let maybe_mat = node.materializer.get(); if maybe_mat.is_some() { maybe_mat.unwrap()(builder, cx, node.clone()); } let rev_depends = node.rev_depends_on.borrow(); for weak_sub in rev_depends.iter() { let sub = weak_sub.upgrade().unwrap(); let mut sub_deps = sub.depends_on.borrow_mut(); let deps = sub_deps.deref_mut(); let mut index = None; let mut i = 0usize; // FIXME: iter().position() for dep in deps.iter() { let strong_dep = dep.upgrade().unwrap(); if node == strong_dep { index = Some(i); break; } i = i + 1; } if index.is_none() { panic!("no index found"); } else { deps.remove(index.unwrap()); if deps.len() == 0 { Builder::walk_materialize(builder, cx, sub.clone()); } } } } pub fn new(pt: Rc<node::PlatformTree>, cx: &ExtCtxt) -> Builder { let use_zinc = cx.item_use_simple(DUMMY_SP, ast::Inherited, cx.path_ident( DUMMY_SP, cx.ident_of("zinc"))); Builder { main_stmts: vec!(), type_items: vec!(use_zinc), pt: pt, } } pub fn main_stmts(&self) -> Vec<P<ast::Stmt>> { self.main_stmts.clone() } pub fn pt(&self) -> Rc<node::PlatformTree> { self.pt.clone() } pub fn add_main_statement(&mut self, stmt: P<ast::Stmt>) { self.main_stmts.push(stmt); } pub fn add_type_item(&mut self, item: P<ast::Item>) { self.type_items.push(item); } fn emit_main(&self, cx: &ExtCtxt) -> P<ast::Item> { // init stack let init_stack_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_stack(); )); // init data let init_data_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_data(); )); let mut stmts = vec!(init_stack_stmt, init_data_stmt); for s in self.main_stmts.clone().into_iter() { stmts.push(s); } let body = cx.block(DUMMY_SP, stmts, None); let unused_variables = cx.meta_word(DUMMY_SP, InternedString::new("unused_variables")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(unused_variables)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); self.item_fn(cx, DUMMY_SP, "platformtree_main", &[allow_noncamel], body) } fn emit_start(&self, cx: &ExtCtxt) -> P<ast::Item> { quote_item!(cx, #[start] fn start(_: isize, _: *const *const u8) -> isize { unsafe { platformtree_main(); } 0 } ).unwrap() } fn emit_morestack(&self, cx: &ExtCtxt) -> P<ast::Item> { let stmt = cx.stmt_expr(quote_expr!(&*cx, core::intrinsics::abort() // or // zinc::os::task::morestack(); )); let empty_span = DUMMY_SP; let body = cx.block(empty_span, vec!(stmt), None); self.item_fn(cx, empty_span, "__morestack", &[], body) } pub fn emit_items(&self, cx: &ExtCtxt) -> Vec<P<ast::Item>> { let non_camel_case_types = cx.meta_word(DUMMY_SP, InternedString::new("non_camel_case_types")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(non_camel_case_types)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); let pt_mod_item = cx.item_mod(DUMMY_SP, DUMMY_SP, cx.ident_of("pt"), vec!(allow_noncamel), self.type_items.clone()); if self.type_items.len() > 1 { vec!(pt_mod_item, self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } else { vec!(self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } } fn item_fn(&self, cx: &ExtCtxt, span: Span, 
name: &str, local_attrs: &[ast::Attribute], body: P<ast::Block>) -> P<ast::Item> { let attr_no_mangle = cx.attribute(span, cx.meta_word( span, InternedString::new("no_mangle"))); let mut attrs = vec!(attr_no_mangle); for a in local_attrs { attrs.push(a.clone()); } P(ast::Item { ident: cx.ident_of(name), attrs: attrs, id: ast::DUMMY_NODE_ID, node: ast::ItemFn( cx.fn_decl(Vec::new(), cx.ty(DUMMY_SP, ast::Ty_::TyTup(Vec::new()))), ast::Unsafety::Unsafe, ast::Constness::NotConst, abi::Rust, // TODO(farcaller): should this be abi::C? empty_generics(), body), vis: ast::Public, span: span, }) } } pub struct TokenString(pub String); impl ToTokens for TokenString { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let &TokenString(ref s) = self; (cx as &ExtParseUtils).parse_tts(s.clone()) } } pub fn add_node_dependency(node: &Rc<node::Node>, dep: &Rc<node::Node>) {
{ cx.parse_sess().span_diagnostic.span_err(DUMMY_SP, "root node `mcu::clock` must be present"); }
conditional_block
mod.rs
use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::rc::Rc; use std::ops::DerefMut; use syntax::abi; use syntax::ast::TokenTree; use syntax::ast; use syntax::ast_util::empty_generics; use syntax::codemap::{Span, DUMMY_SP}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::quote::rt::{ToTokens, ExtParseUtils}; use syntax::parse::token::InternedString; use syntax::ptr::P; use node; mod mcu; mod os; pub mod meta_args; pub struct Builder { main_stmts: Vec<P<ast::Stmt>>, type_items: Vec<P<ast::Item>>, pt: Rc<node::PlatformTree>, } impl Builder { pub fn build(cx: &mut ExtCtxt, pt: Rc<node::PlatformTree>) -> Option<Builder> { let mut builder = Builder::new(pt.clone(), cx); if !pt.expect_subnodes(cx, &["mcu", "os", "drivers"]) { return None; } match pt.get_by_path("mcu") { Some(node) => mcu::attach(&mut builder, cx, node), None => (), // TODO(farcaller): should it actually fail? } match pt.get_by_path("os") { Some(node) => os::attach(&mut builder, cx, node), None => (), // TODO(farcaller): this should fail. } match pt.get_by_path("drivers") { Some(node) => ::drivers_pt::attach(&mut builder, cx, node), None => (), } for sub in pt.nodes().iter() { Builder::walk_mutate(&mut builder, cx, sub); } let base_node = pt.get_by_path("mcu").and_then(|mcu|{mcu.get_by_path("clock")}); match base_node { Some(node) => Builder::walk_materialize(&mut builder, cx, node), None => { cx.parse_sess().span_diagnostic.span_err(DUMMY_SP, "root node `mcu::clock` must be present"); } } Some(builder) } fn walk_mutate(builder: &mut Builder, cx: &mut ExtCtxt, node: &Rc<node::Node>) { let maybe_mut = node.mutator.get(); if maybe_mut.is_some() { maybe_mut.unwrap()(builder, cx, node.clone()); } for sub in node.subnodes().iter() { Builder::walk_mutate(builder, cx, sub); } } // FIXME(farcaller): verify that all nodes have been materialized fn walk_materialize(builder: &mut Builder, cx: &mut ExtCtxt, node: Rc<node::Node>) { let maybe_mat = node.materializer.get(); if maybe_mat.is_some() { maybe_mat.unwrap()(builder, cx, node.clone()); } let rev_depends = node.rev_depends_on.borrow(); for weak_sub in rev_depends.iter() { let sub = weak_sub.upgrade().unwrap(); let mut sub_deps = sub.depends_on.borrow_mut(); let deps = sub_deps.deref_mut(); let mut index = None; let mut i = 0usize; // FIXME: iter().position() for dep in deps.iter() { let strong_dep = dep.upgrade().unwrap(); if node == strong_dep { index = Some(i); break; } i = i + 1; } if index.is_none() { panic!("no index found"); } else { deps.remove(index.unwrap()); if deps.len() == 0 { Builder::walk_materialize(builder, cx, sub.clone()); } } } } pub fn new(pt: Rc<node::PlatformTree>, cx: &ExtCtxt) -> Builder { let use_zinc = cx.item_use_simple(DUMMY_SP, ast::Inherited, cx.path_ident( DUMMY_SP, cx.ident_of("zinc"))); Builder { main_stmts: vec!(), type_items: vec!(use_zinc), pt: pt, } } pub fn main_stmts(&self) -> Vec<P<ast::Stmt>> { self.main_stmts.clone() } pub fn pt(&self) -> Rc<node::PlatformTree> { self.pt.clone() } pub fn add_main_statement(&mut self, stmt: P<ast::Stmt>) { self.main_stmts.push(stmt); } pub fn 
add_type_item(&mut self, item: P<ast::Item>) { self.type_items.push(item); } fn emit_main(&self, cx: &ExtCtxt) -> P<ast::Item> { // init stack let init_stack_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_stack(); )); // init data let init_data_stmt = cx.stmt_expr(quote_expr!(&*cx, zinc::hal::mem_init::init_data(); )); let mut stmts = vec!(init_stack_stmt, init_data_stmt); for s in self.main_stmts.clone().into_iter() { stmts.push(s); } let body = cx.block(DUMMY_SP, stmts, None); let unused_variables = cx.meta_word(DUMMY_SP, InternedString::new("unused_variables")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(unused_variables)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); self.item_fn(cx, DUMMY_SP, "platformtree_main", &[allow_noncamel], body) } fn emit_start(&self, cx: &ExtCtxt) -> P<ast::Item> { quote_item!(cx, #[start] fn start(_: isize, _: *const *const u8) -> isize { unsafe { platformtree_main(); } 0 } ).unwrap() } fn
(&self, cx: &ExtCtxt) -> P<ast::Item> { let stmt = cx.stmt_expr(quote_expr!(&*cx, core::intrinsics::abort() // or // zinc::os::task::morestack(); )); let empty_span = DUMMY_SP; let body = cx.block(empty_span, vec!(stmt), None); self.item_fn(cx, empty_span, "__morestack", &[], body) } pub fn emit_items(&self, cx: &ExtCtxt) -> Vec<P<ast::Item>> { let non_camel_case_types = cx.meta_word(DUMMY_SP, InternedString::new("non_camel_case_types")); let allow = cx.meta_list( DUMMY_SP, InternedString::new("allow"), vec!(non_camel_case_types)); let allow_noncamel = cx.attribute(DUMMY_SP, allow); let pt_mod_item = cx.item_mod(DUMMY_SP, DUMMY_SP, cx.ident_of("pt"), vec!(allow_noncamel), self.type_items.clone()); if self.type_items.len() > 1 { vec!(pt_mod_item, self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } else { vec!(self.emit_main(cx), self.emit_start(cx), self.emit_morestack(cx)) } } fn item_fn(&self, cx: &ExtCtxt, span: Span, name: &str, local_attrs: &[ast::Attribute], body: P<ast::Block>) -> P<ast::Item> { let attr_no_mangle = cx.attribute(span, cx.meta_word( span, InternedString::new("no_mangle"))); let mut attrs = vec!(attr_no_mangle); for a in local_attrs { attrs.push(a.clone()); } P(ast::Item { ident: cx.ident_of(name), attrs: attrs, id: ast::DUMMY_NODE_ID, node: ast::ItemFn( cx.fn_decl(Vec::new(), cx.ty(DUMMY_SP, ast::Ty_::TyTup(Vec::new()))), ast::Unsafety::Unsafe, ast::Constness::NotConst, abi::Rust, // TODO(farcaller): should this be abi::C? empty_generics(), body), vis: ast::Public, span: span, }) } } pub struct TokenString(pub String); impl ToTokens for TokenString { fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let &TokenString(ref s) = self; (cx as &ExtParseUtils).parse_tts(s.clone()) } } pub fn add_node_dependency(node: &Rc<node::Node>, dep: &Rc<node::Node>) {
emit_morestack
identifier_name
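The walk_materialize routine in the record above drains a reverse-dependency graph: once a node is materialized it is removed from each dependant's depends_on list, and any dependant left with no remaining dependencies is materialized in turn (the `// FIXME: iter().position()` note flags the manual index search). A minimal Python sketch of that dependency-draining idea — the names here are illustrative, not taken from the record:

def materialize(node, rev_depends, depends_on, emit):
    # materialize `node`, then any dependant whose dependencies are now all met
    emit(node)
    for sub in rev_depends.get(node, []):
        deps = depends_on[sub]
        deps.remove(node)  # list.remove plays the role of the manual index loop
        if not deps:       # every dependency satisfied
            materialize(sub, rev_depends, depends_on, emit)

order = []
depends_on = {"uart": ["clock"], "timer": ["clock"]}
rev_depends = {"clock": ["uart", "timer"]}
materialize("clock", rev_depends, depends_on, order.append)
assert order == ["clock", "uart", "timer"]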
font.rs
use webrender_traits; macro_rules! ot_tag { ($t1:expr, $t2:expr, $t3:expr, $t4:expr) => ( (($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32) ); } pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S'); pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B'); pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n'); static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT; // FontHandle encapsulates access to the platform's font API, // e.g. quartz, FreeType. It provides access to metrics and tables // needed by the text shaper as well as access to the underlying font // resources needed by the graphics layer to draw glyphs. pub trait FontHandleMethods: Sized { fn new_from_template(fctx: &FontContextHandle, template: Arc<FontTemplateData>, pt_size: Option<Au>) -> Result<Self, ()>; fn template(&self) -> Arc<FontTemplateData>; fn family_name(&self) -> String; fn face_name(&self) -> String; fn is_italic(&self) -> bool; fn boldness(&self) -> font_weight::T; fn stretchiness(&self) -> font_stretch::T; fn glyph_index(&self, codepoint: char) -> Option<GlyphId>; fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>; fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel; /// Can this font do basic horizontal LTR shaping without Harfbuzz? fn can_do_fast_shaping(&self) -> bool; fn metrics(&self) -> FontMetrics; fn table_for_tag(&self, FontTableTag) -> Option<FontTable>; } // Used to abstract over the shaper's choice of fixed int representation. pub type FractionalPixel = f64; pub type FontTableTag = u32; trait FontTableTagConversions { fn tag_to_str(&self) -> String; } impl FontTableTagConversions for FontTableTag { fn tag_to_str(&self) -> String { let bytes = [(self >> 24) as u8, (self >> 16) as u8, (self >> 8) as u8, (self >> 0) as u8]; str::from_utf8(&bytes).unwrap().to_owned() } } pub trait FontTableMethods { fn buffer(&self) -> &[u8]; } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct FontMetrics { pub underline_size: Au, pub underline_offset: Au, pub strikeout_size: Au, pub strikeout_offset: Au, pub leading: Au, pub x_height: Au, pub em_size: Au, pub ascent: Au, pub descent: Au, pub max_advance: Au, pub average_advance: Au, pub line_gap: Au, } #[derive(Debug)] pub struct Font { pub handle: FontHandle, pub metrics: FontMetrics, pub variant: font_variant::T, pub descriptor: FontTemplateDescriptor, pub requested_pt_size: Au, pub actual_pt_size: Au, shaper: Option<Shaper>, shape_cache: RefCell<HashCache<ShapeCacheEntry, Arc<GlyphStore>>>, glyph_advance_cache: RefCell<HashCache<u32, FractionalPixel>>, pub font_key: Option<webrender_traits::FontKey>, } impl Font { pub fn new(handle: FontHandle, variant: font_variant::T, descriptor: FontTemplateDescriptor, requested_pt_size: Au, actual_pt_size: Au, font_key: Option<webrender_traits::FontKey>) -> Font { let metrics = handle.metrics(); Font { handle: handle, shaper: None, variant: variant, descriptor: descriptor, requested_pt_size: requested_pt_size, actual_pt_size: actual_pt_size, metrics: metrics, shape_cache: RefCell::new(HashCache::new()), glyph_advance_cache: RefCell::new(HashCache::new()), font_key: font_key, } } } bitflags! 
{ pub flags ShapingFlags: u8 { #[doc = "Set if the text is entirely whitespace."] const IS_WHITESPACE_SHAPING_FLAG = 0x01, #[doc = "Set if we are to ignore ligatures."] const IGNORE_LIGATURES_SHAPING_FLAG = 0x02, #[doc = "Set if we are to disable kerning."] const DISABLE_KERNING_SHAPING_FLAG = 0x04, #[doc = "Text direction is right-to-left."] const RTL_FLAG = 0x08, } } /// Various options that control text shaping. #[derive(Clone, Eq, PartialEq, Hash, Copy, Debug)] pub struct ShapingOptions { /// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property. /// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null. pub letter_spacing: Option<Au>, /// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property. pub word_spacing: Au, /// The Unicode script property of the characters in this run. pub script: Script, /// Various flags. pub flags: ShapingFlags, } /// An entry in the shape cache. #[derive(Clone, Eq, PartialEq, Hash, Debug)] struct ShapeCacheEntry { text: String, options: ShapingOptions, } impl Font { pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> { let this = self as *const Font; let mut shaper = self.shaper.take(); let lookup_key = ShapeCacheEntry { text: text.to_owned(), options: *options,
options.flags.contains(IS_WHITESPACE_SHAPING_FLAG), options.flags.contains(RTL_FLAG)); if self.can_do_fast_shaping(text, options) { debug!("shape_text: Using ASCII fast path."); self.shape_text_fast(text, options, &mut glyphs); } else { debug!("shape_text: Using Harfbuzz."); if shaper.is_none() { shaper = Some(Shaper::new(this)); } shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs); } let end_time = time::precise_time_ns(); TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize, Ordering::Relaxed); Arc::new(glyphs) }); self.shaper = shaper; result } fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool { options.script == Script::Latin && !options.flags.contains(RTL_FLAG) && self.handle.can_do_fast_shaping() && text.is_ascii() } /// Fast path for ASCII text that only needs simple horizontal LTR kerning. fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) { let mut prev_glyph_id = None; for (i, byte) in text.bytes().enumerate() { let character = byte as char; let glyph_id = match self.glyph_index(character) { Some(id) => id, None => continue, }; let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id)); if character == ' ' { advance += options.word_spacing; } if let Some(letter_spacing) = options.letter_spacing { advance += letter_spacing; } let offset = prev_glyph_id.map(|prev| { let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id)); advance += h_kerning; Point2D::new(h_kerning, Au(0)) }); let glyph = GlyphData::new(glyph_id, advance, offset, true, true); glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph); prev_glyph_id = Some(glyph_id); } glyphs.finalize_changes(); } pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> { let result = self.handle.table_for_tag(tag); let status = if result.is_some() { "Found" } else { "Didn't find" }; debug!("{} font table[{}] with family={}, face={}", status, tag.tag_to
}; let result = self.shape_cache.borrow_mut().find_or_create(lookup_key, || { let start_time = time::precise_time_ns(); let mut glyphs = GlyphStore::new(text.len(),
random_line_split
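The ot_tag! macro in this record packs four ASCII characters into a big-endian u32 OpenType table tag, and tag_to_str unpacks it again. A small Python sketch of the same packing, with a round-trip check:

def ot_tag(a, b, c, d):
    # pack four ASCII characters into a big-endian 32-bit tag, as ot_tag! does
    return (ord(a) << 24) | (ord(b) << 16) | (ord(c) << 8) | ord(d)

def tag_to_str(tag):
    # inverse operation, mirroring FontTableTagConversions::tag_to_str
    return bytes([(tag >> 24) & 0xFF, (tag >> 16) & 0xFF,
                  (tag >> 8) & 0xFF, tag & 0xFF]).decode("ascii")

GPOS = ot_tag('G', 'P', 'O', 'S')
assert GPOS == 0x47504F53
assert tag_to_str(GPOS) == "GPOS"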
font.rs
use webrender_traits; macro_rules! ot_tag { ($t1:expr, $t2:expr, $t3:expr, $t4:expr) => ( (($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32) ); } pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S'); pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B'); pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n'); static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT; // FontHandle encapsulates access to the platform's font API, // e.g. quartz, FreeType. It provides access to metrics and tables // needed by the text shaper as well as access to the underlying font // resources needed by the graphics layer to draw glyphs. pub trait FontHandleMethods: Sized { fn new_from_template(fctx: &FontContextHandle, template: Arc<FontTemplateData>, pt_size: Option<Au>) -> Result<Self, ()>; fn template(&self) -> Arc<FontTemplateData>; fn family_name(&self) -> String; fn face_name(&self) -> String; fn is_italic(&self) -> bool; fn boldness(&self) -> font_weight::T; fn stretchiness(&self) -> font_stretch::T; fn glyph_index(&self, codepoint: char) -> Option<GlyphId>; fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>; fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel; /// Can this font do basic horizontal LTR shaping without Harfbuzz? fn can_do_fast_shaping(&self) -> bool; fn metrics(&self) -> FontMetrics; fn table_for_tag(&self, FontTableTag) -> Option<FontTable>; } // Used to abstract over the shaper's choice of fixed int representation. pub type FractionalPixel = f64; pub type FontTableTag = u32; trait FontTableTagConversions { fn tag_to_str(&self) -> String; } impl FontTableTagConversions for FontTableTag { fn tag_to_str(&self) -> String { let bytes = [(self >> 24) as u8, (self >> 16) as u8, (self >> 8) as u8, (self >> 0) as u8]; str::from_utf8(&bytes).unwrap().to_owned() } } pub trait FontTableMethods { fn buffer(&self) -> &[u8]; } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct FontMetrics { pub underline_size: Au, pub underline_offset: Au, pub strikeout_size: Au, pub strikeout_offset: Au, pub leading: Au, pub x_height: Au, pub em_size: Au, pub ascent: Au, pub descent: Au, pub max_advance: Au, pub average_advance: Au, pub line_gap: Au, } #[derive(Debug)] pub struct Font { pub handle: FontHandle, pub metrics: FontMetrics, pub variant: font_variant::T, pub descriptor: FontTemplateDescriptor, pub requested_pt_size: Au, pub actual_pt_size: Au, shaper: Option<Shaper>, shape_cache: RefCell<HashCache<ShapeCacheEntry, Arc<GlyphStore>>>, glyph_advance_cache: RefCell<HashCache<u32, FractionalPixel>>, pub font_key: Option<webrender_traits::FontKey>, } impl Font { pub fn new(handle: FontHandle, variant: font_variant::T, descriptor: FontTemplateDescriptor, requested_pt_size: Au, actual_pt_size: Au, font_key: Option<webrender_traits::FontKey>) -> Font { let metrics = handle.metrics(); Font { handle: handle, shaper: None, variant: variant, descriptor: descriptor, requested_pt_size: requested_pt_size, actual_pt_size: actual_pt_size, metrics: metrics, shape_cache: RefCell::new(HashCache::new()), glyph_advance_cache: RefCell::new(HashCache::new()), font_key: font_key, } } } bitflags! 
{ pub flags ShapingFlags: u8 { #[doc = "Set if the text is entirely whitespace."] const IS_WHITESPACE_SHAPING_FLAG = 0x01, #[doc = "Set if we are to ignore ligatures."] const IGNORE_LIGATURES_SHAPING_FLAG = 0x02, #[doc = "Set if we are to disable kerning."] const DISABLE_KERNING_SHAPING_FLAG = 0x04, #[doc = "Text direction is right-to-left."] const RTL_FLAG = 0x08, } } /// Various options that control text shaping. #[derive(Clone, Eq, PartialEq, Hash, Copy, Debug)] pub struct ShapingOptions { /// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property. /// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null. pub letter_spacing: Option<Au>, /// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property. pub word_spacing: Au, /// The Unicode script property of the characters in this run. pub script: Script, /// Various flags. pub flags: ShapingFlags, } /// An entry in the shape cache. #[derive(Clone, Eq, PartialEq, Hash, Debug)] struct ShapeCacheEntry { text: String, options: ShapingOptions, } impl Font { pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> { let this = self as *const Font; let mut shaper = self.shaper.take(); let lookup_key = ShapeCacheEntry { text: text.to_owned(), options: *options, }; let result = self.shape_cache.borrow_mut().find_or_create(lookup_key, || { let start_time = time::precise_time_ns(); let mut glyphs = GlyphStore::new(text.len(), options.flags.contains(IS_WHITESPACE_SHAPING_FLAG), options.flags.contains(RTL_FLAG)); if self.can_do_fast_shaping(text, options) { debug!("shape_text: Using ASCII fast path."); self.shape_text_fast(text, options, &mut glyphs); } else { debug!("shape_text: Using Harfbuzz."); if shaper.is_none() { shaper = Some(Shaper::new(this)); } shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs); } let end_time = time::precise_time_ns(); TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize, Ordering::Relaxed); Arc::new(glyphs) }); self.shaper = shaper; result } fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool { options.script == Script::Latin && !options.flags.contains(RTL_FLAG) && self.handle.can_do_fast_shaping() && text.is_ascii() } /// Fast path for ASCII text that only needs simple horizontal LTR kerning. fn
(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) { let mut prev_glyph_id = None; for (i, byte) in text.bytes().enumerate() { let character = byte as char; let glyph_id = match self.glyph_index(character) { Some(id) => id, None => continue, }; let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id)); if character == ' ' { advance += options.word_spacing; } if let Some(letter_spacing) = options.letter_spacing { advance += letter_spacing; } let offset = prev_glyph_id.map(|prev| { let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id)); advance += h_kerning; Point2D::new(h_kerning, Au(0)) }); let glyph = GlyphData::new(glyph_id, advance, offset, true, true); glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph); prev_glyph_id = Some(glyph_id); } glyphs.finalize_changes(); } pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> { let result = self.handle.table_for_tag(tag); let status = if result.is_some() { "Found" } else { "Didn't find" }; debug!("{} font table[{}] with family={}, face={}", status, tag.tag
shape_text_fast
identifier_name
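shape_text above memoizes shaping results: the cache key is the (text, options) pair, and the expensive shaping closure only runs on a miss. A minimal Python sketch of the find_or_create pattern — the HashCache API itself is not shown in the record, so this is an assumption about its contract:

def find_or_create(cache, key, compute):
    # return the cached value for key, computing and storing it on a miss
    try:
        return cache[key]
    except KeyError:
        cache[key] = compute()
        return cache[key]

cache = {}
calls = []
shape = lambda: calls.append(1) or "glyphs"  # stands in for the shaping closure
assert find_or_create(cache, ("abc", "latin"), shape) == "glyphs"
assert find_or_create(cache, ("abc", "latin"), shape) == "glyphs"
assert len(calls) == 1  # the expensive shaping ran only once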
ovnclients.py
# Copyright 2018 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import netaddr from rally.common import logging from rally.common.utils import RandomNameGeneratorMixin from rally_ovs.plugins.ovs import ovsclients from rally_ovs.plugins.ovs import utils LOG = logging.getLogger(__name__) class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin): def _get_ovn_controller(self, install_method="sandbox"): ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", install_method, self.context['controller']['host_container']) ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None)) return ovn_nbctl def _start_daemon(self): ovn_nbctl = self._get_ovn_controller(self.install_method) return ovn_nbctl.start_daemon() def _stop_daemon(self):
def _restart_daemon(self): self._stop_daemon() return self._start_daemon() def _create_lswitches(self, lswitch_create_args, num_switches=-1): self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX" if (num_switches == -1): num_switches = lswitch_create_args.get("amount", 1) batch = lswitch_create_args.get("batch", num_switches) start_cidr = lswitch_create_args.get("start_cidr", "") if start_cidr: start_cidr = netaddr.IPNetwork(start_cidr) mcast_snoop = lswitch_create_args.get("mcast_snoop", "true") mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300) mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048) LOG.info("Create lswitches method: %s" % self.install_method) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode() flush_count = batch lswitches = [] for i in range(num_switches): name = self.generate_random_name() if start_cidr: cidr = start_cidr.next(i) name = "lswitch_%s" % cidr else: name = self.generate_random_name() other_cfg = { 'mcast_snoop': mcast_snoop, 'mcast_idle_timeout': mcast_idle, 'mcast_table_size': mcast_table_size } lswitch = ovn_nbctl.lswitch_add(name, other_cfg) if start_cidr: lswitch["cidr"] = cidr LOG.info("create %(name)s %(cidr)s" % \ {"name": name, "cidr": lswitch.get("cidr", "")}) lswitches.append(lswitch) flush_count -= 1 if flush_count < 1: ovn_nbctl.flush() flush_count = batch ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lswitches def _create_routers(self, router_create_args): self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX" amount = router_create_args.get("amount", 1) batch = router_create_args.get("batch", 1) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode() flush_count = batch lrouters = [] for i in range(amount): name = self.generate_random_name() lrouter = ovn_nbctl.lrouter_add(name) lrouters.append(lrouter) flush_count -= 1 if flush_count < 1: ovn_nbctl.flush() flush_count = batch ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lrouters def _connect_network_to_router(self, router, network): LOG.info("Connect network %s to router %s" % (network["name"], router["name"])) ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", self.install_method, self.context['controller']['host_container']) ovn_nbctl.enable_batch_mode(False) base_mac = [i[:2] for i in self.task["uuid"].split('-')] base_mac[0] = str(hex(int(base_mac[0], 16) & 254)) base_mac[3:] = ['00']*3 mac = utils.get_random_mac(base_mac) lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac, str(network["cidr"])) ovn_nbctl.flush() switch_router_port = "rp-" + network["name"] lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port) ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port, ('options', {"router-port":network["name"]}), ('type', 'router'), ('address', 'router')) ovn_nbctl.flush() def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router): for lrouter in lrouters: LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"])) for lnetwork in lnetworks[:networks_per_router]: LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"])) self._connect_network_to_router(lrouter, lnetwork) lnetworks = lnetworks[networks_per_router:]
ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.stop_daemon()
identifier_body
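_create_lswitches and _create_routers in the record above share a batching idiom: enable batch mode, decrement a flush_count per queued command, flush whenever it reaches zero, and flush once more after the loop so nothing queued is lost. A small Python sketch of that pattern — run_batched and its arguments are illustrative names, not part of rally_ovs:

def run_batched(commands, batch, flush):
    # queue commands, flushing every `batch` of them, then flush the remainder
    flush_count = batch
    for cmd in commands:
        cmd()
        flush_count -= 1
        if flush_count < 1:
            flush()
            flush_count = batch
    flush()  # ensure all queued commands are run

flushes = []
run_batched([lambda: None] * 5, batch=2, flush=lambda: flushes.append(1))
assert len(flushes) == 3  # after command 2, command 4, and the final flush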
ovnclients.py
# Copyright 2018 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import netaddr from rally.common import logging from rally.common.utils import RandomNameGeneratorMixin from rally_ovs.plugins.ovs import ovsclients from rally_ovs.plugins.ovs import utils LOG = logging.getLogger(__name__) class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin): def _get_ovn_controller(self, install_method="sandbox"): ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", install_method, self.context['controller']['host_container']) ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None)) return ovn_nbctl def _start_daemon(self): ovn_nbctl = self._get_ovn_controller(self.install_method) return ovn_nbctl.start_daemon() def _stop_daemon(self): ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.stop_daemon() def _restart_daemon(self): self._stop_daemon() return self._start_daemon() def _create_lswitches(self, lswitch_create_args, num_switches=-1): self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX" if (num_switches == -1): num_switches = lswitch_create_args.get("amount", 1) batch = lswitch_create_args.get("batch", num_switches) start_cidr = lswitch_create_args.get("start_cidr", "") if start_cidr: start_cidr = netaddr.IPNetwork(start_cidr) mcast_snoop = lswitch_create_args.get("mcast_snoop", "true") mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300) mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048) LOG.info("Create lswitches method: %s" % self.install_method) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode() flush_count = batch lswitches = [] for i in range(num_switches): name = self.generate_random_name() if start_cidr: cidr = start_cidr.next(i) name = "lswitch_%s" % cidr else: name = self.generate_random_name() other_cfg = { 'mcast_snoop': mcast_snoop, 'mcast_idle_timeout': mcast_idle, 'mcast_table_size': mcast_table_size } lswitch = ovn_nbctl.lswitch_add(name, other_cfg) if start_cidr: lswitch["cidr"] = cidr LOG.info("create %(name)s %(cidr)s" % \ {"name": name, "cidr": lswitch.get("cidr", "")}) lswitches.append(lswitch) flush_count -= 1 if flush_count < 1:
ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lswitches def _create_routers(self, router_create_args): self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX" amount = router_create_args.get("amount", 1) batch = router_create_args.get("batch", 1) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode() flush_count = batch lrouters = [] for i in range(amount): name = self.generate_random_name() lrouter = ovn_nbctl.lrouter_add(name) lrouters.append(lrouter) flush_count -= 1 if flush_count < 1: ovn_nbctl.flush() flush_count = batch ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lrouters def _connect_network_to_router(self, router, network): LOG.info("Connect network %s to router %s" % (network["name"], router["name"])) ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", self.install_method, self.context['controller']['host_container']) ovn_nbctl.enable_batch_mode(False) base_mac = [i[:2] for i in self.task["uuid"].split('-')] base_mac[0] = str(hex(int(base_mac[0], 16) & 254)) base_mac[3:] = ['00']*3 mac = utils.get_random_mac(base_mac) lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac, str(network["cidr"])) ovn_nbctl.flush() switch_router_port = "rp-" + network["name"] lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port) ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port, ('options', {"router-port":network["name"]}), ('type', 'router'), ('address', 'router')) ovn_nbctl.flush() def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router): for lrouter in lrouters: LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"])) for lnetwork in lnetworks[:networks_per_router]: LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"])) self._connect_network_to_router(lrouter, lnetwork) lnetworks = lnetworks[networks_per_router:]
ovn_nbctl.flush() flush_count = batch
conditional_block
ovnclients.py
# Copyright 2018 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import netaddr from rally.common import logging from rally.common.utils import RandomNameGeneratorMixin from rally_ovs.plugins.ovs import ovsclients from rally_ovs.plugins.ovs import utils LOG = logging.getLogger(__name__) class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin): def _get_ovn_controller(self, install_method="sandbox"): ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", install_method, self.context['controller']['host_container']) ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None)) return ovn_nbctl def _start_daemon(self): ovn_nbctl = self._get_ovn_controller(self.install_method) return ovn_nbctl.start_daemon() def _stop_daemon(self): ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.stop_daemon() def _restart_daemon(self): self._stop_daemon() return self._start_daemon() def _create_lswitches(self, lswitch_create_args, num_switches=-1): self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX" if (num_switches == -1): num_switches = lswitch_create_args.get("amount", 1) batch = lswitch_create_args.get("batch", num_switches) start_cidr = lswitch_create_args.get("start_cidr", "") if start_cidr: start_cidr = netaddr.IPNetwork(start_cidr) mcast_snoop = lswitch_create_args.get("mcast_snoop", "true") mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300) mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048) LOG.info("Create lswitches method: %s" % self.install_method) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode() flush_count = batch lswitches = [] for i in range(num_switches): name = self.generate_random_name() if start_cidr: cidr = start_cidr.next(i) name = "lswitch_%s" % cidr else: name = self.generate_random_name() other_cfg = { 'mcast_snoop': mcast_snoop, 'mcast_idle_timeout': mcast_idle, 'mcast_table_size': mcast_table_size } lswitch = ovn_nbctl.lswitch_add(name, other_cfg) if start_cidr: lswitch["cidr"] = cidr LOG.info("create %(name)s %(cidr)s" % \ {"name": name, "cidr": lswitch.get("cidr", "")}) lswitches.append(lswitch) flush_count -= 1 if flush_count < 1: ovn_nbctl.flush() flush_count = batch ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lswitches def _create_routers(self, router_create_args): self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX" amount = router_create_args.get("amount", 1)
flush_count = batch lrouters = [] for i in range(amount): name = self.generate_random_name() lrouter = ovn_nbctl.lrouter_add(name) lrouters.append(lrouter) flush_count -= 1 if flush_count < 1: ovn_nbctl.flush() flush_count = batch ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lrouters def _connect_network_to_router(self, router, network): LOG.info("Connect network %s to router %s" % (network["name"], router["name"])) ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", self.install_method, self.context['controller']['host_container']) ovn_nbctl.enable_batch_mode(False) base_mac = [i[:2] for i in self.task["uuid"].split('-')] base_mac[0] = str(hex(int(base_mac[0], 16) & 254)) base_mac[3:] = ['00']*3 mac = utils.get_random_mac(base_mac) lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac, str(network["cidr"])) ovn_nbctl.flush() switch_router_port = "rp-" + network["name"] lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port) ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port, ('options', {"router-port":network["name"]}), ('type', 'router'), ('address', 'router')) ovn_nbctl.flush() def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router): for lrouter in lrouters: LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"])) for lnetwork in lnetworks[:networks_per_router]: LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"])) self._connect_network_to_router(lrouter, lnetwork) lnetworks = lnetworks[networks_per_router:]
batch = router_create_args.get("batch", 1) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode()
random_line_split
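_connect_network_to_router derives a deterministic MAC prefix from the task UUID: the first two hex digits of each UUID segment become candidate octets, `& 254` clears the low (multicast) bit of the first octet so the address stays unicast, and the last three octets are zeroed for utils.get_random_mac to fill in. A Python sketch of that derivation; random_mac here is a hypothetical stand-in for the rally_ovs helper, not its real implementation:

import random

def base_mac_from_uuid(task_uuid):
    # two hex digits per UUID segment; clear the multicast bit; zero the host octets
    octets = [seg[:2] for seg in task_uuid.split('-')]
    octets[0] = "%02x" % (int(octets[0], 16) & 254)
    octets[3:] = ['00'] * 3
    return octets

def random_mac(base):
    # hypothetical stand-in for utils.get_random_mac: randomize the zeroed octets
    return ':'.join("%02x" % random.randint(0, 255) if o == '00' else o
                    for o in base)

base = base_mac_from_uuid("9f86d081-884c-4d65-9ab1-3c25e4f8e4c2")
assert int(base[0], 16) & 1 == 0  # multicast bit cleared, address is unicast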
ovnclients.py
# Copyright 2018 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import netaddr from rally.common import logging from rally.common.utils import RandomNameGeneratorMixin from rally_ovs.plugins.ovs import ovsclients from rally_ovs.plugins.ovs import utils LOG = logging.getLogger(__name__) class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin): def _get_ovn_controller(self, install_method="sandbox"): ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", install_method, self.context['controller']['host_container']) ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None)) return ovn_nbctl def _start_daemon(self): ovn_nbctl = self._get_ovn_controller(self.install_method) return ovn_nbctl.start_daemon() def
(self): ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.stop_daemon() def _restart_daemon(self): self._stop_daemon() return self._start_daemon() def _create_lswitches(self, lswitch_create_args, num_switches=-1): self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX" if (num_switches == -1): num_switches = lswitch_create_args.get("amount", 1) batch = lswitch_create_args.get("batch", num_switches) start_cidr = lswitch_create_args.get("start_cidr", "") if start_cidr: start_cidr = netaddr.IPNetwork(start_cidr) mcast_snoop = lswitch_create_args.get("mcast_snoop", "true") mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300) mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048) LOG.info("Create lswitches method: %s" % self.install_method) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode() flush_count = batch lswitches = [] for i in range(num_switches): name = self.generate_random_name() if start_cidr: cidr = start_cidr.next(i) name = "lswitch_%s" % cidr else: name = self.generate_random_name() other_cfg = { 'mcast_snoop': mcast_snoop, 'mcast_idle_timeout': mcast_idle, 'mcast_table_size': mcast_table_size } lswitch = ovn_nbctl.lswitch_add(name, other_cfg) if start_cidr: lswitch["cidr"] = cidr LOG.info("create %(name)s %(cidr)s" % \ {"name": name, "cidr": lswitch.get("cidr", "")}) lswitches.append(lswitch) flush_count -= 1 if flush_count < 1: ovn_nbctl.flush() flush_count = batch ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lswitches def _create_routers(self, router_create_args): self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX" amount = router_create_args.get("amount", 1) batch = router_create_args.get("batch", 1) ovn_nbctl = self._get_ovn_controller(self.install_method) ovn_nbctl.enable_batch_mode() flush_count = batch lrouters = [] for i in range(amount): name = self.generate_random_name() lrouter = ovn_nbctl.lrouter_add(name) lrouters.append(lrouter) flush_count -= 1 if flush_count < 1: ovn_nbctl.flush() flush_count = batch ovn_nbctl.flush() # ensure all commands are run ovn_nbctl.enable_batch_mode(False) return lrouters def _connect_network_to_router(self, router, network): LOG.info("Connect network %s to router %s" % (network["name"], router["name"])) ovn_nbctl = self.controller_client("ovn-nbctl") ovn_nbctl.set_sandbox("controller-sandbox", self.install_method, self.context['controller']['host_container']) ovn_nbctl.enable_batch_mode(False) base_mac = [i[:2] for i in self.task["uuid"].split('-')] base_mac[0] = str(hex(int(base_mac[0], 16) & 254)) base_mac[3:] = ['00']*3 mac = utils.get_random_mac(base_mac) lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac, str(network["cidr"])) ovn_nbctl.flush() switch_router_port = "rp-" + network["name"] lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port) ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port, ('options', {"router-port":network["name"]}), ('type', 'router'), ('address', 'router')) ovn_nbctl.flush() def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router): for lrouter in lrouters: LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"])) for lnetwork in lnetworks[:networks_per_router]: LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"])) self._connect_network_to_router(lrouter, lnetwork) lnetworks = lnetworks[networks_per_router:]
_stop_daemon
identifier_name
utils.ts
import { Condition, Status, STATUS_TYPE, K8sObject } from 'kubeflow'; import { V1Container } from '@kubernetes/client-node'; import { InferenceServiceK8s, PredictorSpec, PredictorExtensionSpec, ExplainerSpec, } from '../types/kfserving/v1beta1'; /* * general util functions */ export function dictIsEmpty(obj: any): boolean { return Object.keys(obj).length === 0; } /* * kfserving helpers */ export function svcHasComponent( svc: InferenceServiceK8s, component: string, ): boolean { return !!svc.spec[component]; } export function getSvcComponents(svc: InferenceServiceK8s): string[] { const components: string[] = []; ['predictor', 'transformer', 'explainer'].forEach(c => { if (!svcHasComponent(svc, c)) { return; } components.push(c); }); return components; } export function getReadyCondition(obj: K8sObject): Condition { let cs: Condition[] = []; try { cs = obj.status.conditions; } catch (err) { return undefined; } if (!cs) { return undefined; } for (const c of cs) { if (c.type !== 'Ready') { continue; } return c; } } export function getK8sObjectUiStatus(obj: K8sObject): Status { const status: Status = { phase: '', state: '', message: '' }; if (obj.metadata.deletionTimestamp) { status.phase = STATUS_TYPE.TERMINATING; status.message = `${obj.kind} is being deleted`; return status; } if (!obj.status) { status.phase = STATUS_TYPE.UNAVAILABLE; status.message = `${obj.kind} has no status`; return status; } const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { status.phase = STATUS_TYPE.WARNING; status.message = 'No Ready condition available'; return status; } if (readyCondition.status === 'True') { status.phase = STATUS_TYPE.READY; status.message = `${obj.kind} is Ready`; return status; } status.phase = STATUS_TYPE.WAITING; status.message = readyCondition.message; return status; } export function getK8sObjectStatus(obj: K8sObject): [string, string] { const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { return [`Couldn't deduce the status. Missing Ready condition`, 'warning']; }
return ['Ready', 'check_circle']; } return [readyCondition.message, 'warning']; } // functions for processing the InferenceService spec export function getPredictorType(predictor: PredictorSpec): string { if ('tensorflow' in predictor) { return 'Tensorflow'; } if ('triton' in predictor) { return 'Triton'; } if ('sklearn' in predictor) { return 'SKLearn'; } if ('onnx' in predictor) { return 'Onnx'; } if ('pytorch' in predictor) { return 'PyTorch'; } if ('xgboost' in predictor) { return 'XGBoost'; } if ('pmml' in predictor) { return 'PMML'; } if ('lightgbm' in predictor) { return 'LightGBM'; } return 'Custom'; } export function getPredictorExtensionSpec( predictor: PredictorSpec, ): PredictorExtensionSpec { if ('tensorflow' in predictor) { return predictor.tensorflow; } if ('triton' in predictor) { return predictor.triton; } if ('sklearn' in predictor) { return predictor.sklearn; } if ('onnx' in predictor) { return predictor.onnx; } if ('pytorch' in predictor) { return predictor.pytorch; } if ('xgboost' in predictor) { return predictor.xgboost; } if ('pmml' in predictor) { return predictor.pmml; } if ('lightgbm' in predictor) { return predictor.lightgbm; } return null; } export function getExplainerContainer(explainer: ExplainerSpec): V1Container { if ('alibi' in explainer) { return explainer.alibi; } if ('aix' in explainer) { return explainer.aix; } return null; }
if (readyCondition.status === 'True') {
random_line_split
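getK8sObjectUiStatus above is a decision ladder: a deletionTimestamp means terminating, a missing status block means unavailable, a missing Ready condition is a warning, Ready=True is ready, and anything else is waiting. A condensed Python sketch of the same ladder over a dict-shaped Kubernetes object — field names follow the record, and the phase strings stand in for the STATUS_TYPE enum:

def ui_status(obj):
    # same ordering of checks as getK8sObjectUiStatus
    if obj.get("metadata", {}).get("deletionTimestamp"):
        return "TERMINATING"
    if not obj.get("status"):
        return "UNAVAILABLE"
    conditions = obj["status"].get("conditions") or []
    ready = next((c for c in conditions if c.get("type") == "Ready"), None)
    if ready is None:
        return "WARNING"
    return "READY" if ready.get("status") == "True" else "WAITING"

assert ui_status({"status": {"conditions": [{"type": "Ready", "status": "True"}]}}) == "READY"
assert ui_status({"metadata": {"deletionTimestamp": "2021-01-01"}}) == "TERMINATING"
assert ui_status({"status": {"conditions": []}}) == "WARNING"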
utils.ts
import { Condition, Status, STATUS_TYPE, K8sObject } from 'kubeflow'; import { V1Container } from '@kubernetes/client-node'; import { InferenceServiceK8s, PredictorSpec, PredictorExtensionSpec, ExplainerSpec, } from '../types/kfserving/v1beta1'; /* * general util functions */ export function dictIsEmpty(obj: any): boolean { return Object.keys(obj).length === 0; } /* * kfserving helpers */ export function svcHasComponent( svc: InferenceServiceK8s, component: string, ): boolean { return !!svc.spec[component]; } export function getSvcComponents(svc: InferenceServiceK8s): string[] { const components: string[] = []; ['predictor', 'transformer', 'explainer'].forEach(c => { if (!svcHasComponent(svc, c)) { return; } components.push(c); }); return components; } export function getReadyCondition(obj: K8sObject): Condition { let cs: Condition[] = []; try { cs = obj.status.conditions; } catch (err) { return undefined; } if (!cs) { return undefined; } for (const c of cs) { if (c.type !== 'Ready') { continue; } return c; } } export function getK8sObjectUiStatus(obj: K8sObject): Status { const status: Status = { phase: '', state: '', message: '' }; if (obj.metadata.deletionTimestamp) { status.phase = STATUS_TYPE.TERMINATING; status.message = `${obj.kind} is being deleted`; return status; } if (!obj.status) { status.phase = STATUS_TYPE.UNAVAILABLE; status.message = `${obj.kind} has no status`; return status; } const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { status.phase = STATUS_TYPE.WARNING; status.message = 'No Ready condition available'; return status; } if (readyCondition.status === 'True') { status.phase = STATUS_TYPE.READY; status.message = `${obj.kind} is Ready`; return status; } status.phase = STATUS_TYPE.WAITING; status.message = readyCondition.message; return status; } export function getK8sObjectStatus(obj: K8sObject): [string, string] { const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { return [`Couldn't deduce the status. Missing Ready condition`, 'warning']; } if (readyCondition.status === 'True')
return [readyCondition.message, 'warning']; } // functions for processing the InferenceService spec export function getPredictorType(predictor: PredictorSpec): string { if ('tensorflow' in predictor) { return 'Tensorflow'; } if ('triton' in predictor) { return 'Triton'; } if ('sklearn' in predictor) { return 'SKLearn'; } if ('onnx' in predictor) { return 'Onnx'; } if ('pytorch' in predictor) { return 'PyTorch'; } if ('xgboost' in predictor) { return 'XGBoost'; } if ('pmml' in predictor) { return 'PMML'; } if ('lightgbm' in predictor) { return 'LightGBM'; } return 'Custom'; } export function getPredictorExtensionSpec( predictor: PredictorSpec, ): PredictorExtensionSpec { if ('tensorflow' in predictor) { return predictor.tensorflow; } if ('triton' in predictor) { return predictor.triton; } if ('sklearn' in predictor) { return predictor.sklearn; } if ('onnx' in predictor) { return predictor.onnx; } if ('pytorch' in predictor) { return predictor.pytorch; } if ('xgboost' in predictor) { return predictor.xgboost; } if ('pmml' in predictor) { return predictor.pmml; } if ('lightgbm' in predictor) { return predictor.lightgbm; } return null; } export function getExplainerContainer(explainer: ExplainerSpec): V1Container { if ('alibi' in explainer) { return explainer.alibi; } if ('aix' in explainer) { return explainer.aix; } return null; }
{ return ['Ready', 'check_circle']; }
conditional_block
utils.ts
import { Condition, Status, STATUS_TYPE, K8sObject } from 'kubeflow'; import { V1Container } from '@kubernetes/client-node'; import { InferenceServiceK8s, PredictorSpec, PredictorExtensionSpec, ExplainerSpec, } from '../types/kfserving/v1beta1'; /* * general util functions */ export function dictIsEmpty(obj: any): boolean { return Object.keys(obj).length === 0; } /* * kfserving helpers */ export function svcHasComponent( svc: InferenceServiceK8s, component: string, ): boolean { return !!svc.spec[component]; } export function getSvcComponents(svc: InferenceServiceK8s): string[] { const components: string[] = []; ['predictor', 'transformer', 'explainer'].forEach(c => { if (!svcHasComponent(svc, c)) { return; } components.push(c); }); return components; } export function getReadyCondition(obj: K8sObject): Condition { let cs: Condition[] = []; try { cs = obj.status.conditions; } catch (err) { return undefined; } if (!cs) { return undefined; } for (const c of cs) { if (c.type !== 'Ready') { continue; } return c; } } export function getK8sObjectUiStatus(obj: K8sObject): Status { const status: Status = { phase: '', state: '', message: '' }; if (obj.metadata.deletionTimestamp) { status.phase = STATUS_TYPE.TERMINATING; status.message = `${obj.kind} is being deleted`; return status; } if (!obj.status) { status.phase = STATUS_TYPE.UNAVAILABLE; status.message = `${obj.kind} has no status`; return status; } const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { status.phase = STATUS_TYPE.WARNING; status.message = 'No Ready condition available'; return status; } if (readyCondition.status === 'True') { status.phase = STATUS_TYPE.READY; status.message = `${obj.kind} is Ready`; return status; } status.phase = STATUS_TYPE.WAITING; status.message = readyCondition.message; return status; } export function getK8sObjectStatus(obj: K8sObject): [string, string] { const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { return [`Couldn't deduce the status. Missing Ready condition`, 'warning']; } if (readyCondition.status === 'True') { return ['Ready', 'check_circle']; } return [readyCondition.message, 'warning']; } // functions for processing the InferenceService spec export function getPredictorType(predictor: PredictorSpec): string
if ('xgboost' in predictor) { return 'XGBoost'; } if ('pmml' in predictor) { return 'PMML'; } if ('lightgbm' in predictor) { return 'LightGBM'; } return 'Custom'; } export function getPredictorExtensionSpec( predictor: PredictorSpec, ): PredictorExtensionSpec { if ('tensorflow' in predictor) { return predictor.tensorflow; } if ('triton' in predictor) { return predictor.triton; } if ('sklearn' in predictor) { return predictor.sklearn; } if ('onnx' in predictor) { return predictor.onnx; } if ('pytorch' in predictor) { return predictor.pytorch; } if ('xgboost' in predictor) { return predictor.xgboost; } if ('pmml' in predictor) { return predictor.pmml; } if ('lightgbm' in predictor) { return predictor.lightgbm; } return null; } export function getExplainerContainer(explainer: ExplainerSpec): V1Container { if ('alibi' in explainer) { return explainer.alibi; } if ('aix' in explainer) { return explainer.aix; } return null; }
{ if ('tensorflow' in predictor) { return 'Tensorflow'; } if ('triton' in predictor) { return 'Triton'; } if ('sklearn' in predictor) { return 'SKLearn'; } if ('onnx' in predictor) { return 'Onnx'; } if ('pytorch' in predictor) { return 'PyTorch'; }
identifier_body
utils.ts
import { Condition, Status, STATUS_TYPE, K8sObject } from 'kubeflow'; import { V1Container } from '@kubernetes/client-node'; import { InferenceServiceK8s, PredictorSpec, PredictorExtensionSpec, ExplainerSpec, } from '../types/kfserving/v1beta1'; /* * general util functions */ export function dictIsEmpty(obj: any): boolean { return Object.keys(obj).length === 0; } /* * kfserving helpers */ export function svcHasComponent( svc: InferenceServiceK8s, component: string, ): boolean { return !!svc.spec[component]; } export function
(svc: InferenceServiceK8s): string[] { const components: string[] = []; ['predictor', 'transformer', 'explainer'].forEach(c => { if (!svcHasComponent(svc, c)) { return; } components.push(c); }); return components; } export function getReadyCondition(obj: K8sObject): Condition { let cs: Condition[] = []; try { cs = obj.status.conditions; } catch (err) { return undefined; } if (!cs) { return undefined; } for (const c of cs) { if (c.type !== 'Ready') { continue; } return c; } } export function getK8sObjectUiStatus(obj: K8sObject): Status { const status: Status = { phase: '', state: '', message: '' }; if (obj.metadata.deletionTimestamp) { status.phase = STATUS_TYPE.TERMINATING; status.message = `${obj.kind} is being deleted`; return status; } if (!obj.status) { status.phase = STATUS_TYPE.UNAVAILABLE; status.message = `${obj.kind} has no status`; return status; } const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { status.phase = STATUS_TYPE.WARNING; status.message = 'No Ready condition available'; return status; } if (readyCondition.status === 'True') { status.phase = STATUS_TYPE.READY; status.message = `${obj.kind} is Ready`; return status; } status.phase = STATUS_TYPE.WAITING; status.message = readyCondition.message; return status; } export function getK8sObjectStatus(obj: K8sObject): [string, string] { const readyCondition = getReadyCondition(obj); if (readyCondition === undefined) { return [`Couldn't deduce the status. Missing Ready condition`, 'warning']; } if (readyCondition.status === 'True') { return ['Ready', 'check_circle']; } return [readyCondition.message, 'warning']; } // functions for processing the InferenceService spec export function getPredictorType(predictor: PredictorSpec): string { if ('tensorflow' in predictor) { return 'Tensorflow'; } if ('triton' in predictor) { return 'Triton'; } if ('sklearn' in predictor) { return 'SKLearn'; } if ('onnx' in predictor) { return 'Onnx'; } if ('pytorch' in predictor) { return 'PyTorch'; } if ('xgboost' in predictor) { return 'XGBoost'; } if ('pmml' in predictor) { return 'PMML'; } if ('lightgbm' in predictor) { return 'LightGBM'; } return 'Custom'; } export function getPredictorExtensionSpec( predictor: PredictorSpec, ): PredictorExtensionSpec { if ('tensorflow' in predictor) { return predictor.tensorflow; } if ('triton' in predictor) { return predictor.triton; } if ('sklearn' in predictor) { return predictor.sklearn; } if ('onnx' in predictor) { return predictor.onnx; } if ('pytorch' in predictor) { return predictor.pytorch; } if ('xgboost' in predictor) { return predictor.xgboost; } if ('pmml' in predictor) { return predictor.pmml; } if ('lightgbm' in predictor) { return predictor.lightgbm; } return null; } export function getExplainerContainer(explainer: ExplainerSpec): V1Container { if ('alibi' in explainer) { return explainer.alibi; } if ('aix' in explainer) { return explainer.aix; } return null; }
getSvcComponents
identifier_name
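getPredictorType and getPredictorExtensionSpec walk the same fixed list of framework keys with an if-chain; the first key present on the predictor wins, and anything unrecognized is treated as a custom predictor. The chain can be read as an ordered lookup table, sketched here in Python (the example predictor dicts are illustrative):

PREDICTOR_TYPES = [  # same order as the if-chain in getPredictorType
    ("tensorflow", "Tensorflow"), ("triton", "Triton"), ("sklearn", "SKLearn"),
    ("onnx", "Onnx"), ("pytorch", "PyTorch"), ("xgboost", "XGBoost"),
    ("pmml", "PMML"), ("lightgbm", "LightGBM"),
]

def predictor_type(predictor):
    # first matching framework key wins; anything else is a custom predictor
    return next((label for key, label in PREDICTOR_TYPES if key in predictor),
                "Custom")

assert predictor_type({"sklearn": {"storageUri": "gs://models/demo"}}) == "SKLearn"
assert predictor_type({"containers": []}) == "Custom"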
webpack.config.js
const path = require('path'); const nodeExternals = require('webpack-node-externals'); // loaded so external Node.js modules are not bundled const WebpackShellPlugin = require('webpack-shell-plugin'); const OutputFileName = 'aether.agent.package.js'; var serverCfg = { context: path.resolve(__dirname, 'src'), entry: './app.js', target: 'node', externals: [nodeExternals()],// configured to ignore node_modules output: { path: path.resolve(__dirname, 'dist'), filename: OutputFileName }, module: { rules:[{ test: /\.js$/, include: path.resolve(__dirname, 'src'), exclude: /node_modules/, use: [{ loader: 'babel-loader', options: { presets: [ ['es2016'] ], plugins: [require('babel-plugin-transform-strict-mode')]// automatically inserts 'use strict' into every module at build time, so no warnings are emitted even when 'use strict' is missing } }] }] }, plugins: [ new WebpackShellPlugin({ onBuildStart:['echo "Webpack Start"'], onBuildEnd:['echo "Build End: ' + path.resolve(__dirname, 'dist') + '/' + OutputFileName + '"'], }) ]
}; module.exports = serverCfg;
random_line_split
createDrawAxes.ts
import { vec3, vec4 } from 'gl-matrix'; // tslint:disable-next-line:import-name import REGL = require('regl'); interface Attributes { position: vec3; color: vec3; } /* * All the information needed to be able to draw axes to the screen */ export interface DrawAxesProps { positions: vec4[]; colors: vec3[]; count: number; } /* * Shader to draw axes in the corner of the screen */ export function
( regl: REGL.Regl ): REGL.DrawCommand<REGL.DefaultContext, DrawAxesProps> { return regl<{}, Attributes, DrawAxesProps>({ vert: ` precision mediump float; attribute vec4 position; attribute vec3 color; varying vec3 vertexColor; void main() { gl_Position = position; vertexColor = color; } `, frag: ` precision mediump float; varying vec3 vertexColor; void main() { gl_FragColor = vec4(vertexColor, 1.0); } `, primitive: 'lines', attributes: { position: regl.prop<DrawAxesProps, keyof DrawAxesProps>('positions'), color: regl.prop<DrawAxesProps, keyof DrawAxesProps>('colors') }, uniforms: {}, count: regl.prop<DrawAxesProps, keyof DrawAxesProps>('count') }); }
createDrawAxes
identifier_name
createDrawAxes.ts
import { vec3, vec4 } from 'gl-matrix'; // tslint:disable-next-line:import-name import REGL = require('regl'); interface Attributes { position: vec3; color: vec3; } /* * All the information needed to be able to draw axes to the screen */ export interface DrawAxesProps { positions: vec4[]; colors: vec3[]; count: number; } /* * Shader to draw axes in the corner of the screen */ export function createDrawAxes( regl: REGL.Regl ): REGL.DrawCommand<REGL.DefaultContext, DrawAxesProps>
void main() { gl_FragColor = vec4(vertexColor, 1.0); } `, primitive: 'lines', attributes: { position: regl.prop<DrawAxesProps, keyof DrawAxesProps>('positions'), color: regl.prop<DrawAxesProps, keyof DrawAxesProps>('colors') }, uniforms: {}, count: regl.prop<DrawAxesProps, keyof DrawAxesProps>('count') }); }
{ return regl<{}, Attributes, DrawAxesProps>({ vert: ` precision mediump float; attribute vec4 position; attribute vec3 color; varying vec3 vertexColor; void main() { gl_Position = position; vertexColor = color; } `, frag: ` precision mediump float; varying vec3 vertexColor;
identifier_body
createDrawAxes.ts
import { vec3, vec4 } from 'gl-matrix'; // tslint:disable-next-line:import-name import REGL = require('regl'); interface Attributes { position: vec3; color: vec3; } /* * All the information needed to be able to draw axes to the screen */ export interface DrawAxesProps { positions: vec4[]; colors: vec3[]; count: number; } /*
* Shader to draw axes in the corner of the screen */ export function createDrawAxes( regl: REGL.Regl ): REGL.DrawCommand<REGL.DefaultContext, DrawAxesProps> { return regl<{}, Attributes, DrawAxesProps>({ vert: ` precision mediump float; attribute vec4 position; attribute vec3 color; varying vec3 vertexColor; void main() { gl_Position = position; vertexColor = color; } `, frag: ` precision mediump float; varying vec3 vertexColor; void main() { gl_FragColor = vec4(vertexColor, 1.0); } `, primitive: 'lines', attributes: { position: regl.prop<DrawAxesProps, keyof DrawAxesProps>('positions'), color: regl.prop<DrawAxesProps, keyof DrawAxesProps>('colors') }, uniforms: {}, count: regl.prop<DrawAxesProps, keyof DrawAxesProps>('count') }); }
random_line_split
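createDrawAxes renders with the 'lines' primitive, so a DrawAxesProps caller supplies vertices in pairs: two endpoints per axis segment, one color per vertex, and count equal to the number of vertices. A Python sketch of how such vertex data might be assembled — the RGB-per-axis convention is an assumption, not something the record specifies:

def axes_vertex_data(scale=0.5):
    # three line segments from the origin, colored R, G, B per axis
    origin = (0.0, 0.0, 0.0, 1.0)
    tips = [(scale, 0.0, 0.0, 1.0), (0.0, scale, 0.0, 1.0), (0.0, 0.0, scale, 1.0)]
    axis_colors = [(1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0)]
    positions, colors = [], []
    for tip, color in zip(tips, axis_colors):
        positions += [origin, tip]  # 'lines' consumes vertices two at a time
        colors += [color, color]    # both endpoints of a segment share a color
    return {"positions": positions, "colors": colors, "count": len(positions)}

props = axes_vertex_data()
assert props["count"] == 6  # 3 segments x 2 vertices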
test_auth_resources.py
# -*- coding: utf-8 -*- # Copyright (C) 2014-2017 Andrey Antukh <[email protected]> # Copyright (C) 2014-2017 Jesús Espino <[email protected]> # Copyright (C) 2014-2017 David Barragán <[email protected]> # Copyright (C) 2014-2017 Alejandro Alonso <[email protected]> # Copyright (C) 2014-2017 Anler Hernández <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.urlresolvers import reverse from taiga.base.utils import json from tests import factories as f from tests.utils import disconnect_signals, reconnect_signals import pytest pytestmark = pytest.mark.django_db def setup_module(module): disconnect_signals() def teardown_module(module): reconnect_signals() def test_auth_create(client): url = reverse('auth-list') user = f.UserFactory.create() login_data = json.dumps({ "type": "normal", "username": user.username, "password": user.username, }) result = client.post(url, login_data, content_type="application/json") assert result.status_code == 200 def test_auth_action_register(client, settings): set
tings.PUBLIC_REGISTER_ENABLED = True url = reverse('auth-register') register_data = json.dumps({ "type": "public", "username": "test", "password": "test", "full_name": "test", "email": "[email protected]", }) result = client.post(url, register_data, content_type="application/json") assert result.status_code == 201
identifier_body
test_auth_resources.py
# -*- coding: utf-8 -*- # Copyright (C) 2014-2017 Andrey Antukh <[email protected]> # Copyright (C) 2014-2017 Jesús Espino <[email protected]> # Copyright (C) 2014-2017 David Barragán <[email protected]> # Copyright (C) 2014-2017 Alejandro Alonso <[email protected]> # Copyright (C) 2014-2017 Anler Hernández <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.urlresolvers import reverse from taiga.base.utils import json from tests import factories as f from tests.utils import disconnect_signals, reconnect_signals import pytest pytestmark = pytest.mark.django_db def set
dule): disconnect_signals() def teardown_module(module): reconnect_signals() def test_auth_create(client): url = reverse('auth-list') user = f.UserFactory.create() login_data = json.dumps({ "type": "normal", "username": user.username, "password": user.username, }) result = client.post(url, login_data, content_type="application/json") assert result.status_code == 200 def test_auth_action_register(client, settings): settings.PUBLIC_REGISTER_ENABLED = True url = reverse('auth-register') register_data = json.dumps({ "type": "public", "username": "test", "password": "test", "full_name": "test", "email": "[email protected]", }) result = client.post(url, register_data, content_type="application/json") assert result.status_code == 201
up_module(mo
identifier_name
test_auth_resources.py
# -*- coding: utf-8 -*- # Copyright (C) 2014-2017 Andrey Antukh <[email protected]> # Copyright (C) 2014-2017 Jesús Espino <[email protected]> # Copyright (C) 2014-2017 David Barragán <[email protected]>
# License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.core.urlresolvers import reverse from taiga.base.utils import json from tests import factories as f from tests.utils import disconnect_signals, reconnect_signals import pytest pytestmark = pytest.mark.django_db def setup_module(module): disconnect_signals() def teardown_module(module): reconnect_signals() def test_auth_create(client): url = reverse('auth-list') user = f.UserFactory.create() login_data = json.dumps({ "type": "normal", "username": user.username, "password": user.username, }) result = client.post(url, login_data, content_type="application/json") assert result.status_code == 200 def test_auth_action_register(client, settings): settings.PUBLIC_REGISTER_ENABLED = True url = reverse('auth-register') register_data = json.dumps({ "type": "public", "username": "test", "password": "test", "full_name": "test", "email": "[email protected]", }) result = client.post(url, register_data, content_type="application/json") assert result.status_code == 201
# Copyright (C) 2014-2017 Alejandro Alonso <[email protected]> # Copyright (C) 2014-2017 Anler Hernández <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the
random_line_split
Opportunity.tsx
import { t } from '@lingui/macro'; import { formatPercentage } from 'common/format'; import SPELLS from 'common/SPELLS'; import { SpellLink } from 'interface'; import Analyzer, { Options } from 'parser/core/Analyzer'; import { NumberThreshold, ThresholdStyle, When } from 'parser/core/ParseResults'; import DamageTracker from 'parser/shared/modules/AbilityTracker'; import React from 'react'; import OpportunityDamageTracker from './OpportunityDamageTracker'; class Opportunity extends Analyzer { get thresholds(): NumberThreshold { const total = this.damageTracker.getAbility(SPELLS.SINISTER_STRIKE.id); const filtered = this.opportunityDamageTracker.getAbility(SPELLS.SINISTER_STRIKE.id); return { actual: filtered.casts / total.casts, isGreaterThan: { minor: 0, average: 0.05, major: 0.1, }, style: ThresholdStyle.PERCENTAGE, }; } static dependencies = { damageTracker: DamageTracker, opportunityDamageTracker: OpportunityDamageTracker, }; protected damageTracker!: DamageTracker; protected opportunityDamageTracker!: OpportunityDamageTracker; constructor(options: Options & { opportunityDamageTracker: OpportunityDamageTracker })
suggestions(when: When) { when(this.thresholds).addSuggestion((suggest, actual, recommended) => suggest( <> You cast <SpellLink id={SPELLS.SINISTER_STRIKE.id} /> while having an{' '} <SpellLink id={SPELLS.OPPORTUNITY.id} /> proc. Try to prioritize{' '} <SpellLink id={SPELLS.PISTOL_SHOT.id} /> as your combo point builder when you have{' '} <SpellLink id={SPELLS.OPPORTUNITY.id} /> active to avoid the possibility of missing additional procs. </>, ) .icon(SPELLS.OPPORTUNITY.icon) .actual( t({ id: 'rogue.outlaw.suggestions.opportunity.efficiency', message: `${formatPercentage(actual)}% inefficient casts`, }), ) .recommended(`${formatPercentage(recommended)}% is recommended`), ); } } export default Opportunity;
{ super(options); options.opportunityDamageTracker.subscribeInefficientCast( [SPELLS.SINISTER_STRIKE], () => `Pistol Shot should be used as your builder during Opportunity`, ); }
identifier_body
Opportunity.tsx
import { t } from '@lingui/macro'; import { formatPercentage } from 'common/format'; import SPELLS from 'common/SPELLS'; import { SpellLink } from 'interface'; import Analyzer, { Options } from 'parser/core/Analyzer'; import { NumberThreshold, ThresholdStyle, When } from 'parser/core/ParseResults'; import DamageTracker from 'parser/shared/modules/AbilityTracker'; import React from 'react';
class Opportunity extends Analyzer { get thresholds(): NumberThreshold { const total = this.damageTracker.getAbility(SPELLS.SINISTER_STRIKE.id); const filtered = this.opportunityDamageTracker.getAbility(SPELLS.SINISTER_STRIKE.id); return { actual: filtered.casts / total.casts, isGreaterThan: { minor: 0, average: 0.05, major: 0.1, }, style: ThresholdStyle.PERCENTAGE, }; } static dependencies = { damageTracker: DamageTracker, opportunityDamageTracker: OpportunityDamageTracker, }; protected damageTracker!: DamageTracker; protected opportunityDamageTracker!: OpportunityDamageTracker; constructor(options: Options & { opportunityDamageTracker: OpportunityDamageTracker }) { super(options); options.opportunityDamageTracker.subscribeInefficientCast( [SPELLS.SINISTER_STRIKE], () => `Pistol Shot should be used as your builder during Opportunity`, ); } suggestions(when: When) { when(this.thresholds).addSuggestion((suggest, actual, recommended) => suggest( <> You cast <SpellLink id={SPELLS.SINISTER_STRIKE.id} /> while having an{' '} <SpellLink id={SPELLS.OPPORTUNITY.id} /> proc. Try to prioritize{' '} <SpellLink id={SPELLS.PISTOL_SHOT.id} /> as your combo point builder when you have{' '} <SpellLink id={SPELLS.OPPORTUNITY.id} /> active to avoid the possibility of missing additional procs. </>, ) .icon(SPELLS.OPPORTUNITY.icon) .actual( t({ id: 'rogue.outlaw.suggestions.opportunity.efficiency', message: `${formatPercentage(actual)}% inefficient casts`, }), ) .recommended(`${formatPercentage(recommended)}% is recommended`), ); } } export default Opportunity;
import OpportunityDamageTracker from './OpportunityDamageTracker';
random_line_split
Opportunity.tsx
import { t } from '@lingui/macro'; import { formatPercentage } from 'common/format'; import SPELLS from 'common/SPELLS'; import { SpellLink } from 'interface'; import Analyzer, { Options } from 'parser/core/Analyzer'; import { NumberThreshold, ThresholdStyle, When } from 'parser/core/ParseResults'; import DamageTracker from 'parser/shared/modules/AbilityTracker'; import React from 'react'; import OpportunityDamageTracker from './OpportunityDamageTracker'; class Opportunity extends Analyzer { get thresholds(): NumberThreshold { const total = this.damageTracker.getAbility(SPELLS.SINISTER_STRIKE.id); const filtered = this.opportunityDamageTracker.getAbility(SPELLS.SINISTER_STRIKE.id); return { actual: filtered.casts / total.casts, isGreaterThan: { minor: 0, average: 0.05, major: 0.1, }, style: ThresholdStyle.PERCENTAGE, }; } static dependencies = { damageTracker: DamageTracker, opportunityDamageTracker: OpportunityDamageTracker, }; protected damageTracker!: DamageTracker; protected opportunityDamageTracker!: OpportunityDamageTracker; constructor(options: Options & { opportunityDamageTracker: OpportunityDamageTracker }) { super(options); options.opportunityDamageTracker.subscribeInefficientCast( [SPELLS.SINISTER_STRIKE], () => `Pistol Shot should be used as your builder during Opportunity`, ); }
(when: When) { when(this.thresholds).addSuggestion((suggest, actual, recommended) => suggest( <> You cast <SpellLink id={SPELLS.SINISTER_STRIKE.id} /> while having an{' '} <SpellLink id={SPELLS.OPPORTUNITY.id} /> proc. Try to prioritize{' '} <SpellLink id={SPELLS.PISTOL_SHOT.id} /> as your combo point builder when you have{' '} <SpellLink id={SPELLS.OPPORTUNITY.id} /> active to avoid the possibility of missing additional procs. </>, ) .icon(SPELLS.OPPORTUNITY.icon) .actual( t({ id: 'rogue.outlaw.suggestions.opportunity.efficiency', message: `${formatPercentage(actual)}% inefficient casts`, }), ) .recommended(`${formatPercentage(recommended)}% is recommended`), ); } } export default Opportunity;
suggestions
identifier_name
service.rs
use std::sync::atomic::*; use futures::channel::mpsc; use futures::{FutureExt, SinkExt, StreamExt, TryFutureExt}; use grpcio::{self, *}; use kvproto::backup::*; use tikv_util::worker::*; use super::Task; /// Service handles the RPC messages for the `Backup` service. #[derive(Clone)] pub struct Service { scheduler: Scheduler<Task>, } impl Service { /// Create a new backup service. pub fn new(scheduler: Scheduler<Task>) -> Service { Service { scheduler } } } impl Backup for Service { fn backup( &mut self, ctx: RpcContext, req: BackupRequest, mut sink: ServerStreamingSink<BackupResponse>, ) { let mut cancel = None; // TODO: make it a bounded channel. let (tx, rx) = mpsc::unbounded(); if let Err(status) = match Task::new(req, tx) { Ok((task, c)) => { cancel = Some(c); self.scheduler.schedule(task).map_err(|e| { RpcStatus::new(RpcStatusCode::INVALID_ARGUMENT, Some(format!("{:?}", e))) }) } Err(e) => Err(RpcStatus::new( RpcStatusCode::UNKNOWN, Some(format!("{:?}", e)), )), } { error!("backup task initiate failed"; "error" => ?status); ctx.spawn( sink.fail(status) .unwrap_or_else(|e| error!("backup failed to send error"; "error" => ?e)), ); return; }; let send_task = async move { let mut s = rx.map(|resp| Ok((resp, WriteFlags::default()))); sink.send_all(&mut s).await?; sink.close().await?; Ok(()) } .map(|res: Result<()>| { match res { Ok(_) => { info!("backup closed"); } Err(e) => { if let Some(c) = cancel { // Cancel the running task. c.store(true, Ordering::SeqCst); } error!("backup canceled"; "error" => ?e); } } }); ctx.spawn(send_task); } } #[cfg(test)] mod tests { use std::sync::Arc; use std::time::Duration; use super::*; use crate::endpoint::tests::*; use external_storage::make_local_backend; use tikv::storage::txn::tests::{must_commit, must_prewrite_put}; use tikv_util::worker::{dummy_scheduler, ReceiverWrapper}; use txn_types::TimeStamp; fn new_rpc_suite() -> (Server, BackupClient, ReceiverWrapper<Task>) { let env = Arc::new(EnvBuilder::new().build()); let (scheduler, rx) = dummy_scheduler(); let backup_service = super::Service::new(scheduler); let builder = ServerBuilder::new(env.clone()).register_service(create_backup(backup_service)); let mut server = builder.bind("127.0.0.1", 0).build().unwrap(); server.start(); let (_, port) = server.bind_addrs().next().unwrap(); let addr = format!("127.0.0.1:{}", port); let channel = ChannelBuilder::new(env).connect(&addr); let client = BackupClient::new(channel); (server, client, rx) } #[test] fn
() { let (_server, client, mut rx) = new_rpc_suite(); let (tmp, endpoint) = new_endpoint(); let engine = endpoint.engine.clone(); endpoint.region_info.set_regions(vec![ (b"".to_vec(), b"2".to_vec(), 1), (b"2".to_vec(), b"5".to_vec(), 2), ]); let mut ts: TimeStamp = 1.into(); let mut alloc_ts = || *ts.incr(); for i in 0..5 { let start = alloc_ts(); let key = format!("{}", i); must_prewrite_put( &engine, key.as_bytes(), key.as_bytes(), key.as_bytes(), start, ); let commit = alloc_ts(); must_commit(&engine, key.as_bytes(), start, commit); } let now = alloc_ts(); let mut req = BackupRequest::default(); req.set_start_key(vec![]); req.set_end_key(vec![b'5']); req.set_start_version(now.into_inner()); req.set_end_version(now.into_inner()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(now.to_string()))); let stream = client.backup(&req).unwrap(); let task = rx.recv_timeout(Duration::from_secs(5)).unwrap(); // Dropping the stream without starting to receive will cause a cancel error. drop(stream); // A stopped remote must not cause panic. endpoint.handle_backup_task(task.unwrap()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(alloc_ts().to_string()))); let mut stream = client.backup(&req).unwrap(); // Drop the stream once it has received something. client.spawn(async move { let _ = stream.next().await; }); let task = rx.recv_timeout(Duration::from_secs(5)).unwrap(); // A stopped remote must not cause panic. endpoint.handle_backup_task(task.unwrap()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(alloc_ts().to_string()))); let stream = client.backup(&req).unwrap(); let task = rx.recv().unwrap(); // Dropping the stream without starting to receive will cause a cancel error. drop(stream); // Wait until the task is canceled in map_err. loop { std::thread::sleep(Duration::from_millis(100)); if task.resp.unbounded_send(Default::default()).is_err() { break; } } // The task should be canceled. assert!(task.has_canceled()); // A stopped remote must not cause panic. endpoint.handle_backup_task(task); } }
test_client_stop
identifier_name
service.rs
use std::sync::atomic::*; use futures::channel::mpsc; use futures::{FutureExt, SinkExt, StreamExt, TryFutureExt}; use grpcio::{self, *};
/// Service handles the RPC messages for the `Backup` service. #[derive(Clone)] pub struct Service { scheduler: Scheduler<Task>, } impl Service { /// Create a new backup service. pub fn new(scheduler: Scheduler<Task>) -> Service { Service { scheduler } } } impl Backup for Service { fn backup( &mut self, ctx: RpcContext, req: BackupRequest, mut sink: ServerStreamingSink<BackupResponse>, ) { let mut cancel = None; // TODO: make it a bounded channel. let (tx, rx) = mpsc::unbounded(); if let Err(status) = match Task::new(req, tx) { Ok((task, c)) => { cancel = Some(c); self.scheduler.schedule(task).map_err(|e| { RpcStatus::new(RpcStatusCode::INVALID_ARGUMENT, Some(format!("{:?}", e))) }) } Err(e) => Err(RpcStatus::new( RpcStatusCode::UNKNOWN, Some(format!("{:?}", e)), )), } { error!("backup task initiate failed"; "error" => ?status); ctx.spawn( sink.fail(status) .unwrap_or_else(|e| error!("backup failed to send error"; "error" => ?e)), ); return; }; let send_task = async move { let mut s = rx.map(|resp| Ok((resp, WriteFlags::default()))); sink.send_all(&mut s).await?; sink.close().await?; Ok(()) } .map(|res: Result<()>| { match res { Ok(_) => { info!("backup closed"); } Err(e) => { if let Some(c) = cancel { // Cancel the running task. c.store(true, Ordering::SeqCst); } error!("backup canceled"; "error" => ?e); } } }); ctx.spawn(send_task); } } #[cfg(test)] mod tests { use std::sync::Arc; use std::time::Duration; use super::*; use crate::endpoint::tests::*; use external_storage::make_local_backend; use tikv::storage::txn::tests::{must_commit, must_prewrite_put}; use tikv_util::worker::{dummy_scheduler, ReceiverWrapper}; use txn_types::TimeStamp; fn new_rpc_suite() -> (Server, BackupClient, ReceiverWrapper<Task>) { let env = Arc::new(EnvBuilder::new().build()); let (scheduler, rx) = dummy_scheduler(); let backup_service = super::Service::new(scheduler); let builder = ServerBuilder::new(env.clone()).register_service(create_backup(backup_service)); let mut server = builder.bind("127.0.0.1", 0).build().unwrap(); server.start(); let (_, port) = server.bind_addrs().next().unwrap(); let addr = format!("127.0.0.1:{}", port); let channel = ChannelBuilder::new(env).connect(&addr); let client = BackupClient::new(channel); (server, client, rx) } #[test] fn test_client_stop() { let (_server, client, mut rx) = new_rpc_suite(); let (tmp, endpoint) = new_endpoint(); let engine = endpoint.engine.clone(); endpoint.region_info.set_regions(vec![ (b"".to_vec(), b"2".to_vec(), 1), (b"2".to_vec(), b"5".to_vec(), 2), ]); let mut ts: TimeStamp = 1.into(); let mut alloc_ts = || *ts.incr(); for i in 0..5 { let start = alloc_ts(); let key = format!("{}", i); must_prewrite_put( &engine, key.as_bytes(), key.as_bytes(), key.as_bytes(), start, ); let commit = alloc_ts(); must_commit(&engine, key.as_bytes(), start, commit); } let now = alloc_ts(); let mut req = BackupRequest::default(); req.set_start_key(vec![]); req.set_end_key(vec![b'5']); req.set_start_version(now.into_inner()); req.set_end_version(now.into_inner()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(now.to_string()))); let stream = client.backup(&req).unwrap(); let task = rx.recv_timeout(Duration::from_secs(5)).unwrap(); // Dropping the stream without starting to receive will cause a cancel error. drop(stream); // A stopped remote must not cause panic. endpoint.handle_backup_task(task.unwrap()); // Set a unique path to avoid AlreadyExists error.
req.set_storage_backend(make_local_backend(&tmp.path().join(alloc_ts().to_string()))); let mut stream = client.backup(&req).unwrap(); // Drop the stream once it has received something. client.spawn(async move { let _ = stream.next().await; }); let task = rx.recv_timeout(Duration::from_secs(5)).unwrap(); // A stopped remote must not cause panic. endpoint.handle_backup_task(task.unwrap()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(alloc_ts().to_string()))); let stream = client.backup(&req).unwrap(); let task = rx.recv().unwrap(); // Dropping the stream without starting to receive will cause a cancel error. drop(stream); // Wait until the task is canceled in map_err. loop { std::thread::sleep(Duration::from_millis(100)); if task.resp.unbounded_send(Default::default()).is_err() { break; } } // The task should be canceled. assert!(task.has_canceled()); // A stopped remote must not cause panic. endpoint.handle_backup_task(task); } }
use kvproto::backup::*; use tikv_util::worker::*; use super::Task;
random_line_split
service.rs
use std::sync::atomic::*; use futures::channel::mpsc; use futures::{FutureExt, SinkExt, StreamExt, TryFutureExt}; use grpcio::{self, *}; use kvproto::backup::*; use tikv_util::worker::*; use super::Task; /// Service handles the RPC messages for the `Backup` service. #[derive(Clone)] pub struct Service { scheduler: Scheduler<Task>, } impl Service { /// Create a new backup service. pub fn new(scheduler: Scheduler<Task>) -> Service { Service { scheduler } } } impl Backup for Service { fn backup( &mut self, ctx: RpcContext, req: BackupRequest, mut sink: ServerStreamingSink<BackupResponse>, ) { let mut cancel = None; // TODO: make it a bounded channel. let (tx, rx) = mpsc::unbounded(); if let Err(status) = match Task::new(req, tx) { Ok((task, c)) => { cancel = Some(c); self.scheduler.schedule(task).map_err(|e| { RpcStatus::new(RpcStatusCode::INVALID_ARGUMENT, Some(format!("{:?}", e))) }) } Err(e) => Err(RpcStatus::new( RpcStatusCode::UNKNOWN, Some(format!("{:?}", e)), )), } { error!("backup task initiate failed"; "error" => ?status); ctx.spawn( sink.fail(status) .unwrap_or_else(|e| error!("backup failed to send error"; "error" => ?e)), ); return; }; let send_task = async move { let mut s = rx.map(|resp| Ok((resp, WriteFlags::default()))); sink.send_all(&mut s).await?; sink.close().await?; Ok(()) } .map(|res: Result<()>| { match res { Ok(_) => { info!("backup closed"); } Err(e) => { if let Some(c) = cancel { // Cancel the running task. c.store(true, Ordering::SeqCst); } error!("backup canceled"; "error" => ?e); } } }); ctx.spawn(send_task); } } #[cfg(test)] mod tests { use std::sync::Arc; use std::time::Duration; use super::*; use crate::endpoint::tests::*; use external_storage::make_local_backend; use tikv::storage::txn::tests::{must_commit, must_prewrite_put}; use tikv_util::worker::{dummy_scheduler, ReceiverWrapper}; use txn_types::TimeStamp; fn new_rpc_suite() -> (Server, BackupClient, ReceiverWrapper<Task>)
#[test] fn test_client_stop() { let (_server, client, mut rx) = new_rpc_suite(); let (tmp, endpoint) = new_endpoint(); let engine = endpoint.engine.clone(); endpoint.region_info.set_regions(vec![ (b"".to_vec(), b"2".to_vec(), 1), (b"2".to_vec(), b"5".to_vec(), 2), ]); let mut ts: TimeStamp = 1.into(); let mut alloc_ts = || *ts.incr(); for i in 0..5 { let start = alloc_ts(); let key = format!("{}", i); must_prewrite_put( &engine, key.as_bytes(), key.as_bytes(), key.as_bytes(), start, ); let commit = alloc_ts(); must_commit(&engine, key.as_bytes(), start, commit); } let now = alloc_ts(); let mut req = BackupRequest::default(); req.set_start_key(vec![]); req.set_end_key(vec![b'5']); req.set_start_version(now.into_inner()); req.set_end_version(now.into_inner()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(now.to_string()))); let stream = client.backup(&req).unwrap(); let task = rx.recv_timeout(Duration::from_secs(5)).unwrap(); // Dropping the stream without starting to receive will cause a cancel error. drop(stream); // A stopped remote must not cause panic. endpoint.handle_backup_task(task.unwrap()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(alloc_ts().to_string()))); let mut stream = client.backup(&req).unwrap(); // Drop the stream once it has received something. client.spawn(async move { let _ = stream.next().await; }); let task = rx.recv_timeout(Duration::from_secs(5)).unwrap(); // A stopped remote must not cause panic. endpoint.handle_backup_task(task.unwrap()); // Set a unique path to avoid AlreadyExists error. req.set_storage_backend(make_local_backend(&tmp.path().join(alloc_ts().to_string()))); let stream = client.backup(&req).unwrap(); let task = rx.recv().unwrap(); // Dropping the stream without starting to receive will cause a cancel error. drop(stream); // Wait until the task is canceled in map_err. loop { std::thread::sleep(Duration::from_millis(100)); if task.resp.unbounded_send(Default::default()).is_err() { break; } } // The task should be canceled. assert!(task.has_canceled()); // A stopped remote must not cause panic. endpoint.handle_backup_task(task); } }
{ let env = Arc::new(EnvBuilder::new().build()); let (scheduler, rx) = dummy_scheduler(); let backup_service = super::Service::new(scheduler); let builder = ServerBuilder::new(env.clone()).register_service(create_backup(backup_service)); let mut server = builder.bind("127.0.0.1", 0).build().unwrap(); server.start(); let (_, port) = server.bind_addrs().next().unwrap(); let addr = format!("127.0.0.1:{}", port); let channel = ChannelBuilder::new(env).connect(&addr); let client = BackupClient::new(channel); (server, client, rx) }
identifier_body
Gruntfile.js
module.exports = function(grunt) { grunt.initConfig({ pkg: grunt.file.readJSON('package.json'), jshint: {
files: [ {expand: true, cwd: "app", src: ['**/*'], dest: '/var/www/html/status/'}, ] } }, watch : { files : [ 'app/**' ], tasks : ['test', 'deploy'] } }); // concat was used at one point but I decided to keep it simple... for now // grunt.loadNpmTasks('grunt-contrib-concat'); grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.loadNpmTasks('grunt-contrib-copy'); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.registerTask('test', ['jshint']); grunt.registerTask('deploy', ['copy:deploy']); grunt.registerTask('default', ['test', 'deploy', 'watch']); }
files: ['app/js/*.js'] }, copy: { deploy: {
random_line_split
tests.rs
use tempdir; use libxch; mod util; #[test] fn test_success()
#[test] fn test_failure() { let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, dir.path()).is_err()); assert!(util::ensure_file_content(&file1, b"content1").expect("Could not read file")); }
{ let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1"); let file2 = dir.path().join("file2"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir"); util::create_file_with_content(&file2, b"content2").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, &file2).is_ok()); assert!(util::ensure_file_content(&file1, b"content2").expect("Could not read file")); assert!(util::ensure_file_content(&file2, b"content1").expect("Could not read file")); }
identifier_body
tests.rs
use tempdir; use libxch; mod util; #[test] fn test_success() { let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1"); let file2 = dir.path().join("file2"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir"); util::create_file_with_content(&file2, b"content2").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, &file2).is_ok()); assert!(util::ensure_file_content(&file1, b"content2").expect("Could not read file")); assert!(util::ensure_file_content(&file2, b"content1").expect("Could not read file")); }
let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, dir.path()).is_err()); assert!(util::ensure_file_content(&file1, b"content1").expect("Could not read file")); }
#[test] fn test_failure() {
random_line_split
tests.rs
use tempdir; use libxch; mod util; #[test] fn
() { let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1"); let file2 = dir.path().join("file2"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir"); util::create_file_with_content(&file2, b"content2").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, &file2).is_ok()); assert!(util::ensure_file_content(&file1, b"content2").expect("Could not read file")); assert!(util::ensure_file_content(&file2, b"content1").expect("Could not read file")); } #[test] fn test_failure() { let dir = tempdir::TempDir::new("test").expect("Could not create temporary directory"); let file1 = dir.path().join("file1"); util::create_file_with_content(&file1, b"content1").expect("Could not create file in tempdir"); assert!(libxch::xch_non_atomic(&file1, dir.path()).is_err()); assert!(util::ensure_file_content(&file1, b"content1").expect("Could not read file")); }
test_success
identifier_name
sl.js
/* Slovenian locale for flatpickr */ var flatpickr = flatpickr || { l10ns: {} }; flatpickr.l10ns.sl = {}; flatpickr.l10ns.sl.weekdays = { shorthand: ["Ned", "Pon", "Tor", "Sre", "Čet", "Pet", "Sob"], longhand: ["Nedelja", "Ponedeljek", "Torek", "Sreda", "Četrtek", "Petek", "Sobota"] }; flatpickr.l10ns.sl.months = { shorthand: ["Jan", "Feb", "Mar", "Apr", "Maj", "Jun", "Jul", "Avg", "Sep", "Okt", "Nov", "Dec"], longhand: ["Januar", "Februar", "Marec", "April", "Maj", "Junij", "Julij", "Avgust", "September", "Oktober", "November", "December"] };
flatpickr.l10ns.sl.ordinal = function () { return "."; }; if (typeof module !== "undefined") module.exports = flatpickr.l10ns;
flatpickr.l10ns.sl.firstDayOfWeek = 1; flatpickr.l10ns.sl.rangeSeparator = " do ";
random_line_split
eagerLoadDownRefs.ts
import { Dictionary, includes } from 'lodash'; import UnstoredStatementModel from '../../../models/UnstoredStatementModel'; import ClientModel from '../../../models/ClientModel';
import Statement from '../../../models/Statement'; import Config from '../../Config'; import groupStatementsById from './groupStatementsById'; const getGroupedDownRefs = async ( config: Config, models: UnstoredStatementModel[], client: ClientModel ): Promise<Dictionary<Statement>> => { const allIds: string[] = models.map((model) => { return model.statement.id; }); const targetIds: string[] = models.reduce((results, model) => { if ( model.statement.object.objectType === 'StatementRef' && includes(allIds, model.statement.object.id) // No need to load models that we already have. ) { return [...results, model.statement.object.id]; } return results; }, [] as string[]); const targetStatements = await config.repo.getStatementsByIds({ ids: targetIds, client }); const groupedStatements = groupStatementsById(targetStatements); return groupedStatements; }; const getGroupedModels = (models: UnstoredStatementModel[]) => { const statements = models.map((model) => { return model.statement; }); const groupedStatements = groupStatementsById(statements); return groupedStatements; }; export default async ( config: Config, models: UnstoredStatementModel[], client: ClientModel ): Promise<Dictionary<Statement>> => { const groupedDownRefs = await getGroupedDownRefs(config, models, client); const groupedModels = getGroupedModels(models); return { ...groupedDownRefs, ...groupedModels, }; };
random_line_split
eagerLoadDownRefs.ts
import { Dictionary, includes } from 'lodash'; import UnstoredStatementModel from '../../../models/UnstoredStatementModel'; import ClientModel from '../../../models/ClientModel'; import Statement from '../../../models/Statement'; import Config from '../../Config'; import groupStatementsById from './groupStatementsById'; const getGroupedDownRefs = async ( config: Config, models: UnstoredStatementModel[], client: ClientModel ): Promise<Dictionary<Statement>> => { const allIds: string[] = models.map((model) => { return model.statement.id; }); const targetIds: string[] = models.reduce((results, model) => { if ( model.statement.object.objectType === 'StatementRef' && includes(allIds, model.statement.object.id) // No need to load models that we already have. )
return results; }, [] as string[]); const targetStatements = await config.repo.getStatementsByIds({ ids: targetIds, client }); const groupedStatements = groupStatementsById(targetStatements); return groupedStatements; }; const getGroupedModels = (models: UnstoredStatementModel[]) => { const statements = models.map((model) => { return model.statement; }); const groupedStatements = groupStatementsById(statements); return groupedStatements; }; export default async ( config: Config, models: UnstoredStatementModel[], client: ClientModel ): Promise<Dictionary<Statement>> => { const groupedDownRefs = await getGroupedDownRefs(config, models, client); const groupedModels = getGroupedModels(models); return { ...groupedDownRefs, ...groupedModels, }; };
{ return [...results, model.statement.object.id]; }
conditional_block
pipeline_info_editor_spec.tsx
/* * Copyright 2020 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import m from "mithril"; import Stream from "mithril/stream"; import {PipelineConfig} from "models/pipeline_configs/pipeline_config"; import {PipelineGroupCache} from "models/pipeline_configs/pipeline_groups_cache"; import {TemplateCache} from "models/pipeline_configs/templates_cache"; import {Option} from "views/components/forms/input_fields";
import {TestHelper} from "views/pages/spec/test_helper"; import {PipelineInfoEditor} from "../pipeline_info_editor"; const flag: (val?: boolean) => Stream<boolean> = Stream; describe("AddPipeline: PipelineInfoEditor", () => { const helper = new TestHelper(); let config: PipelineConfig; beforeEach(() => { config = new PipelineConfig("", [], []).withGroup("foo"); helper.mount(() => <PipelineInfoEditor pipelineConfig={config} cache={new TestCache()} isUsingTemplate={flag(false)} templatesCache={new EmptyTemplatesTestCache()}/>); }); afterEach(helper.unmount.bind(helper)); it("Generates structure", () => { expect(helper.byTestId("form-field-label-pipeline-name")).toBeTruthy(); expect(helper.byTestId("form-field-label-pipeline-name").textContent).toBe("Pipeline Name*"); expect(helper.byTestId("form-field-input-pipeline-name")).toBeTruthy(); }); it("Binds to model", () => { expect(config.name()).toBe(""); helper.oninput(helper.byTestId("form-field-input-pipeline-name"), "my-pipeline"); expect(config.name()).toBe("my-pipeline"); }); }); class TestCache implements PipelineGroupCache<Option> { ready() { return true; } // tslint:disable-next-line prime(onComplete: () => void) {} // tslint:disable-next-line invalidate() {} contents() { return []; } pipelineGroups() { return []; } stages(pipeline: string) { return []; } failureReason() { return undefined; } failed() { return false; } } class EmptyTemplatesTestCache extends TemplateCache { ready() { return true; } // tslint:disable-next-line prime(onComplete: () => void) { onComplete(); } // tslint:disable-next-line invalidate() {} contents() { return []; } templates() { return []; } failureReason() { return undefined; } failed() { return false; } }
random_line_split
pipeline_info_editor_spec.tsx
/* * Copyright 2020 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import m from "mithril"; import Stream from "mithril/stream"; import {PipelineConfig} from "models/pipeline_configs/pipeline_config"; import {PipelineGroupCache} from "models/pipeline_configs/pipeline_groups_cache"; import {TemplateCache} from "models/pipeline_configs/templates_cache"; import {Option} from "views/components/forms/input_fields"; import {TestHelper} from "views/pages/spec/test_helper"; import {PipelineInfoEditor} from "../pipeline_info_editor"; const flag: (val?: boolean) => Stream<boolean> = Stream; describe("AddPipeline: PipelineInfoEditor", () => { const helper = new TestHelper(); let config: PipelineConfig; beforeEach(() => { config = new PipelineConfig("", [], []).withGroup("foo"); helper.mount(() => <PipelineInfoEditor pipelineConfig={config} cache={new TestCache()} isUsingTemplate={flag(false)} templatesCache={new EmptyTemplatesTestCache()}/>); }); afterEach(helper.unmount.bind(helper)); it("Generates structure", () => { expect(helper.byTestId("form-field-label-pipeline-name")).toBeTruthy(); expect(helper.byTestId("form-field-label-pipeline-name").textContent).toBe("Pipeline Name*"); expect(helper.byTestId("form-field-input-pipeline-name")).toBeTruthy(); }); it("Binds to model", () => { expect(config.name()).toBe(""); helper.oninput(helper.byTestId("form-field-input-pipeline-name"), "my-pipeline"); expect(config.name()).toBe("my-pipeline"); }); }); class TestCache implements PipelineGroupCache<Option> { ready() { return true; } // tslint:disable-next-line prime(onComplete: () => void)
// tslint:disable-next-line invalidate() {} contents() { return []; } pipelineGroups() { return []; } stages(pipeline: string) { return []; } failureReason() { return undefined; } failed() { return false; } } class EmptyTemplatesTestCache extends TemplateCache { ready() { return true; } // tslint:disable-next-line prime(onComplete: () => void) { onComplete(); } // tslint:disable-next-line invalidate() {} contents() { return []; } templates() { return []; } failureReason() { return undefined; } failed() { return false; } }
{}
identifier_body
pipeline_info_editor_spec.tsx
/* * Copyright 2020 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import m from "mithril"; import Stream from "mithril/stream"; import {PipelineConfig} from "models/pipeline_configs/pipeline_config"; import {PipelineGroupCache} from "models/pipeline_configs/pipeline_groups_cache"; import {TemplateCache} from "models/pipeline_configs/templates_cache"; import {Option} from "views/components/forms/input_fields"; import {TestHelper} from "views/pages/spec/test_helper"; import {PipelineInfoEditor} from "../pipeline_info_editor"; const flag: (val?: boolean) => Stream<boolean> = Stream; describe("AddPipeline: PipelineInfoEditor", () => { const helper = new TestHelper(); let config: PipelineConfig; beforeEach(() => { config = new PipelineConfig("", [], []).withGroup("foo"); helper.mount(() => <PipelineInfoEditor pipelineConfig={config} cache={new TestCache()} isUsingTemplate={flag(false)} templatesCache={new EmptyTemplatesTestCache()}/>); }); afterEach(helper.unmount.bind(helper)); it("Generates structure", () => { expect(helper.byTestId("form-field-label-pipeline-name")).toBeTruthy(); expect(helper.byTestId("form-field-label-pipeline-name").textContent).toBe("Pipeline Name*"); expect(helper.byTestId("form-field-input-pipeline-name")).toBeTruthy(); }); it("Binds to model", () => { expect(config.name()).toBe(""); helper.oninput(helper.byTestId("form-field-input-pipeline-name"), "my-pipeline"); expect(config.name()).toBe("my-pipeline"); }); }); class TestCache implements PipelineGroupCache<Option> { ready() { return true; } // tslint:disable-next-line prime(onComplete: () => void) {} // tslint:disable-next-line invalidate() {} contents() { return []; } pipelineGroups() { return []; } stages(pipeline: string) { return []; } failureReason() { return undefined; } failed() { return false; } } class EmptyTemplatesTestCache extends TemplateCache { ready() { return true; } // tslint:disable-next-line prime(onComplete: () => void) { onComplete(); } // tslint:disable-next-line invalidate() {} contents() { return []; } templates() { return []; }
() { return undefined; } failed() { return false; } }
failureReason
identifier_name
self_assessment_module.py
submit, then see a rubric and rate themselves. Persists student supplied hints, answers, and assessment judgment (currently only correct/incorrect). Parses xml definition file--see below for exact format. Sample XML format: <selfassessment> <hintprompt> What hint about this problem would you give to someone? </hintprompt> <submitmessage> Save Successful. Thanks for participating! </submitmessage> </selfassessment> """ TEMPLATE_DIR = "combinedopenended/selfassessment" # states INITIAL = 'initial' ASSESSING = 'assessing' REQUEST_HINT = 'request_hint' DONE = 'done' def setup_response(self, system, location, definition, descriptor): """ Sets up the module @param system: Modulesystem @param location: location, to let the module know where it is. @param definition: XML definition of the module. @param descriptor: SelfAssessmentDescriptor @return: None """ self.child_prompt = stringify_children(self.child_prompt) self.child_rubric = stringify_children(self.child_rubric) def get_html(self, system): """ Gets context and renders HTML that represents the module @param system: Modulesystem @return: Rendered HTML """ # set context variables and render template previous_answer = self.get_display_answer() # Use the module name as a unique id to pass to the template. try: module_id = self.system.location.name except AttributeError: # In cases where we don't have a system or a location, use a fallback. module_id = "self_assessment" context = { 'prompt': self.child_prompt, 'previous_answer': previous_answer, 'ajax_url': system.ajax_url, 'initial_rubric': self.get_rubric_html(system), 'state': self.child_state, 'allow_reset': self._allow_reset(), 'child_type': 'selfassessment', 'accept_file_upload': self.accept_file_upload, 'module_id': module_id, } html = system.render_template('{0}/self_assessment_prompt.html'.format(self.TEMPLATE_DIR), context) return html def handle_ajax(self, dispatch, data, system): """ This is called by courseware.module_render, to handle an AJAX call. "data" is request.POST. Returns a json dictionary: { 'progress_changed' : True/False, 'progress': 'none'/'in_progress'/'done', <other request-specific values here > } """ handlers = { 'save_answer': self.save_answer, 'save_assessment': self.save_assessment, 'save_post_assessment': self.save_hint, 'store_answer': self.store_answer, } if dispatch not in handlers: # This is a dev_facing_error log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) # This is a dev_facing_error return json.dumps({'error': 'Error handling action. Please try again.', 'success': False}) before = self.get_progress() d = handlers[dispatch](data, system) after = self.get_progress() d.update({ 'progress_changed': after != before, 'progress_status': Progress.to_js_status_str(after), }) return json.dumps(d, cls=ComplexEncoder) def get_rubric_html(self, system): """ Return the appropriate version of the rubric, based on the state.
""" if self.child_state == self.INITIAL: return '' rubric_renderer = CombinedOpenEndedRubric(system, False) rubric_dict = rubric_renderer.render_rubric(self.child_rubric) success = rubric_dict['success'] rubric_html = rubric_dict['html'] # we'll render it context = { 'rubric': rubric_html, 'max_score': self._max_score, } if self.child_state == self.ASSESSING: context['read_only'] = False elif self.child_state in (self.POST_ASSESSMENT, self.DONE): context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_rubric.html'.format(self.TEMPLATE_DIR), context) def get_hint_html(self, system): """ Return the appropriate version of the hint view, based on state. """ if self.child_state in (self.INITIAL, self.ASSESSING): return '' if self.child_state == self.DONE: # display the previous hint latest = self.latest_post_assessment(system) hint = latest if latest is not None else '' else: hint = '' context = {'hint': hint} if self.child_state == self.POST_ASSESSMENT: context['read_only'] = False elif self.child_state == self.DONE: context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self Assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_hint.html'.format(self.TEMPLATE_DIR), context) def save_answer(self, data, system): """ After the answer is submitted, show the rubric. Args: data: the request dictionary passed to the ajax request. Should contain a key 'student_answer' Returns: Dictionary with keys 'success' and either 'error' (if not success), or 'rubric_html' (if success). """ # Check to see if this problem is closed closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.INITIAL: return self.out_of_sync_error(data) error_message = "" # add new history element with answer and empty score and hint. success, error_message, data = self.append_file_link_to_student_answer(data) if success: data['student_answer'] = SelfAssessmentModule.sanitize_html(data['student_answer']) self.new_history_entry(data['student_answer']) self.change_state(self.ASSESSING) return { 'success': success, 'rubric_html': self.get_rubric_html(system), 'error': error_message, 'student_response': data['student_answer'].replace("\n","<br/>") } def save_assessment(self, data, _system): """ Save the assessment. If the student said they're right, don't ask for a hint, and go straight to the done state. Otherwise, do ask for a hint. 
Returns a dict { 'success': bool, 'state': state, 'hint_html': hint_html OR 'message_html': html and 'allow_reset', 'error': error-msg}, with 'error' only present if 'success' is False, and 'hint_html' or 'message_html' only if success is true :param data: A `webob.multidict.MultiDict` containing the keys assessment: The sum of assessment scores score_list[]: A multivalue key containing all the individual scores """ closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.ASSESSING: return self.out_of_sync_error(data) try: score = int(data.get('assessment')) score_list = [int(x) for x in data.getall('score_list[]')] except (ValueError, TypeError): # This is a dev_facing_error log.error("Non-integer score value passed to save_assessment, or no score list present.") # This is a student_facing_error _ = self.system.service(self, "i18n").ugettext return { 'success': False, 'error': _("Error saving your score. Please notify course staff.") } # Record score as assessment and rubric scores as post assessment self.record_latest_score(score) self.record_latest_post_assessment(json.dumps(score_list)) d = {'success': True, } self.change_state(self.DONE) d['allow_reset'] = self._allow_reset() d['state'] = self.child_state return d def save_hint(self, data, _system): ''' Not used currently, as hints have been removed from the system. Save the hint. Returns a dict { 'success': bool, 'message_html': message_html, 'error': error-msg, 'allow_reset': bool}, with the error key only present if success is False and message_html only if True. ''' if self.child_state != self.POST_ASSESSMENT: # Note: because we only ask for hints on wrong answers, may not have # the same number of hints and answers. return self.out_of_sync_error(data) self.record_latest_post_assessment(data['hint']) self.change_state(self.DONE) return
class SelfAssessmentModule(openendedchild.OpenEndedChild): """ A Self Assessment module that allows students to write open-ended responses,
random_line_split
self_assessment_module.py
Sample XML format: <selfassessment> <hintprompt> What hint about this problem would you give to someone? </hintprompt> <submitmessage> Save Successful. Thanks for participating! </submitmessage> </selfassessment> """ TEMPLATE_DIR = "combinedopenended/selfassessment" # states INITIAL = 'initial' ASSESSING = 'assessing' REQUEST_HINT = 'request_hint' DONE = 'done' def setup_response(self, system, location, definition, descriptor): """ Sets up the module @param system: Modulesystem @param location: location, to let the module know where it is. @param definition: XML definition of the module. @param descriptor: SelfAssessmentDescriptor @return: None """ self.child_prompt = stringify_children(self.child_prompt) self.child_rubric = stringify_children(self.child_rubric) def get_html(self, system): """ Gets context and renders HTML that represents the module @param system: Modulesystem @return: Rendered HTML """ # set context variables and render template previous_answer = self.get_display_answer() # Use the module name as a unique id to pass to the template. try: module_id = self.system.location.name except AttributeError: # In cases where we don't have a system or a location, use a fallback. module_id = "self_assessment" context = { 'prompt': self.child_prompt, 'previous_answer': previous_answer, 'ajax_url': system.ajax_url, 'initial_rubric': self.get_rubric_html(system), 'state': self.child_state, 'allow_reset': self._allow_reset(), 'child_type': 'selfassessment', 'accept_file_upload': self.accept_file_upload, 'module_id': module_id, } html = system.render_template('{0}/self_assessment_prompt.html'.format(self.TEMPLATE_DIR), context) return html def handle_ajax(self, dispatch, data, system): """ This is called by courseware.module_render, to handle an AJAX call. "data" is request.POST. Returns a json dictionary: { 'progress_changed' : True/False, 'progress': 'none'/'in_progress'/'done', <other request-specific values here > } """ handlers = { 'save_answer': self.save_answer, 'save_assessment': self.save_assessment, 'save_post_assessment': self.save_hint, 'store_answer': self.store_answer, } if dispatch not in handlers: # This is a dev_facing_error log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) # This is a dev_facing_error return json.dumps({'error': 'Error handling action. Please try again.', 'success': False}) before = self.get_progress() d = handlers[dispatch](data, system) after = self.get_progress() d.update({ 'progress_changed': after != before, 'progress_status': Progress.to_js_status_str(after), }) return json.dumps(d, cls=ComplexEncoder) def get_rubric_html(self, system): """ Return the appropriate version of the rubric, based on the state. """ if self.child_state == self.INITIAL: return '' rubric_renderer = CombinedOpenEndedRubric(system, False) rubric_dict = rubric_renderer.render_rubric(self.child_rubric) success = rubric_dict['success'] rubric_html = rubric_dict['html'] # we'll render it context = { 'rubric': rubric_html, 'max_score': self._max_score, } if self.child_state == self.ASSESSING: context['read_only'] = False elif self.child_state in (self.POST_ASSESSMENT, self.DONE):
else: # This is a dev_facing_error raise ValueError("Self assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_rubric.html'.format(self.TEMPLATE_DIR), context) def get_hint_html(self, system): """ Return the appropriate version of the hint view, based on state. """ if self.child_state in (self.INITIAL, self.ASSESSING): return '' if self.child_state == self.DONE: # display the previous hint latest = self.latest_post_assessment(system) hint = latest if latest is not None else '' else: hint = '' context = {'hint': hint} if self.child_state == self.POST_ASSESSMENT: context['read_only'] = False elif self.child_state == self.DONE: context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self Assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_hint.html'.format(self.TEMPLATE_DIR), context) def save_answer(self, data, system): """ After the answer is submitted, show the rubric. Args: data: the request dictionary passed to the ajax request. Should contain a key 'student_answer' Returns: Dictionary with keys 'success' and either 'error' (if not success), or 'rubric_html' (if success). """ # Check to see if this problem is closed closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.INITIAL: return self.out_of_sync_error(data) error_message = "" # add new history element with answer and empty score and hint. success, error_message, data = self.append_file_link_to_student_answer(data) if success: data['student_answer'] = SelfAssessmentModule.sanitize_html(data['student_answer']) self.new_history_entry(data['student_answer']) self.change_state(self.ASSESSING) return { 'success': success, 'rubric_html': self.get_rubric_html(system), 'error': error_message, 'student_response': data['student_answer'].replace("\n","<br/>") } def save_assessment(self, data, _system): """ Save the assessment. If the student said they're right, don't ask for a hint, and go straight to the done state. Otherwise, do ask for a hint. Returns a dict { 'success': bool, 'state': state, 'hint_html': hint_html OR 'message_html': html and 'allow_reset', 'error': error-msg}, with 'error' only present if 'success' is False, and 'hint_html' or 'message_html' only if success is true :param data: A `webob.multidict.MultiDict` containing the keys assessment: The sum of assessment scores score_list[]: A multivalue key containing all the individual scores """ closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.ASSESSING: return self.out_of_sync_error(data) try: score = int(data.get('assessment')) score_list = [int(x) for x in data.getall('score_list[]')] except (ValueError, TypeError): # This is a dev_facing_error log.error("Non-integer score value passed to save_assessment, or no score list present.") # This is a student_facing_error _ = self.system.service(self, "i18n").ugettext return { 'success': False, 'error': _("Error saving your score. Please notify course staff.") } # Record score as assessment and rubric scores as post assessment self.record_latest_score(score) self.record_latest_post_assessment(json.dumps(score_list)) d = {'success': True, } self.change_state(self.DONE) d['allow_reset'] = self._allow_reset() d['state'] = self.child_state return d def save_hint(self, data, _system): ''' Not used currently, as hints have been removed from the system. Save the hint.
Returns a dict { 'success': bool, 'message_html': message_html, 'error': error-msg, 'allow_reset': bool}, with the error key only present if success is False and message_html only if True. ''' if self.child_state != self.POST_ASSESSMENT: # Note: because we only ask for hints on wrong answers, may not have # the same number of hints and answers. return self.out_of_sync_error(data) self.record_latest_post_assessment(data['hint']) self.change_state(self.DONE) return { 'success': True, 'message_html': '', 'allow_reset': self._allow_reset(), } def latest_post_assessment(self, system): latest_post_assessment = super(SelfAssessmentModule, self).latest_post_assessment(system) try: rubric_scores = json.loads(latest_post_assessment) except:
context['read_only'] = True
conditional_block
self_assessment_module.py
@return: Rendered HTML """ # set context variables and render template previous_answer = self.get_display_answer() # Use the module name as a unique id to pass to the template. try: module_id = self.system.location.name except AttributeError: # In cases where we don't have a system or a location, use a fallback. module_id = "self_assessment" context = { 'prompt': self.child_prompt, 'previous_answer': previous_answer, 'ajax_url': system.ajax_url, 'initial_rubric': self.get_rubric_html(system), 'state': self.child_state, 'allow_reset': self._allow_reset(), 'child_type': 'selfassessment', 'accept_file_upload': self.accept_file_upload, 'module_id': module_id, } html = system.render_template('{0}/self_assessment_prompt.html'.format(self.TEMPLATE_DIR), context) return html def handle_ajax(self, dispatch, data, system): """ This is called by courseware.module_render, to handle an AJAX call. "data" is request.POST. Returns a json dictionary: { 'progress_changed' : True/False, 'progress': 'none'/'in_progress'/'done', <other request-specific values here > } """ handlers = { 'save_answer': self.save_answer, 'save_assessment': self.save_assessment, 'save_post_assessment': self.save_hint, 'store_answer': self.store_answer, } if dispatch not in handlers: # This is a dev_facing_error log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) # This is a dev_facing_error return json.dumps({'error': 'Error handling action. Please try again.', 'success': False}) before = self.get_progress() d = handlers[dispatch](data, system) after = self.get_progress() d.update({ 'progress_changed': after != before, 'progress_status': Progress.to_js_status_str(after), }) return json.dumps(d, cls=ComplexEncoder) def get_rubric_html(self, system): """ Return the appropriate version of the rubric, based on the state. """ if self.child_state == self.INITIAL: return '' rubric_renderer = CombinedOpenEndedRubric(system, False) rubric_dict = rubric_renderer.render_rubric(self.child_rubric) success = rubric_dict['success'] rubric_html = rubric_dict['html'] # we'll render it context = { 'rubric': rubric_html, 'max_score': self._max_score, } if self.child_state == self.ASSESSING: context['read_only'] = False elif self.child_state in (self.POST_ASSESSMENT, self.DONE): context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_rubric.html'.format(self.TEMPLATE_DIR), context) def get_hint_html(self, system): """ Return the appropriate version of the hint view, based on state. """ if self.child_state in (self.INITIAL, self.ASSESSING): return '' if self.child_state == self.DONE: # display the previous hint latest = self.latest_post_assessment(system) hint = latest if latest is not None else '' else: hint = '' context = {'hint': hint} if self.child_state == self.POST_ASSESSMENT: context['read_only'] = False elif self.child_state == self.DONE: context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self Assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_hint.html'.format(self.TEMPLATE_DIR), context) def save_answer(self, data, system): """ After the answer is submitted, show the rubric. Args: data: the request dictionary passed to the ajax request. 
Should contain a key 'student_answer' Returns: Dictionary with keys 'success' and either 'error' (if not success), or 'rubric_html' (if success). """ # Check to see if this problem is closed closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.INITIAL: return self.out_of_sync_error(data) error_message = "" # add new history element with answer and empty score and hint. success, error_message, data = self.append_file_link_to_student_answer(data) if success: data['student_answer'] = SelfAssessmentModule.sanitize_html(data['student_answer']) self.new_history_entry(data['student_answer']) self.change_state(self.ASSESSING) return { 'success': success, 'rubric_html': self.get_rubric_html(system), 'error': error_message, 'student_response': data['student_answer'].replace("\n","<br/>") } def save_assessment(self, data, _system): """ Save the assessment. If the student said they're right, don't ask for a hint, and go straight to the done state. Otherwise, do ask for a hint. Returns a dict { 'success': bool, 'state': state, 'hint_html': hint_html OR 'message_html': html and 'allow_reset', 'error': error-msg}, with 'error' only present if 'success' is False, and 'hint_html' or 'message_html' only if success is true :param data: A `webob.multidict.MultiDict` containing the keys assessment: The sum of assessment scores score_list[]: A multivalue key containing all the individual scores """ closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.ASSESSING: return self.out_of_sync_error(data) try: score = int(data.get('assessment')) score_list = [int(x) for x in data.getall('score_list[]')] except (ValueError, TypeError): # This is a dev_facing_error log.error("Non-integer score value passed to save_assessment, or no score list present.") # This is a student_facing_error _ = self.system.service(self, "i18n").ugettext return { 'success': False, 'error': _("Error saving your score. Please notify course staff.") } # Record score as assessment and rubric scores as post assessment self.record_latest_score(score) self.record_latest_post_assessment(json.dumps(score_list)) d = {'success': True, } self.change_state(self.DONE) d['allow_reset'] = self._allow_reset() d['state'] = self.child_state return d def save_hint(self, data, _system): ''' Not used currently, as hints have been removed from the system. Save the hint.
return self.out_of_sync_error(data) self.record_latest_post_assessment(data['hint']) self.change_state(self.DONE) return { 'success': True, 'message_html': '', 'allow_reset': self._allow_reset(), } def latest_post_assessment(self, system): latest_post_assessment = super(SelfAssessmentModule, self).latest_post_assessment(system) try: rubric_scores = json.loads(latest_post_assessment) except (ValueError, TypeError): rubric_scores = [] return [rubric_scores] class SelfAssessmentDescriptor(object): """ Module for adding self assessment questions to courses """ mako_template = "widgets/html-edit.html" module_class = SelfAssessmentModule filename_extension = "xml" has_score = True def __init__(self, system): self.system = system @classmethod def definition_from_xml(cls, xml_object, system): """ Pull out the rubric, prompt, and submitmessage into a dictionary. Returns: { 'submitmessage': 'some-html' 'hintprompt': 'some-html' } """ expected_children = [] for child in expected_children: if len(xml_object.xpath(child)) != 1: # This is a staff_facing_error raise ValueError( u"Self assessment definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format( child)) def parse(k):
"""Assumes that xml_object has child k""" return stringify_children(xml_object.xpath(k)[0])
identifier_body
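The handle_ajax docstring above describes a dispatch-table pattern: the POSTed action name selects a bound handler, and the JSON reply reports whether progress changed. Below is a minimal, runnable Python sketch of that pattern; the Module class, its two handlers, and the progress strings are illustrative stand-ins, not the edX API.

import json

class Module(object):
    """Toy stand-in for the self-assessment module's AJAX dispatch."""

    def __init__(self):
        self.progress = 'none'  # 'none' -> 'in_progress' -> 'done'

    def save_answer(self, data):
        self.progress = 'in_progress'
        return {'success': True}

    def save_assessment(self, data):
        self.progress = 'done'
        return {'success': True}

    def handle_ajax(self, dispatch, data):
        # Look the action up in a dispatch table instead of an if/elif chain.
        handlers = {
            'save_answer': self.save_answer,
            'save_assessment': self.save_assessment,
        }
        if dispatch not in handlers:
            return json.dumps({'success': False,
                               'error': 'Error handling action. Please try again.'})
        before = self.progress
        result = handlers[dispatch](data)
        # Report progress alongside the handler's own payload.
        result.update({'progress_changed': self.progress != before,
                       'progress_status': self.progress})
        return json.dumps(result)

if __name__ == '__main__':
    m = Module()
    print(m.handle_ajax('save_answer', {}))  # progress_changed: True
    print(m.handle_ajax('bogus', {}))        # error reply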
self_assessment_module.py
Sample XML format: <selfassessment> <hintprompt> What hint about this problem would you give to someone? </hintprompt> <submitmessage> Save Successful. Thanks for participating! </submitmessage> </selfassessment> """ TEMPLATE_DIR = "combinedopenended/selfassessment" # states INITIAL = 'initial' ASSESSING = 'assessing' REQUEST_HINT = 'request_hint' DONE = 'done' def setup_response(self, system, location, definition, descriptor): """ Sets up the module @param system: Modulesystem @param location: location, to let the module know where it is. @param definition: XML definition of the module. @param descriptor: SelfAssessmentDescriptor @return: None """ self.child_prompt = stringify_children(self.child_prompt) self.child_rubric = stringify_children(self.child_rubric) def get_html(self, system): """ Gets context and renders HTML that represents the module @param system: Modulesystem @return: Rendered HTML """ # set context variables and render template previous_answer = self.get_display_answer() # Use the module name as a unique id to pass to the template. try: module_id = self.system.location.name except AttributeError: # In cases where we don't have a system or a location, use a fallback. module_id = "self_assessment" context = { 'prompt': self.child_prompt, 'previous_answer': previous_answer, 'ajax_url': system.ajax_url, 'initial_rubric': self.get_rubric_html(system), 'state': self.child_state, 'allow_reset': self._allow_reset(), 'child_type': 'selfassessment', 'accept_file_upload': self.accept_file_upload, 'module_id': module_id, } html = system.render_template('{0}/self_assessment_prompt.html'.format(self.TEMPLATE_DIR), context) return html def handle_ajax(self, dispatch, data, system): """ This is called by courseware.module_render, to handle an AJAX call. "data" is request.POST. Returns a json dictionary: { 'progress_changed' : True/False, 'progress': 'none'/'in_progress'/'done', <other request-specific values here > } """ handlers = { 'save_answer': self.save_answer, 'save_assessment': self.save_assessment, 'save_post_assessment': self.save_hint, 'store_answer': self.store_answer, } if dispatch not in handlers: # This is a dev_facing_error log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) # This is a dev_facing_error return json.dumps({'error': 'Error handling action. Please try again.', 'success': False}) before = self.get_progress() d = handlers[dispatch](data, system) after = self.get_progress() d.update({ 'progress_changed': after != before, 'progress_status': Progress.to_js_status_str(after), }) return json.dumps(d, cls=ComplexEncoder) def get_rubric_html(self, system): """ Return the appropriate version of the rubric, based on the state.
""" if self.child_state == self.INITIAL: return '' rubric_renderer = CombinedOpenEndedRubric(system, False) rubric_dict = rubric_renderer.render_rubric(self.child_rubric) success = rubric_dict['success'] rubric_html = rubric_dict['html'] # we'll render it context = { 'rubric': rubric_html, 'max_score': self._max_score, } if self.child_state == self.ASSESSING: context['read_only'] = False elif self.child_state in (self.POST_ASSESSMENT, self.DONE): context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_rubric.html'.format(self.TEMPLATE_DIR), context) def get_hint_html(self, system): """ Return the appropriate version of the hint view, based on state. """ if self.child_state in (self.INITIAL, self.ASSESSING): return '' if self.child_state == self.DONE: # display the previous hint latest = self.latest_post_assessment(system) hint = latest if latest is not None else '' else: hint = '' context = {'hint': hint} if self.child_state == self.POST_ASSESSMENT: context['read_only'] = False elif self.child_state == self.DONE: context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self Assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_hint.html'.format(self.TEMPLATE_DIR), context) def save_answer(self, data, system): """ After the answer is submitted, show the rubric. Args: data: the request dictionary passed to the ajax request. Should contain a key 'student_answer' Returns: Dictionary with keys 'success' and either 'error' (if not success), or 'rubric_html' (if success). """ # Check to see if this problem is closed closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.INITIAL: return self.out_of_sync_error(data) error_message = "" # add new history element with answer and empty score and hint. success, error_message, data = self.append_file_link_to_student_answer(data) if success: data['student_answer'] = SelfAssessmentModule.sanitize_html(data['student_answer']) self.new_history_entry(data['student_answer']) self.change_state(self.ASSESSING) return { 'success': success, 'rubric_html': self.get_rubric_html(system), 'error': error_message, 'student_response': data['student_answer'].replace("\n","<br/>") } def save_assessment(self, data, _system): """ Save the assessment. If the student said they're right, don't ask for a hint, and go straight to the done state. Otherwise, do ask for a hint. 
Returns a dict { 'success': bool, 'state': state, 'hint_html': hint_html OR 'message_html': html and 'allow_reset', 'error': error-msg}, with 'error' only present if 'success' is False, and 'hint_html' or 'message_html' only if success is true :param data: A `webob.multidict.MultiDict` containing the keys assessment: The sum of assessment scores score_list[]: A multivalue key containing all the individual scores """ closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.ASSESSING: return self.out_of_sync_error(data) try: score = int(data.get('assessment')) score_list = [int(x) for x in data.getall('score_list[]')] except (ValueError, TypeError): # This is a dev_facing_error log.error("Non-integer score value passed to save_assessment, or no score list present.") # This is a student_facing_error _ = self.system.service(self, "i18n").ugettext return { 'success': False, 'error': _("Error saving your score. Please notify course staff.") } # Record score as assessment and rubric scores as post assessment self.record_latest_score(score) self.record_latest_post_assessment(json.dumps(score_list)) d = {'success': True, } self.change_state(self.DONE) d['allow_reset'] = self._allow_reset() d['state'] = self.child_state return d def
(self, data, _system): ''' Not used currently, as hints have been removed from the system. Save the hint. Returns a dict { 'success': bool, 'message_html': message_html, 'error': error-msg, 'allow_reset': bool}, with the error key only present if success is False and message_html only if True. ''' if self.child_state != self.POST_ASSESSMENT: # Note: because we only ask for hints on wrong answers, we may not have # the same number of hints and answers. return self.out_of_sync_error(data) self.record_latest_post_assessment(data['hint']) self.change_state(self.DONE) return { 'success': True, 'message_html': '', 'allow_reset': self._allow_reset(), } def latest_post_assessment(self, system): latest_post_assessment = super(SelfAssessmentModule, self).latest_post_assessment(system) try: rubric_scores = json.loads(latest_post_assessment) except (ValueError, TypeError):
save_hint
identifier_name
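Every handler above opens with a state check and falls back to out_of_sync_error when a request arrives in the wrong state. Here is a small Python sketch of that guard in isolation, with state names mirroring the module's initial/assessing/done flow; the class and its methods are invented for illustration.

INITIAL, ASSESSING, DONE = 'initial', 'assessing', 'done'

class StateGuard(object):
    """Illustrative state machine: each handler only runs in its expected state."""

    def __init__(self):
        self.state = INITIAL

    def out_of_sync_error(self, action):
        return {'success': False,
                'error': '%s called in state %r' % (action, self.state)}

    def save_answer(self):
        if self.state != INITIAL:  # same guard shape as the module above
            return self.out_of_sync_error('save_answer')
        self.state = ASSESSING
        return {'success': True, 'state': self.state}

    def save_assessment(self):
        if self.state != ASSESSING:
            return self.out_of_sync_error('save_assessment')
        self.state = DONE
        return {'success': True, 'state': self.state}

if __name__ == '__main__':
    g = StateGuard()
    print(g.save_assessment())  # rejected: still in 'initial'
    print(g.save_answer())      # ok: initial -> assessing
    print(g.save_assessment())  # ok: assessing -> done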
tag_view.js
var tag_view = Vue.component('tag_view',{ template: '<div class="col-sm-12 col-md-7 col-md-push-3">\ <div class="panel panel-default">\
<div is="pretty_header" v-bind:title="tag"></div>\ </div>\ </div>\ <div is="post_input" v-bind:user_prop="user" v-show="auth" v-bind:prefill="mention_prefill"></div>\ <div is="post_list" v-bind:posts.sync="posts" rest_api="/api/tag_posts" v-bind:api_param="api_param" :enable_refresh="true"></div>\ </div>\ <div class="col-sm-12 col-md-3 col-md-pull-7" v-show="auth">\ <div is="user_card" v-bind:user.sync="user"></div>\ </div>', data: function() { return { posts: [], user: {}, tag: this.$route.params.tag, auth: false } }, events: { 'post_added': function() { this.$broadcast('post_added'); }, 'respond': function(id, user_name) { window.scrollTo(0,0); this.$broadcast('respond', id, user_name) } }, ready: function() { $.ajax({ method: 'GET', url: '/api/check_auth', dataType : "json", contentType: "application/json; charset=utf-8", context: this, cache: false, success: function(result) { this.auth = result.auth; this.$broadcast('get_posts'); this.$broadcast('get_user'); } }); }, computed: { 'mention_prefill': function() { return this.tag; }, 'api_param': function() { return {tag: this.tag}; } } });
<div class="panel-body" style="padding: 0px;">\
random_line_split
Main.py
#!/usr/bin/python import feedparser
import sqlite3 import time RssUrlList = ['http://postitforward.tumblr.com/rss','http://for-war3-blog-blog.tumblr.com/rss'] sleep=3600/len(RssUrlList) def mkdir(path): import os path=path.strip() path=path.rstrip("\\") isExists=os.path.exists(path) if not isExists: os.makedirs(path) conn = sqlite3.connect('tumblr.db') def DownloadVideo(rss_url): feeds = feedparser.parse(rss_url) table=rss_url[7:-15].replace('-','') try: conn.execute('''CREATE TABLE %s(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE REAL)'''% table) conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','new','0')" % (table,rss_url)) # conn.execute("SELECT * FROM TUMBLR WHERE BLOG == %s").next() except sqlite3.OperationalError: pass # conn.execute('''CREATE TABLE(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE TEXT);''') # conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('rss_url','TEST','TEST')" % table) mkdir(rss_url[7:-4]) for post in feeds.entries: thisposttime=float(time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S"))) if conn.execute("SELECT MAX(DATE) FROM %s"%table).next()[0] == thisposttime: break if post.description.find("video_file") == -1: continue sourceadd= post.description.find("source src=") tumblradd= post.description[sourceadd:].find("tumblr_") typeadd = post.description[sourceadd:][tumblradd:].find("type=\"video") video_id=post.description[sourceadd:][tumblradd:][:typeadd-2] if video_id.find("/") !=-1: video_id=video_id[:video_id.find("/")] try: list(conn.execute("SELECT * FROM %s WHERE ADDRESS == '%s'"%(table,video_id)).next()) except StopIteration: print(post.title + ": " + post.link + post.published+"\n") wget.download("http://vt.tumblr.com/"+video_id+".mp4",rss_url[7:-4]) print("\n") conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','%s','%f')" % (table,rss_url,video_id,time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S")))) #wget.download(get_download_url("https://your.appspot.com/fetch.php?url="+post.link),rss_url[7:-4]) conn.commit() while True: for rss_url in RssUrlList: print("Downloading "+rss_url) DownloadVideo(rss_url) print("Sleep "+str(sleep)+" seconds") time.sleep(sleep)
import wget
random_line_split
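Main.py builds its SQL with % string interpolation, which breaks on odd blog names and invites injection. Table names cannot be bound as parameters in sqlite3, but every value can; here is a minimal sketch of the same insert-if-unseen bookkeeping with bound parameters (the table and column names are illustrative, not the script's schema).

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE IF NOT EXISTS videos'
             '(blog TEXT, address TEXT PRIMARY KEY, date REAL)')

def seen(video_id):
    # Values are bound with ? placeholders instead of % interpolation.
    row = conn.execute('SELECT 1 FROM videos WHERE address = ?',
                       (video_id,)).fetchone()
    return row is not None

def record(blog, video_id, ts):
    # PRIMARY KEY on address makes re-inserting a known video a no-op.
    conn.execute('INSERT OR IGNORE INTO videos (blog, address, date) '
                 'VALUES (?, ?, ?)', (blog, video_id, ts))
    conn.commit()

record('example-blog', 'tumblr_abc123', 1.0)
print(seen('tumblr_abc123'))  # True
print(seen('tumblr_zzz999'))  # False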
Main.py
#!/usr/bin/python import feedparser import wget import sqlite3 import time RssUrlList = ['http://postitforward.tumblr.com/rss','http://for-war3-blog-blog.tumblr.com/rss'] sleep=3600/len(RssUrlList) def mkdir(path): import os path=path.strip() path=path.rstrip("\\") isExists=os.path.exists(path) if not isExists: os.makedirs(path) conn = sqlite3.connect('tumblr.db') def
(rss_url): feeds = feedparser.parse(rss_url) table=rss_url[7:-15].replace('-','') try: conn.execute('''CREATE TABLE %s(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE REAL)'''% table) conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','new','0')" % (table,rss_url)) # conn.execute("SELECT * FROM TUMBLR WHERE BLOG == %s").next() except sqlite3.OperationalError: pass # conn.execute('''CREATE TABLE(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE TEXT);''') # conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('rss_url','TEST','TEST')" % table) mkdir(rss_url[7:-4]) for post in feeds.entries: thisposttime=float(time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S"))) if conn.execute("SELECT MAX(DATE) FROM %s"%table).next()[0] == thisposttime: break if post.description.find("video_file") == -1: continue sourceadd= post.description.find("source src=") tumblradd= post.description[sourceadd:].find("tumblr_") typeadd = post.description[sourceadd:][tumblradd:].find("type=\"video") video_id=post.description[sourceadd:][tumblradd:][:typeadd-2] if video_id.find("/") !=-1: video_id=video_id[:video_id.find("/")] try: list(conn.execute("SELECT * FROM %s WHERE ADDRESS == '%s'"%(table,video_id)).next()) except StopIteration: print(post.title + ": " + post.link + post.published+"\n") wget.download("http://vt.tumblr.com/"+video_id+".mp4",rss_url[7:-4]) print("\n") conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','%s','%f')" % (table,rss_url,video_id,time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S")))) #wget.download(get_download_url("https://your.appspot.com/fetch.php?url="+post.link),rss_url[7:-4]) conn.commit() while True: for rss_url in RssUrlList: print("Downloading "+rss_url) DownloadVideo(rss_url) print("Sleep "+str(sleep)+" seconds") time.sleep(sleep)
DownloadVideo
identifier_name
Main.py
#!/usr/bin/python import feedparser import wget import sqlite3 import time RssUrlList = ['http://postitforward.tumblr.com/rss','http://for-war3-blog-blog.tumblr.com/rss'] sleep=3600/len(RssUrlList) def mkdir(path):
conn = sqlite3.connect('tumblr.db') def DownloadVideo(rss_url): feeds = feedparser.parse(rss_url) table=rss_url[7:-15].replace('-','') try: conn.execute('''CREATE TABLE %s(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE REAL)'''% table) conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','new','0')" % (table,rss_url)) # conn.execute("SELECT * FROM TUMBLR WHERE BLOG == %s").next() except sqlite3.OperationalError: pass # conn.execute('''CREATE TABLE(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE TEXT);''') # conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('rss_url','TEST','TEST')" % table) mkdir(rss_url[7:-4]) for post in feeds.entries: thisposttime=float(time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S"))) if conn.execute("SELECT MAX(DATE) FROM %s"%table).next()[0] == thisposttime: break if post.description.find("video_file") == -1: continue sourceadd= post.description.find("source src=") tumblradd= post.description[sourceadd:].find("tumblr_") typeadd = post.description[sourceadd:][tumblradd:].find("type=\"video") video_id=post.description[sourceadd:][tumblradd:][:typeadd-2] if video_id.find("/") !=-1: video_id=video_id[:video_id.find("/")] try: list(conn.execute("SELECT * FROM %s WHERE ADDRESS == '%s'"%(table,video_id)).next()) except StopIteration: print(post.title + ": " + post.link + post.published+"\n") wget.download("http://vt.tumblr.com/"+video_id+".mp4",rss_url[7:-4]) print("\n") conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','%s','%f')" % (table,rss_url,video_id,time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S")))) #wget.download(get_download_url("https://your.appspot.com/fetch.php?url="+post.link),rss_url[7:-4]) conn.commit() while True: for rss_url in RssUrlList: print("Downloading "+rss_url) DownloadVideo(rss_url) print("Sleep "+str(sleep)+" seconds") time.sleep(sleep)
import os path=path.strip() path=path.rstrip("\\") isExists=os.path.exists(path) if not isExists: os.makedirs(path)
identifier_body
Main.py
#!/usr/bin/python import feedparser import wget import sqlite3 import time RssUrlList = ['http://postitforward.tumblr.com/rss','http://for-war3-blog-blog.tumblr.com/rss'] sleep=3600/len(RssUrlList) def mkdir(path): import os path=path.strip() path=path.rstrip("\\") isExists=os.path.exists(path) if not isExists: os.makedirs(path) conn = sqlite3.connect('tumblr.db') def DownloadVideo(rss_url): feeds = feedparser.parse(rss_url) table=rss_url[7:-15].replace('-','') try: conn.execute('''CREATE TABLE %s(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE REAL)'''% table) conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','new','0')" % (table,rss_url)) # conn.execute("SELECT * FROM TUMBLR WHERE BLOG == %s").next() except sqlite3.OperationalError: pass # conn.execute('''CREATE TABLE(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE TEXT);''') # conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('rss_url','TEST','TEST')" % table) mkdir(rss_url[7:-4]) for post in feeds.entries: thisposttime=float(time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S"))) if conn.execute("SELECT MAX(DATE) FROM %s"%table).next()[0] == thisposttime: break if post.description.find("video_file") == -1: continue sourceadd= post.description.find("source src=") tumblradd= post.description[sourceadd:].find("tumblr_") typeadd = post.description[sourceadd:][tumblradd:].find("type=\"video") video_id=post.description[sourceadd:][tumblradd:][:typeadd-2] if video_id.find("/") !=-1: video_id=video_id[:video_id.find("/")] try: list(conn.execute("SELECT * FROM %s WHERE ADDRESS == '%s'"%(table,video_id)).next()) except StopIteration: print(post.title + ": " + post.link + post.published+"\n") wget.download("http://vt.tumblr.com/"+video_id+".mp4",rss_url[7:-4]) print("\n") conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','%s','%f')" % (table,rss_url,video_id,time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S")))) #wget.download(get_download_url("https://your.appspot.com/fetch.php?url="+post.link),rss_url[7:-4]) conn.commit() while True:
for rss_url in RssUrlList: print("Downloading "+rss_url) DownloadVideo(rss_url) print("Sleep "+str(sleep)+" seconds") time.sleep(sleep)
conditional_block
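The feed loop above stops as soon as it reaches a post whose timestamp equals the newest DATE already stored, so each poll only processes new entries. A small Python sketch of that cutoff logic in isolation follows; the post list and timestamps are made up, and the sketch uses <= as a slightly more defensive cutoff where the original breaks on an exact match.

# Pretend feed, newest first, as feedparser returns it.
posts = [{'id': 'c', 'ts': 300.0}, {'id': 'b', 'ts': 200.0}, {'id': 'a', 'ts': 100.0}]

def new_posts(posts, newest_stored_ts):
    """Yield posts newer than the stored high-water mark, stopping early."""
    for post in posts:
        if post['ts'] <= newest_stored_ts:
            break  # everything after this is older and already processed
        yield post

print([p['id'] for p in new_posts(posts, 200.0)])  # ['c']
print([p['id'] for p in new_posts(posts, 0.0)])    # ['c', 'b', 'a']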
SheetList.ts
import { Map, Range, Record } from 'immutable'; import { RootState } from '../ducks'; import Sheet, { ISheet } from './Sheet'; export interface ISheetList { list: Map<number, Sheet>; } const defaultValue: ISheetList = { list: Map(), }; export default class SheetList extends Record(defaultValue) { constructor(params?: Partial<ISheetList>) { params ? super(params) : super(); } public toggleVersion(version: number)
public updateList(params: ISheet[]) { let newList = Map<number, Sheet>(); params.forEach((sheet) => { newList = newList.set(sheet.id, new Sheet(sheet)); }); return this.set('list', newList); } public whereAbility(ability: number, type: RootState['$$sheet']['type']) { return this.list .map((sheet) => { if (sheet[type] !== ability || sheet.hide) { return; } return sheet; }) .filter((sheet) => sheet !== undefined) .sortBy((sheet) => { if (sheet?.title === undefined) { return; } return sheet.title.toLocaleLowerCase(); }); } public chunk(list: Map<number, Sheet | undefined>, chunkSize = 5) { return Range(0, list.count(), chunkSize).map((chunkStart) => list.slice(chunkStart, chunkStart + chunkSize), ); } }
{ const newList = this.list.map((sheet) => { if (sheet.version !== version) { return sheet; } return sheet.set('hide', !sheet.hide); }); return this.set('list', newList); }
identifier_body
SheetList.ts
import { Map, Range, Record } from 'immutable'; import { RootState } from '../ducks'; import Sheet, { ISheet } from './Sheet'; export interface ISheetList { list: Map<number, Sheet>; } const defaultValue: ISheetList = { list: Map(), }; export default class SheetList extends Record(defaultValue) { constructor(params?: Partial<ISheetList>) { params ? super(params) : super(); } public toggleVersion(version: number) { const newList = this.list.map((sheet) => { if (sheet.version !== version) { return sheet; } return sheet.set('hide', !sheet.hide); }); return this.set('list', newList); } public updateList(params: ISheet[]) { let newList = Map<number, Sheet>(); params.forEach((sheet) => { newList = newList.set(sheet.id, new Sheet(sheet)); }); return this.set('list', newList); } public whereAbility(ability: number, type: RootState['$$sheet']['type']) { return this.list .map((sheet) => { if (sheet[type] !== ability || sheet.hide) { return; } return sheet; }) .filter((sheet) => sheet !== undefined) .sortBy((sheet) => {
}); } public chunk(list: Map<number, Sheet | undefined>, chunkSize = 5) { return Range(0, list.count(), chunkSize).map((chunkStart) => list.slice(chunkStart, chunkStart + chunkSize), ); } }
if (sheet?.title === undefined) { return; } return sheet.title.toLocaleLowerCase();
random_line_split
SheetList.ts
import { Map, Range, Record } from 'immutable'; import { RootState } from '../ducks'; import Sheet, { ISheet } from './Sheet'; export interface ISheetList { list: Map<number, Sheet>; } const defaultValue: ISheetList = { list: Map(), }; export default class SheetList extends Record(defaultValue) { constructor(params?: Partial<ISheetList>) { params ? super(params) : super(); } public toggleVersion(version: number) { const newList = this.list.map((sheet) => { if (sheet.version !== version) { return sheet; } return sheet.set('hide', !sheet.hide); }); return this.set('list', newList); } public updateList(params: ISheet[]) { let newList = Map<number, Sheet>(); params.forEach((sheet) => { newList = newList.set(sheet.id, new Sheet(sheet)); }); return this.set('list', newList); } public whereAbility(ability: number, type: RootState['$$sheet']['type']) { return this.list .map((sheet) => { if (sheet[type] !== ability || sheet.hide)
return sheet; }) .filter((sheet) => sheet !== undefined) .sortBy((sheet) => { if (sheet?.title === undefined) { return; } return sheet.title.toLocaleLowerCase(); }); } public chunk(list: Map<number, Sheet | undefined>, chunkSize = 5) { return Range(0, list.count(), chunkSize).map((chunkStart) => list.slice(chunkStart, chunkStart + chunkSize), ); } }
{ return; }
conditional_block
SheetList.ts
import { Map, Range, Record } from 'immutable'; import { RootState } from '../ducks'; import Sheet, { ISheet } from './Sheet'; export interface ISheetList { list: Map<number, Sheet>; } const defaultValue: ISheetList = { list: Map(), }; export default class SheetList extends Record(defaultValue) { constructor(params?: Partial<ISheetList>) { params ? super(params) : super(); } public toggleVersion(version: number) { const newList = this.list.map((sheet) => { if (sheet.version !== version) { return sheet; } return sheet.set('hide', !sheet.hide); }); return this.set('list', newList); } public
(params: ISheet[]) { let newList = Map<number, Sheet>(); params.forEach((sheet) => { newList = newList.set(sheet.id, new Sheet(sheet)); }); return this.set('list', newList); } public whereAbility(ability: number, type: RootState['$$sheet']['type']) { return this.list .map((sheet) => { if (sheet[type] !== ability || sheet.hide) { return; } return sheet; }) .filter((sheet) => sheet !== undefined) .sortBy((sheet) => { if (sheet?.title === undefined) { return; } return sheet.title.toLocaleLowerCase(); }); } public chunk(list: Map<number, Sheet | undefined>, chunkSize = 5) { return Range(0, list.count(), chunkSize).map((chunkStart) => list.slice(chunkStart, chunkStart + chunkSize), ); } }
updateList
identifier_name
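SheetList.chunk slices a collection into fixed-size pieces by iterating chunk starts with Range(0, count, chunkSize). The same idea in a short Python sketch (list-based for brevity; the Immutable.js version slices a Map):

def chunk(items, chunk_size=5):
    """Split items into consecutive slices of at most chunk_size elements."""
    return [items[start:start + chunk_size]
            for start in range(0, len(items), chunk_size)]

print(chunk(list('abcdefgh'), 3))  # [['a', 'b', 'c'], ['d', 'e', 'f'], ['g', 'h']]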
tooltipAdvancedHelper.js
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ ({ getClassList: function() { return 'uiTooltip advanced-wrapper visible'; }, initStyle: function(component) { this.ttLib.tooltip.computeTooltipStyle(component); }, show: function(cmp) { var classList = this.ttLib.tooltip.getClassList(cmp); classList.push('transition-start'); cmp.set('v.classList', classList.join(' ')); this.position(cmp); requestAnimationFrame($A.getCallback(function() { if(!cmp.isValid()) { return; } if(cmp.get('v.isVisible')) { classList.push('visible'); cmp.set('v.classList', classList.join(' ')); } })); }, hide: function(cmp) { if(cmp.isValid()) { cmp.set('v.classList', this.ttLib.tooltip.getClassList(cmp).join(' ')); } }, position: function(component) { var FLIP_THRESHOLD = 50; var node = component.find('tooltip').getElement(); var direction = component.get('v.direction'); var ttbodyNode = component.find('tooltipbody').getElement(); var ttWrapper = component.find('tooltipwrapper').getElement(); var classList = component.get('v.classList'); var pointer = node.querySelector('.pointer'); var target = $A.getComponent(component.get('v.target')).getElement(); var allowFlips = $A.util.getBooleanValue(component.get('v.allowFlips')); var boundingRect = target.getBoundingClientRect(); var lib = this.lib.panelPositioning; var bbDirections; var thisConstraint; var classArr = classList.split(' '); if(allowFlips && boundingRect.top < FLIP_THRESHOLD) { direction = 'south'; } else if (allowFlips && document.documentElement.clientHeight - (boundingRect.top + boundingRect.height) < FLIP_THRESHOLD) { direction = 'north'; } classArr = classArr.filter(function(item) { if(item.match(/north|east|west|south/)) { return false; } else { return true; } }); classArr.push(direction); component.set('v.direction', direction); component.set('v.classList', classArr.join(' ')); ttWrapper.className = classArr.join(' '); var constraintMap = { north : { align: 'center bottom', targetAlign : 'center top', bbDirections : { left:true, right:true } }, south : { align: 'center top', targetAlign : 'center bottom', bbDirections : { left:true, right:true } }, west : { align: 'right center', targetAlign : 'left center', bbDirections : { top:true, bottom:true } }, east : { align : 'left center', targetAlign : 'right center', bbDirections : { top:true, bottom:true } } }; component.originalDirection = direction; /* IMPORTANT. The order in which constraints are applied matters. The last constraint applied has the highest priority. 
*/ component.constraints = {}; node.style.display = 'block'; for(thisConstraint in constraintMap) { component.constraints[thisConstraint] = lib.createRelationship({ element:ttbodyNode, target:target, align:constraintMap[thisConstraint].align, targetAlign:constraintMap[thisConstraint].targetAlign, enable: false, pad: 10 }); } component.constraints.windowBox = lib.createRelationship({ element:ttbodyNode, target:window, type:'bounding box', enable: true, pad: 5, boxDirections: { left:true, right:true } }); for(thisConstraint in constraintMap) { component.constraints[thisConstraint + '_pointer'] = lib.createRelationship({ element:pointer, target:target, align:constraintMap[thisConstraint].align, targetAlign: constraintMap[thisConstraint].targetAlign, enable: false, pad: 0 }); } for(thisConstraint in constraintMap) { if(thisConstraint === 'north' || thisConstraint === 'south')
else { bbDirections = { top: true, bottom:true }; } component.constraints[thisConstraint + 'pointerBox'] = lib.createRelationship({ element:pointer, target:ttbodyNode, type: 'bounding box', enable: false, boxDirections: bbDirections, pad: 0 }); } component.constraints.eastPointerOverlap = lib.createRelationship({ element:pointer, target:ttbodyNode, align: 'right center', targetAlign: 'left center', enable: true, pad: 0 }); component.constraints.westPointerOverlap = lib.createRelationship({ element:pointer, target:ttbodyNode, align: 'left center', targetAlign: 'right center', enable: false, pad: 0 }); this.updateConstraints(component); }, updateConstraints: function(component) { /* TODO this method talks to the DOM a lot because I wrote this before I understood aura and is in general janky, please fix in 200: W-2726214 */ var direction = component.get('v.direction'); var ttWrapper = component.find('tooltipwrapper').getElement(); component.set('v.direction', direction); ['north', 'south', 'west' , 'east'].forEach(function(directions) { component.constraints[directions].disable(); component.constraints[directions + '_pointer'].disable(); component.constraints[directions + 'pointerBox'].disable(); // Manipulating classes directly to avoid re-render: ttWrapper.classList.remove(directions); if(component.constraints[directions + 'PointerOverlap']) { component.constraints[directions + 'PointerOverlap'].disable(); component.constraints[directions + 'pointerBox'].disable(); } }); component.constraints[direction].enable(); ttWrapper.classList.add(direction); component.constraints[direction + '_pointer'].enable(); if(component.constraints[direction + 'PointerOverlap']) { component.constraints[direction + 'PointerOverlap'].enable(); } component.constraints[direction + 'pointerBox'].enable(); this.lib.panelPositioning.reposition(); // classname must be set after constraints // for advanced tooltips to avoid positioning issues } })// eslint-disable-line semi
{ bbDirections = { left:true, right:true }; }
conditional_block
tooltipAdvancedHelper.js
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ ({ getClassList: function() { return 'uiTooltip advanced-wrapper visible'; }, initStyle: function(component) { this.ttLib.tooltip.computeTooltipStyle(component); }, show: function(cmp) { var classList = this.ttLib.tooltip.getClassList(cmp); classList.push('transition-start'); cmp.set('v.classList', classList.join(' ')); this.position(cmp); requestAnimationFrame($A.getCallback(function() { if(!cmp.isValid()) { return; } if(cmp.get('v.isVisible')) { classList.push('visible'); cmp.set('v.classList', classList.join(' ')); } })); }, hide: function(cmp) { if(cmp.isValid()) { cmp.set('v.classList', this.ttLib.tooltip.getClassList(cmp).join(' ')); } }, position: function(component) { var FLIP_THRESHOLD = 50; var node = component.find('tooltip').getElement(); var direction = component.get('v.direction'); var ttbodyNode = component.find('tooltipbody').getElement(); var ttWrapper = component.find('tooltipwrapper').getElement(); var classList = component.get('v.classList'); var pointer = node.querySelector('.pointer'); var target = $A.getComponent(component.get('v.target')).getElement(); var allowFlips = $A.util.getBooleanValue(component.get('v.allowFlips')); var boundingRect = target.getBoundingClientRect(); var lib = this.lib.panelPositioning; var bbDirections; var thisConstraint; var classArr = classList.split(' '); if(allowFlips && boundingRect.top < FLIP_THRESHOLD) { direction = 'south'; } else if (allowFlips && document.documentElement.clientHeight - (boundingRect.top + boundingRect.height) < FLIP_THRESHOLD) { direction = 'north'; } classArr = classArr.filter(function(item) { if(item.match(/north|east|west|south/)) { return false; } else { return true; } }); classArr.push(direction); component.set('v.direction', direction); component.set('v.classList', classArr.join(' ')); ttWrapper.className = classArr.join(' '); var constraintMap = { north : { align: 'center bottom', targetAlign : 'center top', bbDirections : { left:true, right:true } }, south : { align: 'center top', targetAlign : 'center bottom', bbDirections : { left:true, right:true } }, west : { align: 'right center', targetAlign : 'left center', bbDirections : { top:true, bottom:true } }, east : { align : 'left center', targetAlign : 'right center', bbDirections : { top:true, bottom:true } } }; component.originalDirection = direction; /* IMPORTANT. The order in which constraints are applied matters. The last constraint applied has the highest priority. 
*/ component.constraints = {}; node.style.display = 'block'; for(thisConstraint in constraintMap) { component.constraints[thisConstraint] = lib.createRelationship({ element:ttbodyNode, target:target, align:constraintMap[thisConstraint].align, targetAlign:constraintMap[thisConstraint].targetAlign, enable: false, pad: 10 }); } component.constraints.windowBox = lib.createRelationship({ element:ttbodyNode, target:window, type:'bounding box', enable: true, pad: 5, boxDirections: { left:true, right:true } }); for(thisConstraint in constraintMap) { component.constraints[thisConstraint + '_pointer'] = lib.createRelationship({ element:pointer, target:target, align:constraintMap[thisConstraint].align, targetAlign: constraintMap[thisConstraint].targetAlign, enable: false, pad: 0 }); } for(thisConstraint in constraintMap) { if(thisConstraint === 'north' || thisConstraint === 'south') { bbDirections = { left:true, right:true }; } else { bbDirections = { top: true, bottom:true }; } component.constraints[thisConstraint + 'pointerBox'] = lib.createRelationship({ element:pointer, target:ttbodyNode, type: 'bounding box', enable: false, boxDirections: bbDirections, pad: 0 }); } component.constraints.eastPointerOverlap = lib.createRelationship({ element:pointer, target:ttbodyNode, align: 'right center', targetAlign: 'left center', enable: true, pad: 0 }); component.constraints.westPointerOverlap = lib.createRelationship({ element:pointer, target:ttbodyNode, align: 'left center', targetAlign: 'right center', enable: false, pad: 0 }); this.updateConstraints(component); }, updateConstraints: function(component) { /* TODO this method talks to the DOM a lot because I wrote this before I understood aura and is in general janky, please fix in 200: W-2726214 */ var direction = component.get('v.direction'); var ttWrapper = component.find('tooltipwrapper').getElement(); component.set('v.direction', direction);
['north', 'south', 'west' , 'east'].forEach(function(directions) { component.constraints[directions].disable(); component.constraints[directions + '_pointer'].disable(); component.constraints[directions + 'pointerBox'].disable(); // Manipulating classes directly to avoid re-render: ttWrapper.classList.remove(directions); if(component.constraints[directions + 'PointerOverlap']) { component.constraints[directions + 'PointerOverlap'].disable(); component.constraints[directions + 'pointerBox'].disable(); } }); component.constraints[direction].enable(); ttWrapper.classList.add(direction); component.constraints[direction + '_pointer'].enable(); if(component.constraints[direction + 'PointerOverlap']) { component.constraints[direction + 'PointerOverlap'].enable(); } component.constraints[direction + 'pointerBox'].enable(); this.lib.panelPositioning.reposition(); // classname must be set after constraints // for advanced tooltips to avoid positioning issues } })// eslint-disable-line semi
random_line_split
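The comment in the helper stresses that constraint order matters: the last constraint applied has the highest priority. A toy Python sketch of that idea, where later position constraints see (and can undo) earlier results; the constraint functions and coordinates are invented for illustration.

def clamp_to_window(pos):
    # Keep x within [0, 100].
    return {'x': max(0, min(100, pos['x'])), 'y': pos['y']}

def align_to_target(pos):
    # Force x to the target's x; when applied last, it wins on conflicts.
    return {'x': 120, 'y': pos['y']}

def apply_constraints(pos, constraints):
    """Apply constraints in order; the last one has the final say."""
    for constraint in constraints:
        pos = constraint(pos)
    return pos

print(apply_constraints({'x': 150, 'y': 10}, [align_to_target, clamp_to_window]))
# {'x': 100, 'y': 10} -- clamping ran last and capped the aligned x
print(apply_constraints({'x': 150, 'y': 10}, [clamp_to_window, align_to_target]))
# {'x': 120, 'y': 10} -- alignment ran last and overrode the clamp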
vmSearchTaxonRepository.py
# -*- coding:utf-8 -*- from sqlalchemy import desc, func from atlas.modeles.entities.vmSearchTaxon import VmSearchTaxon def listeTaxons(session): """ Returns a list of dicts: label = Latin and French names concatenated, value = cd_ref TODO Useless function, to be removed!!! """ req = session.query(VmSearchTaxon.search_name, VmSearchTaxon.cd_ref).all() taxonList = list() for r in req: temp = {"label": r[0], "value": r[1]} taxonList.append(temp) return taxonList def l
session, search, limit=50): """ ILIKE search on VmSearchTaxon Used for taxon search autocompletion :query SQLA_Session session :query str search: search string :query int limit: maximum number of results **Returns:** list: returns a list of {'label': str, 'value': int} label = search_name value = cd_ref """ req = session.query( VmSearchTaxon.search_name, VmSearchTaxon.cd_ref, func.similarity(VmSearchTaxon.search_name, search).label("idx_trgm"), ).distinct() search = search.replace(" ", "%") req = ( req.filter(VmSearchTaxon.search_name.ilike("%" + search + "%")) .order_by(desc("idx_trgm")) .order_by(VmSearchTaxon.cd_ref == VmSearchTaxon.cd_nom) .limit(limit) ) data = req.all() return [{"label": d[0], "value": d[1]} for d in data]
isteTaxonsSearch(
identifier_name
vmSearchTaxonRepository.py
# -*- coding:utf-8 -*- from sqlalchemy import desc, func from atlas.modeles.entities.vmSearchTaxon import VmSearchTaxon def listeTaxons(session): """ Returns a list of dicts: label = Latin and French names concatenated, value = cd_ref TODO Useless function, to be removed!!! """ req = session.query(VmSearchTaxon.search_name, VmSearchTaxon.cd_ref).all() taxonList = list() for r in req: temp = {"label": r[0], "value": r[1]} taxonList.append(temp) return taxonList def listeTaxonsSearch(session, search, limit=50): """ ILIKE search on VmSearchTaxon
:query str search: search string :query int limit: maximum number of results **Returns:** list: returns a list of {'label': str, 'value': int} label = search_name value = cd_ref """ req = session.query( VmSearchTaxon.search_name, VmSearchTaxon.cd_ref, func.similarity(VmSearchTaxon.search_name, search).label("idx_trgm"), ).distinct() search = search.replace(" ", "%") req = ( req.filter(VmSearchTaxon.search_name.ilike("%" + search + "%")) .order_by(desc("idx_trgm")) .order_by(VmSearchTaxon.cd_ref == VmSearchTaxon.cd_nom) .limit(limit) ) data = req.all() return [{"label": d[0], "value": d[1]} for d in data]
Used for taxon search autocompletion :query SQLA_Session session
random_line_split
vmSearchTaxonRepository.py
# -*- coding:utf-8 -*- from sqlalchemy import desc, func from atlas.modeles.entities.vmSearchTaxon import VmSearchTaxon def listeTaxons(session): """ Returns a list of dicts: label = Latin and French names concatenated, value = cd_ref TODO Useless function, to be removed!!! """ req = session.query(VmSearchTaxon.search_name, VmSearchTaxon.cd_ref).all() taxonList = list() for r in req: temp = {"label": r[0], "value": r[1]} taxonList.append(temp) return taxonList def listeTaxonsSearch(session, search, limit=50): "
search = search.replace(" ", "%") req = ( req.filter(VmSearchTaxon.search_name.ilike("%" + search + "%")) .order_by(desc("idx_trgm")) .order_by(VmSearchTaxon.cd_ref == VmSearchTaxon.cd_nom) .limit(limit) ) data = req.all() return [{"label": d[0], "value": d[1]} for d in data]
"" Recherche dans la VmSearchTaxon en ilike Utilisé pour l'autocomplétion de la recherche de taxon :query SQLA_Session session :query str search : chaine de charactere pour la recherche :query int limit: limite des résultats **Returns:** list: retourne un tableau {'label':'str': 'value': 'int'} label = search_name value = cd_ref """ req = session.query( VmSearchTaxon.search_name, VmSearchTaxon.cd_ref, func.similarity(VmSearchTaxon.search_name, search).label("idx_trgm"), ).distinct()
identifier_body
vmSearchTaxonRepository.py
# -*- coding:utf-8 -*- from sqlalchemy import desc, func from atlas.modeles.entities.vmSearchTaxon import VmSearchTaxon def listeTaxons(session): """ Returns a list of dicts: label = Latin and French names concatenated, value = cd_ref TODO Useless function, to be removed!!! """ req = session.query(VmSearchTaxon.search_name, VmSearchTaxon.cd_ref).all() taxonList = list() for r in req: t
return taxonList def listeTaxonsSearch(session, search, limit=50): """ ILIKE search on VmSearchTaxon Used for taxon search autocompletion :query SQLA_Session session :query str search: search string :query int limit: maximum number of results **Returns:** list: returns a list of {'label': str, 'value': int} label = search_name value = cd_ref """ req = session.query( VmSearchTaxon.search_name, VmSearchTaxon.cd_ref, func.similarity(VmSearchTaxon.search_name, search).label("idx_trgm"), ).distinct() search = search.replace(" ", "%") req = ( req.filter(VmSearchTaxon.search_name.ilike("%" + search + "%")) .order_by(desc("idx_trgm")) .order_by(VmSearchTaxon.cd_ref == VmSearchTaxon.cd_nom) .limit(limit) ) data = req.all() return [{"label": d[0], "value": d[1]} for d in data]
emp = {"label": r[0], "value": r[1]} taxonList.append(temp)
conditional_block
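listeTaxonsSearch ranks candidates with PostgreSQL's pg_trgm similarity() and an ILIKE filter, with spaces turned into SQL wildcards. Outside the database the same ranking can be mimicked; here is a rough Python sketch using difflib as a stand-in for trigram similarity (difflib's ratio is not pg_trgm, just an approximation for illustration, and the species names are examples).

import difflib

names = ['Falco peregrinus', 'Falco tinnunculus', 'Fagus sylvatica']

def autocomplete(search, names, limit=5):
    """Filter on a loose substring match, then rank by similarity (best first)."""
    needle = search.lower()
    matches = [n for n in names if needle in n.lower()]
    matches.sort(key=lambda n: difflib.SequenceMatcher(None, needle, n.lower()).ratio(),
                 reverse=True)
    return matches[:limit]

print(autocomplete('falco', names))  # both Falco species, best ratio first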
wallpaper.tsx
import React = require('react') import './wallpaper.less' interface WallPaperProps { src?: string style?: React.CSSProperties className?: string } export const defualtWallPaper = '../../assets/pattern.png' export const defualWallpaperStyle: React.CSSProperties = { backgroundRepeat: 'repeat', backgroundPosition: 'center' } export class WallPaper extends React.Component<WallPaperProps, {}> { static defualtWallPaper = defualtWallPaper static defualStyle = defualWallpaperStyle private e: HTMLDivElement constructor(props) { super(props) } componentDidMount()
ref = (el) => this.e = el render() { const { style = {}, className = '' } = this.props return <div className={ 'wallpaper ' + className } ref={ this.ref } style={ style }> { this.props.children } </div> } }
{ if (this.props.src) { const bg = new Image() bg.src = this.props.src || '' bg.onload = () => { this.e.style.backgroundImage = `url('${this.props.src}')` } } }
identifier_body
wallpaper.tsx
import React = require('react') import './wallpaper.less' interface WallPaperProps { src?: string style?: React.CSSProperties className?: string } export const defualtWallPaper = '../../assets/pattern.png' export const defualWallpaperStyle: React.CSSProperties = { backgroundRepeat: 'repeat', backgroundPosition: 'center' } export class WallPaper extends React.Component<WallPaperProps, {}> { static defualtWallPaper = defualtWallPaper static defualStyle = defualWallpaperStyle private e: HTMLDivElement constructor(props) { super(props) } componentDidMount() { if (this.props.src) { const bg = new Image() bg.src = this.props.src || '' bg.onload = () => {
ref = (el) => this.e = el render() { const { style = {}, className = '' } = this.props return <div className={ 'wallpaper ' + className } ref={ this.ref } style={ style }> { this.props.children } </div> } }
this.e.style.backgroundImage = `url('${this.props.src}')` } } }
random_line_split
wallpaper.tsx
import React = require('react') import './wallpaper.less' interface WallPaperProps { src?: string style?: React.CSSProperties className?: string } export const defualtWallPaper = '../../assets/pattern.png' export const defualWallpaperStyle: React.CSSProperties = { backgroundRepeat: 'repeat', backgroundPosition: 'center' } export class WallPaper extends React.Component<WallPaperProps, {}> { static defualtWallPaper = defualtWallPaper static defualStyle = defualWallpaperStyle private e: HTMLDivElement constructor(props) { super(props) } componentDidMount() { if (this.props.src)
} ref = (el) => this.e = el render() { const { style = {}, className = '' } = this.props return <div className={ 'wallpaper ' + className } ref={ this.ref } style={ style }> { this.props.children } </div> } }
{ const bg = new Image() bg.src = this.props.src || '' bg.onload = () => { this.e.style.backgroundImage = `url('${this.props.src}')` } }
conditional_block
wallpaper.tsx
import React = require('react') import './wallpaper.less' interface WallPaperProps { src?: string style?: React.CSSProperties className?: string } export const defualtWallPaper = '../../assets/pattern.png' export const defualWallpaperStyle: React.CSSProperties = { backgroundRepeat: 'repeat', backgroundPosition: 'center' } export class
extends React.Component<WallPaperProps, {}> { static defualtWallPaper = defualtWallPaper static defualStyle = defualWallpaperStyle private e: HTMLDivElement constructor(props) { super(props) } componentDidMount() { if (this.props.src) { const bg = new Image() bg.src = this.props.src || '' bg.onload = () => { this.e.style.backgroundImage = `url('${this.props.src}')` } } } ref = (el) => this.e = el render() { const { style = {}, className = '' } = this.props return <div className={ 'wallpaper ' + className } ref={ this.ref } style={ style }> { this.props.children } </div> } }
WallPaper
identifier_name
mutation_utils.rs
use std::ops; use cge::gene::GeneExtras; use rand::{Rng, thread_rng}; use crate::utils::Individual; use crate::cge_utils::Mutation; use crate::NNFitnessFunction; // A few convenience methods for helping with determining which mutation operators are valid impl<T: NNFitnessFunction+ Clone> Individual<T> { // Returns the amount of connections from an input with the given id pub fn get_input_copies(&self, id: usize) -> usize { self.network.genome.iter().fold(0, |acc, g| { if let GeneExtras::Input(_) = (*g).variant { acc + ((g.id == id) as usize) } else { acc } }) } // Returns a vector with each element being the length of the shortest path between the // corresponding neuron and the nearest output pub fn get_depths(&self, include_connections: bool) -> Vec<usize> { let mut depths = Vec::new(); let mut stack = Vec::new(); for gene in &self.network.genome { let depth = stack.len(); if let GeneExtras::Neuron(_, ref inputs) = gene.variant
else { if include_connections { depths.push(depth); } while let Some(&1) = stack.last() { stack.pop(); } if let Some(last) = stack.last_mut() { *last -= 1; } } } depths } pub fn random_index(&self) -> usize { let indices = (0..self.next_id).map(|i| { self.network.get_neuron_index(i).unwrap() }).collect::<Vec<usize>>(); *thread_rng().choose(&indices).unwrap() } pub fn subnetwork_index(&self, index: usize) -> ops::Range<usize> { let mut i = index; let mut sum = 0; while sum != 1 { if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[i].variant { sum += 1 - *inputs as i32; } else { sum += 1; } i += 1; } ops::Range { start: index, end: i } } } // Wrap the Network implementation, to adjust the gene_ages field as well as the genome impl<T: NNFitnessFunction + Clone> Mutation for Individual<T> { // Inputs and outputs aren't used; read from field instead fn add_subnetwork(&mut self, _: usize, output: usize, _: usize) { self.network.add_subnetwork(self.next_id, output, self.inputs); if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[output].variant { for _ in 0..*inputs + 1 { self.ages.insert(output, 0); } } self.next_id += 1; } fn add_forward(&mut self, input: usize, output: usize) { self.network.add_forward(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_recurrent(&mut self, input: usize, output: usize) { self.network.add_recurrent(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_bias(&mut self, output: usize) { self.network.add_bias(output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_input(&mut self, input: usize, output: usize) { self.network.add_input(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn remove_connection(&mut self, index: usize, output: usize) { self.network.remove_connection(index, output); // Remove the age associated with the removed connection self.ages.remove(index); } // does not need to be implemented fn previous_neuron_index(&self, _: usize) -> Option<usize> { unimplemented!(); } }
{ depths.push(depth); stack.push(*inputs); }
conditional_block
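get_depths walks the linear CGE genome with a stack of remaining-input counters: entering a neuron pushes its input count, each consumed input decrements the top, and the stack height at any gene is its depth. A small Python sketch of that walk over a made-up genome encoding — ('N', n_inputs) for neuron genes, ('I',) for input/connection genes — follows; the encoding is invented for illustration, not the cge crate's representation.

def get_depths(genome, include_connections=False):
    """Depth of each gene = stack height when it is visited."""
    depths, stack = [], []
    for gene in genome:
        depth = len(stack)
        if gene[0] == 'N':            # neuron gene: ('N', input_count)
            depths.append(depth)
            stack.append(gene[1])
        else:                         # input/connection gene: ('I',)
            if include_connections:
                depths.append(depth)
            while stack and stack[-1] == 1:
                stack.pop()           # this subtree is complete
            if stack:
                stack[-1] -= 1        # one more input consumed
    return depths

# Output neuron with 2 inputs: a sub-neuron (1 input) and a raw input.
genome = [('N', 2), ('N', 1), ('I',), ('I',)]
print(get_depths(genome))        # [0, 1] -- output at depth 0, sub-neuron at depth 1
print(get_depths(genome, True))  # [0, 1, 2, 1] -- connections included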
mutation_utils.rs
use std::ops; use cge::gene::GeneExtras; use rand::{Rng, thread_rng}; use crate::utils::Individual; use crate::cge_utils::Mutation; use crate::NNFitnessFunction; // A few convenience methods for helping with determining which mutation operators are valid impl<T: NNFitnessFunction+ Clone> Individual<T> { // Returns the amount of connections from an input with the given id pub fn
(&self, id: usize) -> usize { self.network.genome.iter().fold(0, |acc, g| { if let GeneExtras::Input(_) = (*g).variant { acc + ((g.id == id) as usize) } else { acc } }) } // Returns a vector with each element being the length of the shortest path between the // corresponding neuron and the nearest output pub fn get_depths(&self, include_connections: bool) -> Vec<usize> { let mut depths = Vec::new(); let mut stack = Vec::new(); for gene in &self.network.genome { let depth = stack.len(); if let GeneExtras::Neuron(_, ref inputs) = gene.variant { depths.push(depth); stack.push(*inputs); } else { if include_connections { depths.push(depth); } while let Some(&1) = stack.last() { stack.pop(); } if let Some(last) = stack.last_mut() { *last -= 1; } } } depths } pub fn random_index(&self) -> usize { let indices = (0..self.next_id).map(|i| { self.network.get_neuron_index(i).unwrap() }).collect::<Vec<usize>>(); *thread_rng().choose(&indices).unwrap() } pub fn subnetwork_index(&self, index: usize) -> ops::Range<usize> { let mut i = index; let mut sum = 0; while sum != 1 { if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[i].variant { sum += 1 - *inputs as i32; } else { sum += 1; } i += 1; } ops::Range { start: index, end: i } } } // Wrap the Network implementation, to adjust the gene_ages field as well as the genome impl<T: NNFitnessFunction + Clone> Mutation for Individual<T> { // Inputs and outputs aren't used; read from field instead fn add_subnetwork(&mut self, _: usize, output: usize, _: usize) { self.network.add_subnetwork(self.next_id, output, self.inputs); if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[output].variant { for _ in 0..*inputs + 1 { self.ages.insert(output, 0); } } self.next_id += 1; } fn add_forward(&mut self, input: usize, output: usize) { self.network.add_forward(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_recurrent(&mut self, input: usize, output: usize) { self.network.add_recurrent(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_bias(&mut self, output: usize) { self.network.add_bias(output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_input(&mut self, input: usize, output: usize) { self.network.add_input(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn remove_connection(&mut self, index: usize, output: usize) { self.network.remove_connection(index, output); // Remove the age associated with the removed connection self.ages.remove(index); } // does not need to be implemented fn previous_neuron_index(&self, _: usize) -> Option<usize> { unimplemented!(); } }
get_input_copies
identifier_name
mutation_utils.rs
use std::ops; use cge::gene::GeneExtras; use rand::{Rng, thread_rng}; use crate::utils::Individual; use crate::cge_utils::Mutation; use crate::NNFitnessFunction; // A few convenience methods for helping with determining which mutation operators are valid impl<T: NNFitnessFunction+ Clone> Individual<T> { // Returns the amount of connections from an input with the given id pub fn get_input_copies(&self, id: usize) -> usize { self.network.genome.iter().fold(0, |acc, g| { if let GeneExtras::Input(_) = (*g).variant { acc + ((g.id == id) as usize) } else { acc } }) } // Returns a vector with each element being the length of the shortest path between the // corresponding neuron and the nearest output pub fn get_depths(&self, include_connections: bool) -> Vec<usize> { let mut depths = Vec::new(); let mut stack = Vec::new(); for gene in &self.network.genome { let depth = stack.len(); if let GeneExtras::Neuron(_, ref inputs) = gene.variant { depths.push(depth); stack.push(*inputs); } else { if include_connections { depths.push(depth); } while let Some(&1) = stack.last() { stack.pop(); } if let Some(last) = stack.last_mut() { *last -= 1; } } } depths } pub fn random_index(&self) -> usize { let indices = (0..self.next_id).map(|i| { self.network.get_neuron_index(i).unwrap() }).collect::<Vec<usize>>(); *thread_rng().choose(&indices).unwrap() } pub fn subnetwork_index(&self, index: usize) -> ops::Range<usize> { let mut i = index; let mut sum = 0; while sum != 1 { if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[i].variant { sum += 1 - *inputs as i32; } else { sum += 1; } i += 1; } ops::Range { start: index, end: i } } } // Wrap the Network implementation, to adjust the gene_ages field as well as the genome impl<T: NNFitnessFunction + Clone> Mutation for Individual<T> { // Inputs and outputs aren't used; read from field instead fn add_subnetwork(&mut self, _: usize, output: usize, _: usize) { self.network.add_subnetwork(self.next_id, output, self.inputs); if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[output].variant { for _ in 0..*inputs + 1 { self.ages.insert(output, 0); } } self.next_id += 1; } fn add_forward(&mut self, input: usize, output: usize) { self.network.add_forward(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_recurrent(&mut self, input: usize, output: usize) { self.network.add_recurrent(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_bias(&mut self, output: usize) { self.network.add_bias(output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_input(&mut self, input: usize, output: usize) { self.network.add_input(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn remove_connection(&mut self, index: usize, output: usize) { self.network.remove_connection(index, output); // Remove the age associated with the removed connection self.ages.remove(index); }
// does not need to be implemented fn previous_neuron_index(&self, _: usize) -> Option<usize> { unimplemented!(); } }
random_line_split
mutation_utils.rs
use std::ops; use cge::gene::GeneExtras; use rand::{Rng, thread_rng}; use crate::utils::Individual; use crate::cge_utils::Mutation; use crate::NNFitnessFunction; // A few convenience methods for helping with determining which mutation operators are valid impl<T: NNFitnessFunction+ Clone> Individual<T> { // Returns the amount of connections from an input with the given id pub fn get_input_copies(&self, id: usize) -> usize { self.network.genome.iter().fold(0, |acc, g| { if let GeneExtras::Input(_) = (*g).variant { acc + ((g.id == id) as usize) } else { acc } }) } // Returns a vector with each element being the length of the shortest path between the // corresponding neuron and the nearest output pub fn get_depths(&self, include_connections: bool) -> Vec<usize>
} } depths } pub fn random_index(&self) -> usize { let indices = (0..self.next_id).map(|i| { self.network.get_neuron_index(i).unwrap() }).collect::<Vec<usize>>(); *thread_rng().choose(&indices).unwrap() } pub fn subnetwork_index(&self, index: usize) -> ops::Range<usize> { let mut i = index; let mut sum = 0; while sum != 1 { if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[i].variant { sum += 1 - *inputs as i32; } else { sum += 1; } i += 1; } ops::Range { start: index, end: i } } } // Wrap the Network implementation, to adjust the gene_ages field as well as the genome impl<T: NNFitnessFunction + Clone> Mutation for Individual<T> { // Inputs and outputs aren't used; read from field instead fn add_subnetwork(&mut self, _: usize, output: usize, _: usize) { self.network.add_subnetwork(self.next_id, output, self.inputs); if let GeneExtras::Neuron(_, ref inputs) = self.network.genome[output].variant { for _ in 0..*inputs + 1 { self.ages.insert(output, 0); } } self.next_id += 1; } fn add_forward(&mut self, input: usize, output: usize) { self.network.add_forward(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_recurrent(&mut self, input: usize, output: usize) { self.network.add_recurrent(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_bias(&mut self, output: usize) { self.network.add_bias(output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn add_input(&mut self, input: usize, output: usize) { self.network.add_input(input, output); // Add an age associated with the new connection self.ages.insert(output, 0); } fn remove_connection(&mut self, index: usize, output: usize) { self.network.remove_connection(index, output); // Remove the age associated with the removed connection self.ages.remove(index); } // does not need to be implemented fn previous_neuron_index(&self, _: usize) -> Option<usize> { unimplemented!(); } }
{ let mut depths = Vec::new(); let mut stack = Vec::new(); for gene in &self.network.genome { let depth = stack.len(); if let GeneExtras::Neuron(_, ref inputs) = gene.variant { depths.push(depth); stack.push(*inputs); } else { if include_connections { depths.push(depth); } while let Some(&1) = stack.last() { stack.pop(); } if let Some(last) = stack.last_mut() { *last -= 1; }
identifier_body
mod.rs
use communication::Message; pub use self::counter::Counter; pub mod counter; /// The pullable design may need to be upgraded: right now there is no obvious connection between /// subsequent calls to pull; although multiple calls may produce the same time, they don't need to /// and defensive implementations must constantly check this. This complicates data exchange, which /// may conservatively over-flush, if the defensive implementation isn't well done (e.g. now). /// An alternate design is for a Pullable<T, D> to return a (&T, Session<D>), where Session<D> is a /// new type implementing Iterator<Item=Message<D>>, or Iterator<Item=D>, or PullableSession<D>, or /// something like that. Ideally, the Session<D> notices how many records are consumed, and only /// treats those res pub trait Pullable<T, D> { fn pull(&mut self) -> Option<(&T, &mut Message<D>)>; } impl<T, D, P: ?Sized + Pullable<T, D>> Pullable<T, D> for Box<P> { fn pull(&mut self) -> Option<(&T, &mut Message<D>)>
}
{ (**self).pull() }
identifier_body
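The doc comment above sketches an alternative: pull() hands back the time together with a session object that iterates the message's records and observes how many were actually consumed. Here is a rough Python model of that idea; the names are invented, and the real design question is about Rust ownership, which this sketch ignores.

class Session(object):
    """Iterates one message's records and counts how many get consumed."""

    def __init__(self, records):
        self._records = iter(records)
        self.consumed = 0

    def __iter__(self):
        return self

    def __next__(self):
        record = next(self._records)  # raises StopIteration when drained
        self.consumed += 1
        return record

    next = __next__  # Python 2 compatibility

class Puller(object):
    def __init__(self, queue):
        self._queue = queue  # list of (time, records) messages

    def pull(self):
        """Return (time, Session) for the next message, or None when empty."""
        if not self._queue:
            return None
        ts, records = self._queue.pop(0)
        return ts, Session(records)

puller = Puller([(0, ['a', 'b', 'c'])])
ts, session = puller.pull()
first = next(iter(session))           # consume a single record
print(ts, first, session.consumed)    # 0 a 1 -- the session saw one consumption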
mod.rs
use communication::Message; pub use self::counter::Counter; pub mod counter; /// The pullable design may need to be upgraded: right now there is no obvious connection between /// subsequent calls to pull; although multiple calls may produce the same time, they don't need to /// and defensive implementations must constantly check this. This complicates data exchange, which /// may conservatively over-flush, if the defensive implementation isn't well done (e.g. now). /// An alternate design is for a Pullable<T, D> to return a (&T, Session<D>), where Session<D> is a /// new type implementing Iterator<Item=Message<D>>, or Iterator<Item=D>, or PullableSession<D>, or /// something like that. Ideally, the Session<D> notices how many records are consumed, and only /// treats those res pub trait Pullable<T, D> { fn pull(&mut self) -> Option<(&T, &mut Message<D>)>; } impl<T, D, P: ?Sized + Pullable<T, D>> Pullable<T, D> for Box<P> { fn
(&mut self) -> Option<(&T, &mut Message<D>)> { (**self).pull() } }
pull
identifier_name
mod.rs
use communication::Message; pub use self::counter::Counter;
/// may conservatively over-flush, if the defensive implementation isn't well done (e.g. now). /// An alternate design is for a Pullable<T, D> to return a (&T, Session<D>), where Session<D> is a /// new type implementing Iterator<Item=Message<D>>, or Iterator<Item=D>, or PullableSession<D>, or /// something like that. Ideally, the Session<D> notices how many records are consumed, and only /// treats those res pub trait Pullable<T, D> { fn pull(&mut self) -> Option<(&T, &mut Message<D>)>; } impl<T, D, P: ?Sized + Pullable<T, D>> Pullable<T, D> for Box<P> { fn pull(&mut self) -> Option<(&T, &mut Message<D>)> { (**self).pull() } }
pub mod counter; /// The pullable design may need to be upgraded: right now there is no obvious connection between /// subsequent calls to pull; although multiple calls may produce the same time, they don't need to /// and defensive implementations must constantly check this. This complicates data exchange, which
random_line_split