file_name (large_string, lengths 4-140) | prefix (large_string, lengths 0-12.1k) | suffix (large_string, lengths 0-12k) | middle (large_string, lengths 0-7.51k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
main.js | var dp = jQuery;
dp.noConflict();
dp(document).ready(function() {
//SMOOTH SCROLL
dp('a[href^="#"]').bind('click.smoothscroll', function(e) {
e.preventDefault();
dp('html,body').animate({
scrollTop: dp(this.hash).offset().top
}, 1200);
});
//SUPER SLIDES
// dp('#home-slide').superslides({
// animation: 'fade', // You can choose either fade or slide
// play: 6000
// });
//ANIMAZE
dp('.animaze').bind('inview', function(event, visible) {
if (visible) |
/* REMOVE THIS if you want to repeat the animation after the element not in view
else {
$(this).stop().animate({ opacity: 0 });
$(this).removeAttr('style');
}*/
});
dp('.animaze').stop().animate({
opacity: 0
});
//SERVICES
dp("#dp-service").sudoSlider({
customLink: 'a.servicesLink',
responsive: true,
speed: 350,
prevNext: false,
useCSS: true,
effect: "fadeOutIn",
continuous: true,
updateBefore: true
});
//TEXT ROTATOR
dp(".rotatez").textrotator({
animation: "fade",
separator: ",",
speed: 1700
});
//PORTFOLIO
dp('.portfolioContainer').mixitup({
filterSelector: '.portfolioFilter a',
targetSelector: '.portfolio-item',
effects: ['fade', 'scale']
});
//QUOTE SLIDE
dp("#quote-slider").sudoSlider({
customLink: 'a.quoteLink',
speed: 425,
prevNext: true,
responsive: true,
prevHtml: '<a href="#" class="quote-left-indicator"><i class="icon-arrow-left"></i></a>',
nextHtml: '<a href="#" class="quote-right-indicator"><i class="icon-arrow-right"></i></a>',
useCSS: true,
continuous: true,
effect: "fadeOutIn",
updateBefore: true
});
//MAGNIFIC POPUP
dp('.popup').magnificPopup({
type: 'image'
});
//PARALLAX
dp('.parallaxize').parallax("50%", 0.3);
// CONTACT SLIDER
dp("#contact-slider").sudoSlider({
customLink: 'a.contactLink',
speed: 750,
responsive: true,
prevNext: false,
useCSS: false,
continuous: false,
updateBefore: true,
effect: "fadeOutIn"
});
//Map
dp('#map').gmap3({
map: {
options: {
maxZoom: 15
}
},
marker: {
address: "Haltern am See, Weseler Str. 151", // PUT YOUR ADDRESS HERE
options: {
icon: new google.maps.MarkerImage(
"http://cdn.webiconset.com/map-icons/images/pin6.png",
new google.maps.Size(42, 69, "px", "px")
)
}
}
},
"autofit");
});
dp(window).load(function() {
dp("#lazyload").fadeOut();
});
| {
dp(this).stop().animate({
opacity: 1,
top: '0px'
}, 500);
} | conditional_block |
main.js | var dp = jQuery;
dp.noConflict();
dp(document).ready(function() {
//SMOOTH SCROLL
dp('a[href^="#"]').bind('click.smoothscroll', function(e) {
e.preventDefault();
dp('html,body').animate({
scrollTop: dp(this.hash).offset().top
}, 1200);
});
//SUPER SLIDES
// dp('#home-slide').superslides({
// animation: 'fade', // You can choose either fade or slide
// play: 6000
// });
//ANIMAZE
dp('.animaze').bind('inview', function(event, visible) {
if (visible) {
dp(this).stop().animate({
opacity: 1,
top: '0px'
}, 500);
}
/* REMOVE THIS if you want to repeat the animation after the element not in view
else {
$(this).stop().animate({ opacity: 0 });
$(this).removeAttr('style');
}*/
});
dp('.animaze').stop().animate({
opacity: 0
});
//SERVICES
dp("#dp-service").sudoSlider({
customLink: 'a.servicesLink',
responsive: true, | effect: "fadeOutIn",
continuous: true,
updateBefore: true
});
//TEXT ROTATOR
dp(".rotatez").textrotator({
animation: "fade",
separator: ",",
speed: 1700
});
//PORTFOLIO
dp('.portfolioContainer').mixitup({
filterSelector: '.portfolioFilter a',
targetSelector: '.portfolio-item',
effects: ['fade', 'scale']
});
//QUOTE SLIDE
dp("#quote-slider").sudoSlider({
customLink: 'a.quoteLink',
speed: 425,
prevNext: true,
responsive: true,
prevHtml: '<a href="#" class="quote-left-indicator"><i class="icon-arrow-left"></i></a>',
nextHtml: '<a href="#" class="quote-right-indicator"><i class="icon-arrow-right"></i></a>',
useCSS: true,
continuous: true,
effect: "fadeOutIn",
updateBefore: true
});
//MAGNIFIC POPUP
dp('.popup').magnificPopup({
type: 'image'
});
//PARALLAX
dp('.parallaxize').parallax("50%", 0.3);
// CONTACT SLIDER
dp("#contact-slider").sudoSlider({
customLink: 'a.contactLink',
speed: 750,
responsive: true,
prevNext: false,
useCSS: false,
continuous: false,
updateBefore: true,
effect: "fadeOutIn"
});
//Map
dp('#map').gmap3({
map: {
options: {
maxZoom: 15
}
},
marker: {
address: "Haltern am See, Weseler Str. 151", // PUT YOUR ADDRESS HERE
options: {
icon: new google.maps.MarkerImage(
"http://cdn.webiconset.com/map-icons/images/pin6.png",
new google.maps.Size(42, 69, "px", "px")
)
}
}
},
"autofit");
});
dp(window).load(function() {
dp("#lazyload").fadeOut();
}); | speed: 350,
prevNext: false,
useCSS: true, | random_line_split |
util.py | # Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
from decimal import Decimal
import json
import shutil
import subprocess
import time
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
START_P2P_PORT=11000
START_RPC_PORT=11100
def check_json_precision():
"""Make sure json library being used does not lose precision converting DIME values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections):
"""
Wait until everybody has the same block count
"""
while True:
counts = [ x.getblockcount() for x in rpc_connections ]
if counts == [ counts[0] ]*len(counts):
break
time.sleep(1)
def sync_mempools(rpc_connections):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while True:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
break
time.sleep(1)
bitcoind_processes = []
def initialize_chain(test_dir):
"""
Create (or copy from cache) a 200-block-long chain and
4 wallets.
bitcoind and bitcoin-cli must be in search path.
"""
if not os.path.isdir(os.path.join("cache", "node0")):
devnull = open("/dev/null", "w+")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir = os.path.join("cache", "node"+str(i))
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w') as f:
f.write("regtest=1\n");
f.write("rpcuser=rt\n");
f.write("rpcpassword=rt\n");
f.write("port="+str(START_P2P_PORT+i)+"\n");
f.write("rpcport="+str(START_RPC_PORT+i)+"\n");
args = [ "bitcoind", "-keypool=1", "-datadir="+datadir ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(START_P2P_PORT))
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
rpcs = []
for i in range(4):
try:
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpcs.append(AuthServiceProxy(url))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 nodes
# gets 25 mature blocks and 25 immature.
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
# Shut them down, and remove debug.logs:
stop_nodes(rpcs)
wait_bitcoinds()
for i in range(4):
|
for i in range(4):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
def start_nodes(num_nodes, dir):
# Start bitcoinds, and wait for RPC interface to be up and running:
devnull = open("/dev/null", "w+")
for i in range(num_nodes):
datadir = os.path.join(dir, "node"+str(i))
args = [ "bitcoind", "-datadir="+datadir ]
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
# Create&return JSON-RPC connections
rpc_connections = []
for i in range(num_nodes):
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpc_connections.append(AuthServiceProxy(url))
return rpc_connections
def debug_log(dir, n_node):
return os.path.join(dir, "node"+str(n_node), "regtest", "debug.log")
def stop_nodes(nodes):
for i in range(len(nodes)):
nodes[i].stop()
del nodes[:] # Emptying array closes connections as a side effect
def wait_bitcoinds():
# Wait for all bitcoinds to cleanly exit
for bitcoind in bitcoind_processes:
bitcoind.wait()
del bitcoind_processes[:]
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(START_P2P_PORT+node_num)
from_connection.addnode(ip_port, "onetry")
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
| os.remove(debug_log("cache", i)) | conditional_block |
util.py | # Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
from decimal import Decimal
import json
import shutil
import subprocess
import time
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
START_P2P_PORT=11000
START_RPC_PORT=11100
def check_json_precision():
"""Make sure json library being used does not lose precision converting DIME values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections):
"""
Wait until everybody has the same block count
"""
while True:
counts = [ x.getblockcount() for x in rpc_connections ]
if counts == [ counts[0] ]*len(counts):
break
time.sleep(1)
def sync_mempools(rpc_connections):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while True:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
break
time.sleep(1)
bitcoind_processes = []
def initialize_chain(test_dir):
"""
Create (or copy from cache) a 200-block-long chain and
4 wallets.
bitcoind and bitcoin-cli must be in search path.
"""
if not os.path.isdir(os.path.join("cache", "node0")):
devnull = open("/dev/null", "w+")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir = os.path.join("cache", "node"+str(i))
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w') as f:
f.write("regtest=1\n");
f.write("rpcuser=rt\n");
f.write("rpcpassword=rt\n");
f.write("port="+str(START_P2P_PORT+i)+"\n");
f.write("rpcport="+str(START_RPC_PORT+i)+"\n");
args = [ "bitcoind", "-keypool=1", "-datadir="+datadir ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(START_P2P_PORT))
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
rpcs = []
for i in range(4):
try:
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpcs.append(AuthServiceProxy(url))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 nodes
# gets 25 mature blocks and 25 immature.
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
# Shut them down, and remove debug.logs:
stop_nodes(rpcs)
wait_bitcoinds()
for i in range(4):
os.remove(debug_log("cache", i))
for i in range(4):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
def start_nodes(num_nodes, dir):
# Start bitcoinds, and wait for RPC interface to be up and running:
devnull = open("/dev/null", "w+")
for i in range(num_nodes):
datadir = os.path.join(dir, "node"+str(i))
args = [ "bitcoind", "-datadir="+datadir ]
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
# Create&return JSON-RPC connections
rpc_connections = []
for i in range(num_nodes):
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpc_connections.append(AuthServiceProxy(url))
return rpc_connections
def debug_log(dir, n_node):
return os.path.join(dir, "node"+str(n_node), "regtest", "debug.log")
def stop_nodes(nodes):
for i in range(len(nodes)):
nodes[i].stop()
del nodes[:] # Emptying array closes connections as a side effect
def wait_bitcoinds():
# Wait for all bitcoinds to cleanly exit
for bitcoind in bitcoind_processes:
bitcoind.wait()
del bitcoind_processes[:]
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(START_P2P_PORT+node_num)
from_connection.addnode(ip_port, "onetry")
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2))) | # Copyright (c) 2014 The Bitcoin Core developers | random_line_split |
|
util.py | # Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
from decimal import Decimal
import json
import shutil
import subprocess
import time
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
START_P2P_PORT=11000
START_RPC_PORT=11100
def check_json_precision():
"""Make sure json library being used does not lose precision converting DIME values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections):
"""
Wait until everybody has the same block count
"""
while True:
counts = [ x.getblockcount() for x in rpc_connections ]
if counts == [ counts[0] ]*len(counts):
break
time.sleep(1)
def | (rpc_connections):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while True:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
break
time.sleep(1)
bitcoind_processes = []
def initialize_chain(test_dir):
"""
Create (or copy from cache) a 200-block-long chain and
4 wallets.
bitcoind and bitcoin-cli must be in search path.
"""
if not os.path.isdir(os.path.join("cache", "node0")):
devnull = open("/dev/null", "w+")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir = os.path.join("cache", "node"+str(i))
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w') as f:
f.write("regtest=1\n");
f.write("rpcuser=rt\n");
f.write("rpcpassword=rt\n");
f.write("port="+str(START_P2P_PORT+i)+"\n");
f.write("rpcport="+str(START_RPC_PORT+i)+"\n");
args = [ "bitcoind", "-keypool=1", "-datadir="+datadir ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(START_P2P_PORT))
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
rpcs = []
for i in range(4):
try:
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpcs.append(AuthServiceProxy(url))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 nodes
# gets 25 mature blocks and 25 immature.
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
# Shut them down, and remove debug.logs:
stop_nodes(rpcs)
wait_bitcoinds()
for i in range(4):
os.remove(debug_log("cache", i))
for i in range(4):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
def start_nodes(num_nodes, dir):
# Start bitcoinds, and wait for RPC interface to be up and running:
devnull = open("/dev/null", "w+")
for i in range(num_nodes):
datadir = os.path.join(dir, "node"+str(i))
args = [ "bitcoind", "-datadir="+datadir ]
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
# Create&return JSON-RPC connections
rpc_connections = []
for i in range(num_nodes):
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpc_connections.append(AuthServiceProxy(url))
return rpc_connections
def debug_log(dir, n_node):
return os.path.join(dir, "node"+str(n_node), "regtest", "debug.log")
def stop_nodes(nodes):
for i in range(len(nodes)):
nodes[i].stop()
del nodes[:] # Emptying array closes connections as a side effect
def wait_bitcoinds():
# Wait for all bitcoinds to cleanly exit
for bitcoind in bitcoind_processes:
bitcoind.wait()
del bitcoind_processes[:]
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(START_P2P_PORT+node_num)
from_connection.addnode(ip_port, "onetry")
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
| sync_mempools | identifier_name |
util.py | # Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
from decimal import Decimal
import json
import shutil
import subprocess
import time
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
START_P2P_PORT=11000
START_RPC_PORT=11100
def check_json_precision():
"""Make sure json library being used does not lose precision converting DIME values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def sync_blocks(rpc_connections):
"""
Wait until everybody has the same block count
"""
while True:
counts = [ x.getblockcount() for x in rpc_connections ]
if counts == [ counts[0] ]*len(counts):
break
time.sleep(1)
def sync_mempools(rpc_connections):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while True:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
break
time.sleep(1)
bitcoind_processes = []
def initialize_chain(test_dir):
"""
Create (or copy from cache) a 200-block-long chain and
4 wallets.
bitcoind and bitcoin-cli must be in search path.
"""
if not os.path.isdir(os.path.join("cache", "node0")):
devnull = open("/dev/null", "w+")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir = os.path.join("cache", "node"+str(i))
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w') as f:
f.write("regtest=1\n");
f.write("rpcuser=rt\n");
f.write("rpcpassword=rt\n");
f.write("port="+str(START_P2P_PORT+i)+"\n");
f.write("rpcport="+str(START_RPC_PORT+i)+"\n");
args = [ "bitcoind", "-keypool=1", "-datadir="+datadir ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(START_P2P_PORT))
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
rpcs = []
for i in range(4):
try:
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpcs.append(AuthServiceProxy(url))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 nodes
# gets 25 mature blocks and 25 immature.
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
for i in range(4):
rpcs[i].setgenerate(True, 25)
sync_blocks(rpcs)
# Shut them down, and remove debug.logs:
stop_nodes(rpcs)
wait_bitcoinds()
for i in range(4):
os.remove(debug_log("cache", i))
for i in range(4):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
def start_nodes(num_nodes, dir):
# Start bitcoinds, and wait for RPC interface to be up and running:
devnull = open("/dev/null", "w+")
for i in range(num_nodes):
datadir = os.path.join(dir, "node"+str(i))
args = [ "bitcoind", "-datadir="+datadir ]
bitcoind_processes.append(subprocess.Popen(args))
subprocess.check_call([ "bitcoin-cli", "-datadir="+datadir,
"-rpcwait", "getblockcount"], stdout=devnull)
devnull.close()
# Create&return JSON-RPC connections
rpc_connections = []
for i in range(num_nodes):
url = "http://rt:[email protected]:%d"%(START_RPC_PORT+i,)
rpc_connections.append(AuthServiceProxy(url))
return rpc_connections
def debug_log(dir, n_node):
return os.path.join(dir, "node"+str(n_node), "regtest", "debug.log")
def stop_nodes(nodes):
for i in range(len(nodes)):
nodes[i].stop()
del nodes[:] # Emptying array closes connections as a side effect
def wait_bitcoinds():
# Wait for all bitcoinds to cleanly exit
|
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(START_P2P_PORT+node_num)
from_connection.addnode(ip_port, "onetry")
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
| for bitcoind in bitcoind_processes:
bitcoind.wait()
del bitcoind_processes[:] | identifier_body |
install-shrinkwrapped-git.js | var fs = require('fs')
var path = require('path')
var resolve = path.resolve
var osenv = require('osenv')
var mkdirp = require('mkdirp')
var rimraf = require('rimraf')
var test = require('tap').test
var npm = require('../../lib/npm')
var common = require('../common-tap')
var chain = require('slide').chain
var mockPath = resolve(__dirname, 'install-shrinkwrapped')
var parentPath = resolve(mockPath, 'parent')
var parentNodeModulesPath = path.join(parentPath, 'node_modules')
var outdatedNodeModulesPath = resolve(mockPath, 'node-modules-backup')
var childPath = resolve(mockPath, 'child.git')
var gitDaemon
var gitDaemonPID
var git
var parentPackageJSON = JSON.stringify({
name: 'parent',
version: '0.1.0'
})
var childPackageJSON = JSON.stringify({
name: 'child',
version: '0.1.0'
})
test('setup', function (t) {
cleanup()
setup(function (err, result) {
t.ifError(err, 'git started up successfully')
if (!err) {
gitDaemon = result[result.length - 2]
gitDaemonPID = result[result.length - 1]
}
t.end()
})
})
test('shrinkwrapped git dependency got updated', function (t) {
t.comment('test for https://github.com/npm/npm/issues/12718')
// Prepare the child package git repo with two commits
prepareChildAndGetRefs(function (refs) {
chain([
// Install & shrinkwrap child package's first commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[0]]],
// Backup node_modules with the first commit
[fs.rename, parentNodeModulesPath, outdatedNodeModulesPath],
// Install & shrinkwrap child package's second commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[1]]],
// Restore node_modules with the first commit
[rimraf, parentNodeModulesPath],
[fs.rename, outdatedNodeModulesPath, parentNodeModulesPath],
// Update node_modules
[npm.commands.install, []]
], function () {
var childPackageJSON = require(path.join(parentNodeModulesPath, 'child', 'package.json'))
t.equal(
childPackageJSON._resolved,
'git://localhost:1234/child.git#' + refs[1],
"Child package wasn't updated"
)
t.end()
})
})
})
test('clean', function (t) {
gitDaemon.on('close', function () {
cleanup()
t.end()
})
process.kill(gitDaemonPID)
})
function | (cb) {
// Setup parent package
mkdirp.sync(parentPath)
fs.writeFileSync(resolve(parentPath, 'package.json'), parentPackageJSON)
process.chdir(parentPath)
// Setup child
mkdirp.sync(childPath)
fs.writeFileSync(resolve(childPath, 'package.json'), childPackageJSON)
// Setup npm and then git
npm.load({
registry: common.registry,
loglevel: 'silent',
save: true // Always install packages with --save
}, function () {
// It's important to initialize git after npm because it uses config
initializeGit(cb)
})
}
function cleanup () {
process.chdir(osenv.tmpdir())
rimraf.sync(mockPath)
rimraf.sync(common['npm_config_cache'])
}
function prepareChildAndGetRefs (cb) {
var opts = { cwd: childPath, env: { PATH: process.env.PATH } }
chain([
[fs.writeFile, path.join(childPath, 'README.md'), ''],
git.chainableExec(['add', 'README.md'], opts),
git.chainableExec(['commit', '-m', 'Add README'], opts),
git.chainableExec(['log', '--pretty=format:"%H"', '-2'], opts)
], function () {
var gitLogStdout = arguments[arguments.length - 1]
var refs = gitLogStdout[gitLogStdout.length - 1].split('\n').map(function (ref) {
return ref.match(/^"(.+)"$/)[1]
}).reverse() // Reverse refs order: last, first -> first, last
cb(refs)
})
}
function initializeGit (cb) {
git = require('../../lib/utils/git')
common.makeGitRepo({
path: childPath,
commands: [startGitDaemon]
}, cb)
}
function startGitDaemon (cb) {
var daemon = git.spawn(
[
'daemon',
'--verbose',
'--listen=localhost',
'--export-all',
'--base-path=' + mockPath, // Path to the dir that contains child.git
'--reuseaddr',
'--port=1234'
],
{
cwd: parentPath,
env: process.env,
stdio: ['pipe', 'pipe', 'pipe']
}
)
daemon.stderr.on('data', function findChild (c) {
var cpid = c.toString().match(/^\[(\d+)\]/)
if (cpid[1]) {
this.removeListener('data', findChild)
cb(null, [daemon, cpid[1]])
}
})
}
| setup | identifier_name |
install-shrinkwrapped-git.js | var fs = require('fs')
var path = require('path')
var resolve = path.resolve
var osenv = require('osenv')
var mkdirp = require('mkdirp')
var rimraf = require('rimraf')
var test = require('tap').test
var npm = require('../../lib/npm')
var common = require('../common-tap')
var chain = require('slide').chain
var mockPath = resolve(__dirname, 'install-shrinkwrapped')
var parentPath = resolve(mockPath, 'parent')
var parentNodeModulesPath = path.join(parentPath, 'node_modules')
var outdatedNodeModulesPath = resolve(mockPath, 'node-modules-backup')
var childPath = resolve(mockPath, 'child.git')
var gitDaemon
var gitDaemonPID
var git
var parentPackageJSON = JSON.stringify({
name: 'parent',
version: '0.1.0'
})
var childPackageJSON = JSON.stringify({
name: 'child',
version: '0.1.0'
})
test('setup', function (t) {
cleanup()
setup(function (err, result) {
t.ifError(err, 'git started up successfully')
if (!err) {
gitDaemon = result[result.length - 2]
gitDaemonPID = result[result.length - 1]
}
t.end()
})
})
test('shrinkwrapped git dependency got updated', function (t) {
t.comment('test for https://github.com/npm/npm/issues/12718')
// Prepare the child package git repo with two commits
prepareChildAndGetRefs(function (refs) {
chain([
// Install & shrinkwrap child package's first commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[0]]],
// Backup node_modules with the first commit
[fs.rename, parentNodeModulesPath, outdatedNodeModulesPath],
// Install & shrinkwrap child package's second commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[1]]],
// Restore node_modules with the first commit
[rimraf, parentNodeModulesPath],
[fs.rename, outdatedNodeModulesPath, parentNodeModulesPath],
// Update node_modules
[npm.commands.install, []]
], function () {
var childPackageJSON = require(path.join(parentNodeModulesPath, 'child', 'package.json'))
t.equal(
childPackageJSON._resolved,
'git://localhost:1234/child.git#' + refs[1],
"Child package wasn't updated"
)
t.end()
})
})
})
test('clean', function (t) {
gitDaemon.on('close', function () {
cleanup()
t.end()
})
process.kill(gitDaemonPID)
})
function setup (cb) {
// Setup parent package
mkdirp.sync(parentPath)
fs.writeFileSync(resolve(parentPath, 'package.json'), parentPackageJSON)
process.chdir(parentPath)
// Setup child
mkdirp.sync(childPath)
fs.writeFileSync(resolve(childPath, 'package.json'), childPackageJSON)
// Setup npm and then git
npm.load({
registry: common.registry,
loglevel: 'silent',
save: true // Always install packages with --save
}, function () {
// It's important to initialize git after npm because it uses config
initializeGit(cb)
})
}
function cleanup () { |
function prepareChildAndGetRefs (cb) {
var opts = { cwd: childPath, env: { PATH: process.env.PATH } }
chain([
[fs.writeFile, path.join(childPath, 'README.md'), ''],
git.chainableExec(['add', 'README.md'], opts),
git.chainableExec(['commit', '-m', 'Add README'], opts),
git.chainableExec(['log', '--pretty=format:"%H"', '-2'], opts)
], function () {
var gitLogStdout = arguments[arguments.length - 1]
var refs = gitLogStdout[gitLogStdout.length - 1].split('\n').map(function (ref) {
return ref.match(/^"(.+)"$/)[1]
}).reverse() // Reverse refs order: last, first -> first, last
cb(refs)
})
}
function initializeGit (cb) {
git = require('../../lib/utils/git')
common.makeGitRepo({
path: childPath,
commands: [startGitDaemon]
}, cb)
}
function startGitDaemon (cb) {
var daemon = git.spawn(
[
'daemon',
'--verbose',
'--listen=localhost',
'--export-all',
'--base-path=' + mockPath, // Path to the dir that contains child.git
'--reuseaddr',
'--port=1234'
],
{
cwd: parentPath,
env: process.env,
stdio: ['pipe', 'pipe', 'pipe']
}
)
daemon.stderr.on('data', function findChild (c) {
var cpid = c.toString().match(/^\[(\d+)\]/)
if (cpid[1]) {
this.removeListener('data', findChild)
cb(null, [daemon, cpid[1]])
}
})
} | process.chdir(osenv.tmpdir())
rimraf.sync(mockPath)
rimraf.sync(common['npm_config_cache'])
} | random_line_split |
install-shrinkwrapped-git.js | var fs = require('fs')
var path = require('path')
var resolve = path.resolve
var osenv = require('osenv')
var mkdirp = require('mkdirp')
var rimraf = require('rimraf')
var test = require('tap').test
var npm = require('../../lib/npm')
var common = require('../common-tap')
var chain = require('slide').chain
var mockPath = resolve(__dirname, 'install-shrinkwrapped')
var parentPath = resolve(mockPath, 'parent')
var parentNodeModulesPath = path.join(parentPath, 'node_modules')
var outdatedNodeModulesPath = resolve(mockPath, 'node-modules-backup')
var childPath = resolve(mockPath, 'child.git')
var gitDaemon
var gitDaemonPID
var git
var parentPackageJSON = JSON.stringify({
name: 'parent',
version: '0.1.0'
})
var childPackageJSON = JSON.stringify({
name: 'child',
version: '0.1.0'
})
test('setup', function (t) {
cleanup()
setup(function (err, result) {
t.ifError(err, 'git started up successfully')
if (!err) {
gitDaemon = result[result.length - 2]
gitDaemonPID = result[result.length - 1]
}
t.end()
})
})
test('shrinkwrapped git dependency got updated', function (t) {
t.comment('test for https://github.com/npm/npm/issues/12718')
// Prepare the child package git repo with two commits
prepareChildAndGetRefs(function (refs) {
chain([
// Install & shrinkwrap child package's first commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[0]]],
// Backup node_modules with the first commit
[fs.rename, parentNodeModulesPath, outdatedNodeModulesPath],
// Install & shrinkwrap child package's second commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[1]]],
// Restore node_modules with the first commit
[rimraf, parentNodeModulesPath],
[fs.rename, outdatedNodeModulesPath, parentNodeModulesPath],
// Update node_modules
[npm.commands.install, []]
], function () {
var childPackageJSON = require(path.join(parentNodeModulesPath, 'child', 'package.json'))
t.equal(
childPackageJSON._resolved,
'git://localhost:1234/child.git#' + refs[1],
"Child package wasn't updated"
)
t.end()
})
})
})
test('clean', function (t) {
gitDaemon.on('close', function () {
cleanup()
t.end()
})
process.kill(gitDaemonPID)
})
function setup (cb) {
// Setup parent package
mkdirp.sync(parentPath)
fs.writeFileSync(resolve(parentPath, 'package.json'), parentPackageJSON)
process.chdir(parentPath)
// Setup child
mkdirp.sync(childPath)
fs.writeFileSync(resolve(childPath, 'package.json'), childPackageJSON)
// Setup npm and then git
npm.load({
registry: common.registry,
loglevel: 'silent',
save: true // Always install packages with --save
}, function () {
// It's important to initialize git after npm because it uses config
initializeGit(cb)
})
}
function cleanup () {
process.chdir(osenv.tmpdir())
rimraf.sync(mockPath)
rimraf.sync(common['npm_config_cache'])
}
function prepareChildAndGetRefs (cb) {
var opts = { cwd: childPath, env: { PATH: process.env.PATH } }
chain([
[fs.writeFile, path.join(childPath, 'README.md'), ''],
git.chainableExec(['add', 'README.md'], opts),
git.chainableExec(['commit', '-m', 'Add README'], opts),
git.chainableExec(['log', '--pretty=format:"%H"', '-2'], opts)
], function () {
var gitLogStdout = arguments[arguments.length - 1]
var refs = gitLogStdout[gitLogStdout.length - 1].split('\n').map(function (ref) {
return ref.match(/^"(.+)"$/)[1]
}).reverse() // Reverse refs order: last, first -> first, last
cb(refs)
})
}
function initializeGit (cb) {
git = require('../../lib/utils/git')
common.makeGitRepo({
path: childPath,
commands: [startGitDaemon]
}, cb)
}
function startGitDaemon (cb) {
var daemon = git.spawn(
[
'daemon',
'--verbose',
'--listen=localhost',
'--export-all',
'--base-path=' + mockPath, // Path to the dir that contains child.git
'--reuseaddr',
'--port=1234'
],
{
cwd: parentPath,
env: process.env,
stdio: ['pipe', 'pipe', 'pipe']
}
)
daemon.stderr.on('data', function findChild (c) {
var cpid = c.toString().match(/^\[(\d+)\]/)
if (cpid[1]) |
})
}
| {
this.removeListener('data', findChild)
cb(null, [daemon, cpid[1]])
} | conditional_block |
install-shrinkwrapped-git.js | var fs = require('fs')
var path = require('path')
var resolve = path.resolve
var osenv = require('osenv')
var mkdirp = require('mkdirp')
var rimraf = require('rimraf')
var test = require('tap').test
var npm = require('../../lib/npm')
var common = require('../common-tap')
var chain = require('slide').chain
var mockPath = resolve(__dirname, 'install-shrinkwrapped')
var parentPath = resolve(mockPath, 'parent')
var parentNodeModulesPath = path.join(parentPath, 'node_modules')
var outdatedNodeModulesPath = resolve(mockPath, 'node-modules-backup')
var childPath = resolve(mockPath, 'child.git')
var gitDaemon
var gitDaemonPID
var git
var parentPackageJSON = JSON.stringify({
name: 'parent',
version: '0.1.0'
})
var childPackageJSON = JSON.stringify({
name: 'child',
version: '0.1.0'
})
test('setup', function (t) {
cleanup()
setup(function (err, result) {
t.ifError(err, 'git started up successfully')
if (!err) {
gitDaemon = result[result.length - 2]
gitDaemonPID = result[result.length - 1]
}
t.end()
})
})
test('shrinkwrapped git dependency got updated', function (t) {
t.comment('test for https://github.com/npm/npm/issues/12718')
// Prepare the child package git repo with two commits
prepareChildAndGetRefs(function (refs) {
chain([
// Install & shrinkwrap child package's first commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[0]]],
// Backup node_modules with the first commit
[fs.rename, parentNodeModulesPath, outdatedNodeModulesPath],
// Install & shrinkwrap child package's second commit
[npm.commands.install, ['git://localhost:1234/child.git#' + refs[1]]],
// Restore node_modules with the first commit
[rimraf, parentNodeModulesPath],
[fs.rename, outdatedNodeModulesPath, parentNodeModulesPath],
// Update node_modules
[npm.commands.install, []]
], function () {
var childPackageJSON = require(path.join(parentNodeModulesPath, 'child', 'package.json'))
t.equal(
childPackageJSON._resolved,
'git://localhost:1234/child.git#' + refs[1],
"Child package wasn't updated"
)
t.end()
})
})
})
test('clean', function (t) {
gitDaemon.on('close', function () {
cleanup()
t.end()
})
process.kill(gitDaemonPID)
})
function setup (cb) |
function cleanup () {
process.chdir(osenv.tmpdir())
rimraf.sync(mockPath)
rimraf.sync(common['npm_config_cache'])
}
function prepareChildAndGetRefs (cb) {
var opts = { cwd: childPath, env: { PATH: process.env.PATH } }
chain([
[fs.writeFile, path.join(childPath, 'README.md'), ''],
git.chainableExec(['add', 'README.md'], opts),
git.chainableExec(['commit', '-m', 'Add README'], opts),
git.chainableExec(['log', '--pretty=format:"%H"', '-2'], opts)
], function () {
var gitLogStdout = arguments[arguments.length - 1]
var refs = gitLogStdout[gitLogStdout.length - 1].split('\n').map(function (ref) {
return ref.match(/^"(.+)"$/)[1]
}).reverse() // Reverse refs order: last, first -> first, last
cb(refs)
})
}
function initializeGit (cb) {
git = require('../../lib/utils/git')
common.makeGitRepo({
path: childPath,
commands: [startGitDaemon]
}, cb)
}
function startGitDaemon (cb) {
var daemon = git.spawn(
[
'daemon',
'--verbose',
'--listen=localhost',
'--export-all',
'--base-path=' + mockPath, // Path to the dir that contains child.git
'--reuseaddr',
'--port=1234'
],
{
cwd: parentPath,
env: process.env,
stdio: ['pipe', 'pipe', 'pipe']
}
)
daemon.stderr.on('data', function findChild (c) {
var cpid = c.toString().match(/^\[(\d+)\]/)
if (cpid[1]) {
this.removeListener('data', findChild)
cb(null, [daemon, cpid[1]])
}
})
}
| {
// Setup parent package
mkdirp.sync(parentPath)
fs.writeFileSync(resolve(parentPath, 'package.json'), parentPackageJSON)
process.chdir(parentPath)
// Setup child
mkdirp.sync(childPath)
fs.writeFileSync(resolve(childPath, 'package.json'), childPackageJSON)
// Setup npm and then git
npm.load({
registry: common.registry,
loglevel: 'silent',
save: true // Always install packages with --save
}, function () {
// It's important to initialize git after npm because it uses config
initializeGit(cb)
})
} | identifier_body |
machine_specs.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Machine Specifications: the rows of this table represent the default
values of machine "models" so that users don't need to manaully enter the
low level details of each one since this is mostly repeated data in large
grid deployments, such as Saphire """
from datetime import datetime
from sqlalchemy import (Table, Column, Integer, DateTime, Sequence, String,
ForeignKey, UniqueConstraint)
from sqlalchemy.orm import relation, backref
from aquilon.aqdb.model import Base, Model, Vendor, Cpu
from aquilon.aqdb.model.disk import disk_types, controller_types
from aquilon.aqdb.column_types import Enum
class MachineSpecs(Base):
""" Captures the configuration hardware components for a given model """
#TODO: Maybe this entire table is in fact a part of the model "subtype"
_def_cpu_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 4 }
_def_nic_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 2 }
_def_memory = { 'workstation': 2048, 'blade': 8192, 'rackmount': 16384 }
__tablename__ = 'machine_specs'
id = Column( Integer, Sequence('mach_specs_id_seq'), primary_key=True)
model_id = Column(Integer, ForeignKey('model.id',
name='mach_spec_model_fk'),
nullable=False)
cpu_id = Column(Integer, ForeignKey('cpu.id', name='mach_spec_cpu_fk'), nullable=False)
cpu_quantity = Column(Integer, nullable=False) #Constrain to below 512?
memory = Column(Integer, nullable=False, default=0)
disk_type = Column(Enum(64, disk_types), nullable=False)
disk_capacity = Column(Integer, nullable=False, default=36)
controller_type = Column(Enum(64, controller_types), nullable=False)
nic_count = Column(Integer, nullable=False, default=2)
creation_date = Column('creation_date', DateTime, default=datetime.now) |
model = relation(Model, backref=backref('machine_specs', uselist=False))
cpu = relation(Cpu)
machine_specs = MachineSpecs.__table__
machine_specs.primary_key.name='machine_specs_pk'
#for now, need a UK on model_id. WILL be a name AND a model_id as UK.
machine_specs.append_constraint(
UniqueConstraint('model_id', name='machine_specs_model_uk'))
table = machine_specs
def populate(sess, *args, **kw):
if len(sess.query(MachineSpecs).all()) < 1:
from sqlalchemy import insert
specs = [["hs20-884345u", "xeon_2660", 2, 8192, 'scsi', 36, 2],
["hs21-8853l5u", "xeon_2660", 2, 8192, 'scsi', 68, 2],
["poweredge_6650", "xeon_3000", 4, 16384, 'scsi', 36, 2],
["bl45p", "opteron_2600", 2, 32768, 'scsi', 36, 2],
["bl260c", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["vb1205xm", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["aurora_model", "aurora_cpu", 0, 0, 'scsi', 0, 0]]
for ms in specs:
try:
dbmodel = sess.query(Model).filter_by(name=ms[0]).one()
dbcpu = sess.query(Cpu).filter_by(name=ms[1]).one()
cpu_quantity = ms[2]
memory = ms[3]
disk_type = 'local'
controller_type = ms[4]
disk_capacity = ms[5]
nic_count = ms[6]
dbms = MachineSpecs(model=dbmodel, cpu=dbcpu,
cpu_quantity=cpu_quantity, memory=memory,
disk_type=disk_type, controller_type=controller_type,
disk_capacity=disk_capacity, nic_count=nic_count)
sess.add(dbms)
except Exception,e:
sess.rollback()
print 'Creating machine specs: %s' % e
continue
try:
sess.commit()
except Exception,e:
sess.rollback()
print 'Commiting ',e
continue | comments = Column('comments', String(255), nullable=True) | random_line_split |
machine_specs.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Machine Specifications: the rows of this table represent the default
values of machine "models" so that users don't need to manaully enter the
low level details of each one since this is mostly repeated data in large
grid deployments, such as Saphire """
from datetime import datetime
from sqlalchemy import (Table, Column, Integer, DateTime, Sequence, String,
ForeignKey, UniqueConstraint)
from sqlalchemy.orm import relation, backref
from aquilon.aqdb.model import Base, Model, Vendor, Cpu
from aquilon.aqdb.model.disk import disk_types, controller_types
from aquilon.aqdb.column_types import Enum
class MachineSpecs(Base):
""" Captures the configuration hardware components for a given model """
#TODO: Maybe this entire table is in fact a part of the model "subtype"
_def_cpu_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 4 }
_def_nic_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 2 }
_def_memory = { 'workstation': 2048, 'blade': 8192, 'rackmount': 16384 }
__tablename__ = 'machine_specs'
id = Column( Integer, Sequence('mach_specs_id_seq'), primary_key=True)
model_id = Column(Integer, ForeignKey('model.id',
name='mach_spec_model_fk'),
nullable=False)
cpu_id = Column(Integer, ForeignKey('cpu.id', name='mach_spec_cpu_fk'), nullable=False)
cpu_quantity = Column(Integer, nullable=False) #Constrain to below 512?
memory = Column(Integer, nullable=False, default=0)
disk_type = Column(Enum(64, disk_types), nullable=False)
disk_capacity = Column(Integer, nullable=False, default=36)
controller_type = Column(Enum(64, controller_types), nullable=False)
nic_count = Column(Integer, nullable=False, default=2)
creation_date = Column('creation_date', DateTime, default=datetime.now)
comments = Column('comments', String(255), nullable=True)
model = relation(Model, backref=backref('machine_specs', uselist=False))
cpu = relation(Cpu)
machine_specs = MachineSpecs.__table__
machine_specs.primary_key.name='machine_specs_pk'
#for now, need a UK on model_id. WILL be a name AND a model_id as UK.
machine_specs.append_constraint(
UniqueConstraint('model_id', name='machine_specs_model_uk'))
table = machine_specs
def populate(sess, *args, **kw):
| disk_capacity = ms[5]
nic_count = ms[6]
dbms = MachineSpecs(model=dbmodel, cpu=dbcpu,
cpu_quantity=cpu_quantity, memory=memory,
disk_type=disk_type, controller_type=controller_type,
disk_capacity=disk_capacity, nic_count=nic_count)
sess.add(dbms)
except Exception,e:
sess.rollback()
print 'Creating machine specs: %s' % e
continue
try:
sess.commit()
except Exception,e:
sess.rollback()
print 'Commiting ',e
continue
| if len(sess.query(MachineSpecs).all()) < 1:
from sqlalchemy import insert
specs = [["hs20-884345u", "xeon_2660", 2, 8192, 'scsi', 36, 2],
["hs21-8853l5u", "xeon_2660", 2, 8192, 'scsi', 68, 2],
["poweredge_6650", "xeon_3000", 4, 16384, 'scsi', 36, 2],
["bl45p", "opteron_2600", 2, 32768, 'scsi', 36, 2],
["bl260c", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["vb1205xm", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["aurora_model", "aurora_cpu", 0, 0, 'scsi', 0, 0]]
for ms in specs:
try:
dbmodel = sess.query(Model).filter_by(name=ms[0]).one()
dbcpu = sess.query(Cpu).filter_by(name=ms[1]).one()
cpu_quantity = ms[2]
memory = ms[3]
disk_type = 'local'
controller_type = ms[4] | identifier_body |
machine_specs.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Machine Specifications: the rows of this table represent the default
values of machine "models" so that users don't need to manaully enter the
low level details of each one since this is mostly repeated data in large
grid deployments, such as Saphire """
from datetime import datetime
from sqlalchemy import (Table, Column, Integer, DateTime, Sequence, String,
ForeignKey, UniqueConstraint)
from sqlalchemy.orm import relation, backref
from aquilon.aqdb.model import Base, Model, Vendor, Cpu
from aquilon.aqdb.model.disk import disk_types, controller_types
from aquilon.aqdb.column_types import Enum
class | (Base):
""" Captures the configuration hardware components for a given model """
#TODO: Maybe this entire table is in fact a part of the model "subtype"
_def_cpu_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 4 }
_def_nic_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 2 }
_def_memory = { 'workstation': 2048, 'blade': 8192, 'rackmount': 16384 }
__tablename__ = 'machine_specs'
id = Column( Integer, Sequence('mach_specs_id_seq'), primary_key=True)
model_id = Column(Integer, ForeignKey('model.id',
name='mach_spec_model_fk'),
nullable=False)
cpu_id = Column(Integer, ForeignKey('cpu.id', name='mach_spec_cpu_fk'), nullable=False)
cpu_quantity = Column(Integer, nullable=False) #Constrain to below 512?
memory = Column(Integer, nullable=False, default=0)
disk_type = Column(Enum(64, disk_types), nullable=False)
disk_capacity = Column(Integer, nullable=False, default=36)
controller_type = Column(Enum(64, controller_types), nullable=False)
nic_count = Column(Integer, nullable=False, default=2)
creation_date = Column('creation_date', DateTime, default=datetime.now)
comments = Column('comments', String(255), nullable=True)
model = relation(Model, backref=backref('machine_specs', uselist=False))
cpu = relation(Cpu)
machine_specs = MachineSpecs.__table__
machine_specs.primary_key.name='machine_specs_pk'
#for now, need a UK on model_id. WILL be a name AND a model_id as UK.
machine_specs.append_constraint(
UniqueConstraint('model_id', name='machine_specs_model_uk'))
table = machine_specs
def populate(sess, *args, **kw):
if len(sess.query(MachineSpecs).all()) < 1:
from sqlalchemy import insert
specs = [["hs20-884345u", "xeon_2660", 2, 8192, 'scsi', 36, 2],
["hs21-8853l5u", "xeon_2660", 2, 8192, 'scsi', 68, 2],
["poweredge_6650", "xeon_3000", 4, 16384, 'scsi', 36, 2],
["bl45p", "opteron_2600", 2, 32768, 'scsi', 36, 2],
["bl260c", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["vb1205xm", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["aurora_model", "aurora_cpu", 0, 0, 'scsi', 0, 0]]
for ms in specs:
try:
dbmodel = sess.query(Model).filter_by(name=ms[0]).one()
dbcpu = sess.query(Cpu).filter_by(name=ms[1]).one()
cpu_quantity = ms[2]
memory = ms[3]
disk_type = 'local'
controller_type = ms[4]
disk_capacity = ms[5]
nic_count = ms[6]
dbms = MachineSpecs(model=dbmodel, cpu=dbcpu,
cpu_quantity=cpu_quantity, memory=memory,
disk_type=disk_type, controller_type=controller_type,
disk_capacity=disk_capacity, nic_count=nic_count)
sess.add(dbms)
except Exception,e:
sess.rollback()
print 'Creating machine specs: %s' % e
continue
try:
sess.commit()
except Exception,e:
sess.rollback()
print 'Commiting ',e
continue
| MachineSpecs | identifier_name |
machine_specs.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Machine Specifications: the rows of this table represent the default
values of machine "models" so that users don't need to manaully enter the
low level details of each one since this is mostly repeated data in large
grid deployments, such as Saphire """
from datetime import datetime
from sqlalchemy import (Table, Column, Integer, DateTime, Sequence, String,
ForeignKey, UniqueConstraint)
from sqlalchemy.orm import relation, backref
from aquilon.aqdb.model import Base, Model, Vendor, Cpu
from aquilon.aqdb.model.disk import disk_types, controller_types
from aquilon.aqdb.column_types import Enum
class MachineSpecs(Base):
""" Captures the configuration hardware components for a given model """
#TODO: Maybe this entire table is in fact a part of the model "subtype"
_def_cpu_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 4 }
_def_nic_cnt = { 'workstation':1, 'blade': 2, 'rackmount' : 2 }
_def_memory = { 'workstation': 2048, 'blade': 8192, 'rackmount': 16384 }
__tablename__ = 'machine_specs'
id = Column( Integer, Sequence('mach_specs_id_seq'), primary_key=True)
model_id = Column(Integer, ForeignKey('model.id',
name='mach_spec_model_fk'),
nullable=False)
cpu_id = Column(Integer, ForeignKey('cpu.id', name='mach_spec_cpu_fk'), nullable=False)
cpu_quantity = Column(Integer, nullable=False) #Constrain to below 512?
memory = Column(Integer, nullable=False, default=0)
disk_type = Column(Enum(64, disk_types), nullable=False)
disk_capacity = Column(Integer, nullable=False, default=36)
controller_type = Column(Enum(64, controller_types), nullable=False)
nic_count = Column(Integer, nullable=False, default=2)
creation_date = Column('creation_date', DateTime, default=datetime.now)
comments = Column('comments', String(255), nullable=True)
model = relation(Model, backref=backref('machine_specs', uselist=False))
cpu = relation(Cpu)
machine_specs = MachineSpecs.__table__
machine_specs.primary_key.name='machine_specs_pk'
#for now, need a UK on model_id. WILL be a name AND a model_id as UK.
machine_specs.append_constraint(
UniqueConstraint('model_id', name='machine_specs_model_uk'))
table = machine_specs
def populate(sess, *args, **kw):
if len(sess.query(MachineSpecs).all()) < 1:
from sqlalchemy import insert
specs = [["hs20-884345u", "xeon_2660", 2, 8192, 'scsi', 36, 2],
["hs21-8853l5u", "xeon_2660", 2, 8192, 'scsi', 68, 2],
["poweredge_6650", "xeon_3000", 4, 16384, 'scsi', 36, 2],
["bl45p", "opteron_2600", 2, 32768, 'scsi', 36, 2],
["bl260c", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["vb1205xm", "xeon_2500", 2, 24576, 'scsi', 36, 2],
["aurora_model", "aurora_cpu", 0, 0, 'scsi', 0, 0]]
for ms in specs:
| except Exception,e:
sess.rollback()
print 'Commiting ',e
continue
| try:
dbmodel = sess.query(Model).filter_by(name=ms[0]).one()
dbcpu = sess.query(Cpu).filter_by(name=ms[1]).one()
cpu_quantity = ms[2]
memory = ms[3]
disk_type = 'local'
controller_type = ms[4]
disk_capacity = ms[5]
nic_count = ms[6]
dbms = MachineSpecs(model=dbmodel, cpu=dbcpu,
cpu_quantity=cpu_quantity, memory=memory,
disk_type=disk_type, controller_type=controller_type,
disk_capacity=disk_capacity, nic_count=nic_count)
sess.add(dbms)
except Exception,e:
sess.rollback()
print 'Creating machine specs: %s' % e
continue
try:
sess.commit() | conditional_block |
rastrigin.rs | // Copyright 2016 Martin Ankerl.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate differential_evolution;
use differential_evolution::self_adaptive_de;
use std::f32::consts::PI;
use std::env;
// The Rastrigin function is a non-convex function used as a
// performance test problem for optimization algorithms.
// see https://en.wikipedia.org/wiki/Rastrigin_function
fn rastrigin(pos: &[f32]) -> f32 {
pos.iter().fold(0.0, |sum, x|
sum + x * x - 10.0 * (2.0 * PI * x).cos() + 10.0)
}
fn main() | println!("{:?} best position", pos);
} | {
// command line args: dimension, number of evaluations
let args: Vec<String> = env::args().collect();
let dim = args[1].parse::<usize>().unwrap();
// initial search space for each dimension
let initial_min_max = vec![(-5.12, 5.12); dim];
// initialize differential evolution
let mut de = self_adaptive_de(initial_min_max, rastrigin);
// perform optimization for a maximum of 100000 cost evaluations,
// or until best cost is below 0.1.
de.iter().take(100000).find(|&cost| cost < 0.1);
// see what we've found
println!("{} evaluations done", de.num_cost_evaluations());
let (cost, pos) = de.best().unwrap();
println!("{} best cost", cost); | identifier_body |
rastrigin.rs | // Copyright 2016 Martin Ankerl.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate differential_evolution;
use differential_evolution::self_adaptive_de;
use std::f32::consts::PI;
use std::env;
// The Rastrigin function is a non-convex function used as a
// performance test problem for optimization algorithms.
// see https://en.wikipedia.org/wiki/Rastrigin_function
fn | (pos: &[f32]) -> f32 {
pos.iter().fold(0.0, |sum, x|
sum + x * x - 10.0 * (2.0 * PI * x).cos() + 10.0)
}
fn main() {
// command line args: dimension, number of evaluations
let args: Vec<String> = env::args().collect();
let dim = args[1].parse::<usize>().unwrap();
// initial search space for each dimension
let initial_min_max = vec![(-5.12, 5.12); dim];
// initialize differential evolution
let mut de = self_adaptive_de(initial_min_max, rastrigin);
// perform optimization for a maximum of 100000 cost evaluations,
// or until best cost is below 0.1.
de.iter().take(100000).find(|&cost| cost < 0.1);
// see what we've found
println!("{} evaluations done", de.num_cost_evaluations());
let (cost, pos) = de.best().unwrap();
println!("{} best cost", cost);
println!("{:?} best position", pos);
} | rastrigin | identifier_name |
rastrigin.rs | // Copyright 2016 Martin Ankerl.
// | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate differential_evolution;
use differential_evolution::self_adaptive_de;
use std::f32::consts::PI;
use std::env;
// The Rastrigin function is a non-convex function used as a
// performance test problem for optimization algorithms.
// see https://en.wikipedia.org/wiki/Rastrigin_function
fn rastrigin(pos: &[f32]) -> f32 {
pos.iter().fold(0.0, |sum, x|
sum + x * x - 10.0 * (2.0 * PI * x).cos() + 10.0)
}
fn main() {
// command line args: dimension, number of evaluations
let args: Vec<String> = env::args().collect();
let dim = args[1].parse::<usize>().unwrap();
// initial search space for each dimension
let initial_min_max = vec![(-5.12, 5.12); dim];
// initialize differential evolution
let mut de = self_adaptive_de(initial_min_max, rastrigin);
// perform optimization for a maximum of 100000 cost evaluations,
// or until best cost is below 0.1.
de.iter().take(100000).find(|&cost| cost < 0.1);
// see what we've found
println!("{} evaluations done", de.num_cost_evaluations());
let (cost, pos) = de.best().unwrap();
println!("{} best cost", cost);
println!("{:?} best position", pos);
} | random_line_split |
|
big64.js | import test from 'ava'; | expected = get64(...expected);
t.deepEqual(big64(a, o), expected);
}
macro.title = (providedTitle, a, o, expected) =>
`${providedTitle || ''} big64(${a}, ${o}) === ${expected}`.trim();
test(
macro,
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_00_00, 0x00_00_00_00],
);
test(
macro,
[0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0xff_00_00_00, 0x00_00_00_00],
);
test(
macro,
[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00],
0,
[0xff_ff_ff_ff, 0xff_ff_ff_00],
);
test(
macro,
[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff],
0,
[0xff_ff_ff_ff, 0xff_ff_ff_ff],
);
test(
macro,
[0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_ff_00, 0x00_00_00_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00],
0,
[0x00_00_00_00, 0x00_00_01_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0xa0, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_00_a0, 0x00_00_00_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x03, 0x0c],
1,
[0x00_00_a0_00, 0x00_00_00_03],
); |
import {big64, get64} from '../../src/index.js';
function macro(t, a, o, expected) { | random_line_split |
big64.js | import test from 'ava';
import {big64, get64} from '../../src/index.js';
function macro(t, a, o, expected) |
macro.title = (providedTitle, a, o, expected) =>
`${providedTitle || ''} big64(${a}, ${o}) === ${expected}`.trim();
test(
macro,
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_00_00, 0x00_00_00_00],
);
test(
macro,
[0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0xff_00_00_00, 0x00_00_00_00],
);
test(
macro,
[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00],
0,
[0xff_ff_ff_ff, 0xff_ff_ff_00],
);
test(
macro,
[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff],
0,
[0xff_ff_ff_ff, 0xff_ff_ff_ff],
);
test(
macro,
[0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_ff_00, 0x00_00_00_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00],
0,
[0x00_00_00_00, 0x00_00_01_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0xa0, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_00_a0, 0x00_00_00_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x03, 0x0c],
1,
[0x00_00_a0_00, 0x00_00_00_03],
);
| {
expected = get64(...expected);
t.deepEqual(big64(a, o), expected);
} | identifier_body |
big64.js | import test from 'ava';
import {big64, get64} from '../../src/index.js';
function | (t, a, o, expected) {
expected = get64(...expected);
t.deepEqual(big64(a, o), expected);
}
macro.title = (providedTitle, a, o, expected) =>
`${providedTitle || ''} big64(${a}, ${o}) === ${expected}`.trim();
test(
macro,
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_00_00, 0x00_00_00_00],
);
test(
macro,
[0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0xff_00_00_00, 0x00_00_00_00],
);
test(
macro,
[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00],
0,
[0xff_ff_ff_ff, 0xff_ff_ff_00],
);
test(
macro,
[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff],
0,
[0xff_ff_ff_ff, 0xff_ff_ff_ff],
);
test(
macro,
[0x00, 0x00, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_ff_00, 0x00_00_00_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00],
0,
[0x00_00_00_00, 0x00_00_01_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0xa0, 0x00, 0x00, 0x00, 0x00],
0,
[0x00_00_00_a0, 0x00_00_00_00],
);
test(
macro,
[0x00, 0x00, 0x00, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x03, 0x0c],
1,
[0x00_00_a0_00, 0x00_00_00_03],
);
| macro | identifier_name |
color_world.py |
self.reward = None
if pydaq:
self.reward = pydaq.GiveReward()
self.reward_count = 0
# self.color_map always corresponds to (r, g, b)
# does not change during game, each game uses a particular color space
self.color_dict = square.make_color_map(self.config['colors'])
# sets the range of colors for this map
self.c_range = self.config['c_range']
# color variables (make dictionary?)
# color_list is set at the beginning; after that it is only
# set again for non-random (training) runs
self.color_list = square.set_start_position_colors(self.config)
self.color_match = [0, 0, 0]
self.color_tolerance = []
self.last_avt, self.avt_factor = square.translate_color_map(self.config, self.color_dict, self.color_list)
print 'starting avt position', self.last_avt
print 'map avatar factor', self.avt_factor
self.random = True
if self.config.get('match_direction'):
self.random = False
# adjustment to speed so corresponds to gobananas task
# 7 seconds to cross original environment
# speed needs to be adjusted to both speed in original
# environment and c_range of colors
# self.speed = 0.05 * (self.c_range[1] - self.c_range[0])
# speed is own variable, so can be changed during training.
self.speed = self.config['speed']
# map avatar variables
self.render2d = None
self.match_square = None
self.map_avt_node = []
# need a multiplier to the joystick output to tolerable speed
self.vel_base = 3
self.max_vel = [500, 500, 0]
self.card = None
self.base = ShowBase()
self.base.disableMouse()
# assume we are showing windows unless proven otherwise
if self.config.get('win', True):
# only need inputs if we have a window
self.inputs = Inputs(self.base)
props = WindowProperties()
props.setCursorHidden(True)
props.setForeground(True)
print self.config.get('resolution')
if self.config.get('resolution'):
props.set_size(int(self.config['resolution'][0]), int(self.config['resolution'][1]))
props.set_origin(0, 0)
else:
props.set_size(600, 600)
props.set_origin(400, 50)
self.base.win.requestProperties(props)
# print self.base.win.get_size()
# setup color map on second window
sq_node = square.setup_square(self.config)
self.setup_display2(sq_node)
# print 'background color', self.base.getBackgroundColor()
# create the avatar
self.avatar = NodePath(ActorNode("avatar"))
self.avatar.reparentTo(self.base.render)
self.avatar.setH(self.base.camera.getH())
self.base.camera.reparentTo(self.avatar)
self.base.camera.setPos(0, 0, 0)
# initialize task variables
self.frame_task = None
self.started_game = None
self.showed_match = None
self.gave_reward = None
# initialize and start the game
self.set_next_trial()
# print 'end init'
def start_loop(self):
# need to get new match
print 'start loop'
self.started_game = self.base.taskMgr.doMethodLater(5, self.start_play, 'start_play')
self.showed_match = self.base.taskMgr.add(self.show_match_sample, 'match_image')
# Task methods
def show_match_sample(self, task):
print 'show match sample'
print self.color_match[:]
# match_image.fill(*self.color_match[:])
card = CardMaker('card')
color_match = self.color_match[:]
# add alpha channel
color_match.append(1)
print color_match
card.set_color(*color_match[:])
card.set_frame(-12, -8, 0, 4)
# log this
self.card = self.base.render.attach_new_node(card.generate())
return task.done
def start_play(self, task):
print 'start play'
# log this
self.base.taskMgr.remove('match_image')
self.card.removeNode()
# print self.base.render.ls()
self.frame_task = self.base.taskMgr.add(self.game_loop, "game_loop")
self.frame_task.last = 0 # initiate task time of the last frame
# log this
self.base.setBackgroundColor(self.color_list[:])
return task.done
def game_loop(self, task):
dt = task.time - task.last
task.last = task.time
self.velocity = self.inputs.poll_inputs(self.velocity)
move = self.move_avatar(dt)
stop = self.change_background(move)
self.move_map_avatar(move, stop)
match = self.check_color_match()
if match:
self.give_reward()
return task.done
return task.cont
def reward_loop(self, task):
self.reward_count += 1
if self.reward_count <= self.config['num_beeps']:
if self.reward:
# log this
print 'give a bloody reward already'
self.reward.pumpOut()
print 'give reward'
return task.again
else:
self.end_loop()
return task.done
def move_avatar(self, dt):
# print 'velocity', self.velocity
# this makes for smooth (correct speed) diagonal movement
# print 'velocity', self.velocity
magnitude = max(abs(self.velocity[0]), abs(self.velocity[1]))
move = None
if self.velocity.normalize():
# go left in increasing amount
# print 'dt', dt
# print 'normalized'
# print 'velocity', self.velocity
# print 'magnitude', magnitude
self.velocity *= magnitude
# print 'velocity', self.velocity
# this makes for smooth movement
move = self.velocity * self.vel_base * dt
# print move
self.avatar.setFluidPos(self.avatar, move)
return move
def change_background(self, move):
stop = [True, True, True]
if move:
# print move
move *= self.speed
for i in range(3):
value = self.color_dict[i]
if value is not None:
stop[i] = False
# keys correspond to x,y,z
# values correspond to r,g,b
if i == 2:
# z axis is treated differently
# need to work on this. z should
# be at min when both x and y are at max
# taking the average is not quite right...
z_move = (move[0] + move[1])/2
# print z_move
self.color_list[value] -= z_move
else:
self.color_list[value] += move[i]
if self.color_list[value] < self.c_range[0]:
self.color_list[value] = self.c_range[0]
stop[i] = True
elif self.color_list[value] > self.c_range[1]:
self.color_list[value] = self.c_range[1]
stop[i] = True
# log this
self.base.setBackgroundColor(self.color_list[:])
# print self.base.getBackgroundColor()
return stop
def move_map_avatar(self, move, stop):
# print move
# avatar is mapped assuming c_range of 0.5. What do I need to
# change to use a different c_range? c_range of one is twice
# the
if move:
avt = LineSegs()
avt.setThickness(1)
avt.setColor(1, 1, 1)
# print 'last', self.last_avt
avt.move_to(self.last_avt[0], -5, self.last_avt[1])
# print 'move', move
new_move = [i + (j * self.avt_factor) for i, j in zip(self.last_avt, move)]
# new_move = [i + j for i, j in zip(self.last_avt, move)]
# would it be better to have a local stop condition?
if stop[0]:
new_move[0] = self.last_avt[0]
# print 'stop x', self.last_avt[0]
if stop[1]:
new_move[1] = self.last_avt[1]
# print 'stop y', self.last_avt[1]
# print 'new', new_move
self.last_avt = [new_move[0], new_move[1]]
avt.draw_to(new_move[0], -5, new_move[1])
self.map_avt_node.append(self.render2d.attach_new_node(avt.create()))
# print self.map_avt_node[-1]
# can't let too many nodes pile up
if len(self.map_avt_node) > 299:
# removing the node does not remove the object from the list
for i, j in enumerate(self.map_avt_node):
j.removeNode()
if i > 49:
break
del self.map_avt_node[0:50]
def check_color_match(self):
# print 'match this', self.color_tolerance
| self.config = config | conditional_block |
|
color_world.py | 7 seconds to cross original environment
# speed needs to be adjusted to both speed in original
# environment and c_range of colors
# self.speed = 0.05 * (self.c_range[1] - self.c_range[0])
# speed is own variable, so can be changed during training.
self.speed = self.config['speed']
# map avatar variables
self.render2d = None
self.match_square = None
self.map_avt_node = []
# need a multiplier to the joystick output to tolerable speed
self.vel_base = 3
self.max_vel = [500, 500, 0]
self.card = None
self.base = ShowBase()
self.base.disableMouse()
# assume we are showing windows unless proven otherwise
if self.config.get('win', True):
# only need inputs if we have a window
self.inputs = Inputs(self.base)
props = WindowProperties()
props.setCursorHidden(True)
props.setForeground(True)
print self.config.get('resolution')
if self.config.get('resolution'):
props.set_size(int(self.config['resolution'][0]), int(self.config['resolution'][1]))
props.set_origin(0, 0)
else:
props.set_size(600, 600)
props.set_origin(400, 50)
self.base.win.requestProperties(props)
# print self.base.win.get_size()
# setup color map on second window
sq_node = square.setup_square(self.config)
self.setup_display2(sq_node)
# print 'background color', self.base.getBackgroundColor()
# create the avatar
self.avatar = NodePath(ActorNode("avatar"))
self.avatar.reparentTo(self.base.render)
self.avatar.setH(self.base.camera.getH())
self.base.camera.reparentTo(self.avatar)
self.base.camera.setPos(0, 0, 0)
# initialize task variables
self.frame_task = None
self.started_game = None
self.showed_match = None
self.gave_reward = None
# initialize and start the game
self.set_next_trial()
# print 'end init'
def start_loop(self):
# need to get new match
print 'start loop'
self.started_game = self.base.taskMgr.doMethodLater(5, self.start_play, 'start_play')
self.showed_match = self.base.taskMgr.add(self.show_match_sample, 'match_image')
# Task methods
def show_match_sample(self, task):
print 'show match sample'
print self.color_match[:]
# match_image.fill(*self.color_match[:])
card = CardMaker('card')
color_match = self.color_match[:]
# add alpha channel
color_match.append(1)
print color_match
card.set_color(*color_match[:])
card.set_frame(-12, -8, 0, 4)
# log this
self.card = self.base.render.attach_new_node(card.generate())
return task.done
def start_play(self, task):
print 'start play'
# log this
self.base.taskMgr.remove('match_image')
self.card.removeNode()
# print self.base.render.ls()
self.frame_task = self.base.taskMgr.add(self.game_loop, "game_loop")
self.frame_task.last = 0 # initiate task time of the last frame
# log this
self.base.setBackgroundColor(self.color_list[:])
return task.done
def game_loop(self, task):
dt = task.time - task.last
task.last = task.time
self.velocity = self.inputs.poll_inputs(self.velocity)
move = self.move_avatar(dt)
stop = self.change_background(move)
self.move_map_avatar(move, stop)
match = self.check_color_match()
if match:
self.give_reward()
return task.done
return task.cont
def reward_loop(self, task):
self.reward_count += 1
if self.reward_count <= self.config['num_beeps']:
if self.reward:
# log this
print 'give a bloody reward already' | return task.again
else:
self.end_loop()
return task.done
def move_avatar(self, dt):
# print 'velocity', self.velocity
# this makes for smooth (correct speed) diagonal movement
# print 'velocity', self.velocity
magnitude = max(abs(self.velocity[0]), abs(self.velocity[1]))
move = None
if self.velocity.normalize():
# go left in increasing amount
# print 'dt', dt
# print 'normalized'
# print 'velocity', self.velocity
# print 'magnitude', magnitude
self.velocity *= magnitude
# print 'velocity', self.velocity
# this makes for smooth movement
move = self.velocity * self.vel_base * dt
# print move
self.avatar.setFluidPos(self.avatar, move)
return move
def change_background(self, move):
stop = [True, True, True]
if move:
# print move
move *= self.speed
for i in range(3):
value = self.color_dict[i]
if value is not None:
stop[i] = False
# keys correspond to x,y,z
# values correspond to r,g,b
if i == 2:
# z axis is treated differently
# need to work on this. z should
# be at min when both x and y are at max
# taking the average is not quite right...
z_move = (move[0] + move[1])/2
# print z_move
self.color_list[value] -= z_move
else:
self.color_list[value] += move[i]
if self.color_list[value] < self.c_range[0]:
self.color_list[value] = self.c_range[0]
stop[i] = True
elif self.color_list[value] > self.c_range[1]:
self.color_list[value] = self.c_range[1]
stop[i] = True
# log this
self.base.setBackgroundColor(self.color_list[:])
# print self.base.getBackgroundColor()
return stop
def move_map_avatar(self, move, stop):
# print move
# avatar is mapped assuming c_range of 0.5. What do I need to
# change to use a different c_range? c_range of one is twice
# the
if move:
avt = LineSegs()
avt.setThickness(1)
avt.setColor(1, 1, 1)
# print 'last', self.last_avt
avt.move_to(self.last_avt[0], -5, self.last_avt[1])
# print 'move', move
new_move = [i + (j * self.avt_factor) for i, j in zip(self.last_avt, move)]
# new_move = [i + j for i, j in zip(self.last_avt, move)]
# would it be better to have a local stop condition?
if stop[0]:
new_move[0] = self.last_avt[0]
# print 'stop x', self.last_avt[0]
if stop[1]:
new_move[1] = self.last_avt[1]
# print 'stop y', self.last_avt[1]
# print 'new', new_move
self.last_avt = [new_move[0], new_move[1]]
avt.draw_to(new_move[0], -5, new_move[1])
self.map_avt_node.append(self.render2d.attach_new_node(avt.create()))
# print self.map_avt_node[-1]
# can't let too many nodes pile up
if len(self.map_avt_node) > 299:
# removing the node does not remove the object from the list
for i, j in enumerate(self.map_avt_node):
j.removeNode()
if i > 49:
break
del self.map_avt_node[0:50]
def check_color_match(self):
# print 'match this', self.color_tolerance
# print self.color_list
check_color = [j[0] < self.color_list[i] < j[1] for i, j in enumerate(self.color_tolerance)]
# print check_color
if all(check_color):
return True
else:
return False
def give_reward(self):
# clear the background
self.base.setBackgroundColor(0.41, 0.41, 0.41)
print 'give first reward'
self.reward_count = 1
if self.reward:
# log this
self.reward.pumpOut()
self.gave_reward = self.base.taskMgr.doMethodLater(self.config['pump_delay'], self.reward_loop, 'reward_loop')
def end_loop(self):
print 'end loop'
# clear avatar map
self.clear_avatar_map()
# if there is a match set, return to center of color gradient,
# set new match, if applicable
self.set_next_trial()
def clear_avatar_map(self):
for i, j in enumerate(self.map_avt_node):
j.removeNode()
self.map_avt_node = []
def plot_match_square(self, corners):
print 'plot match square | self.reward.pumpOut()
print 'give reward' | random_line_split |
color_world.py |
magnitude = max(abs(self.velocity[0]), abs(self.velocity[1]))
move = None
if self.velocity.normalize():
# go left in increasing amount
# print 'dt', dt
# print 'normalized'
# print 'velocity', self.velocity
# print 'magnitude', magnitude
self.velocity *= magnitude
# print 'velocity', self.velocity
# this makes for smooth movement
move = self.velocity * self.vel_base * dt
# print move
self.avatar.setFluidPos(self.avatar, move)
return move
def change_background(self, move):
stop = [True, True, True]
if move:
# print move
move *= self.speed
for i in range(3):
value = self.color_dict[i]
if value is not None:
stop[i] = False
# keys correspond to x,y,z
# values correspond to r,g,b
if i == 2:
# z axis is treated differently
# need to work on this. z should
# be at min when both x and y are at max
# taking the average is not quite right...
z_move = (move[0] + move[1])/2
# print z_move
self.color_list[value] -= z_move
else:
self.color_list[value] += move[i]
if self.color_list[value] < self.c_range[0]:
self.color_list[value] = self.c_range[0]
stop[i] = True
elif self.color_list[value] > self.c_range[1]:
self.color_list[value] = self.c_range[1]
stop[i] = True
# log this
self.base.setBackgroundColor(self.color_list[:])
# print self.base.getBackgroundColor()
return stop
def move_map_avatar(self, move, stop):
# print move
# avatar is mapped assuming c_range of 0.5. What do I need to
# change to use a different c_range? c_range of one is twice
# the
if move:
avt = LineSegs()
avt.setThickness(1)
avt.setColor(1, 1, 1)
# print 'last', self.last_avt
avt.move_to(self.last_avt[0], -5, self.last_avt[1])
# print 'move', move
new_move = [i + (j * self.avt_factor) for i, j in zip(self.last_avt, move)]
# new_move = [i + j for i, j in zip(self.last_avt, move)]
# would it be better to have a local stop condition?
if stop[0]:
new_move[0] = self.last_avt[0]
# print 'stop x', self.last_avt[0]
if stop[1]:
new_move[1] = self.last_avt[1]
# print 'stop y', self.last_avt[1]
# print 'new', new_move
self.last_avt = [new_move[0], new_move[1]]
avt.draw_to(new_move[0], -5, new_move[1])
self.map_avt_node.append(self.render2d.attach_new_node(avt.create()))
# print self.map_avt_node[-1]
# can't let too many nodes pile up
if len(self.map_avt_node) > 299:
# removing the node does not remove the object from the list
for i, j in enumerate(self.map_avt_node):
j.removeNode()
if i > 49:
break
del self.map_avt_node[0:50]
def check_color_match(self):
# print 'match this', self.color_tolerance
# print self.color_list
check_color = [j[0] < self.color_list[i] < j[1] for i, j in enumerate(self.color_tolerance)]
# print check_color
if all(check_color):
return True
else:
return False
def give_reward(self):
# clear the background
self.base.setBackgroundColor(0.41, 0.41, 0.41)
print 'give first reward'
self.reward_count = 1
if self.reward:
# log this
self.reward.pumpOut()
self.gave_reward = self.base.taskMgr.doMethodLater(self.config['pump_delay'], self.reward_loop, 'reward_loop')
def end_loop(self):
print 'end loop'
# clear avatar map
self.clear_avatar_map()
# if there is a match set, return to center of color gradient,
# set new match, if applicable
self.set_next_trial()
def clear_avatar_map(self):
for i, j in enumerate(self.map_avt_node):
j.removeNode()
self.map_avt_node = []
def plot_match_square(self, corners):
print 'plot match square'
print corners
match = LineSegs()
match.setThickness(1.5)
match.setColor(0, 0, 0)
match.move_to(corners[0][0], -5, corners[1][0])
match.draw_to(corners[0][1], -5, corners[1][0])
match.draw_to(corners[0][1], -5, corners[1][1])
match.draw_to(corners[0][0], -5, corners[1][1])
match.draw_to(corners[0][0], -5, corners[1][0])
# print self.render2d
self.match_square = self.render2d.attach_new_node(match.create())
def create_avatar_map_match_square(self, config=None):
print 'make new square for map'
if config is not None:
config_dict = config
else:
config_dict = self.config
# create square on avatar map for new color match
map_color_match, factor = square.translate_color_map(config_dict, self.color_dict, self.color_match)
tolerance = config_dict['tolerance'] * factor
map_color_tolerance = [(i - tolerance, i + tolerance) for i in map_color_match]
print map_color_tolerance
if self.render2d:
if self.match_square:
self.match_square.removeNode()
self.plot_match_square(map_color_tolerance)
def set_next_trial(self):
print 'set next trial'
# move avatar back to beginning position, only matters for
# showing card for next color match
self.avatar.set_pos(-10, -10, 2)
# set color_list with starting color
# if random, won't use this again, but for manual, will
# return to center
# need to update self.config to new direction, if there is one
if self.config.get('match_direction'):
self.check_key_map()
# return to center, otherwise random will start where you left off
self.color_list = square.set_start_position_colors(self.config)
# starting position for map avatar, just translate new color_list
self.last_avt, self.avt_factor = square.translate_color_map(self.config, self.color_dict, self.color_list)
print 'start color', self.color_list
print self.color_dict
# again need to update self.config for match if using keys
self.color_match = square.set_match_colors(self.config, self.color_dict)
# sets the tolerance for how close to a color for reward
self.color_tolerance = [(i - self.config['tolerance'], i + self.config['tolerance']) for i in self.color_match]
print 'color match', self.color_match
print 'color tolerance', self.color_tolerance
self.create_avatar_map_match_square(self.config)
# start the game
self.start_loop()
def check_key_map(self):
if self.config['colors'][0]:
if self.inputs.key_map['r']:
self.config['match_direction'] = ['right']
elif self.inputs.key_map['r'] is not None:
self.config['match_direction'] = ['left']
elif self.config['colors'][1]:
if self.inputs.key_map['f']:
self.config['match_direction'] = ['front']
elif self.inputs.key_map['f'] is not None:
self.config['match_direction'] = ['back']
def setup_display2(self, display_node):
| print 'setup display2'
props = WindowProperties()
props.set_cursor_hidden(True)
props.set_foreground(False)
if self.config.get('resolution'):
props.setSize(700, 700)
props.setOrigin(-int(self.config['resolution'][0] - 5), 5)
else:
props.setSize(300, 300)
props.setOrigin(10, 10)
window2 = self.base.openWindow(props=props, aspectRatio=1)
lens = OrthographicLens()
lens.set_film_size(2, 2)
lens.setNearFar(-100, 100)
self.render2d = NodePath('render2d')
self.render2d.attach_new_node(display_node)
camera2d = self.base.makeCamera(window2)
camera2d.setPos(0, -10, 0)
camera2d.node().setLens(lens)
camera2d.reparentTo(self.render2d) | identifier_body |
|
color_world.py | 7 seconds to cross original environment
# speed needs to be adjusted to both speed in original
# environment and c_range of colors
# self.speed = 0.05 * (self.c_range[1] - self.c_range[0])
# speed is own variable, so can be changed during training.
self.speed = self.config['speed']
# map avatar variables
self.render2d = None
self.match_square = None
self.map_avt_node = []
# need a multiplier to the joystick output to tolerable speed
self.vel_base = 3
self.max_vel = [500, 500, 0]
self.card = None
self.base = ShowBase()
self.base.disableMouse()
# assume we are showing windows unless proven otherwise
if self.config.get('win', True):
# only need inputs if we have a window
self.inputs = Inputs(self.base)
props = WindowProperties()
props.setCursorHidden(True)
props.setForeground(True)
print self.config.get('resolution')
if self.config.get('resolution'):
props.set_size(int(self.config['resolution'][0]), int(self.config['resolution'][1]))
props.set_origin(0, 0)
else:
props.set_size(600, 600)
props.set_origin(400, 50)
self.base.win.requestProperties(props)
# print self.base.win.get_size()
# setup color map on second window
sq_node = square.setup_square(self.config)
self.setup_display2(sq_node)
# print 'background color', self.base.getBackgroundColor()
# create the avatar
self.avatar = NodePath(ActorNode("avatar"))
self.avatar.reparentTo(self.base.render)
self.avatar.setH(self.base.camera.getH())
self.base.camera.reparentTo(self.avatar)
self.base.camera.setPos(0, 0, 0)
# initialize task variables
self.frame_task = None
self.started_game = None
self.showed_match = None
self.gave_reward = None
# initialize and start the game
self.set_next_trial()
# print 'end init'
def start_loop(self):
# need to get new match
print 'start loop'
self.started_game = self.base.taskMgr.doMethodLater(5, self.start_play, 'start_play')
self.showed_match = self.base.taskMgr.add(self.show_match_sample, 'match_image')
# Task methods
def show_match_sample(self, task):
print 'show match sample'
print self.color_match[:]
# match_image.fill(*self.color_match[:])
card = CardMaker('card')
color_match = self.color_match[:]
# add alpha channel
color_match.append(1)
print color_match
card.set_color(*color_match[:])
card.set_frame(-12, -8, 0, 4)
# log this
self.card = self.base.render.attach_new_node(card.generate())
return task.done
def start_play(self, task):
print 'start play'
# log this
self.base.taskMgr.remove('match_image')
self.card.removeNode()
# print self.base.render.ls()
self.frame_task = self.base.taskMgr.add(self.game_loop, "game_loop")
self.frame_task.last = 0 # initiate task time of the last frame
# log this
self.base.setBackgroundColor(self.color_list[:])
return task.done
def game_loop(self, task):
dt = task.time - task.last
task.last = task.time
self.velocity = self.inputs.poll_inputs(self.velocity)
move = self.move_avatar(dt)
stop = self.change_background(move)
self.move_map_avatar(move, stop)
match = self.check_color_match()
if match:
self.give_reward()
return task.done
return task.cont
def reward_loop(self, task):
self.reward_count += 1
if self.reward_count <= self.config['num_beeps']:
if self.reward:
# log this
print 'give a bloody reward already'
self.reward.pumpOut()
print 'give reward'
return task.again
else:
self.end_loop()
return task.done
def move_avatar(self, dt):
# print 'velocity', self.velocity
# this makes for smooth (correct speed) diagonal movement
# print 'velocity', self.velocity
magnitude = max(abs(self.velocity[0]), abs(self.velocity[1]))
move = None
if self.velocity.normalize():
# go left in increasing amount
# print 'dt', dt
# print 'normalized'
# print 'velocity', self.velocity
# print 'magnitude', magnitude
self.velocity *= magnitude
# print 'velocity', self.velocity
# this makes for smooth movement
move = self.velocity * self.vel_base * dt
# print move
self.avatar.setFluidPos(self.avatar, move)
return move
def change_background(self, move):
stop = [True, True, True]
if move:
# print move
move *= self.speed
for i in range(3):
value = self.color_dict[i]
if value is not None:
stop[i] = False
# keys correspond to x,y,z
# values correspond to r,g,b
if i == 2:
# z axis is treated differently
# need to work on this. z should
# be at min when both x and y are at max
# taking the average is not quite right...
z_move = (move[0] + move[1])/2
# print z_move
self.color_list[value] -= z_move
else:
self.color_list[value] += move[i]
if self.color_list[value] < self.c_range[0]:
self.color_list[value] = self.c_range[0]
stop[i] = True
elif self.color_list[value] > self.c_range[1]:
self.color_list[value] = self.c_range[1]
stop[i] = True
# log this
self.base.setBackgroundColor(self.color_list[:])
# print self.base.getBackgroundColor()
return stop
def move_map_avatar(self, move, stop):
# print move
# avatar is mapped assuming c_range of 0.5. What do I need to
# change to use a different c_range? c_range of one is twice
# the
if move:
avt = LineSegs()
avt.setThickness(1)
avt.setColor(1, 1, 1)
# print 'last', self.last_avt
avt.move_to(self.last_avt[0], -5, self.last_avt[1])
# print 'move', move
new_move = [i + (j * self.avt_factor) for i, j in zip(self.last_avt, move)]
# new_move = [i + j for i, j in zip(self.last_avt, move)]
# would it be better to have a local stop condition?
if stop[0]:
new_move[0] = self.last_avt[0]
# print 'stop x', self.last_avt[0]
if stop[1]:
new_move[1] = self.last_avt[1]
# print 'stop y', self.last_avt[1]
# print 'new', new_move
self.last_avt = [new_move[0], new_move[1]]
avt.draw_to(new_move[0], -5, new_move[1])
self.map_avt_node.append(self.render2d.attach_new_node(avt.create()))
# print self.map_avt_node[-1]
# can't let too many nodes pile up
if len(self.map_avt_node) > 299:
# removing the node does not remove the object from the list
for i, j in enumerate(self.map_avt_node):
j.removeNode()
if i > 49:
break
del self.map_avt_node[0:50]
def | (self):
# print 'match this', self.color_tolerance
# print self.color_list
check_color = [j[0] < self.color_list[i] < j[1] for i, j in enumerate(self.color_tolerance)]
# print check_color
if all(check_color):
return True
else:
return False
def give_reward(self):
# clear the background
self.base.setBackgroundColor(0.41, 0.41, 0.41)
print 'give first reward'
self.reward_count = 1
if self.reward:
# log this
self.reward.pumpOut()
self.gave_reward = self.base.taskMgr.doMethodLater(self.config['pump_delay'], self.reward_loop, 'reward_loop')
def end_loop(self):
print 'end loop'
# clear avatar map
self.clear_avatar_map()
# if there is a match set, return to center of color gradient,
# set new match, if applicable
self.set_next_trial()
def clear_avatar_map(self):
for i, j in enumerate(self.map_avt_node):
j.removeNode()
self.map_avt_node = []
def plot_match_square(self, corners):
print 'plot match square | check_color_match | identifier_name |
functions_e.js | ['relative_5fname',['relative_name',['../classmemoryoracle_1_1descriptions_1_1MemoryDescription.html#a342c00383637914cc5b77d165016a41f',1,'memoryoracle::descriptions::MemoryDescription']]]
]; | var searchData=
[
['read_5fregister',['read_register',['../classmemoryoracle_1_1frame_1_1Frame.html#ad5bb885a0accba9ff8f15c6b0e51bfc4',1,'memoryoracle::frame::Frame']]],
['read_5fvar',['read_var',['../classmemoryoracle_1_1frame_1_1Frame.html#ae54a6f6a355f6bb63162d2a78a761c30',1,'memoryoracle::frame::Frame']]],
['register_5fhandler',['register_handler',['../classmemoryoracle_1_1registry_1_1TypeRegistration.html#a3f017524239fb80650039bd4ab927f73',1,'memoryoracle::registry::TypeRegistration']]], | random_line_split |
|
ShadowNode.js | var UTIL = require('./util');
var ShadowNode;
module.exports = ShadowNode = function(patch,options){
this.shadow = options.shadow;
this.native = options.native;
this.elem = new patch.type(this);
this.elem.props = patch.props;
this.elem.props.children = patch.children;
this.elem.state = this.elem.getInitialState ? this.elem.getInitialState() : {};
this.elem.componentWillMount();
this.render();
this.elem.componentDidMount();
};
var proto = ShadowNode.prototype;
Object.defineProperty(proto,'parent',{
get : function(){
return this.native.parent;
}
});
proto.setPatch = function(patch){
var oldProps = this.elem.props;
this.elem._isUpdating = true;
var newProps = patch.props;
newProps.children = patch.children;
this.elem.componentWillRecieveProps(newProps);
this.elem.props = newProps;
this.elem.componentDidRecieveProps(oldProps);
this.update();
};
proto.update = function(){
// This is called by set state and by props updating
this.elem.componentWillUpdate();
this.render();
this.elem.componentDidUpdate();
};
proto.remove = function(){
this.elem.componentWillUnmount();
this.destroyed = true;
if (this.figure){
this.figure.remove();
}
this.shadow = void 0;
this.figure = void 0;
this.native = void 0;
this.elem.componentDidUnmount();
};
proto.render = function(){
var newPatch = this.elem.render();
var lastPatch = this.lastPatch;
this.lastPatch = newPatch;
if (!lastPatch && !newPatch) return;
if (UTIL.isNative(newPatch)){
if (this.figure){
this.figure.remove();
this.figure = void 0;
}
this.native.shadowTail = this;
return this.native.setPatch(newPatch);
}
if (UTIL.differentTypes(lastPatch,newPatch)){
if (this.figure) this.figure.remove();
this.figure = new ShadowNode(newPatch,{ | // component will update
this.figure.setPatch(newPatch);
// component did update
}
}; | shadow : this,native : this.native
});
return this.figure;
}
if (UTIL.differentPatch(lastPatch,newPatch)){ | random_line_split |
index.js | import { InvalidArgumentError } from '../errors/InvalidArgumentError';
import { NotImplementedError } from '../errors/NotImplementedError';
import mustache from 'mustache';
import '../utils/Function';
// The base class for a control
export class Control {
// The constructor of a control
// id: The id of the control
// template: The template used for rendering the control
// localCSS: An object whose properties are CSS class names and whose values are the localized CSS class names
// The control will change the matching CSS class names in the templates.
constructor(id, template, localCSS) {
if (typeof id !== 'string' || !isNaN(id)) {
throw new InvalidArgumentError('Cannot create the control because the id is not a string');
}
if (id.length < 1) {
throw new InvalidArgumentError('Cannot create the control because the id is not a non-empty string');
}
if (null === template || undefined === template) {
throw new InvalidArgumentError('Cannot create the control because the template cannot be null or undefined');
}
if (typeof template !== 'string') {
throw new InvalidArgumentError('Cannot create the control because the template is not a string');
}
this.id = id;
this.template = template;
if (template && localCSS) {
// localize the CSS class names in the templates
for (const oCN in localCSS) {
const nCN = localCSS[oCN];
this.template = this.template
.replace(new RegExp(`class="${oCN}"`, 'gi'), `class="${nCN}"`)
.replace(new RegExp(`class='${oCN}'`, 'gi'), `class='${nCN}'`);
}
}
this.controls = {};
}
// Adds a child control to the control
// control: The Control instance
addControl(control) |
// Removes a child control from the control
// val: Either a controlId or a Control instance
removeControl(val) {
if (val instanceof Control) {
delete this.controls[val.id];
} else {
delete this.controls[val];
}
}
// Renders the control (and all its contained controls)
// data: The object that contains the data to substitute into the template
// eventObj: Event related data for the event that caused the control to render
render(data, eventObj) {
if (this.controls) {
const controlData = {};
for (let controlId in this.controls) {
const control = this.controls[controlId];
controlData[control.constructor.getConstructorName()] = {};
controlData[control.constructor.getConstructorName()][control.id] = control.render(data, eventObj);
}
for (let key in controlData) {
data[key] = controlData[key];
}
}
return mustache.render(this.template, data);
}
// This method is invoked so the control can bind events after the DOM has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdated(domContainerElement, eventObj) {
if (this.onDOMUpdatedNotification) {
this.onDOMUpdatedNotification(domContainerElement, eventObj);
}
if (this.controls) {
for (let controlId in this.controls) {
const control = this.controls[controlId];
control.onDOMUpdated(domContainerElement, eventObj);
}
}
}
// The Control classes that extend this type can add custom logic here to be executed after the domContainerElement
// has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdatedNotification(domContainerElement, eventObj) {
}
}; | {
if (!(control instanceof Control)) {
throw new InvalidArgumentError('Cannot add sub-control because it is invalid');
}
this.controls[control.id] = control;
} | identifier_body |
index.js | import { InvalidArgumentError } from '../errors/InvalidArgumentError';
import { NotImplementedError } from '../errors/NotImplementedError';
import mustache from 'mustache';
import '../utils/Function';
// The base class for a control
export class Control {
// The constructor of a control
// id: The id of the control
// template: The template used for rendering the control
// localCSS: An object whose properties are CSS class names and whose values are the localized CSS class names
// The control will change the matching CSS class names in the templates.
constructor(id, template, localCSS) {
if (typeof id !== 'string' || !isNaN(id)) {
throw new InvalidArgumentError('Cannot create the control because the id is not a string');
}
if (id.length < 1) {
throw new InvalidArgumentError('Cannot create the control because the id is not a non-empty string');
}
if (null === template || undefined === template) {
throw new InvalidArgumentError('Cannot create the control because the template cannot be null or undefined');
}
if (typeof template !== 'string') {
throw new InvalidArgumentError('Cannot create the control because the template is not a string');
}
this.id = id;
this.template = template;
if (template && localCSS) {
// localize the CSS class names in the templates
for (const oCN in localCSS) {
const nCN = localCSS[oCN];
this.template = this.template
.replace(new RegExp(`class="${oCN}"`, 'gi'), `class="${nCN}"`)
.replace(new RegExp(`class='${oCN}'`, 'gi'), `class='${nCN}'`);
}
}
this.controls = {};
}
// Adds a child control to the control
// control: The Control instance
addControl(control) {
if (!(control instanceof Control)) {
throw new InvalidArgumentError('Cannot add sub-control because it is invalid');
}
this.controls[control.id] = control;
}
// Removes a child control from the control
// val: Either a controlId or a Control instance
| (val) {
if (val instanceof Control) {
delete this.controls[val.id];
} else {
delete this.controls[val];
}
}
// Renders the control (and all its contained controls)
// data: The object that contains the data to substitute into the template
// eventObj: Event related data for the event that caused the control to render
render(data, eventObj) {
if (this.controls) {
const controlData = {};
for (let controlId in this.controls) {
const control = this.controls[controlId];
controlData[control.constructor.getConstructorName()] = {};
controlData[control.constructor.getConstructorName()][control.id] = control.render(data, eventObj);
}
for (let key in controlData) {
data[key] = controlData[key];
}
}
return mustache.render(this.template, data);
}
// This method is invoked so the control can bind events after the DOM has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdated(domContainerElement, eventObj) {
if (this.onDOMUpdatedNotification) {
this.onDOMUpdatedNotification(domContainerElement, eventObj);
}
if (this.controls) {
for (let controlId in this.controls) {
const control = this.controls[controlId];
control.onDOMUpdated(domContainerElement, eventObj);
}
}
}
// The Control classes that extend this type can add custom logic here to be executed after the domContainerElement
// has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdatedNotification(domContainerElement, eventObj) {
}
}; | removeControl | identifier_name |
index.js | import { InvalidArgumentError } from '../errors/InvalidArgumentError';
import { NotImplementedError } from '../errors/NotImplementedError';
import mustache from 'mustache';
import '../utils/Function';
// The base class for a control
export class Control {
// The constructor of a control
// id: The id of the control
// template: The template used for rendering the control
// localCSS: An object whose properties are CSS class names and whose values are the localized CSS class names
// The control will change the matching CSS class names in the templates.
constructor(id, template, localCSS) {
if (typeof id !== 'string' || !isNaN(id)) {
throw new InvalidArgumentError('Cannot create the control because the id is not a string');
}
if (id.length < 1) {
throw new InvalidArgumentError('Cannot create the control because the id is not a non-empty string');
}
if (null === template || undefined === template) {
throw new InvalidArgumentError('Cannot create the control because the template cannot be null or undefined');
}
if (typeof template !== 'string') |
this.id = id;
this.template = template;
if (template && localCSS) {
// localize the CSS class names in the templates
for (const oCN in localCSS) {
const nCN = localCSS[oCN];
this.template = this.template
.replace(new RegExp(`class="${oCN}"`, 'gi'), `class="${nCN}"`)
.replace(new RegExp(`class='${oCN}'`, 'gi'), `class='${nCN}'`);
}
}
this.controls = {};
}
// Adds a child control to the control
// control: The Control instance
addControl(control) {
if (!(control instanceof Control)) {
throw new InvalidArgumentError('Cannot add sub-control because it is invalid');
}
this.controls[control.id] = control;
}
// Removes a child control from the control
// val: Either a controlId or a Control instance
removeControl(val) {
if (val instanceof Control) {
delete this.controls[val.id];
} else {
delete this.controls[val];
}
}
// Renders the control (and all its contained controls)
// data: The object that contains the data to substitute into the template
// eventObj: Event related data for the event that caused the control to render
render(data, eventObj) {
if (this.controls) {
const controlData = {};
for (let controlId in this.controls) {
const control = this.controls[controlId];
controlData[control.constructor.getConstructorName()] = {};
controlData[control.constructor.getConstructorName()][control.id] = control.render(data, eventObj);
}
for (let key in controlData) {
data[key] = controlData[key];
}
}
return mustache.render(this.template, data);
}
// This method is invoked so the control can bind events after the DOM has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdated(domContainerElement, eventObj) {
if (this.onDOMUpdatedNotification) {
this.onDOMUpdatedNotification(domContainerElement, eventObj);
}
if (this.controls) {
for (let controlId in this.controls) {
const control = this.controls[controlId];
control.onDOMUpdated(domContainerElement, eventObj);
}
}
}
// The Control classes that extend this type can add custom logic here to be executed after the domContainerElement
// has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdatedNotification(domContainerElement, eventObj) {
}
}; | {
throw new InvalidArgumentError('Cannot create the control because the template is not a string');
} | conditional_block |
index.js | import { InvalidArgumentError } from '../errors/InvalidArgumentError';
import { NotImplementedError } from '../errors/NotImplementedError';
import mustache from 'mustache';
import '../utils/Function';
// The base class for a control
export class Control {
// The constructor of a control
// id: The id of the control
// template: The template used for rendering the control
// localCSS: An object whose properties are CSS class names and whose values are the localized CSS class names
// The control will change the matching CSS class names in the templates.
constructor(id, template, localCSS) {
if (typeof id !== 'string' || !isNaN(id)) {
throw new InvalidArgumentError('Cannot create the control because the id is not a string');
}
if (id.length < 1) {
throw new InvalidArgumentError('Cannot create the control because the id is not a non-empty string');
}
if (null === template || undefined === template) {
throw new InvalidArgumentError('Cannot create the control because the template cannot be null or undefined');
}
if (typeof template !== 'string') {
throw new InvalidArgumentError('Cannot create the control because the template is not a string');
}
this.id = id;
this.template = template;
if (template && localCSS) {
// localize the CSS class names in the templates
for (const oCN in localCSS) {
const nCN = localCSS[oCN];
this.template = this.template
.replace(new RegExp(`class="${oCN}"`, 'gi'), `class="${nCN}"`)
.replace(new RegExp(`class='${oCN}'`, 'gi'), `class='${nCN}'`);
}
}
this.controls = {};
}
// Adds a child control to the control
// control: The Control instance
addControl(control) {
if (!(control instanceof Control)) {
throw new InvalidArgumentError('Cannot add sub-control because it is invalid');
}
this.controls[control.id] = control;
}
// Removes a child control from the control
// val: Either a controlId or a Control instance
removeControl(val) {
if (val instanceof Control) {
delete this.controls[val.id];
} else {
delete this.controls[val];
}
}
// Renders the control (and all its contained controls)
// data: The object that contains the data to substitute into the template
// eventObj: Event related data for the event that caused the control to render
render(data, eventObj) {
if (this.controls) {
const controlData = {};
for (let controlId in this.controls) {
const control = this.controls[controlId];
controlData[control.constructor.getConstructorName()] = {};
controlData[control.constructor.getConstructorName()][control.id] = control.render(data, eventObj);
}
for (let key in controlData) {
data[key] = controlData[key];
}
}
return mustache.render(this.template, data);
}
// This method is invoked so the control can bind events after the DOM has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdated(domContainerElement, eventObj) {
if (this.onDOMUpdatedNotification) { | this.onDOMUpdatedNotification(domContainerElement, eventObj);
}
if (this.controls) {
for (let controlId in this.controls) {
const control = this.controls[controlId];
control.onDOMUpdated(domContainerElement, eventObj);
}
}
}
// The Control classes that extend this type can add custom logic here to be executed after the domContainerElement
// has been updated
// domContainerElement: The DOM container element into which the control was rendered
// eventObj: Event related data for the event that caused the control to render
onDOMUpdatedNotification(domContainerElement, eventObj) {
}
}; | random_line_split |
|
ScriptBinding.py | """Extension to execute code outside the Python shell window.
This adds the following commands:
- Check module does a full syntax check of the current module.
It also runs the tabnanny to catch any inconsistent tabs.
- Run module executes the module's code in the __main__ namespace. The window
must have been saved previously. The module is added to sys.modules, and is
also added to the __main__ namespace.
XXX GvR Redesign this interface (yet again) as follows:
- Present a dialog box for ``Run Module''
- Allow specify command line arguments in the dialog box
"""
import os
import re
import string
import tabnanny
import tokenize
import tkMessageBox
from idlelib import PyShell
from idlelib.configHandler import idleConf
IDENTCHARS = string.ascii_letters + string.digits + "_"
indent_message = """Error: Inconsistent indentation detected!
1) Your indentation is outright incorrect (easy to fix), OR
2) Your indentation mixes tabs and spaces.
To fix case 2, change all tabs to spaces by using Edit->Select All followed \
by Format->Untabify Region and specify the number of columns used by each tab.
"""
class ScriptBinding:
menudefs = [
('run', [None,
('Check Module', '<<check-module>>'),
('Run Module', '<<run-module>>'), ]), ]
def __init__(self, editwin):
self.editwin = editwin
# Provide instance variables referenced by Debugger
# XXX This should be done differently
self.flist = self.editwin.flist
self.root = self.editwin.root
def check_module_event(self, event):
filename = self.getfilename()
if not filename:
return 'break'
if not self.checksyntax(filename):
return 'break'
if not self.tabnanny(filename):
return 'break'
def tabnanny(self, filename):
f = open(filename, 'r')
try:
tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
except tokenize.TokenError, msg:
msgtxt, (lineno, start) = msg
self.editwin.gotoline(lineno)
self.errorbox("Tabnanny Tokenizing Error",
"Token Error: %s" % msgtxt)
return False
except tabnanny.NannyNag, nag:
# The error messages from tabnanny are too confusing...
self.editwin.gotoline(nag.get_lineno())
self.errorbox("Tab/space error", indent_message)
return False
return True
def checksyntax(self, filename):
self.shell = shell = self.flist.open_shell()
saved_stream = shell.get_warning_stream()
shell.set_warning_stream(shell.stderr)
f = open(filename, 'r')
source = f.read()
f.close()
if '\r' in source:
source = re.sub(r"\r\n", "\n", source)
source = re.sub(r"\r", "\n", source)
if source and source[-1] != '\n':
source = source + '\n'
text = self.editwin.text
text.tag_remove("ERROR", "1.0", "end")
try:
try:
# If successful, return the compiled code
return compile(source, filename, "exec")
except (SyntaxError, OverflowError), err:
try:
msg, (errorfilename, lineno, offset, line) = err
if not errorfilename:
err.args = msg, (filename, lineno, offset, line)
err.filename = filename
self.colorize_syntax_error(msg, lineno, offset)
except:
msg = "*** " + str(err)
self.errorbox("Syntax error",
"There's an error in your program:\n" + msg)
return False
finally:
shell.set_warning_stream(saved_stream)
def colorize_syntax_error(self, msg, lineno, offset):
text = self.editwin.text
pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)
text.tag_add("ERROR", pos)
char = text.get(pos)
if char and char in IDENTCHARS:
text.tag_add("ERROR", pos + " wordstart", pos)
if '\n' == text.get(pos): # error at line end
text.mark_set("insert", pos)
else:
text.mark_set("insert", pos + "+1c")
text.see(pos)
def run_module_event(self, event):
"""Run the module after setting up the environment.
First check the syntax. If OK, make sure the shell is active and
then transfer the arguments, set the run environment's working
directory to the directory of the module being executed and also
add that directory to its sys.path if not already included.
"""
filename = self.getfilename()
if not filename:
return 'break'
code = self.checksyntax(filename)
if not code:
return 'break'
if not self.tabnanny(filename):
return 'break'
shell = self.shell
interp = shell.interp
if PyShell.use_subprocess:
shell.restart_shell()
dirname = os.path.dirname(filename)
# XXX Too often this discards arguments the user just set...
interp.runcommand("""if 1:
_filename = %r
import sys as _sys
from os.path import basename as _basename
if (not _sys.argv or
_basename(_sys.argv[0]) != _basename(_filename)):
_sys.argv = [_filename]
import os as _os
_os.chdir(%r)
del _filename, _sys, _basename, _os
\n""" % (filename, dirname))
interp.prepend_syspath(filename)
# XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
# go to __stderr__. With subprocess, they go to the shell.
# Need to change streams in PyShell.ModifiedInterpreter.
interp.runcode(code)
return 'break'
def getfilename(self):
"""Get source filename. If not saved, offer to save (or create) file
The debugger requires a source file. Make sure there is one, and that
the current version of the source buffer has been saved. If the user
declines to save or cancels the Save As dialog, return None.
If the user has configured IDLE for Autosave, the file will be
silently saved if it already exists and is dirty.
"""
filename = self.editwin.io.filename
if not self.editwin.get_saved():
|
return filename
def ask_save_dialog(self):
msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
mb = tkMessageBox.Message(title="Save Before Run or Check",
message=msg,
icon=tkMessageBox.QUESTION,
type=tkMessageBox.OKCANCEL,
default=tkMessageBox.OK,
master=self.editwin.text)
return mb.show()
def errorbox(self, title, message):
# XXX This should really be a function of EditorWindow...
tkMessageBox.showerror(title, message, master=self.editwin.text)
self.editwin.text.focus_set()
| autosave = idleConf.GetOption('main', 'General',
'autosave', type='bool')
if autosave and filename:
self.editwin.io.save(None)
else:
reply = self.ask_save_dialog()
self.editwin.text.focus_set()
if reply == "ok":
self.editwin.io.save(None)
filename = self.editwin.io.filename
else:
filename = None | conditional_block |
ScriptBinding.py | """Extension to execute code outside the Python shell window.
This adds the following commands:
- Check module does a full syntax check of the current module.
It also runs the tabnanny to catch any inconsistent tabs.
- Run module executes the module's code in the __main__ namespace. The window
must have been saved previously. The module is added to sys.modules, and is
also added to the __main__ namespace.
XXX GvR Redesign this interface (yet again) as follows:
- Present a dialog box for ``Run Module''
- Allow specify command line arguments in the dialog box
"""
import os
import re
import string
import tabnanny
import tokenize
import tkMessageBox
from idlelib import PyShell
from idlelib.configHandler import idleConf
IDENTCHARS = string.ascii_letters + string.digits + "_"
indent_message = """Error: Inconsistent indentation detected!
1) Your indentation is outright incorrect (easy to fix), OR
2) Your indentation mixes tabs and spaces.
To fix case 2, change all tabs to spaces by using Edit->Select All followed \
by Format->Untabify Region and specify the number of columns used by each tab.
"""
class ScriptBinding:
menudefs = [
('run', [None,
('Check Module', '<<check-module>>'),
('Run Module', '<<run-module>>'), ]), ]
def __init__(self, editwin):
self.editwin = editwin
# Provide instance variables referenced by Debugger
# XXX This should be done differently
self.flist = self.editwin.flist
self.root = self.editwin.root
def | (self, event):
filename = self.getfilename()
if not filename:
return 'break'
if not self.checksyntax(filename):
return 'break'
if not self.tabnanny(filename):
return 'break'
def tabnanny(self, filename):
f = open(filename, 'r')
try:
tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
except tokenize.TokenError, msg:
msgtxt, (lineno, start) = msg
self.editwin.gotoline(lineno)
self.errorbox("Tabnanny Tokenizing Error",
"Token Error: %s" % msgtxt)
return False
except tabnanny.NannyNag, nag:
# The error messages from tabnanny are too confusing...
self.editwin.gotoline(nag.get_lineno())
self.errorbox("Tab/space error", indent_message)
return False
return True
def checksyntax(self, filename):
self.shell = shell = self.flist.open_shell()
saved_stream = shell.get_warning_stream()
shell.set_warning_stream(shell.stderr)
f = open(filename, 'r')
source = f.read()
f.close()
if '\r' in source:
source = re.sub(r"\r\n", "\n", source)
source = re.sub(r"\r", "\n", source)
if source and source[-1] != '\n':
source = source + '\n'
text = self.editwin.text
text.tag_remove("ERROR", "1.0", "end")
try:
try:
# If successful, return the compiled code
return compile(source, filename, "exec")
except (SyntaxError, OverflowError), err:
try:
msg, (errorfilename, lineno, offset, line) = err
if not errorfilename:
err.args = msg, (filename, lineno, offset, line)
err.filename = filename
self.colorize_syntax_error(msg, lineno, offset)
except:
msg = "*** " + str(err)
self.errorbox("Syntax error",
"There's an error in your program:\n" + msg)
return False
finally:
shell.set_warning_stream(saved_stream)
def colorize_syntax_error(self, msg, lineno, offset):
text = self.editwin.text
pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)
text.tag_add("ERROR", pos)
char = text.get(pos)
if char and char in IDENTCHARS:
text.tag_add("ERROR", pos + " wordstart", pos)
if '\n' == text.get(pos): # error at line end
text.mark_set("insert", pos)
else:
text.mark_set("insert", pos + "+1c")
text.see(pos)
def run_module_event(self, event):
"""Run the module after setting up the environment.
First check the syntax. If OK, make sure the shell is active and
then transfer the arguments, set the run environment's working
directory to the directory of the module being executed and also
add that directory to its sys.path if not already included.
"""
filename = self.getfilename()
if not filename:
return 'break'
code = self.checksyntax(filename)
if not code:
return 'break'
if not self.tabnanny(filename):
return 'break'
shell = self.shell
interp = shell.interp
if PyShell.use_subprocess:
shell.restart_shell()
dirname = os.path.dirname(filename)
# XXX Too often this discards arguments the user just set...
interp.runcommand("""if 1:
_filename = %r
import sys as _sys
from os.path import basename as _basename
if (not _sys.argv or
_basename(_sys.argv[0]) != _basename(_filename)):
_sys.argv = [_filename]
import os as _os
_os.chdir(%r)
del _filename, _sys, _basename, _os
\n""" % (filename, dirname))
interp.prepend_syspath(filename)
# XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
# go to __stderr__. With subprocess, they go to the shell.
# Need to change streams in PyShell.ModifiedInterpreter.
interp.runcode(code)
return 'break'
def getfilename(self):
"""Get source filename. If not saved, offer to save (or create) file
The debugger requires a source file. Make sure there is one, and that
the current version of the source buffer has been saved. If the user
declines to save or cancels the Save As dialog, return None.
If the user has configured IDLE for Autosave, the file will be
silently saved if it already exists and is dirty.
"""
filename = self.editwin.io.filename
if not self.editwin.get_saved():
autosave = idleConf.GetOption('main', 'General',
'autosave', type='bool')
if autosave and filename:
self.editwin.io.save(None)
else:
reply = self.ask_save_dialog()
self.editwin.text.focus_set()
if reply == "ok":
self.editwin.io.save(None)
filename = self.editwin.io.filename
else:
filename = None
return filename
def ask_save_dialog(self):
msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
mb = tkMessageBox.Message(title="Save Before Run or Check",
message=msg,
icon=tkMessageBox.QUESTION,
type=tkMessageBox.OKCANCEL,
default=tkMessageBox.OK,
master=self.editwin.text)
return mb.show()
def errorbox(self, title, message):
# XXX This should really be a function of EditorWindow...
tkMessageBox.showerror(title, message, master=self.editwin.text)
self.editwin.text.focus_set()
| check_module_event | identifier_name |
ScriptBinding.py | """Extension to execute code outside the Python shell window.
This adds the following commands:
- Check module does a full syntax check of the current module.
It also runs the tabnanny to catch any inconsistent tabs.
- Run module executes the module's code in the __main__ namespace. The window
must have been saved previously. The module is added to sys.modules, and is
also added to the __main__ namespace.
XXX GvR Redesign this interface (yet again) as follows:
- Present a dialog box for ``Run Module''
- Allow specifying command line arguments in the dialog box
"""
import os
import re
import string
import tabnanny
import tokenize
import tkMessageBox
from idlelib import PyShell
from idlelib.configHandler import idleConf
IDENTCHARS = string.ascii_letters + string.digits + "_"
indent_message = """Error: Inconsistent indentation detected!
1) Your indentation is outright incorrect (easy to fix), OR
2) Your indentation mixes tabs and spaces.
To fix case 2, change all tabs to spaces by using Edit->Select All followed \
by Format->Untabify Region and specify the number of columns used by each tab.
"""
class ScriptBinding:
menudefs = [
('run', [None,
('Check Module', '<<check-module>>'),
('Run Module', '<<run-module>>'), ]), ]
def __init__(self, editwin):
self.editwin = editwin
# Provide instance variables referenced by Debugger
# XXX This should be done differently
self.flist = self.editwin.flist
self.root = self.editwin.root
def check_module_event(self, event):
filename = self.getfilename()
if not filename:
return 'break'
if not self.checksyntax(filename):
return 'break'
if not self.tabnanny(filename):
return 'break'
def tabnanny(self, filename):
f = open(filename, 'r')
try:
tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
except tokenize.TokenError, msg:
msgtxt, (lineno, start) = msg
self.editwin.gotoline(lineno)
self.errorbox("Tabnanny Tokenizing Error",
"Token Error: %s" % msgtxt)
return False
except tabnanny.NannyNag, nag:
# The error messages from tabnanny are too confusing...
self.editwin.gotoline(nag.get_lineno())
self.errorbox("Tab/space error", indent_message)
return False
return True
def checksyntax(self, filename):
self.shell = shell = self.flist.open_shell()
saved_stream = shell.get_warning_stream()
shell.set_warning_stream(shell.stderr)
f = open(filename, 'r')
source = f.read()
f.close()
if '\r' in source:
source = re.sub(r"\r\n", "\n", source)
source = re.sub(r"\r", "\n", source)
if source and source[-1] != '\n':
source = source + '\n'
text = self.editwin.text
text.tag_remove("ERROR", "1.0", "end")
try:
try:
# If successful, return the compiled code
return compile(source, filename, "exec")
except (SyntaxError, OverflowError), err:
try:
msg, (errorfilename, lineno, offset, line) = err
if not errorfilename:
err.args = msg, (filename, lineno, offset, line)
| err.filename = filename
self.colorize_syntax_error(msg, lineno, offset)
except:
msg = "*** " + str(err)
self.errorbox("Syntax error",
"There's an error in your program:\n" + msg)
return False
finally:
shell.set_warning_stream(saved_stream)
def colorize_syntax_error(self, msg, lineno, offset):
text = self.editwin.text
pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)
text.tag_add("ERROR", pos)
char = text.get(pos)
if char and char in IDENTCHARS:
text.tag_add("ERROR", pos + " wordstart", pos)
if '\n' == text.get(pos): # error at line end
text.mark_set("insert", pos)
else:
text.mark_set("insert", pos + "+1c")
text.see(pos)
def run_module_event(self, event):
"""Run the module after setting up the environment.
First check the syntax. If OK, make sure the shell is active and
then transfer the arguments, set the run environment's working
directory to the directory of the module being executed and also
add that directory to its sys.path if not already included.
"""
filename = self.getfilename()
if not filename:
return 'break'
code = self.checksyntax(filename)
if not code:
return 'break'
if not self.tabnanny(filename):
return 'break'
shell = self.shell
interp = shell.interp
if PyShell.use_subprocess:
shell.restart_shell()
dirname = os.path.dirname(filename)
# XXX Too often this discards arguments the user just set...
interp.runcommand("""if 1:
_filename = %r
import sys as _sys
from os.path import basename as _basename
if (not _sys.argv or
_basename(_sys.argv[0]) != _basename(_filename)):
_sys.argv = [_filename]
import os as _os
_os.chdir(%r)
del _filename, _sys, _basename, _os
\n""" % (filename, dirname))
interp.prepend_syspath(filename)
# XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
# go to __stderr__. With subprocess, they go to the shell.
# Need to change streams in PyShell.ModifiedInterpreter.
interp.runcode(code)
return 'break'
def getfilename(self):
"""Get source filename. If not saved, offer to save (or create) file
The debugger requires a source file. Make sure there is one, and that
the current version of the source buffer has been saved. If the user
declines to save or cancels the Save As dialog, return None.
If the user has configured IDLE for Autosave, the file will be
silently saved if it already exists and is dirty.
"""
filename = self.editwin.io.filename
if not self.editwin.get_saved():
autosave = idleConf.GetOption('main', 'General',
'autosave', type='bool')
if autosave and filename:
self.editwin.io.save(None)
else:
reply = self.ask_save_dialog()
self.editwin.text.focus_set()
if reply == "ok":
self.editwin.io.save(None)
filename = self.editwin.io.filename
else:
filename = None
return filename
def ask_save_dialog(self):
msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
mb = tkMessageBox.Message(title="Save Before Run or Check",
message=msg,
icon=tkMessageBox.QUESTION,
type=tkMessageBox.OKCANCEL,
default=tkMessageBox.OK,
master=self.editwin.text)
return mb.show()
def errorbox(self, title, message):
# XXX This should really be a function of EditorWindow...
tkMessageBox.showerror(title, message, master=self.editwin.text)
self.editwin.text.focus_set() | random_line_split |
|
ScriptBinding.py | """Extension to execute code outside the Python shell window.
This adds the following commands:
- Check module does a full syntax check of the current module.
It also runs the tabnanny to catch any inconsistent tabs.
- Run module executes the module's code in the __main__ namespace. The window
must have been saved previously. The module is added to sys.modules, and is
also added to the __main__ namespace.
XXX GvR Redesign this interface (yet again) as follows:
- Present a dialog box for ``Run Module''
- Allow specifying command line arguments in the dialog box
"""
import os
import re
import string
import tabnanny
import tokenize
import tkMessageBox
from idlelib import PyShell
from idlelib.configHandler import idleConf
IDENTCHARS = string.ascii_letters + string.digits + "_"
indent_message = """Error: Inconsistent indentation detected!
1) Your indentation is outright incorrect (easy to fix), OR
2) Your indentation mixes tabs and spaces.
To fix case 2, change all tabs to spaces by using Edit->Select All followed \
by Format->Untabify Region and specify the number of columns used by each tab.
"""
class ScriptBinding:
menudefs = [
('run', [None,
('Check Module', '<<check-module>>'),
('Run Module', '<<run-module>>'), ]), ]
def __init__(self, editwin):
self.editwin = editwin
# Provide instance variables referenced by Debugger
# XXX This should be done differently
self.flist = self.editwin.flist
self.root = self.editwin.root
def check_module_event(self, event):
filename = self.getfilename()
if not filename:
return 'break'
if not self.checksyntax(filename):
return 'break'
if not self.tabnanny(filename):
return 'break'
def tabnanny(self, filename):
f = open(filename, 'r')
try:
tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
except tokenize.TokenError, msg:
msgtxt, (lineno, start) = msg
self.editwin.gotoline(lineno)
self.errorbox("Tabnanny Tokenizing Error",
"Token Error: %s" % msgtxt)
return False
except tabnanny.NannyNag, nag:
# The error messages from tabnanny are too confusing...
self.editwin.gotoline(nag.get_lineno())
self.errorbox("Tab/space error", indent_message)
return False
return True
def checksyntax(self, filename):
| if not errorfilename:
err.args = msg, (filename, lineno, offset, line)
err.filename = filename
self.colorize_syntax_error(msg, lineno, offset)
except:
msg = "*** " + str(err)
self.errorbox("Syntax error",
"There's an error in your program:\n" + msg)
return False
finally:
shell.set_warning_stream(saved_stream)
def colorize_syntax_error(self, msg, lineno, offset):
text = self.editwin.text
pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)
text.tag_add("ERROR", pos)
char = text.get(pos)
if char and char in IDENTCHARS:
text.tag_add("ERROR", pos + " wordstart", pos)
if '\n' == text.get(pos): # error at line end
text.mark_set("insert", pos)
else:
text.mark_set("insert", pos + "+1c")
text.see(pos)
def run_module_event(self, event):
"""Run the module after setting up the environment.
First check the syntax. If OK, make sure the shell is active and
then transfer the arguments, set the run environment's working
directory to the directory of the module being executed and also
add that directory to its sys.path if not already included.
"""
filename = self.getfilename()
if not filename:
return 'break'
code = self.checksyntax(filename)
if not code:
return 'break'
if not self.tabnanny(filename):
return 'break'
shell = self.shell
interp = shell.interp
if PyShell.use_subprocess:
shell.restart_shell()
dirname = os.path.dirname(filename)
# XXX Too often this discards arguments the user just set...
interp.runcommand("""if 1:
_filename = %r
import sys as _sys
from os.path import basename as _basename
if (not _sys.argv or
_basename(_sys.argv[0]) != _basename(_filename)):
_sys.argv = [_filename]
import os as _os
_os.chdir(%r)
del _filename, _sys, _basename, _os
\n""" % (filename, dirname))
interp.prepend_syspath(filename)
# XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
# go to __stderr__. With subprocess, they go to the shell.
# Need to change streams in PyShell.ModifiedInterpreter.
interp.runcode(code)
return 'break'
def getfilename(self):
"""Get source filename. If not saved, offer to save (or create) file
The debugger requires a source file. Make sure there is one, and that
the current version of the source buffer has been saved. If the user
declines to save or cancels the Save As dialog, return None.
If the user has configured IDLE for Autosave, the file will be
silently saved if it already exists and is dirty.
"""
filename = self.editwin.io.filename
if not self.editwin.get_saved():
autosave = idleConf.GetOption('main', 'General',
'autosave', type='bool')
if autosave and filename:
self.editwin.io.save(None)
else:
reply = self.ask_save_dialog()
self.editwin.text.focus_set()
if reply == "ok":
self.editwin.io.save(None)
filename = self.editwin.io.filename
else:
filename = None
return filename
def ask_save_dialog(self):
msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
mb = tkMessageBox.Message(title="Save Before Run or Check",
message=msg,
icon=tkMessageBox.QUESTION,
type=tkMessageBox.OKCANCEL,
default=tkMessageBox.OK,
master=self.editwin.text)
return mb.show()
def errorbox(self, title, message):
# XXX This should really be a function of EditorWindow...
tkMessageBox.showerror(title, message, master=self.editwin.text)
self.editwin.text.focus_set()
| self.shell = shell = self.flist.open_shell()
saved_stream = shell.get_warning_stream()
shell.set_warning_stream(shell.stderr)
f = open(filename, 'r')
source = f.read()
f.close()
if '\r' in source:
source = re.sub(r"\r\n", "\n", source)
source = re.sub(r"\r", "\n", source)
if source and source[-1] != '\n':
source = source + '\n'
text = self.editwin.text
text.tag_remove("ERROR", "1.0", "end")
try:
try:
# If successful, return the compiled code
return compile(source, filename, "exec")
except (SyntaxError, OverflowError), err:
try:
msg, (errorfilename, lineno, offset, line) = err
| identifier_body |
test_check_topic_list_on_zk_restart.py | import logging
import pytest
import sdk_cmd
import sdk_install
import sdk_plan
import sdk_security
import sdk_utils
from tests import config
from tests import test_utils
LOG = logging.getLogger(__name__)
@pytest.fixture(scope="module", autouse=True)
def zookeeper_server(configure_security):
service_options = {
"service": {"name": config.ZOOKEEPER_SERVICE_NAME, "virtual_network_enabled": True}
}
zk_account = "test-zookeeper-service-account"
zk_secret = "test-zookeeper-secret"
try:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
service_options = sdk_utils.merge_dictionaries(
{"service": {"service_account": zk_account, "service_account_secret": zk_secret}},
service_options,
)
sdk_security.setup_security(
config.ZOOKEEPER_SERVICE_NAME,
service_account=zk_account,
service_account_secret=zk_secret,
)
sdk_install.install(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
config.ZOOKEEPER_TASK_COUNT,
package_version=config.ZOOKEEPER_PACKAGE_VERSION,
additional_options=service_options,
timeout_seconds=30 * 60,
insert_strict_options=False,
)
yield {**service_options, **{"package_name": config.ZOOKEEPER_PACKAGE_NAME}}
finally:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
sdk_security.delete_service_account(
service_account_name=zk_account, service_account_secret=zk_secret
)
@pytest.fixture(scope="module", autouse=True)
def kafka_server(zookeeper_server):
try:
# Get the zookeeper DNS values
zookeeper_dns = sdk_cmd.svc_cli(
zookeeper_server["package_name"],
zookeeper_server["service"]["name"],
"endpoint clientport",
parse_json=True,
)[1]["dns"]
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
config.install(
config.PACKAGE_NAME,
config.SERVICE_NAME,
config.DEFAULT_BROKER_COUNT,
additional_options={"kafka": {"kafka_zookeeper_uri": ",".join(zookeeper_dns)}},
)
# wait for brokers to finish registering before starting tests
test_utils.broker_count_check(config.DEFAULT_BROKER_COUNT, service_name=config.SERVICE_NAME)
yield {"package_name": config.PACKAGE_NAME, "service": {"name": config.SERVICE_NAME}}
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def topic_create(kafka_server: dict):
return test_utils.create_topic(config.EPHEMERAL_TOPIC_NAME, kafka_server["service"]["name"])
def fetch_topic(kafka_server: dict):
_, topic_list, _ = sdk_cmd.svc_cli(
config.PACKAGE_NAME, kafka_server["service"]["name"], "topic list", parse_json=True
)
return topic_list
def restart_zookeeper_node(id: int, kafka_server: dict):
sdk_cmd.svc_cli(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
"pod restart zookeeper-{}".format(id),
)
sdk_plan.wait_for_completed_recovery(config.ZOOKEEPER_SERVICE_NAME)
@pytest.mark.sanity
@pytest.mark.zookeeper
def | (kafka_server: dict):
topic_create(kafka_server)
topic_list_before = fetch_topic(kafka_server)
for id in range(0, int(config.ZOOKEEPER_TASK_COUNT / 2)):
restart_zookeeper_node(id, kafka_server)
topic_list_after = fetch_topic(kafka_server)
assert topic_list_before == topic_list_after
| test_check_topic_list_on_zk_restart | identifier_name |
test_check_topic_list_on_zk_restart.py | import logging
import pytest
import sdk_cmd
import sdk_install
import sdk_plan
import sdk_security
import sdk_utils
from tests import config
from tests import test_utils
LOG = logging.getLogger(__name__)
@pytest.fixture(scope="module", autouse=True)
def zookeeper_server(configure_security):
service_options = {
"service": {"name": config.ZOOKEEPER_SERVICE_NAME, "virtual_network_enabled": True}
}
zk_account = "test-zookeeper-service-account"
zk_secret = "test-zookeeper-secret"
try:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
service_options = sdk_utils.merge_dictionaries(
{"service": {"service_account": zk_account, "service_account_secret": zk_secret}},
service_options,
)
sdk_security.setup_security(
config.ZOOKEEPER_SERVICE_NAME,
service_account=zk_account,
service_account_secret=zk_secret,
)
sdk_install.install(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
config.ZOOKEEPER_TASK_COUNT,
package_version=config.ZOOKEEPER_PACKAGE_VERSION,
additional_options=service_options,
timeout_seconds=30 * 60,
insert_strict_options=False,
)
yield {**service_options, **{"package_name": config.ZOOKEEPER_PACKAGE_NAME}}
finally:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
sdk_security.delete_service_account(
service_account_name=zk_account, service_account_secret=zk_secret
)
@pytest.fixture(scope="module", autouse=True)
def kafka_server(zookeeper_server):
try:
# Get the zookeeper DNS values
zookeeper_dns = sdk_cmd.svc_cli(
zookeeper_server["package_name"],
zookeeper_server["service"]["name"],
"endpoint clientport",
parse_json=True,
)[1]["dns"]
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
config.install(
config.PACKAGE_NAME,
config.SERVICE_NAME,
config.DEFAULT_BROKER_COUNT,
additional_options={"kafka": {"kafka_zookeeper_uri": ",".join(zookeeper_dns)}},
)
# wait for brokers to finish registering before starting tests
test_utils.broker_count_check(config.DEFAULT_BROKER_COUNT, service_name=config.SERVICE_NAME)
yield {"package_name": config.PACKAGE_NAME, "service": {"name": config.SERVICE_NAME}}
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def topic_create(kafka_server: dict):
return test_utils.create_topic(config.EPHEMERAL_TOPIC_NAME, kafka_server["service"]["name"])
def fetch_topic(kafka_server: dict):
|
def restart_zookeeper_node(id: int, kafka_server: dict):
sdk_cmd.svc_cli(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
"pod restart zookeeper-{}".format(id),
)
sdk_plan.wait_for_completed_recovery(config.ZOOKEEPER_SERVICE_NAME)
@pytest.mark.sanity
@pytest.mark.zookeeper
def test_check_topic_list_on_zk_restart(kafka_server: dict):
topic_create(kafka_server)
topic_list_before = fetch_topic(kafka_server)
for id in range(0, int(config.ZOOKEEPER_TASK_COUNT / 2)):
restart_zookeeper_node(id, kafka_server)
topic_list_after = fetch_topic(kafka_server)
assert topic_list_before == topic_list_after
| _, topic_list, _ = sdk_cmd.svc_cli(
config.PACKAGE_NAME, kafka_server["service"]["name"], "topic list", parse_json=True
)
return topic_list | identifier_body |
test_check_topic_list_on_zk_restart.py | import logging
import pytest
import sdk_cmd
import sdk_install
import sdk_plan
import sdk_security
import sdk_utils
from tests import config
from tests import test_utils
| LOG = logging.getLogger(__name__)
@pytest.fixture(scope="module", autouse=True)
def zookeeper_server(configure_security):
service_options = {
"service": {"name": config.ZOOKEEPER_SERVICE_NAME, "virtual_network_enabled": True}
}
zk_account = "test-zookeeper-service-account"
zk_secret = "test-zookeeper-secret"
try:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
service_options = sdk_utils.merge_dictionaries(
{"service": {"service_account": zk_account, "service_account_secret": zk_secret}},
service_options,
)
sdk_security.setup_security(
config.ZOOKEEPER_SERVICE_NAME,
service_account=zk_account,
service_account_secret=zk_secret,
)
sdk_install.install(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
config.ZOOKEEPER_TASK_COUNT,
package_version=config.ZOOKEEPER_PACKAGE_VERSION,
additional_options=service_options,
timeout_seconds=30 * 60,
insert_strict_options=False,
)
yield {**service_options, **{"package_name": config.ZOOKEEPER_PACKAGE_NAME}}
finally:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
sdk_security.delete_service_account(
service_account_name=zk_account, service_account_secret=zk_secret
)
@pytest.fixture(scope="module", autouse=True)
def kafka_server(zookeeper_server):
try:
# Get the zookeeper DNS values
zookeeper_dns = sdk_cmd.svc_cli(
zookeeper_server["package_name"],
zookeeper_server["service"]["name"],
"endpoint clientport",
parse_json=True,
)[1]["dns"]
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
config.install(
config.PACKAGE_NAME,
config.SERVICE_NAME,
config.DEFAULT_BROKER_COUNT,
additional_options={"kafka": {"kafka_zookeeper_uri": ",".join(zookeeper_dns)}},
)
# wait for brokers to finish registering before starting tests
test_utils.broker_count_check(config.DEFAULT_BROKER_COUNT, service_name=config.SERVICE_NAME)
yield {"package_name": config.PACKAGE_NAME, "service": {"name": config.SERVICE_NAME}}
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def topic_create(kafka_server: dict):
return test_utils.create_topic(config.EPHEMERAL_TOPIC_NAME, kafka_server["service"]["name"])
def fetch_topic(kafka_server: dict):
_, topic_list, _ = sdk_cmd.svc_cli(
config.PACKAGE_NAME, kafka_server["service"]["name"], "topic list", parse_json=True
)
return topic_list
def restart_zookeeper_node(id: int, kafka_server: dict):
sdk_cmd.svc_cli(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
"pod restart zookeeper-{}".format(id),
)
sdk_plan.wait_for_completed_recovery(config.ZOOKEEPER_SERVICE_NAME)
@pytest.mark.sanity
@pytest.mark.zookeeper
def test_check_topic_list_on_zk_restart(kafka_server: dict):
topic_create(kafka_server)
topic_list_before = fetch_topic(kafka_server)
for id in range(0, int(config.ZOOKEEPER_TASK_COUNT / 2)):
restart_zookeeper_node(id, kafka_server)
topic_list_after = fetch_topic(kafka_server)
assert topic_list_before == topic_list_after | random_line_split |
|
test_check_topic_list_on_zk_restart.py | import logging
import pytest
import sdk_cmd
import sdk_install
import sdk_plan
import sdk_security
import sdk_utils
from tests import config
from tests import test_utils
LOG = logging.getLogger(__name__)
@pytest.fixture(scope="module", autouse=True)
def zookeeper_server(configure_security):
service_options = {
"service": {"name": config.ZOOKEEPER_SERVICE_NAME, "virtual_network_enabled": True}
}
zk_account = "test-zookeeper-service-account"
zk_secret = "test-zookeeper-secret"
try:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
|
sdk_install.install(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
config.ZOOKEEPER_TASK_COUNT,
package_version=config.ZOOKEEPER_PACKAGE_VERSION,
additional_options=service_options,
timeout_seconds=30 * 60,
insert_strict_options=False,
)
yield {**service_options, **{"package_name": config.ZOOKEEPER_PACKAGE_NAME}}
finally:
sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
if sdk_utils.is_strict_mode():
sdk_security.delete_service_account(
service_account_name=zk_account, service_account_secret=zk_secret
)
@pytest.fixture(scope="module", autouse=True)
def kafka_server(zookeeper_server):
try:
# Get the zookeeper DNS values
zookeeper_dns = sdk_cmd.svc_cli(
zookeeper_server["package_name"],
zookeeper_server["service"]["name"],
"endpoint clientport",
parse_json=True,
)[1]["dns"]
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
config.install(
config.PACKAGE_NAME,
config.SERVICE_NAME,
config.DEFAULT_BROKER_COUNT,
additional_options={"kafka": {"kafka_zookeeper_uri": ",".join(zookeeper_dns)}},
)
# wait for brokers to finish registering before starting tests
test_utils.broker_count_check(config.DEFAULT_BROKER_COUNT, service_name=config.SERVICE_NAME)
yield {"package_name": config.PACKAGE_NAME, "service": {"name": config.SERVICE_NAME}}
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def topic_create(kafka_server: dict):
return test_utils.create_topic(config.EPHEMERAL_TOPIC_NAME, kafka_server["service"]["name"])
def fetch_topic(kafka_server: dict):
_, topic_list, _ = sdk_cmd.svc_cli(
config.PACKAGE_NAME, kafka_server["service"]["name"], "topic list", parse_json=True
)
return topic_list
def restart_zookeeper_node(id: int, kafka_server: dict):
sdk_cmd.svc_cli(
config.ZOOKEEPER_PACKAGE_NAME,
config.ZOOKEEPER_SERVICE_NAME,
"pod restart zookeeper-{}".format(id),
)
sdk_plan.wait_for_completed_recovery(config.ZOOKEEPER_SERVICE_NAME)
@pytest.mark.sanity
@pytest.mark.zookeeper
def test_check_topic_list_on_zk_restart(kafka_server: dict):
topic_create(kafka_server)
topic_list_before = fetch_topic(kafka_server)
for id in range(0, int(config.ZOOKEEPER_TASK_COUNT / 2)):
restart_zookeeper_node(id, kafka_server)
topic_list_after = fetch_topic(kafka_server)
assert topic_list_before == topic_list_after
| service_options = sdk_utils.merge_dictionaries(
{"service": {"service_account": zk_account, "service_account_secret": zk_secret}},
service_options,
)
sdk_security.setup_security(
config.ZOOKEEPER_SERVICE_NAME,
service_account=zk_account,
service_account_secret=zk_secret,
) | conditional_block |
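The four test_check_topic_list_on_zk_restart.py samples above hang together through module-scoped pytest fixtures: each fixture installs a service, yields its details to the tests, and uninstalls it in the finally branch. A stripped-down sketch of that fixture pattern with the dcos-commons sdk_* helpers replaced by stand-in functions (the stand-ins are illustrative, not real APIs):

import pytest

def install_service(name):        # stand-in for sdk_install.install(...)
    return {"service": {"name": name}}

def uninstall_service(name):      # stand-in for sdk_install.uninstall(...)
    pass

@pytest.fixture(scope="module")
def kafka_service():
    try:
        info = install_service("demo-kafka")
        yield info                # tests run while the service stays installed
    finally:
        uninstall_service("demo-kafka")

def test_service_name(kafka_service):
    assert kafka_service["service"]["name"] == "demo-kafka"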
Charting.py | # Copyright (c) 2011 Nokia
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
import Common
from common import Log
# Try to import matplotlib for charting
try:
import matplotlib
matplotlib.use("Agg")
import pylab
except ImportError, e:
matplotlib = None
pylab = None
Log.warn("Matplotlib or one of its dependencies not found (%s). Charts will not be generated." % e)
def slicePlot(x, y, sliceLength = 100, style = "line", *args, **kwargs):
assert len(x) == len(y)
if style == "line":
plotFunc = pylab.plot
elif style == "bar":
plotFunc = pylab.bar
else:
raise RuntimeError("Unknown plotting style: %s" % style)
if len(x) < sliceLength:
plotFunc(x, y, *args, **kwargs)
return
slices = int(len(x) / sliceLength)
pylab.figure(figsize = (8, slices * 1))
for i in range(slices):
pylab.subplot(slices, 1, i + 1)
plotFunc(x[i * sliceLength: (i + 1) * sliceLength], y[i * sliceLength: (i + 1) * sliceLength], *args, **kwargs) | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
| random_line_split |
Charting.py | # Copyright (c) 2011 Nokia
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import Common
from common import Log
# Try to import matplotlib for charting
try:
import matplotlib
matplotlib.use("Agg")
import pylab
except ImportError, e:
matplotlib = None
pylab = None
Log.warn("Matplotlib or one of its dependencies not found (%s). Charts will not be generated." % e)
def slicePlot(x, y, sliceLength = 100, style = "line", *args, **kwargs):
| assert len(x) == len(y)
if style == "line":
plotFunc = pylab.plot
elif style == "bar":
plotFunc = pylab.bar
else:
raise RuntimeError("Unknown plotting style: %s" % style)
if len(x) < sliceLength:
plotFunc(x, y, *args, **kwargs)
return
slices = int(len(x) / sliceLength)
pylab.figure(figsize = (8, slices * 1))
for i in range(slices):
pylab.subplot(slices, 1, i + 1)
plotFunc(x[i * sliceLength: (i + 1) * sliceLength], y[i * sliceLength: (i + 1) * sliceLength], *args, **kwargs) | identifier_body |
|
Charting.py | # Copyright (c) 2011 Nokia
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import Common
from common import Log
# Try to import matplotlib for charting
try:
import matplotlib
matplotlib.use("Agg")
import pylab
except ImportError, e:
matplotlib = None
pylab = None
Log.warn("Matplotlib or one of its dependencies not found (%s). Charts will not be generated." % e)
def | (x, y, sliceLength = 100, style = "line", *args, **kwargs):
assert len(x) == len(y)
if style == "line":
plotFunc = pylab.plot
elif style == "bar":
plotFunc = pylab.bar
else:
raise RuntimeError("Unknown plotting style: %s" % style)
if len(x) < sliceLength:
plotFunc(x, y, *args, **kwargs)
return
slices = int(len(x) / sliceLength)
pylab.figure(figsize = (8, slices * 1))
for i in range(slices):
pylab.subplot(slices, 1, i + 1)
plotFunc(x[i * sliceLength: (i + 1) * sliceLength], y[i * sliceLength: (i + 1) * sliceLength], *args, **kwargs)
| slicePlot | identifier_name |
Charting.py | # Copyright (c) 2011 Nokia
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import Common
from common import Log
# Try to import matplotlib for charting
try:
import matplotlib
matplotlib.use("Agg")
import pylab
except ImportError, e:
matplotlib = None
pylab = None
Log.warn("Matplotlib or one of its dependencies not found (%s). Charts will not be generated." % e)
def slicePlot(x, y, sliceLength = 100, style = "line", *args, **kwargs):
assert len(x) == len(y)
if style == "line":
plotFunc = pylab.plot
elif style == "bar":
plotFunc = pylab.bar
else:
raise RuntimeError("Unknown plotting style: %s" % style)
if len(x) < sliceLength:
|
slices = int(len(x) / sliceLength)
pylab.figure(figsize = (8, slices * 1))
for i in range(slices):
pylab.subplot(slices, 1, i + 1)
plotFunc(x[i * sliceLength: (i + 1) * sliceLength], y[i * sliceLength: (i + 1) * sliceLength], *args, **kwargs)
| plotFunc(x, y, *args, **kwargs)
return | conditional_block |
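slicePlot above draws a long series as a stack of subplots, one per full sliceLength-sized window (a trailing remainder shorter than sliceLength is dropped by the integer division). A small driver sketch, assuming slicePlot and its pylab import are in scope and matplotlib is installed; the sample data and output filename are invented:

import numpy as np
import pylab

x = np.arange(250)                 # 250 samples -> two full 100-sample slices
y = np.sin(x / 10.0)

slicePlot(x, y, sliceLength=100, style="line", color="b")
pylab.savefig("slices.png")        # the module forces the Agg backend, so save to a file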
MobilePromptSample.tsx | /**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React, {useState} from 'react'; |
import {ReactComponent as CelebrateSvg} from 'assets/celebrate.svg';
/**
* MobilePromptSample that shows how a mobile prompt can be used.
*/
const MobilePromptSample: React.FC = () => {
const [isVisible, setIsVisible] = useState(false);
const onClickOpen = () => setIsVisible(true);
const onClickClose = () => setIsVisible(false);
return (
<>
<Button onClick={onClickOpen} color="primary">
Open Prompt
</Button>
<MobilePrompt
isVisible={isVisible}
title="You have successfully opened a prompt!"
header={<CelebrateSvg />}
buttons={[
{
name: 'Dismiss',
onClick: onClickClose,
},
]}
>
Some sample body text.
</MobilePrompt>
</>
);
};
export default MobilePromptSample; |
import MobilePrompt from 'components/common/MobilePrompt';
import Button from 'muicss/lib/react/button'; | random_line_split |
broken.rs | // Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use capnp::{any_pointer};
use capnp::Error;
use capnp::private::capability::{ClientHook, ParamsHook, PipelineHook, PipelineOp,
RequestHook, ResultsHook};
use capnp::capability::{Promise, RemotePromise};
use std::rc::{Rc};
pub struct Pipeline {
error: Error,
}
impl Pipeline {
pub fn new(error: Error) -> Pipeline {
Pipeline {
error: error
}
}
}
impl PipelineHook for Pipeline {
fn add_ref(&self) -> Box<dyn PipelineHook> {
Box::new(Pipeline::new(self.error.clone()))
}
fn get_pipelined_cap(&self, _ops: &[PipelineOp]) -> Box<dyn ClientHook> {
new_cap(self.error.clone())
}
}
pub struct | {
error: Error,
message: ::capnp::message::Builder<::capnp::message::HeapAllocator>,
}
impl Request {
pub fn new(error: Error, _size_hint: Option<::capnp::MessageSize>) -> Request {
Request {
error: error,
message: ::capnp::message::Builder::new_default(),
}
}
}
impl RequestHook for Request {
fn get<'a>(&'a mut self) -> any_pointer::Builder<'a> {
self.message.get_root().unwrap()
}
fn get_brand(&self) -> usize {
0
}
fn send<'a>(self: Box<Self>) -> RemotePromise<any_pointer::Owned> {
let pipeline = Pipeline::new(self.error.clone());
RemotePromise {
promise: Promise::err(self.error),
pipeline: any_pointer::Pipeline::new(Box::new(pipeline)),
}
}
fn tail_send(self: Box<Self>)
-> Option<(u32, Promise<(), Error>, Box<dyn PipelineHook>)>
{
None
}
}
struct ClientInner {
error: Error,
_resolved: bool,
brand: usize,
}
pub struct Client {
inner: Rc<ClientInner>,
}
impl Client {
pub fn new(error: Error, resolved: bool, brand: usize) -> Client {
Client {
inner: Rc::new(ClientInner {
error: error,
_resolved: resolved,
brand: brand,
}),
}
}
}
impl ClientHook for Client {
fn add_ref(&self) -> Box<dyn ClientHook> {
Box::new(Client { inner: self.inner.clone() } )
}
fn new_call(&self, _interface_id: u64, _method_id: u16,
size_hint: Option<::capnp::MessageSize>)
-> ::capnp::capability::Request<any_pointer::Owned, any_pointer::Owned>
{
::capnp::capability::Request::new(
Box::new(Request::new(self.inner.error.clone(), size_hint)))
}
fn call(&self, _interface_id: u64, _method_id: u16, _params: Box<dyn ParamsHook>, _results: Box<dyn ResultsHook>)
-> Promise<(), Error>
{
Promise::err(self.inner.error.clone())
}
fn get_ptr(&self) -> usize {
(self.inner.as_ref()) as * const _ as usize
}
fn get_brand(&self) -> usize {
self.inner.brand
}
fn get_resolved(&self) -> Option<Box<dyn ClientHook>> {
None
}
fn when_more_resolved(&self) -> Option<Promise<Box<dyn ClientHook>, Error>> {
None
}
}
pub fn new_cap(exception: Error) -> Box<dyn ClientHook> {
Box::new(Client::new(exception, false, 0))
}
| Request | identifier_name |
broken.rs | // Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use capnp::{any_pointer};
use capnp::Error;
use capnp::private::capability::{ClientHook, ParamsHook, PipelineHook, PipelineOp,
RequestHook, ResultsHook};
use capnp::capability::{Promise, RemotePromise};
use std::rc::{Rc};
pub struct Pipeline {
error: Error,
}
impl Pipeline {
pub fn new(error: Error) -> Pipeline {
Pipeline {
error: error
}
}
}
impl PipelineHook for Pipeline {
fn add_ref(&self) -> Box<dyn PipelineHook> {
Box::new(Pipeline::new(self.error.clone()))
}
fn get_pipelined_cap(&self, _ops: &[PipelineOp]) -> Box<dyn ClientHook> {
new_cap(self.error.clone())
}
}
pub struct Request {
error: Error,
message: ::capnp::message::Builder<::capnp::message::HeapAllocator>,
}
impl Request {
pub fn new(error: Error, _size_hint: Option<::capnp::MessageSize>) -> Request {
Request {
error: error, | }
}
}
impl RequestHook for Request {
fn get<'a>(&'a mut self) -> any_pointer::Builder<'a> {
self.message.get_root().unwrap()
}
fn get_brand(&self) -> usize {
0
}
fn send<'a>(self: Box<Self>) -> RemotePromise<any_pointer::Owned> {
let pipeline = Pipeline::new(self.error.clone());
RemotePromise {
promise: Promise::err(self.error),
pipeline: any_pointer::Pipeline::new(Box::new(pipeline)),
}
}
fn tail_send(self: Box<Self>)
-> Option<(u32, Promise<(), Error>, Box<dyn PipelineHook>)>
{
None
}
}
struct ClientInner {
error: Error,
_resolved: bool,
brand: usize,
}
pub struct Client {
inner: Rc<ClientInner>,
}
impl Client {
pub fn new(error: Error, resolved: bool, brand: usize) -> Client {
Client {
inner: Rc::new(ClientInner {
error: error,
_resolved: resolved,
brand: brand,
}),
}
}
}
impl ClientHook for Client {
fn add_ref(&self) -> Box<dyn ClientHook> {
Box::new(Client { inner: self.inner.clone() } )
}
fn new_call(&self, _interface_id: u64, _method_id: u16,
size_hint: Option<::capnp::MessageSize>)
-> ::capnp::capability::Request<any_pointer::Owned, any_pointer::Owned>
{
::capnp::capability::Request::new(
Box::new(Request::new(self.inner.error.clone(), size_hint)))
}
fn call(&self, _interface_id: u64, _method_id: u16, _params: Box<dyn ParamsHook>, _results: Box<dyn ResultsHook>)
-> Promise<(), Error>
{
Promise::err(self.inner.error.clone())
}
fn get_ptr(&self) -> usize {
(self.inner.as_ref()) as * const _ as usize
}
fn get_brand(&self) -> usize {
self.inner.brand
}
fn get_resolved(&self) -> Option<Box<dyn ClientHook>> {
None
}
fn when_more_resolved(&self) -> Option<Promise<Box<dyn ClientHook>, Error>> {
None
}
}
pub fn new_cap(exception: Error) -> Box<dyn ClientHook> {
Box::new(Client::new(exception, false, 0))
} | message: ::capnp::message::Builder::new_default(), | random_line_split |
store.ts | import {BaseObject} from "../lib/object";
import * as trusted from "../lib/trusted";
export interface IPkiItem extends IPkiCrl, IPkiCertificate, IPkiRequest, IPkiKey{
/**
* DER | PEM
*/
format: string;
/**
* CRL | CERTIFICATE | KEY | REQUEST
*/
type: string;
uri: string;
provider: string;
categoty: string;
hash: string;
}
export interface IPkiKey{
encrypted?: boolean;
hash: string;
}
export interface IPkiCrl {
authorityKeyid?: string;
crlNumber?: string;
issuerName?: string;
issuerFriendlyName?: string;
lastUpdate?: Date;
nextUpdate?: Date;
hash: string; // thumbprint SHA1
}
export interface IPkiRequest{
subjectName?: string;
subjectFriendlyName?: string;
key?: string; // thumbprint key SHA1
hash: string; // thumbprint SHA1
}
export interface IPkiCertificate{
subjectName?: string;
subjectFriendlyName?: string;
issuerName?: string;
issuerFriendlyName?: string;
notAfter?: Date;
notBefore?: Date;
serial?: string;
key?: string; // thumbprint key SHA1
hash: string; // thumbprint SHA1
}
export interface IFilter {
/**
* PkiItem
* CRL | CERTIFICATE | KEY | REQUEST
*/
type?: string[];
/**
* Provider
* SYSTEM, MICROSOFT, CRYPTOPRO, TSL, PKCS11, TRUSTEDNET
*/
provider?: string[];
/**
* MY, OTHER, CA, TRUSTED
*/
category?: string[];
hash?: string;
subjectName?: string;
subjectFriendlyName?: string;
issuerName?: string;
issuerFriendlyName?: string;
isValid?: boolean;
serial?: string;
}
export declare abstract class Provider {
type: string;
/**
* Returns the full list of stored items
*/
items: IPkiItem[];
/**
* Returns a collection of items matching the given criteria
*/
find(filter: IFilter): IPkiItem[];
toJSON();
static fromJSON(): Provider;
fromJSON();
} |
export declare class ProviderSystem extends Provider {
constructor(folder: string);
}
export declare class ProviderMicrosoft extends Provider {
constructor();
}
export declare class ProviderTSL extends Provider {
constructor(url: string);
}
export declare class PkiStore {
constructor(json: string);
cash: CashJson;
items: IPkiItem[];
/**
* Returns the set of items matching the filter
* - if the filter is empty, returns all items
*/
find(filter?: IFilter): IPkiItem[];
/**
* ?
*/
find(item: IPkiItem, filter: IFilter): IPkiItem[];
/**
* Returns a key matching the filter
* - the filter is specified in terms of the items that may be associated with the key
*/
findKey(filter: IFilter): IPkiItem;
/**
* Returns the object from the structure
*/
getItem(item: IPkiItem): any;
/**
* Collection of providers
*/
providers: Provider[];
}
export declare class CashJson {
filenName: string;
contructor(fileName: string);
save(fileName: string);
load(fileName: string);
export(): IPkiItem[];
import(items: IPkiItem[]);
} | random_line_split |
|
store.ts | import {BaseObject} from "../lib/object";
import * as trusted from "../lib/trusted";
export interface IPkiItem extends IPkiCrl, IPkiCertificate, IPkiRequest, IPkiKey{
/**
* DER | PEM
*/
format: string;
/**
* CRL | CERTIFICATE | KEY | REQUEST
*/
type: string;
uri: string;
provider: string;
categoty: string;
hash: string;
}
export interface IPkiKey{
encrypted?: boolean;
hash: string;
}
export interface IPkiCrl {
authorityKeyid?: string;
crlNumber?: string;
issuerName?: string;
issuerFriendlyName?: string;
lastUpdate?: Date;
nextUpdate?: Date;
hash: string; // thumbprint SHA1
}
export interface IPkiRequest{
subjectName?: string;
subjectFriendlyName?: string;
key?: string; // thumbprint key SHA1
hash: string; // thumbprint SHA1
}
export interface IPkiCertificate{
subjectName?: string;
subjectFriendlyName?: string;
issuerName?: string;
issuerFriendlyName?: string;
notAfter?: Date;
notBefore?: Date;
serial?: string;
key?: string; // thumbprint key SHA1
hash: string; // thumbprint SHA1
}
export interface IFilter {
/**
* PkiItem
* CRL | CERTIFICATE | KEY | REQUEST
*/
type?: string[];
/**
* Provider
* SYSTEM, MICROSOFT, CRYPTOPRO, TSL, PKCS11, TRUSTEDNET
*/
provider?: string[];
/**
* MY, OTHER, CA, TRUSTED
*/
category?: string[];
hash?: string;
subjectName?: string;
subjectFriendlyName?: string;
issuerName?: string;
issuerFriendlyName?: string;
isValid?: boolean;
serial?: string;
}
export declare abstract class Provider {
type: string;
/**
* Returns the full list of stored items
*/
items: IPkiItem[];
/**
* Returns a collection of items matching the given criteria
*/
find(filter: IFilter): IPkiItem[];
toJSON();
static fromJSON(): Provider;
fromJSON();
}
export declare class ProviderSystem extends Provider {
constructor(folder: string);
}
export declare c | icrosoft extends Provider {
constructor();
}
export declare class ProviderTSL extends Provider {
constructor(url: string);
}
export declare class PkiStore {
constructor(json: string);
cash: CashJson;
items: IPkiItem[];
/**
* Returns the set of items matching the filter
* - if the filter is empty, returns all items
*/
find(filter?: IFilter): IPkiItem[];
/**
* ?
*/
find(item: IPkiItem, filter: IFilter): IPkiItem[];
/**
* Returns a key matching the filter
* - the filter is specified in terms of the items that may be associated with the key
*/
findKey(filter: IFilter): IPkiItem;
/**
* Returns the object from the structure
*/
getItem(item: IPkiItem): any;
/**
* Collection of providers
*/
providers: Provider[];
}
export declare class CashJson {
filenName: string;
contructor(fileName: string);
save(fileName: string);
load(fileName: string);
export(): IPkiItem[];
import(items: IPkiItem[]);
}
| lass ProviderM | identifier_name |
image_widget.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) Grigoriy A. Armeev, 2015
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License v2 for more details.
| # Cheers, Satary.
#
# This widget provides a simple interface for showing pictures.
# It can be resized while preserving the aspect ratio, and it provides signals for mouse click and hover.
import sys, os
from PyQt4 import QtGui,QtCore
'''
This widget implements folders and pictures
'''
class ImageWidget(QtGui.QWidget):
def __init__(self,parent=None):
super(ImageWidget, self).__init__()
mainLayout=QtGui.QVBoxLayout(self)
mainLayout.setSpacing(0)
mainLayout.setContentsMargins(0,0,0,0)
self.label=CustomLabel(self)
mainLayout.addWidget(self.label)
self.connect(self.label,QtCore.SIGNAL("mousePressSignal"),self.emitWigetPressedSignal)
self.connect(self.label,QtCore.SIGNAL("mouseHoverSignal"),self.emitWigetHoveredSignal)
def setImage(self,path):
'''
Reads an image from path and sets it on the widget
'''
self.currentPath=self.label.path=path
self.pixmap=QtGui.QPixmap(path)
self.label.initsize=self.pixmap.size()
size=self.size()#-QtCore.QSize(20,20)
self.resizeImage(size)
def emitWigetPressedSignal(self, coord, path):
'''
Emits signal imageWigetPressed which provides coordinates in pixels (as on the initial picture)
and the path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetPressed"),coord,path)
def emitWigetHoveredSignal(self, coord, path):
'''
Emits signal imageWigetHovered which provides coordinates in pixels (as on the initial picture)
and the path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetHovered"),coord,path)
def resizeEvent( self, resizeEvent):
'''
Overrides QWidget's resizeEvent for better quality resizing of the pixmap
'''
super(ImageWidget, self).resizeEvent(resizeEvent)
self.resizeImage(resizeEvent.size())
def resizeImage(self, size):
'''
Resizes image keeping aspect ratio
'''
try:
self.label.resize(size)
pixmap=self.pixmap.scaled(size, QtCore.Qt.KeepAspectRatio)
self.label.setPixmap(pixmap)
self.label.update()
self.label.picSize=pixmap.size()
except:
self.label.setText('No image loaded.')
class CustomLabel(QtGui.QLabel):
'''
This class provides modified QLabel
which provides QSignals with coordinates in coord. system of initial image
Provides signal for mouse hovering.
'''
def __init__(self, parent=None, path=None):
super(CustomLabel, self).__init__(parent)
self.path=path
self.setMouseTracking(True)
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.hoverMouse)
self.setAlignment(QtCore.Qt.AlignCenter)
self.setSizePolicy(QtGui.QSizePolicy.Ignored,QtGui.QSizePolicy.Ignored)
def calcCoord(self,x,y):
lblwidth=self.size().width()
picwidth=self.picSize.width()
lblheight=self.size().height()
picheight=self.picSize.height()
initwidth=self.initsize.width()
initheight=self.initsize.height()
x=int((x-(lblwidth-picwidth)/2.0)*initwidth/picwidth)
y=int((y-(lblheight-picheight)/2.0)*initheight/picheight)
if (x>0) and (y>0) and (x < initwidth) and (y < initheight):
return x,y
else:
return None
def mousePressEvent(self, e):
try:
super(CustomLabel, self).mousePressEvent(e)
coord=self.calcCoord(e.x(),e.y())
if coord!=None:
self.coord=coord
self.emit(QtCore.SIGNAL("mousePressSignal"),self.coord,self.path)
except:
pass
def mouseMoveEvent(self, e):
try:
super(CustomLabel, self).mouseMoveEvent(e)
self.coord=self.calcCoord(e.x(),e.y())
self.timer.stop()
self.timer.start(600)
except:
pass
def hoverMouse(self):
self.timer.stop()
if self.underMouse() and (self.coord!=None):
self.emit(QtCore.SIGNAL("mouseHoverSignal"),self.coord,self.path) | random_line_split |
|
image_widget.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) Grigoriy A. Armeev, 2015
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License v2 for more details.
# Cheers, Satary.
#
# This widget provides a simple interface for showing pictures.
# It can be resized while preserving the aspect ratio, and it provides signals for mouse click and hover.
import sys, os
from PyQt4 import QtGui,QtCore
'''
This widget implements folders and pictures
'''
class ImageWidget(QtGui.QWidget):
def __init__(self,parent=None):
super(ImageWidget, self).__init__()
mainLayout=QtGui.QVBoxLayout(self)
mainLayout.setSpacing(0)
mainLayout.setContentsMargins(0,0,0,0)
self.label=CustomLabel(self)
mainLayout.addWidget(self.label)
self.connect(self.label,QtCore.SIGNAL("mousePressSignal"),self.emitWigetPressedSignal)
self.connect(self.label,QtCore.SIGNAL("mouseHoverSignal"),self.emitWigetHoveredSignal)
def setImage(self,path):
'''
Reads the image from path and sets it on the widget
'''
self.currentPath=self.label.path=path
self.pixmap=QtGui.QPixmap(path)
self.label.initsize=self.pixmap.size()
size=self.size()#-QtCore.QSize(20,20)
self.resizeImage(size)
def emitWigetPressedSignal(self, coord, path):
'''
Emits signal imageWigetPressed which provides the coordinates in pixels (as on the initial picture)
and path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetPressed"),coord,path)
def emitWigetHoveredSignal(self, coord, path):
'''
Emits signal imageWigetHovered which provides the coordinates in pixels (as on the initial picture)
and path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetHovered"),coord,path)
def resizeEvent( self, resizeEvent):
'''
Overrides QWidget's resizeEvent for better quality resizing of the pixmap
'''
super(ImageWidget, self).resizeEvent(resizeEvent)
self.resizeImage(resizeEvent.size())
def resizeImage(self, size):
'''
Resizes image keeping aspect ratio
'''
try:
self.label.resize(size)
pixmap=self.pixmap.scaled(size, QtCore.Qt.KeepAspectRatio)
self.label.setPixmap(pixmap)
self.label.update()
self.label.picSize=pixmap.size()
except:
self.label.setText('No image loaded.')
class CustomLabel(QtGui.QLabel):
'''
This class provides modified QLabel
which provides QSignals with coordinates in coord. system of initial image
Provides signal for mouse hovering.
'''
def __init__(self, parent=None, path=None):
super(CustomLabel, self).__init__(parent)
self.path=path
self.setMouseTracking(True)
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.hoverMouse)
self.setAlignment(QtCore.Qt.AlignCenter)
self.setSizePolicy(QtGui.QSizePolicy.Ignored,QtGui.QSizePolicy.Ignored)
def calcCoord(self,x,y):
lblwidth=self.size().width()
picwidth=self.picSize.width()
lblheight=self.size().height()
picheight=self.picSize.height()
initwidth=self.initsize.width()
initheight=self.initsize.height()
x=int((x-(lblwidth-picwidth)/2.0)*initwidth/picwidth)
y=int((y-(lblheight-picheight)/2.0)*initheight/picheight)
if (x>0) and (y>0) and (x < initwidth) and (y < initheight):
return x,y
else:
return None
def mousePressEvent(self, e):
try:
super(CustomLabel, self).mousePressEvent(e)
coord=self.calcCoord(e.x(),e.y())
if coord!=None:
self.coord=coord
self.emit(QtCore.SIGNAL("mousePressSignal"),self.coord,self.path)
except:
pass
def mouseMoveEvent(self, e):
try:
super(CustomLabel, self).mouseMoveEvent(e)
self.coord=self.calcCoord(e.x(),e.y())
self.timer.stop()
self.timer.start(600)
except:
pass
def hoverMouse(self):
self.timer.stop()
if self.underMouse() and (self.coord!=None):
s | elf.emit(QtCore.SIGNAL("mouseHoverSignal"),self.coord,self.path)
| conditional_block |
|
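# --- Editor's note: a standalone sketch of the coordinate mapping used by
# --- CustomLabel.calcCoord above, with made-up example numbers. The scaled pixmap is
# --- centered inside the label, so the (label - pixmap) / 2 offset is subtracted before
# --- rescaling back to the original image resolution.
def map_label_to_image(x, y, label_size, pic_size, init_size):
    lw, lh = label_size
    pw, ph = pic_size
    iw, ih = init_size
    ix = int((x - (lw - pw) / 2.0) * iw / pw)
    iy = int((y - (lh - ph) / 2.0) * ih / ph)
    if 0 < ix < iw and 0 < iy < ih:
        return ix, iy
    return None

# A 1600x1200 image shown in a 500x400 label is scaled to 500x375 (aspect kept),
# so a click at label position (250, 200) maps back to pixel (800, 600).
assert map_label_to_image(250, 200, (500, 400), (500, 375), (1600, 1200)) == (800, 600)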
image_widget.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) Grigoriy A. Armeev, 2015
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as·
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License v2 for more details.
# Cheers, Satary.
#
# This widget provides a simple interface for showing pictures.
# It can be resized while keeping the aspect ratio, and it provides signals for mouse click and hover.
import sys, os
from PyQt4 import QtGui,QtCore
'''
This widget implements folders and pictures
'''
class ImageWidget(QtGui.QWidget):
def __init__(self,parent=None):
super(ImageWidget, self).__init__()
mainLayout=QtGui.QVBoxLayout(self)
mainLayout.setSpacing(0)
mainLayout.setContentsMargins(0,0,0,0)
self.label=CustomLabel(self)
mainLayout.addWidget(self.label)
self.connect(self.label,QtCore.SIGNAL("mousePressSignal"),self.emitWigetPressedSignal)
self.connect(self.label,QtCore.SIGNAL("mouseHoverSignal"),self.emitWigetHoveredSignal)
def setImage(self,path):
'''
Reads the image from path and sets it on the widget
'''
self.currentPath=self.label.path=path
self.pixmap=QtGui.QPixmap(path)
self.label.initsize=self.pixmap.size()
size=self.size()#-QtCore.QSize(20,20)
self.resizeImage(size)
def emitWigetPressedSignal(self, coord, path):
'''
Emits signal imageWigetPressed which provides the coordinates in pixels (as on the initial picture)
and path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetPressed"),coord,path)
def emitWigetHoveredSignal(self, coord, path):
'''
Emits signal imageWigetHovered which provides the coordinates in pixels (as on the initial picture)
and path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetHovered"),coord,path)
def resizeEvent( self, resizeEvent):
'''
Overrides QWidget's resizeEvent for better quality resizing of the pixmap
'''
super(ImageWidget, self).resizeEvent(resizeEvent)
self.resizeImage(resizeEvent.size())
def resizeImage(self, size):
'''
Resizes image keeping aspect ratio
'''
try:
self.label.resize(size)
pixmap=self.pixmap.scaled(size, QtCore.Qt.KeepAspectRatio)
self.label.setPixmap(pixmap)
self.label.update()
self.label.picSize=pixmap.size()
except:
self.label.setText('No image loaded.')
class CustomLabel(QtGui.QLabel):
'''
This class provides modified QLabel
which provides QSignals with coordinates in coord. system of initial image
Provides signal for mouse hovering.
'''
def __init__(self, parent=None, path=None):
super(CustomLabel, self).__init__(parent)
self.path=path
self.setMouseTracking(True)
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.hoverMouse)
self.setAlignment(QtCore.Qt.AlignCenter)
self.setSizePolicy(QtGui.QSizePolicy.Ignored,QtGui.QSizePolicy.Ignored)
def c | self,x,y):
lblwidth=self.size().width()
picwidth=self.picSize.width()
lblheight=self.size().height()
picheight=self.picSize.height()
initwidth=self.initsize.width()
initheight=self.initsize.height()
x=int((x-(lblwidth-picwidth)/2.0)*initwidth/picwidth)
y=int((y-(lblheight-picheight)/2.0)*initheight/picheight)
if (x>0) and (y>0) and (x < initwidth) and (y < initheight):
return x,y
else:
return None
def mousePressEvent(self, e):
try:
super(CustomLabel, self).mousePressEvent(e)
coord=self.calcCoord(e.x(),e.y())
if coord!=None:
self.coord=coord
self.emit(QtCore.SIGNAL("mousePressSignal"),self.coord,self.path)
except:
pass
def mouseMoveEvent(self, e):
try:
super(CustomLabel, self).mouseMoveEvent(e)
self.coord=self.calcCoord(e.x(),e.y())
self.timer.stop()
self.timer.start(600)
except:
pass
def hoverMouse(self):
self.timer.stop()
if self.underMouse() and (self.coord!=None):
self.emit(QtCore.SIGNAL("mouseHoverSignal"),self.coord,self.path)
| alcCoord( | identifier_name |
image_widget.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) Grigoriy A. Armeev, 2015
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as·
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License v2 for more details.
# Cheers, Satary.
#
# This widget provides a simple interface for showing pictures.
# It can be resized while keeping the aspect ratio, and it provides signals for mouse click and hover.
import sys, os
from PyQt4 import QtGui,QtCore
'''
This widget implements folders and pictures
'''
class ImageWidget(QtGui.QWidget):
def __init__(self,parent=None):
super(ImageWidget, self).__init__()
mainLayout=QtGui.QVBoxLayout(self)
mainLayout.setSpacing(0)
mainLayout.setContentsMargins(0,0,0,0)
self.label=CustomLabel(self)
mainLayout.addWidget(self.label)
self.connect(self.label,QtCore.SIGNAL("mousePressSignal"),self.emitWigetPressedSignal)
self.connect(self.label,QtCore.SIGNAL("mouseHoverSignal"),self.emitWigetHoveredSignal)
def setImage(self,path):
'''
Reads the image from path and sets it on the widget
'''
self.currentPath=self.label.path=path
self.pixmap=QtGui.QPixmap(path)
self.label.initsize=self.pixmap.size()
size=self.size()#-QtCore.QSize(20,20)
self.resizeImage(size)
def emitWigetPressedSignal(self, coord, path):
'''
Emits signal imageWigetPressed which provides the coordinates in pixels (as on the initial picture)
and path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetPressed"),coord,path)
def emitWigetHoveredSignal(self, coord, path):
'''
Emits signal imageWigetHovered which provides the coordinates in pixels (as on the initial picture)
and path to that picture
'''
self.emit(QtCore.SIGNAL("imageWigetHovered"),coord,path)
def resizeEvent( self, resizeEvent):
'''
Overrides QWidget's resizeEvent for better quality resizing of the pixmap
'''
super(ImageWidget, self).resizeEvent(resizeEvent)
self.resizeImage(resizeEvent.size())
def resizeImage(self, size):
'''
Resizes image keeping aspect ratio
'''
try:
self.label.resize(size)
pixmap=self.pixmap.scaled(size, QtCore.Qt.KeepAspectRatio)
self.label.setPixmap(pixmap)
self.label.update()
self.label.picSize=pixmap.size()
except:
self.label.setText('No image loaded.')
class CustomLabel(QtGui.QLabel):
'''
This class provides modified QLabel
which provides QSignals with coordinates in coord. system of initial image
Provides signal for mouse hovering.
'''
def __init__(self, parent=None, path=None):
super(CustomLabel, self).__init__(parent)
self.path=path
self.setMouseTracking(True)
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.hoverMouse)
self.setAlignment(QtCore.Qt.AlignCenter)
self.setSizePolicy(QtGui.QSizePolicy.Ignored,QtGui.QSizePolicy.Ignored)
def calcCoord(self,x,y):
lblwidth=self.size().width()
picwidth=self.picSize.width()
lblheight=self.size().height()
picheight=self.picSize.height()
initwidth=self.initsize.width()
initheight=self.initsize.height()
x=int((x-(lblwidth-picwidth)/2.0)*initwidth/picwidth)
y=int((y-(lblheight-picheight)/2.0)*initheight/picheight)
if (x>0) and (y>0) and (x < initwidth) and (y < initheight):
return x,y
else:
return None
def mousePressEvent(self, e):
try:
super(CustomLabel, self).mousePressEvent(e)
coord=self.calcCoord(e.x(),e.y())
if coord!=None:
self.coord=coord
self.emit(QtCore.SIGNAL("mousePressSignal"),self.coord,self.path)
except:
pass
def mouseMoveEvent(self, e):
try:
super(CustomLabel, self).mouseMoveEvent(e)
self.coord=self.calcCoord(e.x(),e.y())
self.timer.stop()
self.timer.start(600)
except:
pass
def hoverMouse(self):
s | elf.timer.stop()
if self.underMouse() and (self.coord!=None):
self.emit(QtCore.SIGNAL("mouseHoverSignal"),self.coord,self.path)
| identifier_body |
|
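# --- Editor's note: the hover signal above is debounced by a 600 ms QTimer, so
# --- "imageWigetHovered" fires only once the cursor rests over the picture. Assumed
# --- wiring (not in the original source), mirroring the click handler shown earlier:
#
#     def on_hovered(coord, path):
#         print("hovering over pixel %s of %s" % (coord, path))
#
#     widget.connect(widget, QtCore.SIGNAL("imageWigetHovered"), on_hovered)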
faces.component.ts | import {Component, ElementRef, OnInit, ViewChild} from '@angular/core';
import {FacesService} from './faces.service';
import {QueryService} from '../../model/query.service';
import {map} from 'rxjs/operators';
import {PersonDTO} from '../../../../common/entities/PersonDTO';
import {Observable} from 'rxjs/Observable';
@Component({
selector: 'app-faces',
templateUrl: './faces.component.html',
styleUrls: ['./faces.component.css']
})
export class FacesComponent implements OnInit {
@ViewChild('container') container: ElementRef;
public size: number;
favourites: Observable<PersonDTO[]>;
nonFavourites: Observable<PersonDTO[]>;
constructor(public facesService: FacesService,
public queryService: QueryService) {
this.facesService.getPersons().catch(console.error);
const personCmp = (p1: PersonDTO, p2: PersonDTO) => {
return p1.name.localeCompare(p2.name);
};
this.favourites = this.facesService.persons.pipe(
map(value => value.filter(p => p.isFavourite)
.sort(personCmp))
); | this.nonFavourites = this.facesService.persons.pipe(
map(value =>
value.filter(p => !p.isFavourite)
.sort(personCmp))
);
}
ngOnInit() {
this.updateSize();
}
private updateSize() {
const size = 220 + 5;
// body - container margin
const containerWidth = this.container.nativeElement.clientWidth - 30;
this.size = (containerWidth / Math.round((containerWidth / size))) - 5;
}
} | random_line_split |
|
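// --- Editor's note: a worked example of the updateSize() calculation above, using
// --- made-up numbers. With a 1030 px wide container, the 30 px body margin leaves
// --- 1000 px; 1000 / 225 rounds to 4 columns, so each tile gets 1000 / 4 - 5 = 245 px.
function tileSize(clientWidth: number): number {
  const size = 220 + 5;
  const containerWidth = clientWidth - 30;
  return containerWidth / Math.round(containerWidth / size) - 5;
}

console.log(tileSize(1030)); // 245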
faces.component.ts | import {Component, ElementRef, OnInit, ViewChild} from '@angular/core';
import {FacesService} from './faces.service';
import {QueryService} from '../../model/query.service';
import {map} from 'rxjs/operators';
import {PersonDTO} from '../../../../common/entities/PersonDTO';
import {Observable} from 'rxjs/Observable';
@Component({
selector: 'app-faces',
templateUrl: './faces.component.html',
styleUrls: ['./faces.component.css']
})
export class FacesComponent implements OnInit {
@ViewChild('container') container: ElementRef;
public size: number;
favourites: Observable<PersonDTO[]>;
nonFavourites: Observable<PersonDTO[]>;
constructor(public facesService: FacesService,
public queryService: QueryService) |
ngOnInit() {
this.updateSize();
}
private updateSize() {
const size = 220 + 5;
// body - container margin
const containerWidth = this.container.nativeElement.clientWidth - 30;
this.size = (containerWidth / Math.round((containerWidth / size))) - 5;
}
}
| {
this.facesService.getPersons().catch(console.error);
const personCmp = (p1: PersonDTO, p2: PersonDTO) => {
return p1.name.localeCompare(p2.name);
};
this.favourites = this.facesService.persons.pipe(
map(value => value.filter(p => p.isFavourite)
.sort(personCmp))
);
this.nonFavourites = this.facesService.persons.pipe(
map(value =>
value.filter(p => !p.isFavourite)
.sort(personCmp))
);
} | identifier_body |
faces.component.ts | import {Component, ElementRef, OnInit, ViewChild} from '@angular/core';
import {FacesService} from './faces.service';
import {QueryService} from '../../model/query.service';
import {map} from 'rxjs/operators';
import {PersonDTO} from '../../../../common/entities/PersonDTO';
import {Observable} from 'rxjs/Observable';
@Component({
selector: 'app-faces',
templateUrl: './faces.component.html',
styleUrls: ['./faces.component.css']
})
export class FacesComponent implements OnInit {
@ViewChild('container') container: ElementRef;
public size: number;
favourites: Observable<PersonDTO[]>;
nonFavourites: Observable<PersonDTO[]>;
constructor(public facesService: FacesService,
public queryService: QueryService) {
this.facesService.getPersons().catch(console.error);
const personCmp = (p1: PersonDTO, p2: PersonDTO) => {
return p1.name.localeCompare(p2.name);
};
this.favourites = this.facesService.persons.pipe(
map(value => value.filter(p => p.isFavourite)
.sort(personCmp))
);
this.nonFavourites = this.facesService.persons.pipe(
map(value =>
value.filter(p => !p.isFavourite)
.sort(personCmp))
);
}
ngOnInit() {
this.updateSize();
}
private | () {
const size = 220 + 5;
// body - container margin
const containerWidth = this.container.nativeElement.clientWidth - 30;
this.size = (containerWidth / Math.round((containerWidth / size))) - 5;
}
}
| updateSize | identifier_name |
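// --- Editor's note: a standalone sketch of the favourite / non-favourite split used by
// --- FacesComponent above, reduced to plain arrays so it runs without RxJS. The sample
// --- people are invented for illustration.
interface PersonLike { name: string; isFavourite: boolean; }

const people: PersonLike[] = [
  { name: 'Zoe', isFavourite: true },
  { name: 'Adam', isFavourite: false },
  { name: 'Mia', isFavourite: true },
];

const byName = (a: PersonLike, b: PersonLike) => a.name.localeCompare(b.name);
const favourites = people.filter(p => p.isFavourite).sort(byName);     // Mia, Zoe
const nonFavourites = people.filter(p => !p.isFavourite).sort(byName); // Adam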
XFileSharingProFolder.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.XFSCrypter import XFSCrypter, create_getInfo
class XFileSharingProFolder(XFSCrypter):
__name__ = "XFileSharingProFolder"
__type__ = "crypter"
__version__ = "0.14"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(?:\w+\.)*?(?P<DOMAIN>(?:[\d.]+|[\w\-^_]{3,}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)/(?:user|folder)s?/\w+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
__description__ = """XFileSharingPro dummy folder decrypter plugin for hook"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
def _log(self, level, plugintype, pluginname, messages):
return super(XFileSharingProFolder, self)._log(level,
plugintype,
"%s: %s" % (pluginname, self.PLUGIN_NAME),
messages)
def init(self):
super(XFileSharingProFolder, self).init()
self.__pattern__ = self.pyload.pluginManager.crypterPlugins[self.__name__]['pattern']
self.PLUGIN_DOMAIN = re.match(self.__pattern__, self.pyfile.url).group("DOMAIN").lower()
self.PLUGIN_NAME = "".join(part.capitalize() for part in re.split(r'(\.|\d+|-)', self.PLUGIN_DOMAIN) if part != '.')
def _setup(self):
account_name = self.__name__ if self.account.PLUGIN_DOMAIN is None else self.PLUGIN_NAME
self.chunk_limit = 1
self.multiDL = True
if self.account:
self.req = self.pyload.requestFactory.getRequest(account_name, self.account.user)
self.premium = self.account.premium
self.resume_download = self.premium
else:
self.req = self.pyload.requestFactory.getRequest(account_name)
self.premium = False
self.resume_download = False
def load_account(self):
if self.req:
self.req.close()
if not self.account:
|
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.__name__)
if self.account:
if not self.account.PLUGIN_DOMAIN:
self.account.PLUGIN_DOMAIN = self.PLUGIN_DOMAIN
if not self.account.user: #@TODO: Move to `Account` in 0.4.10
self.account.user = self.account.select()[0]
if not self.account.logged:
self.account = False
getInfo = create_getInfo(XFileSharingProFolder)
| self.account = self.pyload.accountManager.getAccountPlugin(self.PLUGIN_NAME) | conditional_block |
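# --- Editor's note: a quick illustration of how the __pattern__ above captures the
# --- DOMAIN group and how PLUGIN_NAME is derived from it. The URL is invented; the
# --- regex string is copied from the plugin.
import re

pattern = r'https?://(?:www\.)?(?:\w+\.)*?(?P<DOMAIN>(?:[\d.]+|[\w\-^_]{3,}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)/(?:user|folder)s?/\w+'
m = re.match(pattern, "http://www.fileshare.biz/users/abc123")
domain = m.group("DOMAIN").lower()  # -> "fileshare.biz"
name = "".join(part.capitalize() for part in re.split(r'(\.|\d+|-)', domain) if part != '.')
print(domain, name)  # -> fileshare.biz FileshareBiz (the per-site plugin/account name)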
XFileSharingProFolder.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.XFSCrypter import XFSCrypter, create_getInfo
class XFileSharingProFolder(XFSCrypter):
__name__ = "XFileSharingProFolder"
__type__ = "crypter"
__version__ = "0.14"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(?:\w+\.)*?(?P<DOMAIN>(?:[\d.]+|[\w\-^_]{3,}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)/(?:user|folder)s?/\w+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
__description__ = """XFileSharingPro dummy folder decrypter plugin for hook"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
def _log(self, level, plugintype, pluginname, messages):
return super(XFileSharingProFolder, self)._log(level,
plugintype,
"%s: %s" % (pluginname, self.PLUGIN_NAME),
messages)
def init(self):
super(XFileSharingProFolder, self).init()
self.__pattern__ = self.pyload.pluginManager.crypterPlugins[self.__name__]['pattern']
self.PLUGIN_DOMAIN = re.match(self.__pattern__, self.pyfile.url).group("DOMAIN").lower()
self.PLUGIN_NAME = "".join(part.capitalize() for part in re.split(r'(\.|\d+|-)', self.PLUGIN_DOMAIN) if part != '.')
def | (self):
account_name = self.__name__ if self.account.PLUGIN_DOMAIN is None else self.PLUGIN_NAME
self.chunk_limit = 1
self.multiDL = True
if self.account:
self.req = self.pyload.requestFactory.getRequest(account_name, self.account.user)
self.premium = self.account.premium
self.resume_download = self.premium
else:
self.req = self.pyload.requestFactory.getRequest(account_name)
self.premium = False
self.resume_download = False
def load_account(self):
if self.req:
self.req.close()
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.PLUGIN_NAME)
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.__name__)
if self.account:
if not self.account.PLUGIN_DOMAIN:
self.account.PLUGIN_DOMAIN = self.PLUGIN_DOMAIN
if not self.account.user: #@TODO: Move to `Account` in 0.4.10
self.account.user = self.account.select()[0]
if not self.account.logged:
self.account = False
getInfo = create_getInfo(XFileSharingProFolder)
| _setup | identifier_name |
XFileSharingProFolder.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.XFSCrypter import XFSCrypter, create_getInfo
class XFileSharingProFolder(XFSCrypter):
|
def init(self):
super(XFileSharingProFolder, self).init()
self.__pattern__ = self.pyload.pluginManager.crypterPlugins[self.__name__]['pattern']
self.PLUGIN_DOMAIN = re.match(self.__pattern__, self.pyfile.url).group("DOMAIN").lower()
self.PLUGIN_NAME = "".join(part.capitalize() for part in re.split(r'(\.|\d+|-)', self.PLUGIN_DOMAIN) if part != '.')
def _setup(self):
account_name = self.__name__ if self.account.PLUGIN_DOMAIN is None else self.PLUGIN_NAME
self.chunk_limit = 1
self.multiDL = True
if self.account:
self.req = self.pyload.requestFactory.getRequest(account_name, self.account.user)
self.premium = self.account.premium
self.resume_download = self.premium
else:
self.req = self.pyload.requestFactory.getRequest(account_name)
self.premium = False
self.resume_download = False
def load_account(self):
if self.req:
self.req.close()
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.PLUGIN_NAME)
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.__name__)
if self.account:
if not self.account.PLUGIN_DOMAIN:
self.account.PLUGIN_DOMAIN = self.PLUGIN_DOMAIN
if not self.account.user: #@TODO: Move to `Account` in 0.4.10
self.account.user = self.account.select()[0]
if not self.account.logged:
self.account = False
getInfo = create_getInfo(XFileSharingProFolder)
| __name__ = "XFileSharingProFolder"
__type__ = "crypter"
__version__ = "0.14"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(?:\w+\.)*?(?P<DOMAIN>(?:[\d.]+|[\w\-^_]{3,}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)/(?:user|folder)s?/\w+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
__description__ = """XFileSharingPro dummy folder decrypter plugin for hook"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
def _log(self, level, plugintype, pluginname, messages):
return super(XFileSharingProFolder, self)._log(level,
plugintype,
"%s: %s" % (pluginname, self.PLUGIN_NAME),
messages)
| identifier_body |
XFileSharingProFolder.py | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.XFSCrypter import XFSCrypter, create_getInfo
class XFileSharingProFolder(XFSCrypter):
__name__ = "XFileSharingProFolder"
__type__ = "crypter"
__version__ = "0.14"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(?:\w+\.)*?(?P<DOMAIN>(?:[\d.]+|[\w\-^_]{3,}(?:\.[a-zA-Z]{2,}){1,2})(?:\:\d+)?)/(?:user|folder)s?/\w+'
__config__ = [("use_subfolder" , "bool", "Save package to subfolder" , True),
("subfolder_per_pack", "bool", "Create a subfolder for each package", True)]
__description__ = """XFileSharingPro dummy folder decrypter plugin for hook"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
def _log(self, level, plugintype, pluginname, messages):
return super(XFileSharingProFolder, self)._log(level,
plugintype,
"%s: %s" % (pluginname, self.PLUGIN_NAME),
messages)
def init(self):
super(XFileSharingProFolder, self).init()
self.__pattern__ = self.pyload.pluginManager.crypterPlugins[self.__name__]['pattern']
self.PLUGIN_DOMAIN = re.match(self.__pattern__, self.pyfile.url).group("DOMAIN").lower()
self.PLUGIN_NAME = "".join(part.capitalize() for part in re.split(r'(\.|\d+|-)', self.PLUGIN_DOMAIN) if part != '.')
def _setup(self):
account_name = self.__name__ if self.account.PLUGIN_DOMAIN is None else self.PLUGIN_NAME
self.chunk_limit = 1
self.multiDL = True
if self.account:
self.req = self.pyload.requestFactory.getRequest(account_name, self.account.user)
self.premium = self.account.premium
self.resume_download = self.premium
else:
self.req = self.pyload.requestFactory.getRequest(account_name) |
def load_account(self):
if self.req:
self.req.close()
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.PLUGIN_NAME)
if not self.account:
self.account = self.pyload.accountManager.getAccountPlugin(self.__name__)
if self.account:
if not self.account.PLUGIN_DOMAIN:
self.account.PLUGIN_DOMAIN = self.PLUGIN_DOMAIN
if not self.account.user: #@TODO: Move to `Account` in 0.4.10
self.account.user = self.account.select()[0]
if not self.account.logged:
self.account = False
getInfo = create_getInfo(XFileSharingProFolder) | self.premium = False
self.resume_download = False
| random_line_split |
tweetService.ts | import {Injectable} from '@angular/core';
import {Http} from '@angular/http';
import {Events, Platform} from 'ionic-angular';
@Injectable()
export class TweetService {
tweets: any;
timer: any;
error: any;
errorCount: number;
constructor(public http: Http, public events: Events, public platform: Platform) {
this.timer = null;
this.errorCount = 0;
this.tweets = [];
this.loadTweets();
// Reload Trains when we come out of the background
this.platform.resume.subscribe(() => {
this.loadTweets();
});
}
loadTweets() {
if (this.timer !== null) {
clearTimeout(this.timer);
}
if (this.http) {
let url = "http://klingmandesign.com/marta/twitter/index.php?key=martaApp&count=40&date=" + (new Date()).getTime();
//url = "http://klingmandesign.com/marta/data.php";
this.http.get(url)
.map(res => res.json()).subscribe(
data => {
this.tweets = data;
this.errorCount = 0;
this.events.publish('tweets:updated');
},
err => {
this.error = err;
this.events.publish('tweets:error');
},
() => {
//console.log("Finally");
this.timer = setTimeout(() => {
this.loadTweets()
}, 60000); // 1 minute
});
} else {
this.errorCount++;
this.timer = setTimeout(() => {
this.loadTweets()
}, 200); // try again in a fifth of a second
console.log('http went missing');
if (this.errorCount > 10) {
this.events.publish('tweets:error');
}
}
}
getError() {
return this.error;
}
| () {
return this.tweets;
}
} | getTweets | identifier_name |
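// --- Editor's note: a sketch (assumed usage, not from the original app) of how a page
// --- would consume TweetService: the service polls every 60 s and announces results via
// --- Ionic's Events bus with the 'tweets:updated' / 'tweets:error' topics.
// constructor(private tweetService: TweetService, private events: Events) {
//   this.events.subscribe('tweets:updated', () => {
//     this.tweets = this.tweetService.getTweets();
//   });
//   this.events.subscribe('tweets:error', () => console.warn(this.tweetService.getError()));
// }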
tweetService.ts | import {Injectable} from '@angular/core';
import {Http} from '@angular/http';
import {Events, Platform} from 'ionic-angular';
@Injectable()
export class TweetService {
tweets: any;
timer: any;
error: any;
errorCount: number;
constructor(public http: Http, public events: Events, public platform: Platform) {
this.timer = null;
this.errorCount = 0;
this.tweets = [];
this.loadTweets();
// Reload Trains when we come out of the background
this.platform.resume.subscribe(() => {
this.loadTweets();
});
}
loadTweets() {
if (this.timer !== null) {
clearTimeout(this.timer);
}
if (this.http) {
let url = "http://klingmandesign.com/marta/twitter/index.php?key=martaApp&count=40&date=" + (new Date()).getTime();
//url = "http://klingmandesign.com/marta/data.php";
this.http.get(url)
.map(res => res.json()).subscribe(
data => {
this.tweets = data;
this.errorCount = 0;
this.events.publish('tweets:updated');
},
err => {
this.error = err;
this.events.publish('tweets:error');
},
() => {
//console.log("Finally");
this.timer = setTimeout(() => {
this.loadTweets()
}, 60000); // 1 minute
});
} else {
this.errorCount++;
this.timer = setTimeout(() => {
this.loadTweets()
}, 200); // try again in a fifth of a second
console.log('http went missing');
if (this.errorCount > 10) |
}
}
getError() {
return this.error;
}
getTweets() {
return this.tweets;
}
} | {
this.events.publish('tweets:error');
} | conditional_block |
tweetService.ts | import {Injectable} from '@angular/core';
import {Http} from '@angular/http';
import {Events, Platform} from 'ionic-angular';
@Injectable()
export class TweetService {
tweets: any;
timer: any;
error: any; | this.timer = null;
this.errorCount = 0;
this.tweets = [];
this.loadTweets();
// Reload Trains when we come out of the background
this.platform.resume.subscribe(() => {
this.loadTweets();
});
}
loadTweets() {
if (this.timer !== null) {
clearTimeout(this.timer);
}
if (this.http) {
let url = "http://klingmandesign.com/marta/twitter/index.php?key=martaApp&count=40&date=" + (new Date()).getTime();
//url = "http://klingmandesign.com/marta/data.php";
this.http.get(url)
.map(res => res.json()).subscribe(
data => {
this.tweets = data;
this.errorCount = 0;
this.events.publish('tweets:updated');
},
err => {
this.error = err;
this.events.publish('tweets:error');
},
() => {
//console.log("Finally");
this.timer = setTimeout(() => {
this.loadTweets()
}, 60000); // 1 minute
});
} else {
this.errorCount++;
this.timer = setTimeout(() => {
this.loadTweets()
}, 200); // try again in a fifth of a second
console.log('http went missing');
if (this.errorCount > 10) {
this.events.publish('tweets:error');
}
}
}
getError() {
return this.error;
}
getTweets() {
return this.tweets;
}
} | errorCount: number;
constructor(public http: Http, public events: Events, public platform: Platform) { | random_line_split |
index.js | /* global Mousetrap */
import { typeOf } from '@ember/utils';
import { get } from "@ember/object";
export function bindKeyboardShortcuts(context) {
const shortcuts = get(context, 'keyboardShortcuts');
if (typeOf(shortcuts) !== 'object') {
return;
}
context._mousetraps = [];
Object.keys(shortcuts).forEach(function(shortcut) {
const actionObject = shortcuts[shortcut];
let mousetrap;
let preventDefault = true;
function invokeAction(action, eventType) {
let type = typeOf(action);
let callback;
if (type === 'string') {
callback = function() {
context.send(action);
return preventDefault !== true;
};
} else if (type === 'function') {
callback = action.bind(context);
} else {
throw new Error('Invalid value for keyboard shortcut: ' + action);
}
mousetrap.bind(shortcut, callback, eventType);
}
if (typeOf(actionObject) === 'object') {
if (actionObject.global === false) {
mousetrap = new Mousetrap(document);
} else if (actionObject.scoped) {
if (typeOf(actionObject.scoped) === 'boolean') {
mousetrap = new Mousetrap(get(context, 'element'));
} else if (typeOf(actionObject.scoped) === 'string') |
} else if (actionObject.targetElement) {
mousetrap = new Mousetrap(actionObject.targetElement);
} else {
mousetrap = new Mousetrap(document.body);
}
if (actionObject.preventDefault === false) {
preventDefault = false;
}
invokeAction(actionObject.action, actionObject.eventType);
} else {
mousetrap = new Mousetrap(document.body);
invokeAction(actionObject);
}
context._mousetraps.push(mousetrap);
});
}
export function unbindKeyboardShortcuts(context) {
const _removeEvent = (object, type, callback) => {
if (object.removeEventListener) {
object.removeEventListener(type, callback, false);
return;
}
object.detachEvent('on' + type, callback);
};
Array.isArray(context._mousetraps) && context._mousetraps.forEach(mousetrap => {
// manually unbind JS event
_removeEvent(mousetrap.target, 'keypress', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keydown', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keyup', mousetrap._handleKeyEvent);
mousetrap.reset();
});
context._mousetraps = [];
}
| {
mousetrap = new Mousetrap(
document.querySelector(actionObject.scoped)
);
} | conditional_block |
index.js | /* global Mousetrap */
import { typeOf } from '@ember/utils';
import { get } from "@ember/object";
export function bindKeyboardShortcuts(context) {
const shortcuts = get(context, 'keyboardShortcuts');
if (typeOf(shortcuts) !== 'object') {
return;
}
context._mousetraps = [];
Object.keys(shortcuts).forEach(function(shortcut) {
const actionObject = shortcuts[shortcut];
let mousetrap;
let preventDefault = true;
function invokeAction(action, eventType) {
let type = typeOf(action);
let callback;
if (type === 'string') {
callback = function() {
context.send(action);
return preventDefault !== true;
};
} else if (type === 'function') {
callback = action.bind(context);
} else {
throw new Error('Invalid value for keyboard shortcut: ' + action);
}
mousetrap.bind(shortcut, callback, eventType);
}
if (typeOf(actionObject) === 'object') {
if (actionObject.global === false) {
mousetrap = new Mousetrap(document);
} else if (actionObject.scoped) {
if (typeOf(actionObject.scoped) === 'boolean') {
mousetrap = new Mousetrap(get(context, 'element'));
} else if (typeOf(actionObject.scoped) === 'string') {
mousetrap = new Mousetrap(
document.querySelector(actionObject.scoped)
);
}
} else if (actionObject.targetElement) {
mousetrap = new Mousetrap(actionObject.targetElement);
} else {
mousetrap = new Mousetrap(document.body);
}
if (actionObject.preventDefault === false) {
preventDefault = false;
}
invokeAction(actionObject.action, actionObject.eventType);
} else {
mousetrap = new Mousetrap(document.body);
invokeAction(actionObject);
}
context._mousetraps.push(mousetrap);
});
}
export function | (context) {
const _removeEvent = (object, type, callback) => {
if (object.removeEventListener) {
object.removeEventListener(type, callback, false);
return;
}
object.detachEvent('on' + type, callback);
};
Array.isArray(context._mousetraps) && context._mousetraps.forEach(mousetrap => {
// manually unbind JS event
_removeEvent(mousetrap.target, 'keypress', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keydown', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keyup', mousetrap._handleKeyEvent);
mousetrap.reset();
});
context._mousetraps = [];
}
| unbindKeyboardShortcuts | identifier_name |
index.js | /* global Mousetrap */
import { typeOf } from '@ember/utils';
import { get } from "@ember/object";
export function bindKeyboardShortcuts(context) {
const shortcuts = get(context, 'keyboardShortcuts');
if (typeOf(shortcuts) !== 'object') {
return;
}
context._mousetraps = [];
Object.keys(shortcuts).forEach(function(shortcut) {
const actionObject = shortcuts[shortcut];
let mousetrap;
let preventDefault = true;
function invokeAction(action, eventType) {
let type = typeOf(action);
let callback;
if (type === 'string') {
callback = function() {
context.send(action);
return preventDefault !== true;
};
} else if (type === 'function') {
callback = action.bind(context);
} else {
throw new Error('Invalid value for keyboard shortcut: ' + action);
}
mousetrap.bind(shortcut, callback, eventType);
}
if (typeOf(actionObject) === 'object') {
if (actionObject.global === false) {
mousetrap = new Mousetrap(document);
} else if (actionObject.scoped) {
if (typeOf(actionObject.scoped) === 'boolean') {
mousetrap = new Mousetrap(get(context, 'element'));
} else if (typeOf(actionObject.scoped) === 'string') {
mousetrap = new Mousetrap(
document.querySelector(actionObject.scoped)
);
}
} else if (actionObject.targetElement) {
mousetrap = new Mousetrap(actionObject.targetElement);
} else {
mousetrap = new Mousetrap(document.body);
}
if (actionObject.preventDefault === false) {
preventDefault = false;
}
invokeAction(actionObject.action, actionObject.eventType);
} else {
mousetrap = new Mousetrap(document.body);
invokeAction(actionObject);
}
context._mousetraps.push(mousetrap);
});
}
export function unbindKeyboardShortcuts(context) | {
const _removeEvent = (object, type, callback) => {
if (object.removeEventListener) {
object.removeEventListener(type, callback, false);
return;
}
object.detachEvent('on' + type, callback);
};
Array.isArray(context._mousetraps) && context._mousetraps.forEach(mousetrap => {
// manually unbind JS event
_removeEvent(mousetrap.target, 'keypress', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keydown', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keyup', mousetrap._handleKeyEvent);
mousetrap.reset();
});
context._mousetraps = [];
} | identifier_body |
|
index.js | /* global Mousetrap */
import { typeOf } from '@ember/utils';
import { get } from "@ember/object";
export function bindKeyboardShortcuts(context) {
const shortcuts = get(context, 'keyboardShortcuts');
if (typeOf(shortcuts) !== 'object') {
return;
}
context._mousetraps = [];
Object.keys(shortcuts).forEach(function(shortcut) {
const actionObject = shortcuts[shortcut];
let mousetrap;
let preventDefault = true;
function invokeAction(action, eventType) {
let type = typeOf(action);
let callback;
if (type === 'string') {
callback = function() {
context.send(action);
return preventDefault !== true;
};
} else if (type === 'function') {
callback = action.bind(context);
} else {
throw new Error('Invalid value for keyboard shortcut: ' + action);
}
mousetrap.bind(shortcut, callback, eventType);
}
if (typeOf(actionObject) === 'object') {
if (actionObject.global === false) {
mousetrap = new Mousetrap(document);
} else if (actionObject.scoped) {
if (typeOf(actionObject.scoped) === 'boolean') {
mousetrap = new Mousetrap(get(context, 'element'));
} else if (typeOf(actionObject.scoped) === 'string') {
mousetrap = new Mousetrap(
document.querySelector(actionObject.scoped)
);
}
} else if (actionObject.targetElement) {
mousetrap = new Mousetrap(actionObject.targetElement);
} else {
mousetrap = new Mousetrap(document.body);
}
if (actionObject.preventDefault === false) {
preventDefault = false;
}
invokeAction(actionObject.action, actionObject.eventType);
} else {
mousetrap = new Mousetrap(document.body);
invokeAction(actionObject);
}
context._mousetraps.push(mousetrap);
});
}
export function unbindKeyboardShortcuts(context) {
const _removeEvent = (object, type, callback) => {
if (object.removeEventListener) {
object.removeEventListener(type, callback, false);
return;
}
object.detachEvent('on' + type, callback);
};
Array.isArray(context._mousetraps) && context._mousetraps.forEach(mousetrap => { | // manually unbind JS event
_removeEvent(mousetrap.target, 'keypress', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keydown', mousetrap._handleKeyEvent);
_removeEvent(mousetrap.target, 'keyup', mousetrap._handleKeyEvent);
mousetrap.reset();
});
context._mousetraps = [];
} | random_line_split |
|
border-base.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
function | (obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _postcss = require('postcss');
var _clone = require('../clone');
var _clone2 = _interopRequireDefault(_clone);
var _hasAllProps = require('../hasAllProps');
var _hasAllProps2 = _interopRequireDefault(_hasAllProps);
var _getLastNode = require('../getLastNode');
var _getLastNode2 = _interopRequireDefault(_getLastNode);
var _canMerge = require('../canMerge');
var _canMerge2 = _interopRequireDefault(_canMerge);
exports['default'] = function (direction) {
var wsc = ['width', 'style', 'color'].map(function (d) {
return 'border-' + direction + '-' + d;
});
var defaults = ['medium', 'none', 'currentColor'];
var declaration = 'border-' + direction;
var processor = {
explode: function explode(rule) {
rule.walkDecls(declaration, function (decl) {
var values = _postcss.list.space(decl.value);
wsc.forEach(function (prop, index) {
var node = (0, _clone2['default'])(decl);
node.prop = prop;
node.value = values[index];
if (node.value === undefined) {
node.value = defaults[index];
}
rule.insertAfter(decl, node);
});
decl.remove();
});
},
merge: function merge(rule) {
var decls = rule.nodes.filter(function (node) {
return node.prop && ~wsc.indexOf(node.prop);
});
var _loop = function () {
var lastNode = decls[decls.length - 1];
var props = decls.filter(function (node) {
return node.important === lastNode.important;
});
if (_hasAllProps2['default'].apply(undefined, [props].concat(wsc)) && _canMerge2['default'].apply(undefined, props)) {
var values = wsc.map(function (prop) {
return (0, _getLastNode2['default'])(props, prop).value;
});
var value = values.concat(['']).reduceRight(function (prev, cur, i) {
if (prev === '' && cur === defaults[i]) {
return prev;
}
return cur + " " + prev;
}).trim();
if (value === '') {
value = defaults[0];
}
var shorthand = (0, _clone2['default'])(lastNode);
shorthand.prop = declaration;
shorthand.value = value;
rule.insertAfter(lastNode, shorthand);
props.forEach(function (prop) {
return prop.remove();
});
}
decls = decls.filter(function (node) {
return ! ~props.indexOf(node);
});
};
while (decls.length) {
_loop();
}
}
};
return processor;
};
module.exports = exports['default']; | _interopRequireDefault | identifier_name |
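// --- Editor's note: what the processor above does, shown on a hypothetical rule.
// --- explode() splits the shorthand into width/style/color longhands (filling defaults
// --- for missing values), and merge() folds them back, dropping trailing defaults:
// a { border-left: 2px solid red; }
// explode -> border-left-width: 2px; border-left-style: solid; border-left-color: red;
// merge   -> border-left: 2px solid red;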
border-base.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
function _interopRequireDefault(obj) |
var _postcss = require('postcss');
var _clone = require('../clone');
var _clone2 = _interopRequireDefault(_clone);
var _hasAllProps = require('../hasAllProps');
var _hasAllProps2 = _interopRequireDefault(_hasAllProps);
var _getLastNode = require('../getLastNode');
var _getLastNode2 = _interopRequireDefault(_getLastNode);
var _canMerge = require('../canMerge');
var _canMerge2 = _interopRequireDefault(_canMerge);
exports['default'] = function (direction) {
var wsc = ['width', 'style', 'color'].map(function (d) {
return 'border-' + direction + '-' + d;
});
var defaults = ['medium', 'none', 'currentColor'];
var declaration = 'border-' + direction;
var processor = {
explode: function explode(rule) {
rule.walkDecls(declaration, function (decl) {
var values = _postcss.list.space(decl.value);
wsc.forEach(function (prop, index) {
var node = (0, _clone2['default'])(decl);
node.prop = prop;
node.value = values[index];
if (node.value === undefined) {
node.value = defaults[index];
}
rule.insertAfter(decl, node);
});
decl.remove();
});
},
merge: function merge(rule) {
var decls = rule.nodes.filter(function (node) {
return node.prop && ~wsc.indexOf(node.prop);
});
var _loop = function () {
var lastNode = decls[decls.length - 1];
var props = decls.filter(function (node) {
return node.important === lastNode.important;
});
if (_hasAllProps2['default'].apply(undefined, [props].concat(wsc)) && _canMerge2['default'].apply(undefined, props)) {
var values = wsc.map(function (prop) {
return (0, _getLastNode2['default'])(props, prop).value;
});
var value = values.concat(['']).reduceRight(function (prev, cur, i) {
if (prev === '' && cur === defaults[i]) {
return prev;
}
return cur + " " + prev;
}).trim();
if (value === '') {
value = defaults[0];
}
var shorthand = (0, _clone2['default'])(lastNode);
shorthand.prop = declaration;
shorthand.value = value;
rule.insertAfter(lastNode, shorthand);
props.forEach(function (prop) {
return prop.remove();
});
}
decls = decls.filter(function (node) {
return ! ~props.indexOf(node);
});
};
while (decls.length) {
_loop();
}
}
};
return processor;
};
module.exports = exports['default']; | { return obj && obj.__esModule ? obj : { 'default': obj }; } | identifier_body |
border-base.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _postcss = require('postcss');
var _clone = require('../clone');
var _clone2 = _interopRequireDefault(_clone);
var _hasAllProps = require('../hasAllProps');
var _hasAllProps2 = _interopRequireDefault(_hasAllProps);
var _getLastNode = require('../getLastNode');
var _getLastNode2 = _interopRequireDefault(_getLastNode);
var _canMerge = require('../canMerge');
var _canMerge2 = _interopRequireDefault(_canMerge);
exports['default'] = function (direction) {
var wsc = ['width', 'style', 'color'].map(function (d) {
return 'border-' + direction + '-' + d;
});
var defaults = ['medium', 'none', 'currentColor'];
var declaration = 'border-' + direction;
var processor = {
explode: function explode(rule) {
rule.walkDecls(declaration, function (decl) {
var values = _postcss.list.space(decl.value);
wsc.forEach(function (prop, index) {
var node = (0, _clone2['default'])(decl);
node.prop = prop;
node.value = values[index];
if (node.value === undefined) {
node.value = defaults[index];
}
rule.insertAfter(decl, node);
});
decl.remove();
});
},
merge: function merge(rule) {
var decls = rule.nodes.filter(function (node) {
return node.prop && ~wsc.indexOf(node.prop);
});
var _loop = function () {
var lastNode = decls[decls.length - 1];
var props = decls.filter(function (node) {
return node.important === lastNode.important;
});
if (_hasAllProps2['default'].apply(undefined, [props].concat(wsc)) && _canMerge2['default'].apply(undefined, props)) {
var values = wsc.map(function (prop) {
return (0, _getLastNode2['default'])(props, prop).value;
});
var value = values.concat(['']).reduceRight(function (prev, cur, i) {
if (prev === '' && cur === defaults[i]) {
return prev;
}
return cur + " " + prev;
}).trim();
if (value === '') {
value = defaults[0];
}
var shorthand = (0, _clone2['default'])(lastNode);
shorthand.prop = declaration;
shorthand.value = value;
rule.insertAfter(lastNode, shorthand);
props.forEach(function (prop) {
return prop.remove();
});
}
decls = decls.filter(function (node) {
return ! ~props.indexOf(node);
});
};
while (decls.length) {
_loop();
}
}
};
return processor; | };
module.exports = exports['default']; | random_line_split |
|
border-base.js | 'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _postcss = require('postcss');
var _clone = require('../clone');
var _clone2 = _interopRequireDefault(_clone);
var _hasAllProps = require('../hasAllProps');
var _hasAllProps2 = _interopRequireDefault(_hasAllProps);
var _getLastNode = require('../getLastNode');
var _getLastNode2 = _interopRequireDefault(_getLastNode);
var _canMerge = require('../canMerge');
var _canMerge2 = _interopRequireDefault(_canMerge);
exports['default'] = function (direction) {
var wsc = ['width', 'style', 'color'].map(function (d) {
return 'border-' + direction + '-' + d;
});
var defaults = ['medium', 'none', 'currentColor'];
var declaration = 'border-' + direction;
var processor = {
explode: function explode(rule) {
rule.walkDecls(declaration, function (decl) {
var values = _postcss.list.space(decl.value);
wsc.forEach(function (prop, index) {
var node = (0, _clone2['default'])(decl);
node.prop = prop;
node.value = values[index];
if (node.value === undefined) |
rule.insertAfter(decl, node);
});
decl.remove();
});
},
merge: function merge(rule) {
var decls = rule.nodes.filter(function (node) {
return node.prop && ~wsc.indexOf(node.prop);
});
var _loop = function () {
var lastNode = decls[decls.length - 1];
var props = decls.filter(function (node) {
return node.important === lastNode.important;
});
if (_hasAllProps2['default'].apply(undefined, [props].concat(wsc)) && _canMerge2['default'].apply(undefined, props)) {
var values = wsc.map(function (prop) {
return (0, _getLastNode2['default'])(props, prop).value;
});
var value = values.concat(['']).reduceRight(function (prev, cur, i) {
if (prev === '' && cur === defaults[i]) {
return prev;
}
return cur + " " + prev;
}).trim();
if (value === '') {
value = defaults[0];
}
var shorthand = (0, _clone2['default'])(lastNode);
shorthand.prop = declaration;
shorthand.value = value;
rule.insertAfter(lastNode, shorthand);
props.forEach(function (prop) {
return prop.remove();
});
}
decls = decls.filter(function (node) {
return ! ~props.indexOf(node);
});
};
while (decls.length) {
_loop();
}
}
};
return processor;
};
module.exports = exports['default']; | {
node.value = defaults[index];
} | conditional_block |
index.ts | import analytics from './analytics';
import auth from './auth';
import channel from './channel';
import channelsApi from './channelsApi';
import clipboard from './clipboard';
import confirmModal from './confirm-modal';
import distangle from './distangle';
import documentItemsApi from './document-items-api';
import documentSubscriptionsApi from './document-subscriptions-api';
import documentUploadModal from './document-upload-modal';
import featureFlags from './feature-flags';
import feedbackModal from './feedback-modal';
import meta from './meta';
import notifications from './notifications';
import peopleApi from './people-api';
import person from './person';
import scroll from './scroll';
import websockets from './websockets';
export default function(app) {
analytics(app);
auth(app); | clipboard(app);
confirmModal(app);
distangle(app);
documentUploadModal(app);
documentItemsApi(app);
documentSubscriptionsApi(app);
featureFlags(app);
feedbackModal(app);
meta(app);
notifications(app);
peopleApi(app);
person(app);
scroll(app);
websockets(app);
} | channel(app);
channelsApi(app); | random_line_split |
autoderef-method-priority.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
trait double {
fn double(self) -> uint;
}
impl double for uint {
fn double(self) -> uint { self }
}
impl double for Box<uint> {
fn | (self) -> uint { *self * 2u }
}
pub fn main() {
let x = box 3u;
assert_eq!(x.double(), 6u);
}
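// --- Editor's note: the point of this (era-specific) test is method lookup order: for
// --- `x: Box<uint>`, the `double` impl on `Box<uint>` itself is found before any
// --- autoderef to `uint`, so `x.double()` is 6, not 3. A present-day sketch of the
// --- same idea (assumed, not part of the original test suite):
// trait Double { fn double(self) -> usize; }
// impl Double for usize { fn double(self) -> usize { self } }
// impl Double for Box<usize> { fn double(self) -> usize { *self * 2 } }
// fn main() { assert_eq!(Box::new(3usize).double(), 6); }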
| double | identifier_name |
autoderef-method-priority.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
trait double {
fn double(self) -> uint;
}
impl double for uint {
fn double(self) -> uint { self }
}
impl double for Box<uint> {
fn double(self) -> uint { *self * 2u }
}
pub fn main() {
let x = box 3u;
assert_eq!(x.double(), 6u); | } | random_line_split |
|
autoderef-method-priority.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
trait double {
fn double(self) -> uint;
}
impl double for uint {
fn double(self) -> uint { self }
}
impl double for Box<uint> {
fn double(self) -> uint |
}
pub fn main() {
let x = box 3u;
assert_eq!(x.double(), 6u);
}
| { *self * 2u } | identifier_body |
deploy_local.ts | // tslint:disable:no-var-requires
/*
Allows easier local debugging over SSH.
Running `npm run deploy_local` updates remote adapter files
and restarts the instance
*/
/*
CONFIGURATION:
- provide a deploy_password.json file in the local dir with contents
{
"host": "<HOSTNAME>",
"username": "<USERNAME>",
"password": "<PASSWORD>"
}
- specify which dirs and files should be uploaded
- specify where the root dir is relative to this script
*/
const uploadDirs = ["admin", "build"];
const uploadFiles = ["package.json", "io-package.json", "main.js"];
const rootDir = "../";
// =========================
// CAN'T TOUCH THIS
// =========================
import * as nodeSSH from "node-ssh";
import * as path from "path";
const localRoot = path.resolve(__dirname, rootDir);
const ioPack = require(path.join(rootDir, "io-package.json"));
const ADAPTER_NAME = ioPack.common.name;
const ssh = new nodeSSH();
const sshConfig = require(path.join(__dirname, "deploy_password.json"));
| await ssh.connect(sshConfig);
for (const dir of uploadDirs) {
console.log(`cleaning ${dir} dir...`);
await ssh.execCommand(`rm -rf ${path.join(remoteRoot, dir)}`);
console.log(`uploading ${dir} dir...`);
try {
await ssh.putDirectory(path.join(localRoot, dir), path.join(remoteRoot, dir), {
recursive: true,
concurrency: 10,
validate: (pathname) => {
const basename = path.basename(pathname);
if (basename.startsWith("deploy_")) return false;
if (basename.endsWith("Thumbs.db")) return false;
if (basename.endsWith(".map") && basename.indexOf(".bundle.") === -1) return false;
if (basename.indexOf(".test.") > -1) return false;
if (basename === "src") return false;
return true;
},
});
} catch (e) {
console.error(e);
}
}
for (const file of uploadFiles) {
console.log(`uploading ${file}...`);
await ssh.putFile(path.join(localRoot, file), path.join(remoteRoot, file));
}
// update in-mem adapter
let execResult;
console.log("updating in-mem adapter");
execResult = await ssh.execCommand(`iobroker upload ${ADAPTER_NAME}`);
console.log(execResult.stdout);
console.log(execResult.stderr);
if (process.argv.indexOf("--norestart") === -1) {
execResult = await ssh.execCommand(`iobroker restart ${ADAPTER_NAME}`);
console.log(execResult.stdout);
console.log(execResult.stderr);
}
console.log("done");
process.exit(0);
})(); | const remoteRoot = `/opt/iobroker/node_modules/iobroker.${ADAPTER_NAME}`;
(async function main() { | random_line_split |
simd-binop.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
#![allow(experimental)]
use std::unstable::simd::f32x4;
fn main() {
let _ = f32x4(0.0, 0.0, 0.0, 0.0) == f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `==` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) != f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `!=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) < f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `<` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) <= f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `<=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) >= f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `>=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) > f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `>` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
} | random_line_split |
|
simd-binop.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
#![allow(experimental)]
use std::unstable::simd::f32x4;
fn | () {
let _ = f32x4(0.0, 0.0, 0.0, 0.0) == f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `==` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) != f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `!=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) < f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `<` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) <= f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `<=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) >= f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `>=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) > f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `>` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
}
| main | identifier_name |
simd-binop.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
#![allow(experimental)]
use std::unstable::simd::f32x4;
fn main() | }
| {
let _ = f32x4(0.0, 0.0, 0.0, 0.0) == f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `==` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) != f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `!=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) < f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `<` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) <= f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `<=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) >= f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `>=` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
let _ = f32x4(0.0, 0.0, 0.0, 0.0) > f32x4(0.0, 0.0, 0.0, 0.0);
//~^ ERROR binary comparison operation `>` not supported for floating point SIMD vector `std::unstable::simd::f32x4`
| identifier_body |
socks4.rs | 91: request rejected or failed
RequestRejectedOrFailed,
/// 92: request rejected because SOCKS server cannot connect to identd on the client
RequestRejectedCannotConnect,
/// 93: request rejected because the client program and identd report different user-ids
RequestRejectedDifferentUserId,
/// Other replies
Other(u8),
}
impl ResultCode {
#[inline]
fn as_u8(self) -> u8 {
match self {
ResultCode::RequestGranted => consts::SOCKS4_RESULT_REQUEST_GRANTED,
ResultCode::RequestRejectedOrFailed => consts::SOCKS4_RESULT_REQUEST_REJECTED_OR_FAILED,
ResultCode::RequestRejectedCannotConnect => consts::SOCKS4_RESULT_REQUEST_REJECTED_CANNOT_CONNECT,
ResultCode::RequestRejectedDifferentUserId => consts::SOCKS4_RESULT_REQUEST_REJECTED_DIFFERENT_USER_ID,
ResultCode::Other(c) => c,
}
}
#[inline]
fn from_u8(code: u8) -> ResultCode {
match code {
consts::SOCKS4_RESULT_REQUEST_GRANTED => ResultCode::RequestGranted,
consts::SOCKS4_RESULT_REQUEST_REJECTED_OR_FAILED => ResultCode::RequestRejectedOrFailed,
consts::SOCKS4_RESULT_REQUEST_REJECTED_CANNOT_CONNECT => ResultCode::RequestRejectedCannotConnect,
consts::SOCKS4_RESULT_REQUEST_REJECTED_DIFFERENT_USER_ID => ResultCode::RequestRejectedDifferentUserId,
code => ResultCode::Other(code),
}
}
}
impl fmt::Display for ResultCode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
ResultCode::RequestGranted => f.write_str("request granted"),
ResultCode::RequestRejectedOrFailed => f.write_str("request rejected or failed"),
ResultCode::RequestRejectedCannotConnect => {
f.write_str("request rejected because SOCKS server cannot connect to identd on the client")
}
ResultCode::RequestRejectedDifferentUserId => {
f.write_str("request rejected because the client program and identd report different user-ids")
}
ResultCode::Other(code) => write!(f, "other result code {}", code),
}
}
}
/// SOCKS4 Address type
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Address {
/// Socket address (IP Address)
SocketAddress(SocketAddrV4),
/// Domain name address (SOCKS4a)
DomainNameAddress(String, u16),
}
impl fmt::Debug for Address {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Address::SocketAddress(ref addr) => write!(f, "{}", addr),
Address::DomainNameAddress(ref addr, ref port) => write!(f, "{}:{}", addr, port),
}
}
}
impl fmt::Display for Address {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Address::SocketAddress(ref addr) => write!(f, "{}", addr),
Address::DomainNameAddress(ref addr, ref port) => write!(f, "{}:{}", addr, port),
}
}
}
impl From<SocketAddrV4> for Address {
fn from(s: SocketAddrV4) -> Address {
Address::SocketAddress(s)
}
}
impl From<(String, u16)> for Address {
fn from((dn, port): (String, u16)) -> Address {
Address::DomainNameAddress(dn, port)
}
}
impl From<(&str, u16)> for Address {
fn from((dn, port): (&str, u16)) -> Address {
Address::DomainNameAddress(dn.to_owned(), port)
}
}
impl From<&Address> for Address {
fn from(addr: &Address) -> Address {
addr.clone()
}
}
impl From<Address> for socks5::Address {
fn from(addr: Address) -> socks5::Address {
match addr {
Address::SocketAddress(a) => socks5::Address::SocketAddress(SocketAddr::V4(a)),
Address::DomainNameAddress(d, p) => socks5::Address::DomainNameAddress(d, p),
}
}
}
/// Handshake Request
///
/// ```plain
/// The client connects to the SOCKS server and sends a CONNECT/BIND request when
/// it wants to establish a connection to an application server. The client
/// includes in the request packet the IP address and the port number of the
/// destination host, and userid, in the following format.
///
/// +----+----+----+----+----+----+----+----+----+----+....+----+
/// | VN | CD | DSTPORT | DSTIP | USERID |NULL|
/// +----+----+----+----+----+----+----+----+----+----+....+----+
/// # of bytes: 1 1 2 4 variable 1
///
/// VN is the SOCKS protocol version number and should be 4. CD is the
/// SOCKS command code and should be 1 for CONNECT request, 2 for BIND request. NULL is a byte
/// of all zero bits.
/// ```
#[derive(Debug, Clone)]
pub struct HandshakeRequest {
pub cd: Command,
pub dst: Address,
pub user_id: Vec<u8>,
}
impl HandshakeRequest {
/// Read from a reader
pub async fn read_from<R>(r: &mut R) -> Result<HandshakeRequest, Error>
where
R: AsyncBufRead + Unpin,
{
let mut buf = [0u8; 8];
let _ = r.read_exact(&mut buf).await?;
let vn = buf[0];
if vn != consts::SOCKS4_VERSION {
return Err(Error::UnsupportedSocksVersion(vn));
}
let cd = buf[1];
let command = match Command::from_u8(cd) {
Some(c) => c,
None => {
return Err(Error::UnsupportedSocksVersion(cd));
}
};
let port = BigEndian::read_u16(&buf[2..4]);
let mut user_id = Vec::new();
let _ = r.read_until(b'\0', &mut user_id).await?;
if user_id.is_empty() || user_id.last() != Some(&b'\0') {
return Err(io::Error::from(ErrorKind::UnexpectedEof).into());
}
user_id.pop(); // Pops the last b'\0'
let dst = if buf[4] == 0x00 && buf[5] == 0x00 && buf[6] == 0x00 && buf[7] != 0x00 {
// SOCKS4a, indicates that it is a HOST address
let mut host = Vec::new();
let _ = r.read_until(b'\0', &mut host).await?;
if host.is_empty() || host.last() != Some(&b'\0') {
return Err(io::Error::from(ErrorKind::UnexpectedEof).into());
}
host.pop(); // Pops the last b'\0'
match String::from_utf8(host) {
Ok(host) => Address::DomainNameAddress(host, port),
Err(..) => {
return Err(Error::AddressHostInvalidEncoding);
}
}
} else {
let ip = Ipv4Addr::new(buf[4], buf[5], buf[6], buf[7]);
Address::SocketAddress(SocketAddrV4::new(ip, port))
};
Ok(HandshakeRequest {
cd: command,
dst,
user_id,
})
}
/// Writes to writer
pub async fn write_to<W>(&self, w: &mut W) -> io::Result<()>
where
W: AsyncWrite + Unpin,
{
let mut buf = BytesMut::with_capacity(self.serialized_len());
self.write_to_buf(&mut buf);
w.write_all(&buf).await
}
/// Writes to buffer
pub fn write_to_buf<B: BufMut>(&self, buf: &mut B) {
debug_assert!(
!self.user_id.contains(&b'\0'),
"USERID shouldn't contain any NULL characters"
);
buf.put_u8(consts::SOCKS4_VERSION);
buf.put_u8(self.cd.as_u8());
match self.dst {
Address::SocketAddress(ref saddr) => {
let port = saddr.port();
buf.put_u16(port);
buf.put_slice(&saddr.ip().octets());
buf.put_slice(&self.user_id);
buf.put_u8(b'\0');
}
Address::DomainNameAddress(ref dname, port) => {
buf.put_u16(port);
// 0.0.0.x (x != 0)
const PLACEHOLDER: [u8; 4] = [0x00, 0x00, 0x00, 0xff];
buf.put_slice(&PLACEHOLDER);
buf.put_slice(&self.user_id);
buf.put_u8(b'\0');
buf.put_slice(dname.as_bytes());
buf.put_u8(b'\0');
}
}
}
/// Length in bytes
#[inline]
pub fn | serialized_len | identifier_name |
|
socks4.rs | => write!(f, "{}", addr),
Address::DomainNameAddress(ref addr, ref port) => write!(f, "{}:{}", addr, port),
}
}
}
impl fmt::Display for Address {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Address::SocketAddress(ref addr) => write!(f, "{}", addr),
Address::DomainNameAddress(ref addr, ref port) => write!(f, "{}:{}", addr, port),
}
}
}
impl From<SocketAddrV4> for Address {
fn from(s: SocketAddrV4) -> Address {
Address::SocketAddress(s)
}
}
impl From<(String, u16)> for Address {
fn from((dn, port): (String, u16)) -> Address {
Address::DomainNameAddress(dn, port)
}
}
impl From<(&str, u16)> for Address {
fn from((dn, port): (&str, u16)) -> Address {
Address::DomainNameAddress(dn.to_owned(), port)
}
}
impl From<&Address> for Address {
fn from(addr: &Address) -> Address {
addr.clone()
}
}
impl From<Address> for socks5::Address {
fn from(addr: Address) -> socks5::Address {
match addr {
Address::SocketAddress(a) => socks5::Address::SocketAddress(SocketAddr::V4(a)),
Address::DomainNameAddress(d, p) => socks5::Address::DomainNameAddress(d, p),
}
}
}
/// Handshake Request
///
/// ```plain
/// The client connects to the SOCKS server and sends a CONNECT/BIND request when
/// it wants to establish a connection to an application server. The client
/// includes in the request packet the IP address and the port number of the
/// destination host, and userid, in the following format.
///
/// +----+----+----+----+----+----+----+----+----+----+....+----+
/// | VN | CD | DSTPORT | DSTIP | USERID |NULL|
/// +----+----+----+----+----+----+----+----+----+----+....+----+
/// # of bytes: 1 1 2 4 variable 1
///
/// VN is the SOCKS protocol version number and should be 4. CD is the
/// SOCKS command code and should be 1 for CONNECT request, 2 for BIND request. NULL is a byte
/// of all zero bits.
/// ```
#[derive(Debug, Clone)]
pub struct HandshakeRequest {
pub cd: Command,
pub dst: Address,
pub user_id: Vec<u8>,
}
impl HandshakeRequest {
/// Read from a reader
pub async fn read_from<R>(r: &mut R) -> Result<HandshakeRequest, Error>
where
R: AsyncBufRead + Unpin,
{
let mut buf = [0u8; 8];
let _ = r.read_exact(&mut buf).await?;
let vn = buf[0];
if vn != consts::SOCKS4_VERSION {
return Err(Error::UnsupportedSocksVersion(vn));
}
let cd = buf[1];
let command = match Command::from_u8(cd) {
Some(c) => c,
None => {
return Err(Error::UnsupportedSocksVersion(cd));
}
};
let port = BigEndian::read_u16(&buf[2..4]);
let mut user_id = Vec::new();
let _ = r.read_until(b'\0', &mut user_id).await?;
if user_id.is_empty() || user_id.last() != Some(&b'\0') {
return Err(io::Error::from(ErrorKind::UnexpectedEof).into());
}
user_id.pop(); // Pops the last b'\0'
let dst = if buf[4] == 0x00 && buf[5] == 0x00 && buf[6] == 0x00 && buf[7] != 0x00 {
// SOCKS4a, indicates that it is a HOST address
let mut host = Vec::new();
let _ = r.read_until(b'\0', &mut host).await?;
if host.is_empty() || host.last() != Some(&b'\0') {
return Err(io::Error::from(ErrorKind::UnexpectedEof).into());
}
host.pop(); // Pops the last b'\0'
match String::from_utf8(host) {
Ok(host) => Address::DomainNameAddress(host, port),
Err(..) => {
return Err(Error::AddressHostInvalidEncoding);
}
}
} else {
let ip = Ipv4Addr::new(buf[4], buf[5], buf[6], buf[7]);
Address::SocketAddress(SocketAddrV4::new(ip, port))
};
Ok(HandshakeRequest {
cd: command,
dst,
user_id,
})
}
/// Writes to writer
pub async fn write_to<W>(&self, w: &mut W) -> io::Result<()>
where
W: AsyncWrite + Unpin,
{
let mut buf = BytesMut::with_capacity(self.serialized_len());
self.write_to_buf(&mut buf);
w.write_all(&buf).await
}
/// Writes to buffer
pub fn write_to_buf<B: BufMut>(&self, buf: &mut B) {
debug_assert!(
!self.user_id.contains(&b'\0'),
"USERID shouldn't contain any NULL characters"
);
buf.put_u8(consts::SOCKS4_VERSION);
buf.put_u8(self.cd.as_u8());
match self.dst {
Address::SocketAddress(ref saddr) => {
let port = saddr.port();
buf.put_u16(port);
buf.put_slice(&saddr.ip().octets());
buf.put_slice(&self.user_id);
buf.put_u8(b'\0');
}
Address::DomainNameAddress(ref dname, port) => {
buf.put_u16(port);
// 0.0.0.x (x != 0)
const PLACEHOLDER: [u8; 4] = [0x00, 0x00, 0x00, 0xff];
buf.put_slice(&PLACEHOLDER);
buf.put_slice(&self.user_id);
buf.put_u8(b'\0');
buf.put_slice(dname.as_bytes());
buf.put_u8(b'\0');
}
}
}
/// Length in bytes
#[inline]
pub fn serialized_len(&self) -> usize {
let mut s = 1 + 1 + 2 + 4 + self.user_id.len() + 1; // USERID.LEN + NULL
if let Address::DomainNameAddress(ref dname, _) = self.dst {
s += dname.len() + 1;
}
s
}
}
/// Handshake Response
///
/// ```plain
/// +----+----+----+----+----+----+----+----+
/// | VN | CD | DSTPORT | DSTIP |
/// +----+----+----+----+----+----+----+----+
/// # of bytes: 1 1 2 4
/// ```
#[derive(Debug, Clone)]
pub struct HandshakeResponse {
pub cd: ResultCode,
}
impl HandshakeResponse {
/// Create a response with code
pub fn new(code: ResultCode) -> HandshakeResponse {
HandshakeResponse { cd: code }
}
/// Read from a reader
pub async fn read_from<R>(r: &mut R) -> Result<HandshakeResponse, Error>
where
R: AsyncRead + Unpin,
{
let mut buf = [0u8; 8];
let _ = r.read_exact(&mut buf).await?;
let vn = buf[0];
if vn != 0 {
return Err(Error::UnsupportedSocksVersion(vn));
}
let cd = buf[1];
let result_code = ResultCode::from_u8(cd);
// DSTPORT, DSTIP are ignored
Ok(HandshakeResponse { cd: result_code })
}
/// Write data into a writer
pub async fn write_to<W>(&self, w: &mut W) -> io::Result<()>
where
W: AsyncWrite + Unpin,
{
let mut buf = BytesMut::with_capacity(self.serialized_len());
self.write_to_buf(&mut buf);
w.write_all(&buf).await
}
/// Writes to buffer
pub fn write_to_buf<B: BufMut>(&self, buf: &mut B) | {
let HandshakeResponse { ref cd } = *self;
buf.put_slice(&[
// VN: Result Code's version, must be 0
0x00,
// CD: Result Code
cd.as_u8(),
// DSTPORT: Ignored
0x00,
0x00,
// DSTIP: Ignored
0x00,
0x00,
0x00,
0x00,
]);
} | identifier_body |
|
socks4.rs | _str("request rejected because the client program and identd report different user-ids")
}
ResultCode::Other(code) => write!(f, "other result code {}", code),
}
}
}
/// SOCKS4 Address type
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Address {
/// Socket address (IP Address)
SocketAddress(SocketAddrV4),
/// Domain name address (SOCKS4a)
DomainNameAddress(String, u16),
}
impl fmt::Debug for Address {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Address::SocketAddress(ref addr) => write!(f, "{}", addr),
Address::DomainNameAddress(ref addr, ref port) => write!(f, "{}:{}", addr, port),
}
}
}
impl fmt::Display for Address {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Address::SocketAddress(ref addr) => write!(f, "{}", addr),
Address::DomainNameAddress(ref addr, ref port) => write!(f, "{}:{}", addr, port),
}
}
}
impl From<SocketAddrV4> for Address {
fn from(s: SocketAddrV4) -> Address {
Address::SocketAddress(s)
}
}
impl From<(String, u16)> for Address {
fn from((dn, port): (String, u16)) -> Address {
Address::DomainNameAddress(dn, port)
}
}
impl From<(&str, u16)> for Address {
fn from((dn, port): (&str, u16)) -> Address {
Address::DomainNameAddress(dn.to_owned(), port)
}
}
impl From<&Address> for Address {
fn from(addr: &Address) -> Address {
addr.clone()
}
}
impl From<Address> for socks5::Address {
fn from(addr: Address) -> socks5::Address {
match addr {
Address::SocketAddress(a) => socks5::Address::SocketAddress(SocketAddr::V4(a)),
Address::DomainNameAddress(d, p) => socks5::Address::DomainNameAddress(d, p),
}
}
}
/// Handshake Request
///
/// ```plain
/// The client connects to the SOCKS server and sends a CONNECT/BIND request when
/// it wants to establish a connection to an application server. The client
/// includes in the request packet the IP address and the port number of the
/// destination host, and userid, in the following format.
///
/// +----+----+----+----+----+----+----+----+----+----+....+----+
/// | VN | CD | DSTPORT | DSTIP | USERID |NULL|
/// +----+----+----+----+----+----+----+----+----+----+....+----+
/// # of bytes: 1 1 2 4 variable 1
///
/// VN is the SOCKS protocol version number and should be 4. CD is the
/// SOCKS command code and should be 1 for CONNECT request, 2 for BIND request. NULL is a byte
/// of all zero bits.
/// ```
#[derive(Debug, Clone)]
pub struct HandshakeRequest {
pub cd: Command,
pub dst: Address,
pub user_id: Vec<u8>,
}
impl HandshakeRequest {
/// Read from a reader
pub async fn read_from<R>(r: &mut R) -> Result<HandshakeRequest, Error>
where
R: AsyncBufRead + Unpin,
{
let mut buf = [0u8; 8];
let _ = r.read_exact(&mut buf).await?;
let vn = buf[0];
if vn != consts::SOCKS4_VERSION {
return Err(Error::UnsupportedSocksVersion(vn));
}
let cd = buf[1];
let command = match Command::from_u8(cd) {
Some(c) => c,
None => {
return Err(Error::UnsupportedSocksVersion(cd));
}
};
let port = BigEndian::read_u16(&buf[2..4]);
let mut user_id = Vec::new();
let _ = r.read_until(b'\0', &mut user_id).await?;
if user_id.is_empty() || user_id.last() != Some(&b'\0') {
return Err(io::Error::from(ErrorKind::UnexpectedEof).into());
}
user_id.pop(); // Pops the last b'\0'
let dst = if buf[4] == 0x00 && buf[5] == 0x00 && buf[6] == 0x00 && buf[7] != 0x00 {
// SOCKS4a, indicates that it is a HOST address
let mut host = Vec::new();
let _ = r.read_until(b'\0', &mut host).await?;
if host.is_empty() || host.last() != Some(&b'\0') {
return Err(io::Error::from(ErrorKind::UnexpectedEof).into());
}
host.pop(); // Pops the last b'\0'
match String::from_utf8(host) {
Ok(host) => Address::DomainNameAddress(host, port),
Err(..) => {
return Err(Error::AddressHostInvalidEncoding);
}
}
} else {
let ip = Ipv4Addr::new(buf[4], buf[5], buf[6], buf[7]);
Address::SocketAddress(SocketAddrV4::new(ip, port))
};
Ok(HandshakeRequest {
cd: command,
dst,
user_id,
})
}
/// Writes to writer
pub async fn write_to<W>(&self, w: &mut W) -> io::Result<()>
where
W: AsyncWrite + Unpin,
{
let mut buf = BytesMut::with_capacity(self.serialized_len());
self.write_to_buf(&mut buf);
w.write_all(&buf).await
}
/// Writes to buffer
pub fn write_to_buf<B: BufMut>(&self, buf: &mut B) {
debug_assert!(
!self.user_id.contains(&b'\0'),
"USERID shouldn't contain any NULL characters"
);
buf.put_u8(consts::SOCKS4_VERSION);
buf.put_u8(self.cd.as_u8());
match self.dst {
Address::SocketAddress(ref saddr) => {
let port = saddr.port();
buf.put_u16(port);
buf.put_slice(&saddr.ip().octets());
buf.put_slice(&self.user_id);
buf.put_u8(b'\0');
}
Address::DomainNameAddress(ref dname, port) => {
buf.put_u16(port);
// 0.0.0.x (x != 0)
const PLACEHOLDER: [u8; 4] = [0x00, 0x00, 0x00, 0xff];
buf.put_slice(&PLACEHOLDER);
buf.put_slice(&self.user_id);
buf.put_u8(b'\0');
buf.put_slice(dname.as_bytes());
buf.put_u8(b'\0');
}
}
}
/// Length in bytes
#[inline]
pub fn serialized_len(&self) -> usize {
let mut s = 1 + 1 + 2 + 4 + self.user_id.len() + 1; // USERID.LEN + NULL
if let Address::DomainNameAddress(ref dname, _) = self.dst {
s += dname.len() + 1;
}
s
}
}
/// Handshake Response
///
/// ```plain
/// +----+----+----+----+----+----+----+----+
/// | VN | CD | DSTPORT | DSTIP |
/// +----+----+----+----+----+----+----+----+
/// # of bytes: 1 1 2 4
/// ```
#[derive(Debug, Clone)]
pub struct HandshakeResponse {
pub cd: ResultCode,
}
impl HandshakeResponse {
/// Create a response with code
pub fn new(code: ResultCode) -> HandshakeResponse {
HandshakeResponse { cd: code }
}
/// Read from a reader
pub async fn read_from<R>(r: &mut R) -> Result<HandshakeResponse, Error>
where
R: AsyncRead + Unpin,
{
let mut buf = [0u8; 8];
let _ = r.read_exact(&mut buf).await?;
let vn = buf[0];
if vn != 0 {
return Err(Error::UnsupportedSocksVersion(vn));
}
let cd = buf[1];
let result_code = ResultCode::from_u8(cd);
// DSTPORT, DSTIP are ignored
Ok(HandshakeResponse { cd: result_code })
}
/// Write data into a writer
pub async fn write_to<W>(&self, w: &mut W) -> io::Result<()>
where
W: AsyncWrite + Unpin,
{
let mut buf = BytesMut::with_capacity(self.serialized_len());
self.write_to_buf(&mut buf); | w.write_all(&buf).await | random_line_split |
|
text_mark.rs | // This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use TextBuffer;
use ffi;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct TextMark(Object<ffi::GtkTextMark>);
match fn {
get_type => || ffi::gtk_text_mark_get_type(),
}
}
impl TextMark {
pub fn new<'a, P: Into<Option<&'a str>>>(name: P, left_gravity: bool) -> TextMark {
assert_initialized_main_thread!();
let name = name.into();
let name = name.to_glib_none();
unsafe {
from_glib_full(ffi::gtk_text_mark_new(name.0, left_gravity.to_glib()))
}
}
}
pub trait TextMarkExt {
fn get_buffer(&self) -> Option<TextBuffer>;
fn get_deleted(&self) -> bool;
fn get_left_gravity(&self) -> bool;
fn get_name(&self) -> Option<String>;
fn get_visible(&self) -> bool;
fn set_visible(&self, setting: bool);
}
impl<O: IsA<TextMark>> TextMarkExt for O {
fn get_buffer(&self) -> Option<TextBuffer> {
unsafe {
from_glib_none(ffi::gtk_text_mark_get_buffer(self.to_glib_none().0))
}
}
fn get_deleted(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_deleted(self.to_glib_none().0))
}
}
fn get_left_gravity(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_left_gravity(self.to_glib_none().0))
}
}
fn | (&self) -> Option<String> {
unsafe {
from_glib_none(ffi::gtk_text_mark_get_name(self.to_glib_none().0))
}
}
fn get_visible(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_visible(self.to_glib_none().0))
}
}
fn set_visible(&self, setting: bool) {
unsafe {
ffi::gtk_text_mark_set_visible(self.to_glib_none().0, setting.to_glib());
}
}
}
| get_name | identifier_name |
text_mark.rs | // This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use TextBuffer;
use ffi;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct TextMark(Object<ffi::GtkTextMark>);
match fn {
get_type => || ffi::gtk_text_mark_get_type(),
}
}
impl TextMark {
pub fn new<'a, P: Into<Option<&'a str>>>(name: P, left_gravity: bool) -> TextMark {
assert_initialized_main_thread!();
let name = name.into();
let name = name.to_glib_none();
unsafe {
from_glib_full(ffi::gtk_text_mark_new(name.0, left_gravity.to_glib()))
}
}
}
pub trait TextMarkExt {
fn get_buffer(&self) -> Option<TextBuffer>;
fn get_deleted(&self) -> bool;
fn get_left_gravity(&self) -> bool;
fn get_name(&self) -> Option<String>;
fn get_visible(&self) -> bool;
fn set_visible(&self, setting: bool);
}
impl<O: IsA<TextMark>> TextMarkExt for O {
fn get_buffer(&self) -> Option<TextBuffer> {
unsafe {
from_glib_none(ffi::gtk_text_mark_get_buffer(self.to_glib_none().0))
}
}
fn get_deleted(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_deleted(self.to_glib_none().0))
}
}
fn get_left_gravity(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_left_gravity(self.to_glib_none().0)) | }
}
fn get_name(&self) -> Option<String> {
unsafe {
from_glib_none(ffi::gtk_text_mark_get_name(self.to_glib_none().0))
}
}
fn get_visible(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_visible(self.to_glib_none().0))
}
}
fn set_visible(&self, setting: bool) {
unsafe {
ffi::gtk_text_mark_set_visible(self.to_glib_none().0, setting.to_glib());
}
}
} | random_line_split |
|
text_mark.rs | // This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use TextBuffer;
use ffi;
use glib::object::IsA;
use glib::translate::*;
glib_wrapper! {
pub struct TextMark(Object<ffi::GtkTextMark>);
match fn {
get_type => || ffi::gtk_text_mark_get_type(),
}
}
impl TextMark {
pub fn new<'a, P: Into<Option<&'a str>>>(name: P, left_gravity: bool) -> TextMark {
assert_initialized_main_thread!();
let name = name.into();
let name = name.to_glib_none();
unsafe {
from_glib_full(ffi::gtk_text_mark_new(name.0, left_gravity.to_glib()))
}
}
}
pub trait TextMarkExt {
fn get_buffer(&self) -> Option<TextBuffer>;
fn get_deleted(&self) -> bool;
fn get_left_gravity(&self) -> bool;
fn get_name(&self) -> Option<String>;
fn get_visible(&self) -> bool;
fn set_visible(&self, setting: bool);
}
impl<O: IsA<TextMark>> TextMarkExt for O {
fn get_buffer(&self) -> Option<TextBuffer> {
unsafe {
from_glib_none(ffi::gtk_text_mark_get_buffer(self.to_glib_none().0))
}
}
fn get_deleted(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_deleted(self.to_glib_none().0))
}
}
fn get_left_gravity(&self) -> bool |
fn get_name(&self) -> Option<String> {
unsafe {
from_glib_none(ffi::gtk_text_mark_get_name(self.to_glib_none().0))
}
}
fn get_visible(&self) -> bool {
unsafe {
from_glib(ffi::gtk_text_mark_get_visible(self.to_glib_none().0))
}
}
fn set_visible(&self, setting: bool) {
unsafe {
ffi::gtk_text_mark_set_visible(self.to_glib_none().0, setting.to_glib());
}
}
}
| {
unsafe {
from_glib(ffi::gtk_text_mark_get_left_gravity(self.to_glib_none().0))
}
} | identifier_body |
gamma.rs | fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl Sample<f64> for GammaSmallShape {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl Sample<f64> for GammaLargeShape {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for Gamma {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match self.repr {
Small(ref g) => g.ind_sample(rng),
One(ref g) => g.ind_sample(rng),
Large(ref g) => g.ind_sample(rng),
}
}
}
impl IndependentSample<f64> for GammaSmallShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
let Open01(u) = rng.gen::<Open01<f64>>();
self.large_shape.ind_sample(rng) * u.powf(self.inv_shape)
}
}
impl IndependentSample<f64> for GammaLargeShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
loop {
let StandardNormal(x) = rng.gen::<StandardNormal>();
let v_cbrt = 1.0 + self.c * x;
if v_cbrt <= 0.0 { // a^3 <= 0 iff a <= 0
continue
}
let v = v_cbrt * v_cbrt * v_cbrt;
let Open01(u) = rng.gen::<Open01<f64>>();
let x_sqr = x * x;
if u < 1.0 - 0.0331 * x_sqr * x_sqr ||
u.ln() < 0.5 * x_sqr + self.d * (1.0 - v + v.ln()) {
return self.d * v * self.scale
}
}
}
}
/// The chi-squared distribution `χ²(k)`, where `k` is the degrees of
/// freedom.
///
/// For `k > 0` integral, this distribution is the sum of the squares
/// of `k` independent standard normal random variables. For other
/// `k`, this uses the equivalent characterisation `χ²(k) = Gamma(k/2,
/// 2)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{ChiSquared, IndependentSample};
///
/// let chi = ChiSquared::new(11.0);
/// let v = chi.ind_sample(&mut rand::task_rng());
/// println!("{} is from a χ²(11) distribution", v)
/// ```
pub struct ChiSquared {
repr: ChiSquaredRepr,
}
enum ChiSquaredRepr {
// k == 1, Gamma(alpha, ..) is particularly slow for alpha < 1,
// e.g. when alpha = 1/2 as it would be for this case, so special-
// casing and using the definition of N(0,1)^2 is faster.
DoFExactlyOne,
DoFAnythingElse(Gamma),
}
impl ChiSquared {
/// Create a new chi-squared distribution with degrees-of-freedom
/// `k`. Panics if `k < 0`.
pub fn new(k: f64) -> ChiSquared {
let repr = if k == 1.0 {
DoFExactlyOne
} else {
assert!(k > 0.0, "ChiSquared::new called with `k` < 0");
DoFAnythingElse(Gamma::new(0.5 * k, 2.0))
};
ChiSquared { repr: repr }
}
}
impl Sample<f64> for ChiSquared {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for ChiSquared {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match self.repr {
DoFExactlyOne => {
// k == 1 => N(0,1)^2
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * norm
}
DoFAnythingElse(ref g) => g.ind_sample(rng)
}
}
}
/// The Fisher F distribution `F(m, n)`.
///
/// This distribution is equivalent to the ratio of two normalised
/// chi-squared distributions, that is, `F(m,n) = (χ²(m)/m) /
/// (χ²(n)/n)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{FisherF, IndependentSample};
///
/// let f = FisherF::new(2.0, 32.0);
/// let v = f.ind_sample(&mut rand::task_rng());
/// println!("{} is from an F(2, 32) distribution", v)
/// ```
pub struct FisherF {
numer: ChiSquared,
denom: ChiSquared,
// denom_dof / numer_dof so that this can just be a straight
// multiplication, rather than a division.
dof_ratio: f64,
}
impl FisherF {
/// Create a new `FisherF` distribution, with the given
/// parameter. Panics if either `m` or `n` are not positive.
pub fn new(m: f64, n: f64) -> FisherF {
assert!(m > 0.0, "FisherF::new called with `m < 0`");
assert!(n > 0.0, "FisherF::new called with `n < 0`");
FisherF {
numer: ChiSquared::new(m),
denom: ChiSquared::new(n),
dof_ratio: n / m
}
}
}
impl Sample<f64> for FisherF {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for FisherF {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
self.numer.ind_sample(rng) / self.denom.ind_sample(rng) * self.dof_ratio
}
}
/// The Student t distribution, `t(nu)`, where `nu` is the degrees of
/// freedom.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{StudentT, IndependentSample};
///
/// let t = StudentT::new(11.0);
/// let v = t.ind_sample(&mut rand::task_rng());
/// println!("{} is from a t(11) distribution", v)
/// ```
pub struct StudentT {
chi: ChiSquared,
dof: f64
}
impl StudentT {
/// Create a new Student t distribution with `n` degrees of
/// freedom. Panics if `n <= 0`.
pub fn new(n: f64) -> StudentT {
assert!(n > 0.0, "StudentT::new called with `n <= 0`");
StudentT {
chi: ChiSquared::new(n),
dof: n
}
}
}
impl Sample<f64> for StudentT {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for StudentT {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * (self.dof / self.chi.ind_sample(rng)).sqrt()
}
}
#[cfg(test)]
mod test {
use std::prelude::*;
use distributions::{Sample, IndependentSample};
use super::{ChiSquared, StudentT, FisherF};
#[test]
fn test_chi_squared_one() {
let mut chi = ChiSquared::new(1.0);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
chi.sample(&mut rng);
chi.ind_sample(&mut rng);
}
}
#[test]
fn test_chi_squared_small() {
let mut chi = ChiSquared::new(0.5);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
chi.sample(&mut rng);
chi.ind_sample(&mut rng);
}
}
#[test]
fn test_chi_squared_large() {
let mut chi = ChiSquared::new(30.0);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
chi.sample(&mut rng);
chi.ind_sample(&mut rng);
}
}
#[test]
#[should_fail]
fn test_chi_squared_invalid_dof() {
ChiSquared::new(-1.0);
}
#[test] | fn test_f() { | random_line_split |
|
gamma.rs | distribution where the shape parameter is greater than or
/// equal to 1.
///
/// See `Gamma` for sampling from a Gamma distribution with general
/// shape parameters.
struct GammaSmallShape {
inv_shape: f64,
large_shape: GammaLargeShape
}
/// Gamma distribution where the shape parameter is larger than 1.
///
/// See `Gamma` for sampling from a Gamma distribution with general
/// shape parameters.
struct GammaLargeShape {
scale: f64,
c: f64,
d: f64
}
impl Gamma {
/// Construct an object representing the `Gamma(shape, scale)`
/// distribution.
///
/// Panics if `shape <= 0` or `scale <= 0`.
pub fn new(shape: f64, scale: f64) -> Gamma {
assert!(shape > 0.0, "Gamma::new called with shape <= 0");
assert!(scale > 0.0, "Gamma::new called with scale <= 0");
let repr = match shape {
1.0 => One(Exp::new(1.0 / scale)),
0.0 ... 1.0 => Small(GammaSmallShape::new_raw(shape, scale)),
_ => Large(GammaLargeShape::new_raw(shape, scale))
};
Gamma { repr: repr }
}
}
impl GammaSmallShape {
fn new_raw(shape: f64, scale: f64) -> GammaSmallShape {
GammaSmallShape {
inv_shape: 1. / shape,
large_shape: GammaLargeShape::new_raw(shape + 1.0, scale)
}
}
}
impl GammaLargeShape {
fn new_raw(shape: f64, scale: f64) -> GammaLargeShape {
let d = shape - 1. / 3.;
GammaLargeShape {
scale: scale,
c: 1. / (9. * d).sqrt(),
d: d
}
}
}
impl Sample<f64> for Gamma {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl Sample<f64> for GammaSmallShape {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl Sample<f64> for GammaLargeShape {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for Gamma {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match self.repr {
Small(ref g) => g.ind_sample(rng),
One(ref g) => g.ind_sample(rng),
Large(ref g) => g.ind_sample(rng),
}
}
}
impl IndependentSample<f64> for GammaSmallShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
let Open01(u) = rng.gen::<Open01<f64>>();
self.large_shape.ind_sample(rng) * u.powf(self.inv_shape)
}
}
impl IndependentSample<f64> for GammaLargeShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
loop {
let StandardNormal(x) = rng.gen::<StandardNormal>();
let v_cbrt = 1.0 + self.c * x;
if v_cbrt <= 0.0 { // a^3 <= 0 iff a <= 0
continue
}
let v = v_cbrt * v_cbrt * v_cbrt;
let Open01(u) = rng.gen::<Open01<f64>>();
let x_sqr = x * x;
if u < 1.0 - 0.0331 * x_sqr * x_sqr ||
u.ln() < 0.5 * x_sqr + self.d * (1.0 - v + v.ln()) {
return self.d * v * self.scale
}
}
}
}
/// The chi-squared distribution `χ²(k)`, where `k` is the degrees of
/// freedom.
///
/// For `k > 0` integral, this distribution is the sum of the squares
/// of `k` independent standard normal random variables. For other
/// `k`, this uses the equivalent characterisation `χ²(k) = Gamma(k/2,
/// 2)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{ChiSquared, IndependentSample};
///
/// let chi = ChiSquared::new(11.0);
/// let v = chi.ind_sample(&mut rand::task_rng());
/// println!("{} is from a χ²(11) distribution", v)
/// ```
pub struct ChiSquared {
repr: ChiSquaredRepr,
}
enum ChiSquaredRepr {
// k == 1, Gamma(alpha, ..) is particularly slow for alpha < 1,
// e.g. when alpha = 1/2 as it would be for this case, so special-
// casing and using the definition of N(0,1)^2 is faster.
DoFExactlyOne,
DoFAnythingElse(Gamma),
}
impl ChiSquared {
/// Create a new chi-squared distribution with degrees-of-freedom
/// `k`. Panics if `k < 0`.
pub fn new(k: f64) -> ChiSquared {
let repr = if k == 1.0 {
DoFExactlyOne
} else {
assert!(k > 0.0, "ChiSquared::new called with `k` < 0");
DoFAnythingElse(Gamma::new(0.5 * k, 2.0))
};
ChiSquared { repr: repr }
}
}
impl Sample<f64> for ChiSquared {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for ChiSquared {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match self.repr {
DoFExactlyOne => {
// k == 1 => N(0,1)^2
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * norm
}
DoFAnythingElse(ref g) => g.ind_sample(rng)
}
}
}
/// The Fisher F distribution `F(m, n)`.
///
/// This distribution is equivalent to the ratio of two normalised
/// chi-squared distributions, that is, `F(m,n) = (χ²(m)/m) /
/// (χ²(n)/n)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{FisherF, IndependentSample};
///
/// let f = FisherF::new(2.0, 32.0);
/// let v = f.ind_sample(&mut rand::task_rng());
/// println!("{} is from an F(2, 32) distribution", v)
/// ```
pub struct FisherF {
numer: ChiSquared,
denom: ChiSquared,
// denom_dof / numer_dof so that this can just be a straight
// multiplication, rather than a division.
dof_ratio: f64,
}
impl FisherF {
/// Create a new `FisherF` distribution, with the given
/// parameter. Panics if either `m` or `n` are not positive.
pub fn new(m: f64, n: f64) -> FisherF {
assert!(m > 0.0, "FisherF::new called with `m < 0`");
assert!(n > 0.0, "FisherF::new called with `n < 0`");
FisherF {
numer: ChiSquared::new(m),
denom: ChiSquared::new(n),
dof_ratio: n / m
}
}
}
impl Sample<f64> for FisherF {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for FisherF {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
self.numer.ind_sample(rng) / self.denom.ind_sample(rng) * self.dof_ratio
}
}
/// The Student t distribution, `t(nu)`, where `nu` is the degrees of
/// freedom.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{StudentT, IndependentSample};
///
/// let t = StudentT::new(11.0);
/// let v = t.ind_sample(&mut rand::task_rng());
/// println!("{} is from a t(11) distribution", v)
/// ```
pub struct StudentT {
chi: ChiSquared,
dof: f64
}
impl StudentT {
/// Create a new Student t distribution with `n` degrees of
/// freedom. Panics if `n <= 0`.
pub fn new(n: f64) -> S | tud | identifier_name |
|
gamma.rs | x_sqr = x * x;
if u < 1.0 - 0.0331 * x_sqr * x_sqr ||
u.ln() < 0.5 * x_sqr + self.d * (1.0 - v + v.ln()) {
return self.d * v * self.scale
}
}
}
}
/// The chi-squared distribution `χ²(k)`, where `k` is the degrees of
/// freedom.
///
/// For `k > 0` integral, this distribution is the sum of the squares
/// of `k` independent standard normal random variables. For other
/// `k`, this uses the equivalent characterisation `χ²(k) = Gamma(k/2,
/// 2)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{ChiSquared, IndependentSample};
///
/// let chi = ChiSquared::new(11.0);
/// let v = chi.ind_sample(&mut rand::task_rng());
/// println!("{} is from a χ²(11) distribution", v)
/// ```
pub struct ChiSquared {
repr: ChiSquaredRepr,
}
enum ChiSquaredRepr {
// k == 1, Gamma(alpha, ..) is particularly slow for alpha < 1,
// e.g. when alpha = 1/2 as it would be for this case, so special-
// casing and using the definition of N(0,1)^2 is faster.
DoFExactlyOne,
DoFAnythingElse(Gamma),
}
impl ChiSquared {
/// Create a new chi-squared distribution with degrees-of-freedom
/// `k`. Panics if `k < 0`.
pub fn new(k: f64) -> ChiSquared {
let repr = if k == 1.0 {
DoFExactlyOne
} else {
assert!(k > 0.0, "ChiSquared::new called with `k` < 0");
DoFAnythingElse(Gamma::new(0.5 * k, 2.0))
};
ChiSquared { repr: repr }
}
}
impl Sample<f64> for ChiSquared {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for ChiSquared {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match self.repr {
DoFExactlyOne => {
// k == 1 => N(0,1)^2
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * norm
}
DoFAnythingElse(ref g) => g.ind_sample(rng)
}
}
}
/// The Fisher F distribution `F(m, n)`.
///
/// This distribution is equivalent to the ratio of two normalised
/// chi-squared distributions, that is, `F(m,n) = (χ²(m)/m) /
/// (χ²(n)/n)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{FisherF, IndependentSample};
///
/// let f = FisherF::new(2.0, 32.0);
/// let v = f.ind_sample(&mut rand::task_rng());
/// println!("{} is from an F(2, 32) distribution", v)
/// ```
pub struct FisherF {
numer: ChiSquared,
denom: ChiSquared,
// denom_dof / numer_dof so that this can just be a straight
// multiplication, rather than a division.
dof_ratio: f64,
}
impl FisherF {
/// Create a new `FisherF` distribution, with the given
/// parameter. Panics if either `m` or `n` are not positive.
pub fn new(m: f64, n: f64) -> FisherF {
assert!(m > 0.0, "FisherF::new called with `m < 0`");
assert!(n > 0.0, "FisherF::new called with `n < 0`");
FisherF {
numer: ChiSquared::new(m),
denom: ChiSquared::new(n),
dof_ratio: n / m
}
}
}
impl Sample<f64> for FisherF {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for FisherF {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
self.numer.ind_sample(rng) / self.denom.ind_sample(rng) * self.dof_ratio
}
}
/// The Student t distribution, `t(nu)`, where `nu` is the degrees of
/// freedom.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{StudentT, IndependentSample};
///
/// let t = StudentT::new(11.0);
/// let v = t.ind_sample(&mut rand::task_rng());
/// println!("{} is from a t(11) distribution", v)
/// ```
pub struct StudentT {
chi: ChiSquared,
dof: f64
}
impl StudentT {
/// Create a new Student t distribution with `n` degrees of
/// freedom. Panics if `n <= 0`.
pub fn new(n: f64) -> StudentT {
assert!(n > 0.0, "StudentT::new called with `n <= 0`");
StudentT {
chi: ChiSquared::new(n),
dof: n
}
}
}
impl Sample<f64> for StudentT {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for StudentT {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * (self.dof / self.chi.ind_sample(rng)).sqrt()
}
}
#[cfg(test)]
mod test {
use std::prelude::*;
use distributions::{Sample, IndependentSample};
use super::{ChiSquared, StudentT, FisherF};
#[test]
fn test_chi_squared_one() {
let mut chi = ChiSquared::new(1.0);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
chi.sample(&mut rng);
chi.ind_sample(&mut rng);
}
}
#[test]
fn test_chi_squared_small() {
let mut chi = ChiSquared::new(0.5);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
chi.sample(&mut rng);
chi.ind_sample(&mut rng);
}
}
#[test]
fn test_chi_squared_large() {
let mut chi = ChiSquared::new(30.0);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
chi.sample(&mut rng);
chi.ind_sample(&mut rng);
}
}
#[test]
#[should_fail]
fn test_chi_squared_invalid_dof() {
ChiSquared::new(-1.0);
}
#[test]
fn test_f() {
let mut f = FisherF::new(2.0, 32.0);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
f.sample(&mut rng);
f.ind_sample(&mut rng);
}
}
#[test]
fn test_t() {
let mut t = StudentT::new(11.0);
let mut rng = ::test::rng();
for _ in range(0u, 1000) {
t.sample(&mut rng);
t.ind_sample(&mut rng);
}
}
}
#[cfg(test)]
mod bench {
extern crate test;
use std::prelude::*;
use self::test::Bencher;
use std::mem::size_of;
use distributions::IndependentSample;
use super::Gamma;
#[bench]
fn bench_gamma_large_shape(b: &mut Bencher) {
let gamma = Gamma::new(10., 1.0);
let mut rng = ::test::weak_rng();
b.iter(|| {
for _ in range(0, ::RAND_BENCH_N) {
gamma.ind_sample(&mut rng);
}
});
b.bytes = size_of::<f64>() as u64 * ::RAND_BENCH_N;
}
#[bench]
fn bench_gamma_small_shape(b: &mut Bencher) {
let ga | mma = Gamma::new(0.1, 1.0);
let mut rng = ::test::weak_rng();
b.iter(|| {
for _ in range(0, ::RAND_BENCH_N) {
gamma.ind_sample(&mut rng);
}
});
b.bytes = size_of::<f64>() as u64 * ::RAND_BENCH_N;
}
}
| identifier_body |
|
gamma.rs | /// for Generating Gamma Variables" *ACM Trans. Math. Softw.* 26, 3
/// (September 2000),
/// 363-372. DOI:[10.1145/358407.358414](http://doi.acm.org/10.1145/358407.358414)
pub struct Gamma {
repr: GammaRepr,
}
enum GammaRepr {
Large(GammaLargeShape),
One(Exp),
Small(GammaSmallShape)
}
// These two helpers could be made public, but saving the
// match-on-Gamma-enum branch from using them directly (e.g. if one
// knows that the shape is always > 1) doesn't appear to be much
// faster.
/// Gamma distribution where the shape parameter is less than 1.
///
/// Note, samples from this require a compulsory floating-point `pow`
/// call, which makes it significantly slower than sampling from a
/// gamma distribution where the shape parameter is greater than or
/// equal to 1.
///
/// See `Gamma` for sampling from a Gamma distribution with general
/// shape parameters.
struct GammaSmallShape {
inv_shape: f64,
large_shape: GammaLargeShape
}
/// Gamma distribution where the shape parameter is larger than 1.
///
/// See `Gamma` for sampling from a Gamma distribution with general
/// shape parameters.
struct GammaLargeShape {
scale: f64,
c: f64,
d: f64
}
impl Gamma {
/// Construct an object representing the `Gamma(shape, scale)`
/// distribution.
///
/// Panics if `shape <= 0` or `scale <= 0`.
pub fn new(shape: f64, scale: f64) -> Gamma {
assert!(shape > 0.0, "Gamma::new called with shape <= 0");
assert!(scale > 0.0, "Gamma::new called with scale <= 0");
let repr = match shape {
1.0 => One(Exp::new(1.0 / scale)),
0.0 ... 1.0 => Small(GammaSmallShape::new_raw(shape, scale)),
_ => Large(GammaLargeShape::new_raw(shape, scale))
};
Gamma { repr: repr }
}
}
impl GammaSmallShape {
fn new_raw(shape: f64, scale: f64) -> GammaSmallShape {
GammaSmallShape {
inv_shape: 1. / shape,
large_shape: GammaLargeShape::new_raw(shape + 1.0, scale)
}
}
}
impl GammaLargeShape {
fn new_raw(shape: f64, scale: f64) -> GammaLargeShape {
let d = shape - 1. / 3.;
GammaLargeShape {
scale: scale,
c: 1. / (9. * d).sqrt(),
d: d
}
}
}
impl Sample<f64> for Gamma {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl Sample<f64> for GammaSmallShape {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl Sample<f64> for GammaLargeShape {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for Gamma {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match self.repr {
Small(ref g) => g.ind_sample(rng),
One(ref g) => g.ind_sample(rng),
Large(ref g) => g.ind_sample(rng),
}
}
}
impl IndependentSample<f64> for GammaSmallShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
let Open01(u) = rng.gen::<Open01<f64>>();
self.large_shape.ind_sample(rng) * u.powf(self.inv_shape)
}
}
impl IndependentSample<f64> for GammaLargeShape {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
loop {
let StandardNormal(x) = rng.gen::<StandardNormal>();
let v_cbrt = 1.0 + self.c * x;
if v_cbrt <= 0.0 { // a | let v = v_cbrt * v_cbrt * v_cbrt;
let Open01(u) = rng.gen::<Open01<f64>>();
let x_sqr = x * x;
if u < 1.0 - 0.0331 * x_sqr * x_sqr ||
u.ln() < 0.5 * x_sqr + self.d * (1.0 - v + v.ln()) {
return self.d * v * self.scale
}
}
}
}
/// The chi-squared distribution `χ²(k)`, where `k` is the degrees of
/// freedom.
///
/// For `k > 0` integral, this distribution is the sum of the squares
/// of `k` independent standard normal random variables. For other
/// `k`, this uses the equivalent characterisation `χ²(k) = Gamma(k/2,
/// 2)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{ChiSquared, IndependentSample};
///
/// let chi = ChiSquared::new(11.0);
/// let v = chi.ind_sample(&mut rand::task_rng());
/// println!("{} is from a χ²(11) distribution", v)
/// ```
pub struct ChiSquared {
repr: ChiSquaredRepr,
}
enum ChiSquaredRepr {
// k == 1, Gamma(alpha, ..) is particularly slow for alpha < 1,
// e.g. when alpha = 1/2 as it would be for this case, so special-
// casing and using the definition of N(0,1)^2 is faster.
DoFExactlyOne,
DoFAnythingElse(Gamma),
}
impl ChiSquared {
/// Create a new chi-squared distribution with degrees-of-freedom
/// `k`. Panics if `k < 0`.
pub fn new(k: f64) -> ChiSquared {
let repr = if k == 1.0 {
DoFExactlyOne
} else {
assert!(k > 0.0, "ChiSquared::new called with `k` < 0");
DoFAnythingElse(Gamma::new(0.5 * k, 2.0))
};
ChiSquared { repr: repr }
}
}
impl Sample<f64> for ChiSquared {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample(rng) }
}
impl IndependentSample<f64> for ChiSquared {
fn ind_sample<R: Rng>(&self, rng: &mut R) -> f64 {
match self.repr {
DoFExactlyOne => {
// k == 1 => N(0,1)^2
let StandardNormal(norm) = rng.gen::<StandardNormal>();
norm * norm
}
DoFAnythingElse(ref g) => g.ind_sample(rng)
}
}
}
/// The Fisher F distribution `F(m, n)`.
///
/// This distribution is equivalent to the ratio of two normalised
/// chi-squared distributions, that is, `F(m,n) = (χ²(m)/m) /
/// (χ²(n)/n)`.
///
/// # Example
///
/// ```rust
/// use std::rand;
/// use std::rand::distributions::{FisherF, IndependentSample};
///
/// let f = FisherF::new(2.0, 32.0);
/// let v = f.ind_sample(&mut rand::task_rng());
/// println!("{} is from an F(2, 32) distribution", v)
/// ```
pub struct FisherF {
numer: ChiSquared,
denom: ChiSquared,
// denom_dof / numer_dof so that this can just be a straight
// multiplication, rather than a division.
dof_ratio: f64,
}
impl FisherF {
/// Create a new `FisherF` distribution, with the given
/// parameter. Panics if either `m` or `n` are not positive.
pub fn new(m: f64, n: f64) -> FisherF {
assert!(m > 0.0, "FisherF::new called with `m < 0`");
assert!(n > 0.0, "FisherF::new called with `n < 0`");
FisherF {
numer: ChiSquared::new(m),
denom: ChiSquared::new(n),
dof_ratio: n / m
}
}
}
impl Sample<f64> for FisherF {
fn sample<R: Rng>(&mut self, rng: &mut R) -> f64 { self.ind_sample | ^3 <= 0 iff a <= 0
continue
}
| conditional_block |
dom_element_schema_registry_spec.ts | import {
beforeEach,
ddescribe,
xdescribe,
describe,
expect,
iit,
inject,
it,
xit
} from 'angular2/testing_internal';
import {IS_DART} from 'angular2/src/facade/lang';
import {DomElementSchemaRegistry} from 'angular2/src/compiler/schema/dom_element_schema_registry';
export function main() | it('should re-map property names that are specified in DOM facade',
() => { expect(registry.getMappedPropName('readonly')).toEqual('readOnly'); });
it('should not re-map property names that are not specified in DOM facade', () => {
expect(registry.getMappedPropName('title')).toEqual('title');
expect(registry.getMappedPropName('exotic-unknown')).toEqual('exotic-unknown');
});
it('should detect properties on namespaced elements',
() => { expect(registry.hasProperty('@svg:g', 'id')).toBeTruthy(); });
});
}
| {
// DOMElementSchema can only be used on the JS side where we can safely
// use reflection for DOM elements
if (IS_DART) return;
var registry: DomElementSchemaRegistry;
beforeEach(() => { registry = new DomElementSchemaRegistry(); });
describe('DOMElementSchema', () => {
it('should detect properties on regular elements', () => {
expect(registry.hasProperty('div', 'id')).toBeTruthy();
expect(registry.hasProperty('div', 'title')).toBeTruthy();
expect(registry.hasProperty('div', 'unknown')).toBeFalsy();
});
it('should return true for custom-like elements',
() => { expect(registry.hasProperty('custom-like', 'unknown')).toBeTruthy(); });
| identifier_body |
dom_element_schema_registry_spec.ts | import {
beforeEach,
ddescribe,
xdescribe,
describe,
expect,
iit,
inject,
it,
xit
} from 'angular2/testing_internal';
import {IS_DART} from 'angular2/src/facade/lang';
import {DomElementSchemaRegistry} from 'angular2/src/compiler/schema/dom_element_schema_registry';
export function | () {
// DOMElementSchema can only be used on the JS side where we can safely
// use reflection for DOM elements
if (IS_DART) return;
var registry: DomElementSchemaRegistry;
beforeEach(() => { registry = new DomElementSchemaRegistry(); });
describe('DOMElementSchema', () => {
it('should detect properties on regular elements', () => {
expect(registry.hasProperty('div', 'id')).toBeTruthy();
expect(registry.hasProperty('div', 'title')).toBeTruthy();
expect(registry.hasProperty('div', 'unknown')).toBeFalsy();
});
it('should return true for custom-like elements',
() => { expect(registry.hasProperty('custom-like', 'unknown')).toBeTruthy(); });
it('should re-map property names that are specified in DOM facade',
() => { expect(registry.getMappedPropName('readonly')).toEqual('readOnly'); });
it('should not re-map property names that are not specified in DOM facade', () => {
expect(registry.getMappedPropName('title')).toEqual('title');
expect(registry.getMappedPropName('exotic-unknown')).toEqual('exotic-unknown');
});
it('should detect properties on namespaced elements',
() => { expect(registry.hasProperty('@svg:g', 'id')).toBeTruthy(); });
});
}
| main | identifier_name |
dom_element_schema_registry_spec.ts | import {
beforeEach,
ddescribe,
xdescribe,
describe,
expect,
iit,
inject,
it,
xit
} from 'angular2/testing_internal';
import {IS_DART} from 'angular2/src/facade/lang';
import {DomElementSchemaRegistry} from 'angular2/src/compiler/schema/dom_element_schema_registry';
export function main() {
// DOMElementSchema can only be used on the JS side where we can safely
// use reflection for DOM elements
if (IS_DART) return;
var registry: DomElementSchemaRegistry;
beforeEach(() => { registry = new DomElementSchemaRegistry(); });
describe('DOMElementSchema', () => {
it('should detect properties on regular elements', () => {
expect(registry.hasProperty('div', 'id')).toBeTruthy();
expect(registry.hasProperty('div', 'title')).toBeTruthy();
expect(registry.hasProperty('div', 'unknown')).toBeFalsy();
});
it('should return true for custom-like elements',
() => { expect(registry.hasProperty('custom-like', 'unknown')).toBeTruthy(); });
it('should re-map property names that are specified in DOM facade',
() => { expect(registry.getMappedPropName('readonly')).toEqual('readOnly'); });
|
it('should detect properties on namespaced elements',
() => { expect(registry.hasProperty('@svg:g', 'id')).toBeTruthy(); });
});
} | it('should not re-map property names that are not specified in DOM facade', () => {
expect(registry.getMappedPropName('title')).toEqual('title');
expect(registry.getMappedPropName('exotic-unknown')).toEqual('exotic-unknown');
}); | random_line_split |
hello.ctrl.ts | import { Sample } from './../services/serviceClient';
import { GlobalConfig } from '../index.config';
export class HelloController {
public selectedItem: string;
public searchText: string;
public foodInfo: Sample.FoodInfo;
public weights: Sample.FoodWeight[]; | Description: '100 g',
Amount: 1,
GramWeight: 100
};
/* @ngInject */
constructor(private ServiceClient: Sample.ServiceClient) {
this.weights = [this.defaultWeight];
this.selectedWeight = this.defaultWeight;
}
public getFactor() {
if (!this.amount || !this.selectedWeight) {
return 1;
}
return this.amount * this.selectedWeight.GramWeight / 100;
}
public selectedItemChange(item: Sample.Food) {
if (!item) {
this.foodInfo = null;
return;
}
this.ServiceClient.getFoodIdById(item.Id)
.then(x => this.foodInfo = x.data);
this.ServiceClient.getFoodByFoodIdWeight(item.Id)
.then(x => this.weights = [this.defaultWeight].concat(x.data));
}
public querySearch(searchText: string) {
return this.ServiceClient.getFoodSearchBySearchText(searchText)
.then(x => { return x.data; });
}
}
export class HelloComponent implements ng.IComponentOptions {
public bindings: any;
public controller: any;
public templateUrl: string;
public controllerAs: string;
/** @ngInject */
constructor() {
this.bindings = {};
this.controller = HelloController;
this.templateUrl = 'app/hello/hello.html';
this.controllerAs = 'vm';
}
} | public selectedWeight: Sample.FoodWeight;
public amount: number;
private defaultWeight: Sample.FoodWeight = { | random_line_split |
hello.ctrl.ts | import { Sample } from './../services/serviceClient';
import { GlobalConfig } from '../index.config';
export class HelloController {
public selectedItem: string;
public searchText: string;
public foodInfo: Sample.FoodInfo;
public weights: Sample.FoodWeight[];
public selectedWeight: Sample.FoodWeight;
public amount: number;
private defaultWeight: Sample.FoodWeight = {
Description: '100 g',
Amount: 1,
GramWeight: 100
};
/* @ngInject */
constructor(private ServiceClient: Sample.ServiceClient) {
this.weights = [this.defaultWeight];
this.selectedWeight = this.defaultWeight;
}
public getFactor() {
if (!this.amount || !this.selectedWeight) {
return 1;
}
return this.amount * this.selectedWeight.GramWeight / 100;
}
public selectedItemChange(item: Sample.Food) {
if (!item) {
this.foodInfo = null;
return;
}
this.ServiceClient.getFoodIdById(item.Id)
.then(x => this.foodInfo = x.data);
this.ServiceClient.getFoodByFoodIdWeight(item.Id)
.then(x => this.weights = [this.defaultWeight].concat(x.data));
}
public querySearch(searchText: string) {
return this.ServiceClient.getFoodSearchBySearchText(searchText)
.then(x => { return x.data; });
}
}
export class HelloComponent implements ng.IComponentOptions {
public bindings: any;
public controller: any;
public templateUrl: string;
public controllerAs: string;
/** @ngInject */
constructor() |
} | {
this.bindings = {};
this.controller = HelloController;
this.templateUrl = 'app/hello/hello.html';
this.controllerAs = 'vm';
} | identifier_body |
hello.ctrl.ts | import { Sample } from './../services/serviceClient';
import { GlobalConfig } from '../index.config';
export class HelloController {
public selectedItem: string;
public searchText: string;
public foodInfo: Sample.FoodInfo;
public weights: Sample.FoodWeight[];
public selectedWeight: Sample.FoodWeight;
public amount: number;
private defaultWeight: Sample.FoodWeight = {
Description: '100 g',
Amount: 1,
GramWeight: 100
};
/* @ngInject */
constructor(private ServiceClient: Sample.ServiceClient) {
this.weights = [this.defaultWeight];
this.selectedWeight = this.defaultWeight;
}
public getFactor() {
if (!this.amount || !this.selectedWeight) {
return 1;
}
return this.amount * this.selectedWeight.GramWeight / 100;
}
public selectedItemChange(item: Sample.Food) {
if (!item) {
this.foodInfo = null;
return;
}
this.ServiceClient.getFoodIdById(item.Id)
.then(x => this.foodInfo = x.data);
this.ServiceClient.getFoodByFoodIdWeight(item.Id)
.then(x => this.weights = [this.defaultWeight].concat(x.data));
}
public querySearch(searchText: string) {
return this.ServiceClient.getFoodSearchBySearchText(searchText)
.then(x => { return x.data; });
}
}
export class HelloComponent implements ng.IComponentOptions {
public bindings: any;
public controller: any;
public templateUrl: string;
public controllerAs: string;
/** @ngInject */
| () {
this.bindings = {};
this.controller = HelloController;
this.templateUrl = 'app/hello/hello.html';
this.controllerAs = 'vm';
}
} | constructor | identifier_name |
hello.ctrl.ts | import { Sample } from './../services/serviceClient';
import { GlobalConfig } from '../index.config';
export class HelloController {
public selectedItem: string;
public searchText: string;
public foodInfo: Sample.FoodInfo;
public weights: Sample.FoodWeight[];
public selectedWeight: Sample.FoodWeight;
public amount: number;
private defaultWeight: Sample.FoodWeight = {
Description: '100 g',
Amount: 1,
GramWeight: 100
};
/* @ngInject */
constructor(private ServiceClient: Sample.ServiceClient) {
this.weights = [this.defaultWeight];
this.selectedWeight = this.defaultWeight;
}
public getFactor() {
if (!this.amount || !this.selectedWeight) |
return this.amount * this.selectedWeight.GramWeight / 100;
}
public selectedItemChange(item: Sample.Food) {
if (!item) {
this.foodInfo = null;
return;
}
this.ServiceClient.getFoodIdById(item.Id)
.then(x => this.foodInfo = x.data);
this.ServiceClient.getFoodByFoodIdWeight(item.Id)
.then(x => this.weights = [this.defaultWeight].concat(x.data));
}
public querySearch(searchText: string) {
return this.ServiceClient.getFoodSearchBySearchText(searchText)
.then(x => { return x.data; });
}
}
export class HelloComponent implements ng.IComponentOptions {
public bindings: any;
public controller: any;
public templateUrl: string;
public controllerAs: string;
/** @ngInject */
constructor() {
this.bindings = {};
this.controller = HelloController;
this.templateUrl = 'app/hello/hello.html';
this.controllerAs = 'vm';
}
} | {
return 1;
} | conditional_block |
mod.rs | // This file is part of Rubik.
// Copyright Peter Beard, licensed under the GPLv3. See LICENSE for details.
//
//! Algorithms for solving Rubik's cubes
use super::cube::{Cube, Move};
/// Trait for things that can solve Rubik's cubes
pub trait Solver {
/// Calculate a sequence of moves that puts the cube in the solved state
fn find_solution(&mut self, cube: &Cube) -> Vec<Move>;
}
/// Solver that doesn't do anything
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::{Solver, NullSolver};
///
/// let mut c = Cube::new();
/// let mut ns = NullSolver::new();
///
/// assert_eq!(c.solve(&mut ns), vec![]);
/// ```
pub struct NullSolver;
impl NullSolver {
pub fn new() -> NullSolver {
NullSolver
}
}
impl Solver for NullSolver {
fn find_solution(&mut self, _: &Cube) -> Vec<Move> |
}
/// Solver that uses a simple iterative deepening algorithm
///
/// This algorithm is very slow and probably won't halt in a reasonable time for
/// most cubes
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::IDSolver;
///
/// let mut c = Cube::new();
/// let mut ids = IDSolver::new();
///
/// c.apply_moves("F'U'D'");
/// println!("{:?}", c.solve(&mut ids));
///
/// assert!(c.is_solved());
/// ```
pub struct IDSolver {
max_depth: u8,
}
impl IDSolver {
/// Create a new solver with the default maximum depth of 26
/// (all cubes are solvable in at most 26 moves)
pub fn new() -> IDSolver {
IDSolver {
max_depth: 26u8,
}
}
/// Create a solver with the given maximum depth (max number of moves)
pub fn with_max_depth(d: u8) -> IDSolver {
IDSolver {
max_depth: d,
}
}
}
impl Solver for IDSolver {
fn find_solution(&mut self, cube: &Cube) -> Vec<Move> {
let mut current_solution: Option<Vec<Move>> = None;
let mut current_depth = 1;
// A solved cube requires zero moves to solve
if !cube.is_solved() {
// Look until we find a solution or run out of moves
while current_depth <= self.max_depth && current_solution.is_none() {
current_solution = dbsearch(cube, current_depth);
current_depth += 1;
}
}
// Return no moves if there's no solution within the max depth
if let Some(s) = current_solution {
s
} else {
vec![]
}
}
}
/// Depth-bounded search for a solution
fn dbsearch(start: &Cube, maxdepth: u8) -> Option<Vec<Move>> {
// Zero means we're at the max depth
if maxdepth == 0 {
return None;
}
let possible_moves = [
Move::F,
Move::R,
Move::U,
Move::B,
Move::L,
Move::D,
Move::FPrime,
Move::RPrime,
Move::UPrime,
Move::BPrime,
Move::LPrime,
Move::DPrime,
];
let mut moves = Vec::new();
// Try every possible move and see where we get
for &m in &possible_moves {
let mut s = start.clone();
s.apply_move(m);
moves.push(m);
if s.is_solved() {
break;
}
if let Some(ms) = dbsearch(&s, maxdepth - 1) {
moves.append(&mut ms.clone());
break;
} else {
moves.pop();
}
}
if moves.len() > 0 {
Some(moves)
} else {
None
}
}
| {
vec![]
} | identifier_body |
mod.rs | // This file is part of Rubik.
// Copyright Peter Beard, licensed under the GPLv3. See LICENSE for details.
//
//! Algorithms for solving Rubik's cubes
use super::cube::{Cube, Move};
/// Trait for things that can solve Rubik's cubes
pub trait Solver {
/// Calculate a sequence of moves that puts the cube in the solved state
fn find_solution(&mut self, cube: &Cube) -> Vec<Move>;
}
/// Solver that doesn't do anything
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::{Solver, NullSolver};
///
/// let mut c = Cube::new();
/// let mut ns = NullSolver::new();
///
/// assert_eq!(c.solve(&mut ns), vec![]);
/// ```
pub struct NullSolver;
impl NullSolver {
pub fn new() -> NullSolver {
NullSolver
}
}
impl Solver for NullSolver {
fn find_solution(&mut self, _: &Cube) -> Vec<Move> {
vec![]
}
}
/// Solver that uses a simple iterative deepening algorithm
///
/// This algorithm is very slow and probably won't halt in a reasonable time for
/// most cubes
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::IDSolver;
///
/// let mut c = Cube::new();
/// let mut ids = IDSolver::new();
///
/// c.apply_moves("F'U'D'");
/// println!("{:?}", c.solve(&mut ids));
///
/// assert!(c.is_solved());
/// ```
pub struct IDSolver {
max_depth: u8,
}
impl IDSolver {
/// Create a new solver with the default maximum depth of 26
/// (all cubes are solvable in at most 26 moves)
pub fn new() -> IDSolver {
IDSolver {
max_depth: 26u8,
}
}
/// Create a solver with the given maximum depth (max number of moves)
pub fn with_max_depth(d: u8) -> IDSolver {
IDSolver {
max_depth: d,
}
}
}
impl Solver for IDSolver {
fn find_solution(&mut self, cube: &Cube) -> Vec<Move> {
let mut current_solution: Option<Vec<Move>> = None;
let mut current_depth = 1;
// A solved cube requires zero moves to solve
if !cube.is_solved() {
// Look until we find a solution or run out of moves
while current_depth <= self.max_depth && current_solution.is_none() {
current_solution = dbsearch(cube, current_depth);
current_depth += 1;
}
}
// Return no moves if there's no solution within the max depth
if let Some(s) = current_solution {
s
} else {
vec![]
}
}
}
/// Depth-bounded search for a solution
fn dbsearch(start: &Cube, maxdepth: u8) -> Option<Vec<Move>> {
// Zero means we're at the max depth
if maxdepth == 0 {
return None;
}
let possible_moves = [
Move::F,
Move::R,
Move::U,
Move::B,
Move::L,
Move::D,
Move::FPrime,
Move::RPrime,
Move::UPrime,
Move::BPrime,
Move::LPrime,
Move::DPrime,
];
let mut moves = Vec::new();
// Try every possible move and see where we get
for &m in &possible_moves {
let mut s = start.clone();
s.apply_move(m);
moves.push(m);
if s.is_solved() |
if let Some(ms) = dbsearch(&s, maxdepth - 1) {
moves.append(&mut ms.clone());
break;
} else {
moves.pop();
}
}
if moves.len() > 0 {
Some(moves)
} else {
None
}
}
| {
break;
} | conditional_block |
mod.rs | // This file is part of Rubik.
// Copyright Peter Beard, licensed under the GPLv3. See LICENSE for details.
//
//! Algorithms for solving Rubik's cubes
use super::cube::{Cube, Move};
/// Trait for things that can solve Rubik's cubes
pub trait Solver {
/// Calculate a sequence of moves that puts the cube in the solved state
fn find_solution(&mut self, cube: &Cube) -> Vec<Move>;
}
/// Solver that doesn't do anything
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::{Solver, NullSolver};
///
/// let mut c = Cube::new();
/// let mut ns = NullSolver::new();
///
/// assert_eq!(c.solve(&mut ns), vec![]);
/// ```
pub struct NullSolver;
impl NullSolver {
pub fn new() -> NullSolver {
NullSolver
}
}
impl Solver for NullSolver {
fn find_solution(&mut self, _: &Cube) -> Vec<Move> {
vec![]
}
}
/// Solver that uses a simple iterative deepening algorithm
///
/// This algorithm is very slow and probably won't halt in a reasonable time for
/// most cubes
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::IDSolver;
///
/// let mut c = Cube::new();
/// let mut ids = IDSolver::new();
///
/// c.apply_moves("F'U'D'");
/// println!("{:?}", c.solve(&mut ids));
/// |
impl IDSolver {
/// Create a new solver with the default maximum depth of 26
/// (all cubes are solvable in at most 26 moves)
pub fn new() -> IDSolver {
IDSolver {
max_depth: 26u8,
}
}
/// Create a solver with the given maximum depth (max number of moves)
pub fn with_max_depth(d: u8) -> IDSolver {
IDSolver {
max_depth: d,
}
}
}
impl Solver for IDSolver {
fn find_solution(&mut self, cube: &Cube) -> Vec<Move> {
let mut current_solution: Option<Vec<Move>> = None;
let mut current_depth = 1;
// A solved cube requires zero moves to solve
if !cube.is_solved() {
// Look until we find a solution or run out of moves
while current_depth <= self.max_depth && current_solution.is_none() {
current_solution = dbsearch(cube, current_depth);
current_depth += 1;
}
}
// Return no moves if there's no solution within the max depth
if let Some(s) = current_solution {
s
} else {
vec![]
}
}
}
/// Depth-bounded search for a solution
fn dbsearch(start: &Cube, maxdepth: u8) -> Option<Vec<Move>> {
// Zero means we're at the max depth
if maxdepth == 0 {
return None;
}
let possible_moves = [
Move::F,
Move::R,
Move::U,
Move::B,
Move::L,
Move::D,
Move::FPrime,
Move::RPrime,
Move::UPrime,
Move::BPrime,
Move::LPrime,
Move::DPrime,
];
let mut moves = Vec::new();
// Try every possible move and see where we get
for &m in &possible_moves {
let mut s = start.clone();
s.apply_move(m);
moves.push(m);
if s.is_solved() {
break;
}
if let Some(ms) = dbsearch(&s, maxdepth - 1) {
moves.append(&mut ms.clone());
break;
} else {
moves.pop();
}
}
if moves.len() > 0 {
Some(moves)
} else {
None
}
} | /// assert!(c.is_solved());
/// ```
pub struct IDSolver {
max_depth: u8,
} | random_line_split |
mod.rs | // This file is part of Rubik.
// Copyright Peter Beard, licensed under the GPLv3. See LICENSE for details.
//
//! Algorithms for solving Rubik's cubes
use super::cube::{Cube, Move};
/// Trait for things that can solve Rubik's cubes
pub trait Solver {
/// Calculate a sequence of moves that puts the cube in the solved state
fn find_solution(&mut self, cube: &Cube) -> Vec<Move>;
}
/// Solver that doesn't do anything
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::{Solver, NullSolver};
///
/// let mut c = Cube::new();
/// let mut ns = NullSolver::new();
///
/// assert_eq!(c.solve(&mut ns), vec![]);
/// ```
pub struct NullSolver;
impl NullSolver {
pub fn | () -> NullSolver {
NullSolver
}
}
impl Solver for NullSolver {
fn find_solution(&mut self, _: &Cube) -> Vec<Move> {
vec![]
}
}
/// Solver that uses a simple iterative deepening algorithm
///
/// This algorithm is very slow and probably won't halt in a reasonable time for
/// most cubes
///
/// # Example
/// ```
/// use rubik::cube::Cube;
/// use rubik::solver::IDSolver;
///
/// let mut c = Cube::new();
/// let mut ids = IDSolver::new();
///
/// c.apply_moves("F'U'D'");
/// println!("{:?}", c.solve(&mut ids));
///
/// assert!(c.is_solved());
/// ```
pub struct IDSolver {
max_depth: u8,
}
impl IDSolver {
/// Create a new solver with the default maximum depth of 26
/// (all cubes are solvable in at most 26 moves)
pub fn new() -> IDSolver {
IDSolver {
max_depth: 26u8,
}
}
/// Create a solver with the given maximum depth (max number of moves)
pub fn with_max_depth(d: u8) -> IDSolver {
IDSolver {
max_depth: d,
}
}
}
impl Solver for IDSolver {
fn find_solution(&mut self, cube: &Cube) -> Vec<Move> {
let mut current_solution: Option<Vec<Move>> = None;
let mut current_depth = 1;
// A solved cube requires zero moves to solve
if !cube.is_solved() {
// Look until we find a solution or run out of moves
while current_depth <= self.max_depth && current_solution.is_none() {
current_solution = dbsearch(cube, current_depth);
current_depth += 1;
}
}
// Return no moves if there's no solution within the max depth
if let Some(s) = current_solution {
s
} else {
vec![]
}
}
}
/// Depth-bounded search for a solution
fn dbsearch(start: &Cube, maxdepth: u8) -> Option<Vec<Move>> {
// Zero means we're at the max depth
if maxdepth == 0 {
return None;
}
let possible_moves = [
Move::F,
Move::R,
Move::U,
Move::B,
Move::L,
Move::D,
Move::FPrime,
Move::RPrime,
Move::UPrime,
Move::BPrime,
Move::LPrime,
Move::DPrime,
];
let mut moves = Vec::new();
// Try every possible move and see where we get
for &m in &possible_moves {
let mut s = start.clone();
s.apply_move(m);
moves.push(m);
if s.is_solved() {
break;
}
if let Some(ms) = dbsearch(&s, maxdepth - 1) {
moves.append(&mut ms.clone());
break;
} else {
moves.pop();
}
}
if moves.len() > 0 {
Some(moves)
} else {
None
}
}
| new | identifier_name |
config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('APP_SECRET_KEY', '')
# db config
DB_PORT = os.getenv('DB_PORT', '')
DB_HOST = os.getenv('DB_HOST', '')
DB_ROLE = os.getenv('DB_ROLE', '')
# TODO: abstract auth stuff to kubernetes manifests
DB_PASSWORD = os.getenv('DB_PASSWORD', '')
DB_NAME = os.getenv('DB_NAME', '')
SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@{}:{}/{}'.format(
DB_ROLE, DB_PASSWORD, DB_HOST, str(DB_PORT), DB_NAME)
class | (Config):
DEBUG = False
class StagingConfig(Config):
DEVELOPMENT = True
DEBUG = True
class DevelopmentConfig(Config):
DEVELOPMENT = True
DEBUG = True
class TestingConfig(Config):
TESTING = True
| ProductionConfig | identifier_name |
config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('APP_SECRET_KEY', '')
# db config
DB_PORT = os.getenv('DB_PORT', '')
DB_HOST = os.getenv('DB_HOST', '')
DB_ROLE = os.getenv('DB_ROLE', '')
# TODO: abstract auth stuff to kubernetes manifests
DB_PASSWORD = os.getenv('DB_PASSWORD', '')
DB_NAME = os.getenv('DB_NAME', '')
SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@{}:{}/{}'.format(
DB_ROLE, DB_PASSWORD, DB_HOST, str(DB_PORT), DB_NAME)
class ProductionConfig(Config):
DEBUG = False
| DEVELOPMENT = True
DEBUG = True
class DevelopmentConfig(Config):
DEVELOPMENT = True
DEBUG = True
class TestingConfig(Config):
TESTING = True |
class StagingConfig(Config): | random_line_split |