commit (stringlengths 40-40) | subject (stringlengths 1-3.25k) | old_file (stringlengths 4-311) | new_file (stringlengths 4-311) | old_contents (stringlengths 0-26.3k) | lang (stringclasses, 3 values) | proba (float64, 0-1) | diff (stringlengths 0-7.82k) |
---|---|---|---|---|---|---|---|
4f6b1a4dae7701cc79a523e96fe812efaa54745b | Add optimizers tests | tests/auto/test_optimizers.py | tests/auto/test_optimizers.py | Python | 0 | @@ -0,0 +1,1918 @@
+from __future__ import print_function%0Aimport numpy as np%0Anp.random.seed(1337)%0A%0Afrom keras.utils.test_utils import get_test_data%0Afrom keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam%0Afrom keras.models import Sequential%0Afrom keras.layers.core import Dense, Activation%0Afrom keras.utils.np_utils import to_categorical%0Aimport unittest%0A%0A%0A(X_train, y_train), (X_test, y_test) = get_test_data(nb_train=1000, nb_test=200, input_shape=(10,),%0A classification=True, nb_class=2)%0Ay_train = to_categorical(y_train)%0Ay_test = to_categorical(y_test)%0A%0A%0Adef get_model(input_dim, nb_hidden, output_dim):%0A model = Sequential()%0A model.add(Dense(input_dim, nb_hidden))%0A model.add(Activation('relu'))%0A model.add(Dense(nb_hidden, output_dim))%0A model.add(Activation('softmax'))%0A return model%0A%0A%0Adef test_optimizer(optimizer, target=0.9):%0A model = get_model(X_train.shape%5B1%5D, 10, y_train.shape%5B1%5D)%0A model.compile(loss='categorical_crossentropy', optimizer=optimizer)%0A history = model.fit(X_train, y_train, nb_epoch=12, batch_size=16, validation_data=(X_test, y_test), show_accuracy=True, verbose=2)%0A return history.history%5B'val_acc'%5D%5B-1%5D %3E target%0A%0A%0Aclass TestOptimizers(unittest.TestCase):%0A def test_sgd(self):%0A print('test SGD')%0A sgd = SGD(lr=0.01, momentum=0.9, nesterov=True)%0A self.assertTrue(test_optimizer(sgd))%0A%0A def test_rmsprop(self):%0A print('test RMSprop')%0A self.assertTrue(test_optimizer(RMSprop()))%0A%0A def test_adagrad(self):%0A print('test Adagrad')%0A self.assertTrue(test_optimizer(Adagrad()))%0A%0A def test_adadelta(self):%0A print('test Adadelta')%0A self.assertTrue(test_optimizer(Adadelta()))%0A%0A def test_adam(self):%0A print('test Adam')%0A self.assertTrue(test_optimizer(Adam()))%0A%0Aif __name__ == '__main__':%0A print('Test optimizers')%0A unittest.main()%0A
|
|
751b596482cdb473b1a7f9172501e25d00f15724 | Use default loop on TCP benchmark | tests/benchmark-tcp.py | tests/benchmark-tcp.py |
import sys
sys.path.insert(0, '../')
import signal
import threading
import pyuv
RESPONSE = "HTTP/1.1 200 OK\r\n" \
"Content-Type: text/plain\r\n" \
"Content-Length: 12\r\n" \
"\r\n" \
"hello world\n"
def on_client_shutdown(client):
client.close()
clients.remove(client)
def on_read(client, data):
if data is None:
client.close()
clients.remove(client)
return
data = data.strip()
if not data:
return
client.write(RESPONSE)
client.shutdown(on_client_shutdown)
def on_connection(server):
client = server.accept()
clients.append(client)
client.start_read(on_read)
def async_exit(async, data):
[c.close() for c in clients]
async.close()
signal_h.close()
server.close()
def signal_cb(sig, frame):
async.send(async_exit)
print "PyUV version %s" % pyuv.__version__
loop = pyuv.Loop()
async = pyuv.Async(loop)
clients = []
server = pyuv.TCP(loop)
server.bind(("0.0.0.0", 1234))
server.listen(on_connection)
signal_h = pyuv.Signal(loop)
signal_h.start()
t = threading.Thread(target=loop.run)
t.start()
signal.signal(signal.SIGINT, signal_cb)
signal.pause()
t.join()
print "Stopped!"
| Python | 0 | @@ -914,16 +914,29 @@
yuv.Loop
+.default_loop
()%0A%0Aasyn
|
6dbd81fb4b59e7394318cbd0b0f0fdb31fcd6dd2 | Add unit test to ensure we don't diff bare repos | tests/unit/states/test_git.py | tests/unit/states/test_git.py | Python | 0 | @@ -0,0 +1,2786 @@
+# -*- coding: utf-8 -*-%0A'''%0A :codeauthor: Erik Johnson %[email protected]%3E%0A'''%0A%0A# Import Python libs%0Afrom __future__ import absolute_import%0Aimport logging%0Aimport os%0A%0A# Import Salt Testing Libs%0Afrom tests.support.helpers import with_tempdir%0Afrom tests.support.mixins import LoaderModuleMockMixin%0Afrom tests.support.unit import TestCase, skipIf%0Afrom tests.support.mock import (%0A Mock,%0A MagicMock,%0A patch,%0A DEFAULT,%0A NO_MOCK,%0A NO_MOCK_REASON,%0A)%0A%0A# Import Salt Libs%0Aimport salt.states.git as git_state # Don't potentially shadow GitPython%0A%0Alog = logging.getLogger(__name__)%0A%0A%0A@skipIf(NO_MOCK, NO_MOCK_REASON)%0Aclass GitTestCase(TestCase, LoaderModuleMockMixin):%0A '''%0A Test cases for salt.states.git%0A '''%0A def setup_loader_modules(self):%0A return %7B%0A git_state: %7B%0A '__env__': 'base',%0A '__opts__': %7B'test': False%7D,%0A '__salt__': %7B%7D,%0A %7D%0A %7D%0A%0A @with_tempdir()%0A def test_latest_no_diff_for_bare_repo(self, target):%0A '''%0A This test ensures that we don't attempt to diff when cloning a repo%0A using either bare=True or mirror=True.%0A '''%0A name = 'https://foo.com/bar/baz.git'%0A gitdir = os.path.join(target, 'refs')%0A isdir_mock = MagicMock(%0A side_effect=lambda path: DEFAULT if path != gitdir else True)%0A%0A branches = %5B'foo', 'bar', 'baz'%5D%0A tags = %5B'v1.1.0', 'v.1.1.1', 'v1.2.0'%5D%0A local_head = 'b9ef06ab6b7524eb7c27d740dbbd5109c6d75ee4'%0A remote_head = 'eef672c1ec9b8e613905dbcd22a4612e31162807'%0A%0A git_diff = Mock()%0A dunder_salt = %7B%0A 'git.current_branch': MagicMock(return_value=branches%5B0%5D),%0A 'git.diff': git_diff,%0A 'git.fetch': MagicMock(return_value=%7B%7D),%0A 'git.is_worktree': MagicMock(return_value=False),%0A 'git.list_branches': MagicMock(return_value=branches),%0A 'git.list_tags': MagicMock(return_value=tags),%0A 'git.remote_refs': MagicMock(return_value=%7B'HEAD': remote_head%7D),%0A 'git.remotes': MagicMock(return_value=%7B%0A 'origin': %7B'fetch': name, 'push': name%7D,%0A %7D),%0A 'git.rev_parse': MagicMock(side_effect=git_state.CommandExecutionError()),%0A 'git.revision': MagicMock(return_value=local_head),%0A 'git.version': MagicMock(return_value='1.8.3.1'),%0A %7D%0A with patch('os.path.isdir', isdir_mock), %5C%0A patch.dict(git_state.__salt__, dunder_salt):%0A result = git_state.latest(%0A name=name,%0A target=target,%0A mirror=True, # mirror=True implies bare=True%0A )%0A assert result%5B'result'%5D is True, result%0A git_diff.assert_not_called()%0A
|
|
469eedab89d22a1051e9d3f6f7f6c94ba946fb37 | Add server tests for JOIN. | irctest/server_tests/test_channel_operations.py | irctest/server_tests/test_channel_operations.py | Python | 0 | @@ -0,0 +1,1986 @@
+%22%22%22%0ASection 3.2 of RFC 2812%0A%3Chttps://tools.ietf.org/html/rfc2812#section-3.2%3E%0A%22%22%22%0A%0Afrom irctest import cases%0Afrom irctest.irc_utils.message_parser import Message%0A%0Aclass JoinTestCase(cases.BaseServerTestCase):%0A def testJoin(self):%0A %22%22%22%E2%80%9CIf a JOIN is successful, the user receives a JOIN message as%0A confirmation and is then sent the channel's topic (using RPL_TOPIC) and%0A the list of users who are on the channel (using RPL_NAMREPLY), which%0A MUST include the user joining.%E2%80%9D%0A -- %3Chttps://tools.ietf.org/html/rfc2812#section-3.2.1%3E%0A%0A %E2%80%9CIf a JOIN is successful, the user is then sent the channel's topic%0A (using RPL_TOPIC) and the list of users who are on the channel (using%0A RPL_NAMREPLY), which must include the user joining.%E2%80%9D%0A -- %3Chttps://tools.ietf.org/html/rfc1459#section-4.2.1%3E%0A %22%22%22%0A self.connectClient('foo')%0A self.sendLine(1, 'JOIN #chan')%0A m = self.getMessage(1)%0A self.assertMessageEqual(m, command='JOIN', params=%5B'#chan'%5D)%0A m = self.getMessage(1)%0A got_topic = False%0A if m.command in ('331', '332'): # RPL_NOTOPIC, RPL_TOPIC%0A got_topic = True%0A m = self.getMessage(1)%0A m = self.assertMessageEqual(m, command='353') # RPL_NAMREPLY%0A m = self.getMessage(1)%0A m = self.assertMessageEqual(m, command='366') # RPL_ENDOFNAMES%0A else:%0A m = self.assertMessageEqual(m, command='353') # RPL_NAMREPLY%0A m = self.getMessage(1)%0A m = self.assertMessageEqual(m, command='366') # RPL_ENDOFNAMES%0A m = self.getMessage(1)%0A self.assertIn(m.command, ('331', '332'), m) # RPL_NOTOPIC, RPL_TOPIC%0A def testJoinTwice(self):%0A self.connectClient('foo')%0A self.sendLine(1, 'JOIN #chan')%0A m = self.getMessage(1)%0A self.assertMessageEqual(m, command='JOIN', params=%5B'#chan'%5D)%0A self.sendLine(1, 'JOIN #chan')%0A # What should we do now?%0A
|
|
ede8282eed8c198fc728985515e886e5a67ba3e0 | To create appropriate Dir structure | MROCPdjangoForm/ocpipeline/createDirStruct.py | MROCPdjangoForm/ocpipeline/createDirStruct.py | Python | 0.998603 | @@ -0,0 +1,2294 @@
+import os%0Aimport argparse%0Afrom shutil import move, rmtree # For moving files%0A%0A'''%0AModule creates a directory structure as defined by a string userDefProjectDir & moves files in%0Atuple args to the userDefProjectDir%0A'''%0Adef createDirStruct(userDefProjectDir, uploadDirPath, endingDir, tempDirPath, moveFileNames):%0A '''%0A userDefProjectDir - the user defined project directory structure%0A uploadDirPath - the location of the files to be placed in userDefProjectDir%0A moveFileNames - tuple of file names in temporary location uploadDirPath%0A tempDirPath - temp directory holding files we are concerned with%0A projectName - the temp project name different from user def project name%0A endingDir - is the directory where the files in the temp location should be moved to%0A '''%0A %0A dataProds = %5B'derivatives/', 'rawdata/', 'graphs/', 'graphInvariants/'%5D%0A %0A for folder in dataProds: %0A if not os.path.exists(userDefProjectDir + folder):%0A os.makedirs(userDefProjectDir + folder)%0A else:%0A print %22%5Cn Folder does exist!%22%0A %0A ''' Move files to appropriate location ''' %0A uploadedFiles = %5B os.path.join(uploadDirPath, moveFileNames%5B0%5D), os.path.join(uploadDirPath, moveFileNames%5B1%5D)%0A ,os.path.join(uploadDirPath, moveFileNames%5B2%5D) %5D%0A %0A i = 0%0A for thefile in uploadedFiles:%0A if not os.path.exists(os.path.join(endingDir,moveFileNames%5Bi%5D)): # If its already there... leave it alone & use the old one%0A move(thefile, endingDir) # Where to save derivatives%0A else:%0A print '%5Cn File does exist!'%0A i += 1%0A %0A %0A ''' Delete project in temp folder'''%0A rmtree(uploadDirPath)%0A %0Adef main():%0A %0A parser = argparse.ArgumentParser(description='Create appropriate dir structure for project & move files that are in temp folder')%0A parser.add_argument('userDefProjectDir', action=%22store%22)%0A parser.add_argument('uploadDirPath', action=%22store%22)%0A parser.add_argument('endingDir', action=%22store%22)%0A parser.add_argument('tempDirPath', action=%22store%22)%0A parser.add_argument('moveFileNames', action=%22store%22)%0A %0A result = parser.parse_args()%0A %0A createDirStruct(result.dirName, result.zipOutName)%0A %0Aif __name__ == '__main__':%0A main()
|
|
21742da132aeb9b834b128f7a7d01b7a2173137a | Add a tcp_server which simulates graphite-relay | tcp_server.py | tcp_server.py | Python | 0.000005 | @@ -0,0 +1,1498 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A# vim:fenc=utf-8%0A#%0A%22%22%22%0AA very simple TCP server for simulating a graphite relay, copied-paste from%0APython documentation. Few things were adjusted to make pylint happy and print%0Aincoming data.%0A%22%22%22%0Aimport asyncio%0A%0A%0Aclass EchoServerClientProtocol(asyncio.Protocol):%0A %22%22%22%0A A TCP server%0A %22%22%22%0A def __init__(self):%0A self.peername = None%0A self.transport = None%0A%0A def connection_made(self, transport):%0A self.peername = transport.get_extra_info('peername')%0A print('Connection from %7B%7D'.format(self.peername))%0A self.transport = transport%0A%0A def data_received(self, data):%0A message = data.decode()%0A print(message)%0A%0A def connection_lost(self, exc):%0A print('client %7B%7D closed connection %7B%7D'.format(self.peername, exc))%0A%0A%0Adef main():%0A %22%22%22%0A main code%0A %22%22%22%0A loop = asyncio.get_event_loop()%0A # Each client connection will create a new protocol instance%0A coro = loop.create_server(EchoServerClientProtocol, '127.0.0.1', 39991)%0A server = loop.run_until_complete(coro)%0A%0A # Serve requests until Ctrl+C is pressed%0A print('Serving on %7B%7D'.format(server.sockets%5B0%5D.getsockname()))%0A try:%0A loop.run_forever()%0A except KeyboardInterrupt:%0A pass%0A%0A # Close the server%0A server.close()%0A loop.run_until_complete(server.wait_closed())%0A loop.close()%0A%0A# This is the standard boilerplate that calls the main() function.%0Aif __name__ == '__main__':%0A main()%0A
|
|
ef52b314eb5e15c34d8b034d7e6f7bdd727b6586 | Add sp500_extractor_v1 version that does not use BeautifulSoup. | Code/sp500_extractor_v1_no_bs.py | Code/sp500_extractor_v1_no_bs.py | Python | 0 | @@ -0,0 +1,1988 @@
+import csv%0Afrom lxml import html%0Aimport time%0Aimport requests%0A%0A%22%22%22%0AMake it work, make it right, make it fast%0A%0AExtract the tickers from the S&P 500 table on Wikipedia, process them into%0Aa list and save them into a CSV file.%0A%0A# Retrieve HTML from URL with requests%0Ahttp://docs.python-requests.org/en/master/user/quickstart/%0A%0A# HTML table structure%0Ahttp://www.w3schools.com/html/html_tables.asp%0A%0A# Python HTML scraping%0Ahttp://docs.python-guide.org/en/latest/scenarios/scrape/%0A%0A# HTML table parsing with xpath%0Ahttp://www.w3schools.com/xml/xpath_syntax.asp%0A%0A# Save to CSV%0Ahttp://gis.stackexchange.com/a/72476%0A%22%22%22%0A%0Aurl = 'https://en.wikipedia.org/wiki/List_of_S%2526P_500_companies'%0A%0Acsv_output = 'sp500_tickers.csv'%0A%0Astart_time = time.time()%0A%0A# Download the S&P 500 table from Wikipedia, creating a string of the raw HTML%0Araw_html = requests.get(url).content%0Ahtml_string = html.fromstring(raw_html)%0A%0Aticker_list = %5B%5D%0A%0A# Pull first HTML table out of the HTML string, then loop through each HTML row%0Afor html_row in html_string.xpath('//table%5B1%5D'):%0A # Pull each HTML row's code that starts with a %3Ctr%3E flag%0A for col in html_row.xpath('.//tr'):%0A # Create a list of text values from each column in this HTML row%0A table_row_list = %5Bitem.text_content() for item in col.xpath('.//td')%5D%0A # Only process table row lists that have values%0A if table_row_list:%0A # Tickers are in the first column in the row (first list element)%0A ticker = table_row_list%5B0%5D.strip()%0A # Append each row's ticker to the ticker list%0A ticker_list.append(ticker)%0A%0A# Alphabetize ticker list%0Aticker_list.sort()%0A%0Aprint(ticker_list)%0A%0A# Save the ticker list to a csv file%0Awith open(csv_output, 'w', newline='') as file:%0A writer = csv.writer(file)%0A for ticker in ticker_list:%0A writer.writerow(%5Bticker%5D)%0A%0Aend_time = time.time()%0Arun_time = round(end_time - start_time, 2)%0Aprint('Finished extracting the S&P 500 ticker list in %25s seconds' %25 run_time)%0A
|
|
d98eebda6b3b0e42ac7ca34c6a1dd6cc8b05d342 | add functions and refactor fibonacci | quickTour/function.py | quickTour/function.py | Python | 0.00001 | @@ -0,0 +1,195 @@
+def fibonacci(n):%0A a,b = 0,1%0A%0A if(n==a):%0A return a%0A if(n==b):%0A return b%0A %0A return fibonacci(n-1)+fibonacci(n-2)%0A %0A%0A%0A%0Afor n in range(0,10):%0A print(fibonacci(n))%0A
|
|
17096036326d62e7e25368ff1247e708fa077cb1 | Support __traceback_hide__ on stacks | raven/utils/stacks.py | raven/utils/stacks.py | """
raven.utils.stacks
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import inspect
import re
from raven.utils.encoding import transform
_coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')
def get_lines_from_file(filename, lineno, context_lines, loader=None, module_name=None):
"""
Returns context_lines before and after lineno from file.
Returns (pre_context_lineno, pre_context, context_line, post_context).
"""
source = None
if loader is not None and hasattr(loader, "get_source"):
try:
source = loader.get_source(module_name)
except ImportError:
# Traceback (most recent call last):
# File "/Users/dcramer/Development/django-sentry/sentry/client/handlers.py", line 31, in emit
# get_client().create_from_record(record, request=request)
# File "/Users/dcramer/Development/django-sentry/sentry/client/base.py", line 325, in create_from_record
# data['__sentry__']['frames'] = varmap(shorten, get_stack_info(stack))
# File "/Users/dcramer/Development/django-sentry/sentry/utils/stacks.py", line 112, in get_stack_info
# pre_context_lineno, pre_context, context_line, post_context = get_lines_from_file(filename, lineno, 7, loader, module_name)
# File "/Users/dcramer/Development/django-sentry/sentry/utils/stacks.py", line 24, in get_lines_from_file
# source = loader.get_source(module_name)
# File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/pkgutil.py", line 287, in get_source
# fullname = self._fix_name(fullname)
# File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/pkgutil.py", line 262, in _fix_name
# "module %s" % (self.fullname, fullname))
# ImportError: Loader for module cProfile cannot handle module __main__
source = None
if source is not None:
source = source.splitlines()
if source is None:
try:
f = open(filename)
try:
source = f.readlines()
finally:
f.close()
except (OSError, IOError):
pass
if source is None:
return None, [], None, []
encoding = 'ascii'
for line in source[:2]:
# File coding may be specified. Match pattern from PEP-263
# (http://www.python.org/dev/peps/pep-0263/)
match = _coding_re.search(line)
if match:
encoding = match.group(1)
break
source = [unicode(sline, encoding, 'replace') for sline in source]
lower_bound = max(0, lineno - context_lines)
upper_bound = lineno + context_lines
pre_context = [line.strip('\n') for line in source[lower_bound:lineno]]
context_line = source[lineno].strip('\n')
post_context = [line.strip('\n') for line in source[lineno+1:upper_bound]]
return lower_bound, pre_context, context_line, post_context
def get_culprit(frames, include_paths=[], exclude_paths=[]):
# We iterate through each frame looking for a deterministic culprit
# When one is found, we mark it as last "best guess" (best_guess) and then
# check it against ``exclude_paths``. If it isnt listed, then we
# use this option. If nothing is found, we use the "best guess".
best_guess = None
culprit = None
for frame in frames:
try:
culprit = '.'.join([frame['module'], frame['function']])
except KeyError:
continue
if any((culprit.startswith(k) for k in include_paths)):
if not (best_guess and any((culprit.startswith(k) for k in exclude_paths))):
best_guess = culprit
elif best_guess:
break
# Return either the best guess or the last frames call
return best_guess or culprit
def iter_traceback_frames(tb):
while tb:
# support for __traceback_hide__ which is used by a few libraries
# to hide internal frames.
if not tb.tb_frame.f_locals.get('__traceback_hide__'):
yield tb.tb_frame
tb = tb.tb_next
def iter_stack_frames(frames=None):
if not frames:
frames = inspect.stack()[1:]
for frame_crud in frames:
yield frame_crud[0]
def get_stack_info(frames):
results = []
for frame in frames:
# Support hidden frames
if frame.f_locals.get('__traceback_hide__'):
continue
filename = frame.f_code.co_filename
function = frame.f_code.co_name
lineno = frame.f_lineno - 1
loader = frame.f_globals.get('__loader__')
module_name = frame.f_globals.get('__name__')
pre_context_lineno, pre_context, context_line, post_context = get_lines_from_file(filename, lineno, 7, loader, module_name)
if pre_context_lineno is not None:
results.append({
'id': id(frame),
'filename': filename,
'module': module_name,
'function': function,
'lineno': lineno + 1,
# TODO: vars need to be references
'vars': transform(frame.f_locals.items()),
'pre_context': pre_context,
'context_line': context_line,
'post_context': post_context,
'pre_context_lineno': pre_context_lineno + 1,
})
return results
| Python | 0.000038 | @@ -4372,21 +4372,31 @@
or frame
-_crud
+ in (f%5B0%5D for f
in fram
@@ -4389,32 +4389,33 @@
for f in frames
+)
:%0A yield
@@ -4412,27 +4412,93 @@
-yield
+if
frame
-_crud%5B0%5D
+.f_locals.get('__traceback_hide__'):%0A continue%0A yield frame
%0A%0Ade
|
fefb13108a151c5cbfe8c6acd5b94a480dac98ec | Add test for NPairLossScheme | tests/test_datasets.py | tests/test_datasets.py | Python | 0 | @@ -0,0 +1,884 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0ACreated on Tue Feb 21 20:30:26 2017%0A%0A@author: sakurai%0A%22%22%22%0A%0Aimport unittest%0A%0Aimport numpy as np%0A%0Afrom ..datasets.data_provider import NPairLossScheme%0A%0A%0Aclass TestNPairLossScheme(unittest.TestCase):%0A%0A def test_pairs_of_indexes(self):%0A batch_size = 20%0A labels = sum(%5B%5Bi%5D*10 for i in range(10)%5D, %5B%5D)%0A scheme = NPairLossScheme(labels, batch_size)%0A it = scheme.get_request_iterator()%0A for i in range(5):%0A indexes = next(it)%0A a_indexes = indexes%5B:batch_size / 2%5D%0A p_indexes = indexes%5Bbatch_size / 2:%5D%0A a_labels = np.array(labels)%5Ba_indexes%5D%0A p_labels = np.array(labels)%5Bp_indexes%5D%0A%0A np.testing.assert_array_equal(a_labels, p_labels)%0A np.testing.assert_equal(len(a_labels), len(np.unique(a_labels)))%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
33375a9333852eafa1bf262fb30f5d827c4534f7 | Create networkx.py | networkx.py | networkx.py | Python | 0.000007 | @@ -0,0 +1,16 @@
+import networkx%0A
|
|
1c2330d9e45b9e87ed70848fd0ce192b0d06c904 | Update build_status.py | infra/auto-setup/build_status.py | infra/auto-setup/build_status.py | #!/usr/bin/env python
import codecs
import datetime
import os
import subprocess
import sys
import jenkins
import jinja2
from jinja2 import Environment, FileSystemLoader
JENKINS_SERVER = ('localhost', 8080)
LOGS_BUCKET = 'oss-fuzz-build-logs'
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
class Result(object):
"""Result."""
def __init__(self, name, output):
self.name = name
self.output = output
def get_build_results(server):
"""Return successes, failures."""
successes = []
failures = []
for job in server.get_jobs(1):
try:
name = job['fullname']
if not name.startswith('projects/'):
continue
print name
project = name[len('projects/'):]
info = server.get_job_info(name)
last_build_number = info['lastCompletedBuild']['number']
last_failed_builder_number = info['lastFailedBuild']['number']
if last_build_number == last_failed_builder_number:
failures.append(Result(
project,
server.get_build_console_output(name, last_build_number)))
else:
successes.append(Result(
project,
server.get_build_console_output(name, last_build_number)))
except Exception as e:
print >>sys.stderr, e
return successes, failures
def upload_status(successes, failures):
"""Upload main status page."""
env = Environment(loader=FileSystemLoader(os.path.join(SCRIPT_DIR,
'templates')))
with open('status.html', 'w') as f:
f.write(
env.get_template('status_template.html').render(
failures=failures, successes=successes,
last_updated=datetime.datetime.utcnow().ctime()))
subprocess.check_output(['gsutil', 'cp', 'status.html', 'gs://' +
LOGS_BUCKET], stderr=subprocess.STDOUT)
def upload_build_logs(successes, failures):
"""Upload individual build logs."""
for result in failures + successes:
with codecs.open('latest.txt', 'w', encoding='utf-8') as f:
f.write(result.output)
subprocess.check_output(['gsutil', 'cp', 'latest.txt',
'gs://%s/build_logs/%s/' %
(LOGS_BUCKET, result.name)],
stderr=subprocess.STDOUT)
def main():
jenkins_login = get_jenkins_login()
server = jenkins.Jenkins('http://%s:%d' % JENKINS_SERVER,
username=jenkins_login[0], password=jenkins_login[1])
successes, failures = get_build_results(server)
upload_status(successes, failures)
upload_build_logs(successes, failures)
def get_jenkins_login():
"""Returns (username, password) for jenkins."""
username = os.getenv('JENKINS_USER')
password = os.getenv('JENKINS_PASS')
return username, password
if __name__ == '__main__':
main()
| Python | 0.000002 | @@ -84,16 +84,33 @@
port sys
+%0Aimport traceback
%0A%0Aimport
@@ -1256,29 +1256,29 @@
-print %3E%3Esys.stderr, e
+traceback.print_exc()
%0A%0A
|
98a053f945e2c7cc01d8fbdec374ab90305bc11f | Create new files. | ospathex.py | ospathex.py | Python | 0 | @@ -0,0 +1,1562 @@
+#!/usr/bin/python%0A#_*_coding:utf-8_*_%0A%0Aimport os%0A%0Afor tmpdir in ('temp', r'c:%5Cwindows%5Ctemp'):%0A if os.path.isdir(tmpdir):%0A break%0A else:%0A print 'no temp directory available'%0A tmpdir = ''%0A%0Aif tmpdir:%0A os.chdir(tmpdir)%0A cwd = os.getcwd()%0A print '*** current temporary directory'%0A print cwd%0A%0A print '*** creating temporary directory...'%0A os.mkdir('example')%0A os.chdir('example')%0A cwd = os.getcwd()%0A print '*** new working directory: '%0A print cwd%0A print '*** original directory listing: '%0A print os.listdir(cwd)%0A%0A print '*** creating test file...'%0A fobj = open('test', 'w')%0A fobj.write('foo%5Cn')%0A fobj.write('bar%5Cn')%0A fobj.close()%0A print '*** updated directory listing: '%0A print os.listdir(cwd)%0A%0A print %22*** renameing 'test' to 'filetest.txt'%22%0A os.rename('test', 'filename.txt')%0A print '*** updated directory listing: '%0A print os.listdir(cwd)%0A%0A path = os.path.join(cwd, os.listdir(cwd)%5B0%5D)%0A print '*** full file pathname'%0A print path%0A print '*** (pathname, basename)== '%0A print os.path.split(path)%0A print '*** (filename, extension) == '%0A print os.path.splitext(os.path.basename(path))%0A%0A print '*** displaying file contents: '%0A fobj = open(path)%0A for eachLine in fobj:%0A print eachLine%0A fobj.close()%0A%0A print '*** deleting test file'%0A os.remove(path)%0A print '*** updated directory listing: '%0A print os.listdir(cwd)%0A os.chdir(os.pardir)%0A print '*** deleting test directory'%0A os.rmdir('example')%0A print '*** DONE!'
|
|
81ade3168faa68ef43456cc35a122b9ef493a23e | Add script to plot MS flag rate and acq fail rate | plot_ms_flag_acq_fails.py | plot_ms_flag_acq_fails.py | Python | 0 | @@ -0,0 +1,1395 @@
+from __future__ import division%0A%0Aimport matplotlib.pyplot as plt%0Afrom astropy.table import Table%0Aimport numpy as np%0Afrom Ska.DBI import DBI%0Afrom chandra_aca import star_probs%0A%0Adb = DBI(dbi='sybase', server='sybase', user='aca_read')%0A%0Astats = db.fetchall('SELECT * from trak_stats_data '%0A 'WHERE kalman_datestart %3E %222014:180%22 '%0A 'AND aoacmag_median is not NULL')%0A%0Astats = Table(stats)%0Amags = stats%5B'aoacmag_median'%5D%0Aok = (mags %3E 9) & (mags %3C 11)%0Astats = stats%5Bok%5D%0Amags = mags%5Bok%5D%0A%0Astats%5B'frac_ms'%5D = stats%5B'mult_star_samples'%5D / stats%5B'n_samples'%5D%0A%0Astats%5B'mag_bin'%5D = np.round(mags / 0.2) * 0.2%0Asg = stats.group_by('mag_bin')%0Asgm = sg.groups.aggregate(np.mean)%0A%0Aplt.figure(1, figsize=(6, 4))%0Aplt.clf()%0Arandx = np.random.uniform(-0.05, 0.05, size=len(stats))%0Aplt.plot(mags + randx, stats%5B'frac_ms'%5D, '.', alpha=0.5,%0A label='MS flag rate per obsid')%0Aplt.plot(sgm%5B'mag_bin'%5D, sgm%5B'frac_ms'%5D, 'r', linewidth=5, alpha=0.7,%0A label='MS flag rate (0.2 mag bins)')%0A%0Ap_acqs = star_probs.acq_success_prob('2016:001', t_ccd=-15.0, mag=sgm%5B'mag_bin'%5D)%0Aplt.plot(sgm%5B'mag_bin'%5D, 1 - p_acqs, 'g', linewidth=5,%0A label='Acq fail rate (model 2016:001, T=-15C)')%0A%0Aplt.legend(loc='upper left', fontsize='small')%0Aplt.xlabel('Magnitude')%0Aplt.title('Acq fail rate compared to MS flag rate')%0Aplt.grid()%0Aplt.tight_layout()%0Aplt.savefig('ms_flag_acq_fails.png')%0A
|
|
f2413f05bc64818297541112f42e2a8d5ae72cbe | Create test_setup.py | test_setup.py | test_setup.py | Python | 0 | @@ -0,0 +1,294 @@
+import wget%0Aimport os%0A%0Atest_files_path = os.getcwd() + '/image-analysis/test/test_data/'%0A%0A# test files will be here whether is data, images, videos ect.%0Atest_files = %5B%22https://s3.amazonaws.com/testcodas/test_video.mp4%22%5D%0A%0Afor file_path in test_files:%0A wget.download(file_path, test_files_path)%0A
|
|
c54c948531cd73b0c0dd78b6bc8a1c5245886c97 | add visualise.py | visualize.py | visualize.py | Python | 0.000024 | @@ -0,0 +1,1395 @@
+#!/usr/bin/env python%0A%0Aimport json%0Aimport math%0Aimport numpy%0Aimport os%0Aimport re%0Aimport sys%0A%0Aif __name__ == '__main__':%0A if len(sys.argv) %3C 3:%0A print('usage: %25s %5Bresult dir%5D %5Boutput html%5D' %25 sys.argv%5B0%5D)%0A sys.exit()%0A%0A result = %5B%5B%5D, %5B%5D, %5B%5D, %5B%5D%5D%0A for filename in os.listdir(sys.argv%5B1%5D):%0A match = re.match('(%5B0-9%5D+)_(%5B0-9%5D+).result', filename)%0A if not match:%0A continue%0A%0A average, size = map(int, match.groups())%0A name = 'Average: %25d, Size: %25d' %25 (average, size)%0A%0A matrix = numpy.loadtxt(os.path.join(sys.argv%5B1%5D, filename), dtype = str)%0A data = matrix%5B1:,1:%5D.astype(int)%0A result%5B0%5D.append(%5Bnumpy.mean(data%5B:,3%5D), numpy.mean(data%5B:,4%5D), len(data), name%5D)%0A result%5B1%5D.append(%5Bnumpy.median(data%5B:,3%5D), numpy.median(data%5B:,4%5D), len(data), name%5D)%0A result%5B2%5D.append(%5Bnumpy.amin(data%5B:,3%5D), numpy.amin(data%5B:,4%5D), len(data), name%5D)%0A result%5B3%5D.append(%5Bnumpy.amax(data%5B:,3%5D), numpy.amax(data%5B:,4%5D), len(data), name%5D)%0A %0A path = os.path.join(os.path.dirname(__file__), 'html')%0A with open(os.path.join(path, 'template.html')) as input:%0A with open(sys.argv%5B2%5D, 'w') as output:%0A relpath = os.path.relpath(path, os.path.dirname(sys.argv%5B2%5D))%0A%0A html = input.read()%0A format = %5Brelpath%5D * 5 + map(json.dumps, result)%0A output.write(html %25 tuple(format))%0A
|
|
6c61c2d367e698861657d4cfc9bba0ba3789f197 | add naive bayes | nb.py | nb.py | Python | 0.999993 | @@ -0,0 +1,2486 @@
+import numpy as np%0A%0Aclass NaiveBayes:%0A def __init__(self):%0A self._prior = None%0A self._mat = None%0A%0A def train(self, X, y):%0A y = np.matrix(y)%0A p1 = y*X%0A p2 = (1-y)*X%0A p = np.vstack(%5B%0A np.log(p1+1) - np.log(p1.sum() + p1.shape%5B1%5D),%0A np.log(p2+1) - np.log(p2.sum() + p2.shape%5B1%5D)%5D)%0A pri = np.matrix(%5B%5Bfloat(y.sum())/y.shape%5B1%5D%5D, %5B1 - float(y.sum())/y.shape%5B1%5D %5D%5D)%0A self._prior = np.log(pri)%0A self._mat = p%0A return p, pri%0A%0A def predict_many(self, mat):%0A logp = self._mat*mat.T + self._prior%0A ans = (np.sign(logp%5B0%5D - logp%5B1%5D) + 1)/2%0A return ans.A1%0A%0A def validate(self, mat, real_y):%0A predict_y = self.predict_many(mat)%0A return (predict_y == real_y).sum()%0A%0Aif __name__ == '__main__':%0A import loader%0A from sklearn.feature_extraction.text import HashingVectorizer%0A%0A d = loader.DataLoader()%0A g = d.alldata()%0A def iter_data(n, y, cat):%0A c = 0%0A for business in g:%0A if c %25 1000 == 0:%0A print c, '/', n%0A if c%3Cn:%0A if cat.decode('utf-8') in business.categories:%0A y%5Bc%5D = 1%0A else:%0A y%5Bc%5D = 0%0A yield %22%22.join(business.reviews)%0A else:%0A return%0A c += 1%0A# f = open('data/yelp.csv')%0A# def iter_data(n, y, cat):%0A# c = 0%0A# for line in f:%0A# if c %25 1000 == 0:%0A# print c, '/', n%0A# if c %3C n:%0A# b_id, categories, review = line.split('%5Ct')%0A# categories = categories.split(',')%0A# if cat in categories:%0A# y%5Bc%5D = 1%0A# else:%0A# y%5Bc%5D = 0%0A# yield review%0A# else:%0A# return%0A# c += 1%0A%0A%0A n = 4000%0A y = np.zeros(n)%0A v = HashingVectorizer(stop_words='english', non_negative=True, norm=None)%0A mat = v.transform(iter_data(n, y, 'Restaurants'))%0A print 'data readed', mat.shape, y.shape%0A nt = 1000%0A yt = np.zeros(nt)%0A mt = v.transform(iter_data(nt, yt, 'Restaurants'))%0A #print yt%0A %0A print 'our code',%0A mm = NaiveBayes()%0A mm.train(mat, y)%0A print float(mm.validate(mt, yt))/nt%0A%0A from sklearn.naive_bayes import MultinomialNB%0A model = MultinomialNB()%0A clf = model.fit(mat, y)%0A print 'model trained'%0A s = model.score(mt, yt)%0A print s%0A%0A%0A
|
|
9a33761f33c4f49a27d72944c231cb447353d81e | Add problem 10 | 010.py | 010.py | Python | 0 | @@ -0,0 +1,622 @@
+#!/usr/bin/env python3%0A# Author: Severin Kaderli %[email protected]%3E%0A#%0A# Project Euler - Problem 10:%0A# Find the sum of all the primes below two million.%0Adef get_prime_numbers(n):%0A %22%22%22Gets all prime numbers below n.%22%22%22%0A primes, sieve = %5B%5D, %5BTrue%5D * n%0A%0A for i in range(2, n):%0A if sieve%5Bi%5D:%0A primes.append(i)%0A for j in range(i*i, n, i):%0A sieve%5Bj%5D = False%0A%0A return primes%0A%0Adef get_prime_sum(n = 2000000):%0A %22%22%22Calculate the sum of all prime numbers below n.%22%22%22%0A return sum(get_prime_numbers(n))%0A%0Aif __name__ == %22__main__%22:%0A print(get_prime_sum(2000000))%0A
|
|
d075d188d541090ad8d3a5c4cf583ba10063aa88 | Move timing to right location for staging. | project/project/timing.py | project/project/timing.py | Python | 0 | @@ -0,0 +1,1207 @@
+import time%0A%0Afrom django.utils.deprecation import MiddlewareMixin%0A%0A%0Aclass TimingMiddleware(object):%0A %22%22%22Times a request and adds timing information to the content.%0A%0A Adds an attribute, %60_timing%60, onto the request, and uses this at the end%0A of the rendering chain to find the time difference. It replaces a token in%0A the HTML, %22%3C!-- RENDER_TIME --%3E%22, with the rendered time.%0A %22%22%22%0A%0A # Keep these out here so they can be modified in Django settings.%0A%0A REQUEST_ANNOTATION_KEY = %22_timing%22%0A REPLACE = b%22%3C!-- RENDER_TIME --%3E%22%0A REPLACE_TEMPLATE = b%22%3Cspan%3EHandsomely rendered in %25ims.%3C/span%3E%22%0A%0A def __init__(self, get_response):%0A self.get_response = get_response%0A # One-time configuration and initialization.%0A%0A def __call__(self, request):%0A setattr(request, self.REQUEST_ANNOTATION_KEY, time.time())%0A%0A response = self.get_response(request)%0A%0A then = getattr(request, self.REQUEST_ANNOTATION_KEY, None)%0A if then and hasattr(response, 'content'):%0A now = time.time()%0A msg = self.REPLACE_TEMPLATE %25 (int((now - then) * 1000))%0A response.content = response.content.replace(self.REPLACE, msg)%0A return response
|
|
2e503a58a1f9893d25cf2dbb2c885bc9834faebf | Create urls.py | tests/urls.py | tests/urls.py | Python | 0.000017 | @@ -0,0 +1,261 @@
+from django.conf.urls import url, include%0Afrom webhook.base import WebhookBase%0A%0A%0Aclass WebhookView(WebhookBase):%0A%0A def process_webhook(self, data, meta):%0A pass%0A%0A%0Aurlpatterns = %5B%0A url(r'%5Ewebhook-receiver', WebhookView.as_view(), name='web_hook'),%0A%5D%0A
|
|
0b3bfeb06a4594a2c188e623835c3a54262cca5d | Write initial Bible book HTML parser | utilities/book_parser.py | utilities/book_parser.py | Python | 0 | @@ -0,0 +1,1525 @@
+# utilities.book_parser%0A# coding=utf-8%0A%0Afrom __future__ import unicode_literals%0Aimport yvs.shared as shared%0Afrom HTMLParser import HTMLParser%0A%0A%0Aclass BookParser(HTMLParser):%0A%0A # Resets parser variables (implicitly called on instantiation)%0A def reset(self):%0A HTMLParser.reset(self)%0A self.depth = 0%0A self.in_book = False%0A self.book_depth = 0%0A self.books = %5B%5D%0A self.book_name_parts = %5B%5D%0A%0A # Detects the start of a book link%0A def handle_starttag(self, tag, attrs):%0A attr_dict = dict(attrs)%0A self.depth += 1%0A if 'data-book' in attr_dict:%0A self.in_book = True%0A self.book_depth = self.depth%0A self.books.append(%7B%0A 'id': attr_dict%5B'data-book'%5D%0A %7D)%0A%0A # Detects the end of a book link%0A def handle_endtag(self, tag):%0A if self.in_book and self.depth == self.book_depth:%0A self.in_book = False%0A self.books%5B-1%5D%5B'name'%5D = ''.join(self.book_name_parts).strip()%0A # Empty the list containing the book name parts%0A del self.book_name_parts%5B:%5D%0A self.depth -= 1%0A%0A # Handles the book name contained within the current book link%0A def handle_data(self, content):%0A if self.in_book:%0A self.book_name_parts.append(content)%0A%0A # Handles all HTML entities within the book name%0A def handle_charref(self, name):%0A if self.in_book:%0A char = shared.eval_html_charref(name)%0A self.book_name_parts.append(char)%0A
|
|
7b2d3aedbc2f78119974c9e724b37b2b336297d1 | Add device_hive_api.py | devicehive/device_hive_api.py | devicehive/device_hive_api.py | Python | 0.000029 | @@ -0,0 +1,1325 @@
+from devicehive.handler import Handler%0Afrom devicehive.device_hive import DeviceHive%0A%0A%0Aclass ApiCallHandler(Handler):%0A %22%22%22Api call handler class.%22%22%22%0A%0A def __init__(self, api, call, *call_args, **call_kwargs):%0A super(ApiCallHandler, self).__init__(api)%0A self._call = call%0A self._call_args = call_args%0A self._call_kwargs = call_kwargs%0A self._call_result = None%0A%0A @property%0A def call_result(self):%0A return self._call_result%0A%0A def handle_connect(self):%0A self._call_result = getattr(self.api, self._call)(*self._call_args,%0A **self._call_kwargs)%0A self.api.disconnect()%0A%0A%0Aclass DeviceHiveApi(object):%0A %22%22%22Device hive api class.%22%22%22%0A%0A def __init__(self, transport_url, **options):%0A self._transport_url = transport_url%0A self._options = options%0A%0A def _call(self, call, *call_args, **call_kwargs):%0A device_hive = DeviceHive(ApiCallHandler, call, *call_args,%0A **call_kwargs)%0A device_hive.connect(self._transport_url, **self._options)%0A return device_hive.transport.handler.handler.call_result%0A%0A def get_info(self):%0A return self._call('get_info')%0A%0A def get_cluster_info(self):%0A return self._call('get_cluster_info')%0A
|
|
1d4e462188e95b1270d45f95112c2458cbeb7b2f | Add definitions.py | definitions.py | definitions.py | Python | 0 | @@ -0,0 +1,2604 @@
+%0Adef API_launch():%0A %0A global app_config%0A global tweepy%0A%0A# Twitter API configuration%0A consumer_key = app_config.twitter%5B%22consumer_key%22%5D%0A consumer_secret = app_config.twitter%5B%22consumer_secret%22%5D%0A%0A access_token = app_config.twitter%5B%22access_token%22%5D%0A access_token_secret = app_config.twitter%5B%22access_token_secret%22%5D%0A%0A# Start%0A auth = tweepy.OAuthHandler(consumer_key, consumer_secret)%0A auth.set_access_token(access_token, access_token_secret)%0A%0A api = tweepy.API(auth)%0A return api%0A%0Adef followers_list(number_followers=200):%0A%0A global api%0A followers = api.followers(count=number_followers)%0A%0A followers_name = %5B%5D%0A for follower in followers:%0A followers_name.append(str(follower.screen_name))%0A return followers_name%0A%0Adef create_db(database_name='bot_detection.db'):%0A%0A global sqlite3%0A conn = sqlite3.connect(database_name)%0A %0Adef create_table(database_name='bot_detection.db'):%0A %0A global sqlite3%0A conn = sqlite3.connect(database_name)%0A conn.execute('''CREATE TABLE TWEETS%0A (ID INT PRIMARY KEY NOT NULL,%0A NAME TEXT NOT NULL,%0A DATE TEXT NOT NULL,%0A TEXT TEXT NOT NULL,%0A MENTIONS TEXT NOT NULL);''')%0A conn.close()%0A%0Adef feed_table(ID ,NAME,DATE ,TEXT,MENTIONS,database_name='bot_detection.db'):%0A %0A global sqlite3%0A conn = sqlite3.connect(database_name)%0A conn.execute(%22INSERT INTO TWEETS (ID,NAME,DATE,TEXT,MENTIONS) VALUES (?,?,?,?,?)%22%0A ,(ID, NAME ,DATE,TEXT, MENTIONS))%0A%0A conn.commit()%0A conn.close()%0A %0Adef tweet_info(follower,tweets_number=100):%0A%0A global api%0A global json%0A global unicodedata%0A user_info = api.user_timeline(screen_name = follower,count = tweets_number)%0A%0A tweet = %7B%7D%0A name_mentions = %5B%5D%0A%0A for i,status in enumerate(user_info):%0A tweet = status._json%0A text = tweet%5B'text'%5D%0A date = tweet%5B'created_at'%5D%0A entities = tweet%5B'entities'%5D%0A user_mentions = entities%5B'user_mentions'%5D%0A for mention in user_mentions:%0A dict_mentions = mention%0A name_mentions = dict_mentions%5B'screen_name'%5D%0A %0A ID_string = i%0A name_string = follower %0A text_string = unicodedata.normalize('NFKD', text).encode('ascii','ignore')%0A date_string = unicodedata.normalize('NFKD', date).encode('ascii','ignore')%0A name_mentions_string = unicodedata.normalize('NFKD', name_mentions).encode('ascii','ignore')%0A %0A feed_table(ID_string,%0A name_string,%0A text_string,%0A date_string,%0A name_mentions_string)%0A%0A%0A
|
|
cea01def787c6f9c6bfecd96fe40a7b8a312cf8e | Fix docstrings in RegisterHelper | FTB/Signatures/RegisterHelper.py | FTB/Signatures/RegisterHelper.py | '''
Crash Signature
Represents a crash signature as specified in https://wiki.mozilla.org/Security/CrashSignatures
@author: Christian Holler (:decoder)
@license:
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
@contact: [email protected]
'''
x86Registers = [ "eax", "ebx", "ecx", "edx", "esi", "edi", "ebp", "esp", "eip" ]
x64Registers = [ "rax", "rbx", "rcx", "rdx", "rsi", "rdi", "rbp", "rsp", "r8",
"r9", "r10", "r11", "r12", "r13", "r14", "r15", "rip" ]
armRegisters = [ "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9",
"r10", "r11", "r12", "sp", "lr", "pc", "cpsr" ]
validRegisters = {
"X86" : x86Registers,
"X64" : x64Registers,
"ARM" : armRegisters
}
def getRegisterPattern():
# Return a pattern including all register names that are considered valid
return "(" + '|'.join(["%s"] * len(validRegisters.values())) % tuple(['|'.join(i) for i in validRegisters.values()]) + ")"
def getStackPointer(registerMap):
'''
Return the stack pointer value from the given register map
@type registerMap: map
@param registerMap: Map of register names to value
@rtype long
@return The value of the stack pointer
'''
for reg in ["rsp", "esp", "sp"]:
if reg in registerMap:
return registerMap[reg]
raise RuntimeError("Register map does not contain a usable stack pointer!")
def getRegisterValue(register, registerMap):
'''
Return the value of the specified register using the provided register map.
This method also works for getting lower register parts out of higher ones.
@type register: string
@param register: The register to get the value for
@type registerMap: map
@param registerMap: Map of register names to values
'''
# If the register is requested as in the map, return it immediately
if register in registerMap:
return registerMap[register]
if register.startswith("e"):
# We might have the case that 32 bit of a 64 bit register
# are requested (e.g. eax and we have rax). Either that is
# the case, or we return None anyway because we don't know
# what else to do.
higherRegister = register.replace("e", "r", 1)
if higherRegister in registerMap:
return registerMap[higherRegister] & 0xFFFFFFFFL
if len(register) == 2:
# We're either requesting the lower 16 bit of a register (ax),
# or higher/lower 8 bit of the lower 16 bit register (ah/al).
if register.endswith("x"):
# Find proper higher register
reg32 = "e" + register
if reg32 in registerMap:
return registerMap[reg32] & 0xFFFFL
reg64 = "r" + register
if reg64 in registerMap:
return registerMap[reg64] & 0xFFFFL
elif register.endswith("h"):
higherRegister = register.replace("h", "x", 1)
# Find proper higher register
reg32 = "e" + higherRegister
if reg32 in registerMap:
return (registerMap[reg32] & 0xFF00L) >> 8
reg64 = "r" + higherRegister
if reg64 in registerMap:
return (registerMap[reg64] & 0xFF00L) >> 8
elif register.endswith("l"):
higherRegister = register.replace("l", "x", 1)
# Find proper higher register
reg32 = "e" + higherRegister
if reg32 in registerMap:
return registerMap[reg32] & 0xFFL
reg64 = "r" + higherRegister
if reg64 in registerMap:
return registerMap[reg64] & 0xFFL
return None
def getBitWidth(registerMap):
'''
Return the bit width (32 or 64 bit) given the registers
@type registerMap: map
@param registerMap: Map of register names to value
@rtype int
@return The bit width
'''
if "rax" in registerMap:
return 64
return 32
def isX86Compatible(registerMap):
'''
Return true, if the the given registers are X86 compatible, such as x86 or x86-64.
ARM, PPC and your PDP-15 will fail this check and we don't support it right now.
@type registerMap: map
@param registerMap: Map of register names to value
@rtype bool
@return True if the architecture is X86 compatible, False otherwise
'''
return "rax" in registerMap or "eax" in registerMap | Python | 0.000001 | @@ -963,17 +963,27 @@
():%0A
-#
+'''%0A
Return
@@ -1047,16 +1047,24 @@
d valid%0A
+ '''%0A
retu
@@ -1414,16 +1414,17 @@
@rtype
+:
long%0A
@@ -1432,24 +1432,25 @@
@return
+:
The value o
@@ -2096,16 +2096,82 @@
values%0A
+ %0A @rtype: long%0A @return: The register value%0A
'''%0A
@@ -4335,16 +4335,17 @@
@rtype
+:
int%0A
@@ -4356,16 +4356,17 @@
@return
+:
The bit
@@ -4792,16 +4792,17 @@
@rtype
+:
bool%0A
@@ -4814,16 +4814,17 @@
@return
+:
True if
|
045a10457cd87e37ef5862de55e344db5e9228cf | Add configfile.py | configfile.py | configfile.py | Python | 0.000003 | @@ -0,0 +1,1953 @@
+# vim: set et ts=4 sw=4 fdm=marker%0A%22%22%22%0AMIT License%0A%0ACopyright (c) 2016 Jesse Hogan%0A%0APermission is hereby granted, free of charge, to any person obtaining a copy%0Aof this software and associated documentation files (the %22Software%22), to deal%0Ain the Software without restriction, including without limitation the rights%0Ato use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0Acopies of the Software, and to permit persons to whom the Software is%0Afurnished to do so, subject to the following conditions:%0A%0AThe above copyright notice and this permission notice shall be included in all%0Acopies or substantial portions of the Software.%0A%0ATHE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0AIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0AFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0AAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0ALIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0AOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0ASOFTWARE.%0A%22%22%22%0A%0A# TODO Write Tests%0Afrom entities import *%0Afrom accounts import *%0Aimport yaml%0A%0Aclass configfile(entity):%0A _instance = None%0A%0A @classmethod%0A def getinstance(cls):%0A%0A if cls._instance == None:%0A cls._instance = configfile()%0A%0A return cls._instance%0A%0A @property%0A def file(self):%0A return self._file%0A%0A @file.setter%0A def file(self, v):%0A self._file = v%0A self.load()%0A %0A @property%0A def accounts(self):%0A return self._accounts%0A%0A @accounts.setter%0A def accounts(self, v):%0A self._accounts = v%0A%0A def clear(self):%0A self._accounts = accounts()%0A%0A def load(self):%0A self.clear()%0A%0A with open(self.file, 'r') as stream:%0A cfg = yaml.load(stream)%0A%0A for acct in cfg%5B'accounts'%5D:%0A self.accounts += account.create(acct)%0A%0A%0A
|
|
2275ae52e336bd2e07e32fa3a2559926734c3567 | add pyunit for PUBDEV-1480 | h2o-py/tests/testdir_jira/pyunit_NOPASS_INTERNAL_pubdev_1480_medium.py | h2o-py/tests/testdir_jira/pyunit_NOPASS_INTERNAL_pubdev_1480_medium.py | Python | 0 | @@ -0,0 +1,727 @@
+import sys, os%0Asys.path.insert(1, %22../../%22)%0Aimport h2o, tests%0A%0Adef pubdev_1480():%0A%0A if not tests.hadoop_namenode_is_accessible(): raise(EnvironmentError, %22Not running on H2O internal network. No access to HDFS.%22)%0A train = h2o.import_file(%22hdfs://mr-0xd6/datasets/kaggle/sf.crime.train.gz%22)%0A test = h2o.import_file(%22hdfs://mr-0xd6/datasets/kaggle/sf.crime.test.gz%22)%0A%0A model = h2o.gbm(x=train%5Brange(2,9)%5D, y=train%5B1%5D)%0A%0A predictions = model.predict(test)%0A%0A results_dir = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__)),%22..%22,%22results%22))%0A h2o.download_csv(predictions, os.path.join(results_dir,%22predictions.csv%22))%0A%0Aif __name__ == %22__main__%22:%0A tests.run_test(sys.argv, pubdev_1480)%0A
|
|
b16f6ea8a723fa064a78e014ab767be1a797e613 | Create cab.py | cab.py | cab.py | Python | 0.000006 | @@ -0,0 +1,1136 @@
+%22%22%22%0AWork with *.cab files%0A%22%22%22%0Afrom ctypes import pythonapi%0Afrom ctypes import cdll%0Afrom ctypes import cast%0Aimport ctypes as _ctypes%0A%0Alibc = cdll%5B_ctypes.util.find_library('c')%5D%0Alibcab = cdll%5B_ctypes.util.find_library('cabinet')%5D%0A%0APyMem_Malloc = pythonapi.PyMem_Malloc%0APyMem_Malloc.restype = _ctypes.c_size_t%0APyMem_Malloc.argtypes = %5B_ctypes.c_size_t%5D%0A%0Astrncpy = libc.strncpy%0Astrncpy.restype = _ctypes.c_char_p%0Astrncpy.argtypes = %5B_ctypes.c_char_p, _ctypes.c_char_p, _ctypes.c_size_t%5D%0A%0AHOOKFUNC = _ctypes.CFUNCTYPE(_ctypes.c_char_p, _ctypes.c_void_p, _ctypes.c_void_p, _ctypes.c_char_p)%0A%0A# typedef struct %7B%0A# DWORD cbStruct;%0A# DWORD dwReserved1;%0A# DWORD dwReserved2;%0A# DWORD dwFileVersionMS;%0A# DWORD dwFileVersionLS;%0A# %7D CABINETDLLVERSIONINFO, *PCABINETDLLVERSIONINFO;%0A%0Aclass CABINETDLLVERSIONINFO(_ctypes.Structure):%0A _fields_ = %5B('cbStruct', _ctypes.c_double),%0A ('dwReserved1', _ctypes.c_double),%0A ('dwReserved2', _ctypes.c_double),%0A ('dwFileVersionMS', _ctypes.c_double),%0A ('dwFileVersionLS', _ctypes.c_double)%5D%0A%0Alibcab.DllGetVersion.restype = CABINETDLLVERSIONINFO%0A
|
|
e6abd6a44de8687d88672ba80388afbe0cdb029a | 习题 1: 第一个程序 | ex1.py | ex1.py | Python | 0.997795 | @@ -0,0 +1,307 @@
+# --coding:utf8--%0Aprint(%22Hello world%22)%0Aprint(%22Hello Again%22)%0Aprint(%22I like type this.%22)%0Aprint(%22This is fun%22)%0Aprint('Yay! Printing')%0Aprint(%22I'd much rather you 'not'.%22)%0Aprint('I %22said%22 do not touch this')%0Aprint('%E5%A6%B3%E5%A5%BD%EF%BC%8C%E6%88%91%E6%98%AFPython!')%0Aprint('%E9%80%99%E6%98%AF%E6%88%91%E7%9A%84%E7%B7%B4%E7%BF%92%E9%A1%8C')%0Aprint('%E4%B8%8B%E9%9D%A2%E8%A6%81%E5%8A%A0%E4%B8%8A#%E7%9A%84%E8%A8%BB%E9%87%8B')%0A# This is pound%0A# This is hash%0A# This is mesh%0A
|
|
e17adde73c146ded7ed5a1a347f104a5e7a09f62 | Add bzl macro. | tools/testing/python/py23.bzl | tools/testing/python/py23.bzl | Python | 0.000066 | @@ -0,0 +1,370 @@
+%22%22%22Macro to generate python 2 and 3 binaries.%22%22%22%0A%0Adef py23_binary(name, **kwargs):%0A %22%22%22Generates python 2 and 3 binaries. Accepts any py_binary arguments.%22%22%22%0A native.py_binary(%0A name = name + %222%22,%0A python_version = %22PY2%22,%0A **kwargs%0A )%0A%0A native.py_binary(%0A name = name + %223%22,%0A python_version = %22PY3%22,%0A **kwargs%0A )%0A
|
|
596f432eb7d4b3fa5d1bf5dec33cc882546e8233 | Add a script to convert a GRLevelX colortable file to a dict data structure (and optionally boundaries for norm) for use with Matplotlib. | trunk/metpy/vis/util/gr2_to_mpl_colortable.py | trunk/metpy/vis/util/gr2_to_mpl_colortable.py | Python | 0.000006 | @@ -0,0 +1,2317 @@
+#!/usr/bin/env python%0A# This script is used to convert colortables from GRLevelX to data for a%0A# matplotlib colormap%0Aimport sys%0Afrom optparse import OptionParser%0A%0A#Set up command line options%0Aopt_parser = OptionParser(usage=%22usage: %25prog %5Boptions%5D colortablefile%22)%0Aopt_parser.add_option(%22-s%22, %22--scale%22, action=%22store_true%22, dest=%22scale%22,%0A help=%22Scale size of colortable entries by thresholds in file.%22)%0A%0Aopts,args = opt_parser.parse_args()%0Aif not args:%0A print %22You must pass the colortable file as the commandline argument.%22%0A opt_parser.print_help()%0A sys.exit(-1)%0A%0Afname = args%5B0%5D%0AscaleTable = opts.scale%0A%0Acolors = %5B%5D%0Athresholds = %5B%5D%0A#Initial color should end up not used by MPL%0Aprev = %5B0., 0., 0.%5D%0Afor line in open(fname, 'r'):%0A if line.startswith(%22Color:%22):%0A # This ignores the word %22Color:%22 and the threshold%0A # and converts the rest to float%0A parts = line.split()%0A thresholds.append(float(parts%5B1%5D))%0A%0A color_info = %5Bfloat(x)/255. for x in parts%5B2:%5D%5D%0A if not prev:%0A prev = info%5B:3%5D%0A%0A colors.append(zip(prev, color_info%5B:3%5D))%0A%0A prev = color_info%5B3:%5D%0A%0A# Add the last half of the last line, if necessary%0Aif prev:%0A colors.append(zip(prev,prev))%0A%0Acolordict = dict(red=%5B%5D, green=%5B%5D, blue=%5B%5D)%0Anum_entries = float(len(colors) - 1)%0Aoffset = min(thresholds)%0Ascale = 1. / (max(thresholds) - offset)%0Afor i,(t,(r,g,b)) in enumerate(zip(thresholds, colors)):%0A if scaleTable:%0A norm = (t - offset) * scale%0A else:%0A norm = i / num_entries%0A%0A colordict%5B'red'%5D.append((norm,) + r)%0A colordict%5B'green'%5D.append((norm,) + g)%0A colordict%5B'blue'%5D.append((norm,) + b)%0A%0A# Output as code that can be copied and pasted into a python script. Repr()%0A# would work here, but wouldn't be as human-readable.%0Aprint '%7B'%0Anum_colors = len(colordict.keys())%0Afor n,k in enumerate(sorted(colordict.keys())):%0A print %22'%25s' :%22 %25 k%0A num = len(colordict%5Bk%5D)%0A for i,line in enumerate(colordict%5Bk%5D):%0A if i == 0:%0A print ' %5B%25s,' %25 repr(line)%0A elif i == num - 1:%0A if n == num_colors - 1:%0A print ' %25s%5D' %25 repr(line)%0A else:%0A print ' %25s%5D,' %25 repr(line)%0A else:%0A print %22 %25s,%22 %25 repr(line)%0Aprint '%7D'%0A%0Aif not scaleTable:%0A print repr(thresholds)%0A
|
|
a041c683475f78d6101fe1741a561a6c00492007 | add pautils, to host various utility functions like loading the P2TH keys into the local or remote node over JSON-RPC. | pypeerassets/pautils.py | pypeerassets/pautils.py | Python | 0 | @@ -0,0 +1,1374 @@
+%0A'''miscellaneous utilities.'''%0A%0Adef testnet_or_mainnet(node):%0A '''check if local node is configured to testnet or mainnet'''%0A%0A q = node.getinfo()%0A%0A if q%5B%22testnet%22%5D is True:%0A return %22testnet%22%0A else:%0A return %22mainnet%22%0A%0Adef load_p2th_privkeys_into_node(node):%0A%0A if testnet_or_mainnet(node) is %22testnet%22:%0A assert testnet_PAPROD_addr in node.getaddressbyaccount()%0A try:%0A node.importprivkey(testnet_PAPROD)%0A assert testnet_PAPROD_addr in node.getaddressbyaccount()%0A except Exception:%0A return %7B%22error%22: %22Loading P2TH privkey failed.%22%7D%0A else:%0A try:%0A node.importprivkey(mainnet_PAPROD)%0A assert mainnet_PAPROD_addr in node.getaddressbyaccount()%0A except Exception:%0A return %7B%22error%22: %22Loading P2TH privkey failed.%22%7D%0A%0Adef load_test_p2th_privkeys_into_node(node):%0A%0A if testnet_or_mainnet(node) is %22testnet%22:%0A try:%0A node.importprivkey(testnet_PATEST)%0A assert testnet_PATEST_addr in node.getaddressbyaccount()%0A except Exception:%0A return %7B%22error%22: %22Loading P2TH privkey failed.%22%7D%0A else:%0A try:%0A node.importprivkey(mainnet_PATEST)%0A assert mainnet_PATEST_addr in node.getaddressbyaccount()%0A except Exception:%0A return %7B%22error%22: %22Loading P2TH privkey failed.%22%7D%0A%0A
|
|
7012a90cd1468da95c8939a0f0c1193766595ae8 | Add event spooler module | pytest_watch/spooler.py | pytest_watch/spooler.py | Python | 0 | @@ -0,0 +1,1073 @@
+# -*- coding: utf-8%0A%0Afrom multiprocessing import Queue, Process, Event%0A%0A%0Aclass Timer(Process):%0A def __init__(self, interval, function, args=%5B%5D, kwargs=%7B%7D):%0A super(Timer, self).__init__()%0A self.interval = interval%0A self.function = function%0A self.args = args%0A self.kwargs = kwargs%0A self.finished = Event()%0A%0A def cancel(self):%0A self.finished.set()%0A%0A def run(self):%0A self.finished.wait(self.interval)%0A if not self.finished.is_set():%0A self.function(*self.args, **self.kwargs)%0A self.finished.set()%0A%0A%0Aclass EventSpooler(object):%0A def __init__(self, cooldown, callback):%0A self.cooldown = cooldown%0A self.callback = callback%0A self.inbox = Queue()%0A self.outbox = Queue()%0A%0A def enqueue(self, event):%0A self.inbox.put(event)%0A Timer(self.cooldown, self.process).start()%0A%0A def process(self):%0A self.outbox.put(self.inbox.get())%0A if self.inbox.empty():%0A self.callback(%5Bself.outbox.get() for _ in range(self.outbox.qsize())%5D)%0A
|
|
0ff9373de6e11d7040b6b289cb3239a9ee9a924d | Fix haproxy agent unit test to be runnable alone by tox | neutron/tests/unit/services/loadbalancer/drivers/haproxy/test_agent.py | neutron/tests/unit/services/loadbalancer/drivers/haproxy/test_agent.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import contextlib
import mock
from oslo.config import cfg
from neutron.services.loadbalancer.drivers.haproxy import agent
from neutron.tests import base
class TestLbaasService(base.BaseTestCase):
def setUp(self):
super(TestLbaasService, self).setUp()
self.addCleanup(cfg.CONF.reset)
cfg.CONF.register_opts(agent.OPTS)
def test_start(self):
with mock.patch.object(
agent.rpc_service.Service, 'start'
) as mock_start:
mgr = mock.Mock()
agent_service = agent.LbaasAgentService('host', 'topic', mgr)
agent_service.start()
self.assertTrue(mock_start.called)
def test_main(self):
logging_str = 'neutron.agent.common.config.setup_logging'
with contextlib.nested(
mock.patch(logging_str),
mock.patch.object(agent.service, 'launch'),
mock.patch.object(agent, 'eventlet'),
mock.patch('sys.argv'),
mock.patch.object(agent.manager, 'LbaasAgentManager')
) as (mock_logging, mock_launch, mock_eventlet, sys_argv, mgr_cls):
agent.main()
self.assertTrue(mock_eventlet.monkey_patch.called)
mock_launch.assert_called_once_with(mock.ANY)
| Python | 0.000001 | @@ -1014,52 +1014,8 @@
t)%0A%0A
- cfg.CONF.register_opts(agent.OPTS)%0A%0A
@@ -1695,16 +1695,74 @@
anager')
+,%0A mock.patch.object(cfg.CONF, 'register_opts')
%0A
@@ -1827,16 +1827,20 @@
mgr_cls
+, ro
):%0A
|
e4ef868660878e1ad1749be915b88ab6fea929b5 | Add asyncio example | examples/async.py | examples/async.py | Python | 0.000001 | @@ -0,0 +1,625 @@
+%22%22%22%0Aw1thermsensor%0A~~~~~~~~~~~~~%0A%0AA Python package and CLI tool to work with w1 temperature sensors.%0A%0A:copyright: (c) 2020 by Timo Furrer %[email protected]%3E%0A:license: MIT, see LICENSE for more details.%0A%22%22%22%0A%0Aimport asyncio%0A%0Afrom w1thermsensor import AsyncW1ThermSensor%0A%0A%0Aasync def main():%0A # initialize sensor with first available sensor%0A sensor = AsyncW1ThermSensor()%0A%0A # continuously read temperature from sensor%0A while True:%0A temperature = await sensor.get_temperature()%0A print(f%22Temperature: %7Btemperature:.3f%7D%22)%0A await asyncio.sleep(1)%0A%0A%0Aif __name__ == %22__main__%22:%0A asyncio.run(main())%0A
|
|
ef4aeb1e16245c76e7d10091b6fc8b0b289d635f | Split IP validation to a module | validateIp.py | validateIp.py | Python | 0 | @@ -0,0 +1,331 @@
+#!/usr/bin/env python%0Aimport socket%0A%0Adef parse(ip):%0A # parse and validate ip address%0A try:%0A socket.inet_pton(socket.AF_INET,ip)%0A return %22valid%22%0A except socket.error, e:%0A try:%0A socket.inet_pton(socket.AF_INET6,ip)%0A return %22valid%22%0A except:%0A print %22ERROR: %25s%22 %25 e%0A
|
|
d0ce887da3043106da1b875a46b6fe1bc0ce7145 | Create 0018_auto_20201109_0655.py | herders/migrations/0018_auto_20201109_0655.py | herders/migrations/0018_auto_20201109_0655.py | Python | 0.000008 | @@ -0,0 +1,579 @@
+# Generated by Django 2.2.17 on 2020-11-09 14:55%0A%0Aimport django.contrib.postgres.fields%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('herders', '0017_auto_20200808_1642'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='artifactinstance',%0A name='effects_value',%0A field=django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(blank=True, null=True), blank=True, default=list, help_text='Bonus value of this effect', size=4),%0A ),%0A %5D%0A
|
|
f9db946f9b067495d2785d46efe447371e22eb26 | Add tex2pdf function | docstamp/pdflatex.py | docstamp/pdflatex.py | Python | 0.000001 | @@ -0,0 +1,2192 @@
+# coding=utf-8%0A# -------------------------------------------------------------------------------%0A# Author: Alexandre Manhaes Savio %[email protected]%3E%0A# Grupo de Inteligencia Computational %3Cwww.ehu.es/ccwintco%3E%0A# Universidad del Pais Vasco UPV/EHU%0A#%0A# 2015, Alexandre Manhaes Savio%0A# Use this at your own risk!%0A# -------------------------------------------------------------------------------%0A%0Aimport os%0Aimport shutil%0Aimport os.path as op%0Afrom glob import glob%0A%0Afrom .commands import call_command%0Afrom .filenames import remove_ext%0A%0A%0Adef tex2pdf(tex_file, output_file=None, output_format='pdf'):%0A %22%22%22 Call PDFLatex to convert TeX files to PDF.%0A%0A Parameters%0A ----------%0A tex_file: str%0A Path to the input LateX file.%0A%0A output_file: str%0A Path to the output PDF file.%0A If None, will use the same output directory as the tex_file.%0A%0A output_format: str%0A Output file format. Choices: 'pdf' or 'dvi'. Default: 'pdf'%0A%0A Returns%0A -------%0A return_value%0A PDFLatex command call return value.%0A %22%22%22%0A if not op.exists(tex_file):%0A raise IOError('Could not find file %7B%7D.'.format(tex_file))%0A%0A if output_format != 'pdf' and output_format != 'dvi':%0A raise ValueError(%22Invalid output format given %7B%7D. Can only accept 'pdf' or 'dvi'.%22.format(output_format))%0A%0A cmd_name = 'pdflatex'%0A%0A args_strings = ' '%0A if output_file is not None:%0A args_strings += '-output-directory=%7B%7D '.format(op.abspath(op.dirname(output_file)))%0A%0A if output_file:%0A args_strings += '-output-format=%7B%7D '.format(output_format)%0A result_dir = op.dirname(output_file)%0A else:%0A result_dir = op.dirname(tex_file)%0A%0A args_strings += tex_file%0A%0A ret = call_command(cmd_name, args_strings.split())%0A%0A result_file = op.join(result_dir, remove_ext(op.basename(tex_file)) + '.' + output_format)%0A if op.exists(result_file):%0A shutil.move(result_file, output_file)%0A else:%0A raise IOError('Could not find PDFLatex result file.')%0A%0A %5Bos.remove(f) for f in glob(op.join(result_dir, '*.aux'))%5D%0A %5Bos.remove(f) for f in glob(op.join(result_dir, '*.log'))%5D%0A%0A return ret%0A
|
|
96a9d00ea20dee3ffd9114b4a094868ed7ae2413 | add createmask.py | createMask.py | createMask.py | Python | 0.000002 | @@ -0,0 +1,1452 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A'''%0Apython createmask.py %5Bvoc-like dir%5D%0A'''%0Aimport os, sys%0Aimport numpy as np%0Aimport cv2%0A%0Adef parsexml(xmlfile):%0A tree = ET.parse(xmlfile)%0A width = int(tree.find('size').find('width').text)%0A height = int(tree.find('size').find('height').text)%0A objs = tree.findall('object')%0A for index, obj in enumerate(objs):%0A name = obj.find('name').text.lower()%0A bbox = obj.find('bndbox')%0A x0 = int(bbox.find('x').text)%0A y0 = int(bbox.find('y').text)%0A a = int(bbox.find('a').text)%0A b = int(bbox.find('b').text)%0A angle = int(bbox.find('angle').text)%0A break%0A return width, height, x, y, a, b, angle%0A%0A%0Adef createmask(argv):%0A annodir = os.path.join(argv%5B0%5D, 'Annotations')%0A maskdir = os.path.join(argv%5B0%5D, 'JPEGImagesMask')%0A if not os.path.exists(maskdir):%0A os.makedirs(maskdir)%0A annofiles = sorted(%5Bos.path.join(annodir, x) for x in sorted(os.listdir(annodir)) if x.endswith('.xml')%5D)%0A %0A for xmlfile in annofiles:%0A w, h, x, y, a, b, angle = parsexml(xmlfile)%0A img = np.zeros(shape=(h, w, 1))%0A delta = 4%0A cv2.ellipse(img, (x, y), (a-delta, b-delta), anble, 0, 360, 255, -1)%0A cv2.imshow(img)%0A cv2.waitKey(0)%0A return%0A%0A%0A%0A%0Adef main():%0A import sys%0A if len(sys.argv) != 2:%0A print(__doc__)%0A return%0A createmask(sys.argv)%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
8ba179518a5901a250fdc7f864f79667c319cf2a | Enhance test | utest/api/test_exposed_api.py | utest/api/test_exposed_api.py | import unittest
from os.path import join
from robot import api, model, parsing, reporting, result, running
from robot.api import parsing as api_parsing
from robot.utils.asserts import assert_equal, assert_true
class TestExposedApi(unittest.TestCase):
def test_execution_result(self):
assert_equal(api.ExecutionResult, result.ExecutionResult)
def test_test_suite(self):
assert_equal(api.TestSuite, running.TestSuite)
def test_result_writer(self):
assert_equal(api.ResultWriter, reporting.ResultWriter)
def test_visitors(self):
assert_equal(api.SuiteVisitor, model.SuiteVisitor)
assert_equal(api.ResultVisitor, result.ResultVisitor)
def test_deprecated_parsing(self):
assert_equal(api.get_model, parsing.get_model)
assert_equal(api.get_resource_model, parsing.get_resource_model)
assert_equal(api.get_tokens, parsing.get_tokens)
assert_equal(api.get_resource_tokens, parsing.get_resource_tokens)
assert_equal(api.Token, parsing.Token)
def test_parsing_getters(self):
assert_equal(api_parsing.get_model, parsing.get_model)
assert_equal(api_parsing.get_resource_model, parsing.get_resource_model)
assert_equal(api_parsing.get_tokens, parsing.get_tokens)
assert_equal(api_parsing.get_resource_tokens, parsing.get_resource_tokens)
def test_parsing_token(self):
assert_equal(api_parsing.Token, parsing.Token)
def test_parsing_model_statements(self):
for cls in parsing.model.Statement._statement_handlers.values():
assert_equal(getattr(api_parsing, cls.__name__), cls)
assert_true(not hasattr(api_parsing, 'Statement'))
def test_parsing_model_blocks(self):
for name in ('File', 'SettingSection', 'VariableSection', 'TestCaseSection',
'KeywordSection', 'CommentSection', 'TestCase', 'Keyword', 'For',
'If'):
assert_equal(getattr(api_parsing, name), getattr(parsing.model, name))
assert_true(not hasattr(api_parsing, 'Block'))
def test_parsing_visitors(self):
assert_equal(api_parsing.ModelVisitor, parsing.ModelVisitor)
assert_equal(api_parsing.ModelTransformer, parsing.ModelTransformer)
class TestModelObjects(unittest.TestCase):
"""These model objects are part of the public API.
They are only seldom needed directly and thus not exposed via the robot.api
package. Tests just validate they are not removed accidentally.
"""
def test_running_objects(self):
assert_true(running.TestSuite)
assert_true(running.TestCase)
assert_true(running.Keyword)
def test_result_objects(self):
assert_true(result.TestSuite)
assert_true(result.TestCase)
assert_true(result.Keyword)
class TestTestSuiteBuilder(unittest.TestCase):
# This list has paths like `/path/file.py/../file.robot` on purpose.
# They don't work unless normalized.
sources = [join(__file__, '../../../atest/testdata/misc', name)
for name in ('pass_and_fail.robot', 'normal.robot')]
def test_create_with_datasources_as_list(self):
suite = api.TestSuiteBuilder().build(*self.sources)
assert_equal(suite.name, 'Pass And Fail & Normal')
def test_create_with_datasource_as_string(self):
suite = api.TestSuiteBuilder().build(self.sources[0])
assert_equal(suite.name, 'Pass And Fail')
if __name__ == '__main__':
unittest.main()
| Python | 0.000001 | @@ -1938,16 +1938,32 @@
'If'
+, 'Try', 'While'
):%0A
|
45bc2562d3afd3674929e56425b597b54e74ba24 | Create Legends.py | Legends.py | Legends.py | Python | 0 | @@ -0,0 +1,315 @@
+#Draws Legends, Titles and Labels using matplotlib%0A%0Aimport matplotlib.pyplot as plt%0A%0Ax = %5B1, 2, 3%5D%0Ay = %5B5, 7, 4%5D%0Ax1 = %5B1, 2, 3%5D%0Ay1 = %5B10, 14, 12%5D%0A%0Aplt.plot(x, y, label='First Line')%0Aplt.plot(x1, y1, label='Second Line')%0Aplt.xlabel('X Axis')%0Aplt.ylabel('Y Axis')%0Aplt.title('This is a Title')%0Aplt.legend()%0Aplt.show()%0A
|
|
29090add692e6c32a75e123be6cd201949efd6ce | Add elasticsearch-administer | scripts/elasticsearch-administer.py | scripts/elasticsearch-administer.py | Python | 0 | @@ -0,0 +1,2496 @@
+%22%22%22%0AUtilities for administering elasticsearch%0A%22%22%22%0Afrom argparse import ArgumentParser, RawDescriptionHelpFormatter%0Afrom collections import namedtuple%0Aimport json%0Aimport sys%0A%0Afrom elasticsearch import Elasticsearch%0Afrom elasticsearch.client import ClusterClient, NodesClient, CatClient%0A%0A%0Adef pprint(data):%0A print json.dumps(data, indent=4)%0A%0A%0Adef confirm(msg):%0A if raw_input(msg + %22%5Cn(y/n)%22) != 'y':%0A sys.exit()%0A%0A%0ANode = namedtuple(%22Node%22, %22name node_id docs%22)%0A%0A%0Adef get_nodes_info(es):%0A nc = NodesClient(es)%0A stats = nc.stats(metric=%22indices%22, index_metric=%22docs%22)%0A return %5BNode(info%5B'name'%5D, node_id, info%5B'indices'%5D%5B'docs'%5D)%0A for node_id, info in stats%5B'nodes'%5D.items()%5D%0A%0A%0Adef cluster_status(es):%0A cluster = ClusterClient(es)%0A print %22%5CnCLUSTER HEALTH%22%0A pprint(cluster.health())%0A print %22%5CnPENDING TASKS%22%0A pprint(cluster.pending_tasks())%0A print %22%5CnNODES%22%0A for node in get_nodes_info(es):%0A print node.name, node.docs%0A print %22%5CnSHARD ALLOCATION%22%0A cat = CatClient(es)%0A print cat.allocation(v=True)%0A%0A%0Adef shard_status(es):%0A cat = CatClient(es)%0A print cat.shards(v=True)%0A%0A%0Adef cluster_settings(es):%0A cluster = ClusterClient(es)%0A pprint(cluster.get_settings())%0A%0A%0Adef decommission_node(es):%0A cluster = ClusterClient(es)%0A print %22The nodes are:%22%0A nodes = get_nodes_info(es)%0A for node in nodes:%0A print node.name, node.docs%0A confirm(%22Are you sure you want to decommission a node?%22)%0A node_name = raw_input(%22Which one would you like to decommission?%5Cnname:%22)%0A names = %5Bnode.name for node in nodes%5D%0A if node_name not in names:%0A print %22You must enter one of %7B%7D%22.format(%22, %22.join(names))%0A return%0A confirm(%22This will remove all shards from %7B%7D, okay?%22.format(node_name))%0A cmd = %7B%22transient%22: %7B%22cluster.routing.allocation.exclude._name%22: node_name%7D%7D%0A pprint(cluster.put_settings(cmd))%0A print %22The node is now being decommissioned.%22%0A%0A%0Acommands = %7B%0A 'cluster_status': cluster_status,%0A 'cluster_settings': cluster_settings,%0A 'decommission_node': decommission_node,%0A 'shard_status': shard_status,%0A%7D%0A%0A%0Adef main():%0A parser = ArgumentParser(description=__doc__, formatter_class=RawDescriptionHelpFormatter)%0A parser.add_argument('host_url')%0A parser.add_argument('command', choices=commands.keys())%0A args = parser.parse_args()%0A es = Elasticsearch(%5B%7B'host': args.host_url, 'port': 9200%7D%5D)%0A commands%5Bargs.command%5D(es)%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
eee700f46e1edee1133722ee94992abda1ad6a4c | Add GYP build for zlib | deps/zlib.gyp | deps/zlib.gyp | Python | 0 | @@ -0,0 +1,1145 @@
+%7B%0A 'target_defaults': %7B%0A 'conditions': %5B%0A %5B'OS != %22win%22', %7B%0A 'defines': %5B%0A '_LARGEFILE_SOURCE',%0A '_FILE_OFFSET_BITS=64',%0A '_GNU_SOURCE',%0A 'HAVE_SYS_TYPES_H',%0A 'HAVE_STDINT_H',%0A 'HAVE_STDDEF_H',%0A %5D,%0A %7D,%0A %7B # windows%0A 'defines': %5B%0A '_CRT_SECURE_NO_DEPRECATE',%0A '_CRT_NONSTDC_NO_DEPRECATE',%0A %5D,%0A %7D,%0A %5D,%0A %5D,%0A %7D,%0A%0A 'targets': %5B%0A %7B%0A 'target_name': 'zlib',%0A 'type': 'static_library',%0A 'sources': %5B%0A 'zlib/adler32.c',%0A 'zlib/compress.c',%0A 'zlib/crc32.c',%0A 'zlib/deflate.c',%0A 'zlib/gzclose.c',%0A 'zlib/gzlib.c',%0A 'zlib/gzread.c',%0A 'zlib/gzwrite.c',%0A 'zlib/inflate.c',%0A 'zlib/infback.c',%0A 'zlib/inftrees.c',%0A 'zlib/inffast.c',%0A 'zlib/trees.c',%0A 'zlib/uncompr.c',%0A 'zlib/zutil.c',%0A 'zlib/win32/zlib1.rc'%0A %5D,%0A 'include_dirs': %5B%0A 'zlib',%0A %5D,%0A 'direct_dependent_settings': %7B%0A 'include_dirs': %5B%0A 'zlib',%0A %5D,%0A %7D,%0A %7D%0A %5D,%0A%7D%0A%0A
|
|
c46962f8055dc1c9d45a35b21afaac363ec3eb46 | add home view | simple_media_service/views/pages.py | simple_media_service/views/pages.py | Python | 0 | @@ -0,0 +1,794 @@
+#%0A# Copyright (c) Elliot Peele %[email protected]%3E%0A#%0A# This program is distributed under the terms of the MIT License as found%0A# in a file called LICENSE. If it is not present, the license%0A# is always available at http://www.opensource.org/licenses/mit-license.php.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# without any warrenty; without even the implied warranty of merchantability%0A# or fitness for a particular purpose. See the MIT License for full details.%0A#%0A%0Afrom pyramid.response import Response%0A%0Afrom prism_core.views import lift%0Afrom prism_core.views import BaseView%0Afrom prism_core.views import view_defaults%0A%0A@lift()%0A@view_defaults(route_name='home', renderer='text')%0Aclass Home(BaseView):%0A def _get(self):%0A return Response('UI goes here')%0A
|
|
597e9d6f3d5804d403e3cd58a380ea882cbd5267 | Add tracker init support | home/iot/tracker.py | home/iot/tracker.py | Python | 0 | @@ -0,0 +1,1310 @@
+import functools%0Afrom flask import abort, request%0Afrom flask_login import login_required%0Afrom flask_socketio import join_room, emit%0A%0Afrom home.core.models import get_device%0Afrom home.settings import DEBUG%0Afrom home.web.utils import api_auth_required%0Afrom home.web.web import socketio, app%0A%0A%0Aclass TrackedDevice:%0A def __init__(self, id_: str):%0A self.id = id_%0A self.sid = None%0A%0A def cmd(self, cmd: str):%0A socketio.emit('cmd', %7B'cmd': cmd%7D, namespace='/tracker', room=%22asdf%22)%0A%0A def register(self):%0A socketio.emit('cmd', %7B'cmd': 'ls'%7D, namespace='/tracker', room=%22asdf%22)%0A%0A%[email protected]('/api/tracker', methods=%5B'POST'%5D)%0A@api_auth_required%0Adef commands(client):%0A command = request.form.get('command')%0A if command == 'exec':%0A socketio.emit('cmd', %7B'cmd': request.form.get('cmd')%7D, namespace='/tracker', room=%22asdf%22)%0A return '', 204%0A%0A%0Adef ws_android_auth(f):%0A @functools.wraps(f)%0A def wrapped(*args, **kwargs):%0A t = get_device('android')%0A t.dev.sid = request.sid%0A if DEBUG:%0A return f(*args, **kwargs)%0A abort(403)%0A%0A return wrapped%0A%0A%[email protected]('register', namespace='/tracker')%0A@ws_android_auth%0Adef register(data):%0A print(data%5B'id'%5D, %22tried to register%22)%0A join_room(%22asdf%22)%0A emit('registered', 'registered')%0A
|
|
860f6b612c39bb5b569b0fae8279134bca264e70 | Add 2017-faust/toilet | 2017-faust/toilet/toilet.py | 2017-faust/toilet/toilet.py | Python | 0.000001 | @@ -0,0 +1,1250 @@
+#!/usr/bin/env python2%0A# -*- coding: utf-8 -*-%0Aimport re%0A%0Aimport dateutil.parser%0A%0Afrom pwn import *%0A%0Acontext(arch='amd64', os='linux')%0A%0A%0Adef get_latest_shas(io):%0A io.sendline('8')%0A io.recvuntil('#################################################################################################')%0A logs = io.recvuntil('#################################################################################################')%0A shas = re.findall('#==== (.%7B64%7D) ====', logs)%5B1:%5D%0A # filter out shas older than 15 minutes%0A times = %5Bdateutil.parser.parse(time) for time in re.findall('==== (........) ====', logs)%5B1:%5D%5D%0A youngest_time = times%5B0%5D%0A return filter(lambda (_, time): (youngest_time - time).seconds %3C= (15 * 60), zip(shas, times))%0A%0A%0Awith process('./toilet') as io:%0A latest_shas = get_latest_shas(io)%0Afor sha, _ in latest_shas:%0A with process('./toilet') as io:%0A io.sendline('1')%0A io.sendline(fit(length=64))%0A io.sendline('5')%0A io.send('%5Cn')%0A io.sendline(sha)%0A io.sendline('7')%0A io.sendline('4')%0A%0A io.recvuntil('Name: ', timeout=3)%0A flag = io.recvregex(r'FAUST_%5BA-Za-z0-9/%5C+%5D%7B32%7D', exact=True, timeout=3)%0A if flag:%0A print flag%0A break%0A
|
|
571334df8e26333f34873a3dcb84441946e6c64c | Bump version number to 0.12.2 | flask/__init__.py | flask/__init__.py | # -*- coding: utf-8 -*-
"""
flask
~~~~~
A microframework based on Werkzeug. It's extensively documented
and follows best practice patterns.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
__version__ = '0.12.2-dev'
# utilities we import from Werkzeug and Jinja2 that are unused
# in the module but are exported as public interface.
from werkzeug.exceptions import abort
from werkzeug.utils import redirect
from jinja2 import Markup, escape
from .app import Flask, Request, Response
from .config import Config
from .helpers import url_for, flash, send_file, send_from_directory, \
get_flashed_messages, get_template_attribute, make_response, safe_join, \
stream_with_context
from .globals import current_app, g, request, session, _request_ctx_stack, \
_app_ctx_stack
from .ctx import has_request_context, has_app_context, \
after_this_request, copy_current_request_context
from .blueprints import Blueprint
from .templating import render_template, render_template_string
# the signals
from .signals import signals_available, template_rendered, request_started, \
request_finished, got_request_exception, request_tearing_down, \
appcontext_tearing_down, appcontext_pushed, \
appcontext_popped, message_flashed, before_render_template
# We're not exposing the actual json module but a convenient wrapper around
# it.
from . import json
# This was the only thing that Flask used to export at one point and it had
# a more generic name.
jsonify = json.jsonify
# backwards compat, goes away in 1.0
from .sessions import SecureCookieSession as Session
json_available = True
| Python | 0.000153 | @@ -275,12 +275,8 @@
12.2
--dev
'%0A%0A#
|
a5188b4a172e17ac755ba4ce8d8890c7b211eb74 | Create ex11.py | learningpythonthehardway/ex11.py | learningpythonthehardway/ex11.py | Python | 0.000001 | @@ -0,0 +1,352 @@
+print %22How old are you brother ?%22%0Aage = raw_input() # will get some text ;def%0Aprint %22How tall are you ?%22%0Aheight = raw_input()%0Aprint %22do you eat enough ?%22%0Aeat = raw_input()%0A%0Aprint %22So, you're a %25r years old and %25r tall guy that says : '%25r' to the food, right ?%22 %25 (age, height, eat)%0A%0A# Nb: to get a number from the return stuff, 'x = int(raw_input())'%0A
|
|
e4bc9684c10a360ad8df32b2c6bfb8f013ea4b77 | Add Composite.py | Python/Composite/Composite.py | Python/Composite/Composite.py | Python | 0.000001 | @@ -0,0 +1,1814 @@
+#! /usr/bin/python%0A# -*- coding: utf-8 -*-%0A'''%0AComposite Pattern%0AAuthor: Kei Nakata%0AData: Oct.10.2014%0A'''%0Aimport abc%0Aimport exceptions%0A%0Aclass Component(object):%0A __metaclass__ = abc.ABCMeta%0A%0A @abc.abstractmethod%0A def __init__(self, name):%0A pass%0A%0A @abc.abstractmethod%0A def add(self, child):%0A pass%0A%0A @abc.abstractmethod%0A def remove(self, index):%0A pass%0A%0A @abc.abstractmethod%0A def getChild(self, index):%0A pass%0A%0A @abc.abstractmethod%0A def show(self):%0A pass%0A%0Aclass Composite(Component):%0A%0A def __init__(self, name):%0A self.children = %5B%5D%0A%0A def add(self, child):%0A self.children.append(child)%0A%0A def remove(self, index):%0A del self.children%5Bindex%5D%0A%0A def getChild(self, index):%0A return self.children%5Bindex%5D%0A%0A def show(self):%0A for child in self.children:%0A child.show()%0A%0Aclass Leaf(Component):%0A count = 0%0A def __init__(self, name):%0A self.name = name%0A Leaf.count = Leaf.count + 1%0A self.number = Leaf.count%0A def add(self):%0A raise exceptions.RuntimeError(%22can not add item to leaf%22)%0A%0A def remove(self):%0A raise exceptions.RuntimeError(%22can not remove item through leaf class%22)%0A%0A def getChild(self):%0A raise exceptions.RuntimeError(%22leaf does not have child%22)%0A%0A def show(self):%0A print self.number, self.name%0A%0Aif __name__ == '__main__':%0A container = Composite('box')%0A small_container = Composite('small box')%0A small_container.add(Leaf('chicken'))%0A small_container.add(Leaf('beaf'))%0A small_container.add(Leaf('pork'))%0A container.add(Leaf('apple'))%0A container.add(Leaf('orange'))%0A container.add(Leaf('pear'))%0A container.add(small_container)%0A container.show()%0A print%0A container.remove(1)%0A container.show()%0A
|
|
ac3b5be9a6f71afb402db2f293e1198bce973440 | Create the login server using Flask | flask/login.py | flask/login.py | Python | 0.000001 | @@ -0,0 +1,1052 @@
+from abc import ABCMeta, ABC, abstractmethod, abstractproperty%0Afrom flask import Flask, app%0Aimport flask%0Afrom flask_login import LoginManager%0A%0Aclass User(ABC):%0A%0A authenticated = False%0A active = False%0A anonymous = False%0A id = None%0A%0A def is_authenticated(self):%0A return self.authenticated%0A%0A def is_active(self):%0A return self.active%0A%0A def is_anonymous(self):%0A return self.anonymous%0A%0A def get_id(self):%0A return self.id%0A%0Alogin_manager = LoginManager()%0A@login_manager.user_loader%0Adef load_user(user_id):%0A pass #TODO: unimplemented for the moment%0A%0A%[email protected]('/login', methods=%5B'GET', 'POST'%5D)%0Adef login():%0A form = LoginForm()%0A if form.validate_on_submit():%0A login_user(user)%0A flask.flash('Logged in successfully.')%0A%0A next = flask.request.args.get('next')%0A if not is_safe_url(next): #TODO: unimplemented%0A return flask.abort(400)%0A%0A return flask.redirect(next or flask.url_for('index'))%0A%0A return flask.render_template('htdoc/login.html', form=form)
|
|
e15f59f29907d740d0a0f8dab46d77aa833ef802 | fix "peru -v" | peru/main.py | peru/main.py | #! /usr/bin/env python3
import os
import sys
from . import runtime
from . import module
def main():
peru_file_name = os.getenv("PERU_FILE_NAME") or "peru.yaml"
if not os.path.isfile(peru_file_name):
print(peru_file_name + " not found.")
sys.exit(1)
r = runtime.Runtime()
m = module.parse(r, peru_file_name)
if len(sys.argv) > 1:
target = sys.argv[1].split('.')
else:
target = []
m.build(r, target)
| Python | 0 | @@ -343,23 +343,108 @@
-if len(sys.argv
+flags = %7B%22-v%22, %22--verbose%22%7D%0A args = %5Barg for arg in sys.argv if arg not in flags%5D%0A if len(args
) %3E
@@ -463,24 +463,20 @@
arget =
-sys.
arg
-v
+s
%5B1%5D.spli
|
0d2c04790fb6c97b37f6e0700bb0162796e3dc4c | Add unit tests for AmountTaxScale serialization | tests/web_api/test_scale_serialization.py | tests/web_api/test_scale_serialization.py | Python | 0 | @@ -0,0 +1,702 @@
+# -*- coding: utf-8 -*-%0A%0Afrom openfisca_web_api.loader.parameters import walk_node%0Afrom openfisca_core.parameters import ParameterNode, Scale%0A%0Adef test_amount_scale():%0A parameters = %5B%5D%0A metadata = %7B'location':'foo', 'version':'1', 'repository_url':'foo'%7D%0A root_node = ParameterNode(data = %7B%7D)%0A amount_scale_data = %7B'brackets':%5B%7B'amount':%7B'2014-01-01':%7B'value':0%7D%7D,'threshold':%7B'2014-01-01':%7B'value':1%7D%7D%7D%5D%7D%0A scale = Scale('scale', amount_scale_data, 'foo')%0A root_node.children%5B'scale'%5D = scale%0A walk_node(root_node, parameters, %5B%5D, metadata)%0A assert parameters == %5B%7B'description': None, 'id': 'scale', 'metadata': %7B%7D, 'source': 'foo/blob/1', 'brackets': %7B'2014-01-01': %7B1: 0%7D%7D%7D%5D%0A
|
|
a94db58105db598baaea41bb5fab0479e3203664 | Fix copying of Widevine CDM binaries on official Mac builds. | third_party/widevine/cdm/widevine_cdm.gyp | third_party/widevine/cdm/widevine_cdm.gyp | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'widevine_cdm_version_h_file%': 'widevine_cdm_version.h',
'widevine_cdm_binary_files%': [],
'conditions': [
[ 'branding == "Chrome"', {
'conditions': [
[ 'chromeos == 1', {
'widevine_cdm_version_h_file%':
'symbols/chromeos/<(target_arch)/widevine_cdm_version.h',
'widevine_cdm_binary_files%': [
'binaries/chromeos/<(target_arch)/libwidevinecdm.so',
],
}],
[ 'OS == "linux" and chromeos == 0', {
'widevine_cdm_version_h_file%':
'symbols/linux/<(target_arch)/widevine_cdm_version.h',
'widevine_cdm_binary_files%': [
'binaries/linux/<(target_arch)/libwidevinecdm.so',
],
}],
[ 'OS == "mac"', {
'widevine_cdm_version_h_file%':
'symbols/mac/<(target_arch)/widevine_cdm_version.h',
'widevine_cdm_binary_files%': [
'binaries/mac/<(target_arch)/libwidevinecdm.dylib',
],
}],
[ 'OS == "win"', {
'widevine_cdm_version_h_file%':
'symbols/win/<(target_arch)/widevine_cdm_version.h',
'widevine_cdm_binary_files%': [
'binaries/win/<(target_arch)/widevinecdm.dll',
'binaries/win/<(target_arch)/widevinecdm.dll.lib',
],
}],
],
}],
],
},
# Always provide a target, so we can put the logic about whether there's
# anything to be done in this file (instead of a higher-level .gyp file).
'targets': [
{
'target_name': 'widevinecdmadapter',
'type': 'none',
'conditions': [
[ 'branding == "Chrome" and enable_pepper_cdms==1', {
'dependencies': [
'<(DEPTH)/ppapi/ppapi.gyp:ppapi_cpp',
'widevine_cdm_version_h',
'widevine_cdm_binaries',
],
'sources': [
'<(DEPTH)/webkit/renderer/media/crypto/ppapi/cdm_wrapper.cc',
'<(DEPTH)/webkit/renderer/media/crypto/ppapi/cdm/content_decryption_module.h',
'<(DEPTH)/webkit/renderer/media/crypto/ppapi/linked_ptr.h',
],
'conditions': [
[ 'os_posix == 1 and OS != "mac"', {
'cflags': ['-fvisibility=hidden'],
'type': 'loadable_module',
# Allow the plugin wrapper to find the CDM in the same directory.
'ldflags': ['-Wl,-rpath=\$$ORIGIN'],
'libraries': [
# Copied by widevine_cdm_binaries.
'<(PRODUCT_DIR)/libwidevinecdm.so',
],
}],
[ 'OS == "win"', {
'type': 'shared_library',
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],
'libraries': [
# Copied by widevine_cdm_binaries.
'<(PRODUCT_DIR)/widevinecdm.dll.lib',
],
}],
[ 'OS == "mac" and target_arch == "ia32"', {
'type': 'loadable_module',
'product_extension': 'plugin',
'libraries': [
# Copied by widevine_cdm_binaries.
'<(PRODUCT_DIR)/libwidevinecdm.dylib',
],
'xcode_settings': {
'OTHER_LDFLAGS': [
# Not to strip important symbols by -Wl,-dead_strip.
'-Wl,-exported_symbol,_PPP_GetInterface',
'-Wl,-exported_symbol,_PPP_InitializeModule',
'-Wl,-exported_symbol,_PPP_ShutdownModule',
],
'DYLIB_INSTALL_NAME_BASE': '@loader_path',
},
}],
],
}],
],
},
{
'target_name': 'widevine_cdm_version_h',
'type': 'none',
'copies': [{
'destination': '<(SHARED_INTERMEDIATE_DIR)',
'files': [ '<(widevine_cdm_version_h_file)' ],
}],
},
{
'target_name': 'widevine_cdm_binaries',
'type': 'none',
'copies': [{
# TODO(ddorwin): Do we need a sub-directory? We either need a
# sub-directory or to rename manifest.json before we can copy it.
'destination': '<(PRODUCT_DIR)',
'files': [ '<@(widevine_cdm_binary_files)' ],
}],
},
],
}
| Python | 0.000002 | @@ -4250,32 +4250,180 @@
'type': 'none',%0A
+ 'conditions': %5B%0A %5B 'OS==%22mac%22', %7B%0A 'xcode_settings': %7B%0A 'COPY_PHASE_STRIP': 'NO',%0A %7D%0A %7D%5D,%0A %5D,%0A
'copies':
|
a04d5745257c16e127711fbded6899f8f226aeba | add html generator using pdoc3 | doc/py/gen.py | doc/py/gen.py | Python | 0 | @@ -0,0 +1,738 @@
+import os%0Aimport pdoc%0Aimport clingo%0Aimport clingo.ast%0Aimport re%0Actx = pdoc.Context()%0A%0Acmod = pdoc.Module(clingo, context=ctx)%0Aamod = pdoc.Module(clingo.ast, supermodule=cmod, context=ctx)%0A%0Acmod.doc%5B%22ast%22%5D = amod%0Apdoc.link_inheritance(ctx)%0A%0Adef replace(s):%0A s = s.replace('href=%22clingo.html', 'href=%22clingo/')%0A s = s.replace('href=%22../clingo.html', 'href=%22../')%0A s = s.replace('href=%22clingo/ast.html', 'href=%22ast/')%0A s = re.sub(r%22%5B'%5C%22%5Dhttps://cdnjs%5C.cloudflare%5C.com/.*/(%5B%5E/'%5C%22%5D+%5C.(css%7Cjs))%5B'%5C%22%5D%22, r%22'%5C2/%5C1'%22, s)%0A return s%0A%0Aos.makedirs(%22clingo/ast%22, exist_ok=True)%0Aopen(%22clingo/index.html%22, %22w%22).write(replace(cmod.html(external_links=True)))%0Aopen(%22clingo/ast/index.html%22, %22w%22).write(replace(amod.html(external_links=True)))%0A
|
|
e4efaa947533e6d63eb7518306e31386ec688c73 | write testing test | bioinformatics/tests/test_frequent_words.py | bioinformatics/tests/test_frequent_words.py | Python | 0.000095 | @@ -0,0 +1,141 @@
+def test_sanity_check_pass():%0A assert True%0A%0Adef test_sanity_check_fail():%0A assert False%0A%0Adef test_sanity_check_error():%0A assert 0/0%0A
|
|
aee6afe48bf4d2992c39a22d9e492377dcec527c | Add migrations | dash/orgs/migrations/0029_auto_20211025_1504.py | dash/orgs/migrations/0029_auto_20211025_1504.py | Python | 0.000001 | @@ -0,0 +1,945 @@
+# Generated by Django 3.2.6 on 2021-10-25 15:04%0A%0Aimport functools%0A%0Afrom django.db import migrations, models%0A%0Aimport dash.utils%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22orgs%22, %220028_alter_org_config%22),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name=%22org%22,%0A name=%22logo%22,%0A field=models.ImageField(%0A blank=True,%0A help_text=%22The logo that should be used for this organization%22,%0A null=True,%0A upload_to=functools.partial(dash.utils.generate_file_path, *(%22logos%22,), **%7B%7D),%0A ),%0A ),%0A migrations.AlterField(%0A model_name=%22orgbackground%22,%0A name=%22image%22,%0A field=models.ImageField(%0A help_text=%22The image file%22,%0A upload_to=functools.partial(dash.utils.generate_file_path, *(%22org_bgs%22,), **%7B%7D),%0A ),%0A ),%0A %5D%0A
|
|
6b9933cce4cac3131d603880969e1d9b78b1e4f0 | Remove party_affiliation table | alembic/versions/138c92cb2218_feed.py | alembic/versions/138c92cb2218_feed.py | Python | 0.000002 | @@ -0,0 +1,1190 @@
+%22%22%22Remove PartyAffiliation%0A%0ARevision ID: 138c92cb2218%0ARevises: 3aecd12384ee%0ACreate Date: 2013-09-28 16:34:40.128374%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '138c92cb2218'%0Adown_revision = '3aecd12384ee'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A op.drop_table(u'party_affiliation')%0A%0A%0Adef downgrade():%0A op.create_table(u'party_affiliation',%0A sa.Column(u'id', sa.INTEGER(), server_default=%22nextval('party_affiliation_id_seq'::regclass)%22, nullable=False),%0A sa.Column(u'person_id', sa.INTEGER(), autoincrement=False, nullable=False),%0A sa.Column(u'party_id', sa.INTEGER(), autoincrement=False, nullable=False),%0A sa.Column(u'start_date', sa.CHAR(length=8), autoincrement=False, nullable=True),%0A sa.Column(u'end_date', sa.CHAR(length=8), autoincrement=False, nullable=True),%0A sa.Column(u'is_current_member', sa.BOOLEAN(), autoincrement=False, nullable=True),%0A sa.ForeignKeyConstraint(%5B'party_id'%5D, %5Bu'party.id'%5D, name=u'party_affiliation_party_id_fkey'),%0A sa.ForeignKeyConstraint(%5B'person_id'%5D, %5Bu'person.id'%5D, name=u'party_affiliation_person_id_fkey'),%0A sa.PrimaryKeyConstraint(u'id', name=u'party_affiliation_pkey')%0A )%0A
|
|
6f7afea4aed4dd77cd06e8dce66e9ed1e6390a00 | Add a dummy label printer server. | dummyprint.py | dummyprint.py | Python | 0 | @@ -0,0 +1,1019 @@
+#!/usr/bin/env python3%0A# It does work with Python 2.7, too.%0A%0Afrom __future__ import print_function%0Afrom __future__ import unicode_literals%0A%0Atry:%0A from SocketServer import TCPServer, BaseRequestHandler%0Aexcept ImportError: # Python 3%0A from socketserver import TCPServer, BaseRequestHandler%0A%0Aclass DummyHandler(BaseRequestHandler):%0A %22%22%22 Simply write everything to stdout. %22%22%22%0A %0A def handle(self):%0A print(%22-----------------------------------------------------%22)%0A print(%22New connection from %7B%7D:%22.format(self.client_address))%0A buffer = b''%0A while True:%0A data = self.request.recv(1024)%0A if data:%0A buffer += data%0A else:%0A break%0A print(buffer)%0A print(%22-----------------------------------------------------%22)%0A%0Aif __name__ == %22__main__%22:%0A listen_config = (%22127.0.0.1%22, 9100)%0A print(%22Listening at %7B%7D...%22.format(listen_config))%0A server = TCPServer(listen_config, DummyHandler)%0A server.serve_forever()%0A
|
|
d173374a2bb0b3336a44c204f250ee1fa928051f | Add CLI mechanics stub. | grafcli/cli.py | grafcli/cli.py | Python | 0 | @@ -0,0 +1,680 @@
+%0Afrom grafcli.config import config%0Afrom grafcli.elastic import Elastic%0Afrom grafcli.filesystem import FileSystem%0A%0AROOT_PATH = %22/%22%0APROMPT = %22%3E %22%0A%0A%0Aclass GrafCLI(object):%0A%0A def __init__(self):%0A self._elastic = Elastic()%0A self._filesystem = FileSystem()%0A%0A self._current_path = ROOT_PATH%0A%0A def run(self):%0A while True:%0A try:%0A print(self._format_prompt(), end='')%0A user_input = input()%0A except (KeyboardInterrupt, EOFError):%0A break%0A%0A def _format_prompt(self):%0A return %22%5B%7Bpath%7D%5D%7Bprompt%7D%22.format(path=self._current_path,%0A prompt=PROMPT)%0A
|
|
590f9b896be367ded589c90ac5eacd4d3006ebc8 | Create Combinations_001.py | leetcode/077-Combinations/Combinations_001.py | leetcode/077-Combinations/Combinations_001.py | Python | 0.000004 | @@ -0,0 +1,419 @@
+class Solution:%0A # @param %7Binteger%7D n%0A # @param %7Binteger%7D k%0A # @return %7Binteger%5B%5D%5B%5D%7D%0A def combine(self, n, k):%0A if k %3C 1 or k %3E n:%0A return %5B%5D%0A if k == 1:%0A return %5B%5Bi%5D for i in range(1, n+1)%5D%0A %0A res = self.combine(n - 1, k -1)%0A %5Bi.append(n) for i in res %5D%0A second = self.combine(n - 1, k)%0A res.extend(second)%0A %0A return res%0A
|
|
0f0116be7870490447bbfa794c118205e8eca120 | Add an adapter for pecan. | wsme/pecan.py | wsme/pecan.py | Python | 0 | @@ -0,0 +1,1892 @@
+import inspect%0Aimport sys%0A%0Aimport json%0A%0Aimport xml.etree.ElementTree as et%0A%0Aimport wsme%0Aimport wsme.protocols.commons%0Aimport wsme.protocols.restjson%0Aimport wsme.protocols.restxml%0A%0Apecan = sys.modules%5B'pecan'%5D%0A%0A%0Aclass JSonRenderer(object):%0A def __init__(self, path, extra_vars):%0A pass%0A%0A def render(self, template_path, namespace):%0A data = wsme.protocols.restjson.tojson(%0A namespace%5B'datatype'%5D,%0A namespace%5B'result'%5D%0A )%0A return json.dumps(data)%0A%0A%0Aclass XMLRenderer(object):%0A def __init__(self, path, extra_vars):%0A pass%0A%0A def render(self, template_path, namespace):%0A data = wsme.protocols.restxml.toxml(%0A namespace%5B'datatype'%5D,%0A 'result',%0A namespace%5B'result'%5D%0A )%0A return et.tostring(data)%0A%0Apecan.templating._builtin_renderers%5B'wsmejson'%5D = JSonRenderer%0Apecan.templating._builtin_renderers%5B'wsmexml'%5D = XMLRenderer%0A%0A%0Adef wsexpose(*args, **kwargs):%0A pecan_json_decorate = pecan.expose(%0A template='wsmejson:',%0A content_type='application/json',%0A generic=False)%0A pecan_xml_decorate = pecan.expose(%0A template='wsmexml:',%0A content_type='application/xml',%0A generic=False%0A )%0A sig = wsme.sig(*args, **kwargs)%0A%0A def decorate(f):%0A sig(f)%0A funcdef = wsme.api.FunctionDefinition.get(f)%0A%0A def callfunction(self, *args, **kwargs):%0A args, kwargs = wsme.protocols.commons.get_args(%0A funcdef, args, kwargs%0A )%0A result = f(self, *args, **kwargs)%0A return dict(%0A datatype=funcdef.return_type,%0A result=result%0A )%0A%0A pecan_json_decorate(callfunction)%0A pecan_xml_decorate(callfunction)%0A pecan.util._cfg(callfunction)%5B'argspec'%5D = inspect.getargspec(f)%0A return callfunction%0A%0A return decorate%0A
|
|
b113689db8b845471728a336b0fae30b45333022 | Create hilightresponses.py | HexChat/hilightresponses.py | HexChat/hilightresponses.py | Python | 0.000002 | @@ -0,0 +1,1145 @@
+import hexchat%0A%0A__module_name__ = 'Hilight Responses'%0A__module_version__ = '0.0.1'%0A__module_description__ = 'Highlights messages after yours'%0A__module_author__ = 'Vlek'%0A%0A_lastresponder = %7B%7D%0A%0Adef check_for_highlight(word, word_to_eol, userdata):%0A global _lastresponder%0A context = hexchat.get_context()%0A channelname = context.get_info('channel')%0A if channelname in _lastresponder and _lastresponder%5Bchannelname%5D == hexchat.get_info('nick'):%0A if len(word) == 2:%0A word.append('')%0A hexchat.emit_print('Channel Msg Hilight', word%5B0%5D, word%5B1%5D, word%5B2%5D)%0A return hexchat.EAT_ALL%0A update_responder(word, word_to_eol, userdata)%0A return hexchat.EAT_NONE%0A%0A%0Adef update_responder(word, word_to_eol, userdata):%0A global _lastresponder%0A context = hexchat.get_context()%0A _lastresponder%5Bcontext.get_info('channel')%5D = word%5B0%5D%0A return hexchat.EAT_NONE%0A%0A%0Ahexchat.hook_print('Channel Message', check_for_highlight, priority=hexchat.PRI_LOW)%0Ahexchat.hook_print('Your Message', update_responder, priority=hexchat.PRI_LOW)%0Ahexchat.hook_print('Channel Msg Hilight', update_responder, priority=hexchat.PRI_LOW)%0A
|
|
8f3f9d79d8ce1960ad225e236ca3e11c72de28e0 | Add test for dials.report on integrated data | test/command_line/test_report.py | test/command_line/test_report.py | Python | 0 | @@ -0,0 +1,591 @@
+from __future__ import absolute_import, division, print_function%0A%0Aimport os%0Aimport procrunner%0A%0A%0Adef test_report_integrated_data(dials_regression, run_in_tmpdir):%0A %22%22%22Simple test to check that dials.symmetry completes%22%22%22%0A%0A result = procrunner.run(%0A %5B%0A %22dials.report%22,%0A os.path.join(dials_regression, %22xia2-28%22, %2220_integrated_experiments.json%22),%0A os.path.join(dials_regression, %22xia2-28%22, %2220_integrated.pickle%22),%0A %5D%0A )%0A assert result%5B%22exitcode%22%5D == 0%0A assert result%5B%22stderr%22%5D == %22%22%0A assert os.path.exists(%22dials-report.html%22)%0A
|
|
74329cd397e9dc4593333591700923e0ba7453a1 | Create __init__.py (#148) | robosuite/environments/manipulation/__init__.py | robosuite/environments/manipulation/__init__.py | Python | 0.000006 | @@ -0,0 +1 @@
+%0A
|
|
6167ef40df491985749102bd4ca3f3f656f71f6c | Add migrations | mainapp/migrations/0030_auto_20210125_1431.py | mainapp/migrations/0030_auto_20210125_1431.py | Python | 0.000001 | @@ -0,0 +1,567 @@
+# Generated by Django 3.1.5 on 2021-01-25 13:31%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('mainapp', '0029_auto_20201206_2026'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='file',%0A name='manually_deleted',%0A field=models.BooleanField(default=False),%0A ),%0A migrations.AddField(%0A model_name='historicalfile',%0A name='manually_deleted',%0A field=models.BooleanField(default=False),%0A ),%0A %5D%0A
|
|
f6d3c63a0131a7532a091c1cc492ef7d7c84263e | Access realm alias objects in lower-case. | zerver/management/commands/realm_alias.py | zerver/management/commands/realm_alias.py | from __future__ import absolute_import
from __future__ import print_function
from typing import Any
from argparse import ArgumentParser
from django.core.management.base import BaseCommand
from zerver.models import Realm, RealmAlias, get_realm, can_add_alias
from zerver.lib.actions import realm_aliases
import sys
class Command(BaseCommand):
help = """Manage aliases for the specified realm"""
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument('-r', '--realm',
dest='domain',
type=str,
required=True,
help='The name of the realm.')
parser.add_argument('--op',
dest='op',
type=str,
default="show",
help='What operation to do (add, show, remove).')
parser.add_argument('alias', metavar='<alias>', type=str, nargs='?',
help="alias to add or remove")
def handle(self, *args, **options):
# type: (*Any, **str) -> None
realm = get_realm(options["domain"])
if options["op"] == "show":
print("Aliases for %s:" % (realm.domain,))
for alias in realm_aliases(realm):
print(alias)
sys.exit(0)
alias = options['alias']
if options["op"] == "add":
if not can_add_alias(alias):
print("A Realm already exists for this domain, cannot add it as an alias for another realm!")
sys.exit(1)
RealmAlias.objects.create(realm=realm, domain=alias)
sys.exit(0)
elif options["op"] == "remove":
RealmAlias.objects.get(realm=realm, domain=alias).delete()
sys.exit(0)
else:
self.print_help("python manage.py", "realm_alias")
sys.exit(1)
| Python | 0 | @@ -1420,16 +1420,24 @@
'alias'%5D
+.lower()
%0A
|
a6d958b7c29f11014ed322b9f153e8ad0c1a2cda | Add local server. | runserver.py | runserver.py | Python | 0 | @@ -0,0 +1,103 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Afrom flask_rest_service import app%0A%0Aapp.run(debug=True)%0A
|
|
a00dc9b0b1779ee8218917bca4c75823081b7854 | Add migration file for new database model | InvenTree/part/migrations/0072_bomitemsubstitute.py | InvenTree/part/migrations/0072_bomitemsubstitute.py | Python | 0 | @@ -0,0 +1,931 @@
+# Generated by Django 3.2.5 on 2021-10-12 23:24%0A%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('part', '0071_alter_partparametertemplate_name'),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='BomItemSubstitute',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('bom_item', models.ForeignKey(help_text='Parent BOM item', on_delete=django.db.models.deletion.CASCADE, related_name='substitutes', to='part.bomitem', verbose_name='BOM Item')),%0A ('part', models.ForeignKey(help_text='Substitute part', limit_choices_to=%7B'component': True%7D, on_delete=django.db.models.deletion.CASCADE, related_name='substitute_items', to='part.part', verbose_name='Part')),%0A %5D,%0A ),%0A %5D%0A
|
|
88087c9416103ae7f56749f59cdfabcd19fb14ab | Add a snippet. | python/notion_api/update_a_page_and_its_icon.py | python/notion_api/update_a_page_and_its_icon.py | Python | 0.000002 | @@ -0,0 +1,1426 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0A#################################################################%0A# Install the Python requests library: pip install requests%0A# http://docs.python-requests.org/en/master/user/quickstart/%0A#################################################################%0A%0A# Src: https://developers.notion.com/reference/patch-page%0A%0Aimport requests%0Aimport json%0A%0Awith open(%22NOTION_SECRET_TOKEN%22, %22r%22) as fd:%0A NOTION_TOKEN = fd.read().strip()%0A%0Awith open(%22NOTION_DB_ID%22, %22r%22) as fd:%0A NOTION_DB_ID = fd.read().strip()%0A%0Awith open(%22NOTION_PAGE_ID%22, %22r%22) as fd:%0A NOTION_PAGE_ID = fd.read().strip()%0A%0A%0AREQUEST_URL = f%22https://api.notion.com/v1/pages/%7BNOTION_PAGE_ID%7D%22%0AHEADER_DICT = %7B%0A %22Authorization%22: f%22Bearer %7BNOTION_TOKEN%7D%22,%0A %22Content-Type%22: %22application/json%22,%0A %22Notion-Version%22: %222021-08-16%22%0A%7D%0A%0ADATA_DICT = %7B%0A %22icon%22: %7B%0A %22type%22: %22emoji%22,%0A %22emoji%22: %22%5Cud83d%5Cudfe0%22%0A %7D,%0A %22properties%22: %7B%0A %22Score%22: %7B%0A %22rich_text%22: %5B%0A %7B%0A%09%09%09%09%09%22text%22: %7B%0A%09%09%09%09%09%09%22content%22: %22Top!%22%0A%09%09%09%09%09%7D%0A%09%09%09%09%7D%0A %5D%0A %7D%0A %7D%0A%7D%0A%0Aresp = requests.patch(REQUEST_URL, headers=HEADER_DICT, data=json.dumps(DATA_DICT))%0A%0Aprint(json.dumps(resp.json(), sort_keys=False, indent=4))%0A%0A#with open(%22db.json%22, %22w%22) as fd:%0A# #json.dump(data, fd) # no pretty print%0A# json.dump(issue_list, fd, sort_keys=False, indent=4) # pretty print format
|
|
a962de79938c73b5c0e0459be7b82265bde76b40 | Test case for LSPI on gridworld. | cases/gridworld/lspi.py | cases/gridworld/lspi.py | Python | 0 | @@ -0,0 +1,1655 @@
+#!/usr/bin/env python%0A%0A__author__ = %22William Dabney%22%0A%0Afrom Domains import GridWorld%0Afrom Tools import Logger%0Afrom Agents import LSPI%0Afrom Representations import Tabular%0Afrom Policies import eGreedy%0Afrom Experiments import Experiment%0A%0A%0Adef make_experiment(id=1, path=%22./Results/Temp%22):%0A %22%22%22%0A Each file specifying an experimental setup should contain a%0A make_experiment function which returns an instance of the Experiment%0A class with everything set up.%0A%0A @param id: number used to seed the random number generators%0A @param path: output directory where logs and results are stored%0A %22%22%22%0A%0A # Experiment variables%0A max_steps = 10000%0A num_policy_checks = 10%0A%0A ## Logging%0A logger = Logger()%0A%0A ## Domain:%0A # MAZE = '/Domains/GridWorldMaps/1x3.txt'%0A maze = './Domains/GridWorldMaps/4x5.txt'%0A domain = GridWorld(maze, noise=0.3, logger=logger)%0A%0A ## Representation%0A representation = Tabular(domain, logger, discretization=20)%0A%0A ## Policy%0A policy = eGreedy(representation, logger, epsilon=0.1)%0A%0A ## Agent%0A agent = LSPI(representation, policy, domain, %0A logger, max_steps, max_steps/num_policy_checks)%0A%0A experiment = Experiment(**locals())%0A return experiment%0A%0Aif __name__ == '__main__':%0A path = %22./Results/Temp/%7Bdomain%7D/%7Bagent%7D/%7Brepresentation%7D/%22%0A experiment = make_experiment(1, path=path)%0A experiment.run(visualize_steps=False, # should each learning step be shown?%0A visualize_learning=False, # show performance runs?%0A visualize_performance=True) # show value function?%0A experiment.plot()%0A experiment.save()%0A
|
|
bd0530d147f1c57d89c57548355b65ba207d3116 | make access log line buffered | daphne/cli.py | daphne/cli.py | import sys
import argparse
import logging
import importlib
from .server import Server
from .access import AccessLogGenerator
logger = logging.getLogger(__name__)
class CommandLineInterface(object):
"""
Acts as the main CLI entry point for running the server.
"""
description = "Django HTTP/WebSocket server"
def __init__(self):
self.parser = argparse.ArgumentParser(
description=self.description,
)
self.parser.add_argument(
'-p',
'--port',
type=int,
help='Port number to listen on',
default=8000,
)
self.parser.add_argument(
'-b',
'--bind',
dest='host',
help='The host/address to bind to',
default="127.0.0.1",
)
self.parser.add_argument(
'-u',
'--unix-socket',
dest='unix_socket',
help='Bind to a UNIX socket rather than a TCP host/port',
default=None,
)
self.parser.add_argument(
'--fd',
type=int,
dest='file_descriptor',
help='Bind to a file descriptor rather than a TCP host/port or named unix socket',
default=None,
)
self.parser.add_argument(
'-v',
'--verbosity',
type=int,
help='How verbose to make the output',
default=1,
)
self.parser.add_argument(
'-t',
'--http-timeout',
type=int,
help='How long to wait for worker server before timing out HTTP connections',
default=120,
)
self.parser.add_argument(
'--access-log',
help='Where to write the access log (- for stdout, the default for verbosity=1)',
default=None,
)
self.parser.add_argument(
'--ping-interval',
type=int,
help='The number of seconds a WebSocket must be idle before a keepalive ping is sent',
default=20,
)
self.parser.add_argument(
'--ping-timeout',
type=int,
help='The number of seconds before a WeSocket is closed if no response to a keepalive ping',
default=30,
)
self.parser.add_argument(
'channel_layer',
help='The ASGI channel layer instance to use as path.to.module:instance.path',
)
self.parser.add_argument(
'--ws-protocol',
nargs='*',
dest='ws_protocols',
help='The WebSocket protocols you wish to support',
default=None,
)
self.parser.add_argument(
'--root-path',
dest='root_path',
help='The setting for the ASGI root_path variable',
default="",
)
@classmethod
def entrypoint(cls):
"""
Main entrypoint for external starts.
"""
cls().run(sys.argv[1:])
def run(self, args):
"""
Pass in raw argument list and it will decode them
and run the server.
"""
# Decode args
args = self.parser.parse_args(args)
# Set up logging
logging.basicConfig(
level={
0: logging.WARN,
1: logging.INFO,
2: logging.DEBUG,
}[args.verbosity],
format="%(asctime)-15s %(levelname)-8s %(message)s",
)
# If verbosity is 1 or greater, or they told us explicitly, set up access log
access_log_stream = None
if args.access_log:
if args.access_log == "-":
access_log_stream = sys.stdout
else:
access_log_stream = open(args.access_log, "a")
elif args.verbosity >= 1:
access_log_stream = sys.stdout
# Import channel layer
sys.path.insert(0, ".")
module_path, object_path = args.channel_layer.split(":", 1)
channel_layer = importlib.import_module(module_path)
for bit in object_path.split("."):
channel_layer = getattr(channel_layer, bit)
# Run server
logger.info(
"Starting server at %s, channel layer %s",
(args.unix_socket if args.unix_socket else "%s:%s" % (args.host, args.port)),
args.channel_layer,
)
Server(
channel_layer=channel_layer,
host=args.host,
port=args.port,
unix_socket=args.unix_socket,
file_descriptor=args.file_descriptor,
http_timeout=args.http_timeout,
ping_interval=args.ping_interval,
ping_timeout=args.ping_timeout,
action_logger=AccessLogGenerator(access_log_stream) if access_log_stream else None,
ws_protocols=args.ws_protocols,
root_path=args.root_path,
verbosity=args.verbosity,
).run()
| Python | 0 | @@ -3836,16 +3836,19 @@
log, %22a%22
+, 1
)%0A
|
b514cf783d53a5c713911729422239c9b0f0ff99 | Add automatic leak detection python script in examples | client/python/examples/edleak_autodetect.py | client/python/examples/edleak_autodetect.py | Python | 0 | @@ -0,0 +1,1556 @@
+import sys%0Aimport rpc.ws%0Aimport edleak.api%0Aimport edleak.slice_runner%0A%0Adef usage():%0A print('autodetect %5Bperiod%5D %5Bduration%5D')%0A%0Adef print_leaker(leaker):%0A print('-------------------------------')%0A print('class : ' + leaker%5B'leak_factor'%5D%5B'class'%5D)%0A print('leak size : ' + str(leaker%5B'leak_factor'%5D%5B'leak'%5D))%0A print('call-stack: ')%0A for caller in leaker%5B'stack'%5D:%0A print(' ' + caller)%0A%0Aif __name__ == '__main__':%0A if len(sys.argv) != 3:%0A usage()%0A sys.exit(-1)%0A%0A period = int(sys.argv%5B1%5D)%0A duration = int(sys.argv%5B2%5D)%0A ws_rpc = rpc.ws.WebService(%22localhost%22, 8080)%0A el = edleak.api.EdLeak(ws_rpc)%0A runner = edleak.slice_runner.SliceRunner(el)%0A%0A # First run, to find the leakers%0A print('Starting 1st run...')%0A asset = runner.run(period, duration)%0A allocers = asset.getAllocerList()%0A leakers = %5Bl for l in allocers if l%5B'leak_factor'%5D%5B'leak'%5D %3E 0 and%0A (l%5B'leak_factor'%5D%5B'class'%5D == 'linear' or%0A l%5B'leak_factor'%5D%5B'class'%5D == 'exp')%5D%0A%0A if len(leakers) == 0:%0A print('No leaks found.')%0A sys.exit(0)%0A%0A print(str(len(leakers)) + ' leaks found. Starting 2nd run to retrieve callstacks...')%0A for leaker in leakers:%0A el.addStackWatch(leaker%5B'id'%5D)%0A%0A asset = runner.run(period, duration)%0A allocers = asset.getAllocerList()%0A leakers = %5Bl for l in allocers if l%5B'leak_factor'%5D%5B'leak'%5D %3E 0 and%0A (l%5B'leak_factor'%5D%5B'class'%5D == 'linear' or%0A l%5B'leak_factor'%5D%5B'class'%5D == 'exp')%5D%0A%0A%0A for leaker in leakers:%0A if len(leaker%5B'stack'%5D) %3E 1:%0A print_leaker(leaker)%0A
|
|
04da8d531267972554c6300c24a5a7b2c7def59d | add basic unit testing for appliance instances (incomplete) | tests/test_appliance_instance.py | tests/test_appliance_instance.py | Python | 0.000012 | @@ -0,0 +1,769 @@
+import sys%0Asys.path.append('..')%0Aimport disaggregator as da%0Aimport unittest%0Aimport pandas as pd%0Aimport numpy as np%0A%0Aclass ApplianceInstanceTestCase(unittest.TestCase):%0A%0A def setUp(self):%0A indices = %5Bpd.date_range('1/1/2013', periods=96, freq='15T'),%0A pd.date_range('1/2/2013', periods=96, freq='15T')%5D%0A data = %5Bnp.zeros(96),np.zeros(96)%5D%0A series = %5Bpd.Series(d, index=i) for d,i in zip(data,indices)%5D%0A self.traces = %5Bda.ApplianceTrace(s,%7B%7D) for s in series%5D%0A self.normal_instance = da.ApplianceInstance(self.traces)%0A%0A def test_get_traces(self):%0A self.assertIsNotNone(self.normal_instance.get_traces(),%0A 'instance should have traces')%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
|
|
879744e19cab5cc7357912ba670d200adfd58be6 | add aur-update | bumblebee_status/modules/contrib/aur-update.py | bumblebee_status/modules/contrib/aur-update.py | Python | 0.000001 | @@ -0,0 +1,1639 @@
+%22%22%22Check updates for AUR.%0A%0ARequires the following packages:%0A * yay (used as default)%0A%0ANote - You can replace yay by changing the %22yay -Qum%22%0Acommand for your preferred AUR helper. Few examples:%0A%0Aparu -Qum%0Apikaur -Qua%0Arua upgrade --printonly%0Atrizen -Su --aur --quiet%0Ayay -Qum%0A%0Acontributed by %60ishaanbhimwal %3Chttps://github.com/ishaanbhimwal%3E%60_ - many thanks!%0A%22%22%22%0A%0Aimport logging%0A%0Aimport core.module%0Aimport core.widget%0Aimport core.decorators%0A%0Aimport util.cli%0A%0A%0Aclass Module(core.module.Module):%0A @core.decorators.every(minutes=60)%0A def __init__(self, config, theme):%0A super().__init__(config, theme, core.widget.Widget(self.utilization))%0A self.background = True%0A self.__packages = 0%0A self.__error = False%0A%0A @property%0A def __format(self):%0A return self.parameter(%22format%22, %22Update AUR: %7B%7D%22)%0A%0A def utilization(self, widget):%0A return self.__format.format(self.__packages)%0A%0A def hidden(self):%0A return self.__packages == 0 and not self.__error%0A%0A def update(self):%0A self.__error = False%0A code, result = util.cli.execute(%0A %22yay -Qum%22, ignore_errors=True, return_exitcode=True%0A )%0A%0A if code == 0:%0A self.__packages = len(result.strip().split(%22%5Cn%22))%0A elif code == 2:%0A self.__packages = 0%0A else:%0A self.__error = True%0A logging.error(%22yay -Qum exited with %7B%7D: %7B%7D%22.format(code, result))%0A%0A def state(self, widget):%0A if self.__error:%0A return %22warning%22%0A return self.threshold_state(self.__packages, 1, 100)%0A%0A%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A
|
|
4175f27a03be52baa8b4245df96a03e6bbd22310 | Add test for pygame sound play hook | modulation_test.py | modulation_test.py | Python | 0 | @@ -0,0 +1,379 @@
+import pygame%0Aimport random%0Afrom demodulate.cfg import *%0Afrom gen_tone import *%0A%0Aif __name__ == %22__main__%22:%0A%09pygame.mixer.pre_init(frequency = int(SAMPLE_FREQ), channels = 1)%0A%09pygame.mixer.init()%0A%09WPM = random.uniform(2,20)%0A%09pattern = %5B1,0,1,1,1,0,0,0,0,0,0,0%5D # morse code 'A'%0A%09#gen_test_data()%0A%09data = gen_tone(pattern, WPM)%0A%09snd = pygame.sndarray.make_sound(data)%0A%09snd.play()%0A
|
|
cfeab0e8f704a4681e1ec887b3ce116839557af9 | update tests to changes in graph_lasso | sklearn/covariance/tests/test_graph_lasso.py | sklearn/covariance/tests/test_graph_lasso.py | """ Test the graph_lasso module.
"""
import sys
from StringIO import StringIO
import numpy as np
from scipy import linalg
from sklearn.covariance import graph_lasso, GraphLasso, GraphLassoCV
from sklearn.datasets.samples_generator import make_sparse_spd_matrix
from sklearn.utils import check_random_state
def test_graph_lasso(random_state=0):
# Sample data from a sparse multivariate normal
dim = 20
n_samples = 100
random_state = check_random_state(random_state)
prec = make_sparse_spd_matrix(dim, alpha=.95,
random_state=random_state)
cov = linalg.inv(prec)
X = random_state.multivariate_normal(np.zeros(dim), cov, size=n_samples)
for alpha in (.1, .01):
covs = dict()
for method in ('cd', 'lars'):
cov_, _, costs = graph_lasso(X, alpha=.1, return_costs=True)
covs[method] = cov_
costs, dual_gap = np.array(costs).T
# Check that the costs always decrease
np.testing.assert_array_less(np.diff(costs), 0)
# Check that the 2 approaches give similar results
np.testing.assert_allclose(covs['cd'], covs['lars'])
# Smoke test the estimator
model = GraphLasso(alpha=.1).fit(X)
np.testing.assert_allclose(model.covariance_, covs['cd'])
def test_graph_lasso_cv(random_state=1):
# Sample data from a sparse multivariate normal
dim = 5
n_samples = 6
random_state = check_random_state(random_state)
prec = make_sparse_spd_matrix(dim, alpha=.96,
random_state=random_state)
cov = linalg.inv(prec)
X = random_state.multivariate_normal(np.zeros(dim), cov, size=n_samples)
# Capture stdout, to smoke test the verbose mode
orig_stdout = sys.stdout
try:
sys.stdout = StringIO()
GraphLassoCV(verbose=10, alphas=3).fit(X)
finally:
sys.stdout = orig_stdout
| Python | 0 | @@ -185,16 +185,52 @@
 hLassoCV
+, \
+        empirical_covariance
 
 from sk
@@ -728,16 +728,54 @@
 samples)
+
+    emp_cov = empirical_covariance(X)
 
 
     fo
@@ -897,17 +897,23 @@
 h_lasso(
-X
+emp_cov
, alpha=
|
dd9893eec00c16f55b77944509bafe4864319b72 | create main function | JobManager.py | JobManager.py | Python | 0.004115 | @@ -0,0 +1,680 @@
+
+import filelib.parser.ma
+import filelib.parser.mb
+import os.path
+import sys
+
+if __name__ == "__main__":
+    addFilePath = "/root/test_maya_2015.mb"
+
+    if(len(sys.argv) > 1):
+        addFilePath = sys.argv[1]
+
+
+    (dir,jobExt) = os.path.splitext(addFilePath)
+    jobExt = jobExt.lower()
+    if jobExt == ".ma":
+        fileParser = filelib.parser.ma.FileParserMayaMA(addFilePath, SudioPlugin())
+    elif jobExt == ".mb":
+        fileParser = filelib.parser.mb.FileParserMayaMB(addFilePath)
+    fileParser.parse()
+    print fileParser.getparam()
+#    job2 = fileParser.getJob()
+    #jobfactory = JobFactory();
+    #job2 = jobfactory.getJob(fileParser.getparam(), SudioPlugin())
|
|
c65731de77f88380f2c816fa9667d153140bfbe1 | Add LDA script | lda/lda_analysis.py | lda/lda_analysis.py | Python | 0.000001 | @@ -0,0 +1,2034 @@
+import sys
+
+from sklearn.lda import LDA
+import matplotlib.pyplot as plt
+import numpy as np
+
+def read_variants(flname):
+	fl = open(flname)
+	markers = []
+	individuals = []
+	population_ids = []
+	population = -1
+	for ln in fl:
+		if "Marker" in ln:
+			if len(individuals) == 0:
+				continue
+
+			marker = dict()
+			marker["individuals"] = np.array(individuals)
+			marker["population_labels"] = np.array(population_ids)
+			markers.append(marker)
+			population = -1
+			population_ids = []
+			individuals = []
+		elif "Population" in ln:
+			population += 1
+		else:
+			individual = map(float, ln.strip().split())
+			individuals.append(individual)
+			population_ids.append(population)
+
+	if len(individuals) != 0:
+		marker = dict()
+		marker["individuals"] = np.array(individuals)
+		marker["population_labels"] = np.array(population_ids)
+		markers.append(marker)
+	fl.close()
+	return markers
+
+def plot_scores(markers, flname):
+	plt.clf()
+	scores = []
+	for i, marker in enumerate(markers):
+		try:
+			lda = LDA()
+			lda.fit(marker["individuals"], marker["population_labels"])
+			scores.append(lda.score(marker["individuals"], marker["population_labels"]))
+		except:
+			scores.append(0.0)
+
+	plt.hist(scores, bins=np.arange(0.0, 1.0, 0.01))
+
+	plt.xlabel("Score", fontsize=18)
+	plt.ylabel("Occurrences", fontsize=18)
+
+	plt.savefig(flname, DPI=200)
+
+def plot_lda_projection(marker, flname):
+	lda = LDA()
+	lda.fit(marker["individuals"], marker["population_labels"])
+	print lda.score(marker["individuals"], marker["population_labels"])
+	proj = lda.transform(marker["individuals"])
+	n_samples, n_components = proj.shape
+
+	plt.scatter(proj, marker["population_labels"])
+	plt.xlabel("Component 0", fontsize=18)
+	plt.ylabel("Population Labels", fontsize=18)
+
+	plt.savefig(flname, DPI=200)
+
+
+if __name__ == "__main__":
+	variants_fl = sys.argv[1]
+	#variant_id = int(sys.argv[2])
+	plot_flname = sys.argv[2]
+
+	variants = read_variants(variants_fl)
+
+	print len(variants)
+
+	#plot_lda_projection(variants[variant_id], plot_flname)
+	plot_scores(variants, plot_flname)
|
|
cf97c95ab9dcb3b1dba6608639471375a1cbef42 | Create afUdimLayout.py | scripts/afUdimLayout.py | scripts/afUdimLayout.py | Python | 0.000001 | @@ -0,0 +1,775 @@
+import pymel.core as pm
+import maya.mel as mel
+
+allSets = pm.ls(sl=1,type="objectSet")
+
+for i in range(0,len(allSets)):
+    if i<10:
+        pm.select(allSets[i],r=1,ne=1)
+        pm.select(hierarchy=1)
+        mel.eval("ConvertSelectionToUVs;")
+        pm.polyEditUV(u=i,v=0)
+    elif i>=10<20:
+        pm.select(allSets[i],r=1,ne=1)
+        pm.select(hierarchy=1)
+        mel.eval("ConvertSelectionToUVs;")
+        pm.polyEditUV(u=i-10,v=1)
+    elif i>=20<30:
+        pm.select(allSets[i],r=1,ne=1)
+        pm.select(hierarchy=1)
+        mel.eval("ConvertSelectionToUVs;")
+        pm.polyEditUV(u=i-20,v=2)
+    elif i>=30<40:
+        pm.select(allSets[i],r=1,ne=1)
+        pm.select(hierarchy=1)
+        mel.eval("ConvertSelectionToUVs;")
+        pm.polyEditUV(u=i-30,v=3)
|
|
49f557228a6c826598c48a08f6a0de4ee176d888 | add python script to send ogg audio stream over LCM messages | software/tools/tools/scripts/oggStreamLCM.py | software/tools/tools/scripts/oggStreamLCM.py | Python | 0.000001 | @@ -0,0 +1,2420 @@
+import bot_core
+import lcm
+import urllib2
+import time
+import sys
+import os
+import select
+import subprocess
+import threading
+
+
+# VLC command:
+# cvlc <input> --sout '#transcode{acodec=vorb,ab=10,channels=1,samplerate=8000}:std{access=http,mux=ogg,url=localhost:8080}'
+# where <input> is a file or a url
+
+
+serverChannel = 'OGG_SERVER'
+clientChannel = 'OGG_CLIENT'
+oggUrl = 'http://localhost:8080'
+messageSize = 4096
+
+serverThreadRunning = False
+serverThread = None
+
+def serverStreamLoop():
+
+    stream = urllib2.urlopen(oggUrl)
+
+    lcmHandle = lcm.LCM()
+    m = bot_core.raw_t()
+    m.utime = 0
+    totalBytes = 0
+
+    global serverThreadRunning
+    while serverThreadRunning:
+
+        m.data = stream.read(messageSize)
+        if not m.data:
+            break
+
+        m.utime = m.utime + 1
+        m.length = len(m.data)
+        totalBytes += m.length
+
+        #print 'publishing message %d. %d bytes. total so far: %f kB' % (m.utime, m.length, totalBytes/1024.0)
+
+        lcmHandle.publish(serverChannel, m.encode())
+
+    print 'stream publisher loop returning'
+
+
+def handleMessageFromClient(channel, data):
+
+    m = bot_core.raw_t.decode(data)
+    print 'message from client:', m.data
+
+    global serverThread, serverThreadRunning
+
+    if serverThread:
+        serverThreadRunning = False
+        serverThread.join()
+        serverThread = None
+
+    serverThreadRunning = True
+    serverThread = threading.Thread(target=serverStreamLoop)
+    serverThread.daemon = True
+    serverThread.start()
+
+
+def server():
+
+    lcmHandle = lcm.LCM()
+    subscription = lcmHandle.subscribe(clientChannel, handleMessageFromClient)
+    while True:
+        lcmHandle.handle()
+
+
+oggProc = None
+
+def handleMessageFromServer(channel, data):
+    m = bot_core.raw_t.decode(data)
+    oggProc.stdin.write(m.data)
+
+
+def client():
+
+    global oggProc
+    oggProc = subprocess.Popen(['ogg123', '-'], stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+    lcmHandle = lcm.LCM()
+
+    m = bot_core.raw_t()
+    m.utime = 0
+    m.data = 'restart_stream'
+    m.length = len(m.data)
+    lcmHandle.publish(clientChannel, m.encode())
+
+    subscription = lcmHandle.subscribe(serverChannel, handleMessageFromServer)
+    while True:
+        lcmHandle.handle()
+
+
+def main():
+
+    mode = sys.argv[1]
+    assert mode in ('--client', '--server')
+
+    if mode == '--server':
+        server()
+    else:
+        client()
+
+if __name__ == '__main__':
+    main()
|
|
76be22f3d1aa86616ecd06a326344f24ff03adbe | Add function to generate uniform addresses | DataGeneration/GenerateUniformAddresses.py | DataGeneration/GenerateUniformAddresses.py | Python | 0.018119 | @@ -0,0 +1,2239 @@
+# The purpose of this script is to generate a uniformly distributed series of
+# lat/long coordinates given max/min latitude, max/min longitude, latitude
+# resolution, and longitude resolution, where resolution is the desired number
+# of degrees between output coordinates
+# Outputs a pandas dataframe of lat/long coordinate pairs
+
+import pandas as pd # For the dataframe
+import numpy as np # To calculate ranges with float step-values
+import math # For math
+
+def GenerateUniformCoordinates(lat_min, lat_max,
+                               lng_min, lng_max,
+                               lat_res, lng_res):
+
+    # Calculate the number of rows our output DataFrame will contain so that we
+    # can pre-allocate the memory for the dataframe using the index property.
+    nrows_lat = math.ceil((lat_max - lat_min) / lat_res + 1)
+    nrows_lng = math.ceil((lng_max - lng_min) / lng_res + 1)
+    nrows = nrows_lat * nrows_lng
+
+    # Output some data for debugging
+    print('Latitude Quantity: ' + str(nrows_lat))
+    print('Longitude Quantity: ' + str(nrows_lng))
+    print('Total Number of Rows to Output: ' + str(nrows))
+
+    # Instantiate or DataFrame
+    df = pd.DataFrame(columns = ['lat','lng'], index=np.arange(0, nrows))
+
+    # Iterate through each latitude and each longitude calculated with the
+    # np.arange function, adding lat_res to the max value to ensure that we
+    # include the max value in the range that we iterate through
+    row_num = 0
+    for lat in np.arange(lat_min, lat_max + lat_res, lat_res):
+        for lng in np.arange(lng_min, lng_max + lng_res, lng_res):
+            df.loc[row_num] = [lat, lng] #Add the lat/lng pair to the dataframe
+            row_num += 1 #increment our row number
+    return df
+
+# These values are the degrees walked per minute at a speed of 3.1 miles per
+# hour at 41.4822 deg N and 81.6697 deg W, which is the center of Cleveland
+lat_res = 0.000724516
+lng_res = 0.000963461
+lat_min = 41.227883
+lat_max = 41.637051
+lng_min = -81.96753
+lng_max = -81.438542
+output_df = GenerateUniformCoordinates(lat_min, lat_max,
+                                       lng_min, lng_max,
+                                       lat_res, lng_res)
+output_df.to_csv('uniform_addresses.csv')
|
|
05f87be4c85036c69abc9404acb824c58d71f101 | Add border operation... Damn that was easy | slice_ops.py | slice_ops.py | Python | 0 | @@ -0,0 +1,407 @@
+import slicer
+import shapely.ops
+import shapely.geometry
+
+def border(sli, amount):
+    cuts = [cut.polygon(True) for cut in sli.cuts]
+    cut_outline = shapely.ops.cascaded_union(cuts) \
+                             .buffer(amount / 2)
+    shape_outline = sli.poly.boundary.buffer(amount)
+    outlines = cut_outline.union(shape_outline)
+    newpoly = outlines.intersection(sli.poly)
+    sli.poly = newpoly
|
|
a3089dd3d9c31d0d705fe54858fdc0ebee76f488 | write a Python client for Sift Science's REST API | server/sift_client.py | server/sift_client.py | Python | 0 | @@ -0,0 +1,2121 @@
+"""Python client for Sift Science's REST API
+(https://siftscience.com/docs/rest-api).
+"""
+
+import json
+import logging
+import traceback
+
+import requests
+
+
+API_URL = 'https://api.siftscience.com/v202/events'
+sift_logger = logging.getLogger('sift_client')
+
+
+class Client(object):
+    def __init__(self, api_key, api_url=API_URL, timeout=2.0):
+        """Initialize the client.
+
+        Args:
+            api_key: Your Sift Science API key associated with your customer
+                account. You can obtain this from
+                https://siftscience.com/quickstart
+            api_url: The URL to send events to.
+            timeout: Number of seconds to wait before failing request. Defaults
+                to 2 seconds.
+        """
+        self.api_key = api_key
+        self.url = api_url
+        self.timeout = timeout
+
+    def track(self, event, properties):
+        """Track an event and associated properties to the Sift Science client.
+        This call is blocking.
+
+        Args:
+            event: The name of the event to send. This can either be a reserved
+                event name such as "$transaction" or "$label" or a custom event
+                name (that does not start with a $).
+            properties: A dict of additional event-specific attributes to track
+        Returns:
+            A requests.Response object if the track call succeeded, otherwise
+            a subclass of requests.exceptions.RequestException indicating the
+            exception that occurred.
+        """
+        headers = { 'Content-type' : 'application/json', 'Accept' : '*/*' }
+        properties.update({ '$api_key': self.api_key, '$type': event })
+
+        try:
+            response = requests.post(self.url, data=json.dumps(properties),
+                    headers=headers, timeout=self.timeout)
+            # TODO(david): Wrap the response object in a class
+            return response
+        except requests.exceptions.RequestException as e:
+            sift_logger.warn('Failed to track event: %s' % properties)
+            sift_logger.warn(traceback.format_exception_only(type(e), e))
+
+            return e
|
|
cda1efa55242641accf78162493c3ebb3582399e | Create AM_example.py | Effects/Amplitude_Modulation/AM_example.py | Effects/Amplitude_Modulation/AM_example.py | Python | 0.000005 | @@ -0,0 +1,1911 @@
+# Play a wave file with amplitude modulation.
+# Assumes wave file is mono.
+# This implementation reads and plays a one frame (sample) at a time (no blocking)
+"""
+Read a signal from a wave file, do amplitude modulation, play to output
+Original: pyrecplay_modulation.py by Gerald Schuller, Octtober 2013
+Modified to read a wave file - Ivan Selesnick, September 2015
+"""
+
+# f0 = 0    # Normal audio
+f0 = 400    # 'Duck' audio
+
+import pyaudio
+import struct
+import wave
+import math
+
+# Open wave file (mono)
+input_wavefile = 'author.wav'
+# input_wavefile = 'sin01_mono.wav'
+# input_wavefile = 'sin01_stereo.wav'
+wf = wave.open( input_wavefile, 'rb')
+RATE = wf.getframerate()
+WIDTH = wf.getsampwidth()
+LEN = wf.getnframes()
+CHANNELS = wf.getnchannels()
+
+print 'The sampling rate is {0:d} samples per second'.format(RATE)
+print 'Each sample is {0:d} bytes'.format(WIDTH)
+print 'The signal is {0:d} samples long'.format(LEN)
+print 'The signal has {0:d} channel(s)'.format(CHANNELS)
+
+# Open audio stream
+p = pyaudio.PyAudio()
+stream = p.open(format = p.get_format_from_width(WIDTH),
+                channels = 1,
+                rate = RATE,
+                input = False,
+                output = True)
+
+print('* Playing...')
+
+# Loop through wave file
+for n in range(0, LEN):
+
+    # Get sample from wave file
+    input_string = wf.readframes(1)
+
+    # Convert binary string to tuple of numbers
+    input_tuple = struct.unpack('h', input_string)
+    # (h: two bytes per sample (WIDTH = 2))
+
+    # Use first value (of two if stereo)
+    input_value = input_tuple[0]
+
+    # Amplitude modulation (f0 Hz cosine)
+    output_value = input_value * math.cos(2*math.pi*f0*n/RATE)
+
+    # Convert value to binary string
+    output_string = struct.pack('h', output_value)
+
+    # Write binary string to audio output stream
+    stream.write(output_string)
+
+print('* Done')
+
+stream.stop_stream()
+stream.close()
+p.terminate()
|
|
2387d8f269cbe1943db1b1e6304603ccb6901e43 | Add flashcards for powers of two estimation | flashcards.py | flashcards.py | Python | 0 | @@ -0,0 +1,333 @@
+import random
+import time
+
+DELAY = 10
+
+while 1:
+    time.sleep(DELAY)
+    useful_powers_of_2 = {7, 8, 10, 16, 20, 30, 32, 40}
+    random_power_of_2 = random.sample(useful_powers_of_2, 1)[0]
+    print '\nWhat\'s the largest %s bit integer?' % random_power_of_2
+
+    time.sleep(DELAY)
+    print 'Answer: %s' % '{:,}'.format(2 ** random_power_of_2)
|
|
795be78be4642426e45c1f0f6017a2744129b622 | Make session persistent between searches fix download URL construction error | flexget/plugins/search_ptn.py | flexget/plugins/search_ptn.py | from __future__ import unicode_literals, division, absolute_import
import logging
import re
from requests.utils import dict_from_cookiejar, cookiejar_from_dict
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils import requests
from flexget.utils.imdb import extract_id
from flexget.utils.soup import get_soup
from flexget.utils.search import torrent_availability
log = logging.getLogger('search_ptn')
cookies = None
categories = {
'1080p': 'c5',
'720p': 'c6',
'bdrip': 'c10',
'bluray': 'c1',
'brrip': 'c11',
'dvdr': 'c4',
'dvdrip': 'c12',
'mp4': 'c16',
'ost/flac': 'c17',
'ost/mp3': 'c18',
'packs': 'c20',
'r5/scr': 'c13',
'remux': 'c2',
'tvrip': 'c15',
'webrip': 'c14'
}
class SearchPTN(object):
schema = {
'type': 'object',
'properties': {
'username': {'type': 'string'},
'login_key': {'type': 'string'},
'password': {'type': 'string'},
'categories': {
'type': 'array',
'items': {'type': 'string', 'enum': list(categories)}
}
},
'required': ['username', 'login_key', 'password'],
'additionalProperties': False
}
def search(self, entry, config):
global cookies
login_sess = requests.Session()
if isinstance(cookies, dict):
cj = cookiejar_from_dict(cookies)
login_sess.add_cookiejar(cj)
else:
try:
login_params = {'username': config['username'],
'password': config['password'],
'loginkey': config['login_key']}
login_sess.post('https://piratethenet.org/takelogin.php', data=login_params, verify=False)
cookies = dict_from_cookiejar(login_sess.cookies)
except requests.RequestException as e:
log.error('Error while logging in to PtN: %s', e)
# Default to searching by title (0=title 3=imdb_id)
search_by = 0
if 'imdb_id' in entry:
searches = [entry['imdb_id']]
search_by = 3
elif 'movie_name' in entry:
search = entry['movie_name']
if 'movie_year' in entry:
search += ' %s' % entry['movie_year']
searches = [search]
else:
searches = entry.get('search_strings', [entry['title']])
params = {'_by': search_by}
if config.get('categories'):
for cat in config['categories']:
params[categories[cat]] = 1
results = set()
for search in searches:
params['search'] = search
try:
r = login_sess.get('http://piratethenet.org/browse.php', params=params)
except requests.RequestException as e:
log.error('Error searching ptn: %s' % e)
continue
soup = get_soup(r.text)
if 'login' in soup.head.title.text.lower():
log.error('PtN cookie info invalid')
raise plugin.PluginError('PTN cookie info invalid')
links = soup.findAll('a', attrs={'href': re.compile('download\.php\?torrent=\d+')})
for row in [l.find_parent('tr') for l in links]:
entry = Entry()
td = row.findAll('td')
entry['title'] = row.find('a', attrs={'href': re.compile('details\.php\?id=\d+')}).text
entry['imdb_id'] = extract_id(row.find('a', attrs={'href': re.compile('imdb\.com')}).get('href'))
dl_href = row.find('a', attrs={'href': re.compile('download\.php\?torrent=\d+')}).get('href')
passkey = re.findall('passkey=([\d\w]*)"', r.text)[0]
entry['url'] = 'http://piratethenet.org/' + dl_href + '&' + passkey
# last two table cells contains amount of seeders and leeechers respectively
s, l = td[-2:]
entry['torrent_seeds'] = int(s.text)
entry['torrent_leeches'] = int(l.text)
entry['search_sort'] = torrent_availability(entry['torrent_seeds'], entry['torrent_leeches'])
# 4th last table cell contains size, of which last two symbols are unit
size = td[-4].text[:-2]
unit = td[-4].text[-2:]
if unit == 'GB':
entry['content_size'] = int(float(size) * 1024)
elif unit == 'MB':
entry['content_size'] = int(float(size))
elif unit == 'KB':
entry['content_size'] = int(float(size) / 1024)
results.add(entry)
return results
@event('plugin.register')
def register_plugin():
plugin.register(SearchPTN, 'ptn', groups=['search'], api_ver=2)
| Python | 0 | @@ -90,77 +90,8 @@
 re
 
-from requests.utils import dict_from_cookiejar, cookiejar_from_dict
-
 from
@@ -113,16 +113,16 @@
 plugin
+
 from fle
@@ -392,22 +392,36 @@
 ')
 
-cookies = None
+session = requests.Session()
 
 
 ca
@@ -1268,201 +1268,30 @@
-global cookies
-        login_sess = requests.Session()
-
-        if isinstance(cookies, dict):
-            cj = cookiejar_from_dict(cookies)
-            login_sess.add_cookiejar(cj)
-        else
+if not session.cookies
:
@@ -1514,26 +1514,23 @@
-login_
 sess
+ion
.post('h
@@ -1606,74 +1606,8 @@
 se)
-                cookies = dict_from_cookiejar(login_sess.cookies)
@@ -2483,18 +2483,15 @@
             r = 
-login_
 sess
+ion
.get
@@ -3516,16 +3516,16 @@
ext)[0]
 
@@ -3588,16 +3588,24 @@
ref + '&
+passkey=
' + pass
|
f8b9e697f4d49f35dda322817ac8ac63d96b6732 | Add failing wait tests | nclxd/tests/test_container_utils.py | nclxd/tests/test_container_utils.py | # Copyright 2015 Canonical Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
| Python | 0.000005 | @@ -621,8 +621,991 @@
 icense.
+
+
+import mock
+from nova import exception
+from nova import test
+
+from nclxd.nova.virt.lxd import container_utils
+from nclxd import tests
+
+
+class LXDTestContainerUtils(test.NoDBTestCase):
+
+    def setUp(self):
+        super(LXDTestContainerUtils, self).setUp()
+        self.ml = tests.lxd_mock()
+        lxd_patcher = mock.patch('pylxd.api.API',
+                                 mock.Mock(return_value=self.ml))
+        lxd_patcher.start()
+        self.addCleanup(lxd_patcher.stop)
+
+        self.container_utils = container_utils.LXDContainerUtils()
+
+    def test_wait_undefined(self):
+        self.assertRaises(exception.NovaException,
+                          self.container_utils.wait_for_container,
+                          None)
+
+    def test_wait_timedout(self):
+        self.ml.wait_container_operation.return_value = False
+        self.assertRaises(exception.NovaException,
+                          self.container_utils.wait_for_container,
+                          'fake')
|
04979d5536a9787cac0024dd6e767f0baec280fd | Update __init__.py | tendrl/node_agent/objects/definition/__init__.py | tendrl/node_agent/objects/definition/__init__.py | import importlib
import namespaces as ns
import yaml
from tendrl.commons import objects
from tendrl.commons import etcdobj
from tendrl.node_agent.objects.definition import master
# Definitions need there own special init and have to be present in the NS
# before anything else, Hence subclassing BaseObject
class Definition(objects.BaseObject):
def __init__(self, *args, **kwargs):
super(Definition, self).__init__(*args, **kwargs)
self.value = '_tendrl/definitions'
self.master = master
self._parsed_defs = yaml.safe_load(self.master)
self._etcd_cls = _DefinitionEtcd
def get_obj_definition(self, namespace, obj_name):
raw_ns = "namespace.%s" % namespace
raw_obj = self._get_parsed_defs()[raw_ns]['objects'][obj_name]
for atom_name, atom in raw_obj.get('atoms', {}).iteritems():
atom_mod = atom['run'].split(".atoms.")[-1].split(".")[0]
atom_fqdn = "%s.objects.%s.atoms.%s" % (namespace,
obj_name.lower(),
atom_mod)
atom_cls = getattr(importlib.import_module(atom_fqdn), atom_name)
tendrl_ns.add_atom(obj_name, atom_name, atom_cls)
for flow_name, flow in raw_obj.get('flows', {}).iteritems():
flow_mod = flow['run'].split(".flows.")[-1].split(".")[0]
flow_fqdn = "%s.objects.%s.flows.%s" % (namespace,
obj_name.lower(),
flow_mod)
flow_cls = getattr(importlib.import_module(flow_fqdn), flow_name)
tendrl_ns.add_obj_flow(obj_name, flow_name, flow_cls)
return ns.Namespace(attrs=raw_obj['attrs'],
enabled=raw_obj['enabled'],
obj_list=raw_obj.get('list', ""),
obj_value=raw_obj['value'],
atoms=raw_obj.get('atoms', {}),
flows=raw_obj.get('flows', {}),
help=raw_obj['help'])
def get_flow_definition(self, namespace, flow_name):
raw_ns = "namespace.%s" % namespace
raw_flow = self._get_parsed_defs()[raw_ns]['flows'][flow_name]
flow_mod = raw_flow['run'].split(".flows.")[-1].split(".")[0]
flow_fqdn = "%s.flows.%s" % (namespace, flow_mod)
flow_cls = getattr(importlib.import_module(flow_fqdn), flow_name)
tendrl_ns.add_flow(flow_name, flow_cls)
return ns.Namespace(atoms=raw_flow['atoms'],
help=raw_flow['help'],
enabled=raw_flow['enabled'],
inputs=raw_flow['inputs'],
pre_run=raw_flow.get('pre_run', []),
post_run=raw_flow.get('post_run', []),
type=raw_flow['type'],
uuid=raw_flow['uuid']
)
def _get_parsed_defs(self):
self._parsed_defs = yaml.safe_load(self.master)
return self._parsed_defs
class _DefinitionEtcd(etcdobj.EtcdObj):
"""A table of the Definitions, lazily updated
"""
__name__ = '_tendrl/definitions'
_tendrl_cls = Definition | Python | 0.000072 | @@ -516,16 +516,21 @@
= master
+.read
%0A
@@ -3325,16 +3325,17 @@
cls = Definition
+%0A
|
51ee19f41e6fc48d4791bde97c5d28d55d76cdf4 | Add brute force inplementation | solvers/BruteForce.py | solvers/BruteForce.py | Python | 0.999725 | @@ -0,0 +1,981 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%0Afrom itertools import permutations%0A%0Afrom base_solver import BaseSolver%0A%0Aclass BruteForceSolver(BaseSolver):%0A def run_search(self):%0A # get list of mid nodes names%0A mid_nodes = %5B%5D%0A for node in self.task.mid_nodes:%0A mid_nodes.append(node.name)%0A%0A # iterate over permutations generator%0A best_distance = float('inf')%0A best_solution = None%0A cycles = 0%0A for permutation in permutations(mid_nodes):%0A # check permutation distance%0A path = %5Bself.task.start.name, %5D%0A path.extend(permutation)%0A path.append(self.task.finish.name)%0A distance = self.task.get_path_distance(path)%0A%0A # check if this is the best solution so far%0A if distance %3C best_distance:%0A best_distance = distance%0A best_solution = path%0A%0A cycles += 1%0A%0A return best_solution, best_distance, cycles%0A
|
|
46496d8761ae94a349ed3b592ec7ee7e0c7e1a15 | Remove unused import; add missing import | gitc_utils.py | gitc_utils.py | #
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import shutil
import git_command
import git_config
# TODO (sbasi) - Remove this constant and fetch manifest dir from /gitc/.config
GITC_MANIFEST_DIR = '/usr/local/google/gitc/'
GITC_FS_ROOT_DIR = '/gitc/manifest-rw/'
NUM_BATCH_RETRIEVE_REVISIONID = 300
def _set_project_revisions(projects):
"""Sets the revisionExpr for a list of projects.
Because of the limit of open file descriptors allowed, length of projects
should not be overly large. Recommend calling this function multiple times
with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
@param projects: List of project objects to set the revionExpr for.
"""
# Retrieve the commit id for each project based off of it's current
# revisionExpr and it is not already a commit id.
project_gitcmds = [(
project, git_command.GitCommand(None,
['ls-remote',
project.remote.url,
project.revisionExpr],
capture_stdout=True, cwd='/tmp'))
for project in projects if not git_config.IsId(project.revisionExpr)]
for proj, gitcmd in project_gitcmds:
if gitcmd.Wait():
print('FATAL: Failed to retrieve revisionExpr for %s' % project)
sys.exit(1)
proj.revisionExpr = gitcmd.stdout.split('\t')[0]
def generate_gitc_manifest(client_dir, manifest):
"""Generate a manifest for shafsd to use for this GITC client.
@param client_dir: GITC client directory to install the .manifest file in.
@param manifest: XmlManifest object representing the repo manifest.
"""
print('Generating GITC Manifest by fetching revision SHAs for each '
'project.')
project_gitcmd_dict = {}
index = 0
while index < len(manifest.projects):
_set_project_revisions(
manifest.projects[index:(index+NUM_BATCH_RETRIEVE_REVISIONID)])
index += NUM_BATCH_RETRIEVE_REVISIONID
# Save the manifest.
with open(os.path.join(client_dir, '.manifest'), 'w') as f:
manifest.Save(f)
| Python | 0.000007 | @@ -656,13 +656,10 @@
rt s
-hutil
+ys
%0A%0Aim
|
4dfc0c49cec86f3c03b90fa66e1fc9de2ac665e6 | Add migration file (fix fields) | samples/migrations/0012_auto_20170512_1138.py | samples/migrations/0012_auto_20170512_1138.py | Python | 0 | @@ -0,0 +1,716 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11 on 2017-05-12 14:38%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('samples', '0011_fluvaccine_date_applied'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='collectedsample',%0A name='collection_date',%0A field=models.DateField(blank=True, null=True, verbose_name='Data de coleta'),%0A ),%0A migrations.AlterField(%0A model_name='fluvaccine',%0A name='date_applied',%0A field=models.DateField(blank=True, null=True, verbose_name='Data de aplica%C3%A7%C3%A3o'),%0A ),%0A %5D%0A
|
|
947551083798e3125cf0782df44cc18728c6bca4 | test messages | src/eduid_webapp/security/tests/test_msgs.py | src/eduid_webapp/security/tests/test_msgs.py | Python | 0.000004 | @@ -0,0 +1,2136 @@
+# -*- coding: utf-8 -*-%0A%0Aimport unittest%0A%0Afrom eduid_webapp.security.helpers import SecurityMsg%0A%0A%0Aclass MessagesTests(unittest.TestCase):%0A%0A def test_messages(self):%0A %22%22%22%22%22%22%0A self.assertEqual(str(SecurityMsg.out_of_sync.value), 'user-out-of-sync')%0A self.assertEqual(str(SecurityMsg.stale_reauthn.value), 'security.stale_authn_info')%0A self.assertEqual(str(SecurityMsg.rm_verified.value), 'nins.verified_no_rm')%0A self.assertEqual(str(SecurityMsg.rm_success.value), 'nins.success_removal')%0A self.assertEqual(str(SecurityMsg.temp_problem.value), 'Temporary technical problems')%0A self.assertEqual(str(SecurityMsg.already_exists.value), 'nins.already_exists')%0A self.assertEqual(str(SecurityMsg.add_success.value), 'nins.successfully_added')%0A self.assertEqual(str(SecurityMsg.max_tokens.value), 'security.u2f.max_allowed_tokens')%0A self.assertEqual(str(SecurityMsg.max_webauthn.value), 'security.webauthn.max_allowed_tokens')%0A self.assertEqual(str(SecurityMsg.missing_data.value), 'security.u2f.missing_enrollment_data')%0A self.assertEqual(str(SecurityMsg.u2f_registered.value), 'security.u2f_register_success')%0A self.assertEqual(str(SecurityMsg.no_u2f.value), 'security.u2f.no_token_found')%0A self.assertEqual(str(SecurityMsg.no_challenge.value), 'security.u2f.missing_challenge_data')%0A self.assertEqual(str(SecurityMsg.no_token.value), 'security.u2f.missing_token')%0A self.assertEqual(str(SecurityMsg.long_desc.value), 'security.u2f.description_to_long')%0A self.assertEqual(str(SecurityMsg.rm_u2f_success.value), 'security.u2f-token-removed')%0A self.assertEqual(str(SecurityMsg.no_pdata.value), 'security.webauthn-missing-pdata')%0A self.assertEqual(str(SecurityMsg.webauthn_success.value), 'security.webauthn_register_success')%0A self.assertEqual(str(SecurityMsg.no_last.value), 'security.webauthn-noremove-last')%0A self.assertEqual(str(SecurityMsg.rm_webauthn.value), 'security.webauthn-token-removed')%0A self.assertEqual(str(SecurityMsg.no_webauthn.value), 'security.webauthn-token-notfound')%0A
|
|
fcfb84838c7bb111fb9710f4984767b2233caed3 | test commit | test.py | test.py | Python | 0.000002 | @@ -0,0 +1,62 @@
+print(%22Content-Type: text/plain%22)%0Aprint(%22%22)%0Aprint(%22Fuck you%22)%0A
|
|
f4f3429d157988d4823f20d5155b951f8471fb1b | Fix test app | test.py | test.py |
from gunicorn.httpserver import WSGIServer
def app(environ, start_response):
"""Simplest possible application object"""
data = 'Hello, World!\n'
status = '200 OK'
response_headers = [
('Content-type','text/plain'),
('Content-Length', len(data))
]
start_response(status, response_headers)
return [data]
if __name__ == '__main__':
server = WSGIServer(("127.0.0.1", 8000), 1, simple_app)
server.run() | Python | 0.000013 | @@ -1,51 +1,4 @@
-%0Afrom gunicorn.httpserver import WSGIServer%0A%0A%0A%0A
%0Adef
@@ -302,107 +302,4 @@
a%5D%0A%0A
-if __name__ == '__main__':%0A server = WSGIServer((%22127.0.0.1%22, 8000), 1, simple_app)%0A server.run()
|
bc0aa69adc5b1e290941c221ddd498d3fb92244e | Add simple recipe tagger experiment | test.py | test.py | Python | 0.000039 | @@ -0,0 +1,2428 @@
+import nltk%0Afrom nltk.classify import MaxentClassifier%0A%0A# Set up our training material in a nice dictionary.%0Atraining = %7B%0A 'ingredients': %5B%0A 'Pastry for 9-inch tart pan',%0A 'Apple cider vinegar',%0A '3 eggs',%0A '1/4 cup sugar',%0A %5D,%0A 'steps': %5B%0A 'Sift the powdered sugar and cocoa powder together.',%0A 'Coarsely crush the peppercorns using a mortar and pestle.',%0A 'While the vegetables are cooking, scrub the pig ears clean and cut away any knobby bits of cartilage so they will lie flat.',%0A 'Heat the oven to 375 degrees.',%0A %5D,%0A%7D%0A%0A# Set up a list that will contain all of our tagged examples,%0A# which we will pass into the classifier at the end.%0Atraining_set = %5B%5D%0Afor key, val in training.items():%0A for i in val:%0A # Set up a list we can use for all of our features,%0A # which are just individual words in this case.%0A features = %5B%5D%0A %0A # Before we can tokenize words, we need to break the%0A # text out into sentences.%0A sentences = nltk.sent_tokenize(i)%0A for sentence in sentences:%0A features = features + nltk.word_tokenize(sentence)%0A%0A # For this example, it's a good idea to normalize for case.%0A # You may or may not need to do this.%0A features = %5Bi.lower() for i in features%5D%0A %0A # Each feature needs a value. A typical use for a case like this%0A # is to use True or 1, though you can use almost any value for%0A # a more complicated application or analysis.%0A features = dict(%5B(i, True) for i in features%5D)%0A %0A # NLTK expects you to feed a classifier a list of tuples%0A # where each tuple is (features, tag).%0A training_set.append((features, key))%0A%0A%0Adef classify(s):%0A p = classifier.prob_classify(s)%0A import json%0A print(%22%25s%5Cn %3E%3E%3E %25s, %25s%5Cn%22 %25 (json.dumps(s), p.max(), p.prob(p.max())))%0A return (p.max(), p.prob(p.max()))%0A%0A# Train up our classifier%0A# TODO: get http://www.umiacs.umd.edu/~hal/megam/version0_91/ working%0Aclassifier = MaxentClassifier.train(training_set)%0A%0Aprint()%0Aprint()%0A%0A# Test it out!%0A# You need to feed the classifier your data in the same format you used%0A# to train it, in this case individual lowercase words.%0Aclassify(%7B'apple': True, 'cider': True, 'vinegar': True, 'cocoa': True%7D)%0Aclassify(%7B'heat': True, 'oven': True%7D)%0Aclassify(%7B'prepare': True, 'oven': True%7D)%0Aclassify(%7B'nothing': True%7D)%0A
|
|
5d9200298ab660bee79d7958f8e155023893be08 | Change author | l10n_cr_account_banking_cr_bcr/__openerp__.py | l10n_cr_account_banking_cr_bcr/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'BCR Account Banking',
'version': '0.1',
'license': 'AGPL-3',
'author': 'CLEARCORP S.A.',
'website': 'http://www.clearcorp.co.cr',
'category': 'Accounting & Finance',
'depends': [
'account_banking_ccorp',
],
'init_xml': [],
'update_xml': [],
'demo_xml': [],
'description': '',
'active': False,
'installable': True,
}
| Python | 0.000003 | @@ -1124,29 +1124,24 @@
hor': 'C
-LEARCORP S.A.
+learCorp
',%0A '
@@ -1275,16 +1275,19 @@
ng_ccorp
+_dg
',%0A
|
ab458e10742897c692e3d4e4066ed193e141e258 | add filterfuncs module | filterfuncs.py | filterfuncs.py | Python | 0.000001 | @@ -0,0 +1,617 @@
+from tools import pipeline_helpers%0Aimport pandas as pd%0A%0Adef run1(infile, features_label, output_label):%0A %22%22%22%0A Handle variant data by only keeping rows where 10-90%25 of samples have%0A variants.%0A%0A For CNV data, don't do any filtering.%0A%0A Otherwise, simply remove rows with zero variance.%0A %22%22%22%0A if (features_label == 'exome_variants' or 'variants' in output_label):%0A d = pipeline_helpers.remove_nfrac_variants(infile, nfrac=0.1)%0A elif features_label == 'cnv':%0A return pd.read_table(infile, index_col=0)%0A else:%0A d = pipeline_helpers.remove_zero_variance(infile)%0A return d%0A%0A
|
|
b9d5e015b291f27becc682f05a12ec5c6a0cf467 | Implement module to create new pads on collabedit.com. | gygax/modules/pad.py | gygax/modules/pad.py | Python | 0 | @@ -0,0 +1,1186 @@
+# -*- coding: utf-8 -*-
+
+"""
+:mod:`gygax.modules.pad` --- Module for creating pads on collabedit.com
+=======================================================================
+"""
+
+from http import client
+
+from gygax.modules import admin
+
+def pad(bot, sender, text):
+    if not admin.is_admin(sender):
+        bot.reply("unauthorized")
+        return
+
+    # We can't use urllib, because collabedit uses weird redirects which make
+    # urllib think we are redirected in an endless loop.
+    conn = client.HTTPConnection("collabedit.com")
+    conn.request("GET", "/new")
+    r1 = conn.getresponse()
+    if r1.status != 302:
+        raise Exception("GET /new returned {} {}".format(r1.status, r1.reason))
+    headers = {"Cookie": r1.getheader("Set-Cookie").split(";")[0]}
+    r1.read()  # Read the response body so we can make a new request.
+
+    conn.request("GET", r1.getheader("Location"), headers=headers)
+    r2 = conn.getresponse()
+    if r2.status != 302:
+        raise Exception("GET {} returned {} {}".format(
+            r1.getheader("Location"), r2.status, r2.reason))
+    bot.reply("http://collabedit.com{}".format(r2.getheader("Location")))
+    conn.close()
+pad.command = ".pad"
|
|
7cc86a96427cc35824960c01d84fbe8d45364670 | Add admin page for User | helios_auth/admin.py | helios_auth/admin.py | Python | 0.000143 | @@ -0,0 +1,170 @@
+from django.contrib import admin
+from helios.models import User
+
+class UserAdmin(admin.ModelAdmin):
+ exclude = ('info', 'token')
+
+admin.site.register(User, UserAdmin)
|
|
e99700ff985e9821faf390ca6070a0c879eafc20 | Add perkeyavg python example | src/python/PerKeyAvg.py | src/python/PerKeyAvg.py | Python | 0.000001 | @@ -0,0 +1,789 @@
+"""
+>>> from pyspark.context import SparkContext
+>>> sc = SparkContext('local', 'test')
+>>> b = sc.parallelize([("coffee", 1), ("pandas", 2), ("coffee", 3), ("very", 4)])
+>>> perKeyAvg(b)
+
+"""
+
+import sys
+
+from pyspark import SparkContext
+
+def perKeyAvg(nums):
+    """Compute the avg"""
+    sumCount = nums.combineByKey((lambda x: (x,1)),
+                                 (lambda x, y: (x[0] + y, x[1] + 1)),
+                                 (lambda x, y: (x[0] + y[0], x[1] + y[1])))
+    return sumCount.collectAsMap()
+
+if __name__ == "__main__":
+    master = "local"
+    if len(sys.argv) == 2:
+        master = sys.argv[1]
+    sc = SparkContext(master, "Sum")
+    nums = sc.parallelize([("coffee", 1), ("pandas", 2), ("coffee", 3), ("very", 4)])
+    avg = perKeyAvg(nums)
+    print avg
|
|
a640bf45c4fb8829888f664e48058d6647473449 | Fix migrations | lowfat/migrations/0113_merge_20171103_0948.py | lowfat/migrations/0113_merge_20171103_0948.py | Python | 0.000006 | @@ -0,0 +1,336 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.5 on 2017-11-03 09:48
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('lowfat', '0112_auto_20171031_1133'),
+        ('lowfat', '0111_auto_20171009_0933'),
+    ]
+
+    operations = [
+    ]
|