column         type           range
commit         stringlengths  40 to 40
subject        stringlengths  1 to 3.25k
old_file       stringlengths  4 to 311
new_file       stringlengths  4 to 311
old_contents   stringlengths  0 to 26.3k
lang           stringclasses  3 values
proba          float64        0 to 1
diff           stringlengths  0 to 7.82k
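Each record below gives a commit hash, the commit subject, the old and new file paths, the previous file contents (old_contents, present only when the file is not newly added), the language, a classifier probability, and a diff. Hunk headers of the form @@ -a,b +c,d @@ count characters into old_contents rather than lines; for example, the tools/perf/metrics/timeline.py row replaces the 8 characters at offset 2693 with 293 new ones. In serialized dumps the diff text also arrives percent-encoded (%0A for newlines, %22 for double quotes). A minimal Python sketch for unpacking a row's diff field, assuming only this observed layout:

import re
from urllib.parse import unquote

HUNK_HEADER = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@')

def decode_diff(raw):
    # Reverse the percent-encoding used in raw dumps (%0A -> newline, %22 -> '"', ...).
    return unquote(raw)

def hunk_headers(diff_text):
    # Yield (old_start, old_len, new_start, new_len) tuples of *character* offsets.
    for m in HUNK_HEADER.finditer(diff_text):
        yield tuple(int(g) for g in m.groups())

# e.g. the tools/perf/metrics/timeline.py row: one hunk, 8 chars replaced by 293.
print(list(hunk_headers('@@ -2693,8 +2693,293 @@')))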
863ec839e24f2f17ba9d1dfb1177592f34cfc5e3
Create Transaction.py
pyvogue/Transaction.py
pyvogue/Transaction.py
Python
0.000001
@@ -0,0 +1,1313 @@

import requests
import json
import urllib



class Transaction():


    def getall(self,trx,res,decode_content=False):
        """
        Gets all your transactions

        args:
            trx -- the transaction id to be fetched
            res -- the response type expected : json or xml
        """
        url = "https://voguepay.com/?v_transaction_id="+str(trx)+"&type="+str(res)

        if ( decode_content ):
            dec = self.__parse_json(requests.get(url).text)
            return (dec)
        else:
            return requests.get(url).text


    def paylink(self,param):

        """
        Generate a one time payment link from params

        args:
            param -- a dictionary of payment params

        e.g
            params = {'v_merchant_id':'14307-23682',
                      'memo':'testing',
                      'total':'1200'
                     }

        """

        urlg = "https://voguepay.com/?p=linkToken&"+urllib.urlencode(param)
        return requests.get(urlg)


    def __parse_json(self, response_obj):
        """
        This function takes in json response sent back by the
        server
        Returns a python dictionary of status, email, amount,memo etc
        """
        data = json.loads(response_obj)
        return data
3ae0ea21cc6b1afadb0dd72e29016385d18167ab
Add FifoReader class to utils
DebianDevelChangesBot/utils/fiforeader.py
DebianDevelChangesBot/utils/fiforeader.py
Python
0
@@ -0,0 +1,2833 @@
# -*- coding: utf-8 -*-
#
# Debian Changes Bot
# Copyright (C) 2008 Chris Lamb <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import fcntl
import select
import threading
import traceback

class FifoReader(object):
    __shared_state = {}

    read_lock = threading.Lock()
    stop_lock = threading.Lock()
    running = False
    quitfds = None

    def __init__(self):
        self.__dict__ = self.__shared_state
        print "lol"

    def start(self, callback, fifo_loc):
        self.callback = callback
        self.fifo_loc = fifo_loc
        threading.Thread(target=self.run).start()

    def run(self):
        self.read_lock.acquire()
        try:
            for fileobj in self.gen_messages():
                try:
                    self.callback(fileobj)
                except Exception, exc:
                    print "Uncaught exception caught inside fiforeader"
                    traceback.print_exc()
                finally:
                    fileobj.close()
        finally:
            self.read_lock.release()

    def gen_messages(self):
        self.running = True
        self.quitfds = os.pipe()

        while self.running:
            fifo = os.open(self.fifo_loc, os.O_RDONLY | os.O_NONBLOCK)
            flags = fcntl.fcntl(fifo, fcntl.F_GETFL)
            fcntl.fcntl(fifo, fcntl.F_SETFD, flags & ~os.O_NONBLOCK)

            readfds, _, _ = select.select([fifo, self.quitfds[0]], [], [])

            # If our anonymous descriptor was written to, exit loop
            if not self.running or self.quitfds[0] in readfds:
                os.close(fifo)
                os.close(self.quitfds[0])
                os.close(self.quitfds[1])
                break

            if fifo not in readfds:
                continue

            yield os.fdopen(fifo)

    def stop(self):
        self.stop_lock.acquire()
        try:
            if self.running:
                self.running = False
                os.write(self.quitfds[1], '1')

                # Block until we have actually stopped
                self.read_lock.acquire()
                self.read_lock.release()
        finally:
            self.stop_lock.release()
5c3304ffbd78ee47b2c4d197165de08200e77632
Fix the `week` behavour to match api2
standup/apps/status/helpers.py
standup/apps/status/helpers.py
import re

from datetime import date, datetime, timedelta

from standup.database.helpers import paginate as _paginate


def paginate(statuses, page=1, startdate=None, enddate=None, per_page=20):
    from standup.apps.status.models import Status
    if startdate:
        statuses = statuses.filter(Status.created >= startdate)
    if enddate:
        statuses = statuses.filter(Status.created <= enddate)
    return _paginate(statuses, int(page), per_page=per_page)


def startdate(request):
    dates = request.args.get('dates')
    day = request.args.get('day')
    week = request.args.get('week')
    if dates == '7d':
        return date.today() - timedelta(days=7)
    elif dates == 'today':
        return date.today()
    elif isday(day):
        return get_day(day)
    elif isday(week):
        return get_day(week)
    return None


def enddate(request):
    day = request.args.get('day')
    week = request.args.get('week')
    if isday(day):
        return get_day(day) + timedelta(days=1)
    elif isday(week):
        return get_day(week) + timedelta(days=7)
    return None


def isday(day):
    return day and re.match('^\d{4}-\d{2}-\d{2}$', day)


def get_day(day):
    return datetime.strptime(day, '%Y-%m-%d')


def get_weeks(num_weeks=10):
    weeks = []
    current = datetime.now()
    for i in range(num_weeks):
        weeks.append({"start_date": week_start(current), \
                      "end_date": week_end(current), \
                      "weeks_ago": i })
        current = current - timedelta(7)
    return weeks


def week_start(d):
    """Weeks start on the Monday on or before the given date"""
    return d - timedelta(d.isoweekday() - 1)


def week_end(d):
    """Weeks start on the Sunday on or after the given date"""
    return d + timedelta(7 - d.isoweekday())
Python
0.999933
@@ -795,32 +795,43 @@
         return 
+week_start(
 get_day(week)
@@ -827,16 +827,17 @@
 ay(week)
+)
     ret
@@ -1035,32 +1035,41 @@
         return 
+week_end(
 get_day(week) +
@@ -1069,27 +1069,8 @@
 eek)
- + timedelta(days=7
 )
6e096fc10c7eb580ec11fbee585dd2aa3210e2b3
add settings example
blog/settings_example.py
blog/settings_example.py
Python
0
@@ -0,0 +1,379 @@
SITE_URL = "http://project.com"
SITE_NAME = "Project Name"

COMMENTS_APP = 'threadedcomments'  # for example
RECAPTCHA_PUBLIC_KEY = 'put-your-key-here'
RECAPTCHA_PRIVATE_KEY = 'put-your-key-here'

SOUTH_MIGRATION_MODULES = {
    'taggit': 'taggit.south_migrations',
}

TAGGIT_TAGCLOUD_MIN = 1
TAGGIT_TAGCLOUD_MAX = 8

GRAPPELLI_ADMIN_TITLE = u'{} Administration'.format(SITE_NAME)
edfba32b5dd24c0fe58da9bbbe84267e81754233
add demo.py
demo.py
demo.py
Python
0.000001
@@ -0,0 +1,417 @@
import pdb
import json
from pprint import pprint
from chrome_debugger import protocol
from chrome_debugger import interface
from chrome_debugger import websocket

context = protocol.connect("ws://localhost:9222/devtools/page/D08C4454-9122-6CC8-E492-93A22F9C9727")

header = websocket.parse_response(context["sock"].recv(4096))

interface.debugger_enable(context)

while True:
    pprint(protocol.recv(context))
863ae7a76567913f60a758a9fb974a27e9bc58d2
add 21
p021.py
p021.py
Python
0.999999
@@ -0,0 +1,142 @@
from utils import divisors


def d(n):
    return sum(divisors(n))

print sum(filter(lambda n: n != d(n) and n == d((d(n))), range(1, 10000)))
a3a92435781300966ca59d5316693d0306abd600
Create osrm_OD_matrix.py
osrm_OD_matrix.py
osrm_OD_matrix.py
Python
0.006322
@@ -0,0 +1,2330 @@
# using osrm to create a big dirty OD matrix

import csv
import requests
import polyline
import time
import json

db_points = []

# grab points from csv file - just grab, x, y, and a unique ID
# the headers may be different depending on your data!
with open("db.csv", 'r') as csvfile:
    reader = csv.DictReader(csvfile)
    n = 0
    q = 0
    for row in reader:
        # limiting number of points for testing, may do all eventually!
        if n % 1 == 0:
            q += 1
            db_points.append([row['X'],row['Y'],row['dbuid']])
        n += 1

# split up into managable size - 2000 destinations seems managable
point_count = len(db_points)
points_split_list = []
single_list = []
i = 1
for row in db_points:
    single_list.append(row)
    if i % 3000 == 0:
        points_split_list.append(single_list)
        single_list = []
    if i == len(db_points):
        points_split_list.append(single_list)
    i += 1

# print lenghts of before and after
print len(db_points)
print len(points_split_list)
for x in points_split_list:
    print len(x)
# make sure these total!

# list of ids
dbuids = []
for row in db_points:
    dbuids.append(row[2])
print len(dbuids)


# set up that awesome marix were going to output!
the_matrix = []

# lets add in a header row!
the_matrix.append([''] + dbuids)
print len(the_matrix)
print len(the_matrix[0])

# the start time for time timing
start_time = time.time()


# loop over the origins
for origin in db_points:

    # the output row!
    out_row = [origin[2]]

    for points in points_split_list:

        polyline_list = []

        polyline_list.append((float(origin[1]),float(origin[0])))

        # grab x y for lists
        for row in points:

            dr_tuple = (float(row[1]),float(row[0]))
            polyline_list.append(dr_tuple)

        line = polyline.encode(polyline_list, 5)

        # what to send
        url = 'http://localhost:5000/table/v1/driving/polyline(' + line + ')?sources=0'

        # sending and recieving
        page = requests.get(url)
        data = json.loads(page.content)
        durs = data["durations"][0]
        del durs[0] # deleting initial 0
        out_row = out_row + durs

    the_matrix.append(out_row)

    # this break is for testing!
    break

print time.time() - start_time

for row in the_matrix:
    print len(row)
41a533ffddfebc3303a1e882bfaf1fcdd243828e
add api like test
myideas/core/tests/test_like_api.py
myideas/core/tests/test_like_api.py
Python
0
@@ -0,0 +1,1441 @@
from django.test import TestCase
from django.test.client import Client
from django.shortcuts import resolve_url as r
from django.contrib.auth.models import User
from myideas.core.models import Ideas


class LikeApiTest(TestCase):
    def setUp(self):
        self.client = Client()
        self.username = 'diego'
        self.email = '[email protected]'
        self.password = 'test'
        user = User.objects.create_user(
            self.username, self.email, self.password
        )
        self.idea = Ideas.objects.create(
            user=user, title='test app'
        )

    def api_signin_and_get(self):
        self.login = self.client.login(
            username=self.username, password=self.password
        )
        self.response = self.client.get(r(self.idea.get_api_like_url()))

    def test_get(self):
        """GET 'Ideas like api' must return status code 200"""
        self.api_signin_and_get()
        self.assertEqual(200, self.response.status_code)

    def test_api_status(self):
        self.api_signin_and_get()
        self.assertTrue(self.response)

    def test_api_likes_count(self):
        self.api_signin_and_get()
        self.assertEqual(1, self.idea.likes.count())

    def test_access_forbidden(self):
        """GET page not logged in must return status code 403"""
        self.response = self.client.get(r(self.idea.get_api_like_url()))
        self.assertEqual(403, self.response.status_code)
10c19d0c7d7cdb2c823a698db8ca128134f32c5a
Add beam potential generation
otz/Beam.py
otz/Beam.py
Python
0
@@ -0,0 +1,2257 @@
import pdb
import numpy as np
import scipy as sp

h = 6.626E-34
c = 3.0E8

def uniform(max_angle, intensity):
    def profile(phi):
        if (abs(phi) < max_angle):
            return intensity
        else:
            return 0
    return profile

def default_profile(angle):
    return uniform(np.pi/8.0, 1)(angle)

class Bead:
    def __init__(self, diameter, index=2, mass=1, r=0, z=None):
        self.disable = diameter
        self.radius = diameter/2.0
        self.mass = mass
        self.r = r
        if z is None:
            z = diameter
        self.z = z
        self.index = index
    def set_position(self, r, z):
        self.r = r
        self.z = z

class Beam:
    def __init__(self, wavelength, profile=default_profile):
        self.profile = profile
        self.wavelength = wavelength
    def force(self, bead):
        r = bead.r
        z = bead.z
        n = bead.index
        R = bead.radius
        d = np.sqrt(z**2+r**2)
        phi_prime = np.arctan2(r,z)

        def theta(phi):
            return np.arctan2(R*np.sin(phi),d-R*np.cos(phi))
        def theta_prime(phi):
            return theta(phi-phi_prime)
        def theta2(phi):
            return np.arcsin(np.sin(phi+theta_prime(phi))/n)
        def delta_theta(phi):
            return 2*theta2(phi)
        def p(phi):
            return self.profile(phi)*h*c/self.wavelength
        def dF_r(phi):
            return -p(phi)*(np.sin(theta_prime(phi))-np.sin(theta_prime(phi)+delta_theta(phi)))
        def dF_z(phi):
            return -p(phi)*(np.cos(theta_prime(phi))-np.cos(theta_prime(phi)+delta_theta(phi)))
        F_r = sp.integrate.quad(dF_r, -np.pi/2.0, np.pi/2.0)
        F_z = sp.integrate.quad(dF_z, -np.pi/2.0, np.pi/2.0)
        return (F_r, F_z)

    def r_potential(self, bead, r_lim=None, z=None, dx = None):
        if r_lim is None:
            r_lim = 2*bead.radius
        if z is not None:
            bead.z = z
        if dx is None:
            dx = r_lim/1e4
        r = np.arange(-r_lim, r_lim, dx)
        def restoring_force(dist):
            bead.r = dist
            return self.force(bead)[0][0]
        force_r = [restoring_force(dist) for dist in r]
        V = sp.integrate.cumtrapz(force_r, r)
        return (r[:-1],V)
ad5018c045a14f2e8360e8118d73d021df10baab
add solution for Course Schedule II
algorithms/courseScheduleII/courseScheduleII.py
algorithms/courseScheduleII/courseScheduleII.py
Python
0
@@ -0,0 +1,665 @@
class Solution:
    # @param {integer} numCourses
    # @param {integer[][]} prerequisites
    # @return {integer[]}
    def findOrder(self, numCourses, prerequisites):
        g = {v: [] for v in xrange(numCourses)}
        deg = {v: 0 for v in xrange(numCourses)}
        s = set(range(numCourses))
        for u, v in prerequisites:
            g[v].append(u)
            deg[u] += 1
            s.discard(u)
        res = []
        while s:
            u = s.pop()
            res.append(u)
            for v in g[u]:
                deg[v] -= 1
                if deg[v] == 0:
                    s.add(v)
        return [] if len(res) != numCourses else res
4557cce84ff91e830f1f1fd241223cff70ceb46e
add directions and a script for how I found duplicate functions
deprecated/utils/tags-to-dup-functions.py
deprecated/utils/tags-to-dup-functions.py
Python
0
@@ -0,0 +1,1559 @@
# Run the below command to generate the TAGS file, then run this script with TAGS as stdin to see duplicate function names
#
# find . -name \*.c -not -path ./deprecated/\* -print0 | xargs -0 etags --declarations -D --no-globals -I --no-members

import collections
import sys

src_file = None
got_section_header = 0

# function name => list of files
functions = collections.defaultdict(lambda: set())


for line in sys.stdin:
    line = line.rstrip('\r\n')
    if got_section_header == 0:
        if line != "\x0c":
            exit("invalid header first line: %s" % line)
        got_section_header = 1
    elif got_section_header == 1:
        src_file, sep, tail = line.rpartition(',')
        if sep != ',':
            exit("invalid header second line: %s" % line)
        got_section_header = 2
    elif got_section_header == 2:
        if line == "\x0c":
            got_section_header = 1
        else:
            definition, sep, tail = line.rpartition('\x7f')
            if sep != '\x7f':
                exit("invalid definition line: %s" % line)
            if definition[-1] == '(':
                head, sep, function = definition.rpartition(' ')
                if sep != ' ':
                    function = sep
                function = function.rstrip('(')
                function = function.lstrip('*')
                functions[function].add(src_file)
    else:
        exit("unexpected value for got_section_header, %s" % got_section_header);


for k, v in functions.iteritems():
    if len(v) > 1:
        print k, len(v), ' '.join(v)
4331b380e43751a7223e0ee1dee6c1c45ad09a67
add levy function
robo/task/levy.py
robo/task/levy.py
Python
0.000004
@@ -0,0 +1,664 @@
'''
Created on 12.07.2015

@author: Aaron Klein
'''
import numpy as np

from robo.task.base_task import BaseTask


class Levy(BaseTask):

    def __init__(self):
        X_lower = np.array([-15])
        X_upper = np.array([10])
        opt = np.array([[1.0]])
        fopt = 0.0
        super(Levy, self).__init__(X_lower, X_upper, opt=opt, fopt=fopt)

    def objective_function(self, x):
        z = 1 + ((x - 1.) / 4.)
        s = np.power((np.sin(np.pi * z)), 2)
        y = (s + ((z - 1) ** 2) * (1 + np.power((np.sin(2 * np.pi * z)), 2)))

        return y[:, np.newaxis]

    def objective_function_test(self, x):
        return self.objective_function(x)
f2c6e7cf6e60eac5222658d89baf28e1e7d12939
Test minimal snoop2
platforms/m3/programming/mbus_snoop_img2.py
platforms/m3/programming/mbus_snoop_img2.py
Python
0.000001
@@ -0,0 +1,575 @@
#!/usr/bin/python

import os
import sys
import logging
import csv

import time
import datetime
from datetime import datetime


import m3_common

#m3_common.configure_root_logger()
#logger = logging.getLogger(__name__)

from m3_logging import get_logger
logger = get_logger(__name__)


def Bpp_callback(address, data, cb0, cb1):
	print(" Time: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3] + " ADDR: 0x" + address.encode('hex') + " DATA: 0x" + data.encode('hex') + " (ACK: " + str(not cb1) + ")")

m = m3_common.mbus_snooper(Bpp_callback)
m.hang_for_messages()
a8c7c6f08571449b618fd57f298546da6ef80ee9
Add a pyastro16.py file to use as an auto doc demo
astrospam/pyastro16.py
astrospam/pyastro16.py
Python
0
@@ -0,0 +1,1457 @@
"""
Python in Astronomy 2016 is the second iteration of the Python in Astronomy
conference series.

This is the docstring for the pyastro module, this gets included as the
description for the module.
"""

import numpy as np


def times(a, b):
    """
    Multiply a by b.

    Parameters
    ----------

    a : `numpy.ndarray`
        Array one.

    b : `numpy.ndarray`
        Array two

    Returns
    -------

    result : `numpy.ndarray`
        ``a`` multiplied by ``b``
    """

    return np.multipy(a, b)


class PyAstro(object):
    """
    This is a class docstring, here you must describe the parameters for the
    creation of the class, which is normally the signature of the ``__init__``
    method.

    Parameters
    ----------
    awesomeness_level : `int`
        How awesome is pyastro16??!

    day : `int`
        Day of the conference. Defaults to 1.

    Attributes
    ----------

    awesomeness_level: `int`
        How awesome is this class attributes?! You can document attributes that
        are not properties here.
    """

    def __init__(self, awesomeness_level, day=1):
        """
        This docstring is not used, because it is for a hidden method.
        """
        self.awesomeness_level = awesomeness_level
        self._day = day

    @property
    def day(self):
        """
        Day of the conference.

        Properties are automatically documented as attributes
        """
        return self._day
9af2a8341b59098d0ebb88f1e71a3452c338b191
Add a plotting example.
Lib/sandbox/pyem/examples/plotexamples.py
Lib/sandbox/pyem/examples/plotexamples.py
Python
0.000001
@@ -0,0 +1,1049 @@
#! /usr/bin/env python
# Last Change: Mon Jun 11 03:00 PM 2007 J

# This is a simple test to check whether plotting ellipsoides of confidence and
# isodensity contours match
import numpy as N
from numpy.testing import set_package_path, restore_path

import pylab as P

set_package_path()
import pyem
restore_path()

# Generate a simple mixture model, plot its confidence ellipses + isodensity
# curves for both diagonal and full covariance matrices
d = 3
k = 3
dim = [0, 2]
# diag model
w, mu, va = pyem.GM.gen_param(d, k)
dgm = pyem.GM.fromvalues(w, mu, va)
# full model
w, mu, va = pyem.GM.gen_param(d, k, 'full', spread = 1)
fgm = pyem.GM.fromvalues(w, mu, va)

def plot_model(gm, dim):
    X, Y, Z, V = gm.density_on_grid(dim = dim)
    h = gm.plot(dim = dim)
    [i.set_linestyle('-.') for i in h]
    P.contour(X, Y, Z, V)
    data = gm.sample(200)
    P.plot(data[:, dim[0]], data[:,dim[1]], '.')

# Plot the contours and the ellipsoids of confidence
P.subplot(2, 1, 1)
plot_model(dgm, dim)

P.subplot(2, 1, 2)
plot_model(fgm, dim)

P.show()
71ac93da2eed58bbd53bb13d4ade308404be18ad
Add auth0.v2.connection
auth0/v2/connection.py
auth0/v2/connection.py
Python
0
@@ -0,0 +1,2533 @@
from .rest import RestClient


class Connection(object):
    """Auth0 connection endpoints"""

    def __init__(self, domain, jwt_token):
        url = 'https://%s/api/v2/connections' % domain

        self.client = RestClient(endpoint=url, jwt=jwt_token)

    def all(self, strategy=None, fields=[], include_fields=True):
        """Retrieves all connections.

        Args:
            strategy (str, optional): Only retrieve connections of
                this strategy type. (e.g: strategy='amazon')

            fields (list of str, optional): A list of fields to include or
                exclude from the result (depending on include_fields). Empty to
                retrieve all fields.

            include_fields (bool, optional): True if the fields specified are
                to be include in the result, False otherwise.

        Returns:
            A list of connection objects.
        """

        params = {'strategy': strategy or None,
                  'fields': ','.join(fields) or None,
                  'include_fields': str(include_fields).lower()}

        return self.client.get(params=params)

    def get(self, id, fields=[], include_fields=True):
        """Retrieve connection by id.

        Args:
            id (str): Id of the connection to get.

            fields (list of str, optional): A list of fields to include or
                exclude from the result (depending on include_fields). Empty to
                retrieve all fields.

            include_fields (bool, optional): True if the fields specified are
                to be include in the result, False otherwise.

        Returns:
            A connection object.
        """

        params = {'fields': ','.join(fields) or None,
                  'include_fields': str(include_fields).lower()}

        return self.client.get(params=params, id=id)

    def delete(self, id):
        """Deletes a connection and all its users.

        Args:
            id: Id of the connection to delete.

        Returns:
            An empty dict.
        """

        return self.client.delete(id=id)

    def update(self, id, body):
        """Modifies a connection.

        Args:
            id: Id of the connection.

            body (dict): Specifies which fields are to be modified, and to what
                values.

        Returns:
            The modified connection object.
        """

        return self.client.patch(id=id, data=body)

    def create(self, body):
        """Creates a new connection. """

        return self.client.post(data=body)
7e600a791bec2f8639aae417a1ea052ca94cf7b9
Add a largish auto-generated test for the aligned bundling feature, along with the script generating it. The test should never be modified manually. If anyone needs to change it, please change the script and re-run it.
testgen/mc-bundling-x86-gen.py
testgen/mc-bundling-x86-gen.py
Python
0.000021
@@ -0,0 +1,2239 @@
#!/usr/bin/python

# Auto-generates an exhaustive and repetitive test for correct bundle-locked
# alignment on x86.
# For every possible offset in an aligned bundle, a bundle-locked group of every
# size in the inclusive range [1, bundle_size] is inserted. An appropriate CHECK
# is added to verify that NOP padding occurred (or did not occur) as expected.

# This script runs with Python 2.6+ (including 3.x)

from __future__ import print_function

BUNDLE_SIZE_POW2 = 4
BUNDLE_SIZE = 2 ** BUNDLE_SIZE_POW2

PREAMBLE = '''
# RUN: llvm-mc -filetype=obj -triple i386-pc-linux-gnu %s -o - \\
# RUN:   | llvm-objdump -triple i386 -disassemble -no-show-raw-insn - | FileCheck %s

# !!! This test is auto-generated from utils/testgen/mc-bundling-x86-gen.py !!!
# It tests that bundle-aligned grouping works correctly in MC. Read the
# source of the script for more details.

  .text
  .bundle_align_mode {0}
'''.format(BUNDLE_SIZE_POW2).lstrip()

ALIGNTO = '  .align {0}, 0x90'
NOPFILL = '  .fill {0}, 1, 0x90'

def print_bundle_locked_sequence(len):
  print('  .bundle_lock')
  print('  .rept {0}'.format(len))
  print('  inc %eax')
  print('  .endr')
  print('  .bundle_unlock')

def generate():
  print(PREAMBLE)

  ntest = 0
  for instlen in range(1, BUNDLE_SIZE + 1):
    for offset in range(0, BUNDLE_SIZE):
      # Spread out all the instructions to not worry about cross-bundle
      # interference.
      print(ALIGNTO.format(2 * BUNDLE_SIZE))
      print('INSTRLEN_{0}_OFFSET_{1}:'.format(instlen, offset))
      if offset > 0:
        print(NOPFILL.format(offset))
      print_bundle_locked_sequence(instlen)

      # Now generate an appropriate CHECK line
      base_offset = ntest * 2 * BUNDLE_SIZE
      inst_orig_offset = base_offset + offset  # had it not been padded...

      if offset + instlen > BUNDLE_SIZE:
        # Padding needed
        print('# CHECK: {0:x}: nop'.format(inst_orig_offset))
        aligned_offset = (inst_orig_offset + instlen) & ~(BUNDLE_SIZE - 1)
        print('# CHECK: {0:x}: incl'.format(aligned_offset))
      else:
        # No padding needed
        print('# CHECK: {0:x}: incl'.format(inst_orig_offset))

      print()
      ntest += 1

if __name__ == '__main__':
  generate()
15150516e1915948b10abed70e964a5b6109013b
Add ExtractAttribute
tohu/derived_generators_NEW.py
tohu/derived_generators_NEW.py
Python
0.000001
@@ -0,0 +1,940 @@
import logging
from operator import attrgetter
from .base_NEW import TohuUltraBaseGenerator

__all__ = ['ExtractAttribute']

logger = logging.getLogger('tohu')


class ExtractAttribute(TohuUltraBaseGenerator):
    """
    Generator which produces items that are attributes extracted from
    the items produced by a different generator.
    """

    def __init__(self, g, attr_name):
        logger.debug(f"Extracting attribute '{attr_name}' from parent={g}")
        self.parent = g
        self.gen = g.clone()
        self.attr_name = attr_name
        self.attrgetter = attrgetter(attr_name)

    def __repr__(self):
        return f"<ExtractAttribute '{self.attr_name}' from {self.parent} >"

    def spawn(self, dependency_mapping):
        logger.warning(f'ExtractAttribute.spawn(): dependency_mapping={dependency_mapping}')
        raise NotImplementedError()

    def __next__(self):
        return self.attrgetter(next(self.gen))
c9afc35d2be96adea47e79a4c0042235e4ffd594
add ldap-filter-cut.py
python/python/openldap/ldap-filter-cut.py
python/python/openldap/ldap-filter-cut.py
Python
0.000001
@@ -0,0 +1,1650 @@
#!/usr/bin/env python

'''
  Copyright (C) 2011 Bryan Maupin <[email protected]>

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''

'''
Takes an OpenLDAP log file, cuts the filters out of it, and writes the unique
filters to a new file.
'''

import re
import sys


def main():
    filters = []

    pattern = re.compile('filter="(.*)"')

    # the input file is the first argument to this script
    infile_name = sys.argv[1]
    infile = open(infile_name)

    for line in infile:
        match = pattern.search(line)
        if match:
            filter = match.group(1)

            if filter not in filters:
                filters.append(filter)

    infile.close()

    print '%s filters found' % (len(filters))

    # the output file is the second argument to this script
    outfile_name = sys.argv[2]
    outfile = open(outfile_name, 'w')

    for filter in filters:
        outfile.write('%s\n' % (filter))

    outfile.close()


# calls the main() function when the script runs
if __name__ == '__main__':
    main()
3a19187e8116e8dc20166786fb1ca4d14b527950
Add missing IDL Visistor class
ppapi/generators/idl_visitor.py
ppapi/generators/idl_visitor.py
Python
0.999835
@@ -0,0 +1,2241 @@
#!/usr/bin/python
#
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Visitor Object for traversing AST """

#
# IDLVisitor
#
# The IDLVisitor class will traverse an AST truncating portions of the tree
# that fail due to class or version filters.  For each node, after the filter
# passes, the visitor will call the 'Arive' member passing in the node and
# and data passing in from the parent call.  It will then Visit the children.
# When done processing children, the visitor will call the 'Depart' member
# before returning
#

class IDLVisitor(object):
  def __init__(self):
    self.depth = 0

  # Return TRUE if the node should be visited
  def VisitFilter(self, node, data):
    return True

  # Return TRUE if data should be added to the childdata list
  def AgrigateFilter(self, data):
    return data is not None

  def Visit(self, node, data):
    self.depth += 1
    if not self.VisitFilter(node, data): return None

    childdata = []
    newdata = self.Arrive(node, data)
    for child in node.GetChildren():
      ret = self.Visit(child, newdata)
      if self.AgrigateFilter(ret):
        childdata.append(ret)
    out = self.Depart(node, newdata, childdata)

    self.depth -= 1
    return out

  def Arrive(self, node, data):
    return data

  def Depart(self, node, data, childdata):
    return data


#
# IDLVersionVisitor
#
# The IDLVersionVisitor will only visit nodes with intervals that include the
# version.  It will also optionally filter based on a class list
#
class IDLVersionVisitor(object):
  def __init__(self, version, classList):
    self.version = version
    self.classes = classes

  def Filter(self, node, data):
    if self.classList and node.cls not in self.classList: return False
    if not node.IsVersion(self.version): return False
    return True

class IDLRangeVisitor(object):
  def __init__(self, vmin, vmax, classList):
    self.vmin = vmin
    self.vmax = vmax
    self.classList = classList

  def Filter(self, node, data):
    if self.classList and node.cls not in self.classList: return False
    if not node.IsVersion(self.version): return False
    return True
bbed7b813b6c809ee9615eabf2fcf4d3156b1c36
Add script to convert release notes from Markdown
tools/convert_release_notes.py
tools/convert_release_notes.py
Python
0
@@ -0,0 +1,1433 @@
import sys
import mistune

print(sys.argv[1])

with open(sys.argv[1], "r") as source_file:
    source = source_file.read()

html = mistune.Markdown()

print()
print("HTML")
print("=====================================")
print("From the <a href=\"\">GitHub release page</a>:\n<blockquote>")
print(html(source))
print("</blockquote>")

class AdafruitBBCodeRenderer:
    def __init__(self, **kwargs):
        self.options = kwargs

    def placeholder(self):
        return ''

    def paragraph(self, text):
        return text + "\n\n"

    def text(self, text):
        return text

    def link(self, link, title, text):
        return "[url={}]{}[/url]".format(link, text)

    def header(self, text, level, raw):
        return "[b][size=150]{}[/size][/b]\n".format(text)

    def codespan(self, text):
        return "[color=#E74C3C][size=95]{}[/size][/color]".format(text)

    def list_item(self, text):
        return "[*]{}[/*]\n".format(text.strip())

    def list(self, body, ordered=True):
        ordered_indicator = "=" if ordered else ""
        return "[list{}]\n{}[/list]".format(ordered_indicator, body)

    def double_emphasis(self, text):
        return "[b]{}[/b]".format(text)

bbcode = mistune.Markdown(renderer=AdafruitBBCodeRenderer())

print()
print("BBCode")
print("=====================================")
print("From the [url=]GitHub release page[/url]:\n[quote]")
print(bbcode(source))
print("[/quote]")
c16c04bde2ace97a2eec000c87e23cfc27bfd7ec
Print trace counters with trace events.
tools/perf/metrics/timeline.py
tools/perf/metrics/timeline.py
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections

from metrics import Metric
from telemetry.page import page_measurement

TRACING_MODE = 'tracing-mode'
TIMELINE_MODE = 'timeline-mode'


class TimelineMetric(Metric):
  def __init__(self, mode):
    assert mode in (TRACING_MODE, TIMELINE_MODE)
    super(TimelineMetric, self).__init__()
    self._mode = mode
    self._model = None
    self._thread_for_tab = None

  def Start(self, page, tab):
    self._model = None
    self._thread_for_tab = None

    if self._mode == TRACING_MODE:
      if not tab.browser.supports_tracing:
        raise Exception('Not supported')
      tab.browser.StartTracing()
    else:
      assert self._mode == TIMELINE_MODE
      tab.StartTimelineRecording()

  def Stop(self, page, tab):
    if self._mode == TRACING_MODE:
      # This creates an async trace event in the render process for tab that
      # will allow us to find that tab during the AddTracingResultsForTab
      # function.
      success = tab.EvaluateJavaScript("""
          console.time("__loading_measurement_was_here__");
          console.timeEnd("__loading_measurement_was_here__");
          console.time.toString().indexOf('[native code]') != -1;
          """)
      trace_result = tab.browser.StopTracing()
      if not success:
        raise page_measurement.MeasurementFailure(
            'Page stomped on console.time')
      self._model = trace_result.AsTimelineModel()

      events = [s for s in self._model.GetAllEventsOfName(
          '__loading_measurement_was_here__')
          if s.parent_slice == None]
      assert len(events) == 1, 'Expected one marker, got %d' % len(events)
      # TODO(tonyg): This should be threads_for_tab and report events for both
      # the starting thread and ending thread.
      self._thread_for_tab = events[0].start_thread
    else:
      tab.StopTimelineRecording()
      self._model = tab.timeline_model
      self._thread_for_tab = self._model.GetAllThreads()[0]

  def AddResults(self, tab, results):
    assert self._model

    events = self._thread_for_tab.all_slices

    events_by_name = collections.defaultdict(list)
    for e in events:
      events_by_name[e.name].append(e)

    for event_name, event_group in events_by_name.iteritems():
      times = [event.self_time for event in event_group]
      total = sum(times)
      biggest_jank = max(times)
      results.Add(event_name, 'ms', total)
      results.Add(event_name + '_max', 'ms', biggest_jank)
      results.Add(event_name + '_avg', 'ms', total / len(times))
Python
0.000002
@@ -2693,8 +2693,293 @@
 times))
+
+    counters_by_name = self._thread_for_tab.parent.counters
+    for counter_name, counter in counters_by_name.iteritems():
+      total = sum(counter.totals)
+      results.Add(counter_name, 'count', total)
+      results.Add(counter_name + '_avg', 'count', total / len(counter.totals))
1db5cd0fddbbcc1d38a08bfe8ad6cfb8d0b5c550
add migration to create new model fields
coupons/migrations/0004_auto_20151105_1456.py
coupons/migrations/0004_auto_20151105_1456.py
Python
0
@@ -0,0 +1,1564 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('coupons', '0003_auto_20150416_0617'),
    ]

    operations = [
        migrations.CreateModel(
            name='CouponUser',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, primary_key=True, verbose_name='ID')),
                ('redeemed_at', models.DateTimeField(blank=True, verbose_name='Redeemed at', null=True)),
            ],
        ),
        migrations.AddField(
            model_name='coupon',
            name='user_limit',
            field=models.PositiveIntegerField(verbose_name='User limit', default=1),
        ),
        migrations.AlterField(
            model_name='coupon',
            name='type',
            field=models.CharField(choices=[('monetary', 'Money based coupon'), ('percentage', 'Percentage discount'), ('virtual_currency', 'Virtual currency')], verbose_name='Type', max_length=20),
        ),
        migrations.AddField(
            model_name='couponuser',
            name='coupon',
            field=models.ForeignKey(related_name='users', to='coupons.Coupon'),
        ),
        migrations.AddField(
            model_name='couponuser',
            name='user',
            field=models.ForeignKey(null=True, to=settings.AUTH_USER_MODEL, blank=True, verbose_name='User'),
        ),
    ]
3bd7c50acfc8044fc33002530a5fcaa0b5c2152e
add module 'job' for reset queue
libs/qpanel/job.py
libs/qpanel/job.py
Python
0.000001
@@ -0,0 +1,2550 @@
import backend
import config
from redis import Redis
from rq_scheduler import Scheduler
import datetime


def reset_stats_queue(queuename, when, hour):
    '''
    Reset stat for a queue on backend
    queuename: Name of queue to reset
    when, hour parameters for more easy
        control for exists_job_onqueue
    '''
    remove_jobs_not_config()
    if not exists_job_onqueue(queuename, when, hour):
        return False
    b = backend.Backend()
    return b.reset_stats(queuename)


def job_reset_stats_queue(queuename, when, hour):
    scheduler = Scheduler(connection=Redis())
    remove_jobs_not_config()
    if not exists_job_onqueue(queuename, when, hour):
        scheduler.schedule(
            scheduled_time=datetime_from_config(when, hour),
            func=reset_stats_queue,
            args=[queuename, when, hour],
            interval=seconds_from_config_interval(when)
        )


def exists_job_onqueue(queuename, when, hour):
    """
    Check if a job is present on queue
    """
    scheduler = Scheduler(connection=Redis())
    jobs = scheduler.get_jobs()
    for job in jobs:
        if 'reset_stats_queue' in job.func_name:
            args = job.args
            if queuename == args[0] and when == args[1] and hour == args[2]:
                return True
    return False


def remove_jobs_not_config():
    """
    Remove jobs on queue but not present on config.
    Prevent when in job for reset a queue stats is scheduled but
    after your config is modified or deleted
    """
    scheduler = Scheduler(connection=Redis())
    queue_for_reset = config.QPanelConfig().queues_for_reset_stats()
    jobs = scheduler.get_jobs()
    for job in jobs:
        if 'reset_stats_queue' in job.func_name:
            q = job.args[0]
            if q not in queue_for_reset.keys():
                job.delete()


def enqueue_reset_stats():
    queues_for_reset = config.QPanelConfig().queues_for_reset_stats()
    for queue, val in queues_for_reset.items():
        job_reset_stats_queue(queue, val['when'], val['hour'])


def seconds_from_config_interval(val):
    """
    Get interval value for a configuration by parameter
    """
    val = val.lower()
    day = 0
    if val == 'daily':
        day = 1
    elif val in ['weekly', 'sun', 'mon', 'tue', 'wed', 'thu', 'fri' 'sat']:
        day = 7
    elif val == 'monthly':
        day = 30
    return day * 24 * 60 * 60  # day * hour * minute * seconds


def datetime_from_config(when, hour):
    return datetime.datetime.utcnow()
b0c3ed39916e25bed2900b653974672a39fcb254
Use CHROME_HEADLESS to check if download_sdk_extras.py is running on a bot.
build/download_sdk_extras.py
build/download_sdk_extras.py
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Script to download sdk/extras packages on the bots from google storage.

The script expects arguments that specify zips file in the google storage
bucket named: <dir in SDK extras>_<package name>_<version>.zip. The file will
be extracted in the android_tools/sdk/extras directory.
"""

import json
import os
import shutil
import subprocess
import sys
import zipfile

sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'android'))
from pylib import constants

GSUTIL_PATH = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
    os.pardir, os.pardir, os.pardir, os.pardir, 'depot_tools', 'gsutil.py')
SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras'
SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras')
SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
                                    'android_sdk_extras.json')


def clean_and_extract(dir_name, package_name, zip_file):
  local_dir = '%s/%s/%s' % (SDK_EXTRAS_PATH, dir_name, package_name)
  if os.path.exists(local_dir):
    shutil.rmtree(local_dir)
  local_zip = '%s/%s' % (SDK_EXTRAS_PATH, zip_file)
  with zipfile.ZipFile(local_zip) as z:
    z.extractall(path=SDK_EXTRAS_PATH)


def main():
  if not os.path.exists(GSUTIL_PATH) or not os.path.exists(SDK_EXTRAS_PATH):
    # This is not a buildbot checkout.
    return 0
  # Update the android_sdk_extras.json file to update downloaded packages.
  with open(SDK_EXTRAS_JSON_FILE) as json_file:
    packages = json.load(json_file)
  for package in packages:
    local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip'])
    if not os.path.exists(local_zip):
      package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip'])
      subprocess.check_call([GSUTIL_PATH, '--force-version', '4.7', 'cp',
                             package_zip, local_zip])
    # Always clean dir and extract zip to ensure correct contents.
    clean_and_extract(package['dir_name'], package['package'], package['zip'])


if __name__ == '__main__':
  sys.exit(main())
Python
0
@@ -460,16 +460,82 @@
 irectory
+ on the test bots. This
+script will not do anything for developers
 .
 """
 
 i
@@ -618,245 +618,388 @@
 le
 
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'android'))
-from pylib import constants
-
-GSUTIL_PATH = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
-    os.pardir, os.pardir, os.pardir, os.pardir, 'depot_tools'
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SCRIPT_DIR, 'android'))
+sys.path.insert(1, os.path.join(CHROME_SRC, 'tools'))
+
+from pylib import constants
+import find_depot_tools
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH
 , 'g
@@ -1599,70 +1599,37 @@
 os.
-path.exists(GSUTIL_PATH) or not os.path.exists(SDK_EXTRAS_PATH
+environ.get('CHROME_HEADLESS'
 ):
@@ -2058,16 +2058,26 @@
 k_call([
+'python', 
 GSUTIL_P
@@ -2110,15 +2110,11 @@
 .7',
- 'cp',
+
@@ -2132,24 +2132,29 @@
+'cp', 
 package_zip
80580b8667558e3a4034b31ac08773de70ef3b39
Implement consumer for adjusting screen brightness.
display_control_consumer/run.py
display_control_consumer/run.py
Python
0
@@ -0,0 +1,3296 @@
from setproctitle import setproctitle
import json
import redis
import subprocess
import time

class DisplayControlConsumer(object):
    STEP = 0.05

    def __init__(self):
        self.redis_instance = redis.StrictRedis()
        self.env = {"DISPLAY": ":0"}


    def get_brightness(self):
        p = subprocess.Popen(["xrandr", "--verbose"], env=self.env, stdout=subprocess.PIPE)
        (stdout, _) = p.communicate()
        for line in stdout.split("\n"):
            if "Brightness" in line:
                return float(line.strip().split(": ")[1])

    def set_brightness(self, brightness):
        p = subprocess.Popen(["xrandr", "--q1", "--output", "HDMI-0", "--brightness", unicode(brightness)], env=self.env)
        p.wait()
        self.redis_instance.setex("display-control-brightness", 60, brightness)

    def run(self):
        while True:
            time.sleep(1)
            destination_brightness = self.redis_instance.get("display-control-destination-brightness")
            if not destination_brightness:
                continue
            destination_brightness = float(destination_brightness)

            current_brightness = self.redis_instance.get("display-control-brightness")
            if current_brightness:
                current_brightness = float(current_brightness)
            else:
                current_brightness = self.get_brightness()
                self.redis_instance.setex("display-control-brightness", 60, current_brightness)

            if current_brightness > destination_brightness:
                # Decrease brightness. Current brightness is too large.
                new_brightness = current_brightness - self.STEP
                print "Decreasing brightness: %s (-> %s, currently at %s)" % (new_brightness, destination_brightness, current_brightness)
                if new_brightness < destination_brightness:
                    # Wrapped around: new brightness is smaller than destination brightness.; no action
                    print "Brightness wrapped around"
                    self.redis_instance.delete("display-control-destination-brightness")
                    continue
            elif current_brightness < destination_brightness:
                # Increase brightness
                new_brightness = current_brightness + self.STEP
                print "Increasing brightness: %s (-> %s, currently at %s)" % (new_brightness, destination_brightness, current_brightness)

                if new_brightness > destination_brightness:
                    # Wrapped around; no action
                    self.redis_instance.delete("display-control-destination-brightness")
                    continue
            else:
                # Already matches. No action.
                self.redis_instance.delete("display-control-destination-brightness")
                continue
            print "Setting brightness to %s (destination: %s)" % (new_brightness, destination_brightness)
            self.set_brightness(new_brightness)
            self.redis_instance.publish("home:broadcast:generic", json.dumps({"key": "display_brightness", "content": new_brightness}))
def main():
    setproctitle("display_control_consumer: run")
    dcc = DisplayControlConsumer()
    dcc.run()

if __name__ == '__main__':
    main()
5a376ef0d49193df46fc127323bfa50376e3c968
add lqr sample
lqr_sample/main.py
lqr_sample/main.py
Python
0
@@ -0,0 +1,1535 @@
#! /usr/bin/python
# -*- coding: utf-8 -*-
u"""
Linear-Quadratic Regulator sample code

author Atsushi Sakai
"""

import matplotlib.pyplot as plt
import numpy as np
import scipy.linalg as la

simTime=3.0
dt=0.1

A=np.matrix([[1.1,2.0],[0,0.95]])
B=np.matrix([0.0,0.0787]).T
C=np.matrix([-2,1])

def Observation(x):
    y=C*x
    ry=float(y[0])
    return (ry)

def Process(x,u):
    x=A*x+B*u
    return (x)

def dlqr(A,B,Q,R):
    """Solve the discrete time lqr controller.
    x[k+1] = A x[k] + B u[k]
    cost = sum x[k].T*Q*x[k] + u[k].T*R*u[k]
    """
    #ref Bertsekas, p.151

    #first, try to solve the ricatti equation
    X = np.matrix(la.solve_discrete_are(A, B, Q, R))

    #compute the LQR gain
    K = np.matrix(la.inv(B.T*X*B+R)*(B.T*X*A))

    eigVals, eigVecs = la.eig(A-B*K)

    return K, X, eigVals

def LQRController(x,u):
    K,X,ev=dlqr(A,B,C.T*np.eye(1)*C,np.eye(1))
    u=-K*x
    return u

def Main():
    time=0.0
    u_history=[]
    y_history=[]
    time_history=[]

    x=np.matrix([3,1]).T
    u=np.matrix([0,0,0])

    while time<=simTime:
        u=LQRController(x,u)
        u0=float(u[0,0])
        x=Process(x,u0)
        y=Observation(x)

        u_history.append(u0)
        y_history.append(y)
        time_history.append(time)
        time+=dt

    plt.plot(time_history,u_history,"-r",label="input")
    plt.plot(time_history,y_history,"-b",label="output")
    plt.grid(True)
    plt.xlim([0,simTime])
    plt.legend()
    plt.show()

if __name__ == '__main__':
    Main()
a3a022a184694cf95bbc37e22c4329c6b3e400cd
566. Reshape the Matrix
python/ReshapeTheMatrix.py
python/ReshapeTheMatrix.py
Python
0.999779
@@ -0,0 +1,1744 @@
# -*- coding:utf-8 -*-

# @Author zpf
"""
You're given a matrix represented by a two-dimensional array,
and two positive integers r and c representing the row number and column number of the wanted reshaped matrix, respectively.
The reshaped matrix need to be filled with all the elements of the original matrix in the same row-traversing order as they were.
If the 'reshape' operation with given parameters is possible and legal, output the new reshaped matrix;
Otherwise, output the original matrix.

Example 1:                Example 2:
Input:                    Input:
nums =                    nums =
[[1,2],                   [[1,2],
 [3,4]]                    [3,4]]
r = 1, c = 4              r = 2, c = 4
Output:                   Output:
[[1,2,3,4]]               [[1,2],
                           [3,4]]

Note:
The height and width of the given matrix is in range [1, 100].
The given r and c are all positive.
"""


class Solution(object):
    def matrixReshape(self, nums, r, c):
        """
        :type nums: List[List[int]]
        :type r: int
        :type c: int
        :rtype: List[List[int]]
        """
        r1 = len(nums)
        c1 = len(nums[0])
        if r1 * c1 != r * c:
            return nums
        else:
            temp = []
            new_nums = []
            for e in nums:
                for e1 in e:
                    temp.append(e1)
            i = 0
            while i < r:
                new_nums.append(temp[0 + i * c: c + i * c])
                i += 1
            return new_nums


if __name__ == "__main__":
    sample = Solution()
    print(sample.matrixReshape(nums=[[1, 2], [3, 4]], r=1, c=4))
d83b18ec4faa513c7171a23af5ba46397141519e
add main __init__.py
wingstructure/__init__.py
wingstructure/__init__.py
Python
0.000588
@@ -0,0 +1,92 @@
from . import analysis
from . import data
from . import liftingline
from . import structure
81df43350fdcbde85780dfbf1101e47fff04dc6c
Add missing migration
resolwe/flow/migrations/0025_set_get_last_by.py
resolwe/flow/migrations/0025_set_get_last_by.py
Python
0.0002
@@ -0,0 +1,2417 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-15 12:42
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('flow', '0024_add_relations'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='collection',
            options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_collection', 'Can view collection'), ('edit_collection', 'Can edit collection'), ('share_collection', 'Can share collection'), ('download_collection', 'Can download files from collection'), ('add_collection', 'Can add data objects to collection'), ('owner_collection', 'Is owner of the collection'))},
        ),
        migrations.AlterModelOptions(
            name='data',
            options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_data', 'Can view data'), ('edit_data', 'Can edit data'), ('share_data', 'Can share data'), ('download_data', 'Can download files from data'), ('owner_data', 'Is owner of the data'))},
        ),
        migrations.AlterModelOptions(
            name='descriptorschema',
            options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_descriptorschema', 'Can view descriptor schema'), ('edit_descriptorschema', 'Can edit descriptor schema'), ('share_descriptorschema', 'Can share descriptor schema'), ('owner_descriptorschema', 'Is owner of the description schema'))},
        ),
        migrations.AlterModelOptions(
            name='entity',
            options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_entity', 'Can view entity'), ('edit_entity', 'Can edit entity'), ('share_entity', 'Can share entity'), ('download_entity', 'Can download files from entity'), ('add_entity', 'Can add data objects to entity'), ('owner_entity', 'Is owner of the entity'))},
        ),
        migrations.AlterModelOptions(
            name='process',
            options={'default_permissions': (), 'get_latest_by': 'version', 'permissions': (('view_process', 'Can view process'), ('share_process', 'Can share process'), ('owner_process', 'Is owner of the process'))},
        ),
        migrations.AlterModelOptions(
            name='storage',
            options={'default_permissions': (), 'get_latest_by': 'version'},
        ),
    ]
7b2e28f9604347ff396b220c8d2ab7bdfdc671c8
test hbase TSocket
test/test_hbase_TSocker0Err32/test_hbase.py
test/test_hbase_TSocker0Err32/test_hbase.py
Python
0.000001
@@ -0,0 +1,366 @@
import happybase

# gives error
# TSocket read 0 bytes
# [Errno 32] Broken pipe

if __name__ == "__main__":
	conn = happybase.Connection(host="10.1.94.57")
	table_name = "escorts_images_sha1_infos_dev"
	hbase_table = conn.table(table_name)
	batch_list_queries = ["000421227D83DA48DB4A417FCEFCA68272398B8E"]
	rows = hbase_table.rows(batch_list_queries)
	print rows
6d8e47f0b1bc70de7464303d6ac3b7684588a7aa
Add mpmodel
mpmodel/mpmodel.py
mpmodel/mpmodel.py
Python
0
@@ -0,0 +1,24 @@
import tensorflow as tf
ad6e67d382df1018e4ae55ebdcb6fae1cca9bffe
Add merge migration
osf/migrations/0081_merge_20180212_0949.py
osf/migrations/0081_merge_20180212_0949.py
Python
0.000001
@@ -0,0 +1,327 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-12 15:49
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('osf', '0080_ensure_schemas'),
        ('osf', '0079_merge_20180202_1206'),
    ]

    operations = [
    ]
3fbf2c29a54225e7d4dd882637e68cfe3a4d0101
Add some tests for Message Queue
src/cobwebs/tests/test_mq.py
src/cobwebs/tests/test_mq.py
Python
0
@@ -0,0 +1,858 @@
from cobwebs.mq.core import RPCLink, TopicsLink
from cobwebs.mq.backends.rabbitmq import driver
import pytest
import spider
import json
from unittest import mock

HOST = "127.0.0.1"


def test_driver_instance():
    assert isinstance(driver.rpc, RPCLink)
    assert isinstance(driver.topics, TopicsLink)


@mock.patch("cobwebs.mq.backends.rabbitmq")
def test_rpc(rabbitmq):
    request = {"action": "list", "data": None}
    result = rabbitmq.rpc.send("db_driver", json.dumps(request), HOST)
    rabbitmq.rpc.send.assert_called_with("db_driver", json.dumps(request), HOST)


@mock.patch("cobwebs.mq.backends.rabbitmq")
def test_topic(rabbitmq):
    result = rabbitmq.topic.emit(key="test", message="this is just a message")
    rabbitmq.topic.emit.assert_called_with(key="test",
                                           message="this is just a message")
9469bcf60a199b96d1fec778c44346df744a1d60
add jieba
jieba/test_jieba.py
jieba/test_jieba.py
Python
0.999974
@@ -0,0 +1,426 @@
#!/usr/bin/env python
# encoding=utf-8

import jieba

seg_list = jieba.cut("我来到北京清华大学", cut_all=True)
print("Full Mode: " + "/ ".join(seg_list))  # 全模式

seg_list = jieba.cut("我来到北京清华大学", cut_all=False)
print("Default Mode: " + "/ ".join(seg_list))  # 精确模式

seg_list = jieba.cut("他来到了网易杭研大厦")  # 默认是精确模式
print(", ".join(seg_list))

# 搜索引擎模式
seg_list = jieba.cut_for_search("小明硕士毕业于中国科学院计算所，后在日本京都大学深造")
print(", ".join(seg_list))
291e7c8b2a69f26f6343269aaac2b9e3cd517220
Add tests
readthedocs/proxito/tests/test_proxied_api.py
readthedocs/proxito/tests/test_proxied_api.py
Python
0.00001
@@ -0,0 +1,402 @@
from readthedocs.rtd_tests.tests.test_footer import TestFooterHTML
from django.test import override_settings


@override_settings(ROOT_URLCONF='readthedocs.proxito.urls')
class TestProxiedFooterHTML(TestFooterHTML):

    def setUp(self):
        super().setUp()
        self.host = 'pip.readthedocs.io'

    def render(self):
        r = self.client.get(self.url, HTTP_HOST=self.host)
        return r
081b5aabae205ad7c23c512be15ee26276dc8a29
Check whether Azure CLI is in ARM mode
perfkitbenchmarker/providers/azure/util.py
perfkitbenchmarker/providers/azure/util.py
Python
0
@@ -0,0 +1,1252 @@ +# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22Verify that Azure CLI is in arm mode.%22%22%22%0A%0Afrom perfkitbenchmarker import events%0Afrom perfkitbenchmarker import providers%0Afrom perfkitbenchmarker import vm_util%0Afrom perfkitbenchmarker.providers import azure%0A%0A%0Aclass BadAzureCLIModeError(Exception):%0A pass%0A%0A%0Adef _CheckAzureCLIMode(sender):%0A assert sender == providers.AZURE, sender%0A%0A stdout, _ = vm_util.IssueRetryableCommand(%0A %5Bazure.AZURE_PATH, 'config'%5D)%0A%0A if 'Current Mode: arm' not in stdout:%0A raise BadAzureCLIModeError('Azure CLI may not be in ARM mode.')%0A%0A%0Aevents.provider_imported.connect(_CheckAzureCLIMode, providers.AZURE,%0A weak=False)%0A
95a86efeadc15f3edc83cbfe64c6d725b1eaf0bd
revert unneeded None checks
web/scripts/load_agagd_data.py
web/scripts/load_agagd_data.py
from app.models import db, Game, GoServer, Player, User from app.tokengen import generate_token from flask.ext.script import Command, Option from scripts.parsing import agagd_parser, pin_change_parser from uuid import uuid4 """Script which loads game and user data from an AGAGD SQL dump and file with PIN changes.""" def create_server(name): server = GoServer() server.name = name server.url = '' server.token = generate_token() db.session.add(server) db.session.commit() return server.id class AGAHistoricalGamesLoader(Command): """Class which holds a little bit of state used while loading the AGAGD data.""" option_list = ( Option('--sql_dump', '-d', dest='agagd_dump_filename'), Option('--pin_changes', '-p', dest='pin_change_dump_filename') ) def setup(self, pin_change_dump_filename): """Stand-in for __init__ because we don't have necessary information at construction time, and we are constructed regardless of whether this script is being run or not. """ name = 'AGA' server = db.session.query(GoServer).filter_by(name=name).first() if server: self.server_id = server.id else: print('Creating AGA Server object') self.server_id = create_server(name) self._users = {} # map: old_pin -> new_pin with open(pin_change_dump_filename) as f: self._pin_changes = {line['old']: line['new'] for line in pin_change_parser(f) if line['old'] != line['new']} # Prevents infinite lookup loops def get_or_make_user(self, aga_id): """Gets or creates a fake User object for an AGA ID, along with an AGA player If the AGA ID has had one or more PIN changes, the most recent ID will be used. """ while aga_id in self._pin_changes: if self._pin_changes[aga_id] is None: print ("Pin would change to none: %s" % aga_id) aga_id = self._pin_changes[aga_id] if aga_id in self._users: return self._users[aga_id] else: user = User(aga_id=aga_id, email=uuid4(), fake=True) db.session.add(user) db.session.commit() player = Player(id=aga_id, name='', user_id=user.id, server_id=self.server_id, token=uuid4()) db.session.add(player) self._users[aga_id] = user return user def store_game(self, row): if row['Pin_Player_1'] is None or row['Pin_Player_2'] is None: print(row) user1 = self.get_or_make_user(row['Pin_Player_1']) user2 = self.get_or_make_user(row['Pin_Player_2']) white_user, black_user = (user1, user2) if row['Color_1'] == 'W' else (user2, user1) game = Game(id=row['Game_ID'], server_id=self.server_id, white_id=white_user.aga_id, black_id=black_user.aga_id, date_played=row['Game_Date'], date_reported=row['Game_Date'], result=row['Result'], rated=row['Rated'], handicap=row['Handicap'], komi=row['Komi']) db.session.add(game) def load_data(self, filename): # server_id = create_server() with open(filename) as f: for i, row in enumerate(agagd_parser(f)): if i % 1000 == 0: print('-Loading row', i) db.session.commit() print('Committed', i) self.store_game(row) def run(self, agagd_dump_filename, pin_change_dump_filename): self.setup(pin_change_dump_filename) self.load_data(agagd_dump_filename) db.session.commit()
Python
0.000005
@@ -1974,122 +1974,8 @@ es:%0A - if self._pin_changes%5Baga_id%5D is None:%0A print (%22Pin would change to none: %25s%22 %25 aga_id)%0A @@ -2477,103 +2477,8 @@ w):%0A - if row%5B'Pin_Player_1'%5D is None or row%5B'Pin_Player_2'%5D is None:%0A print(row)%0A%0A
6705e0e23d13a94726556714e11dfbb7a916877d
Add basic mechanism to override the default EntryAdmin
zinnia_wymeditor/admin.py
zinnia_wymeditor/admin.py
Python
0.000001
@@ -0,0 +1,519 @@ +%22%22%22EntryAdmin for zinnia-wymeditor%22%22%22%0Afrom django.contrib import admin%0A%0Afrom zinnia.models import Entry%0Afrom zinnia.admin.entry import EntryAdmin%0A%0A%0Aclass EntryAdminWYMEditorMixin(object):%0A %22%22%22%0A Mixin adding WYMeditor for editing Entry.content field.%0A %22%22%22%0A pass%0A%0A%0Aclass EntryAdminWYMEditor(EntryAdminWYMEditorMixin,%0A EntryAdmin):%0A %22%22%22%0A Enrich the default EntryAdmin with WYMEditor.%0A %22%22%22%0A pass%0A%0Aadmin.site.unregister(Entry)%0Aadmin.site.register(Entry, EntryAdminWYMEditor)%0A
6193786bb2307550ab9dfb9c218f6d8b3f407156
Create is-graph-bipartite.py
Python/is-graph-bipartite.py
Python/is-graph-bipartite.py
Python
0.000326
@@ -0,0 +1,1898 @@ +# Time: O(%7CV%7C + %7CE%7C)%0A# Space: O(%7CV%7C)%0A%0A# Given a graph, return true if and only if it is bipartite.%0A#%0A# Recall that a graph is bipartite if we can split its set of nodes into%0A# two independent subsets A and B such that every edge in the graph has%0A# one node in A and another node in B.%0A#%0A# The graph is given in the following form: graph%5Bi%5D is a list of indexes j%0A# for which the edge between nodes i and j exists. %0A# Each node is an integer between 0 and graph.length - 1.%0A# There are no self edges or parallel edges: graph%5Bi%5D does not contain i,%0A# and it doesn't contain any element twice.%0A#%0A# Example 1:%0A# Input: %5B%5B1,3%5D, %5B0,2%5D, %5B1,3%5D, %5B0,2%5D%5D%0A# Output: true%0A# Explanation: %0A# The graph looks like this:%0A# 0----1%0A# %7C    %7C%0A# %7C    %7C%0A# 3----2%0A# We can divide the vertices into two groups: %7B0, 2%7D and %7B1, 3%7D.%0A# %0A# Example 2:%0A# Input: %5B%5B1,2,3%5D, %5B0,2%5D, %5B0,1,3%5D, %5B0,2%5D%5D%0A# Output: false%0A# Explanation: %0A# The graph looks like this:%0A# 0----1%0A# %7C %5C  %7C%0A# %7C  %5C %7C%0A# 3----2%0A# We cannot find a way to divide the set of nodes into two independent subsets.%0A#%0A# Note:%0A# - graph will have length in range %5B1, 100%5D.%0A# - graph%5Bi%5D will contain integers in range %5B0, graph.length - 1%5D.%0A# - graph%5Bi%5D will not contain i or duplicate values.%0A%0Aclass Solution(object):%0A    def isBipartite(self, graph):%0A        %22%22%22%0A        :type graph: List%5BList%5Bint%5D%5D%0A        :rtype: bool%0A        %22%22%22%0A        color = %7B%7D%0A        for node in xrange(len(graph)):%0A            if node in color:%0A                continue%0A            stack = %5Bnode%5D%0A            color%5Bnode%5D = 0%0A            while stack:%0A                curr = stack.pop()%0A                for neighbor in graph%5Bcurr%5D:%0A                    if neighbor not in color:%0A                        stack.append(neighbor)%0A                        color%5Bneighbor%5D = color%5Bcurr%5D %5E 1%0A                    elif color%5Bneighbor%5D == color%5Bcurr%5D:%0A                        return False%0A        return True%0A
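For reference, the two examples from the problem statement above can be run directly against this class:

print(Solution().isBipartite([[1,3], [0,2], [1,3], [0,2]]))      # True: {0, 2} vs {1, 3}
print(Solution().isBipartite([[1,2,3], [0,2], [0,1,3], [0,2]]))  # False: contains an odd cycle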
3204227799ce5f7a7d0df4cb6b480b42d6cdae1f
Add a snippet.
python/pyqt/pyqt5/widget_QPainter_OpenGL.py
python/pyqt/pyqt5/widget_QPainter_OpenGL.py
Python
0.000002
@@ -0,0 +1,1525 @@ +#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0A# See https://doc.qt.io/archives/4.6/opengl-2dpainting.html%0A%0Aimport sys%0Afrom PyQt5.QtWidgets import QApplication%0Afrom PyQt5.QtGui import QPainter, QBrush, QPen%0Afrom PyQt5.QtCore import Qt%0Afrom PyQt5.QtOpenGL import QGLWidget%0A%0A%0Aclass MyPaintWidget(QGLWidget):%0A%0A def __init__(self):%0A super().__init__()%0A%0A # Set window background color%0A self.setAutoFillBackground(True)%0A%0A palette = self.palette()%0A palette.setColor(self.backgroundRole(), Qt.white)%0A%0A self.setPalette(palette)%0A%0A def paintEvent(self, event):%0A qp = QPainter(self)%0A%0A qp.setPen(QPen(Qt.black, 5, Qt.SolidLine))%0A qp.setBrush(QBrush(Qt.red, Qt.SolidPattern))%0A qp.setRenderHint(QPainter.Antialiasing) # %3C- Set anti-aliasing See https://wiki.python.org/moin/PyQt/Painting%2520and%2520clipping%2520demonstration%0A%0A qp.drawEllipse(100, 15, 400, 200)%0A%0A qp.setBrush(QBrush(Qt.red, Qt.DiagCrossPattern))%0A%0A qp.drawEllipse(600, 15, 200, 200)%0A%0A%0Aif __name__ == '__main__':%0A app = QApplication(sys.argv)%0A%0A widget = MyPaintWidget()%0A widget.show()%0A%0A # The mainloop of the application. The event handling starts from this point.%0A # The exec_() method has an underscore. It is because the exec is a Python keyword. And thus, exec_() was used instead.%0A exit_code = app.exec_()%0A%0A # The sys.exit() method ensures a clean exit.%0A # The environment will be informed, how the application ended.%0A sys.exit(exit_code)%0A
ae3bd406736f9235b442c52bf584a97d0760a588
add api
buildbot_travis/api.py
buildbot_travis/api.py
Python
0
@@ -0,0 +1,3034 @@ +# Copyright 2012-2013 Isotoma Limited%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Afrom buildbot import config%0Afrom klein import Klein%0Afrom twisted.internet import defer%0Afrom twisted.internet import threads%0Aimport yaml%0Aimport json%0Afrom buildbot.util.eventual import eventually%0A%0A%0Adef getDbConfigObjectId(master, name=%22config%22):%0A    return master.db.state.getObjectId(name, %22DbConfig%22)%0A%0A%0Aclass Api(object):%0A    app = Klein()%0A    _yamlPath = None%0A    _useDbConfig = False%0A    _in_progress = False%0A%0A    def __init__(self, ep):%0A        self.ep = ep%0A%0A    def setYamlPath(self, path):%0A        self._yamlPath = path%0A%0A    def useDbConfig(self):%0A        self._useDbConfig = True%0A%0A    def setCfg(self, cfg):%0A        self._cfg = cfg%0A        self._in_progress = False%0A%0A    @defer.inlineCallbacks%0A    def saveCfg(self, cfg):%0A        if self._yamlPath is not None:%0A            cfg = yaml.safe_dump(cfg, default_flow_style=False, indent=4)%0A            with open(self._yamlPath, %22w%22) as f:%0A                f.write(cfg)%0A%0A        if self._useDbConfig:%0A            oid = yield getDbConfigObjectId(self.ep.master)%0A            yield self.ep.master.db.state.setState(oid, %22travis%22, cfg)%0A%0A    @app.route(%22/config%22, methods=%5B'GET'%5D)%0A    def getConfig(self, request):%0A        return json.dumps(self._cfg)%0A%0A    def thdCheckConfig(self):%0A        # check the config in thread%0A        try:%0A            config.MasterConfig.loadConfig(self.ep.master.basedir, self.ep.master.configFileName)%0A        except config.ConfigErrors, e:%0A            return e.errors%0A        return None%0A%0A    @app.route(%22/config%22, methods=%5B'PUT'%5D)%0A    @defer.inlineCallbacks%0A    def saveConfig(self, request):%0A        %22%22%22I save the config, and run check_config, potentially returning errors%22%22%22%0A        request.setHeader('Content-Type', 'application/json')%0A        if self._in_progress:%0A            defer.returnValue(json.dumps(%7B'success': False, 'errors': %5B'reconfig already in progress'%5D%7D))%0A        self._in_progress = True%0A        cfg = json.loads(request.content.read())%0A        if cfg != self._cfg:%0A            yield self.saveCfg(cfg)%0A            try:%0A                err = yield threads.deferToThread(self.thdCheckConfig)%0A            except Exception as e:%0A                err = %5Brepr(e)%5D%0A            if err is not None:%0A                self._in_progress = False%0A                yield self.saveCfg(self._cfg)%0A                defer.returnValue(json.dumps(%7B'success': False, 'errors': err%7D))%0A%0A        yield self.ep.master.reconfig()%0A        defer.returnValue(json.dumps(%7B'success': True%7D))%0A
d19ab50f2d3b259bd6c5cfb21b4087ca4d3ec248
create theano 2
theanoTUT/theano2_install.py
theanoTUT/theano2_install.py
Python
0
@@ -0,0 +1,854 @@ +# View more python tutorials on my Youtube and Youku channel!!!%0A%0A# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg%0A# Youku video tutorial: http://i.youku.com/pythontutorial%0A%0A# 2 - Install theano%0A%0A%22%22%22%0Arequirements:%0A1. python 2 %3E=2.6 or python 3%3E=3.3%0A2. Numpy %3E= 1.7.1%0A3. Scipy %3E=0.11%0A%0AIf using CPU, there are no other requirements.%0ABut if using GPU, you will need NVIDIA CUDA drivers and SDK.%0A%0AThe easiest way to install theano is to use pip install.%0A1. open your terminal (MacOS and Linux), or your command window (Windows)%0A2. type %22pip install theano%22 (for python 2x); type %22pip3 install theano%22 (for python 3x)%0A%0ANote: installing theano on a Windows machine may be a bit of a struggle. If you encounter any%0Aproblem, please refer to this web page:%0Ahttp://deeplearning.net/software/theano/install_windows.html#install-windows%0A%0A%22%22%22
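Once pip finishes, a quick sanity check that the install worked (valid for both Python lines):

import theano
print(theano.__version__)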
389adca1fd52747814f370de2d066a1743544469
Solve Game Time in python
solutions/beecrowd/1046/1046.py
solutions/beecrowd/1046/1046.py
Python
0.999996
@@ -0,0 +1,198 @@ +start, end = map(int, input().split())%0A%0Aif start == end:%0A result = 24%0Aelif end - start %3E= 0:%0A result = end - start%0Aelse:%0A result = 24 + end - start%0A%0Aprint(f'O JOGO DUROU %7Bresult%7D HORA(S)')%0A
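As a worked example of the wrap-around branch above: a match starting at 23 and ending at 2 crosses midnight, so the result is 24 + 2 - 23 = 3 hours, while identical start and end times are counted as a full 24-hour game.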
3a9627f31846e06e04d7ae933712840d52616663
Create main.py
main.py
main.py
Python
0.000001
@@ -0,0 +1,293 @@ +import pygame%0Aimport game%0Afile = 'music.mp3'%0Apygame.init()%0Apygame.mixer.init()%0Apygame.mixer.music.load(file)%0Apygame.mixer.music.play(loops=-1)%0Apygame.mixer.music.set_volume(0.5)%0A%0Arun = True%0ASuperHeroTower = game.Game()%0A%0Awhile run:%0A run = SuperHeroTower.startScreen()%0A%0A%0Apygame.quit()%0Aquit()%0A
bc3f7e83bd35f1a6ae8add35932513c7da47076e
fix a typo.
restclients/test/util/datetime_convertor.py
restclients/test/util/datetime_convertor.py
from django.test import TestCase from datetime import date, datetime from restclients.util.datetime_convertor import convert_to_begin_of_day,\ convert_to_end_of_day class DatetimeConvertorTest(TestCase): def test_convert_to_begin_of_day(self): self.assertEquals(convert_to_begin_of_day(date(2013, 4, 9)), datetime(2013, 4, 9, 0, 0, 0)) self.assertEquals( convert_to_begin_of_day(datetime(2013, 4, 9, 10, 10, 10)), datetime(2013, 4, 9, 0, 0, 0)) def test_convert_to_end_of_day(self): self.assertEquals(convert_to_end_of_day(date(2012, 2, 28)), datetime(2013, 2, 29, 0, 0, 0)) self.assertEquals( convert_to_end_of_day(datetime(2012, 2, 28, 10, 10, 10)), datetime(2012, 2, 29, 0, 0, 0))
Python
0.03285
@@ -668,17 +668,17 @@ time(201 -3 +2 , 2, 29,
2199f4c5ed563200d555315b9a8575e00486e667
Add a simple script to generate monthly confirmed / fixed counts
script/confirmed-fixed-monthly-breakdown.py
script/confirmed-fixed-monthly-breakdown.py
Python
0
@@ -0,0 +1,2123 @@ +#!/usr/bin/python%0A%0A# A script to draw graphs showing the number of confirmed reports%0A# created each month, and how many of those have been fixed.  This%0A# script expects to find a file called 'problems.csv' in the current%0A# directory which should be generated by:%0A#%0A# DIR=%60pwd%60 rake data:create_problem_spreadsheet%0A%0Aimport csv%0Aimport datetime%0Afrom collections import defaultdict%0A%0Aimport itertools%0A%0Astatus_types = ('confirmed', 'fixed')%0A%0Acounts = %7B%7D%0Afor status_type in status_types:%0A    counts%5Bstatus_type%5D = defaultdict(int)%0A%0Atoday = datetime.date.today()%0Alatest_month = earliest_month = (today.year, today.month)%0A%0Amaximum_count = -1%0A%0Awith open('problems.csv') as fp:%0A    reader = csv.DictReader(fp, delimiter=',', quotechar='%22')%0A    for row in reader:%0A        d = datetime.datetime.strptime(row%5B'Created'%5D,%0A                                       '%25H:%25M %25d %25b %25Y')%0A        ym = (d.year, d.month)%0A        earliest_month = min(earliest_month, ym)%0A        if row%5B'Status'%5D == 'confirmed':%0A            counts%5B'confirmed'%5D%5Bym%5D += 1%0A        elif row%5B'Status'%5D == 'fixed':%0A            counts%5B'fixed'%5D%5Bym%5D += 1%0A        maximum_count = max(maximum_count, counts%5B'fixed'%5D%5Bym%5D, counts%5B'confirmed'%5D%5Bym%5D)%0A%0Adef months_between(earlier, later):%0A    %22%22%22A generator for iterating over months represented as (year, month) tuples%22%22%22%0A    year = earlier%5B0%5D%0A    month = earlier%5B1%5D%0A    while True:%0A        yield (year, month)%0A        if month == 12:%0A            year = year + 1%0A            month = 1%0A        else:%0A            month += 1%0A        if (year, month) %3E later:%0A            return%0A%0Aall_months = list(months_between(earliest_month, latest_month))%0Amonths = len(all_months)%0A%0A# Make sure that there's at least a zero count for each month we're%0A# considering:%0Afor d in counts.values():%0A    for ym in all_months:%0A        d%5Bym%5D += 0%0A%0Awith open('monthly-breakdown.csv', 'w') as fp:%0A    writer = csv.writer(fp)%0A    writer.writerow(%5B'Month', 'Confirmed', 'Fixed'%5D)%0A    for ym in all_months:%0A        writer.writerow(%5B%22%25d-%2502d%22 %25 (ym%5B0%5D, ym%5B1%5D),%0A                        counts%5B'confirmed'%5D%5Bym%5D,%0A                        counts%5B'fixed'%5D%5Bym%5D%5D)%0A
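For reference, the months_between generator above is inclusive on both ends and rolls the year over after December:

list(months_between((2012, 11), (2013, 2)))
# [(2012, 11), (2012, 12), (2013, 1), (2013, 2)]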
417f1832dbb6a1d0742b2f01d56429139f8885ef
add conversion script
scripts/conversionScripts/toValidationPP.py
scripts/conversionScripts/toValidationPP.py
Python
0.000001
@@ -0,0 +1,2330 @@ +# Copyright 2017 Battelle Energy Alliance, LLC%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0Aimport xml.etree.ElementTree as ET%0Aimport xml.dom.minidom as pxml%0Aimport os%0A%0Adef convert(tree,fileName=None):%0A %22%22%22%0A Converts input files to be compatible with merge request #1583%0A Restructure the Validation PostProcessor, use the subType to indicate the algorithm%0A used by the Validation. Remove the specific node 'Probailistic'.%0A @ In, tree, xml.etree.ElementTree.ElementTree object, the contents of a RAVEN input file%0A @ In, fileName, the name for the raven input file%0A @Out, tree, xml.etree.ElementTree.ElementTree object, the modified RAVEN input file%0A %22%22%22%0A simulation = tree.getroot()%0A models = simulation.find('Models')%0A updateTestInfo = False%0A if models is not None:%0A postProcessors = models.findall('PostProcessor')%0A for pp in postProcessors:%0A subType = pp.get('subType')%0A if subType == 'Validation':%0A prob = pp.find('Probabilistic')%0A if prob is not None:%0A pp.set('subType', prob.tag.strip())%0A pp.remove(prob)%0A updateTestInfo = True%0A%0A if updateTestInfo:%0A TestInfo = simulation.find('TestInfo')%0A if TestInfo is not None:%0A revisions = TestInfo.find('revisions')%0A hasRev = True%0A if revisions is None:%0A revisions = ET.Element('revisions')%0A hasRev = False%0A rev = ET.Element('revision')%0A rev.attrib%5B'author'%5D = 'wangc'%0A rev.attrib%5B'date'%5D = '2021-09-28'%0A rev.text = 'Convert Validation PostProcessor: subType will be replaced with the Probabilistic node tag, and Probabilistic node is removed'%0A revisions.append(rev)%0A if not hasRev:%0A TestInfo.append(revisions)%0A%0A return tree%0A%0Aif __name__=='__main__':%0A import convert_utils%0A import sys%0A convert_utils.standardMain(sys.argv,convert)%0A
bbae3e9fee30634a659276732f16a883500e8f45
Create memcache.py
cutout/cache/memcache.py
cutout/cache/memcache.py
Python
0.000001
@@ -0,0 +1,2213 @@ +# -*- coding: utf-8 -*-%0A%0A%0A%0A%0Aimport os%0Aimport re%0Aimport tempfile%0Afrom time import time%0Afrom .basecache import BaseCache%0Afrom .posixemulation import rename, _items%0Atry:%0A import cPickle as pickle%0Aexcept ImportError:%0A import pickle%0Atry:%0A from hashlib import md5%0Aexcept ImportError:%0A from md5 import new as md5%0A%0A%0A%0A%0Aclass MemCache(BaseCache):%0A %22%22%22Simple memory cache for single process environments. This class exists%0A mainly for the development server and is not 100%25 thread safe. It tries%0A to use as many atomic operations as possible and no locks for simplicity%0A but it could happen under heavy load that keys are added multiple times.%0A%0A :param threshold: the maximum number of items the cache stores before%0A it starts deleting some.%0A :param default_timeout: the default timeout that is used if no timeout is%0A specified on :meth:%60~BaseCache.set%60.%0A %22%22%22%0A%0A def __init__(self, threshold=500, default_timeout=300):%0A BaseCache.__init__(self, default_timeout)%0A self._cache = %7B%7D%0A self.clear = self._cache.clear%0A self._threshold = threshold%0A%0A def _prune(self):%0A if len(self._cache) %3E self._threshold:%0A now = time()%0A for idx, (key, (expires, _)) in enumerate(self._cache.items()):%0A if expires %3C= now or idx %25 3 == 0:%0A self._cache.pop(key, None)%0A%0A def get(self, key):%0A now = time()%0A expires, value = self._cache.get(key, (0, None))%0A if expires %3E time():%0A return pickle.loads(value)%0A%0A def set(self, key, value, timeout=None):%0A if timeout is None:%0A timeout = self.default_timeout%0A self._prune()%0A self._cache%5Bkey%5D = (time() + timeout, pickle.dumps(value,%0A pickle.HIGHEST_PROTOCOL))%0A%0A def add(self, key, value, timeout=None):%0A if timeout is None:%0A timeout = self.default_timeout%0A if len(self._cache) %3E self._threshold:%0A self._prune()%0A item = (time() + timeout, pickle.dumps(value,%0A pickle.HIGHEST_PROTOCOL))%0A self._cache.setdefault(key, item)%0A%0A def delete(self, key):%0A self._cache.pop(key, None)%0A
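A minimal usage sketch of the class above; values round-trip through pickle, and add() only stores a key that is not already present:

c = MemCache(threshold=100, default_timeout=60)
c.set('user:1', {'name': 'alice'})
c.add('user:1', {'name': 'bob'})   # no-op: the key already exists
print(c.get('user:1'))             # {'name': 'alice'}, until the 60s timeout passes
c.delete('user:1')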
e5d4e0f0ebb2138cdc5efee0b8584e0cb571ac8a
Add test_notify_of_new_user_internally.
zerver/tests/test_new_users.py
zerver/tests/test_new_users.py
from django.conf import settings from django.core import mail from django.contrib.auth.signals import user_logged_in from zerver.lib.test_classes import ZulipTestCase from zerver.signals import get_device_browser, get_device_os class SendLoginEmailTest(ZulipTestCase): """ Uses django's user_logged_in signal to send emails on new login. The receiver handler for this signal is always registered in production, development and testing, but emails are only sent based on SEND_LOGIN_EMAILS setting. SEND_LOGIN_EMAILS is set to true in default settings. It is turned off during testing. """ def test_send_login_emails_if_send_login_email_setting_is_true(self): # type: () -> None with self.settings(SEND_LOGIN_EMAILS=True): self.assertTrue(settings.SEND_LOGIN_EMAILS) self.login("[email protected]") # email is sent and correct subject self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'A new login to your Zulip account.') def test_dont_send_login_emails_if_send_login_emails_is_false(self): # type: () -> None self.assertFalse(settings.SEND_LOGIN_EMAILS) self.login("[email protected]") self.assertEqual(len(mail.outbox), 0) def test_dont_send_login_emails_for_new_user_registration_logins(self): # type: () -> None with self.settings(SEND_LOGIN_EMAILS=True): self.register("[email protected]", "test") for email in mail.outbox: self.assertNotEqual(email.subject, 'A new login to your Zulip account.') def test_without_path_info_dont_send_login_emails_for_new_user_registration_logins(self): # type: () -> None with self.settings(SEND_LOGIN_EMAILS=True): self.client_post('/accounts/home/', {'email': "[email protected]"}) self.submit_reg_form_for_user("[email protected]", "orange", PATH_INFO='') for email in mail.outbox: self.assertNotEqual(email.subject, 'A new login to your Zulip account.') class TestBrowserAndOsUserAgentStrings(ZulipTestCase): def setUp(self): # type: () -> None self.user_agents = [ ('mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' + 'Chrome/54.0.2840.59 Safari/537.36', 'Chrome', 'Linux',), ('mozilla/5.0 (windows nt 6.1; win64; x64) applewebkit/537.36 (khtml, like gecko) ' + 'chrome/56.0.2924.87 safari/537.36', 'Chrome', 'Windows',), ('mozilla/5.0 (windows nt 6.1; wow64; rv:51.0) ' + 'gecko/20100101 firefox/51.0', 'Firefox', 'Windows',), ('mozilla/5.0 (windows nt 6.1; wow64; trident/7.0; rv:11.0) ' + 'like gecko', 'Internet Explorer', 'Windows'), ('Mozilla/5.0 (Android; Mobile; rv:27.0) ' + 'Gecko/27.0 Firefox/27.0', 'Firefox', 'Android'), ('Mozilla/5.0 (iPad; CPU OS 6_1_3 like Mac OS X) ' + 'AppleWebKit/536.26 (KHTML, like Gecko) ' + 'Version/6.0 Mobile/10B329 Safari/8536.25', 'Safari', 'iOS'), ('Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_4 like Mac OS X) ' + 'AppleWebKit/536.26 (KHTML, like Gecko) Mobile/10B350', None, 'iOS'), ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) ' + 'AppleWebKit/537.36 (KHTML, like Gecko) ' + 'Chrome/56.0.2924.87 Safari/537.36', 'Chrome', 'MacOS'), ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) ' + 'AppleWebKit/602.3.12 (KHTML, like Gecko) ' + 'Version/10.0.2 Safari/602.3.12', 'Safari', 'MacOS'), ('', None, None), ] def test_get_browser_on_new_login(self): # type: () -> None for user_agent in self.user_agents: device_browser = get_device_browser(user_agent[0]) self.assertEqual(device_browser, user_agent[1]) def test_get_os_on_new_login(self): # type: () -> None for user_agent in self.user_agents: device_os = get_device_os(user_agent[0]) self.assertEqual(device_os, user_agent[2])
Python
0.000001
@@ -220,16 +220,107 @@ evice_os +%0Afrom zerver.lib.actions import notify_new_user%0Afrom zerver.models import Recipient, Stream %0A%0Aclass @@ -4316,8 +4316,562 @@ ent%5B2%5D)%0A +%0A%0Aclass TestNotifyNewUser(ZulipTestCase):%0A def test_notify_of_new_user_internally(self):%0A # type: () -%3E None%0A new_user = self.example_user('cordelia')%0A self.make_stream('signups')%0A notify_new_user(new_user, internal=True)%0A%0A message = self.get_last_message()%0A actual_stream = Stream.objects.get(id=message.recipient.type_id)%0A self.assertEqual(actual_stream.name, 'signups')%0A self.assertEqual(message.recipient.type, Recipient.STREAM)%0A self.assertIn(%22**INTERNAL SIGNUP**%22, message.content)%0A
3719a0371fa6fcc95ca65b6d759762f1f17a16be
Solving p025
p025.py
p025.py
Python
0.999768
@@ -0,0 +1,407 @@ +%22%22%22%0AWhat is the first term in the Fibonacci sequence to contain 1000 digits?%0A%22%22%22%0A%0Adef solve_p025():%0A    for i, num in enumerate(fib_generator()):%0A        if len(str(num)) == 1000:%0A            return i + 1%0A%0Adef fib_generator():%0A    prev = 1%0A    curr = 1%0A    yield prev%0A    yield curr%0A    while True:%0A        prev, curr = curr, prev + curr%0A        yield curr%0A%0Aif __name__ == '__main__':%0A    print solve_p025()
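As a smaller sanity check of the off-by-one handling: with the threshold lowered to 3 digits the loop stops at 144, which the generator yields at i == 11, so the function returns term 12 (F12 = 144, matching the 1-based indexing the problem uses).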
bd2a70930ba67f3dd510b172fe4e00ddc2dc23c2
Create voxelmodel.py
odvm/voxelmodel.py
odvm/voxelmodel.py
Python
0.000002
@@ -0,0 +1,696 @@ +from panda3d.core import *%0Afrom odvm.quads import Quads%0A%0A%0Aclass VoxelModel(Geom):%0A def __init__(self):%0A Geom.__init__( self, GeomVertexData( 'vertices', GeomVertexFormat.get_v3n3c4(), Geom.UH_static ) )%0A self.quads = Quads(self)%0A self.add_primitive(self.quads)%0A%0A def add(self,p2s,i,j,k,c,p2i=0,p2j=0,p2k=0):%0A di = 1 %3C%3C p2i%0A dj = 1 %3C%3C p2j%0A dk = 1 %3C%3C p2k%0A self.quads.add( 1%3C%3Cp2s,i,j,k,%0A ( ( ( 0, 0, 0, di, dj, 0 ), c ),%0A ( ( 0, 0,-dk, 0, dj, 0 ), c ),%0A ( ( di, 0, 0, di, dj,-dk ), c ),%0A ( ( 0, 0,-dk, di, 0, 0 ), c ),%0A ( ( 0, dj,-dk, di, 0,-dk ), c ),%0A ( ( 0, dj, 0, di, dj,-dk ), c ) ) )%0A
b4d82c21995fb2b9e2afd93eea8849ded8b7d489
Update next-greater-element-iii.py
Python/next-greater-element-iii.py
Python/next-greater-element-iii.py
# Time: O(logn) # Space: O(logn) # Given a positive 32-bit integer n, you need to find the smallest 32-bit integer # which has exactly the same digits existing in the integer n and is greater in value than n. # If no such positive 32-bit integer exists, you need to return -1. @ # Example 1: # Input: 12 # Output: 21 # Example 2: # Input: 21 # Output: -1 class Solution(object): def nextGreaterElement(self, n): """ :type n: int :rtype: int """ digits = map(int, list(str(n))) k, l = -1, 0 for i in xrange(len(digits) - 1): if digits[i] < digits[i + 1]: k = i if k == -1: digits.reverse() return -1 for i in xrange(k + 1, len(digits)): if digits[i] > digits[k]: l = i digits[k], digits[l] = digits[l], digits[k] digits[k + 1:] = digits[:k:-1] result = int("".join(map(str, digits))) return -1 if result >= 0x7FFFFFFF else result
Python
0.000007
@@ -9,16 +9,23 @@ O(logn) + = O(1) %0A# Space @@ -33,16 +33,23 @@ O(logn) + = O(1) %0A%0A# Give
f30c542a9714574dbcee15ca7f7b4ca4cdb9d965
add atexit01.py
trypython/stdlib/atexit01.py
trypython/stdlib/atexit01.py
Python
0.000001
@@ -0,0 +1,749 @@ +# coding: utf-8%0A%0A%22%22%22%0AA sample of the atexit module.%0A%22%22%22%0Aimport atexit%0Aimport sys%0A%0Afrom trypython.common.commoncls import SampleBase%0Afrom trypython.common.commonfunc import pr%0A%0A%0Aclass Sample(SampleBase):%0A    def exec(self):%0A        #%0A        # The atexit module lets you register shutdown hooks.%0A        # Register a hook with register() and remove it with unregister().%0A        #%0A        # A function that takes no arguments can also be registered%0A        # via a decorator, i.e. @atexit.register.%0A        #%0A        atexit.register(Sample.exit_hook)%0A        pr('script', 'end')%0A        sys.exit(0)%0A%0A    @staticmethod%0A    def exit_hook():%0A        pr('exit_hook', 'called')%0A%0A    @staticmethod%0A    @atexit.register%0A    def exit_hook2():%0A        pr('exit_hook2', 'called')%0A%0A%0Adef go():%0A    obj = Sample()%0A    obj.exec()%0A%0A%0Aif __name__ == '__main__':%0A    go()%0A
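For completeness: handlers registered with atexit run in last-in-first-out order, so exit_hook above (registered last, inside exec()) fires before the decorator-registered exit_hook2. On Python 3 a handler can also be removed again; a one-line sketch:

atexit.unregister(Sample.exit_hook)  # Python 3 only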
926fe25c4995b5ab1d2464159223e2c403b72570
use python command line tool with tshark to parse pcap and convert to csv
pcap2csv.py
pcap2csv.py
Python
0.000001
@@ -0,0 +1,500 @@ +import os%0Aimport csv%0A%0Acmd = %22tshark -n -r %7B0%7D -T fields -Eheader=y -e ip.addr %3E tmp.csv%22%0A%0Aos.system(cmd.format(%22wireshark_sample.pcap%22))%0A%0Aresult = %5B%5D%0A%0Awith open(%22tmp.csv%22, %22r%22) as infile:%0A for line in infile:%0A if line == %22%5Cn%22:%0A continue%0A else:%0A result.append(line.strip().split(%22,%22))%0A%0Awith open('sample.csv', 'wb') as csvfile:%0A writer = csv.writer(csvfile, quoting=csv.QUOTE_ALL)%0A for line in result:%0A writer.writerow(line)%0A%0Aos.system(%22rm tmp.csv%22)
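tshark can emit several columns in one pass, which would make the intermediate CSV self-describing; a hedged variant of the command above (these -e field names are standard Wireshark display fields, but the exact selection is an assumption):

cmd = ("tshark -n -r {0} -T fields -Eheader=y -Eseparator=, "
       "-e frame.time_epoch -e ip.src -e ip.dst > tmp.csv")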
81f4976645225b6cf4a422186a3419a06756bfc5
add a set of test utils that will be useful for running tests
test/test_util.py
test/test_util.py
Python
0.000009
@@ -0,0 +1,919 @@ +import contextlib%0Aimport os%0Aimport os.path%0A%0Aimport mock%0Aimport requests%0A%0A%[email protected]%0Adef mocked_requests(path):%0A %22%22%22mocks the requests library to return a given file's content%22%22%22%0A # if environment variable is set, then don't mock the tests just grab files%0A # over the network. Example:%0A # env ULMO_DONT_MOCK_TESTS=1 py.test%0A if os.environ.get('ULMO_DONT_MOCK_TESTS', False):%0A yield%0A%0A else:%0A test_path = test_file_path(path)%0A%0A with open(test_path, 'rb') as f:%0A mock_response = requests.Response()%0A mock_response.status_code = 200%0A mock_response.raw = f%0A%0A with mock.patch('requests.get', return_value=mock_response):%0A yield%0A%0A%0Adef test_file_path(file_path):%0A %22%22%22translates a file path to be relative to the test files directory%22%22%22%0A return os.path.join(os.path.dirname(__file__), 'files', file_path)%0A
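A sketch of how a test would consume these helpers; the fixture path and the function under test are made-up names:

with mocked_requests('usgs/site_data.xml'):
    data = fetch_site_data('01585200')  # served from the canned file, not the network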
0827fce61013172fa7183ee294189275030c0faf
Create code_5.py
MPI_Practice_Examples/code_5.py
MPI_Practice_Examples/code_5.py
Python
0.001674
@@ -0,0 +1,1799 @@ +#dotProductParallel_1.py%0A#%22to run%22 syntax example: mpiexec -n 4 python26 dotProductParallel_1.py 40000%0A%0Afrom mpi4py import MPI%0Aimport numpy%0Aimport sys%0A%0Acomm = MPI.COMM_WORLD%0Arank = comm.Get_rank()%0Asize = comm.Get_size()%0A%0A#read from command line%0An = int(sys.argv%5B1%5D) #length of vectors%0A%0A#arbitrary example vectors, generated to be evenly divided by the number of%0A#processes for convenience%0A%0Ax = numpy.linspace(0,100,n) if comm.rank == 0 else None%0Ay = numpy.linspace(20,300,n) if comm.rank == 0 else None%0A%0A#initialize as numpy arrays%0Adot = numpy.array(%5B0.%5D)%0Alocal_n = numpy.array(%5B0%5D)%0A%0A#test for conformability%0Aif rank == 0:%0A    if (n != y.size):%0A        print %22vector length mismatch%22%0A        comm.Abort()%0A%0A    #currently, our program cannot handle sizes that are not evenly divided by%0A    #the number of processors%0A    if(n %25 size != 0):%0A        print %22the number of processors must evenly divide n.%22%0A        comm.Abort()%0A%0A    #length of each process's portion of the original vector%0A    local_n = numpy.array(%5Bn/size%5D)%0A%0A#communicate local array size to all processes%0Acomm.Bcast(local_n, root=0)%0A%0A#initialize as numpy arrays%0Alocal_x = numpy.zeros(local_n)%0Alocal_y = numpy.zeros(local_n)%0A%0A#divide up vectors%0Acomm.Scatterv(%5Bx,(0,100,n),MPI.DOUBLE%5D, local_x)%0Acomm.Scatterv(%5By, (20,300,n), MPI.DOUBLE%5D, local_y)%0A%0A#local computation of dot product%0Alocal_dot = numpy.array(%5Bnumpy.dot(local_x, local_y)%5D)%0A%0A#sum the results of each%0Acomm.Reduce(local_dot, dot, op = MPI.SUM)%0A%0Aif (rank == 0):%0A    print %22The dot product is%22, dot%5B0%5D, %22computed in parallel%22%0A    print %22and%22, numpy.dot(x,y), %22computed serially%22%0A    %0A
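Note that mpi4py's Scatterv normally expects a (counts, displacements) pair describing how many elements each rank receives, rather than the linspace endpoints used above; a hedged sketch of that shape for the evenly divisible case:

counts = numpy.full(size, n // size, dtype=int)
displs = numpy.arange(size) * (n // size)
comm.Scatterv([x, (counts, displs), MPI.DOUBLE], local_x)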
6454548da01dbc2b9f772a5c0ffb11a03dc933e7
Add module capable of rendering a circle when run
draw_shape.py
draw_shape.py
Python
0
@@ -0,0 +1,627 @@ +import pygame%0A%0A%0Apygame.init()%0A%0A#-- SCREEN CHARACTERISTICS -------------------------%3E%3E%3E%0Abackground_color = (255,255,255)%0A(width, height) = (300, 200)%0A%0A#-- RENDER SCREEN ----------------------------------%3E%3E%3E%0Ascreen = pygame.display.set_mode((width, height))%0Ascreen.fill(background_color)%0A%0A#pygame.draw.circle(canvas, color, position(x,y), radius, thickness)%0Apygame.draw.circle(screen, (255,0,0), (150, 100), 10, 1)%0A%0A%0A#-- RUN LOOP ---------------------------------------%3E%3E%3E%0Apygame.display.flip()%0Arunning = True%0Awhile running:%0A for event in pygame.event.get():%0A if event.type == pygame.QUIT:%0A running = False%0A
469b28aec45c9832e4cfe658143316fb15e103d1
Add server
server.py
server.py
Python
0.000001
@@ -0,0 +1,20 @@ +print(%22Hola mundo%22)%0A
6ac6f9f3f933a98af8722561ba181ca50c6ad1fe
Add performance test
perftest.py
perftest.py
Python
0.000043
@@ -0,0 +1,922 @@ +import resource%0Afrom time import clock%0A%0Afrom sortedsets import SortedSet%0A%0Adef test(size):%0A tm = clock()%0A ss = SortedSet((str(i), i*10) for i in range(size))%0A create_time = clock() - tm%0A print(%22SORTED SET WITH%22, size, %22ELEMENTS%22, ss._level, %22LEVELS%22)%0A print(%22Memory usage%22, resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)%0A print(%22Creation time %22, format(create_time, '10.2f'), %22s%22)%0A num = 1000%0A step = size // (num + 2)%0A items = %5B%5D%0A for i in range(step, size-step, step):%0A items.append((str(i), i*10))%0A tm = clock()%0A for k, v in items:%0A del ss%5Bk%5D%0A del_time = num/(clock() - tm)%0A tm = clock()%0A for k, v in items:%0A ss%5Bk%5D = v%0A ins_time = num/(clock() - tm)%0A print(%22Insertion speed%22, format(ins_time, '10.2f'), %22ins/s%22)%0A print(%22Deletion speed %22, format(del_time, '10.2f'), %22del/s%22)%0A%0Afor size in (10000, 100000, 1000000, 10000000):%0A test(size)%0A%0A
a107d3c088e13c4bf1a600f0ebf2664321d6799f
add solution for Binary Tree Maximum Path Sum
src/binaryTreeMaximumPathSum.py
src/binaryTreeMaximumPathSum.py
Python
0.000001
@@ -0,0 +1,617 @@ +# Definition for a binary tree node%0A# class TreeNode:%0A# def __init__(self, x):%0A# self.val = x%0A# self.left = None%0A# self.right = None%0A%0A%0Aclass Solution:%0A # @param root, a tree node%0A # @return an integer%0A%0A def maxPathSum(self, root):%0A self.res = root.val if root else 0%0A self.dfs(root)%0A return self.res%0A%0A def dfs(self, root):%0A if root is None:%0A return 0%0A l_max = max(0, self.dfs(root.left))%0A r_max = max(0, self.dfs(root.right))%0A self.res = max(self.res, root.val+l_max+r_max)%0A return root.val+max(l_max, r_max)%0A
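As a worked example (assuming the commented-out TreeNode definition is available): for the classic tree -10 / (9, 20 / (15, 7)) the best path is 15 -> 20 -> 7 with sum 42, and the max(0, ...) clamps find it by discarding the negative -10 branch:

root = TreeNode(-10)
root.left, root.right = TreeNode(9), TreeNode(20)
root.right.left, root.right.right = TreeNode(15), TreeNode(7)
print(Solution().maxPathSum(root))  # 42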
4fdef464be6eabee609ecc4327493c277693c0e0
Make content text mandatory
content/migrations/0023_auto_20160614_1130.py
content/migrations/0023_auto_20160614_1130.py
Python
0.999999
@@ -0,0 +1,613 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.7 on 2016-06-14 09:30%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('content', '0022_auto_20160608_1407'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='comment',%0A name='text',%0A field=models.TextField(verbose_name='Text'),%0A ),%0A migrations.AlterField(%0A model_name='content',%0A name='text',%0A field=models.TextField(verbose_name='Text'),%0A ),%0A %5D%0A
b106bb6f346811181c9fde27147f7b1685827cbe
436. Find Right Interval. Brute force
p436_bruteforce.py
p436_bruteforce.py
Python
0.999464
@@ -0,0 +1,1212 @@ +import sys%0Aimport unittest%0A%0A%0A# Definition for an interval.%0Aclass Interval(object):%0A def __init__(self, s=0, e=0):%0A self.start = s%0A self.end = e%0A%0A%0Aclass Solution(object):%0A def findRightInterval(self, intervals):%0A %22%22%22%0A :type intervals: List%5BInterval%5D%0A :rtype: List%5Bint%5D%0A %22%22%22%0A result = %5B%5D%0A for i in xrange(len(intervals)):%0A min_index = -1%0A min_val = sys.maxint%0A for j in xrange(len(intervals)):%0A if i == j:%0A continue%0A if intervals%5Bi%5D.end %3C= intervals%5Bj%5D.start %3C min_val:%0A min_index = j%0A min_val = intervals%5Bj%5D.start%0A%0A result.append(min_index)%0A return result%0A%0A%0Aclass Test(unittest.TestCase):%0A def test(self):%0A self._test(%5B%5B1, 2%5D%5D, %5B-1%5D)%0A self._test(%5B%5B3, 4%5D, %5B2, 3%5D, %5B1, 2%5D%5D, %5B-1, 0, 1%5D)%0A self._test(%5B%5B1, 4%5D, %5B2, 3%5D, %5B3, 4%5D%5D, %5B-1, 2, -1%5D)%0A%0A def _test(self, intervals, expected):%0A intervals = %5BInterval(a%5B0%5D, a%5B1%5D) for a in intervals%5D%0A actual = Solution().findRightInterval(intervals)%0A self.assertEqual(actual, expected)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
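The double loop above is O(n^2). Because only the interval starts need to be searched, sorting them once and bisecting per query brings this down to O(n log n); a sketch against the same Interval definition:

import bisect

def find_right_interval_fast(intervals):
    # sort (start, original_index) pairs once, then binary-search per query
    starts = sorted((iv.start, i) for i, iv in enumerate(intervals))
    keys = [s for s, _ in starts]
    result = []
    for iv in intervals:
        j = bisect.bisect_left(keys, iv.end)
        result.append(starts[j][1] if j < len(keys) else -1)
    return result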
5a857703de5fc1e67e958afb41a10db07b98bfa1
Add migration script to fix valid users with date_confirmed==None
scripts/migrate_unconfirmed_valid_users.py
scripts/migrate_unconfirmed_valid_users.py
Python
0.000184
@@ -0,0 +1,1884 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22Script to migrate users with a valid date_last_login but no date_confirmed.%22%22%22%0A%0Aimport sys%0Aimport logging%0A%0Afrom website.app import init_app%0Afrom website.models import User%0Afrom scripts import utils as script_utils%0Afrom tests.base import OsfTestCase%0Afrom tests.factories import UserFactory%0Afrom modularodm import Q%0Aimport datetime as dt%0A%0Alogger = logging.getLogger(__name__)%0A%0Adef do_migration(records):%0A    for user in records:%0A        user.date_confirmed = user.date_last_login%0A        if not user.is_registered:%0A            user.is_registered = True%0A        user.save()%0A        logger.info('Finished migrating user %7B0%7D'.format(user._id))%0A%0Adef get_targets():%0A    return User.find(Q('date_confirmed', 'eq', None) & Q('date_last_login', 'ne', None))%0A%0Adef main():%0A    init_app(routes=False)  # Sets the storage backends on all models%0A    if 'dry' in sys.argv:%0A        for user in get_targets():%0A            print(user)%0A    else:%0A        do_migration(get_targets())%0A%0Aclass TestMigrateUnconfirmedValidUsers(OsfTestCase):%0A%0A    def test_get_targets(self):%0A        test = User.find(Q('date_confirmed', 'ne', None) & Q('date_last_login', 'ne', None))%0A        assert test is not None%0A%0A    def test_do_migration(self):%0A        today = dt.datetime.utcnow()%0A        user1 = UserFactory.build(date_confirmed=None, date_last_login=today, is_registered=False)%0A        user2 = UserFactory.build(date_confirmed=None, date_last_login=today, is_registered=True)%0A        user1.save()%0A        user2.save()%0A%0A        user_list = User.find(Q('_id', 'eq', user1._id) %7C Q('_id', 'eq', user2._id))%0A        do_migration(user_list)%0A%0A        assert user1.date_confirmed is today%0A        assert user1.is_registered%0A        assert user2.date_confirmed is today%0A        assert user2.is_registered%0A%0A%0Aif __name__ == '__main__':%0A    script_utils.add_file_logger(logger, __file__)%0A    main()%0A
e12371408af1682904483341fd1f41ef6034a17f
add test
OperateSystem/Ex1/Test/SellTest.py
OperateSystem/Ex1/Test/SellTest.py
Python
0.000002
@@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*-%0A__author__ = 'jayin'%0A%0Aimport requests%0Aimport threading%0A%0A%0Adef buy_ticket():%0A res = requests.get('http://localhost:8000/buy1')%0A print threading.currentThread().getName() + u' buy ticket ' + res.content%0A%0A%0Adef main():%0A for x in range(1, 40):%0A t = threading.Thread(target=buy_ticket, name=x)%0A t.start()%0A%0Aif __name__ == '__main__':%0A main()
edeffbcbe8fb239553c73fa37e73c0188ffc2479
Add unit test for retrieving credentials from environment variables
tests/test_cli.py
tests/test_cli.py
Python
0
@@ -0,0 +1,1508 @@ +import sys%0A%0Aimport fixtures%0Aimport imgurpython%0Aimport testtools%0A%0Aimport imgur_cli.cli as cli%0A%0AFAKE_ENV = %7B'IMGUR_CLIENT_ID': 'client_id',%0A 'IMGUR_CLIENT_SECRET': 'client_secret',%0A 'IMGUR_ACCESS_TOKEN': 'access_token',%0A 'IMGUR_REFRESH_TOKEN': 'refresh_token',%0A 'IMGUR_MASHAPE_KEY': 'mashape_key'%7D%0A%0A%0Aclass TestImgurCli(testtools.TestCase):%0A%0A def make_env(self, exclude=None):%0A if not exclude:%0A exclude = %5B%5D%0A env = %7Bkey: value for key, value in FAKE_ENV.items() if key not in exclude%7D%0A self.useFixture(fixtures.MonkeyPatch('os.environ', env))%0A%0A def test_imgur_credentials_env(self):%0A self.make_env()%0A expected = ('client_id', 'client_secret', 'access_token', 'refresh_token',%0A 'mashape_key')%0A imgur_credentials = cli.imgur_credentials()%0A self.assertEqual(expected, imgur_credentials)%0A self.make_env(exclude=%5B'IMGUR_MASHAPE_KEY'%5D)%0A expected = ('client_id', 'client_secret', 'access_token', 'refresh_token',%0A None)%0A imgur_credentials = cli.imgur_credentials()%0A self.assertEqual(expected, imgur_credentials)%0A self.make_env(exclude=%5B'IMGUR_CLIENT_ID'%5D)%0A self.assertRaises(imgurpython.client.ImgurClientError,%0A cli.imgur_credentials)%0A self.make_env(exclude=%5B'IMGUR_CLIENT_SECRET'%5D)%0A self.assertRaises(imgurpython.client.ImgurClientError,%0A cli.imgur_credentials)%0A
4c148281ee8071ea8f150362388a44cf5c0895bf
Add exception classes.
tgif/exception.py
tgif/exception.py
Python
0
@@ -0,0 +1,180 @@ +%22%22%22 All exceptions go here.%0A%22%22%22%0A%0Aclass Friday(Exception):%0A    %22%22%22 Base exception in Friday game.%0A    %22%22%22%0A%0Aclass GameOver(Friday):%0A    %22%22%22 Indicates that the game is over.%0A    %22%22%22%0A
a6935d250dfdbc275ce450f813697b73ebc291e3
Create addDigits.py
Puzzles/leetcode/April-9th-2016/addDigits.py
Puzzles/leetcode/April-9th-2016/addDigits.py
Python
0
@@ -0,0 +1,634 @@ +%22%22%22%0A%5Bref.href%5D leetcode.com/problems/add-digits%0A%0A    Given a non-negative integer num, repeatedly add all its digits %0A    until the result has only one digit.%0A    %0A    For example:%0A    %0A    Given num = 38, the process is like: 3 + 8 = 11, 1 + 1 = 2. %0A    Since 2 has only one digit, return it.%0A    %0A    Credits:%0A    Special thanks to @jianchao.li.fighter for adding this problem %0A    and creating all test cases.%0A%22%22%22%0A%0Aclass Solution(object):%0A    def addDigits(self, n):%0A        %22%22%22%0A        :type n: int%0A        :rtype: int%0A        %22%22%22%0A        if n %3C 10:%0A            return n%0A        n = n %25 10 + self.addDigits(n // 10)%0A        return self.addDigits(n)%0A
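The recursion above computes the digital root, which also has a closed form: for a non-negative integer the answer is 0 when n == 0 and 1 + (n - 1) % 9 otherwise (38 -> 1 + 37 % 9 = 2, matching the example). An O(1) alternative:

class Solution(object):
    def addDigits(self, n):
        return 0 if n == 0 else 1 + (n - 1) % 9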
85142dd9f7413dcb7c214ec251d21c93517ce26c
add AcoraMatcher tool
AcoraMatcher.py
AcoraMatcher.py
Python
0
@@ -0,0 +1,2082 @@ +# coding:utf-8%0Aimport cPickle%0Aimport json%0Aimport acora%0Afrom atma import tool%0Aimport collections%0Afrom itertools import groupby%0A%0A%0Aclass AcoraMatcher:%0A def __init__(self, spec_set, min_count=1, min_len=1):%0A key_lst = %5B%5D%0A if type(spec_set) == dict or type(spec_set) == collections.Counter:%0A for spec, cnt in spec_set.items():%0A if cnt %3E= min_count and len(spec) %3E= min_len:%0A key_lst.append(spec)%0A elif type(spec_set) == list:%0A key_lst = spec_set%0A else:%0A print 'ERROR: wrong value type:', type(spec_set)%0A exit(-1)%0A self.builder = acora.AcoraBuilder(key_lst)%0A self.ac = self.builder.build()%0A%0A def match(self, des, whole_match=True):%0A ret = %5B%5D%0A letters = set(%22!%5C%22$%25&'()*+,.:;%3C%3E?@%5B%5C%5D%5E_%60%7B%7C%7D~ -%22)%0A wrong_spec = %5B'other', 'no', 'A', 'none'%5D%0A for kw, pos in self.ac.findall(des):%0A # print des%5Bpos - 1%5D == ' '%0A # print des%5Bpos: pos + len(kw)%5D%0A # print pos+len(kw) == len(des), len(des), pos, len(kw), des%5Bpos + len(kw) - 1%5D in letters%0A if kw in wrong_spec:%0A continue%0A if not whole_match:%0A ret.append((kw, pos))%0A # remove non whole match%0A elif (pos == 0 or des%5Bpos-1%5D in letters) and (pos+len(kw) == len(des) or des%5Bpos+len(kw)%5D in letters):%0A ret.append((kw, pos))%0A return ret # return value format: %5B(match_string, start_pos)%5D, start_pos starts from 0%0A%0A @staticmethod%0A def longest_match(matches):%0A ret = %5B%5D%0A matches = sorted(matches, key=lambda (x, y): (y, len(x) * -1))%0A last_end = 0%0A for m in matches:%0A if len(m%5B0%5D) + m%5B1%5D %3E last_end:%0A ret.append(m)%0A last_end = len(m%5B0%5D) + m%5B1%5D%0A return ret%0A%0A @staticmethod%0A def distribution_counter(count_dic, items):%0A for i in items:%0A key = i%0A if key not in count_dic:%0A count_dic%5Bkey%5D = 1%0A else:%0A count_dic%5Bkey%5D += 1%0A
7a880376e098f60b1666833bb6b14b359b0ebda5
add fitness_spider.py
Exercise/fitness_spider.py
Exercise/fitness_spider.py
Python
0.00231
@@ -0,0 +1,995 @@ +from bs4 import BeautifulSoup%0Aimport requests%0Afrom selenium import webdriver%0Aimport time%0Aimport sqlite3%0Afrom selenium import webdriver%0Aimport json%0A%0Adriver = webdriver.PhantomJS()%0Aclass Fitness:%0A i = 0%0A url = %22http://www.hiyd.com/dongzuo/%22%0A headers = %7B%0A 'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'%0A %7D%0A def get_info(self, url):%0A response = requests.get(url, headers=self.headers, timeout=5)%0A # driver.get(url)%0A soup = BeautifulSoup(response.text, %22html.parser%22)%0A # soup = BeautifulSoup(driver.page_source, %22html.parser%22)%0A text = str(soup.find_all(%22script%22)%5B-1%5D)%0A # print(driver.page_source)%0A data_text = text.split(%22e.init(%22)%5B1%5D.split(%22);%22)%5B0%5D%0A json_text = json.loads(data_text)%0A print(json_text)%0A%0Aif __name__ == %22__main__%22:%0A spider = Fitness()%0A while spider.i %3C 1:%0A spider.i += 1%0A spider.get_info(spider.url + str(spider.i) + %22/%22)
52684fdcd94e05c96e9d9643a5c9d2cf9d8f5713
Use six.string_types as suggested by @piquadrat.
opbeat/instrumentation/packages/dbapi2.py
opbeat/instrumentation/packages/dbapi2.py
"""Provides classes to instrument dbapi2 providers https://www.python.org/dev/peps/pep-0249/ """ import re from opbeat.instrumentation.packages.base import AbstractInstrumentedModule from opbeat.traces import trace from opbeat.utils import wrapt class Literal(object): def __init__(self, literal_type, content): self.literal_type = literal_type self.content = content def __eq__(self, other): return (isinstance(other, Literal) and self.literal_type == other.literal_type and self.content == other.content) def __repr__(self): return "<Literal {}{}{}>".format(self.literal_type, self.content, self.literal_type) def skip_to(start, tokens, value_sequence): i = start while i < len(tokens): for idx, token in enumerate(value_sequence): if tokens[i+idx] != token: break else: # Match return tokens[start:i+len(value_sequence)] i += 1 # Not found return None def look_for_table(sql, keyword): tokens = tokenize(sql) table_name = _scan_for_table_with_tokens(tokens, keyword) if isinstance(table_name, Literal): table_name = table_name.content.strip(table_name.literal_type) return table_name def _scan_for_table_with_tokens(tokens, keyword): seen_keyword = False for idx, lexeme in scan(tokens): if seen_keyword: if lexeme == "(": return _scan_for_table_with_tokens(tokens[idx:], keyword) else: return lexeme if isinstance(lexeme, str) and lexeme.upper() == keyword: seen_keyword = True def tokenize(sql): return [t for t in re.split("(\W)", sql) if t != ''] def scan(tokens): literal_start_idx = None literal_started = None prev_was_escape = False lexeme = [] i = 0 while i < len(tokens): token = tokens[i] if literal_start_idx: if prev_was_escape: prev_was_escape = False lexeme.append(token) else: if token == literal_started: if (literal_started == "'" and len(tokens) > i+1 and tokens[i+1] == "'"): # double quotes i += 1 lexeme.append("'") else: yield i, Literal(literal_started, "".join(lexeme)) literal_start_idx = None literal_started = None lexeme = [] else: if token == '\\': prev_was_escape = token else: prev_was_escape = False lexeme.append(token) elif literal_start_idx is None: if token in ["'", '"', "`"]: literal_start_idx = i literal_started = token elif token == "$": # Postgres can use arbitrary characters between two $'s as a # literal separation token, e.g.: $fish$ literal $fish$ # This part will detect that and skip over the literal. skipped_token = skip_to(i+1, tokens, '$') if skipped_token is not None: dollar_token = ['$'] + skipped_token skipped = skip_to(i + len(dollar_token), tokens, dollar_token) if skipped: # end wasn't found. 
yield i, Literal("".join(dollar_token), "".join(skipped[:-len(dollar_token)])) i = i + len(skipped) + len(dollar_token) else: if token != ' ': yield i, token i += 1 if lexeme: yield i, lexeme def extract_signature(sql): sql = sql.strip() first_space = sql.find(' ') if first_space < 0: return sql second_space = sql.find(' ', first_space+1) sql_type = sql[0:first_space].upper() if sql_type in ['INSERT', 'DELETE']: keyword = 'INTO' if sql_type == 'INSERT' else 'FROM' sql_type = sql_type + " " + keyword table_name = look_for_table(sql, keyword) elif sql_type in ['CREATE', 'DROP']: # 2nd word is part of SQL type sql_type = sql_type + sql[first_space:second_space] table_name = '' elif sql_type == 'UPDATE': table_name = look_for_table(sql, "UPDATE") elif sql_type == 'SELECT': # Name is first table try: sql_type = 'SELECT FROM' table_name = look_for_table(sql, "FROM") except: table_name = '' else: # No name table_name = '' signature = ' '.join(filter(bool, [sql_type, table_name])) return signature class CursorProxy(wrapt.ObjectProxy): provider_name = None def callproc(self, procname, params=None): return self._trace_sql(self.__wrapped__.callproc, procname, params) def execute(self, sql, params=None): return self._trace_sql(self.__wrapped__.execute, sql, params) def executemany(self, sql, param_list): return self._trace_sql(self.__wrapped__.executemany, sql, param_list) def _trace_sql(self, method, sql, params): signature = self.extract_signature(sql) kind = "db.{0}.sql".format(self.provider_name) with trace(signature, kind, {"sql": sql}): if params is None: return method(sql) else: return method(sql, params) def extract_signature(self, sql): raise NotImplementedError() class ConnectionProxy(wrapt.ObjectProxy): cursor_proxy = CursorProxy def cursor(self, *args, **kwargs): return self.cursor_proxy(self.__wrapped__.cursor(*args, **kwargs)) class DbApi2Instrumentation(AbstractInstrumentedModule): connect_method = None def call(self, module, method, wrapped, instance, args, kwargs): return ConnectionProxy(wrapped(*args, **kwargs)) def call_if_sampling(self, module, method, wrapped, instance, args, kwargs): # Contrasting to the superclass implementation, we *always* want to # return a proxied connection, even if there is no ongoing opbeat # transaction yet. This ensures that we instrument the cursor once # the transaction started. return self.call(module, method, wrapped, instance, args, kwargs)
Python
0
@@ -240,16 +240,21 @@ rt wrapt +, six %0A%0A%0Aclass @@ -1652,18 +1652,31 @@ exeme, s -tr +ix.string_types ) and le
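For context: six.string_types is (basestring,) on Python 2 and (str,) on Python 3, so the replaced isinstance check now accepts both byte and unicode strings on 2 while staying correct on 3:

import six
isinstance(u"lexeme", six.string_types)  # True on both major versions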
a8fd0bfa974ff818ec105a42c585bae48030a086
Create notebooknetc.py
_src/om2py3w/3wex0/notebooknetc.py
_src/om2py3w/3wex0/notebooknetc.py
Python
0.000002
@@ -0,0 +1,467 @@ +# _*_coding:utf-8_*_%0A# Client program%0A%0Afrom socket import *%0Aimport time%0Aimport notebooknets%0A%0Adef main():%0A    BUF_SIZE = 65565%0A    ss_addr = ('127.0.0.1', 8800)%0A    cs = socket(AF_INET, SOCK_DGRAM)%0A%0A    while True:%0A        global data%0A        data = raw_input('Please Input data%3E')%0A        cs.sendto(data, ss_addr)%0A        data, addr = cs.recvfrom(BUF_SIZE)%0A        print %22Data: %22, data %0A    cs.close()%0A    notebooknets.history(data)%0Aif __name__ == '__main__':%0A    main()%0A
e3aa781fe60e3ce293e34767c78e947ffc169cbc
Allow module contributions to return dict
src/ggrc/extensions.py
src/ggrc/extensions.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] import sys from ggrc import settings import ggrc def get_extension_name(extension_setting, default): extension_name = getattr(settings, extension_setting, default) if extension_name is not None: extension_name = extension_name if not callable(extension_name) else \ extension_name() else: extension_name = default return extension_name def get_extension_modules(modules=[]): if len(modules) == 0: extension_names = getattr(settings, 'EXTENSIONS') if extension_names is None: modules.append(None) else: for m in settings.EXTENSIONS: modules.append(get_extension_module(m)) if len(modules) == 0 or modules[0] is None: return [] else: return modules def get_extension_module(module_name): __import__(module_name) return sys.modules[module_name] def get_extension_module_for(extension_setting, default, extension_modules={}): if extension_setting not in extension_modules: extension_name = get_extension_name(extension_setting, default) if not extension_name: extension_modules[extension_setting] = extension_name else: __import__(extension_name) extension_modules[extension_setting] = sys.modules[extension_name] return extension_modules[extension_setting] def get_extension_instance(extension_setting, default, extensions={}): if extension_setting not in extensions: extension_name = get_extension_name(extension_setting, default) idx = extension_name.rfind('.') module_name = extension_name[0:idx] class_name = extension_name[idx + 1:] __import__(module_name) module = sys.modules[module_name] extensions[extension_setting] = getattr(module, class_name)(settings) return extensions[extension_setting] def _get_contribution(module, name): """Fetch contributions from a single module. Args: module: Python module that will be checked for a given attribute. name: Name of the attribute that we want to collect from a module. The attribute must be a list or a callable that returns a list. Returns: List of contributions found Raises: TypeError: If the attribute is not a list or a callable that returns a list. """ contributions = getattr(module, name, []) if callable(contributions): contributions = contributions() if not isinstance(contributions, list): raise TypeError("Contributed item must be a list or a callable that " "returns a list") return contributions def get_module_contributions(name): """Fetch contributions from all modules if they exist. This function loops through all modules and checks if the main module package contains attribute with a given name or if it cotnains contributions which have an attribute with the said name. It gathers all such attributes in a list and returns it. Args: name (string): name of the contributed attribute that will be collected. Returns: A list of all collected atributes. """ all_contributions = [] all_modules = [ggrc] + get_extension_modules() for module in all_modules: all_contributions.extend(_get_contribution(module, name)) contributions_module = getattr(module, "contributions", None) if contributions_module: all_contributions.extend(_get_contribution(contributions_module, name)) return all_contributions
Python
0.000001
@@ -2550,16 +2550,96 @@ ions()%0A%0A + if isinstance(contributions, dict):%0A contributions = contributions.items()%0A if not @@ -3641,16 +3641,127 @@ name))%0A + if all(isinstance(val, tuple) for val in all_contributions):%0A all_contributions = dict(all_contributions)%0A return
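A standalone sketch of the pattern this diff introduces (the hook below is hypothetical): a contribution may be a dict, which is normalized to (key, value) pairs on collection and re-assembled into a dict once every collected item is a pair:

```python
# Hypothetical contribution hook returning a dict instead of a list.
def contributed_attributes():
    return {"roles": ["Reader"], "scopes": ["Private"]}

contributions = contributed_attributes()
if isinstance(contributions, dict):
    contributions = list(contributions.items())  # same shape as the list form

all_contributions = []
all_contributions.extend(contributions)
if all(isinstance(val, tuple) for val in all_contributions):
    all_contributions = dict(all_contributions)  # back to a mapping

print(all_contributions)  # {'roles': ['Reader'], 'scopes': ['Private']}
```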
13f495ddabd1997b7dfdc9e2933b82fd25ecd664
Create LevelOrderTraversal.py from LeetCode
LevelOrderTraversal.py
LevelOrderTraversal.py
Python
0
@@ -0,0 +1,1687 @@ +%0A#https://leetcode.com/problems/binary-tree-level-order-traversal/#/description%0A%0A# Definition for a binary tree node.%0A# class TreeNode(object):%0A# def __init__(self, x):%0A# self.val = x%0A# self.left = None%0A# self.right = None%0A%0A%0Aclass Node(object):%0A def __init__(self,node,level):%0A self.node = node%0A self.level = level %0A %0Aclass Solution(object):%0A %0A def __init__(self):%0A self.array = %5B%5D %0A self.level_counter = 0%0A self.result = %5B%5D%0A self.levelq = %5B%5D%0A %0A def queue(self,node):%0A self.array.append(node)%0A %0A def isNotEmpty(self):%0A return self.array%0A %0A def popValue(self): %0A value = self.array%5B0%5D%0A del self.array%5B0%5D%0A return value%0A %0A def levelOrder(self, root):%0A %22%22%22%0A :type root: TreeNode%0A :rtype: List%5BList%5Bint%5D%5D%0A %22%22%22%0A if not root:%0A return self.result%0A self.queue(Node(root,0)) %0A while self.isNotEmpty(): %0A bigNode = self.popValue() %0A if bigNode.level %3E self.level_counter: %0A self.level_counter = bigNode.level %0A self.result.append(self.levelq%5B:%5D)%0A self.levelq%5B:%5D = %5B%5D %0A %0A self.levelq.append(bigNode.node.val) %0A if bigNode.node.left : %0A self.queue(Node(bigNode.node.left, bigNode.level + 1))%0A if bigNode.node.right :%0A self.queue(Node(bigNode.node.right, bigNode.level + 1))%0A if self.levelq:%0A self.result.append(self.levelq%5B:%5D)%0A %0A return self.result%0A %0A %0A %0A %0A
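With the Solution class from this record in scope, a quick usage sketch; the tree [3, 9, 20, null, null, 15, 7] should yield [[3], [9, 20], [15, 7]]:

```python
# Hypothetical usage, mirroring the commented-out TreeNode definition above.
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

root = TreeNode(3)
root.left = TreeNode(9)
root.right = TreeNode(20)
root.right.left = TreeNode(15)
root.right.right = TreeNode(7)

print(Solution().levelOrder(root))  # [[3], [9, 20], [15, 7]]
```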
6f83518ebbbc94de83368d4c1a598850df0a6ba5
Include any_cuts only if they exist
cubes/auth.py
cubes/auth.py
# -*- coding=utf -*- import os.path import json from collections import namedtuple from .extensions import get_namespace, initialize_namespace from .browser import Cell, cut_from_string, cut_from_dict from .errors import * from .common import read_json_file, sorted_dependencies __all__ = ( "create_authorizer", "Authorizer", "SimpleAuthorizer", "NotAuthorized", "right_from_dict" ) ALL_CUBES_WILDCARD = '*' class NotAuthorized(UserError): """Raised when user is not authorized for the request.""" # Note: This is not called NotAuthorizedError as it is not in fact an # error, it is just a type of signal. def create_authorizer(name, **options): """Gets a new instance of an authorizer with name `name`.""" ns = get_namespace("authorizers") if not ns: ns = initialize_namespace("authorizers", root_class=Authorizer, suffix="_authorizer", option_checking=True) try: factory = ns[name] except KeyError: raise ConfigurationError("Unknown authorizer '%s'" % name) return factory(**options) class Authorizer(object): def authorize(self, token, cubes): """Returns list of authorized cubes from `cubes`. If none of the cubes are authorized an empty list is returned. Default implementation returns the same `cubes` list as provided. """ return cubes def restricted_cell(self, token, cube, cell=None): """Restricts the `cell` for `cube` according to authorization by `token`. If no cell is provided or the cell is empty then returns the restriction cell. If there is no restriction, returns the original `cell` if provided or `None`. """ return cell class NoopAuthorizer(Authorizer): def __init__(self): super(NoopAuthorizer, self).__init__() class _SimpleAccessRight(object): def __init__(self, roles, allow_cubes, deny_cubes, cube_restrictions): self.roles = set(roles) if roles else set([]) self.allow_cubes = set(allow_cubes) if allow_cubes else set([]) self.deny_cubes = set(deny_cubes) if deny_cubes else set([]) self.cube_restrictions = cube_restrictions or {} def merge(self, other): """Merge `right` with the receiver: * `allow_cubes` are merged (union) * `deny_cubes` are merged (union) * `cube_restrictions` from `other` with same cube replace restrictions from the receiver""" self.roles |= other.roles self.allow_cubes |= other.allow_cubes self.deny_cubes |= other.deny_cubes for cube, restrictions in other.cube_restrictions.items(): if not cube in self.cube_restrictions: self.cube_restrictions[cube] = restrictions else: mine = self.cube_restrictions[cube] mine += restrictions def is_allowed(self, cube_name): return (self.allow_cubes \ and (cube_name in self.allow_cubes \ or ALL_CUBES_WILDCARD in self.allow_cubes)) \ or \ (self.deny_cubes \ and (cube_name not in self.deny_cubes \ and ALL_CUBES_WILDCARD not in self.deny_cubes)) def to_dict(self): return { "roles": list(self.roles), "allowed_cubes": list(self.allow_cubes), "denied_cubes": list(self.deny_cubes), "cube_restrictions": dict(self.cube_restrictions) } def right_from_dict(info): return _SimpleAccessRight( info.get('roles'), info.get('allowed_cubes'), info.get('denied_cubes'), info.get('cube_restrictions') ) class SimpleAuthorizer(Authorizer): __options__ = [ { "name": "rights_file", "description": "JSON file with access rights", "type": "string" }, { "name": "roles_file", "description": "JSON file with access right roles", "type": "string" }, ] def __init__(self, rights_file=None, roles_file=None, roles=None, rights=None, **options): """Creates a simple JSON-file based authorizer. Reads data from `rights_file` and `roles_file` and merges them with `roles` and `rights` dictionaries respectively.""" super(SimpleAuthorizer, self).__init__() roles = roles or {} rights = rights or {} if roles_file: content = read_json_file(roles_file, "access roles") roles.update(content) if rights_file: content = read_json_file(rights_file, "access rights") rights.update(content) self.roles = {} self.rights = {} # Process the roles for key, info in roles.items(): role = right_from_dict(info) self.roles[key] = role deps = dict((name, role.roles) for name, role in self.roles.items()) order = sorted_dependencies(deps) for name in order: role = self.roles[name] for parent_name in role.roles: parent = self.roles[parent_name] role.merge(parent) # Process rights for key, info in rights.items(): right = right_from_dict(info) self.rights[key] = right for role_name in right.roles: role = self.roles[role_name] right.merge(role) def right(self, token): try: right = self.rights[token] except KeyError: raise NotAuthorized("Unknown access right '%s'" % token) return right def authorize(self, token, cubes): try: right = self.right(token) except NotAuthorized: return [] authorized = [] for cube in cubes: cube_name = str(cube) if right.is_allowed(cube_name): authorized.append(cube) return authorized def restricted_cell(self, token, cube, cell): right = self.right(token) cuts = right.cube_restrictions.get(cube.name) # Append cuts for "any cube" any_cuts = right.cube_restrictions.get(ALL_CUBES_WILDCARD, []) cuts += any_cuts if cuts: restriction_cuts = [] for cut in cuts: if isinstance(cut, basestring): cut = cut_from_string(cut, cube) else: cut = cut_from_dict(cut) restriction_cuts.append(cut) restriction = Cell(cube, restriction_cuts) else: restriction = None if not restriction: return cell elif cell: return cell & restriction else: return restriction
Python
0
@@ -6366,24 +6366,49 @@ LDCARD, %5B%5D)%0A + if any_cuts:%0A cuts
891dc05f36ae9084d8511bf3e26e0631eadecef7
add medications urls
medications/urls.py
medications/urls.py
Python
0.000009
@@ -0,0 +1,194 @@ +%0Afrom django.conf.urls import url%0Afrom medications.views import MedicationsView%0A%0Aurlpatterns = %5B%0A url(r'%5E$', MedicationsView.as_view()),%0A url(r'%5E(%5B0-9%5D+)/$', MedicationsView.as_view()),%0A%5D%0A
aa4f1df448c6d01875ed667e37afe68c114892ed
Add initial verification endpoint. Add all balance endpoint
api/mastercoin_verify.py
api/mastercoin_verify.py
Python
0.000001
@@ -0,0 +1,1201 @@ +import os%0Aimport glob%0Afrom flask import Flask, request, jsonify, abort, json%0A%0Adata_dir_root = os.environ.get('DATADIR')%0A%0Aapp = Flask(__name__)%0Aapp.debug = True%0A%0A%0A@app.route('/addresses')%0Adef addresses():%0A    currency_id = request.args.get('currency_id')%0A    print currency_id%0A    response = %5B%5D%0A    addr_glob = glob.glob(data_dir_root + '/addr/*.json')%0A%0A    for address_file in addr_glob:%0A        with open(address_file, 'r') as f:%0A            addr = json.load(f)%0A        res = %7B%0A            'address': addr%5B'address'%5D%0A        %7D%0A%0A        if currency_id == '0':%0A            btc_balance = %5Bx%5B'value'%5D for x in addr%5B'balance'%5D if x%5B'symbol'%5D == 'BTC'%5D%5B0%5D%0A            res%5B'balance'%5D = float(btc_balance)%0A            response.append(res)%0A        else:%0A            if currency_id == '1' or currency_id == '2':%0A                msc_currency_id = str(int(currency_id) - 1) # Mastercoin-tools is off by one on currency id from the spec%0A%0A                if msc_currency_id in addr:%0A                    print addr%5Bmsc_currency_id%5D%5B'balance'%5D%0A                    res%5B'balance'%5D = float(addr%5Bmsc_currency_id%5D%5B'balance'%5D)%0A                    response.append(res)%0A%0A    json_response = json.dumps(response)%0A    return json_response%0A%0A%0A@app.route('/transactions/%3Caddress%3E')%0Adef transactions(address=None):%0A%0A    return %22%22%0A
23799c4a33b9d2da82ec0770f15e840459a940c6
Add api comtrade
app/apis/comtrade_api.py
app/apis/comtrade_api.py
Python
0
@@ -0,0 +1,1699 @@ +from flask import Blueprint, jsonify, request%0Afrom sqlalchemy import func, distinct%0Afrom inflection import singularize%0Afrom app.models.comtrade import Comtrade as Model%0Afrom app import cache%0Afrom app.helpers.cache_helper import api_cache_key%0A%0Ablueprint = Blueprint('comtrade_api', __name__, url_prefix='/comtrade')%0A%[email protected]('/%3Cpath:path%3E/')%[email protected](key_prefix=api_cache_key(%22comtrade%22))%0Adef api(path):%0A dimensions = map(singularize, path.split('/'))%0A if invalid_dimension(dimensions):%0A return 'Error', 403%0A%0A filters = %7Bk: v for k, v in request.args.to_dict().iteritems() if k in Model.dimensions()%7D%0A counts = %5Bc for c in map(singularize, request.args.getlist('count')) if c in Model.dimensions()%5D%0A values = get_values(request)%0A%0A group_columns = get_columns(dimensions)%0A count_columns = get_columns(counts)%0A aggregated_values = %5BModel.aggregate(v) for v in values%5D%0A%0A headers = get_headers(group_columns) + get_headers(count_columns, '_count') + values%0A entities = group_columns + map(lambda x: func.count(distinct(x)), count_columns) + aggregated_values%0A query = Model.query.with_entities(*entities).filter_by(**filters).group_by(*group_columns)%0A%0A return jsonify(data=query.all(), headers=headers)%0A%0Adef get_values(request):%0A values = %5Bv for v in request.args.getlist('value') if v in Model.values()%5D%0A return values if len(values) else Model.values()%0A%0Adef get_headers(columns, suffix=''):%0A return map(lambda x: x.key + suffix, columns)%0A%0Adef get_columns(dimensions):%0A return %5Bgetattr(Model, dimension) for dimension in dimensions%5D%0A%0Adef invalid_dimension(dimensions):%0A return not set(dimensions).issubset(set(Model.dimensions()))%0A
670e5d017adb24c5adffb38fa59059fec5175c3c
Create hello.py
hello.py
hello.py
Python
0.999503
@@ -0,0 +1,23 @@ +print('hello, world!')%0A
1692161ad43fdc6a0e2ce9eba0bacefc04c46b5c
Add form generator module.
src/epiweb/apps/survey/utils.py
src/epiweb/apps/survey/utils.py
Python
0
@@ -0,0 +1,1202 @@ +from django import forms%0Afrom epiweb.apps.survey.data import Survey, Section, Question%0A%0A_ = lambda x: x%0A%0Adef create_field(question):%0A    if question.type == 'yes-no':%0A        field = forms.ChoiceField(widget=forms.RadioSelect,%0A            choices=%5B('yes', _('Yes')), ('no', _('No'))%5D)%0A%0A    elif question.type == 'option-multiple':%0A        field = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple,%0A            choices=zip(range(0, len(question.options)), question.options))%0A%0A    elif question.type == 'option-single':%0A        field = forms.ChoiceField(widget=forms.RadioSelect,%0A            choices=zip(range(0, len(question.options)), question.options))%0A%0A    elif question.type == 'date':%0A        field = forms.DateField(input_formats=%5B'%25m/%25d/%25y'%5D)%0A%0A    else:%0A        field = forms.CharField()%0A%0A    field.label = question.label%0A    field.required = False%0A%0A    return field%0A%0Adef generate_form(section, values=None):%0A    if values:%0A        form = forms.Form(values)%0A    else:%0A        form = forms.Form()%0A%0A    for question in section.questions:%0A        form.fields%5Bquestion.id%5D = create_field(question)%0A%0A    return form%0A%0A
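A self-contained usage sketch for generate_form(), assuming a configured Django environment; the stand-in classes below are hypothetical and carry only the attributes the generator reads (questions, id, label, type, options):

```python
# Hypothetical stand-ins, since the real Section/Question constructors are not shown.
class Q(object):
    def __init__(self, id, label, type, options=None):
        self.id, self.label, self.type, self.options = id, label, type, options

class S(object):
    def __init__(self, questions):
        self.questions = questions

section = S([Q('fever', 'Did you have a fever?', 'yes-no'),
             Q('onset', 'When did it start?', 'date')])

form = generate_form(section)                     # unbound form, for rendering
bound = generate_form(section, {'fever': 'yes'})  # bound form, for validation
print(sorted(form.fields.keys()))                 # ['fever', 'onset']
```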
3ae03deee41a5c243e46071f98bc2f0eedb7ed8f
Check for DirectiveSection duplicates before Clause import.
src/ggrc/converters/sections.py
src/ggrc/converters/sections.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] from .base import * from ggrc.models import Directive, DirectiveSection, Section, Clause from .base_row import * from collections import OrderedDict class SectionRowConverter(BaseRowConverter): model_class = Section def setup_object(self): self.obj = self.setup_object_by_slug(self.attrs) if self.obj.directive \ and self.obj.directive is not self.importer.options.get('directive'): self.importer.errors.append('Section code is already used.') else: self.obj.directive = self.importer.options.get('directive') if self.obj.id is not None: self.add_warning('slug', "Section already exists and will be updated") def reify(self): self.handle('slug', SlugColumnHandler) self.handle_date('created_at', no_import=True) self.handle_date('updated_at', no_import=True) self.handle_text_or_html('description') self.handle_text_or_html('notes') self.handle_raw_attr('reference_url') self.handle('contact', ContactEmailHandler, person_must_exist=True) self.handle('controls', LinkControlsHandler) self.handle_raw_attr('title', is_required=True) def save_object(self, db_session, **options): directive_id = options.get('directive_id') if directive_id: self.obj.directive_id = int(directive_id) db_session.add(self.obj) class ClauseRowConverter(SectionRowConverter): model_class = Clause def save_object(self, db_session, **options): directive_id = options.get('directive_id') if directive_id: db_session.add(self.obj) ds = DirectiveSection(directive_id=directive_id, section=self.obj) db_session.add(ds) class SectionsConverter(BaseConverter): metadata_export_order = ['type', 'slug'] metadata_map = OrderedDict([ ('Type','type'), ('Directive Code','slug'), ]) object_export_order = [ 'slug', 'title', 'description', 'controls', 'created_at', 'updated_at' ] object_map = OrderedDict([ ('Section Code', 'slug'), ('Section Title', 'title'), ('Section Description' , 'description'), ('Notes', 'notes'), ('Reference URL', 'reference_url'), ('Map:Person of Contact', 'contact'), ('Controls', 'controls'), ('Created', 'created_at'), ('Updated', 'updated_at') ]) row_converter = SectionRowConverter def validate_code(self, attrs): if not attrs.get('slug'): self.errors.append(u'Missing {} Code heading'.format(self.directive_kind())) elif attrs['slug'] != self.directive().slug: self.errors.append(u'{0} Code must be {1}'.format( self.directive_kind(), self.directive().slug )) # Creates the correct metadata_map for the specific directive kind. def create_metadata_map(self): if self.options.get('directive'): self.metadata_map = OrderedDict( [(k.replace("Directive", self.directive().type), v) if 'Directive' in k else (k, v) for k, v in self.metadata_map.items()] ) # Called in case the object_map headers change amongst similar imports def create_object_map(self): if self.directive_kind() == "Contract": self.object_map = OrderedDict( [(k.replace("Section", "Clause"), v) \ if 'Section' in k else (k, v) for k, v in self.object_map.items()] ) def directive_kind(self): return self.directive().kind or self.directive().meta_kind def directive(self): return self.options.get('directive') def do_export_metadata(self): yield self.metadata_map.keys() yield [self.directive().type, self.directive().slug] yield [] yield [] yield self.object_map.keys() class ClausesConverter(SectionsConverter): row_converter = ClauseRowConverter
Python
0
@@ -1722,32 +1722,36 @@ ive_id')%0A if +not directive_id:%0A @@ -1744,24 +1744,386 @@ rective_id:%0A + return%0A # Make sure directive/clause aren't already connected before creating%0A clause_id = getattr(self.obj, 'id', None)%0A matching_relationship_count = DirectiveSection.query%5C%0A .filter(DirectiveSection.directive_id==directive_id)%5C%0A .filter(DirectiveSection.section_id==clause_id)%5C%0A .count()%0A if matching_relationship_count == 0:%0A db_ses
72a5f0d301b2169367c8bcbc42bb53b71c1d635c
Create utils.py
utils.py
utils.py
Python
0.000001
@@ -0,0 +1,1053 @@ +from google.appengine.api import users%0Afrom google.appengine.ext import webapp%0Afrom google.appengine.ext.webapp import blobstore_handlers%0Afrom google.appengine.api import memcache%0A%0Aimport jinja2%0Aimport logging%0Aimport json%0Aimport os%0A%0Aclass BaseHandler(webapp.RequestHandler):%0A    context = %7B%7D%0A    def initialize(self, request, response):%0A        %22%22%22Populate the context, then run the default initialization%22%22%22%0A        self.populateContext()%0A        super(BaseHandler, self).initialize(request, response)%0A%0A    def populateContext(self):%0A        %22%22%22Load up the stuff that every web handler will need%22%22%22%0A        user = users.get_current_user()%0A%0A        if user:%0A            self.context%5B'logged_in'%5D = True%0A            self.context%5B'is_admin'%5D = users.is_current_user_admin()%0A%0A    def render(self, template_name):%0A        %22%22%22Render a template from the views directory by passing its name%22%22%22%0A        env = jinja2.Environment(loader=jinja2.FileSystemLoader('views'))%0A        template = env.get_template(template_name)%0A        self.response.out.write(template.render(self.context))%0A%0A
26bc11340590b0b863527fa12da03cea528feb46
Add initial stub of GerritClient class
pygerrit/client.py
pygerrit/client.py
Python
0.000006
@@ -0,0 +1,1224 @@ +%22%22%22 Gerrit client interface. %22%22%22%0A%0Afrom Queue import Queue, Empty, Full%0A%0Afrom pygerrit.error import GerritError%0Afrom pygerrit.events import GerritEventFactory%0A%0A%0Aclass GerritClient(object):%0A%0A %22%22%22 Gerrit client interface. %22%22%22%0A%0A def __init__(self, host):%0A self._factory = GerritEventFactory()%0A self._host = host%0A self._events = Queue()%0A%0A def get_event(self, block=True, timeout=None):%0A %22%22%22 Get the next event from the queue.%0A%0A Return a %60GerritEvent%60 instance, or None if:%0A - %60block%60 was False and there is no event available in the queue, or%0A - %60block%60 was True and no event was available within the time%0A specified by %60timeout%60.%0A%0A %22%22%22%0A try:%0A return self._events.get(block, timeout)%0A except Empty:%0A return None%0A%0A def put_event(self, json_data):%0A %22%22%22 Create event from %60json_data%60 and add it to the queue.%0A%0A Raise GerritError if the queue is full, or the factory could not%0A create the event.%0A%0A %22%22%22%0A try:%0A event = self._factory.create(json_data)%0A self._events.put(event)%0A except Full:%0A raise GerritError(%22Unable to add event: queue is full%22)%0A
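A hedged usage sketch for the GerritClient in this record, assuming GerritEventFactory.create() accepts a JSON string from Gerrit's stream-events; the host name and payload are illustrative only:

```python
# Hypothetical flow: a stream-events reader feeds JSON into the client,
# and a consumer drains the queue with a timeout.
client = GerritClient("review.example.org")
client.put_event('{"type": "comment-added"}')    # factory parsing is assumed
event = client.get_event(block=True, timeout=5)  # None if nothing arrived in time
if event is not None:
    print(event)
```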
10f99acc11051b37595751b9b9b84e11dd133a64
Add functions for getting available checksums for a channel from remote and disk.
kolibri/core/content/utils/file_availability.py
kolibri/core/content/utils/file_availability.py
Python
0
@@ -0,0 +1,2878 @@ +import json%0Aimport os%0Aimport re%0A%0Aimport requests%0Afrom django.core.cache import cache%0A%0Afrom kolibri.core.content.models import LocalFile%0Afrom kolibri.core.content.utils.paths import get_content_storage_dir_path%0Afrom kolibri.core.content.utils.paths import get_file_checksums_url%0A%0A%0Achecksum_regex = re.compile(%22%5E(%5Ba-f0-9%5D%7B32%7D)$%22)%0A%0A%0Adef get_available_checksums_from_remote(channel_id, baseurl):%0A CACHE_KEY = %22PEER_AVAILABLE_CHECKSUMS_%7Bbaseurl%7D_%7Bchannel_id%7D%22.format(%0A baseurl=baseurl, channel_id=channel_id%0A )%0A if CACHE_KEY not in cache:%0A response = requests.get(get_file_checksums_url(channel_id, baseurl))%0A%0A checksums = None%0A%0A # Do something if we got a successful return%0A if response.status_code == 200:%0A try:%0A checksums = json.loads(response.content)%0A # Filter to avoid passing in bad checksums%0A checksums = %5B%0A checksum for checksum in checksums if checksum_regex.match(checksum)%0A %5D%0A cache.set(CACHE_KEY, checksums, 3600)%0A except (ValueError, TypeError):%0A # Bad JSON parsing will throw ValueError%0A # If the result of the json.loads is not iterable, a TypeError will be thrown%0A # If we end up here, just set checksums to None to allow us to cleanly continue%0A pass%0A return cache.get(CACHE_KEY)%0A%0A%0Adef get_available_checksums_from_disk(channel_id, basepath):%0A PER_DISK_CACHE_KEY = %22DISK_AVAILABLE_CHECKSUMS_%7Bbasepath%7D%22.format(basepath=basepath)%0A PER_DISK_PER_CHANNEL_CACHE_KEY = %22DISK_AVAILABLE_CHECKSUMS_%7Bbasepath%7D_%7Bchannel_id%7D%22.format(%0A basepath=basepath, channel_id=channel_id%0A )%0A if PER_DISK_PER_CHANNEL_CACHE_KEY not in cache:%0A if PER_DISK_CACHE_KEY not in cache:%0A content_dir = get_content_storage_dir_path(datafolder=basepath)%0A%0A disk_checksums = %5B%5D%0A%0A for _, _, files in os.walk(content_dir):%0A for name in files:%0A checksum = os.path.splitext(name)%5B0%5D%0A # Only add valid checksums formatted according to our standard filename%0A if checksum_regex.match(checksum):%0A disk_checksums.append(checksum)%0A # Cache is per device, so a relatively long lived one should%0A # be fine.%0A cache.set(PER_DISK_CACHE_KEY, disk_checksums, 3600)%0A disk_checksums = set(cache.get(PER_DISK_CACHE_KEY))%0A channel_checksums = set(%0A LocalFile.objects.filter(%0A files__contentnode__channel_id=channel_id%0A ).values_list(%22id%22, flat=True)%0A )%0A cache.set(%0A PER_DISK_PER_CHANNEL_CACHE_KEY,%0A channel_checksums.intersection(disk_checksums),%0A 3600,%0A )%0A return cache.get(PER_DISK_PER_CHANNEL_CACHE_KEY)%0A
61ec190ca29187cbf9ad7b721fbf1936d665e4f6
Revert "rm client.py"
orchestration/containerAPI/client.py
orchestration/containerAPI/client.py
Python
0
@@ -0,0 +1,374 @@ +from docker import Client as docker_client%0A%0Aclass Client(object):%0A    '''%0A    Docker engine client%0A    '''%0A    def __init__(self, hostURL, version):%0A        self.client = docker_client(base_url=hostURL, version=version)%0A        self.url = hostURL%0A        self.version = version%0A%0A    def get_url(self):%0A        return self.url%0A%0A    def get_version(self):%0A        return self.version%0A
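A usage sketch for the wrapper above; the engine socket URL and API version are assumptions:

```python
# Hypothetical usage against a local Docker engine.
client = Client('unix://var/run/docker.sock', '1.21')
print(client.get_url())      # unix://var/run/docker.sock
print(client.get_version())  # 1.21
```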
dcc08986d4e2f0e7940f485d0ece465b1325a711
Add barebones FileBlob class
python/fileblob.py
python/fileblob.py
Python
0.000009
@@ -0,0 +1,1235 @@ +#!/usr/bin/env python%0A%0Aimport os%0A%0AMEGABYTE = 1024 * 1024%0A%0Aclass FileBlob:%0A    def __init__(self, path):%0A        self.path = path%0A%0A    def data(self):%0A        return open(self.path).read()%0A%0A    def size(self):%0A        try:%0A            return os.path.getsize(self.path)%0A        except os.error:%0A            return 0%0A%0A    def extname(self):%0A        _, ext = os.path.splitext(self.path)%0A        return ext%0A%0A    def _mime_type(self):%0A        pass%0A%0A    def mime_type(self):%0A        pass%0A%0A    def content_type(self):%0A        pass%0A%0A    def encoding(self):%0A        pass%0A%0A    def is_binary(self):%0A        pass%0A%0A    def is_text(self):%0A        pass%0A%0A    def is_image(self):%0A        return self.extname() in %5B'.png', '.jpg', '.jpeg', '.gif', '.tif', '.tiff'%5D%0A%0A    def is_large(self):%0A        return self.size() %3E MEGABYTE%0A%0A    def is_safe_to_tokenize(self):%0A        return not self.is_large() and self.is_text() and not self.high_ratio_of_long_lines()%0A%0A    def high_ratio_of_long_lines(self):%0A        if self.loc() == 0:%0A            return False%0A        return self.size() / self.loc() %3E 5000%0A%0A    def loc(self):%0A        return len(self.lines())%0A%0A    def lines(self):%0A        pass%0A%0A    def is_viewable(self):%0A        pass%0A%0A    def line_split_character(self):%0A        pass%0A%0A
5c1e1744fa19bf900981d6a40c69195419861357
Add snactor sanity-check command (#564)
leapp/snactor/commands/workflow/sanity_check.py
leapp/snactor/commands/workflow/sanity_check.py
Python
0
@@ -0,0 +1,1592 @@ +from __future__ import print_function%0Aimport sys%0A%0Afrom leapp.exceptions import LeappError, CommandError%0Afrom leapp.logger import configure_logger%0Afrom leapp.repository.scan import find_and_scan_repositories%0Afrom leapp.snactor.commands.workflow import workflow%0Afrom leapp.utils.clicmd import command_arg%0Afrom leapp.utils.repository import requires_repository, find_repository_basedir%0A%0A_DESCRIPTION = 'The following messages are attempted to be consumed before they are produced: %7B%7D'%0A_LONG_DESCRIPTION = '''%0APerform workflow sanity checks%0A%0A- check whether there is a message in the given workflow which is attempted to be consumed before it was produced%0A%0AFor more information please consider reading the documentation at:%0Ahttps://red.ht/leapp-docs%0A'''%0A%0A%[email protected]('sanity-check', help='Perform workflow sanity checks', description=_LONG_DESCRIPTION)%0A@command_arg('name')%0A@requires_repository%0Adef cli(params):%0A configure_logger()%0A repository = find_and_scan_repositories(find_repository_basedir('.'), include_locals=True)%0A try:%0A repository.load()%0A except LeappError as exc:%0A sys.stderr.write(exc.message)%0A sys.stderr.write('%5Cn')%0A sys.exit(1)%0A%0A wf = repository.lookup_workflow(params.name)%0A if not wf:%0A raise CommandError('Could not find any workflow named %22%7B%7D%22'.format(params.name))%0A%0A instance = wf()%0A produced_late = set(instance.initial).intersection(set(instance.produces))%0A if produced_late:%0A print(_DESCRIPTION.format(' '.join(%5Bm.__name__ for m in produced_late%5D)), file=sys.stderr, end='%5Cn')%0A sys.exit(1)%0A
d5aa7df9928603c7b7294587a7abeea2b0b18061
version set to semver.org conform 4.2.0-dev
rdflib/__init__.py
rdflib/__init__.py
"""\ A pure Python package providing the core RDF constructs. The packages is intended to provide the core RDF types and interfaces for working with RDF. The package defines a plugin interface for parsers, stores, and serializers that other packages can use to implement parsers, stores, and serializers that will plug into the rdflib package. The primary interface `rdflib` exposes to work with RDF is `rdflib.graph.Graph`. A tiny example: >>> import rdflib >>> g = rdflib.Graph() >>> result = g.parse("http://www.w3.org/2000/10/swap/test/meet/blue.rdf") >>> print("graph has %s statements." % len(g)) graph has 9 statements. >>> >>> for s, p, o in g: ... if (s, p, o) not in g: ... raise Exception("It better be!") >>> s = g.serialize(format='n3') """ __docformat__ = "restructuredtext en" # The format of the __version__ line is matched by a regex in setup.py __version__ = "4.2-dev" __date__ = "2013/12/31" __all__ = [ 'URIRef', 'BNode', 'Literal', 'Variable', 'Namespace', 'Dataset', 'Graph', 'ConjunctiveGraph', 'RDF', 'RDFS', 'OWL', 'XSD', 'util', ] import sys assert sys.version_info >= (2, 5, 0), "rdflib requires Python 2.5 or higher" del sys import logging import __main__ if not hasattr(__main__, '__file__'): # show log messages in interactive mode logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.info("RDFLib Version: %s" % __version__) NORMALIZE_LITERALS = True """ If True - Literals lexical forms are normalized when created. I.e. the lexical forms is parsed according to data-type, then the stored lexical form is the re-serialized value that was parsed. Illegal values for a datatype are simply kept. The normalized keyword for Literal.__new__ can override this. For example: >>> from rdflib import Literal,XSD >>> Literal("01", datatype=XSD.int) rdflib.term.Literal(u'1', datatype=rdflib.term.URIRef(u'http://www.w3.org/2001/XMLSchema#integer')) This flag may be changed at any time, but will only affect literals created after that time, previously created literals will remain (un)normalized. """ DAWG_LITERAL_COLLATION = False """ DAWG_LITERAL_COLLATION determines how literals are ordered or compared to each other. In SPARQL, applying the >,<,>=,<= operators to literals of incompatible data-types is an error, i.e: Literal(2)>Literal('cake') is neither true nor false, but an error. This is a problem in PY3, where lists of Literals of incompatible types can no longer be sorted. Setting this flag to True gives you strict DAWG/SPARQL compliance, setting it to False will order Literals with incompatible datatypes by datatype URI In particular, this determines how the rich comparison operators for Literal work, eq, __neq__, __lt__, etc. """ from rdflib.term import ( URIRef, BNode, Literal, Variable) from rdflib.namespace import Namespace from rdflib.graph import Dataset, Graph, ConjunctiveGraph from rdflib.namespace import RDF, RDFS, OWL, XSD from rdflib import plugin from rdflib import query # tedious sop to flake8 assert plugin assert query from rdflib import util
Python
0
@@ -935,16 +935,18 @@ _ = %224.2 +.0 -dev%22%0A__ @@ -962,15 +962,15 @@ %22201 -3/12/31 +4/01/07 %22%0A%0A_
489004c5f81b8a5a2a639bc67f3ed5008f18960a
fix the naming error of the plotting script
doc/source/report/plots/plot_hc_dendrogram.py
doc/source/report/plots/plot_hc_dendrogram.py
Python
0.000004
@@ -0,0 +1,692 @@ +from mousestyles import data%0Afrom mousestyles.classification import clustering%0Afrom mousestyles.visualization import plot_clustering%0A%0A%0A# load data%0Amouse_data = data.load_all_features()%0A%0A# per-mouse daily averages, standardized and rescaled%0Amouse_dayavgstd_rsl = clustering.prep_data(mouse_data, melted=False, std=True, rescale=True)%0A%0A# optimal parameters%0Amethod, dist = clustering.get_optimal_hc_params(mouse_day=mouse_dayavgstd_rsl)%0A%0A# fit hc%0Asils_hc, labels_hc = clustering.fit_hc(%0A    mouse_day_X=mouse_dayavgstd_rsl%5B:,2:%5D,%0A    method=method, dist=dist, num_clusters=range(2,17))%0A%0A# plot and get the distance matrix%0AZ = plot_clustering.plot_dendrogram(%0A    mouse_day=mouse_dayavgstd_rsl, method=method, dist=dist)%0A
e47f61f22a568a69e74cbf8d0b70c4858879b9b5
Temporary workaround for an apprtc bug.
chrome/test/functional/webrtc_apprtc_call.py
chrome/test/functional/webrtc_apprtc_call.py
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import random # Note: pyauto_functional must come before pyauto. import pyauto_functional import pyauto import webrtc_test_base class WebrtcApprtcCallTest(webrtc_test_base.WebrtcTestBase): """Tests calling apprtc.appspot.com and setting up a call. Prerequisites: This test case must run on a machine with a webcam, either fake or real, and with some kind of audio device. The machine must have access to the public Internet. This should be considered an integration test: test failures could mean that the AppRTC reference is broken, that WebRTC is broken, or both. """ def tearDown(self): pyauto.PyUITest.tearDown(self) self.assertEquals('', self.CheckErrorsAndCrashes(), 'Chrome crashed or hit a critical error during test.') def testApprtcLoopbackCall(self): self.NavigateToURL('http://apprtc.appspot.com/?debug=loopback') self.WaitForInfobarCount(1, tab_index=0) self.PerformActionOnInfobar('accept', infobar_index=0, tab_index=0) self._WaitForCallEstablishment(tab_index=0) def testApprtcTabToTabCall(self): # Randomize the call session id. If we would use the same id we would risk # getting problems with hung calls and lingering state in AppRTC. random_call_id = 'pyauto%d' % random.randint(0, 65536) apprtc_url = 'http://apprtc.appspot.com/?r=%s' % random_call_id self.NavigateToURL(apprtc_url) self.AppendTab(pyauto.GURL(apprtc_url)) self.WaitForInfobarCount(1, tab_index=0) self.WaitForInfobarCount(1, tab_index=1) self.PerformActionOnInfobar('accept', infobar_index=0, tab_index=0) self.PerformActionOnInfobar('accept', infobar_index=0, tab_index=1) self._WaitForCallEstablishment(tab_index=0) self._WaitForCallEstablishment(tab_index=1) def _WaitForCallEstablishment(self, tab_index): # AppRTC will set opacity to 1 for remote video when the call is up. video_playing = self.WaitUntil( function=lambda: self.GetDOMValue('remoteVideo.style.opacity', tab_index=tab_index), expect_retval='1') self.assertTrue(video_playing, msg=('Timed out while waiting for ' 'remoteVideo.style.opacity to return 1.')) if __name__ == '__main__': pyauto_functional.Main()
Python
0.998543
@@ -195,16 +195,28 @@ t random +%0Aimport time %0A%0A# Note @@ -1794,32 +1794,148 @@ 0, tab_index=0)%0A + # TODO(phoglund): workaround for%0A # https://code.google.com/p/webrtc/issues/detail?id=1742%0A time.sleep(1)%0A self.Perform
37d851bb34552edfc3b1abd4d1034d4fdf46408f
Implement --remote
nvim-remote.py
nvim-remote.py
Python
0.000097
@@ -0,0 +1,2639 @@ +#!/usr/bin/env python3%0A%0A%22%22%22%0ACopyright (c) 2015 Marco Hinz%0A%0APermission is hereby granted, free of charge, to any person obtaining a copy%0Aof this software and associated documentation files (the %22Software%22), to deal%0Ain the Software without restriction, including without limitation the rights%0Ato use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0Acopies of the Software, and to permit persons to whom the Software is%0Afurnished to do so, subject to the following conditions:%0A%0AThe above copyright notice and this permission notice shall be included in%0Aall copies or substantial portions of the Software.%0A%0ATHE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0AIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0AFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0AAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0ALIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0AOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN%0ATHE SOFTWARE.%0A%22%22%22%0A%0Aimport sys%0Aimport os%0Aimport subprocess%0Aimport argparse%0A%0Afrom neovim import attach%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument('--remote', action='append', help='Edit %3Cfiles%3E in a Vim server if possible')%0A parser.add_argument('--remote-silent', help=%22Same, don't complain if there is no server%22)%0A parser.add_argument('--remote-wait', help='As --remote but wait for files to have been edited')%0A parser.add_argument('--remote-wait-silent', help=%22Same, don't complain if there is no server%22)%0A parser.add_argument('--remote-tab', help='As --remote but use tab page per file')%0A parser.add_argument('--remote-send', help='Send %3Ckeys%3E to a Vim server and exit')%0A parser.add_argument('--remote-expr', help='Evaluate %3Cexpr%3E in a Vim server and print result ')%0A args, unused = parser.parse_known_args()%0A%0A sockpath = os.environ.get('NVIM_LISTEN_ADDRESS')%0A if sockpath is None:%0A sockpath = '/tmp/nvimsocket'%0A%0A try:%0A nvim = attach('socket', path='/tmp/nvimsocket')%0A except FileNotFoundError:%0A print(%22%22%22Problem: Can't find unix socket: /tmp/nvimsocket%0A Solution: Start a new server: NVIM_LISTEN_ADDRESS=/tmp/nvimsocket nvim%22%22%22)%0A sys.exit(1)%0A%0A if args.remote:%0A for fname in args.remote:%0A nvim.command('edit %7B%7D'.format(fname))%0A%0A if unused:%0A os.putenv('VIMRUNTIME', '/data/repo/neovim/runtime')%0A subprocess.Popen(%5B'/data/repo/neovim/build/bin/nvim'%5D + unused)%0A%0Aif __name__ == '__main__':%0A main()%0A
49424b855f043ae2bbb3562481493b1fa83f5090
add random selection wip code
af_scripts/tmp/randSelect.py
af_scripts/tmp/randSelect.py
Python
0
@@ -0,0 +1,358 @@ +import random as rd%0A%0Aletters = %5B%22a%22,%22b%22,%22c%22,%22d%22,%22e%22,%22f%22,%22g%22,%22h%22,%22i%22,%22j%22,%22k%22,%22l%22,%22m%22,%22n%22,%22o%22,%22p%22,%22q%22,%22r%22,%22s%22,%22t%22,%22u%22,%22v%22,%22w%22,%22x%22,%22y%22,%22z%22%5D%0A%0ArandList = letters%5B:%5D  # shuffle a copy so the source list keeps its order%0A#print randList%0A%0Adiv = 3%0A%0AlistSize = len(letters)%0A#print listSize%0A%0AnumForOnePart = listSize / div%0A#print numForOnePart%0A%0Ard.shuffle(randList)%0A%0A#print randList%0A%0A%0Aprint %5BrandList%5Bi::div%5D for i in range(div)%5D%0A%0Aprint randList
7d4281574a9ee2a8e7642f14402a452f82a807db
Create smarthome.py
demos/smarthome/smarthome.py
demos/smarthome/smarthome.py
Python
0
@@ -0,0 +1,1297 @@ +import logging%0A%0Afrom pabiana import area%0Afrom pabiana.area import autoloop, load_interfaces, pulse, register, scheduling, subscribe%0Afrom pabiana.node import create_publisher, run%0A%0ANAME = 'smarthome'%0Apublisher = None%0A%0A%0A# Triggers%0A@register%0Adef increase_temp():%0A%09area.context%5B'temperature'%5D += 0.25%0A%09autoloop(increase_temp)%0A%0A%0A@register%0Adef lower_temp():%0A%09area.context%5B'temperature'%5D -= 0.25%0A%09autoloop(lower_temp)%0A%0A%0A@register%0Adef keep_temp():%0A%09pass%0A%0A%0A@register%0Adef window(open):%0A%09area.context%5B'window-open'%5D = open%0A%0A%0A# Reactions%0A@scheduling%0Adef schedule():%0A%09if keep_temp in area.demand:%0A%09%09area.demand.pop(increase_temp, None)%0A%09%09area.demand.pop(lower_temp, None)%0A%09elif lower_temp in area.demand:%0A%09%09area.demand.pop(increase_temp, None)%0A%0A%0A@pulse%0Adef publish():%0A%09if area.clock %25 8 == 0:%0A%09%09publisher.send_json(%7B%0A%09%09%09'temperature': area.context%5B'temperature'%5D,%0A%09%09%09'window-open': area.context%5B'window-open'%5D%0A%09%09%7D)%0A%0A%0Aif __name__ == '__main__':%0A%09logging.basicConfig(%0A%09%09format='%25(asctime)s %25(levelname)s %25(message)s',%0A%09%09datefmt='%25Y-%25m-%25d %25H:%25M:%25S',%0A%09%09level=logging.DEBUG%0A%09)%0A%09%0A%09load_interfaces('interfaces.json')%0A%09subscribe(%5B%5D, 'pulse', '01')%0A%09publisher = create_publisher(own_name=NAME, host='0.0.0.0')%0A%09area.context%5B'temperature'%5D = 18%0A%09area.context%5B'window-open'%5D = False%0A%09%0A%09run(own_name=NAME, host='0.0.0.0')%0A
babdfe81ef57c52c16f11dc48bf148157d052995
change to use new method names
src/robotide/plugins/preview.py
src/robotide/plugins/preview.py
# Copyright 2008-2009 Nokia Siemens Networks Oyj # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import wx.html from StringIO import StringIO from robotide.ui.menu import ActionInfo from robotide.model.tcuk import TestCase, UserKeyword from robotide.errors import SerializationError from robotide.publish import RideTreeSelection, RideNotebookTabChanged from robotide.writer.writer import HtmlFileWriter, TxtFileWriter from plugin import Plugin class InMemoryHtmlWriter(HtmlFileWriter): def _write_empty_row(self): self._write_data(['&nbsp;']) def close(self): HtmlFileWriter.close(self, close_output=False) class InMemoryTxtWriter(TxtFileWriter): def close(self): TxtFileWriter.close(self, close_output=False) class PreviewPlugin(Plugin): """Provides preview of the test data in HTML and TXT formats.""" datafile = property(lambda self: self.get_selected_datafile()) def __init__(self, application): Plugin.__init__(self, application) self._panel = None def activate(self): self.register_action(ActionInfo('Tools','Preview', self.OnShowPreview, doc='Show preview of the current file')) self.subscribe(self.OnTreeSelection, RideTreeSelection) self.subscribe(self.OnTabChanged, RideNotebookTabChanged) def deactivate(self): self.unsubscribe_all() self.remove_added_menu_items() self.delete_page(self._panel) self._panel = None def OnShowPreview(self, event): if not self._panel: self._panel = PreviewPanel(self, self.notebook) self._update_preview(self.datafile) self.show_tab(self._panel) def OnTreeSelection(self, event): self._update_preview(event.item) def OnTabChanged(self, event): self._update_preview(self.datafile) def _update_preview(self, item): if self.tab_is_visible(self._panel) and item: self._panel.update_preview(item) class PreviewPanel(wx.Panel): def __init__(self, parent, notebook): wx.Panel.__init__(self, notebook) self._parent = parent self._previous_datafile = None self.SetSizer(wx.BoxSizer(wx.VERTICAL)) self._create_chooser() self._set_format('Text') notebook.AddPage(self, "Preview") def _create_chooser(self): chooser = wx.RadioBox(self, label='Format', choices=['Text', 'HTML']) self.Bind(wx.EVT_RADIOBOX, self.OnTypeChanged, chooser) self.Sizer.Add(chooser) def update_preview(self, item, force_reload=False): datafile = isinstance(item, (TestCase, UserKeyword)) and item.datafile or item if datafile != self._previous_datafile or force_reload: content = datafile and self._get_content(datafile) or '' self._view.set_content(content.decode('UTF-8')) else: self._view.scroll_to_subitem(item) self._previous_datafile = datafile def _get_content(self, datafile): output = StringIO() writer = {'HTML': InMemoryHtmlWriter, 'Text': InMemoryTxtWriter}[self._format](output) try: # TODO: might need a public way to do this datafile._serialize(writer) except SerializationError, e: return "Creating preview of '%s' failed: %s" % (datafile.name, e) else: return output.getvalue() def OnTypeChanged(self, event): self._set_format(event.GetString()) self.update_preview(self._previous_datafile, force_reload=True) def _set_format(self, format): self._format = format if hasattr(self, '_view'): self.Sizer.Remove(self._view) self._view.Destroy() if format == 'HTML': self._view = HtmlView(self) else: self._view = TxtView(self) self.Sizer.Add(self._view, 1, wx.EXPAND|wx.ALL, border=8) self.Sizer.Layout() class HtmlView(wx.html.HtmlWindow): def __init__(self, parent): wx.html.HtmlWindow.__init__(self, parent) self.SetStandardFonts() def set_content(self, content): self.SetPage(content) def scroll_to_subitem(self, item): anchor = self._get_anchor(item) if self.HasAnchor(anchor): self.ScrollToAnchor(anchor) self.ScrollLines(-1) else: self.Scroll(0,0) def _get_anchor(self, item): if isinstance(item, TestCase): return 'test_%s' % item.name if isinstance(item, UserKeyword): return 'keyword_%s' % item.name return '' class TxtView(wx.TextCtrl): def __init__(self, parent): wx.TextCtrl.__init__(self, parent, style=wx.TE_MULTILINE) self.SetEditable(False) self.SetFont(wx.Font(10, wx.MODERN, wx.NORMAL, wx.NORMAL)) def set_content(self, content): self.SetValue(content) def scroll_to_subitem(self, item): pass
Python
0.000002
@@ -1923,30 +1923,25 @@ elf. -remove_added_menu_item +unregister_action s()%0A @@ -1964,12 +1964,11 @@ ete_ -page +tab (sel
a0493ff48b96056709880804f61e794621886c61
Add CoNLL reader tests
compattern/dependency/tests/test_conll.py
compattern/dependency/tests/test_conll.py
Python
0
@@ -0,0 +1,1483 @@ +# encoding: utf8%0A%0Afrom compattern.dependency import conll%0A%0A%0Adef test_read_french():%0A %22%22%22Test that conll.read understands French Bonsai output%22%22%22%0A line = (u%226%5Ctchauff%C3%A9%5Ctchauffer%5CtV%5CtVPP%5Ctg=m%7Cm=part%7Cn=s%7Ct=past%5Ct%22%0A u%221100011%5Ct5%5Ctdep_coord%5Ct_%5Ct_%22)%0A sentence = conll.read(%5Bline, '%5Cn'%5D)%5B0%5D%0A assert len(sentence) == 1%0A token = sentence%5B0%5D%0A assert token.id == 6%0A assert token.lemma == %22chauffer%22%0A assert token.cpos == %22V%22%0A assert token.pos == %22VPP%22%0A assert token.feat%5B0%5D.startswith(%22g=m%22) # morpho features%0A assert token.feat%5B1%5D.startswith(%22110%22) # cluster path%0A assert token.head == 5%0A assert token.deprel == %22dep_coord%22%0A # Don't really care what happens with undefined phead and pdeprel%0A%0A%0Adef test_read_turboparser():%0A line = %2211%5Ctvaccines%5Ctvaccine%5CtNNS%5CtNNS%5Ct_%5Ct10%5CtPMOD%22%0A sentence = conll.read(%5Bline, '%5Cn'%5D)%5B0%5D%0A assert len(sentence) == 1%0A token = sentence%5B0%5D%0A assert token.id == 11%0A assert token.form == %22vaccines%22%0A assert token.lemma == %22vaccine%22%0A assert token.cpos == %22NNS%22%0A assert token.pos == %22NNS%22%0A assert token.head == 10%0A assert token.deprel == %22PMOD%22%0A%0A%0Adef test_read_wacky():%0A line = %22was%5Ctbe%5CtVBD%5Ct18%5Ct11%5CtPRD%22%0A sentence = conll.read(%5Bline, '%5Cn'%5D)%5B0%5D%0A assert len(sentence) == 1%0A token = sentence%5B0%5D%0A assert token.id == 18%0A assert token.form == %22was%22%0A assert token.lemma == %22be%22%0A assert token.pos == %22VBD%22%0A assert token.head == 11%0A assert token.deprel == %22PRD%22%0A
4b696c2a54f7afd95013763c098aec30b08409d6
Create bulb-switcher-ii.py
Python/bulb-switcher-ii.py
Python/bulb-switcher-ii.py
Python
0.000001
@@ -0,0 +1,383 @@ +# Time: O(1)%0A# Space: O(1)%0A%0Aclass Solution(object):%0A    def flipLights(self, n, m):%0A        %22%22%22%0A        :type n: int%0A        :type m: int%0A        :rtype: int%0A        %22%22%22%0A        if m == 0: return 1%0A        if n == 1: return 2%0A        if m == 1 and n == 2: return 3%0A        if m == 1 or n == 2: return 4%0A        if m == 2: return 7%0A        return 8%0A
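A brute-force cross-check of those constants, assuming the four standard buttons of the problem (flip all, flip even, flip odd, flip 3k+1); only the first six bulbs matter because the pattern repeats, and the exhaustive search is practical only for small m:

```python
# Hypothetical verification helper for the closed-form answers above.
from itertools import product

def flip_lights_brute(n, m):
    n = min(n, 6)
    buttons = [
        lambda i: True,         # every bulb
        lambda i: i % 2 == 1,   # even-numbered bulbs (0-indexed i is odd)
        lambda i: i % 2 == 0,   # odd-numbered bulbs
        lambda i: i % 3 == 0,   # bulbs 1, 4, 7, ... (positions 3k+1)
    ]
    states = set()
    for presses in product(buttons, repeat=m):
        bulbs = [True] * n
        for press in presses:
            bulbs = [not b if press(i) else b for i, b in enumerate(bulbs)]
        states.add(tuple(bulbs))
    return len(states)

assert flip_lights_brute(3, 1) == 4  # matches the `m == 1 or n == 2` branch
```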
0621b935558b6805d2b45fee49bc2e959201fd7a
add number-of-digit-one
vol5/number-of-digit-one/number-of-digit-one.py
vol5/number-of-digit-one/number-of-digit-one.py
Python
0.999993
@@ -0,0 +1,868 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A# @Author: Zeyuan Shang%0A# @Date: 2015-11-03 15:21:00%0A# @Last Modified by: Zeyuan Shang%0A# @Last Modified time: 2015-11-03 15:21:14%0Aimport itertools%0A%0Aclass Solution(object):%0A    def countDigitOne(self, n):%0A        %22%22%22%0A        :type n: int%0A        :rtype: int%0A        %22%22%22%0A        if n %3C= 0:%0A            return 0%0A        def digits(n):%0A            while n:%0A                yield n %25 10%0A                n /= 10%0A        def pows(b):%0A            x = 1%0A            while True:%0A                yield x%0A                x *= b%0A        def g(d, m):%0A            if d %3C 1:%0A                return n / (m * 10) * m%0A            elif d == 1:%0A                return n / (m * 10) * m + n %25 m + 1%0A            else:%0A                return (n / (m * 10) + 1) * m%0A        return sum(itertools.starmap(g, itertools.izip(digits(n), pows(10))))
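A quick sanity check of the closed-form count against naive string counting; run under Python 2, matching the integer division and itertools.izip in the solution above:

```python
# Compare the digit-DP formula with direct counting for small n.
def count_ones_naive(n):
    return sum(str(i).count('1') for i in range(1, n + 1))

s = Solution()
assert all(s.countDigitOne(n) == count_ones_naive(n) for n in range(0, 300))
```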
27d37833663842405f159127f30c6351958fcb10
Add draft of example using the new @bench
bench_examples/bench_dec_insert.py
bench_examples/bench_dec_insert.py
Python
0
@@ -0,0 +1,800 @@ +from csv import DictWriter%0A%0Afrom ktbs_bench.utils.decorators import bench%0A%0A%0A@bench%0Adef batch_insert(graph, file):%0A %22%22%22Insert triples in batch.%22%22%22%0A print(graph, file)%0A%0A%0Aif __name__ == '__main__':%0A # Define some graph/store to use%0A graph_list = %5B'g1', 'g2'%5D%0A%0A # Define some files to get the triples from%0A n3file_list = %5B'f1', 'f2'%5D%0A%0A # Testing batch insert%0A res = %7B'func_name': 'batch_insert'%7D%0A for graph in graph_list:%0A for n3file in n3file_list:%0A time_res = batch_insert(graph, n3file)%0A res%5Btime_res%5B0%5D%5D = time_res%5B1%5D%0A%0A # Setup the result CSV%0A with open('/tmp/res.csv', 'wb') as outfile:%0A res_csv = DictWriter(outfile, fieldnames=res.keys())%0A res_csv.writeheader()%0A%0A # Write the results%0A res_csv.writerow(res)%0A
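The @bench decorator is imported but not shown in this record; a minimal stand-in consistent with how the result is consumed (`res[time_res[0]] = time_res[1]`) might look like this, with the tuple shape being the only assumption:

```python
# Hypothetical @bench: run the function and return (name, elapsed_seconds).
import time
from functools import wraps

def bench(func):
    @wraps(func)
    def timed(*args, **kwargs):
        start = time.time()
        func(*args, **kwargs)
        return func.__name__, time.time() - start
    return timed
```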
175470eea9716f587a2339932c1cfb6c5240c4df
add tools.testing module for asserts (numpy, pandas compat wrapper)
statsmodels/tools/testing.py
statsmodels/tools/testing.py
Python
0
@@ -0,0 +1,1442 @@ +%22%22%22assert functions from numpy and pandas testing%0A%0A%22%22%22%0A%0Aimport re%0Afrom distutils.version import StrictVersion%0A%0Aimport numpy as np%0Aimport numpy.testing as npt%0Aimport pandas%0Aimport pandas.util.testing as pdt%0A%0A# for pandas version check%0Adef strip_rc(version):%0A    return re.sub(r%22rc%5Cd+$%22, %22%22, version)%0A%0Adef is_pandas_min_version(min_version):%0A    '''check whether pandas is at least min_version%0A    '''%0A    from pandas.version import short_version as pversion%0A    return StrictVersion(strip_rc(pversion)) %3E= min_version%0A%0A%0A# local copies, all unchanged%0Afrom numpy.testing import (assert_allclose, assert_almost_equal,%0A    assert_approx_equal, assert_array_almost_equal,%0A    assert_array_almost_equal_nulp, assert_array_equal, assert_array_less,%0A    assert_array_max_ulp, assert_raises, assert_string_equal, assert_warns)%0A%0A%0A# adjusted functions%0A%0Adef assert_equal(actual, desired, err_msg='', verbose=True, **kwds):%0A%0A    if not is_pandas_min_version('0.14.1'):%0A        npt.assert_equal(actual, desired, err_msg=err_msg, verbose=verbose)%0A    else:%0A        if isinstance(desired, pandas.Index):%0A            pdt.assert_index_equal(actual, desired)%0A        elif isinstance(desired, pandas.Series):%0A            pdt.assert_series_equal(actual, desired, **kwds)%0A        elif isinstance(desired, pandas.DataFrame):%0A            pdt.assert_frame_equal(actual, desired, **kwds)%0A        else:%0A            npt.assert_equal(actual, desired, err_msg=err_msg, verbose=verbose)%0A
3f8fd4eb96e778148e73e1e89b369f03d2e97f69
Update ceph_help.py
tendrl/commons/flows/create_cluster/ceph_help.py
tendrl/commons/flows/create_cluster/ceph_help.py
# flake8: noqa import gevent from tendrl.commons.event import Event from tendrl.commons.flows.exceptions import FlowExecutionFailedError from tendrl.commons.message import Message def create_ceph(parameters): # install the packages Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Installing Ceph Packages %s" % parameters['TendrlContext.integration_id'] } ) ) mon_ips, osd_ips = install_packages(parameters) # Configure Mons Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Creating Ceph Monitors %s" % parameters['TendrlContext.integration_id'] } ) ) created_mons = create_mons(parameters, mon_ips) # Configure osds Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Creating Ceph OSD %s" % parameters['TendrlContext.integration_id']} ) ) create_osds(parameters, created_mons) Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Created Ceph Cluster %s" % parameters['TendrlContext.integration_id']} ) ) def install_packages(parameters): plugin = NS.ceph_provisioner.get_plugin() mon_ips = [] osd_ips = [] for node, config in parameters["Cluster.node_configuration"].iteritems(): if "mon" in config["role"].lower(): mon_ips.append(config["provisioning_ip"]) elif "osd" in config["role"].lower(): osd_ips.append(config["provisioning_ip"]) task_id = plugin.install_mon(mon_ips) Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Installing Ceph Packages on MONS [%s], ceph-installer task %s" % (" ".join(mon_ips), task_id) } ) ) status, err = sync_task_status(task_id) if not status: raise FlowExecutionFailedError(err) task_id = plugin.install_osd(osd_ips) Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Installing Ceph Packages on OSDS [%s], ceph-installer task %s" % (" ".join(osd_ips), task_id) } ) ) status, err = sync_task_status(task_id) if not status: raise FlowExecutionFailedError(err) return mon_ips, osd_ips def create_mons(parameters, mon_ips): created_mons = [] plugin = NS.ceph_provisioner.get_plugin() for mon_ip in mon_ips: task_id = plugin.configure_mon(mon_ip, parameters['TendrlContext.cluster_id'], parameters["TendrlContext.cluster_name"], mon_ip, parameters["Cluster.cluster_network"], parameters["Cluster.public_network"], created_mons ) Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Creating Ceph MON %s, ceph-installer task %s" % (mon_ip, task_id) } ) ) status, err = sync_task_status(task_id) if not status: raise FlowExecutionFailedError(err) else: # If success add the MON to the created list created_mons.append({"address":mon_ip, "host": mon_ip}) return created_mons def create_osds(parameters, created_mons): failed = [] plugin = NS.ceph_provisioner.get_plugin() for node, config in parameters["Cluster.node_configuration"].iteritems(): if "osd" in config["role"].lower(): if config["journal_colocation"]: devices = [] else: devices = {} for device in config["storage_disks"]: if config["journal_colocation"]: devices.append(device["device"]) else: devices[device["device"]] = device["journal"] task_id = plugin.configure_osd( config["provisioning_ip"], devices, parameters["TendrlContext.cluster_id"], parameters["TendrlContext.cluster_name"], config["journal_size"], parameters["Cluster.cluster_network"], parameters["Cluster.public_network"], created_mons ) Event( Message( job_id=parameters['job_id'], flow_id=parameters['flow_id'], priority="info", publisher=NS.publisher_id, payload={"message": "Creating Ceph OSD %s, ceph-installer task %s" % (config["provisioning_ip"], task_id) } ) ) status, err = sync_task_status(task_id) if not status: raise FlowExecutionFailedError(err) def sync_task_status(task_id): status = False count = 0 plugin = NS.ceph_provisioner.get_plugin() resp = {} while count < 90: gevent.sleep(10) resp = plugin.task_status(task_id) if resp: if resp["ended"]: if resp["succeeded"]: return True, "" return status, resp.get("stderr", "ceph-installer task_id %s timed out" % task_id)
Python
0.000001
@@ -2552,109 +2552,29 @@ -status, err = sync_task_status(task_id)%0A if not status:%0A raise FlowExecutionFailedError(err +wait_for_task(task_id )%0A @@ -3000,109 +3000,29 @@ -status, err = sync_task_status(task_id)%0A if not status:%0A raise FlowExecutionFailedError(err +wait_for_task(task_id )%0A @@ -4118,117 +4118,29 @@ -status, err = sync_task_status(task_id)%0A if not status:%0A raise FlowExecutionFailedError(err +wait_for_task(task_id )%0A @@ -5751,149 +5751,49 @@ -status, err = sync_task_status(task_id)%0A if not status:%0A raise FlowExecutionFailedError(err)%0A%0A%0Adef sync_task_status +wait_for_task(task_id)%0A%0Adef wait_for_task (tas @@ -6097,36 +6097,21 @@ turn - True, %22%22%0A return status, +%0A stderr = res @@ -6166,45 +6166,145 @@ out -%22%0A %25 task_id + and did not complete%22 %25 task_id)%0A stdout = resp.get(%22stdout%22, %22%22)%0A raise FlowExecutionFailedError(dict(stdout=stdout, stderr=stderr) )%0A