Dataset columns (field, dtype, observed range):

    commit         stringlengths   40 to 40
    subject        stringlengths   1 to 3.25k
    old_file       stringlengths   4 to 311
    new_file       stringlengths   4 to 311
    old_contents   stringlengths   0 to 26.3k
    lang           stringclasses   3 values
    proba          float64         0 to 1
    diff           stringlengths   0 to 7.82k
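Each record below is one commit, with fields in schema order: the 40-character commit SHA, the subject line, the old and new file paths, the pre-change file contents (old_contents, absent when the commit creates the file), the language, the proba score, and the diff. The diff text is percent-encoded (%0A for newline, %5B/%5D and %7B/%7D for brackets and braces, %22 for quotes, %25 for a literal percent sign), with character-offset hunk headers resembling diff-match-patch patch text. Below is a minimal sketch for decoding a diff for reading, assuming only standard percent-encoding; the raw_diff value is the first record's diff, copied verbatim from the dump:

    from urllib.parse import unquote

    # First record's diff, copied verbatim from the dump below.
    raw_diff = ("@@ -0,0 +1,85 @@ +a = (1,2)%0Ab = %5B1,2%5D%0A"
                "c = %7Ba: 1%7D # outcome: c= %7B(1,2): 1%7D %0A"
                "d = %7Bb: 1%7D # outcome: error%0A")

    # unquote (not unquote_plus) keeps literal '+' insertion markers intact;
    # %0A becomes a newline, %5B/%7B become '[' and '{', %25 becomes '%'.
    print(unquote(raw_diff))

Note that the dump flattens the patch format's own line breaks (each +, -, or context line would normally sit on its own line), so applying these hunks mechanically, for example with diff-match-patch's patch_fromText and patch_apply, would first require restoring that structure; the decoder above is for display only.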
724bc46c85e6ea75ac8d786f4d1706b74df8f330
Create dictid.py
dictid.py
dictid.py
Python
0.000001
@@ -0,0 +1,85 @@ +a = (1,2)%0Ab = %5B1,2%5D%0Ac = %7Ba: 1%7D # outcome: c= %7B(1,2): 1%7D %0Ad = %7Bb: 1%7D # outcome: error%0A
7d574c1f6d194df1f2b2009fb2e48fbaacaca873
Add migration for_insert_base
oedb_datamodels/versions/6887c442bbee_insert_base.py
oedb_datamodels/versions/6887c442bbee_insert_base.py
Python
0.000002
@@ -0,0 +1,1098 @@ +%22%22%22Add _insert_base%0A%0ARevision ID: 6887c442bbee%0ARevises: 3886946416ba%0ACreate Date: 2019-04-25 16:09:20.572057%0A%0A%22%22%22%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '6887c442bbee'%0Adown_revision = '3886946416ba'%0Abranch_labels = None%0Adepends_on = None%0A%0A%0Adef upgrade():%0A op.create_table('_insert_base',%0A sa.Column('_id', sa.BigInteger(), autoincrement=True, nullable=False),%0A sa.Column('_message', sa.Text(), nullable=True),%0A sa.Column('_user', sa.String(length=50), nullable=True),%0A sa.Column('_submitted', sa.DateTime(), server_default=sa.text('now()'), nullable=True),%0A sa.Column('_autocheck', sa.Boolean(), server_default=sa.text('false'), nullable=True),%0A sa.Column('_humancheck', sa.Boolean(), server_default=sa.text('false'), nullable=True),%0A sa.Column('_type', sa.String(length=8), nullable=True),%0A sa.Column('_applied', sa.Boolean(), server_default=sa.text('false'), nullable=True),%0A sa.PrimaryKeyConstraint('_id'),%0A schema='public'%0A )%0A%0A%0Adef downgrade():%0A op.drop_table('_insert_base', schema='public')%0A
2ef707337adc3d0abc33ca638b2adb70a681bd12
update for new API
doc/examples/filters/plot_denoise.py
doc/examples/filters/plot_denoise.py
""" ==================== Denoising a picture ==================== In this example, we denoise a noisy version of the picture of the astronaut Eileen Collins using the total variation and bilateral denoising filter. These algorithms typically produce "posterized" images with flat domains separated by sharp edges. It is possible to change the degree of posterization by controlling the tradeoff between denoising and faithfulness to the original image. Total variation filter ---------------------- The result of this filter is an image that has a minimal total variation norm, while being as close to the initial image as possible. The total variation is the L1 norm of the gradient of the image. Bilateral filter ---------------- A bilateral filter is an edge-preserving and noise reducing filter. It averages pixels based on their spatial closeness and radiometric similarity. """ import numpy as np import matplotlib.pyplot as plt from skimage import data, img_as_float from skimage.restoration import denoise_tv_chambolle, denoise_bilateral astro = img_as_float(data.astronaut()) astro = astro[220:300, 220:320] noisy = astro + 0.6 * astro.std() * np.random.random(astro.shape) noisy = np.clip(noisy, 0, 1) fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5), sharex=True, sharey=True, subplot_kw={'adjustable': 'box-forced'}) plt.gray() ax[0, 0].imshow(noisy) ax[0, 0].axis('off') ax[0, 0].set_title('noisy') ax[0, 1].imshow(denoise_tv_chambolle(noisy, weight=0.1, multichannel=True)) ax[0, 1].axis('off') ax[0, 1].set_title('TV') ax[0, 2].imshow(denoise_bilateral(noisy, sigma_range=0.05, sigma_spatial=15)) ax[0, 2].axis('off') ax[0, 2].set_title('Bilateral') ax[1, 0].imshow(denoise_tv_chambolle(noisy, weight=0.2, multichannel=True)) ax[1, 0].axis('off') ax[1, 0].set_title('(more) TV') ax[1, 1].imshow(denoise_bilateral(noisy, sigma_range=0.1, sigma_spatial=15)) ax[1, 1].axis('off') ax[1, 1].set_title('(more) Bilateral') ax[1, 2].imshow(astro) ax[1, 2].axis('off') ax[1, 2].set_title('original') fig.tight_layout() plt.show()
Python
0
@@ -1618,21 +1618,21 @@ , sigma_ -range +color =0.05, s @@ -1883,13 +1883,13 @@ gma_ -range +color =0.1
9e6a016c5a59b25199426f6825b2c83571997e68
Refactor buildbot tests so that they can be used downstream.
build/android/buildbot/tests/bb_run_bot_test.py
build/android/buildbot/tests/bb_run_bot_test.py
#!/usr/bin/env python # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import subprocess import sys BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..') sys.path.append(BUILDBOT_DIR) import bb_run_bot def RunBotsWithTesting(bot_step_map): code = 0 procs = [ (bot, subprocess.Popen( [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot, '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)) for bot in bot_step_map] for bot, proc in procs: _, err = proc.communicate() code |= proc.returncode if proc.returncode != 0: print 'Error running bb_run_bot with id="%s"' % bot, err return code def main(): return RunBotsWithTesting(bb_run_bot.GetBotStepMap()) if __name__ == '__main__': sys.exit(main())
Python
0.000099
@@ -346,29 +346,29 @@ nBot -sWithTesting(bot_step +Processes(bot_process _map @@ -387,249 +387,40 @@ 0%0A -procs = %5B%0A ( +for bot, -sub proc -ess.Popen(%0A %5Bos.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,%0A '--testing'%5D, stdout=subprocess.PIPE, stderr=subprocess.PIPE))%0A for bot in bot_step_map%5D%0A for bot, proc in procs + in bot_process_map :%0A @@ -533,25 +533,29 @@ running -bb_run_bo +the bot scrip t with i @@ -608,60 +608,269 @@ :%0A -return RunBotsWithTesting(bb_run_bot.GetBotStepMap() +procs = %5B%0A (bot, subprocess.Popen(%0A %5Bos.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,%0A '--testing'%5D, stdout=subprocess.PIPE, stderr=subprocess.PIPE))%0A for bot in bb_run_bot.GetBotStepMap()%5D%0A return RunBotProcesses(procs )%0A%0A%0A
eb9f9d8bfa5ea278e1fb39c59ed660a223b1f6a9
Add flask api app creation to init
api/__init__.py
api/__init__.py
Python
0.000001
@@ -0,0 +1,393 @@ +from flask_sqlalchemy import SQLAlchemy%0Aimport connexion%0A%0Afrom config import config%0A%0Adb = SQLAlchemy()%0A%0A%0Adef create_app(config_name):%0A app = connexion.FlaskApp(__name__, specification_dir='swagger/')%0A app.add_api('swagger.yaml')%0A application = app.app%0A application.config.from_object(config%5Bconfig_name%5D)%0A db.init_app(application)%0A%0A return application%0A%0Afrom api.api import *%0A
48cc03558fcd1debf4d2453f26716839acba9e58
Add output sanity checks for called commands
tests/test_integration.py
tests/test_integration.py
import os import tempfile import vimrunner from tasklib.task import TaskWarrior, Task server = vimrunner.Server() class IntegrationTest(object): input = None output = None def add_plugin(self, name): plugin_base = os.path.expanduser('~/.vim/bundle/') plugin_path = os.path.join(plugin_base, name) self.client.add_plugin(plugin_path) def write_buffer(self, lines, position=0): result = self.client.write_buffer(position + 1, lines) assert result == u"0" def read_buffer(self, start=0, end=1000): return self.client.read_buffer( unicode(start+1), unicode(end+1) ).splitlines() def generate_data(self): self.dir = tempfile.mkdtemp(dir='/tmp/') self.tw = TaskWarrior(data_location=self.dir) self.tasks = [ Task(self.tw, description="project random task 1", project="Random"), Task(self.tw, description="project random task 2", project="Random"), Task(self.tw, description="tag home task 1", tags=["home"]), Task(self.tw, description="tag work task 1", tags=["work"]), Task(self.tw, description="today task 1", due="now"), ] for task in self.tasks: task.save() def setup(self): self.generate_data() self.client = server.start_gvim() self.add_plugin('taskwiki') self.add_plugin('vimwiki') self.command('let g:taskwiki_data_location="{0}"'.format(self.dir)) self.client.edit(os.path.join(self.dir, 'testwiki.txt')) self.command('set filetype=vimwiki') def teardown(self): self.client.quit() def command(self, command): return self.client.command(command) def check_sanity(self): """ Makes sanity checks upon the vim instance. """ # Assert all the important files were loaded scriptnames = self.client.command('scriptnames').splitlines() expected_loaded_files = [ 'vimwiki/autoload/vimwiki/base.vim', 'vimwiki/ftplugin/vimwiki.vim', 'vimwiki/autoload/vimwiki/u.vim', 'vimwiki/syntax/omnipresent_syntax.vim', 'vimwiki/syntax/vimwiki.vim', 'taskwiki/ftplugin/vimwiki.vim', ] # Do a partial match for each line from scriptnames for scriptfile in expected_loaded_files: assert any([scriptfile in line for line in scriptnames]) # Assert only note about Bram being maintainer is in messages bramline = u'Messages maintainer: Bram Moolenaar <[email protected]>' assert self.client.command('messages') == bramline # Assert that TW and cache objects exist tw_class = self.client.command('py print(tw.__class__.__name__)') cache_class = self.client.command('py print(cache.__class__.__name__)') assert tw_class == 'TaskWarrior' assert cache_class == 'TaskCache' def test_execute(self): # First, run sanity checks self.check_sanity() # Then load the input if self.input: self.write_buffer(input) # Do the stuff self.execute() # Check expected output if self.output: assert self.read_buffer() == self.output class TestBurndown(IntegrationTest): def execute(self): self.command("TaskWikiBurndownDaily") assert self.command(":py print vim.current.buffer").startswith("<buffer burndown.daily") assert "Daily Burndown" in self.read_buffer()[0] class TestViewports(IntegrationTest): def execute(self): lines = ["=== Work tasks | +work ==="] self.write_buffer(lines) self.command("w") assert self.read_buffer() == [ "=== Work tasks | +work ===", "* [ ] tag work task 1 #{0}".format(self.tasks[3]['uuid']) ] class TestSimpleTask(IntegrationTest): def execute(self): lines = ["* [ ] This is a test task"] self.write_buffer(lines) self.command("w") # Check that only one tasks with this description exists matching = self.tw.tasks.filter(description="This is a test task") assert len(matching) == 1 expected = [ "* [ ] This is a test task #{0}".format(matching[0]['uuid']) ] assert 
expected == self.read_buffer()
Python
0.000003
@@ -3,16 +3,26 @@ port os%0A +import re%0A import t @@ -1637,17 +1637,73 @@ vimwiki' -) +, silent=False) # TODO: fix these vimwiki loading errors %0A%0A de @@ -1777,16 +1777,53 @@ command +, silent=True, regex=None, lines=None ):%0A @@ -1819,36 +1819,38 @@ one):%0A re -turn +sult = self.client.com @@ -1864,16 +1864,450 @@ mmand)%0A%0A + # Specifying regex or lines cancels expectations of silence%0A if regex or lines:%0A silent = False%0A%0A # For silent commands, there should be no output%0A assert silent == bool(not result)%0A%0A # Multiline-evaluate the regex%0A if regex:%0A assert re.search(regex, result, re.MULTILINE)%0A%0A if lines:%0A assert lines == len(result.splitlines())%0A%0A return result%0A%0A def @@ -4025,16 +4025,30 @@ .buffer%22 +, silent=False ).starts @@ -4302,16 +4302,43 @@ mand(%22w%22 +, regex=%22written$%22, lines=1 )%0A @@ -4666,16 +4666,43 @@ mand(%22w%22 +, regex=%22written$%22, lines=1 )%0A%0A
c10eb3861daf48c13ec854bd210db5d5e1163b11
Add LotGroupAutocomplete
livinglots_lots/autocomplete_light_registry.py
livinglots_lots/autocomplete_light_registry.py
Python
0
@@ -0,0 +1,525 @@ +from autocomplete_light import AutocompleteModelBase, register%0A%0Afrom livinglots import get_lotgroup_model%0A%0A%0Aclass LotGroupAutocomplete(AutocompleteModelBase):%0A autocomplete_js_attributes = %7B'placeholder': 'lot group name',%7D%0A search_fields = ('name',)%0A%0A def choices_for_request(self):%0A choices = super(LotGroupAutocomplete, self).choices_for_request()%0A if not self.request.user.is_staff:%0A choices = choices.none()%0A return choices%0A%0A%0Aregister(get_lotgroup_model(), LotGroupAutocomplete)%0A
a5081ac307e037caee6bbd1add49d4c0d9424353
Fix wake_on_lan for german version of Windows 10 (#6397) (#6398)
homeassistant/components/switch/wake_on_lan.py
homeassistant/components/switch/wake_on_lan.py
""" Support for wake on lan. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/switch.wake_on_lan/ """ import logging import platform import subprocess as sp import voluptuous as vol from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.script import Script from homeassistant.const import (CONF_HOST, CONF_NAME) REQUIREMENTS = ['wakeonlan==0.2.2'] _LOGGER = logging.getLogger(__name__) CONF_MAC_ADDRESS = 'mac_address' CONF_OFF_ACTION = 'turn_off' DEFAULT_NAME = 'Wake on LAN' DEFAULT_PING_TIMEOUT = 1 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_MAC_ADDRESS): cv.string, vol.Optional(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA, }) def setup_platform(hass, config, add_devices, discovery_info=None): """Set up a wake on lan switch.""" name = config.get(CONF_NAME) host = config.get(CONF_HOST) mac_address = config.get(CONF_MAC_ADDRESS) off_action = config.get(CONF_OFF_ACTION) add_devices([WOLSwitch(hass, name, host, mac_address, off_action)]) class WOLSwitch(SwitchDevice): """Representation of a wake on lan switch.""" def __init__(self, hass, name, host, mac_address, off_action): """Initialize the WOL switch.""" from wakeonlan import wol self._hass = hass self._name = name self._host = host self._mac_address = mac_address self._off_script = Script(hass, off_action) if off_action else None self._state = False self._wol = wol self.update() @property def should_poll(self): """Poll for status regularly.""" return True @property def is_on(self): """Return true if switch is on.""" return self._state @property def name(self): """The name of the switch.""" return self._name def turn_on(self): """Turn the device on.""" self._wol.send_magic_packet(self._mac_address) def turn_off(self): """Turn the device off if an off action is present.""" if self._off_script is not None: self._off_script.run() def update(self): """Check if device is on and update the state.""" if platform.system().lower() == 'windows': ping_cmd = 'ping -n 1 -w {} {}'.format( DEFAULT_PING_TIMEOUT * 1000, self._host) else: ping_cmd = 'ping -c 1 -W {} {}'.format( DEFAULT_PING_TIMEOUT, self._host) status = sp.getstatusoutput(ping_cmd)[0] self._state = not bool(status)
Python
0
@@ -2717,36 +2717,41 @@ sp. -getstatusoutput(ping_cmd)%5B0%5D +call(ping_cmd, stdout=sp.DEVNULL) %0A
1e316332889bcb46ce52b70603c1028f7614c2a0
Support Unicode request_id on Python 3
glance/tests/unit/test_context_middleware.py
glance/tests/unit/test_context_middleware.py
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob from glance.api.middleware import context import glance.context from glance.tests.unit import base class TestContextMiddleware(base.IsolatedUnitTest): def _build_request(self, roles=None, identity_status='Confirmed', service_catalog=None): req = webob.Request.blank('/') req.headers['x-auth-token'] = 'token1' req.headers['x-identity-status'] = identity_status req.headers['x-user-id'] = 'user1' req.headers['x-tenant-id'] = 'tenant1' _roles = roles or ['role1', 'role2'] req.headers['x-roles'] = ','.join(_roles) if service_catalog: req.headers['x-service-catalog'] = service_catalog return req def _build_middleware(self): return context.ContextMiddleware(None) def test_header_parsing(self): req = self._build_request() self._build_middleware().process_request(req) self.assertEqual('token1', req.context.auth_token) self.assertEqual('user1', req.context.user) self.assertEqual('tenant1', req.context.tenant) self.assertEqual(['role1', 'role2'], req.context.roles) def test_is_admin_flag(self): # is_admin check should look for 'admin' role by default req = self._build_request(roles=['admin', 'role2']) self._build_middleware().process_request(req) self.assertTrue(req.context.is_admin) # without the 'admin' role, is_admin should be False req = self._build_request() self._build_middleware().process_request(req) self.assertFalse(req.context.is_admin) # if we change the admin_role attribute, we should be able to use it req = self._build_request() self.config(admin_role='role1') self._build_middleware().process_request(req) self.assertTrue(req.context.is_admin) def test_roles_case_insensitive(self): # accept role from request req = self._build_request(roles=['Admin', 'role2']) self._build_middleware().process_request(req) self.assertTrue(req.context.is_admin) # accept role from config req = self._build_request(roles=['role1']) self.config(admin_role='rOLe1') self._build_middleware().process_request(req) self.assertTrue(req.context.is_admin) def test_roles_stripping(self): # stripping extra spaces in request req = self._build_request(roles=['\trole1']) self.config(admin_role='role1') self._build_middleware().process_request(req) self.assertTrue(req.context.is_admin) # stripping extra spaces in config req = self._build_request(roles=['\trole1\n']) self.config(admin_role=' role1\t') self._build_middleware().process_request(req) self.assertTrue(req.context.is_admin) def test_anonymous_access_enabled(self): req = self._build_request(identity_status='Nope') self.config(allow_anonymous_access=True) middleware = self._build_middleware() middleware.process_request(req) self.assertIsNone(req.context.auth_token) self.assertIsNone(req.context.user) self.assertIsNone(req.context.tenant) self.assertEqual([], req.context.roles) self.assertFalse(req.context.is_admin) self.assertTrue(req.context.read_only) def test_anonymous_access_defaults_to_disabled(self): req = self._build_request(identity_status='Nope') middleware = self._build_middleware() 
self.assertRaises(webob.exc.HTTPUnauthorized, middleware.process_request, req) def test_service_catalog(self): catalog_json = "[{}]" req = self._build_request(service_catalog=catalog_json) self._build_middleware().process_request(req) self.assertEqual([{}], req.context.service_catalog) def test_invalid_service_catalog(self): catalog_json = "bad json" req = self._build_request(service_catalog=catalog_json) middleware = self._build_middleware() self.assertRaises(webob.exc.HTTPInternalServerError, middleware.process_request, req) def test_response(self): req = self._build_request() req.context = glance.context.RequestContext() request_id = req.context.request_id resp = webob.Response() resp.request = req self._build_middleware().process_response(resp) self.assertEqual(request_id, resp.headers['x-openstack-request-id']) resp_req_id = resp.headers['x-openstack-request-id'] # Validate that request-id do not starts with 'req-req-' self.assertFalse(resp_req_id.startswith(b'req-req-')) self.assertTrue(resp_req_id.startswith(b'req-')) class TestUnauthenticatedContextMiddleware(base.IsolatedUnitTest): def test_request(self): middleware = context.UnauthenticatedContextMiddleware(None) req = webob.Request.blank('/') middleware.process_request(req) self.assertIsNone(req.context.auth_token) self.assertIsNone(req.context.user) self.assertIsNone(req.context.tenant) self.assertEqual([], req.context.roles) self.assertTrue(req.context.is_admin) def test_response(self): middleware = context.UnauthenticatedContextMiddleware(None) req = webob.Request.blank('/') req.context = glance.context.RequestContext() request_id = req.context.request_id resp = webob.Response() resp.request = req middleware.process_response(resp) self.assertEqual(request_id, resp.headers['x-openstack-request-id']) resp_req_id = resp.headers['x-openstack-request-id'] # Validate that request-id do not starts with 'req-req-' self.assertFalse(resp_req_id.startswith(b'req-req-')) self.assertTrue(resp_req_id.startswith(b'req-'))
Python
0.000006
@@ -5273,32 +5273,129 @@ with 'req-req-'%0A + if isinstance(resp_req_id, bytes):%0A resp_req_id = resp_req_id.decode('utf-8')%0A self.ass @@ -5418,33 +5418,32 @@ q_id.startswith( -b 'req-req-'))%0A @@ -5478,33 +5478,32 @@ q_id.startswith( -b 'req-'))%0A%0A%0Aclass @@ -6440,32 +6440,129 @@ ck-request-id'%5D%0A + if isinstance(resp_req_id, bytes):%0A resp_req_id = resp_req_id.decode('utf-8')%0A # Valida @@ -6654,25 +6654,24 @@ .startswith( -b 'req-req-')) @@ -6718,17 +6718,16 @@ rtswith( -b 'req-'))
2527683522394c823bc100c75f1ce4885949136e
add paths module for other modules to find paths from one place
glim/paths.py
glim/paths.py
Python
0
@@ -0,0 +1,1058 @@ +import os%0Afrom termcolor import colored%0A%0APROJECT_PATH = os.getcwd()%0AAPP_PATH = os.path.join(PROJECT_PATH, 'app')%0AEXT_PATH = os.path.join(PROJECT_PATH, 'ext')%0AGLIM_ROOT_PATH = os.path.dirname(os.path.dirname(__file__))%0APROTO_PATH = os.path.join(os.path.dirname(__file__), 'prototype')%0A%0Aimport sys%0Afrom pprint import pprint as p%0A%0Adef configure_sys_path():%0A%09if GLIM_ROOT_PATH == PROJECT_PATH:%0A%09%09print colored('Development mode is on, sys.path is being configured', 'yellow')%0A%09%09sys.path.pop(0)%0A%09%09sys.path.insert(0, GLIM_ROOT_PATH)%0A%09else:%0A%09%09sys.path.insert(0, PROJECT_PATH)%0A%0Adef controllers():%0A%09return os.path.join(APP_PATH, 'controllers.py')%0A%0Adef config(env):%0A%09return os.path.join(APP_PATH, 'config', '%25s.py' %25 env)%0A%0Adef start():%0A%09return os.path.join(APP_PATH, 'start.py')%0A%0Adef commands():%0A%09return os.path.join(APP_PATH, 'commands.py')%0A%0Adef routes():%0A%09return os.path.join(APP_PATH, 'routes.py')%0A%0Adef extensions(ext):%0A%09return os.path.join(EXT_PATH, '%25s' %25 ext, '%25s.py' %25 ext)%0A%0Adef extension_commands(ext):%0A%09return os.path.join(EXT_PATH, '%25s' %25 ext, 'commands.py')
24f21146b01ff75a244df40d1626c54883abeb1a
Add helper-lib for json object conversion and split dicts
lib/helpers.py
lib/helpers.py
Python
0
@@ -0,0 +1,297 @@ +#! /usr/bin/env python2.7%0A%0Aimport datetime%0A%0Adef typecast_json(o):%0A%09if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):%0A%09%09return o.isoformat()%0A%09else:%0A%09%09return o%0A%0Adef split_dict(src, keys):%0A%09result = dict()%0A%09for k in set(src.keys()) & set(keys):%0A%09%09result%5Bk%5D = src%5Bk%5D%0A%09return result%0A
2321603eb706745e20e70d156a3894a7f3ac38eb
Add the Gamerscore and Tier of the account (#12867)
homeassistant/components/sensor/xbox_live.py
homeassistant/components/sensor/xbox_live.py
""" Sensor for Xbox Live account status. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.xbox_live/ """ import logging import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import (CONF_API_KEY, STATE_UNKNOWN) from homeassistant.helpers.entity import Entity REQUIREMENTS = ['xboxapi==0.1.1'] _LOGGER = logging.getLogger(__name__) CONF_XUID = 'xuid' ICON = 'mdi:xbox' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_XUID): vol.All(cv.ensure_list, [cv.string]) }) # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the Xbox platform.""" from xboxapi import xbox_api api = xbox_api.XboxApi(config.get(CONF_API_KEY)) devices = [] # request personal profile to check api connection profile = api.get_profile() if profile.get('error_code') is not None: _LOGGER.error("Can't setup XboxAPI connection. Check your account or " " api key on xboxapi.com. Code: %s Description: %s ", profile.get('error_code', STATE_UNKNOWN), profile.get('error_message', STATE_UNKNOWN)) return for xuid in config.get(CONF_XUID): new_device = XboxSensor(hass, api, xuid) if new_device.success_init: devices.append(new_device) if devices: add_devices(devices, True) class XboxSensor(Entity): """A class for the Xbox account.""" def __init__(self, hass, api, xuid): """Initialize the sensor.""" self._hass = hass self._state = STATE_UNKNOWN self._presence = {} self._xuid = xuid self._api = api # get profile info profile = self._api.get_user_gamercard(self._xuid) if profile.get('success', True) and profile.get('code') is None: self.success_init = True self._gamertag = profile.get('gamertag') self._picture = profile.get('gamerpicSmallSslImagePath') else: _LOGGER.error("Can't get user profile %s. " "Error Code: %s Description: %s", self._xuid, profile.get('code', STATE_UNKNOWN), profile.get('description', STATE_UNKNOWN)) self.success_init = False @property def name(self): """Return the name of the sensor.""" return self._gamertag @property def state(self): """Return the state of the sensor.""" return self._state @property def device_state_attributes(self): """Return the state attributes.""" attributes = {} for device in self._presence: for title in device.get('titles'): attributes[ '{} {}'.format(device.get('type'), title.get('placement')) ] = title.get('name') return attributes @property def entity_picture(self): """Avatar of the account.""" return self._picture @property def icon(self): """Return the icon to use in the frontend.""" return ICON def update(self): """Update state data from Xbox API.""" presence = self._api.get_user_presence(self._xuid) self._state = presence.get('state', STATE_UNKNOWN) self._presence = presence.get('devices', {})
Python
0
@@ -2135,16 +2135,73 @@ ertag')%0A + self._gamerscore = profile.get('gamerscore')%0A @@ -2261,16 +2261,61 @@ ePath')%0A + self._tier = profile.get('tier')%0A @@ -2979,24 +2979,117 @@ ibutes = %7B%7D%0A + attributes%5B'gamerscore'%5D = self._gamerscore%0A attributes%5B'tier'%5D = self._tier%0A%0A for
0f5c0168b257436882f837e5d521cce46a740ad6
Add symbol translator to make utf-8 variables compilable
finat/greek_alphabet.py
finat/greek_alphabet.py
Python
0
@@ -0,0 +1,1500 @@ +%22%22%22Translation table from utf-8 to greek variable names, taken from:%0Ahttps://gist.github.com/piquadrat/765262#file-greek_alphabet-py%0A%22%22%22%0A%0A%0Adef translate_symbol(symbol):%0A %22%22%22Translates utf-8 sub-strings into compilable variable names%22%22%22%0A name = symbol.decode(%22utf-8%22)%0A for k, v in greek_alphabet.iteritems():%0A name = name.replace(k, v)%0A return name%0A%0A%0Agreek_alphabet = %7B%0A u'%5Cu0391': 'Alpha',%0A u'%5Cu0392': 'Beta',%0A u'%5Cu0393': 'Gamma',%0A u'%5Cu0394': 'Delta',%0A u'%5Cu0395': 'Epsilon',%0A u'%5Cu0396': 'Zeta',%0A u'%5Cu0397': 'Eta',%0A u'%5Cu0398': 'Theta',%0A u'%5Cu0399': 'Iota',%0A u'%5Cu039A': 'Kappa',%0A u'%5Cu039B': 'Lamda',%0A u'%5Cu039C': 'Mu',%0A u'%5Cu039D': 'Nu',%0A u'%5Cu039E': 'Xi',%0A u'%5Cu039F': 'Omicron',%0A u'%5Cu03A0': 'Pi',%0A u'%5Cu03A1': 'Rho',%0A u'%5Cu03A3': 'Sigma',%0A u'%5Cu03A4': 'Tau',%0A u'%5Cu03A5': 'Upsilon',%0A u'%5Cu03A6': 'Phi',%0A u'%5Cu03A7': 'Chi',%0A u'%5Cu03A8': 'Psi',%0A u'%5Cu03A9': 'Omega',%0A u'%5Cu03B1': 'alpha',%0A u'%5Cu03B2': 'beta',%0A u'%5Cu03B3': 'gamma',%0A u'%5Cu03B4': 'delta',%0A u'%5Cu03B5': 'epsilon',%0A u'%5Cu03B6': 'zeta',%0A u'%5Cu03B7': 'eta',%0A u'%5Cu03B8': 'theta',%0A u'%5Cu03B9': 'iota',%0A u'%5Cu03BA': 'kappa',%0A u'%5Cu03BB': 'lamda',%0A u'%5Cu03BC': 'mu',%0A u'%5Cu03BD': 'nu',%0A u'%5Cu03BE': 'xi',%0A u'%5Cu03BF': 'omicron',%0A u'%5Cu03C0': 'pi',%0A u'%5Cu03C1': 'rho',%0A u'%5Cu03C3': 'sigma',%0A u'%5Cu03C4': 'tau',%0A u'%5Cu03C5': 'upsilon',%0A u'%5Cu03C6': 'phi',%0A u'%5Cu03C7': 'chi',%0A u'%5Cu03C8': 'psi',%0A u'%5Cu03C9': 'omega',%0A%7D%0A
6a7f9fe9063b651489839734b92b3f58cf5b50a2
Fix denied commands still triggering
src/decorators.py
src/decorators.py
import fnmatch from collections import defaultdict import botconfig from oyoyo.parse import parse_nick from src import settings as var from src import logger adminlog = logger(None) COMMANDS = defaultdict(list) HOOKS = defaultdict(list) class cmd: def __init__(self, *cmds, raw_nick=False, admin_only=False, owner_only=False, chan=True, pm=False, playing=False, silenced=False, phases=(), roles=()): self.cmds = cmds self.raw_nick = raw_nick self.admin_only = admin_only self.owner_only = owner_only self.chan = chan self.pm = pm self.playing = playing self.silenced = silenced self.phases = phases self.roles = roles self.func = None self.aftergame = False self.name = cmds[0] alias = False self.aliases = [] for name in cmds: for func in COMMANDS[name]: if (func.owner_only != owner_only or func.admin_only != admin_only): raise ValueError("unmatching protection levels for " + func.name) COMMANDS[name].append(self) if alias: self.aliases.append(self) alias = True def __call__(self, func): self.func = func self.__doc__ = self.func.__doc__ return self def caller(self, *args): largs = list(args) cli, rawnick, chan, rest = largs nick, mode, user, cloak = parse_nick(rawnick) if cloak is None: cloak = "" if not self.raw_nick: largs[1] = nick if not self.pm and chan == nick: return # PM command, not allowed if not self.chan and chan != nick: return # channel command, not allowed if chan.startswith("#") and chan != botconfig.CHANNEL and not (self.admin_only or self.owner_only): if "" in self.cmds: return # don't have empty commands triggering in other channels for command in self.cmds: if command in botconfig.ALLOWED_ALT_CHANNELS_COMMANDS: break else: return if nick in var.USERS and var.USERS[nick]["account"] != "*": acc = var.USERS[nick]["account"] else: acc = None if "" in self.cmds: return self.func(*largs) if self.phases and var.PHASE not in self.phases: return if self.playing and (nick not in var.list_players() or nick in var.DISCONNECTED): if chan == nick: pm(cli, nick, "You're not currently playing.") else: cli.notice(nick, "You're not currently playing.") return if self.silenced and nick in var.SILENCED: if chan == nick: pm(cli, nick, "You have been silenced, and are unable to use any special powers.") else: cli.notice(nick, "You have been silenced, and are unable to use any special powers.") return if self.roles: for role in self.roles: if nick in var.ROLES[role]: break else: return return self.func(*largs) # don't check restrictions for role commands if self.owner_only: if var.is_owner(nick, cloak): adminlog(chan, rawnick, self.name, rest) return self.func(*largs) if chan == nick: pm(cli, nick, "You are not the owner.") else: cli.notice(nick, "You are not the owner.") return if not self.admin_only: return self.func(*largs) if var.is_admin(nick, cloak): adminlog(chan, rawnick, self.name, rest) return self.func(*largs) if acc: for pattern in var.DENY_ACCOUNTS: if fnmatch.fnmatch(acc.lower(), pattern.lower()): for command in self.cmds: if command in var.DENY_ACCOUNTS[pattern]: if chan == nick: pm(cli, nick, "You do not have permission to use that command.") else: cli.notice(nick, "You do not have permission to use that command.") return for pattern in var.ALLOW_ACCOUNTS: if fnmatch.fnmatch(acc.lower(), pattern.lower()): for command in self.cmds: if command in var.ALLOW_ACCOUNTS[pattern]: adminlog(chan, rawnick, self.name, rest) return self.func(*largs) if not var.ACCOUNTS_ONLY and cloak: for pattern in var.DENY: if fnmatch.fnmatch(cloak.lower(), pattern.lower()): for command in self.cmds: if command 
in var.DENY[pattern]: if chan == nick: pm(cli, nick, "You do not have permission to use that command.") else: cli.notice(nick, "You do not have permission to use that command.") return for pattern in var.ALLOW: if fnmatch.fnmatch(cloak.lower(), pattern.lower()): for command in self.cmds: if command in var.ALLOW[pattern]: adminlog(chan, rawnick, self.name, rest) return self.func(*largs) if chan == nick: pm(cli, nick, "You are not an admin.") else: cli.notice(nick, "You are not an admin.") return class hook: def __init__(self, name, hookid=-1): self.name = name self.hookid = hookid self.func = None HOOKS[name].append(self) def __call__(self, func): self.func = func self.__doc__ = self.func.__doc__ return self @staticmethod def unhook(hookid): for each in list(HOOKS): for inner in list(HOOKS[each]): if inner.hookid == hookid: HOOKS[each].remove(inner) if not HOOKS[each]: del HOOKS[each]
Python
0
@@ -3714,105 +3714,71 @@ if -not self.admin_only:%0A return self.func(*largs)%0A%0A if var.is_admin(nick, cloak):%0A +var.is_admin(nick, cloak):%0A if self.admin_only:%0A @@ -4634,32 +4634,84 @@ OUNTS%5Bpattern%5D:%0A + if self.admin_only:%0A @@ -5583,32 +5583,84 @@ ALLOW%5Bpattern%5D:%0A + if self.admin_only:%0A @@ -5758,32 +5758,64 @@ f.func(*largs)%0A%0A + if self.admin_only:%0A if chan @@ -5827,32 +5827,36 @@ ck:%0A + pm(cli, nick, %22Y @@ -5878,32 +5878,36 @@ dmin.%22)%0A + else:%0A @@ -5900,32 +5900,36 @@ se:%0A + + cli.notice(nick, @@ -5962,24 +5962,28 @@ + return%0A%0A class ho @@ -5974,16 +5974,50 @@ return%0A%0A + return self.func(*largs)%0A%0A class ho
03951a227bfafb0b1017354bdbf3a1247322fc9b
Fix cycler tests
axelrod/tests/unit/test_cycler.py
axelrod/tests/unit/test_cycler.py
"""Test for the Cycler strategies.""" import itertools import axelrod from .test_player import TestPlayer, test_four_vector C, D = 'C', 'D' class TestAntiCycler(TestPlayer): name = "AntiCycler" player = axelrod.AntiCycler expected_classifier = { 'memory_depth': float('inf'), 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating""" responses = [C, D, C, C, D, C, C, C, D, C, C, C, C, D, C, C, C] self.responses_test([], [], responses) def test_cycler_factory(cycle): class TestCycler(TestPlayer): name = "Cycler %s" % cycle player = getattr(axelrod, 'Cycler%s' % cycle) expected_classifier = { 'memory_depth': 1, 'stochastic': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } def test_strategy(self): """Starts by cooperating""" for i in range(20): responses = itertools.islice(itertools.cycle(cycle), i) self.responses_test([], [], responses) return TestCycler TestCyclerCCD = test_cycler_factory("CCD") TestCyclerCCCD = test_cycler_factory("CCCD") TestCyclerCCCCCD = test_cycler_factory("CCCCCD")
Python
0
@@ -843,9 +843,18 @@ h': -1 +len(cycle) ,%0A
ca2269c5ae568cd63253af7bc614a79d26f7f8ac
Add ns_drop_indexes command.
needlestack/management/commands/ns_drop_indexes.py
needlestack/management/commands/ns_drop_indexes.py
Python
0
@@ -0,0 +1,660 @@ +# -*- coding: utf-8 -*-%0A%0Afrom __future__ import print_function, absolute_import, unicode_literals%0A%0Afrom django.core.management.base import BaseCommand, CommandError%0Afrom needlestack import commands%0A%0A%0Aclass Command(BaseCommand):%0A help = 'Sync all defined indexes with a current backend'%0A option_list = BaseCommand.option_list + (%0A make_option('--backend',%0A action='store',%0A dest='backend',%0A default='default'),)%0A%0A def handle(self, *args, **options):%0A commands.drop_indexes(options%5B%22backend%22%5D, options%5B%22verbosity%22%5D)%0A%0A%0A
b8a84e612d67f7948d6dec8c202ac8a73390f9dc
make sure all protein ids are unique in a genbank file
proteins/unique_protein_ids.py
proteins/unique_protein_ids.py
Python
0
@@ -0,0 +1,1102 @@ +%22%22%22%0ATest a genbank file and make sure all the protein_ids are unique%0A%22%22%22%0A%0Aimport os%0Aimport sys%0Aimport argparse%0Afrom Bio import SeqIO%0A%0A__author__ = 'Rob Edwards'%0A__copyright__ = 'Copyright 2020, Rob Edwards'%0A__credits__ = %5B'Rob Edwards'%5D%0A__license__ = 'MIT'%0A__maintainer__ = 'Rob Edwards'%0A__email__ = '[email protected]'%0A%0Aif __name__ == '__main__':%0A parser = argparse.ArgumentParser(description=%22 %22)%0A parser.add_argument('-f', help='genbank file', required=True)%0A args = parser.parse_args()%0A%0A pids = set()%0A rc = 0%0A for seq in SeqIO.parse(args.f, %22genbank%22):%0A rc+=1;%0A print(f%22record %7Brc%7D: %7Bseq.id%7D%22)%0A for feat in seq.features:%0A if feat.type != %22CDS%22:%0A continue%0A if 'protein_id' not in feat.qualifiers:%0A thisid = %22 %22.join(feat.qualifiers.get('locus_tag', %5Bstr(feat.location)%5D))%0A print(f%22No protein id in %7Bthisid%7D%22)%0A continue%0A pid = %22%7C%22.join(feat.qualifiers%5B%22protein_id%22%5D)%0A if pid in pids:%0A print(f%22%7Bpid%7D is not unique%22)%0A pids.add(pid)
61fa404da3eeb3b695b12f398c27f641e1e681e2
add codegen script for fname.pyf.src -> _fnamemodule.c
tools/generate_f2pymod.py
tools/generate_f2pymod.py
Python
0
@@ -0,0 +1,1474 @@ +%22%22%22%0AProcess f2py template files (%60filename.pyf.src%60 -%3E %60filename.pyf%60)%0A%0AUsage: python generate_pyf.py filename.pyf.src -o filename.pyf%0A%22%22%22%0A%0Aimport os%0Aimport sys%0Aimport subprocess%0Aimport argparse%0A%0Afrom numpy.distutils.from_template import process_file%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument(%22infile%22, type=str,%0A help=%22Path to the input file%22)%0A parser.add_argument(%22-o%22, %22--outfile%22, type=str,%0A help=%22Path to the output file%22)%0A args = parser.parse_args()%0A%0A # Read .pyf.src file%0A code = process_file(args.infile)%0A%0A # Write out the .pyf file%0A outdir = os.path.split(args.outfile)%5B0%5D%0A outdir_abs = os.path.join(os.getcwd(), outdir)%0A fname_pyf = os.path.join(outdir,%0A os.path.splitext(os.path.split(args.infile)%5B1%5D)%5B0%5D)%0A%0A with open(fname_pyf, 'w') as f:%0A f.write(code)%0A%0A # Now invoke f2py to generate the C API module file%0A p = subprocess.Popen(%5Bsys.executable, '-m', 'numpy.f2py', fname_pyf,%0A '--build-dir', outdir_abs%5D, #'--quiet'%5D,%0A stdout=subprocess.PIPE, stderr=subprocess.PIPE,%0A cwd=os.getcwd())%0A out, err = p.communicate()%0A if not (p.returncode == 0):%0A raise RuntimeError(f%22Writing %7Bargs.outfile%7D with f2py failed!%5Cn%22%0A f%22%7Bout%7D%5Cn%22%0A r%22%7Berr%7D%22)%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
0f94251c7cc844042c9e3ce160d78e4d81d895ea
add log module
src/log.py
src/log.py
Python
0.000002
@@ -0,0 +1,995 @@ +import logging%0Aimport os%0Afrom datetime import datetime%0A%0Aclass LOG(object):%0A logger = None%0A def __init__(self, log_dir):%0A if log_dir:%0A if not os.path.exists(log_dir):%0A os.makedirs(log_dir)%0A self.logger = logging.getLogger('simple-db-migrate')%0A%0A now = datetime.now()%0A filename = %22%25s/%25s.log%22 %25(os.path.abspath(log_dir), now.strftime(%22%25Y%25m%25d%25H%25M%25S%22))%0A hdlr = logging.FileHandler(filename)%0A formatter = logging.Formatter('%25(message)s')%0A hdlr.setFormatter(formatter)%0A self.logger.addHandler(hdlr)%0A self.logger.setLevel(logging.DEBUG)%0A%0A def debug(self, msg):%0A if self.logger:%0A self.logger.debug(msg)%0A%0A def info(self, msg):%0A if self.logger:%0A self.logger.info(msg)%0A%0A def error(self, msg):%0A if self.logger:%0A self.logger.error(msg)%0A%0A def warn(self, msg):%0A if self.logger:%0A self.logger.warn(msg)%0A
bc567eda01abcaf23717f5da5f494c1be46f47da
Create ValAnagram_001.py
leetcode/242-Valid-Anagram/ValAnagram_001.py
leetcode/242-Valid-Anagram/ValAnagram_001.py
Python
0.000002
@@ -0,0 +1,581 @@ +class Solution:%0A # @param %7Bstring%7D s%0A # @param %7Bstring%7D t%0A # @return %7Bboolean%7D%0A def anaRepresentation(self, s):%0A p = %7B%7D%0A for c in s:%0A if c in p:%0A p%5Bc%5D += 1%0A else:%0A p%5Bc%5D = 1%0A return p%0A %0A def isAnagram(self, s, t):%0A if len(s) != len(t):%0A return False%0A p = self.anaRepresentation(s)%0A q = self.anaRepresentation(t)%0A %0A for c in p:%0A if c not in q or (c in q and p%5Bc%5D != q%5Bc%5D):%0A return False%0A %0A return True%0A
682d6b3ca9c4a0dd49f9762ddd20ac746971e3eb
Create solution.py
leetcode/easy/find_the_difference/py/solution.py
leetcode/easy/find_the_difference/py/solution.py
Python
0.000018
@@ -0,0 +1,437 @@ +class Solution(object):%0A def findTheDifference(self, s, t):%0A %22%22%22%0A :type s: str%0A :type t: str%0A :rtype: str%0A %22%22%22%0A import collections%0A import itertools%0A %0A c1 = collections.Counter(s)%0A c2 = collections.Counter(t)%0A %0A for char in set(itertools.chain(s, t)):%0A if c1%5Bchar%5D != c2%5Bchar%5D:%0A return char%0A %0A return None%0A
9e128fdd5af0598a233416de5a1e8f2d3a74fdc0
Enforce unique paths and names
spaces/migrations/0006_unique_space_document.py
spaces/migrations/0006_unique_space_document.py
Python
0.00002
@@ -0,0 +1,756 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.9 on 2015-12-15 02:12%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('spaces', '0005_document_space_doc'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='space',%0A name='name',%0A field=models.CharField(max_length=100, unique=True),%0A ),%0A migrations.AlterField(%0A model_name='space',%0A name='path',%0A field=models.CharField(max_length=40, unique=True),%0A ),%0A migrations.AlterUniqueTogether(%0A name='document',%0A unique_together=set(%5B('path', 'parent')%5D),%0A ),%0A %5D%0A
0256868a3b261e598689eebdf5ac5f939ea20a0d
add test cases for mni module
lib/neuroimaging/reference/tests/test_mni.py
lib/neuroimaging/reference/tests/test_mni.py
Python
0
@@ -0,0 +1,381 @@ +import unittest%0Aimport numpy as N%0A%0Aimport neuroimaging.reference.mni as mni%0A%0Aclass MNITest(unittest.TestCase):%0A%0A def testMNI(self):%0A %22%22%22 ensure all elementes of the interface exist %22%22%22%0A m = mni.MNI%0A g = mni.generic%0A m_v = mni.MNI_voxel%0A m_w = mni.MNI_world %0A m_m = mni.MNI_mapping%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
a0789a4bad7747073257d8976534b33ab9862ec4
Add unit test for IssueRegister view
feed/tests/test_issueregisterview.py
feed/tests/test_issueregisterview.py
Python
0
@@ -0,0 +1,3102 @@ +from django.contrib.auth.models import User%0Afrom django.test import TestCase%0Afrom rest_framework.test import APIRequestFactory%0A%0Afrom feed.views import IssueRegisterViewSet%0Afrom workflow.models import IssueRegister, Organization, TolaUser%0A%0A%0Aclass IssueRegisterViewsTest(TestCase):%0A def setUp(self):%0A self.user = User.objects.create_user('john', '[email protected]', 'johnpassword')%0A self.user.is_superuser = True%0A self.user.is_staff = True%0A self.user.save()%0A%0A IssueRegister.objects.bulk_create(%5B%0A IssueRegister(name='IssueRegister1'),%0A IssueRegister(name='IssueRegister2'),%0A %5D)%0A%0A factory = APIRequestFactory()%0A self.request_get = factory.get('/api/issueregister/')%0A self.request_post = factory.post('/api/issueregister/')%0A%0A def test_list_issueregister_superuser(self):%0A self.request_get.user = self.user%0A view = IssueRegisterViewSet.as_view(%7B'get': 'list'%7D)%0A response = view(self.request_get)%0A self.assertEqual(response.status_code, 200)%0A self.assertEqual(len(response.data), 2)%0A%0A def test_list_issueregister_normaluser(self):%0A self.user.is_superuser = False%0A self.user.is_staff = False%0A self.user.save()%0A organization = Organization.objects.create(name=%22TestOrg%22)%0A TolaUser.objects.create(user=self.user, organization=organization)%0A%0A self.request_get.user = self.user%0A view = IssueRegisterViewSet.as_view(%7B'get': 'list'%7D)%0A response = view(self.request_get)%0A self.assertEqual(response.status_code, 200)%0A self.assertEqual(len(response.data), 0)%0A%0A def test_list_issueregister_normaluser_one_result(self):%0A self.user.is_superuser = False%0A self.user.is_staff = False%0A self.user.save()%0A organization = Organization.objects.create(name=%22TestOrg%22)%0A TolaUser.objects.create(user=self.user, organization=organization)%0A%0A IssueRegister.objects.create(name='IssueRegister0', organization=organization)%0A%0A self.request_get.user = self.user%0A view = IssueRegisterViewSet.as_view(%7B'get': 'list'%7D)%0A response = view(self.request_get)%0A self.assertEqual(response.status_code, 200)%0A self.assertEqual(len(response.data), 1)%0A %0A def test_create_issueregister_normaluser_one_result(self):%0A self.user.is_superuser = False%0A self.user.is_staff = False%0A self.user.save()%0A organization = Organization.objects.create(name=%22TestOrg%22)%0A TolaUser.objects.create(user=self.user, organization=organization)%0A%0A self.request_post.user = self.user%0A view = IssueRegisterViewSet.as_view(%7B'post': 'create'%7D)%0A response = view(self.request_post)%0A self.assertEqual(response.status_code, 201)%0A%0A # check if the obj created has the user organization%0A self.request_get.user = self.user%0A view = IssueRegisterViewSet.as_view(%7B'get': 'list'%7D)%0A response = view(self.request_get)%0A self.assertEqual(response.status_code, 200)%0A self.assertEqual(len(response.data), 1)%0A
05e8f84356c63ab953f5c2a3d3d06ee1760008d0
Add list_queue plugin
flexget/plugins/filter/list_queue.py
flexget/plugins/filter/list_queue.py
Python
0.000002
@@ -0,0 +1,1373 @@ +import logging%0A%0Afrom flexget import plugin%0Afrom flexget.event import event%0A%0Alog = logging.getLogger('list_queue')%0A%0A%0Aclass ListQueue(object):%0A schema = %7B%0A 'type': 'array',%0A 'items': %7B%0A 'allOf': %5B%0A %7B'$ref': '/schema/plugins?group=list'%7D,%0A %7B%0A 'maxProperties': 1,%0A 'error_maxProperties': 'Plugin options within list_queue plugin must be indented 2 more spaces '%0A 'than the first letter of the plugin name.',%0A 'minProperties': 1%0A %7D%0A %5D%0A %7D%0A %7D%0A%0A def on_task_filter(self, task, config):%0A for item in config:%0A for plugin_name, plugin_config in item.iteritems():%0A thelist = plugin.get_plugin_by_name(plugin_name).instance.get_list(plugin_config)%0A for entry in task.all_entries:%0A if entry in thelist:%0A entry.accept()%0A%0A def on_task_learn(self, task, config):%0A for item in config:%0A for plugin_name, plugin_config in item.iteritems():%0A thelist = plugin.get_plugin_by_name(plugin_name).instance.get_list(plugin_config)%0A thelist -= task.accepted%0A%0A%0A@event('plugin.register')%0Adef register_plugin():%0A plugin.register(ListQueue, 'list_queue', api_ver=2)%0A
f7a69e24912c3b9ed52201b52c79be4407884c3a
add module util for trying to resolve an ipv6 netmask to cidr. not perfect, but not meant to be either.
library/module_utils/network/f5/ipaddress.py
library/module_utils/network/f5/ipaddress.py
Python
0
@@ -0,0 +1,1147 @@ +# -*- coding: utf-8 -*-%0A#%0A# Copyright (c) 2018 F5 Networks Inc.%0A# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)%0A%0Afrom __future__ import absolute_import, division, print_function%0A__metaclass__ = type%0A%0A%0Adef ipv6_netmask_to_cidr(mask):%0A %22%22%22converts an IPv6 netmask to CIDR form%0A%0A According to the link below, CIDR is the only official way to specify%0A a subset of IPv6. With that said, the same link provides a way to%0A loosely convert an netmask to a CIDR.%0A%0A Arguments:%0A mask (string): The IPv6 netmask to convert to CIDR%0A%0A Returns:%0A int: The CIDR representation of the netmask%0A%0A References:%0A https://stackoverflow.com/a/33533007%0A http://v6decode.com/%0A %22%22%22%0A bit_masks = %5B%0A 0, 0x8000, 0xc000, 0xe000, 0xf000, 0xf800,%0A 0xfc00, 0xfe00, 0xff00, 0xff80, 0xffc0,%0A 0xffe0, 0xfff0, 0xfff8, 0xfffc, 0xfffe,%0A 0xffff%0A %5D%0A count = 0%0A try:%0A for w in mask.split(':'):%0A if not w or int(w, 16) == 0:%0A break%0A count += bit_masks.index(int(w, 16))%0A return count%0A except:%0A return -1%0A
8922f9430ec2844a3a14621ad0625aa45999c92a
fix args order
lixian_hash.py
lixian_hash.py
#!/usr/bin/env python import hashlib import lixian_hash_ed2k import lixian_hash_bt import os def lib_hash_file(h, path): with open(path, 'rb') as stream: while True: bytes = stream.read(1024*1024) if not bytes: break h.update(bytes) return h.hexdigest() def sha1_hash_file(path): return lib_hash_file(hashlib.sha1(), path) def verify_sha1(path, sha1): return sha1_hash_file(path).lower() == sha1.lower() def md5_hash_file(path): return lib_hash_file(hashlib.md5(), path) def verify_md5(path, md5): return md5_hash_file(path).lower() == md5.lower() def md4_hash_file(path): return lib_hash_file(hashlib.new('md4'), path) def verify_md4(path, md4): return md4_hash_file(path).lower() == md4.lower() def dcid_hash_file(path): h = hashlib.sha1() size = os.path.getsize(path) with open(path, 'rb') as stream: if size < 0xF000: h.update(stream.read()) else: h.update(stream.read(0x5000)) stream.seek(size/3) h.update(stream.read(0x5000)) stream.seek(size-0x5000) h.update(stream.read(0x5000)) return h.hexdigest() def verify_dcid(path, dcid): return dcid_hash_file(path).lower() == dcid.lower() def main(args): option = args.pop(0) if option.startswith('--verify'): hash_fun = {'--verify-sha1':verify_sha1, '--verify-md5':verify_md5, '--verify-md4':verify_md4, '--verify-dcid':verify_dcid, '--verify-ed2k':lixian_hash_ed2k.verify_ed2k_link, '--verify-bt':lixian_hash_bt.verify_bt_file, }[option] assert len(args) == 2 hash, path = args if hash_fun(path, hash): print 'looks good...' else: print 'failed...' else: hash_fun = {'--sha1':sha1_hash_file, '--md5':md5_hash_file, '--md4':md4_hash_file, '--dcid':dcid_hash_file, '--ed2k':lixian_hash_ed2k.generate_ed2k_link, '--info-hash':lixian_hash_bt.info_hash, }[option] for f in args: h = hash_fun(f) print '%s *%s' % (h, f) if __name__ == '__main__': import sys args = sys.argv[1:] main(args)
Python
0.999403
@@ -1432,16 +1432,30 @@ ify-bt': + lambda f, t: lixian_h @@ -1471,24 +1471,30 @@ rify_bt_file +(t, f) ,%0A%09%09%09%09 %7D%5Bo
0814bbf6867a4bdd9d92c63e467f237b6129ee28
add solution for palindrome number
leetcode/palindrome-number/sol.py
leetcode/palindrome-number/sol.py
Python
0.999935
@@ -0,0 +1,969 @@ +#!/usr/bin/env python%0Aclass Solution:%0A # @return a boolean%0A def isPalindrome(self, x):%0A if x == -1:%0A return True%0A def ll(x):%0A return 0 if x == 0 or x == -1 else ll(x/10)+1%0A %0A p = x %3E= 0%0A l = ll(x)%0A print %22x is %25d l is %25d%22 %25 (x, l)%0A t = x%0A for a in range(l/2):%0A mark = 10**(a)+10**(l-1-a)%0A b = (t / (10**(a))) %25 10%0A b = b if p else 10-b%0A t = (t - b * mark) if p else (t+b*mark)%0A # print %22t=%25d%22 %25 (t)%0A if l %25 2:%0A b = (t/(10**(l/2))) %25 10%0A b = b if p else 10-b%0A t = (t - b * (10**(l/2))) if p else (t+b*(10**(l/2)))%0A return t == 0%0A%0Aif __name__ == %22__main__%22:%0A sol = Solution()%0A print sol.isPalindrome(-2147483648)%0A print sol.isPalindrome(1234321)%0A print sol.isPalindrome(-1234321)%0A print sol.isPalindrome(1)%0A print sol.isPalindrome(-1)%0A print sol.isPalindrome(-11)%0A
d2a92c5d628f426c26374dea6cb37bd35ba18812
print variables
bin/basenji_variables.py
bin/basenji_variables.py
Python
0.000103
@@ -0,0 +1,2067 @@ +#!/usr/bin/env python%0A# Copyright 2017 Calico LLC%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# https://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A# =========================================================================%0A%0Afrom __future__ import print_function%0Afrom optparse import OptionParser%0Aimport os%0Aimport sys%0Aimport time%0A%0Aimport h5py%0Aimport tensorflow as tf%0A%0Afrom basenji import params%0Afrom basenji import seqnn%0A%0A%22%22%22%0Abasenji_variables.py%0A%0APrint a model's variables, typically for debugging purposes.%0A%22%22%22%0A%0A################################################################################%0A# main%0A################################################################################%0Adef main():%0A usage = 'usage: %25prog %5Boptions%5D %3Cparams_file%3E %3Cmodel_file%3E'%0A parser = OptionParser(usage)%0A (options, args) = parser.parse_args()%0A%0A if len(args) != 2:%0A parser.error('Must provide parameters, model, and test data HDF5')%0A else:%0A params_file = args%5B0%5D%0A model_file = args%5B1%5D%0A%0A #######################################################%0A # model parameters and placeholders%0A%0A job = params.read_job_params(params_file)%0A model = seqnn.SeqNN()%0A model.build(job)%0A%0A # initialize saver%0A saver = tf.train.Saver()%0A%0A with tf.Session() as sess:%0A # load variables into session%0A saver.restore(sess, model_file)%0A%0A for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):%0A print(v.name, v.shape)%0A%0A%0A################################################################################%0A# __main__%0A################################################################################%0Aif __name__ == '__main__':%0A main()%0A
0598e61d9bcef2217f22cce2deeec08ed6868575
Add rmd.py
scripts/rmd.py
scripts/rmd.py
Python
0.000002
@@ -0,0 +1,2548 @@ +#!/usr/bin/env python%0A%0Aimport argparse%0Aimport sys%0Aimport logging%0Aimport os%0Aimport os.path as pt%0Aimport shutil%0A%0A%0Aclass App(object):%0A%0A def run(self, args):%0A name = pt.basename(args%5B0%5D)%0A parser = self.create_parser(name)%0A opts = parser.parse_args(args%5B1:%5D)%0A return self.main(name, opts)%0A%0A def create_parser(self, name):%0A p = argparse.ArgumentParser(%0A prog=name,%0A formatter_class=argparse.ArgumentDefaultsHelpFormatter,%0A description='Run rmd script')%0A p.add_argument(%0A 'rmd_file',%0A help='RMD file')%0A p.add_argument(%0A '-o', '--out_file',%0A help='Output file')%0A p.add_argument(%0A '-f', '--format',%0A help='Output format',%0A default='html',%0A choices=%5B'html', 'pdf', 'word'%5D)%0A p.add_argument(%0A '--cmd',%0A help='R command')%0A p.add_argument(%0A '--copy',%0A help='Copy to file')%0A p.add_argument(%0A '--test',%0A help='Print command without executing',%0A action='store_true')%0A p.add_argument(%0A '--verbose',%0A help='More detailed log messages',%0A action='store_true')%0A p.add_argument(%0A '--log_file',%0A help='Write log messages to file')%0A return p%0A%0A def main(self, name, opts):%0A logging.basicConfig(filename=opts.log_file,%0A format='%25(levelname)s (%25(asctime)s): %25(message)s')%0A log = logging.getLogger(name)%0A if opts.verbose:%0A log.setLevel(logging.DEBUG)%0A else:%0A log.setLevel(logging.INFO)%0A log.debug(opts)%0A%0A rmd_file = opts.rmd_file%0A if opts.copy:%0A shutil.copyfile(rmd_file, opts.copy)%0A rmd_file = opts.copy%0A _format = opts.format%0A out_file = opts.out_file%0A if out_file is None:%0A out_file = '%25s.%25s' %25 (pt.splitext(rmd_file)%5B0%5D, opts.format)%0A else:%0A _format = pt.splitext(out_file)%5B1%5D%5B1:%5D%0A Rcmd = ''%0A if opts.cmd is not None:%0A Rcmd = '%25s;' %25 (opts.cmd)%0A cmd = %22library(rmarkdown); %7Bc%7D render('%7Br%7D', output_file='%7Bo%7D', output_format='%7Bf%7D_document')%22%0A cmd = cmd.format(c=Rcmd, r=rmd_file, o=out_file, f=_format)%0A cmd = 'Rscript -e %22%25s%22' %25 (cmd)%0A print(cmd)%0A if not opts.test:%0A os.system(cmd)%0A%0A return 0%0A%0A%0Aif __name__ == '__main__':%0A app = App()%0A app.run(sys.argv)%0A
30a8e40efee241dd6aa3b534814655b9f70cfffe
Add 020-valid-parentheses.py, but missed case "([])", the description is confused
020-valid-parentheses.py
020-valid-parentheses.py
Python
0.998825
@@ -0,0 +1,1075 @@ +%22%22%22%0AQuestion:%0A Valid Parentheses My Submissions Question Solution%0A%0A Given a string containing just the characters '(', ')', '%7B', '%7D', '%5B' and '%5D', determine if the input string is valid.%0A%0A The brackets must close in the correct order, %22()%22 and %22()%5B%5D%7B%7D%22 are all valid but %22(%5D%22 and %22(%5B)%5D%22 are not.%0A%0APerformance:%0A 1. Total Accepted: 71155 Total Submissions: 265078 Difficulty: Easy%0A%22%22%22%0A%0A%0Aclass Solution(object):%0A def isValid(self, s):%0A %22%22%22%0A :type s: str%0A :rtype: bool%0A %22%22%22%0A valid_set = set(%5B%22()%22, %22%5B%5D%22, %22%7B%7D%22%5D)%0A max_group = (len(s) + 1) / 2%0A is_valid = True%0A%0A for idx in xrange(max_group):%0A curr_group = s%5Bidx*2:idx*2+2%5D%0A if curr_group not in valid_set:%0A is_valid = False%0A break%0A%0A return is_valid%0A%0A%0Aassert Solution().isValid(%22()%22) is True%0Aassert Solution().isValid(%22()%5B%5D%7B%7D%22) is True%0Aassert Solution().isValid(%22(%5B%5D)%22) is True%0Aassert Solution().isValid(%22(%5D%22) is False%0Aassert Solution().isValid(%22(%5B)%5D%22) is False%0Aassert Solution().isValid(%22%5B%22) is False%0A
8249d33898500d9d39e8bee3d44d39c2a6034659
Add script to create overlays
scripts/create_overlays.py
scripts/create_overlays.py
Python
0.000001
@@ -0,0 +1,712 @@ +%22%22%22Varcan smart tool.%22%22%22%0A%0Aimport click%0A%0Afrom dtoolcore import DataSet%0A%0A%[email protected]()%[email protected]('dataset_uri')%[email protected]('--config-path', type=click.Path(exists=True))%0Adef main(dataset_uri, config_path=None):%0A%0A dataset = DataSet.from_uri(dataset_uri, config_path=config_path)%0A%0A def name_from_identifier(identifier):%0A item_properties = dataset.item_properties(identifier)%0A name = item_properties%5B'relpath'%5D.rsplit('.', 1)%5B0%5D%0A return name%0A%0A useful_name_overlay = %7B%0A identifier: name_from_identifier(identifier)%0A for identifier in dataset.identifiers%0A %7D%0A%0A dataset.put_overlay(%22useful_name%22, useful_name_overlay)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
5510f90565809471e545584419b22980b63a1864
Add metadata
bids_writer/_metadata.py
bids_writer/_metadata.py
Python
0.000044
@@ -0,0 +1,249 @@ +# -*- coding: utf-8 -*-%0Aversion = %220.1.0%22%0Aauthor = %22Nathan Vack%22%0Aauthor_email = %[email protected]%22%0Alicense = %22MIT%22%0Acopyright = %22Copyright 2015 Boards of Regent of the University of Wisconsin System%22%0Aurl = %22https://github.com/njvack/bids-json-writer%22%0A
65f574973bbde545c1c815d0ad21e4a8d3f3b59d
Add initial cbio client
bioagents/cbio_client.py
bioagents/cbio_client.py
Python
0
@@ -0,0 +1,1237 @@ +import os%0Aimport json%0Aimport logging%0Aimport requests%0Afrom collections import defaultdict%0A%0A%0Alogger = logging.getLogger(__name__)%0A%0Abase_url = 'https://www.cbioportal.org/api'%0A%0Aresources_dir = os.path.join(os.path.dirname(%0A os.path.abspath(__file__)), os.pardir, 'resources')%0A%0Apatient_list_cache = os.path.join(resources_dir, 'cbio_patients.json')%0A%0A%0Adef get_patient_list():%0A if os.path.exists(patient_list_cache):%0A logger.info('Loading patient list from cache at %25s' %25%0A patient_list_cache)%0A with open(patient_list_cache, 'r') as fh:%0A patient_list = json.load(fh)%0A else:%0A logger.info('Querying patient list from cBioPortal')%0A url = base_url + '/patients'%0A res = requests.get(url)%0A patient_list = res.json()%0A%0A with open(patient_list_cache, 'w') as fh:%0A json.dump(patient_list, fh, indent=1)%0A%0A patients_by_id = defaultdict(list)%0A patients_by_study = defaultdict(list)%0A for patient in patient_list:%0A patients_by_id%5Bpatient%5B'patientId'%5D%5D.append(patient)%0A patients_by_study%5Bpatient%5B'studyId'%5D%5D.append(patient)%0A return dict(patients_by_id), dict(patients_by_study)%0A%0A%0Apatients_by_id, patients_by_study = get_patient_list()%0A
bf60d3c48a30863571a8700fa5a843be48e7646b
add vat_reckoner
components/vat_reckoner/vat_reckoner.py
components/vat_reckoner/vat_reckoner.py
Python
0.000284
@@ -0,0 +1,1003 @@ +#! /usr/bin/env python%0A%0Afrom json import loads, dumps%0Afrom pika import BlockingConnection, ConnectionParameters%0A%0ARABBIT_MQ_HOST = '54.76.183.35'%0ARABBIT_MQ_PORT = 5672%0A%0Adef vat(ch, method, properties, body):%0A product = loads(body)%0A sku, price = product%5B'sku'%5D, product%5B'price'%5D%0A vat = price * 0.20%0A vat_fact = %7B'sku': sku, 'vat': vat%7D%0A%0A print 'Calculated vat %25s' %25 (vat_fact,)%0A%0A channel.basic_publish(exchange='alex2',%0A routing_key='vat',%0A body=dumps(vat_fact))%0A%0Aconnection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,%0A port=RABBIT_MQ_PORT))%0Achannel = connection.channel()%0A%0Achannel.exchange_declare(exchange='alex2', type='topic')%0A%0Aresult = channel.queue_declare(exclusive=True)%0Aqueue = result.method.queue%0A%0Achannel.queue_bind(exchange='alex2', queue=queue, routing_key='new_products')%0A%0Achannel.basic_consume(vat, queue=queue, no_ack=True)%0Achannel.start_consuming()%0A
77d90ec03eff1946a422e5471cc1a64708eff0f4
Test dramatis personae
shakespearelang/tests/unit/test_dramatis_personae.py
shakespearelang/tests/unit/test_dramatis_personae.py
Python
0.000007
@@ -0,0 +1,1130 @@ +from shakespearelang import Shakespeare%0Afrom shakespearelang.errors import ShakespeareRuntimeError%0Aimport pytest%0A%0AMANY_CHARACTERS_PLAY = %22%22%22%0AA lot of people.%0A%0AAchilles, a test.%0AChristopher Sly, a test.%0ADemetrius, a test.%0AJohn of Lancaster, a test.%0AJuliet, a test.%0AMistress Overdone, a test.%0ARomeo, a test.%0AStephano, a test.%0AThe Abbot of Westminster, a test.%0AThe Ghost, a test.%0ATitania, a test.%0AVincentio, a test.%0A%22%22%22%0A%0Adef test_correct_characters():%0A s = Shakespeare('Foo. Juliet, a test. Romeo, a test. The Ghost, a test.')%0A assert sorted(%5Bc.name for c in s.characters%5D) == %5B'Juliet', 'Romeo', 'The Ghost'%5D%0A%0Adef test_no_characters():%0A s = Shakespeare('Foo. Act I: The beginning.')%0A assert s.characters == %5B%5D%0A%0Adef test_many_characters():%0A s = Shakespeare(MANY_CHARACTERS_PLAY)%0A assert sorted(%5Bc.name for c in s.characters%5D) == %5B%0A 'Achilles',%0A 'Christopher Sly',%0A 'Demetrius',%0A 'John of Lancaster',%0A 'Juliet',%0A 'Mistress Overdone',%0A 'Romeo',%0A 'Stephano',%0A 'The Abbot of Westminster',%0A 'The Ghost',%0A 'Titania',%0A 'Vincentio',%0A %5D%0A
0ba11dd47dac04f3f7a314cf320558ccbc9eb148
Add test for water polygon name dropping.
integration-test/1477-water-layer-too-big.py
integration-test/1477-water-layer-too-big.py
Python
0
@@ -0,0 +1,1731 @@ +# -*- encoding: utf-8 -*-%0Afrom . import FixtureTest%0A%0A%0Aclass WaterLayerTooBigTest(FixtureTest):%0A%0A def test_drop_label(self):%0A from tilequeue.tile import calc_meters_per_pixel_area%0A from shapely.ops import transform%0A from tilequeue.tile import reproject_mercator_to_lnglat%0A import math%0A import dsl%0A%0A for zoom in range(5, 16):%0A area = 270.0 * calc_meters_per_pixel_area(zoom)%0A radius = math.sqrt(area / math.pi)%0A%0A coord = 2 ** (zoom - 1)%0A%0A # larger feature should retain name%0A shape = dsl.tile_centre_shape(%0A zoom, coord, coord).buffer(radius * 1.1)%0A shape_lnglat = transform(%0A reproject_mercator_to_lnglat, shape)%0A%0A self.generate_fixtures(%0A dsl.way(1, shape_lnglat, %7B%0A 'natural': 'water',%0A 'name': 'Foo',%0A %7D),%0A )%0A%0A self.assert_has_feature(%0A zoom, coord, coord, 'water', %7B%0A 'kind': 'water',%0A 'name': 'Foo',%0A %7D)%0A%0A # smaller shape should drop it%0A shape = dsl.tile_centre_shape(%0A zoom, coord, coord).buffer(radius / 1.1)%0A shape_lnglat = transform(%0A reproject_mercator_to_lnglat, shape)%0A%0A self.generate_fixtures(%0A dsl.way(1, shape_lnglat, %7B%0A 'natural': 'water',%0A 'name': 'Foo',%0A %7D),%0A )%0A%0A self.assert_has_feature(%0A zoom, coord, coord, 'water', %7B%0A 'kind': 'water',%0A 'name': type(None),%0A %7D)%0A
865dc29421c1e9ef4bf340bf32164863cc5f2006
Add management command to list installed spiders
app/raw/management/commands/list_spiders.py
app/raw/management/commands/list_spiders.py
Python
0
@@ -0,0 +1,252 @@ +from django.core.management import BaseCommand%0Afrom raw.utils import list_spiders%0A%0A%0Aclass Command(BaseCommand):%0A help = 'List installed spiders'%0A%0A def handle(self, *args, **options):%0A for spider in list_spiders():%0A print spider%0A
77966f7f993e526467b2e54e0d12241354efec16
add spec for re2
build/fbcode_builder/specs/re2.py
build/fbcode_builder/specs/re2.py
Python
0
@@ -0,0 +1,372 @@ +#!/usr/bin/env python%0Afrom __future__ import absolute_import%0Afrom __future__ import division%0Afrom __future__ import print_function%0Afrom __future__ import unicode_literals%0A%0A%0Adef fbcode_builder_spec(builder):%0A return %7B%0A 'steps': %5B%0A builder.github_project_workdir('google/re2', 'build'),%0A builder.cmake_install('google/re2'),%0A %5D,%0A %7D%0A
ec5e7171c454c17c62bb2d8364902d287586cb6c
Fix qibuild open when default toolchain is undefined
python/qibuild/actions/open.py
python/qibuild/actions/open.py
## Copyright (c) 2012 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.

"""Open a project with an IDE

"""

import os
import sys
import glob
import subprocess

from qibuild import ui
import qibuild

SUPPORTED_IDES = ["QtCreator", "Visual Studio", "Xcode"]


def configure_parser(parser):
    """Configure parser for this action """
    qibuild.parsers.toc_parser(parser)
    qibuild.parsers.build_parser(parser)
    parser.add_argument("project", nargs="?")


def get_ide(qibuild_cfg):
    """ Return an IDE to use """
    known_ides = qibuild_cfg.ides.values()
    ide_names = qibuild_cfg.ides.keys()
    if not known_ides:
        mess = "Could not find any IDE in configuration\n"
        mess += "Please use `qibuild config --wizard` or `qibuild config --edit`"
        raise Exception(mess)
    # Remove the one that are not supported:
    supported_ides = [x for x in known_ides if x.name in SUPPORTED_IDES]
    if len(supported_ides) == 1:
        return supported_ides[0]
    if not supported_ides:
        mess = "Found those IDEs in configuration: %s\n" % ", ".join(ide_names)
        mess += "But `qibuild open` only supports: %s\n" % ", ".join(SUPPORTED_IDES)
        raise Exception(mess)
    # User chose a specific config and an IDE matches this config
    if qibuild_cfg.ide:
        return qibuild_cfg.ide
    supported_names = [x.name for x in supported_ides]
    # Several IDEs, ask the user to choose
    ide_name = qibuild.interact.ask_choice(supported_names,
        "Please choose an ide to use")
    if not ide_name:
        return None
    return qibuild_cfg.ides[ide_name]


def do(args):
    """Main entry point """
    toc = qibuild.toc.toc_open(args.worktree, args)
    project = qibuild.cmdparse.project_from_args(toc, args)
    if not os.path.exists(project.build_directory):
        ui.error("""It looks like your project has not been configured yet
(The build directory: '%s' does not exists)""" % project.build_directory)
        answer = qibuild.interact.ask_yes_no(
            "Do you want me to run qibuild configure for you?",
            default=True)
        if not answer:
            sys.exit(2)
        else:
            qibuild.run_action("qibuild.actions.configure",
                [project.name, "--config", toc.active_config])

    error_message = "Could not open project %s\n" % project.name
    qibuild_cfg = qibuild.config.QiBuildConfig(user_config=toc.active_config)
    qibuild_cfg.read()
    qibuild_cfg.read_local_config(toc.config_path)
    ide = get_ide(qibuild_cfg)
    if not ide:
        return
    if ide.name == "Visual Studio":
        sln_files = glob.glob(project.build_directory + "/*.sln")
        if len(sln_files) != 1:
            raise Exception(error_message +
                "Expecting only one sln, got %s" % sln_files)
        print "starting VisualStudio:"
        print "%s %s" % ("start", sln_files[0])
        subprocess.Popen(["start", sln_files[0]], shell=True)
        return
    if ide.name == "Xcode":
        projs = glob.glob(project.build_directory + "/*.xcodeproj")
        if len(projs) == 0:
            raise Exception(error_message +
                "Do you have called qibuild configure with --cmake-generator=Xcode?")
        if len(projs) > 1:
            raise Exception(error_message +
                "Expecting only one xcode project file, got %s" % projs)
        print "starting Xcode:"
        print "%s %s" % ("open", projs[0])
        subprocess.Popen(["open", projs[0]])
        return
    if ide.name == "QtCreator":
        ide_path = ide.path
        if not ide_path:
            ide_path = 'qtcreator'
        cmake_list = os.path.join(project.directory, "CMakeLists.txt")
        if not os.access(ide_path, os.X_OK):
            mess = "Invalid configuration dectected!\n"
            mess += "QtCreator path (%s) is not a valid path\n" % ide_path
            mess += "Please run `qibuild config --wizard\n"
            raise Exception(mess)
        print "starting QtCreator:"
        print ide_path, cmake_list
        subprocess.Popen([ide_path, cmake_list])
        return
    # Not supported (yet) IDE:
    mess = "Invalid ide: %s\n" % ide.name
    mess += "Supported IDES are: %s" % ", ".join(SUPPORTED_IDES)
    raise Exception(mess)
Python
0.000001
@@ -2282,55 +2282,63 @@ -qibuild.run_action(%22qibuild +args = %5Bproject.name%5D%0A if toc .acti -ons. +ve_ config -ure%22, +: %0A @@ -2352,25 +2352,21 @@ - %5Bproject.name, +args.extend(%5B %22--c @@ -2392,16 +2392,82 @@ config%5D) +%0A qibuild.run_action(%22qibuild.actions.configure%22, args) %0A%0A er
15388e09ab537d3731891353c54f53105c4a7ee4
add files
weixin_pay.py
weixin_pay.py
Python
0.000002
@@ -0,0 +1,63 @@ +#!/usr/bin/env python%0A# coding=utf-8%0A__author__ = 'youqingkui'%0A
b7360d6ba397f8654f4e051227aa86a1ebe693f7
Add main program
follow.py
follow.py
Python
0.000002
@@ -0,0 +1,964 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport sys%0A%0Afrom github import Github%0A%0A%0A# usage%0Adef usage():%0A print 'Follow GitHub user%5C's starred, watching and following.'%0A print%0A print 'Usage: python follow.py %3Ctoken%3E %3Cuser%3E'%0A print%0A print 'token: Go to https://github.com/settings/tokens and %60Generate new token%60 with scope %60public_repo%60.'%0A print%0A print 'user: GitHub user ID you want to follow.'%0A%0A%0A# args%0Aif len(sys.argv) != 3:%0A usage()%0A exit(1)%0A%0A# variable%0Ame = Github(sys.argv%5B1%5D)%0A%0AnamedUser = Github().get_user(sys.argv%5B2%5D)%0A%0A# action%0Afor starred in namedUser.get_starred().reversed:%0A me.get_user().add_to_starred(starred)%0A%0Afor subscription in namedUser.get_subscriptions().reversed:%0A me.get_user().add_to_subscriptions(subscription)%0A%0Afor watched in namedUser.get_watched().reversed:%0A me.get_user().add_to_watched(watched)%0A%0Afor following in namedUser.get_following().reversed:%0A me.get_user().add_to_following(following)%0A
9080d20bd61ac66a534c834a17a9825808416512
Add pre-stage hook for FrostNumberModel
metadata/FrostNumberModel/hooks/pre-stage.py
metadata/FrostNumberModel/hooks/pre-stage.py
Python
0
@@ -0,0 +1,611 @@ +%22%22%22A hook for modifying parameter values read from the WMT client.%22%22%22%0A%0Aimport os%0Aimport shutil%0A%0Afrom wmt.utils.hook import find_simulation_input_file%0Afrom topoflow_utils.hook import assign_parameters%0A%0A%0Afile_list = %5B%5D%0A%0A%0Adef execute(env):%0A %22%22%22Perform pre-stage tasks for running a component.%0A%0A Parameters%0A ----------%0A env : dict%0A A dict of component parameter values from WMT.%0A%0A %22%22%22%0A assign_parameters(env, file_list)%0A env%5B'fn_out_filename'%5D = 'frostnumber_output.dat'%0A%0A for fname in file_list:%0A src = find_simulation_input_file(env%5Bfname%5D)%0A shutil.copy(src, os.curdir)%0A
70849edc52acc1c559b35a55c7f1925c1cbf57ad
add new tagcount plugin for yawt rewrite
yawtext/tagcount.py
yawtext/tagcount.py
Python
0
@@ -0,0 +1,2049 @@ +from flask import current_app, g, Blueprint%0Aimport jsonpickle%0Afrom yawt.utils import save_file, load_file%0Aimport os%0A%0Atagcountsbp = Blueprint('tagcounts', __name__)%0A%[email protected]_context_processor%0Adef tagcounts():%0A tagcountfile = current_app.config%5B'YAWT_TAGCOUNT_FILE'%5D%0A tvars = %7B%7D%0A if os.path.isfile(tagcountfile):%0A tagbase = current_app.config%5B'YAWT_TAGCOUNT_BASE'%5D%0A if not tagbase.endswith('/'):%0A tagbase += '/'%0A tvars = %7B'tagcounts': jsonpickle.decode(load_file(tagcountfile)),%0A 'tagbase': tagbase%7D%0A return tvars%0A%0A%0Aclass YawtTagCount(object):%0A def __init__(self, app=None):%0A self.app = app%0A if app is not None:%0A self.init_app(app)%0A self.tagcounts = %7B%7D%0A%0A def init_app(self, app):%0A app.config.setdefault('YAWT_TAGCOUNT_BASE', '')%0A app.config.setdefault('YAWT_TAGCOUNT_FILE', '/tmp/tagcounts')%0A app.register_blueprint(tagcountsbp)%0A%0A def on_pre_walk(self):%0A self.tagcounts = %7B%7D%0A%0A def on_visit_article(self, article):%0A if hasattr(article.info, 'taglist'):%0A for tag in article.info.taglist:%0A if tag in self.tagcounts:%0A self.tagcounts%5Btag%5D += 1%0A else:%0A self.tagcounts%5Btag%5D = 1%0A%0A def on_post_walk(self): %0A pickled_counts = jsonpickle.encode(self.tagcounts)%0A save_file(current_app.config%5B'YAWT_TAGCOUNT_FILE'%5D, pickled_counts)%0A%0A def on_files_changed(self, files_modified, files_added, files_removed):%0A pickled_counts = load_file(current_app.config%5B'YAWT_TAGCOUNT_FILE'%5D)%0A self.tagcounts = jsonpickle.decode(pickled_counts)%0A%0A for f in files_removed + files_modified: %0A article = g.store.fetch_article_by_repofile(f)%0A for tag in article.info.taglist:%0A self.tagcounts%5Btag%5D -= 1%0A%0A for f in files_modified + files_added:%0A article = g.store.fetch_article_by_repofile(f)%0A self.on_visit_article(article)%0A%0A self.on_post_walk()%0A
d19aaf0fd3c88c08b2b8563030dd38c0cea3631b
Add unit test for `parse_cluster_info` (#22205)
dashboard/modules/job/tests/test_sdk.py
dashboard/modules/job/tests/test_sdk.py
Python
0
@@ -0,0 +1,3046 @@ +import pytest%0Afrom typing import Dict, Optional, Tuple%0Afrom unittest.mock import Mock, patch%0A%0Afrom ray.dashboard.modules.job.sdk import parse_cluster_info%0A%0A%[email protected](%0A %22address_param%22,%0A %5B%0A (%22ray://1.2.3.4:10001%22, %22ray%22, %221.2.3.4:10001%22),%0A (%22other_module://%22, %22other_module%22, %22%22),%0A (%22other_module://address%22, %22other_module%22, %22address%22),%0A %5D,%0A)%[email protected](%22create_cluster_if_needed%22, %5BTrue, False%5D)%[email protected](%22cookies%22, %5BNone, %7B%22test_cookie_key%22: %22test_cookie_val%22%7D%5D)%[email protected](%22metadata%22, %5BNone, %7B%22test_metadata_key%22: %22test_metadata_val%22%7D%5D)%[email protected](%22headers%22, %5BNone, %7B%22test_headers_key%22: %22test_headers_val%22%7D%5D)%0Adef test_parse_cluster_info(%0A address_param: Tuple%5Bstr, str, str%5D,%0A create_cluster_if_needed: bool,%0A cookies: Optional%5BDict%5Bstr, str%5D%5D,%0A metadata: Optional%5BDict%5Bstr, str%5D%5D,%0A headers: Optional%5BDict%5Bstr, str%5D%5D,%0A):%0A %22%22%22%0A Test ray.dashboard.modules.job.sdk.parse_cluster_info for different%0A format of addresses.%0A %22%22%22%0A mock_get_job_submission_client_cluster = Mock(return_value=%22Ray ClusterInfo%22)%0A mock_module = Mock()%0A mock_module.get_job_submission_client_cluster_info = Mock(%0A return_value=%22Other module ClusterInfo%22%0A )%0A mock_import_module = Mock(return_value=mock_module)%0A%0A address, module_string, inner_address = address_param%0A%0A with patch.multiple(%0A %22ray.dashboard.modules.job.sdk%22,%0A get_job_submission_client_cluster_info=mock_get_job_submission_client_cluster,%0A ), patch.multiple(%22importlib%22, import_module=mock_import_module):%0A if module_string == %22ray%22:%0A assert (%0A parse_cluster_info(%0A address,%0A create_cluster_if_needed=create_cluster_if_needed,%0A cookies=cookies,%0A metadata=metadata,%0A headers=headers,%0A )%0A == %22Ray ClusterInfo%22%0A )%0A mock_get_job_submission_client_cluster.assert_called_once_with(%0A inner_address,%0A create_cluster_if_needed=create_cluster_if_needed,%0A cookies=cookies,%0A metadata=metadata,%0A headers=headers,%0A )%0A elif module_string == %22other_module%22:%0A assert (%0A parse_cluster_info(%0A address,%0A create_cluster_if_needed=create_cluster_if_needed,%0A cookies=cookies,%0A metadata=metadata,%0A headers=headers,%0A )%0A == %22Other module ClusterInfo%22%0A )%0A mock_import_module.assert_called_once_with(module_string)%0A mock_module.get_job_submission_client_cluster_info.assert_called_once_with(%0A inner_address,%0A create_cluster_if_needed=create_cluster_if_needed,%0A cookies=cookies,%0A metadata=metadata,%0A headers=headers,%0A )%0A
3c685922756a582030980f319014ba308735ee2c
add nextlaunch command
src/tenyksscripts/scripts/rockets.py
src/tenyksscripts/scripts/rockets.py
Python
0.000001
@@ -0,0 +1,578 @@ +import datetime%0Aimport requests%0Aimport time%0A%0A%0Adef run(data, settings):%0A if data%5B%22payload%22%5D != %22nextlaunch%22:%0A return%0A%0A launches = requests.get(%22https://launchlibrary.net/1.2/launch%22, params=%7B%22next%22: 1, %22mode%22: %22verbose%22%7D).json()%0A if not launches%5B%22count%22%5D:%0A return %22No launches scheduled%22%0A%0A launch = launches%5B%22launches%22%5D%5B0%5D%0A delta = datetime.timedelta(seconds=launch%5B%22netstamp%22%5D - int(time.time()))%0A%0A return %22Next launch: %7Bname%7D. When: %7Btime%7D (in %7Bdelta%7D)%22.format(%0A name=launch%5B%22name%22%5D,%0A time=launch%5B%22net%22%5D,%0A delta=delta%0A )%0A
ba3e2a81a5e89c010473820732835d8bf7ccc39a
Create morningstar.py
morningstar.py
morningstar.py
Python
0.000007
@@ -0,0 +1,1332 @@ +import os%0Aimport sys%0Aimport threading%0Aimport thread%0Aimport time%0Aimport settings%0Aimport subprocess%0Aimport psutil%0A%0Aclass watchman(threading.Thread):%0A%09def __init__(self):%0A%09%09threading.Thread.__init__(self)%0A%09def run(self):%0A%09%09badwinprocs = %5B'taskmgr', 'regedit', 'mbam', 'cmd', 'command'%5D%0A%09%09if 'lucifer' in sys.argv%5B0%5D:%0A%09%09%09exe = %22morningstar%22%0A%09%09else:%0A%09%09%09exe = %22lucifer%22%0A%09%09while 1:%0A%09%09%09#%0A%09%09%09processlist = psutil.pids()%0A%09%09%09x = False%0A%09%09%09for process in processlist:%0A%09%09%09%09try:%0A%09%09%09%09%09proc = psutil.Process(process)%0A%09%09%09%09%09print proc.name()%0A%09%09%09%09%09if exe in proc.name():%0A%09%09%09%09%09%09x = True%0A%09%09%09%09%09elif proc.name() in badwinprocs:%0A%09%09%09%09%09%09proc.stop()%0A%09%09%09%09except: print 'psutil error'%0A%09%09%09if x == False:%0A%09%09%09%09print exe + ' not running...'%0A%09%09%09%09os.popen('Shutdown -s -f -t 000')%0A%09%09%09%09sys.exit()%0A%09%09%09%09#break%0A%09%09%09#%0A%09%09%09%0Adef startup():%0A%09time.sleep(5)%0A%09try:%0A%09%09startupshit = glob.glob(%22*.exe%22)%0A%09%09for nigger in startupshit:%0A%09%09%09try:%0A%09%09%09%09if nigger in sys.argv%5B0%5D:%0A%09%09%09%09%09pass%0A%09%09%09%09else:%0A%09%09%09%09%09os.popen(killcmd + ' ' + nigger)%0A%09%09%09except:%0A%09%09%09%09print prefix + %22couldn't kill the %22 + nigger # HA!%0A%09%09subprocess.check_call(%22attrib +R +S +H %22 + sys.argv%5B0%5D, shell=True)%0A%09except:%0A%09%09pass%0A%0Aif 'lucifer' in sys.argv%5B0%5D:%0A%09print %22%5B %3E %5D Morningstar loaded%22%0Aelse:%0A%09thread.start_new_thread(startup, ())%0A%09print %22%5B %3E %5D Startup loaded%22%0A%09time.sleep(5)%0A%09watchman().start()%0A%09print %22%5B %3E %5D Watchman loaded%22%0A
7d546ca0ce8e2e8ef4f71abda50764817ce83c0b
add mouse_click.py
mouse_click.py
mouse_click.py
Python
0.000006
@@ -0,0 +1,242 @@ +from pymouse import PyMouse%0Afrom time import sleep%0A%0Am = PyMouse()%0Asleep(5)%0Ax=969%0Ay=581%0Aa = 1%0Awhile a == 1:%0A m.click(x,y)#%E7%A7%BB%E5%8B%95%E5%88%B0(x,y)%E4%B8%A6%E4%B8%94%E9%BB%9E%E6%93%8A%0A sleep(0.1)%0A p = m.position() #%E7%8D%B2%E5%8F%96%E7%9B%AE%E5%89%8D%E4%BD%8D%E7%BD%AE%0A if not 900%3Cp%5B0%5D%3C1000: #x%E5%BA%A7%E6%A8%99%E4%B8%8D%E5%9C%A8 900~1000%E5%85%A7 %E9%9B%A2%E9%96%8B%E8%BF%B4%E5%9C%88%0A break
9ef3260ba5d27a3274fa6d3112e36091f04989f9
add file
resource-4/permutations/permutationToInteger.py
resource-4/permutations/permutationToInteger.py
Python
0.000001
@@ -0,0 +1,220 @@ +def permutationToInteger(perm):%0A%09permLen = len(perm)%0A%09elts = range(permLen)%0A%09num = 0%0A%09for i in range(permLen):%0A%09%09digit = elts.index(perm%5Bi%5D)%0A%09%09num += digit * math.factorial(permLen - i - 1)%0A%09%09del elts(digit)%0A%09return num%0A
4c5d45f5af83cc2af3a9f29d10b4b2d00f60bbec
Update __init__.py
tendrl/commons/flows/expand_cluster/__init__.py
tendrl/commons/flows/expand_cluster/__init__.py
import etcd
import gevent
import json
import uuid

from tendrl.commons import flows
from tendrl.commons.event import Event
from tendrl.commons.message import Message
from tendrl.commons.flows.create_cluster import \
    utils as create_cluster_utils
from tendrl.commons.flows.expand_cluster import ceph_help
from tendrl.commons.flows.expand_cluster import gluster_help
from tendrl.commons.flows.exceptions import FlowExecutionFailedError
from tendrl.commons.objects.job import Job


class ExpandCluster(flows.BaseFlow):
    def run(self):
        integration_id = self.parameters['TendrlContext.integration_id']
        if integration_id is None:
            raise FlowExecutionFailedError(
                "TendrlContext.integration_id cannot be empty"
            )

        supported_sds = NS.compiled_definitions.get_parsed_defs()['namespace.tendrl']['supported_sds']
        sds_name = self.parameters["TendrlContext.sds_name"]
        if sds_name not in supported_sds:
            raise FlowExecutionFailedError("SDS (%s) not supported" % sds_name)

        ssh_job_ids = []
        if "ceph" in sds_name:
            ssh_job_ids = create_cluster_utils.ceph_create_ssh_setup_jobs(
                self.parameters
            )
        else:
            ssh_job_ids = create_cluster_utils.gluster_create_ssh_setup_jobs(
                self.parameters,
                skip_current_node=True
            )

        while True:
            gevent.sleep(3)
            all_status = {}
            for job_id in ssh_job_ids:
                all_status[job_id] = NS._int.client.read("/queue/%s/status" % job_id).value

            _failed = {_jid: status for _jid, status in all_status.iteritems()
                       if status == "failed"}
            if _failed:
                raise FlowExecutionFailedError("SSH setup failed for jobs %s cluster %s" %
                                               (str(_failed), integration_id))

            if all([status for status in all_status if status == "finished"]):
                Event(
                    Message(
                        job_id=self.parameters['job_id'],
                        flow_id=self.parameters['flow_id'],
                        priority="info",
                        publisher=NS.publisher_id,
                        payload={
                            "message": "SSH setup completed for all "
                                       "nodes in cluster %s" % integration_id
                        }
                    )
                )
                break

        # SSH setup jobs finished above, now install sds
        # bits and create cluster
        if "ceph" in sds_name:
            Event(
                Message(
                    job_id=self.parameters['job_id'],
                    flow_id = self.parameters['flow_id'],
                    priority="info",
                    publisher=NS.publisher_id,
                    payload={
                        "message": "Expanding ceph cluster %s" % integration_id
                    }
                )
            )
            ceph_help.expand_cluster(self.parameters)
        else:
            Event(
                Message(
                    job_id=self.parameters['job_id'],
                    flow_id=self.parameters['flow_id'],
                    priority="info",
                    publisher=NS.publisher_id,
                    payload={
                        "message": "Expanding Gluster Storage"
                                   " Cluster %s" % integration_id
                    }
                )
            )
            gluster_help.expand_gluster(self.parameters)

        # Wait till detected cluster in populated for nodes
        while not all_nodes_have_detected_cluster:
            gevent.sleep(3)
            all_status = []
            detected_cluster = ""
            different_cluster_id = False
            dc = ""
            for node in self.parameters['Node[]']:
                try:
                    dc = NS._int.client.read(
                        "/nodes/%s/DetectedCluster/detected_cluster_id" % node
                    ).value
                    if not detected_cluster:
                        detected_cluster = dc
                    else:
                        if detected_cluster != dc:
                            all_status.append(False)
                            different_cluster_id = True
                            break
                    all_status.append(True)
                except etcd.EtcdKeyNotFound:
                    all_status.append(False)

            if different_cluster_id:
                raise FlowExecutionFailedError(
                    "Seeing different detected cluster id in"
                    " different nodes. %s and %s" % (
                        detected_cluster, dc)
                )

            if all([status for status in all_status if status]):
                break

        # Create the params list for import cluster flow
        new_params = {}
        new_params['Node[]'] = self.parameters['Node[]']
        new_params['TendrlContext.integration_id'] = integration_id

        # Get node context for one of the nodes from list
        sds_pkg_name = NS._int.client.read(
            "nodes/%s/DetectedCluster/"
            "sds_pkg_name" % self.parameters['Node[]'][0]
        ).value
        new_params['import_after_expand'] = True
        if "gluster" in sds_pkg_name:
            new_params['gdeploy_provisioned'] = True

        sds_pkg_version = NS._int.client.read(
            "nodes/%s/DetectedCluster/sds_pkg_"
            "version" % self.parameters['Node[]'][0]
        ).value
        new_params['DetectedCluster.sds_pkg_name'] = \
            sds_pkg_name
        new_params['DetectedCluster.sds_pkg_version'] = \
            sds_pkg_version

        payload = {
            "node_ids": self.parameters['Node[]'],
            "run": "tendrl.flows.ImportCluster",
            "status": "new",
            "parameters": new_params,
            "parent": self.parameters['job_id'],
            "type": "node"
        }
        _job_id = str(uuid.uuid4())
        Job(job_id=_job_id, status="new", payload=payload).save()
        Event(
            Message(
                job_id=self.parameters['job_id'],
                flow_id=self.parameters['flow_id'],
                priority="info",
                publisher=NS.publisher_id,
                payload={
                    "message": "Importing (job_id: %s) newly expanded "
                               "%s Storage nodes %s" % (
                                   _job_id,
                                   sds_pkg_name,
                                   integration_id
                               )
                }
            )
        )
Python
0.000072
@@ -2013,32 +2013,41 @@ us in all_status +.values() if status == %22f
dafa0060460a2d4e820fbdafd33e51363bac0259
Create 01.Mean.py
01.Python/01.Mean.py
01.Python/01.Mean.py
Python
0
@@ -0,0 +1,230 @@ +import numpy as np%0A A = np.array(%5B%5B10,14,11,7,9.5,15,19%5D,%5B8,9,17,14.5,12,18,15.5%5D,%0A %5B15,7.5,11.5,10,10.5,7,11%5D,%5B11.5,11,9,12,14,12,7.5%5D%5D)%0A B = A.T%0A print B%0A print(np.mean(B))%0A print(np.mean(B,axis=0))%0A print(np.mean(A,axis=1))%0A
9570da3427121628d4e144c1092da155583a496d
Add Python benchmark
lib/node_modules/@stdlib/math/base/special/asinh/benchmark/python/benchmark.py
lib/node_modules/@stdlib/math/base/special/asinh/benchmark/python/benchmark.py
Python
0.000138
@@ -0,0 +1,1520 @@ +#!/usr/bin/env python%0A%22%22%22Benchmark asinh.%22%22%22%0A%0Aimport timeit%0A%0Aname = %22asinh%22%0Arepeats = 3%0Aiterations = 1000000%0A%0A%0Adef print_version():%0A %22%22%22Print the TAP version.%22%22%22%0A%0A print(%22TAP version 13%22)%0A%0A%0Adef print_summary(total, passing):%0A %22%22%22Print the benchmark summary.%0A%0A # Arguments%0A%0A * %60total%60: total number of tests%0A * %60passing%60: number of passing tests%0A%0A %22%22%22%0A%0A print(%22#%22)%0A print(%221..%22 + str(total)) # TAP plan%0A print(%22# total %22 + str(total))%0A print(%22# pass %22 + str(passing))%0A print(%22#%22)%0A print(%22# ok%22)%0A%0A%0Adef print_results(elapsed):%0A %22%22%22Print benchmark results.%0A%0A # Arguments%0A%0A * %60elapsed%60: elapsed time (in seconds)%0A%0A # Examples%0A%0A %60%60%60 python%0A python%3E print_results(0.131009101868)%0A %60%60%60%0A %22%22%22%0A%0A rate = iterations / elapsed%0A%0A print(%22 ---%22)%0A print(%22 iterations: %22 + str(iterations))%0A print(%22 elapsed: %22 + str(elapsed))%0A print(%22 rate: %22 + str(rate))%0A print(%22 ...%22)%0A%0A%0Adef benchmark():%0A %22%22%22Run the benchmark and print benchmark results.%22%22%22%0A%0A setup = %22from math import asinh; from random import random;%22%0A stmt = %22y = asinh(200.0*random() - 100.0)%22%0A%0A t = timeit.Timer(stmt, setup=setup)%0A%0A print_version()%0A%0A for i in xrange(3):%0A print(%22# python::%22 + name)%0A elapsed = t.timeit(number=iterations)%0A print_results(elapsed)%0A print(%22ok %22 + str(i+1) + %22 benchmark finished%22)%0A%0A print_summary(repeats, repeats)%0A%0A%0Adef main():%0A %22%22%22Run the benchmark.%22%22%22%0A benchmark()%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
d1ba1a02385581375831fd4b394f68ade4cbb101
Create RX_TX.py
home/hairygael/RX_TX.py
home/hairygael/RX_TX.py
Python
0.000295
@@ -0,0 +1,463 @@ +arduino = Runtime.createAndStart(%22arduino%22,%22Arduino%22)%0Aarduino.setBoardMega()%0Aarduino.connect(%22COM7%22)%0Aarduino1 = Runtime.createAndStart(%22arduino1%22,%22Arduino%22)%0Aarduino1.setBoardAtmega328()%0A %0A#connecting arduino1 to arduino Serial1 instead to a COMX%0Aarduino1.connect(arduino,%22Serial1%22) %0Aservo = Runtime.createAndStart(%22servo%22,%22Servo%22)%0Aservo.attach(arduino1,5)%0A %0A#attaching procedure take a bit more time to do, wait a little before using it%0Asleep(1)%0Aservo.moveTo(90)%0A
c760c3387b6dcf5bd171960a3e64306c7f2519d0
add a rotating colored triangle
pynodegl-utils/pynodegl_utils/examples/misc.py
pynodegl-utils/pynodegl_utils/examples/misc.py
Python
0.000003
@@ -0,0 +1,738 @@ +import math%0A%0Afrom pynodegl import Texture, Shader, TexturedShape, Rotate, AnimKeyFrameScalar, Triangle%0A%0Afrom pynodegl_utils.misc import scene%0A%0A@scene()%0Adef triangle(cfg):%0A frag_data = '''%0A#version 100%0Aprecision mediump float;%0Avarying vec2 var_tex0_coords;%0Avoid main(void)%0A%7B%0A vec2 c = var_tex0_coords;%0A gl_FragColor = vec4(c.y-c.x, 1.0-c.y, c.x, 1.0);%0A%7D'''%0A%0A a = 0.5%0A b = a * math.sqrt(3) / 2.0%0A c = a * 1/2.%0A%0A triangle = Triangle((0, a, 0), (b, -c, 0), (-b, -c, 0))%0A s = Shader(fragment_data=frag_data)%0A node = TexturedShape(triangle, s, Texture())%0A node = Rotate(node, axis=(0,0,1))%0A node.add_animkf(AnimKeyFrameScalar(0, 0),%0A AnimKeyFrameScalar(cfg.duration, -360*2))%0A return node%0A
3e9fc08e096ddb212cf40a285887b7ed5dd8897b
Fix running coverage for nose tests (PY-14869)
python/helpers/coverage_runner/run_coverage.py
python/helpers/coverage_runner/run_coverage.py
"""Coverage.py's main entrypoint.""" import os import sys bundled_coverage_path = os.getenv('BUNDLED_COVERAGE_PATH') if bundled_coverage_path: sys_path_backup = sys.path sys.path = [p for p in sys.path if p != bundled_coverage_path] from coverage.cmdline import main sys.path = sys_path_backup else: from coverage.cmdline import main coverage_file = os.getenv('PYCHARM_COVERAGE_FILE') run_cov = os.getenv('PYCHARM_RUN_COVERAGE') if os.getenv('CREATE_TEMP_COVERAGE_FILE'): line = 'LOG: PyCharm: File mapping:%s\t%s\n' import tempfile (h, new_cov_file) = tempfile.mkstemp(prefix='pycharm-coverage') print(line%(coverage_file, new_cov_file)) print(line%(coverage_file + '.syspath.txt', new_cov_file + '.syspath.txt')) print(line%(coverage_file + '.xml', new_cov_file + '.xml')) coverage_file = new_cov_file if coverage_file: os.environ['COVERAGE_FILE'] = coverage_file if run_cov: a_file = open(coverage_file + '.syspath.txt', mode='w') a_file.write(os.getcwd()+"\n") for path in sys.path: a_file.write(path + "\n") a_file.close() argv = [] for arg in sys.argv: if arg.startswith('-m'): argv.append('-m') argv.append(arg[2:]) else: argv.append(arg) sys.argv = argv cwd = os.getcwd() main() if run_cov: os.chdir(cwd) main(["xml", "-o", coverage_file + ".xml", "--ignore-errors"])
Python
0
@@ -401,16 +401,69 @@ _FILE')%0A +%0Acoverage_file = coverage_file%5B0:-len(%22.coverage%22)%5D%0A%0A run_cov @@ -707,30 +707,58 @@ age_file -, new_cov_file + + %22.coverage%22, new_cov_file + %22.coverage%22 ))%0A p @@ -928,16 +928,25 @@ cov_file + + %22.cov%22 %0A%0Aif cov @@ -1004,16 +1004,31 @@ age_file + + %22.coverage%22%0A %0Aif run_ @@ -1388,15 +1388,38 @@ d()%0A -main()%0A +%0Atry:%0A main()%0Afinally:%0A if r @@ -1430,16 +1430,20 @@ ov:%0A + + os.chdir @@ -1448,16 +1448,20 @@ ir(cwd)%0A + main
fd5ba7ad61a8c7c9aad6b3f1404d819ae21085d1
Add 'calc_pb_flux.py' to calculate the particle background
bin/calc_pb_flux.py
bin/calc_pb_flux.py
Python
0.000024
@@ -0,0 +1,1627 @@ +#!/usr/bin/env python3%0A#%0A# Copyright (c) 2017 Weitian LI %[email protected]%3E%0A# MIT license%0A%0A%22%22%22%0ACalculate the particle background flux (e.g., 9.5-12.0 keV) of the spectra.%0A%0Aflux = counts / exposure / area%0Awhere 'counts' is the total photon counts within the specified energy range;%0A'area' is the value of the %60%60BACKSCAL%60%60 stored in the spectrum.%0Atherefore, the output flux has arbitrary unit.%0A%22%22%22%0A%0Aimport argparse%0A%0Afrom _context import acispy%0Afrom acispy.spectrum import Spectrum%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser(%0A description=%22Calculate the particle background for spectra%22)%0A parser.add_argument(%22-L%22, %22--energy-low%22, dest=%22elow%22,%0A type=int, default=9500,%0A help=%22lower energy limit of the particle %22 +%0A %22background %5BeV%5D (default: 9500 eV)%22)%0A parser.add_argument(%22-H%22, %22--energy-high%22, dest=%22ehigh%22,%0A type=int, default=12000,%0A help=%22upper energy limit of the particle %22 +%0A %22background %5BeV%5D (default: 12000 eV)%22)%0A parser.add_argument(%22-v%22, %22--verbose%22, dest=%22verbose%22, action=%22store_true%22,%0A help=%22show verbose information%22)%0A parser.add_argument(%22infile%22, nargs=%22+%22,%0A help=%22input spectra%22)%0A args = parser.parse_args()%0A%0A for f in args.infile:%0A print(%22=== %25s ===%22 %25 f)%0A spec = Spectrum(f)%0A flux = spec.calc_pb_flux(elow=args.elow, ehigh=args.ehigh,%0A verbose=args.verbose)%0A print(%22flux = %25.5g%22 %25 flux)%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
0f2f4df45ea96551c09358f33f3d33f682b42e8d
add Hydra integration
test.py
test.py
import csv
import os
import subprocess
import threading

# Gather the packages to test.
PREFIX = './packages/node_modules/'
CISCOSPARK = os.path.join(PREFIX, '@ciscospark')
WEBEX = os.path.join(PREFIX, '@webex')

PROD_ENV_VARS = {
  # 'ACL_SERVICE_URL': 'https://acl-a.wbx2.com/acl/api/v1', ?
  'ATLAS_SERVICE_URL': 'https://atlas-a.wbx2.com/admin/api/v1',
  'CONVERSATION_SERVICE': 'https://conv-a.wbx2.com/conversation/api/v1',
  'ENCRYPTION_SERVICE_URL': 'https://encryption-a.wbx2.com',
  'IDBROKER_BASE_URL': 'https://idbroker.webex.com',
  'IDENTITY_BASE_URL': 'https://identity.webex.com',
  'WDM_SERVICE_URL': 'https://wdm-a.wbx2.com/wdm/api/v1',

  # Logging
  'ENABLE_VERBOSE_NETWORK_LOGGING': 'true'
}

INT_ENV_VARS = {
  'ACL_SERVICE_URL': 'https://acl-intb.ciscospark.com/acl/api/v1',
  'ATLAS_SERVICE_URL': 'https://atlas-intb.ciscospark.com/admin/api/v1',
  'CONVERSATION_SERVICE': 'https://conversation-intb.ciscospark.com/conversation/api/v1',
  'ENCRYPTION_SERVICE_URL': 'https://encryption-intb.ciscospark.com/encryption/api/v1',
  'IDBROKER_BASE_URL': 'https://idbrokerbts.webex.com',
  'IDENTITY_BASE_URL': 'https://identitybts.webex.com',
  'WDM_SERVICE_URL': 'https://wdm-intb.ciscospark.com/wdm/api/v1',
  'WHISTLER_API_SERVICE_URL': 'https://whistler.onint.ciscospark.com/api/v1',

  # Logging
  'ENABLE_VERBOSE_NETWORK_LOGGING': 'true'
}

OUTPUT_DIR = 'output'
OUTPUT_FILE_PATH = os.path.join(OUTPUT_DIR, 'test-comparison.csv')
TEST_COMMAND = 'npm test -- --package %s'

SKIP_PACKAGES = [
  '@webex/bin-sauce-connect', # needs Sauce started
  # '@webex/plugin-meetings', # no tests
  # '@webex/test-helper-server' # no tests
  # '@ciscospark/internal-plugin-calendar', # no tests
  # '@ciscospark/plugin-webhooks' # no tests
]


def should_include_package(path_name, name):
    scoped_name = os.path.join(os.path.basename(path_name), name)
    return os.path.isdir(os.path.join(path_name, name)) and scoped_name not in SKIP_PACKAGES


def get_package_names(path_name):
    namespace = path_name.replace(PREFIX, '')
    return [os.path.join(namespace, name) for name in os.listdir(path_name) if should_include_package(path_name, name)]


def run_subprocess(bash_command, env_vars):
    env = os.environ.copy()
    env.update(env_vars)
    process = subprocess.Popen(bash_command.split(), stdout=subprocess.PIPE, env=env)
    output, error = process.communicate()
    return process.returncode # , output, error


class bcolors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


def print_result(return_code, prefix='Tests are a...'):
    if return_code == 0:
        print(bcolors.OKGREEN + prefix + 'success.' + bcolors.ENDC)
    else:
        print(bcolors.FAIL + prefix + 'failure.' + bcolors.ENDC)


def run_test(package, environment):
    env_vars = INT_ENV_VARS if environment is 'integration' else PROD_ENV_VARS
    print(bcolors.OKBLUE + 'Testing `%s` on %s...' % (package, environment) + bcolors.ENDC)
    bash_command = TEST_COMMAND % package
    return_code = run_subprocess(bash_command, env_vars)
    print_result(return_code, prefix='Testing `%s` on %s...'
                 % (package, environment))
    return return_code


def run_env_tests(package, writer, csv_file):
    prod_return_code = run_test(package, 'production')
    int_return_code = run_test(package, 'integration')
    writer.writerow([package, prod_return_code, int_return_code])
    csv_file.flush()


def main():
    ciscospark_packages = get_package_names(CISCOSPARK)
    webex_packages = get_package_names(WEBEX)
    packages = ciscospark_packages + webex_packages
    print ('Skipping %d packages: %s' % (len(SKIP_PACKAGES), ', '.join(SKIP_PACKAGES)))
    print('Testing %d packages...' % len(packages))

    try:
        os.mkdir(OUTPUT_DIR)
    except OSError:
        pass

    threads = []
    with open(OUTPUT_FILE_PATH, 'wb') as csv_file:
        writer = csv.writer(csv_file, quoting=csv.QUOTE_MINIMAL)
        writer.writerow(['Package', 'Production exit code', 'Integration exit code'])
        for package in packages:
            run_env_tests(package, writer, csv_file)
        # threads = [threading.Thread(target=run_env_tests, args=(package, writer, csv_file)) for package in packages]
        # for thread in threads:
        #     thread.start()
        # for thread in threads:
        #     thread.join()

    print('Wrote output to: %s' % OUTPUT_FILE_PATH)
    print('Done.')


if __name__ == "__main__":
    main()
Python
0.000001
@@ -724,16 +724,33 @@ ARS = %7B%0A + # Environments%0A 'ACL_S @@ -1059,16 +1059,79 @@ pi/v1',%0A + 'HYDRA_SERVICE_URL': 'https://hydra-intb.ciscospark.com/v1',%0A 'IDBRO
6fb6e67792085b6ee910f1d0b8ed3e89f15dd60d
add script to datamine the reports via nltk
smelly_london/all_reports_smell_search_final.py
smelly_london/all_reports_smell_search_final.py
Python
0
@@ -0,0 +1,2605 @@ +%0Afrom map import mapping%0A# walk through the os and get all files%0A# read each file in tern and go through line by line%0A# print lines that contain smell and the report name%0Afrom os import listdir%0Aimport nltk.data%0Aimport json%0A%0ASMELL_WORDS = %5B'smell', 'stench', 'stink', 'odour', 'sniff', 'effluvium'%5D%0AREPORTS_DIR = '/Users/deborah/Documents/scripts/python_work/project2016/Full Text Online'%0A%0Aglobal finalResult%0AfinalResult = %7B%7D%0A%0Adef addToDic(d, report, rDate, val):%0A d.setDefault(report, %5B%5D).append(val)%0A return d%0A%0A%0Adef getFileNames():%0A '''Retrieve file names'''%0A fileNames = %5Bf for f in listdir(REPORTS_DIR) if f.endswith('txt')%5D%0A return fileNames%0A%0A%0Adef processFile(fileName):%0A path = REPORTS_DIR + '/' + fileName%0A references = %5B%5D%0A with open(path) as f:%0A for line in f:%0A report_tokenized = tokenize(line)%0A for scentence in report_tokenized:%0A for word in SMELL_WORDS:%0A if word in scentence.lower():%0A references.append(scentence)%0A return references%0A%0A%0Adef tokenize(sentence):%0A parser = nltk.data.load('tokenizers/punkt/english.pickle')%0A result = parser.tokenize(sentence.strip())%0A return result%0A%0A%0Adef saveObject(results):%0A '''Save results dictionary as file'''%0A with open('processed_results.txt', 'w') as outfile:%0A json.dump(results, outfile)%0A%0A%0Adef performAnalysis(fileName, references):%0A '''Create the resuts output'''%0A # splits a fileName into :%5B'Acton', '1900', 'b19783358', 'txt'%5D%0A splitReport = fileName.split('.')%0A bID = splitReport%5B2%5D%0A year = splitReport%5B1%5D%0A%0A try:%0A region = mapping%5BbID%5D%0A except:%0A return%0A # print bID%0A%0A if region in finalResult:%0A nestedDic = finalResult%5Bregion%5D%0A else:%0A nestedDic = %7B%7D%0A %0A nestedDic%5Byear%5D = references%0A finalResult%5Bregion%5D = nestedDic%0A%0A # if nestedDic%5BsplitReport%5B1%5D%5D:%0A # val = nestedDic%5BsplitReport%5B1%5D%5D%0A # nestedDic%5BsplitReport%5B1%5D%5D = len(references) + val%0A # else:%0A # if len(references):%0A # nestedDic%5BsplitReport%5B1%5D%5D = len(references)%0A # # nestedDic.setDefault(splitReport%5B1%5D, 0).__add__(len(references))%0A # result%5Bregion%5D = nestedDic%0A%0A# print(result)%0A# for k,v in result.iteritems():%0A%0A%0A%0Adef main():%0A # tokenize(s)%0A fileNames = getFileNames()%0A # f1 = fileNames%5B0%5D%0A # processFile(f1)%0A fileNames = fileNames%5B:100%5D%0A for f in fileNames:%0A references = processFile(f)%0A if references:%0A performAnalysis(f, references)%0A saveObject(finalResult)%0A%0A%0Aif __name__ == '__main__':%0A main()
5b31e63043e3c3652f751d4a85e6bcdf925f797e
Create q3.py
work/q3.py
work/q3.py
Python
0.000019
@@ -0,0 +1,161 @@ +def fibonacci_number(n, m, count):%0A if count %3C= 10:%0A print(n, end=%22 %22)%0A return fibonacci_number(m, n + m, count + 1)%0A%0Afibonacci_number(0, 1, 0)%0A
0383796cb681404e6c4794f1321ad62a9945b572
add script to output all leagues of users
checkLeagues.py
checkLeagues.py
Python
0
@@ -0,0 +1,339 @@ +import settings as settings%0A%0Aimport funcs%0A%0AaccountMaps = funcs.readAccountsFile(%22accounts.txt%22)%0Adef getLeagueForAccountMap(accountMap):%0A league = funcs.getLeague(settings.regions%5BaccountMap%5B'region'%5D%5D, accountMap%5B'bnet'%5D)%0A return (accountMap%5B'redditName'%5D, league)%0A%0AnewLeagues = map(getLeagueForAccountMap, accountMaps)%0Aprint newLeagues%0A
c20cde04d1a5a2939e7f5c0953725fd043c5b849
add media migration
molo/core/migrations/0067_media_migration.py
molo/core/migrations/0067_media_migration.py
Python
0.000001
@@ -0,0 +1,977 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Adef convert_media_to_molo_media(apps, schema_editor):%0A from molo.core.models import MoloMedia, ArticlePage%0A from wagtailmedia.models import Media%0A%0A for media in Media.objects.all():%0A new_media = MoloMedia.objects.create(%0A title=media.title, file=media.file, duration=media.duration,%0A type=media.type, width=media.width,%0A height=media.height, thumbnail=media.thumbnail)%0A%0A for article in ArticlePage.objects.all():%0A for block in article.body:%0A if block.block_type is 'media' and block.value is media.id:%0A block.value = new_media.id%0A article.save()%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('core', '0066_add_custom_media_model'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(convert_media_to_molo_media),%0A %5D%0A
77b6c86359376af5eb8de63ae89d9316776b26bc
Add missing migration
tracpro/polls/migrations/0034_auto_20170323_1315.py
tracpro/polls/migrations/0034_auto_20170323_1315.py
Python
0.000008
@@ -0,0 +1,501 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('polls', '0033_auto_20170307_1338'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='pollrun',%0A name='region',%0A field=models.ForeignKey(blank=True, to='groups.Region', help_text='Panel where the poll was conducted.', null=True, verbose_name='panel'),%0A ),%0A %5D%0A
255c7ff91bc4918ce13d32cba2b871e3d0befad8
revert that url change
polio/urls.py
polio/urls.py
from django.conf.urls import patterns, include, url

from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    url(r'^$', 'polio.views.home', name='home'),

    url(r'^uf04/datapoints/', include('datapoints.app_urls.urls', namespace="datapoints")),
    url(r'^uf04/datapoints/indicators/', include('datapoints.app_urls.indicator_urls', namespace="indicators")),
    url(r'^uf04/datapoints/regions/', include('datapoints.app_urls.region_urls', namespace="regions")),

    url(r'^uf04/admin/', include(admin.site.urls)),
)
Python
0.000005
@@ -197,37 +197,32 @@ '),%0A%0A url(r'%5E -uf04/ datapoints/', in @@ -284,37 +284,32 @@ %22)),%0A url(r'%5E -uf04/ datapoints/indic @@ -400,21 +400,16 @@ url(r'%5E -uf04/ datapoin @@ -504,13 +504,8 @@ (r'%5E -uf04/ admi
5f12ada7fe0ddb44274e18decbaea0d05ab4471f
Solve Code Fights lineup problem
CodeFights/lineUp.py
CodeFights/lineUp.py
Python
0.0007
@@ -0,0 +1,840 @@ +#!/usr/local/bin/python%0A# Code Fights Lineup Problem%0A%0A%0Adef lineUp(commands):%0A aligned, tmp = 0, 0%0A com_dict = %7B%22L%22: 1, %22A%22: 0, %22R%22: -1%7D%0A for c in commands:%0A tmp += com_dict%5Bc%5D%0A if tmp %25 2 == 0:%0A aligned += 1%0A return aligned%0A%0A%0Adef main():%0A tests = %5B%0A %5B%22LLARL%22, 3%5D,%0A %5B%22RLR%22, 1%5D,%0A %5B%22%22, 0%5D,%0A %5B%22L%22, 0%5D,%0A %5B%22A%22, 1%5D,%0A %5B%22AAAAAAAAAAAAAAA%22, 15%5D,%0A %5B%22RRRRRRRRRRLLLLLLLLLRRRRLLLLLLLLLL%22, 16%5D,%0A %5B%22AALAAALARAR%22, 5%5D%0A %5D%0A%0A for t in tests:%0A res = lineUp(t%5B0%5D)%0A ans = t%5B1%5D%0A if ans == res:%0A print(%22PASSED: lineUp(%7B%7D) returned %7B%7D%22%0A .format(t%5B0%5D, res))%0A else:%0A print(%22FAILED: lineUp(%7B%7D) returned %7B%7D, answer: %7B%7D%22%0A .format(t%5B0%5D, res, ans))%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
8deb0dc2743d1d85899cb636b88ed831c05838a9
Make machine action button translatable
DiscoverUM3Action.py
DiscoverUM3Action.py
from cura.MachineAction import MachineAction
from UM.Application import Application

from PyQt5.QtCore import pyqtSignal, pyqtProperty, pyqtSlot

class DiscoverUM3Action(MachineAction):
    def __init__(self):
        super().__init__("DiscoverUM3Action", "Discover printers")
        self._qml_url = "DiscoverUM3Action.qml"

        self._network_plugin = None

    printerDetected = pyqtSignal()

    @pyqtSlot()
    def startDiscovery(self):
        if not self._network_plugin:
            self._network_plugin = Application.getInstance().getOutputDeviceManager().getOutputDevicePlugin("JediWifiPrintingPlugin")
            self._network_plugin.addPrinterSignal.connect(self._onPrinterAdded)
            self.printerDetected.emit()

    def _onPrinterAdded(self, *args):
        self.printerDetected.emit()

    @pyqtProperty("QVariantList", notify = printerDetected)
    def foundDevices(self):
        if self._network_plugin:
            printers = self._network_plugin.getPrinters()
            return [printers[printer] for printer in printers]
        else:
            return []

    @pyqtSlot(str)
    def setKey(self, key):
        global_container_stack = Application.getInstance().getGlobalContainerStack()
        if global_container_stack:
            if "key" in global_container_stack.getMetaData():
                global_container_stack.setMetaDataEntry("key", key)
            else:
                global_container_stack.addMetaDataEntry("key", key)

        if self._network_plugin:
            # Ensure that the connection states are refreshed.
            self._network_plugin.reCheckConnections()
Python
0.000016
@@ -140,16 +140,79 @@ qtSlot%0A%0A +from UM.i18n import i18nCatalog%0Acatalog = i18nCatalog(%22cura%22)%0A%0A class Di @@ -317,27 +317,54 @@ n%22, -%22Discover printers%22 +catalog.i18nc(%22@action%22,%22Connect via Network%22) )%0A
c486b8df5861fd883b49ea8118d40d73f5b4e7b8
Add download apikey test case
tardis/tardis_portal/tests/test_download_apikey.py
tardis/tardis_portal/tests/test_download_apikey.py
Python
0
@@ -0,0 +1,1661 @@ +# -*- coding: utf-8 -*-%0A%0Afrom django.core.urlresolvers import reverse%0Afrom django.test import TestCase%0Afrom tastypie.test import ResourceTestCase%0Afrom django.test.client import Client%0A%0Afrom django.conf import settings%0Afrom django.contrib.auth.models import User%0A%0Aclass ApiKeyDownloadTestCase(ResourceTestCase):%0A%0A def setUp(self):%0A # create a test user%0A self.username = 'test'%0A self.email = '[email protected]'%0A self.password = 'passw0rd'%0A self.user = User.objects.create_user(username=self.username,%0A email=self.email,%0A password=self.password)%0A%0A def tearDown(self):%0A self.user.delete()%0A%0A def testView(self):%0A download_api_key_url = reverse('tardis.tardis_portal.views.download_api_key')%0A client = Client()%0A%0A # Expect redirect to login%0A response = client.get(download_api_key_url)%0A self.assertEqual(response.status_code, 302)%0A%0A # Login as user%0A login = client.login(username=self.username, password=self.password)%0A self.assertTrue(login)%0A response = client.get(download_api_key_url)%0A self.assertEqual(response%5B'Content-Disposition'%5D,%0A 'inline; filename=%22%7B0%7D.key%22'.format(self.username))%0A self.assertEqual(response.status_code, 200)%0A response_content = %22%22%0A for c in response.streaming_content:%0A response_content += c%0A self.assertEqual(response_content,%0A self.create_apikey(username=self.username,%0A api_key=user.api_key.key))%0A
af67d052fc78e56ac7f934f4c90f00d2eb097bb3
Add StarFinder tests
photutils/detection/tests/test_starfinder.py
photutils/detection/tests/test_starfinder.py
Python
0
@@ -0,0 +1,2799 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst%0A%22%22%22%0ATests for StarFinder.%0A%22%22%22%0A%0Afrom astropy.modeling.models import Gaussian2D%0Afrom astropy.tests.helper import catch_warnings%0Aimport numpy as np%0Aimport pytest%0A%0Afrom ..starfinder import StarFinder%0Afrom ...datasets import make_100gaussians_image%0Afrom ...utils.exceptions import NoDetectionsWarning%0A%0Atry:%0A import scipy # noqa%0A HAS_SCIPY = True%0Aexcept ImportError:%0A HAS_SCIPY = False%0A%0A%0ADATA = make_100gaussians_image()%0Ay, x = np.mgrid%5B0:25, 0:25%5D%0Ag = Gaussian2D(1, 12, 12, 3, 2, theta=np.pi / 6.)%0APSF = g(x, y)%0A%0A%[email protected]('not HAS_SCIPY')%0Aclass TestStarFinder:%0A def test_starfind(self):%0A finder1 = StarFinder(10, PSF)%0A finder2 = StarFinder(30, PSF)%0A tbl1 = finder1(DATA)%0A tbl2 = finder2(DATA)%0A assert len(tbl1) %3E len(tbl2)%0A%0A def test_inputs(self):%0A with pytest.raises(ValueError):%0A StarFinder(10, PSF, min_separation=-1)%0A with pytest.raises(ValueError):%0A StarFinder(10, PSF, brightest=-1)%0A with pytest.raises(ValueError):%0A StarFinder(10, PSF, brightest=3.1)%0A%0A def test_nosources(self):%0A with catch_warnings(NoDetectionsWarning) as warning_lines:%0A finder = StarFinder(100, PSF)%0A tbl = finder(DATA)%0A assert tbl is None%0A assert 'No sources were found.' in str(warning_lines%5B0%5D.message)%0A%0A def test_min_separation(self):%0A finder1 = StarFinder(10, PSF, min_separation=0)%0A finder2 = StarFinder(10, PSF, min_separation=50)%0A tbl1 = finder1(DATA)%0A tbl2 = finder2(DATA)%0A assert len(tbl1) %3E len(tbl2)%0A%0A def test_peakmax(self):%0A finder1 = StarFinder(10, PSF, peakmax=None)%0A finder2 = StarFinder(10, PSF, peakmax=50)%0A tbl1 = finder1(DATA)%0A tbl2 = finder2(DATA)%0A assert len(tbl1) %3E len(tbl2)%0A%0A with catch_warnings(NoDetectionsWarning) as warning_lines:%0A starfinder = StarFinder(10, PSF, peakmax=5)%0A tbl = starfinder(DATA)%0A assert tbl is None%0A assert ('Sources were found, but none pass'%0A in str(warning_lines%5B0%5D.message))%0A%0A def test_brightest(self):%0A finder = StarFinder(10, PSF, brightest=10)%0A tbl = finder(DATA)%0A assert len(tbl) == 10%0A fluxes = tbl%5B'flux'%5D%0A assert fluxes%5B0%5D == np.max(fluxes)%0A%0A finder = StarFinder(40, PSF, peakmax=120)%0A tbl = finder(DATA)%0A assert len(tbl) == 1%0A%0A def test_mask(self):%0A starfinder = StarFinder(10, PSF)%0A mask = np.zeros(DATA.shape, dtype=bool)%0A mask%5B0:100%5D = True%0A tbl1 = starfinder(DATA)%0A tbl2 = starfinder(DATA, mask=mask)%0A assert len(tbl1) %3E len(tbl2)%0A assert min(tbl2%5B'ycentroid'%5D) %3E 100%0A
72203e529f083cbc9427b02348cc178e4443031c
Add new package: libuser (#18916)
var/spack/repos/builtin/packages/libuser/package.py
var/spack/repos/builtin/packages/libuser/package.py
Python
0
@@ -0,0 +1,883 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Libuser(AutotoolsPackage):%0A %22%22%22A user and group account administration library.%22%22%22%0A%0A homepage = %22https://pagure.io/libuser%22%0A url = %22http://releases.pagure.org/libuser/libuser-0.62.tar.xz%22%0A%0A version('0.62', sha256='a58ff4fabb01a25043b142185a33eeea961109dd60d4b40b6a9df4fa3cace20b')%0A version('0.61', sha256='0a114a52446e12781e2ffdf26f59df0d14e7809c7db5e551d3cf61c4e398751d')%0A version('0.60', sha256='b1f73408ebfee79eb01a47c5879a2cdef6a00b75ee24870de7df1b816ff483eb')%0A%0A depends_on('glib')%0A depends_on('linux-pam')%0A depends_on('popt')%0A%0A def setup_run_environment(self, env):%0A env.prepend_path('PATH', self.prefix.sbin)%0A
605baa80dfbf5028eb9924818c2f1d609a1d8c7a
Use private non-static method
py/selenium/webdriver/remote/errorhandler.py
py/selenium/webdriver/remote/errorhandler.py
# Copyright 2010 WebDriver committers
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from selenium.common.exceptions import ElementNotSelectableException
from selenium.common.exceptions import ElementNotVisibleException
from selenium.common.exceptions import InvalidCookieDomainException
from selenium.common.exceptions import InvalidElementStateException
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchFrameException
from selenium.common.exceptions import NoSuchWindowException
from selenium.common.exceptions import StaleElementReferenceException
from selenium.common.exceptions import UnableToSetCookieException
from selenium.common.exceptions import ErrorInResponseException
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import WebDriverException


class ErrorCode(object):
    """Error codes defined in the WebDriver wire protocol."""
    # Keep in sync with org.openqa.selenium.remote.ErrorCodes and errorcodes.h
    SUCCESS = 0
    NO_SUCH_ELEMENT = 7
    NO_SUCH_FRAME = 8
    UNKNOWN_COMMAND = 9
    STALE_ELEMENT_REFERENCE = 10
    ELEMENT_NOT_VISIBLE = 11
    INVALID_ELEMENT_STATE = 12
    UNKNOWN_ERROR = 13
    ELEMENT_IS_NOT_SELECTABLE = 15
    JAVASCRIPT_ERROR = 17
    XPATH_LOOKUP_ERROR = 19
    NO_SUCH_WINDOW = 23
    INVALID_COOKIE_DOMAIN = 24
    UNABLE_TO_SET_COOKIE = 25
    TIMEOUT = 28


class ErrorHandler(object):
    """Handles errors returned by the WebDriver server."""

    def check_response(self, response):
        """
        Checks that a JSON response from the WebDriver does not have an error.

        Args:
          response - The JSON response from the WebDriver server as a dictionary
            object.

        Raises:
          If the response contains an error message.
        """
        status = response['status']
        if status == ErrorCode.SUCCESS:
            return

        exception_class = ErrorInResponseException
        if status == ErrorCode.NO_SUCH_ELEMENT:
            exception_class = NoSuchElementException
        elif status == ErrorCode.NO_SUCH_FRAME:
            exception_class = NoSuchFrameException
        elif status == ErrorCode.NO_SUCH_WINDOW:
            exception_class = NoSuchWindowException
        elif status == ErrorCode.STALE_ELEMENT_REFERENCE:
            exception_class = StaleElementReferenceException
        elif status == ErrorCode.ELEMENT_NOT_VISIBLE:
            exception_class = ElementNotVisibleException
        elif status == ErrorCode.INVALID_ELEMENT_STATE:
            exception_class = WebDriverException
        elif status == ErrorCode.ELEMENT_IS_NOT_SELECTABLE:
            exception_class = ElementNotSelectableException
        elif status == ErrorCode.INVALID_COOKIE_DOMAIN:
            exception_class = WebDriverException
        elif status == ErrorCode.UNABLE_TO_SET_COOKIE:
            exception_class = WebDriverException
        elif status == ErrorCode.TIMEOUT:
            exception_class = TimeoutException
        elif status == ErrorCode.UNKNOWN_ERROR:
            exception_class = WebDriverException
        else:
            exception_class = WebDriverException

        value = response['value']
        if type(value) is str:
            if exception_class == ErrorInResponseException:
                raise exception_class(response, value)
            raise exception_class(value)

        message = ''
        if 'message' in value:
            message = value['message']

        screen = None
        if 'screen' in value:
            screen = value['screen']

        stacktrace = None
        if 'stackTrace' in value:
            zeroeth = value['stackTrace'][0]
            if zeroeth.has_key('methodName'):
                stacktrace = "Method %s threw an error in %s" % \
                    (zeroeth['methodName'],
                     self.value_or_default_(zeroeth, 'fileName', '[No file name]'))
        if exception_class == ErrorInResponseException:
            raise exception_class(response, message)
        raise exception_class(message, screen, stacktrace)

    @staticmethod
    def value_or_default_(obj, key, default):
        return obj[key] if obj.has_key(key) else default
Python
0
@@ -4404,16 +4404,17 @@ self. +_ value_or @@ -4421,17 +4421,16 @@ _default -_ (zeroeth @@ -4635,34 +4635,16 @@ e)%0A%0A - @staticmethod%0A def valu @@ -4639,16 +4639,17 @@ def +_ value_or @@ -4656,17 +4656,16 @@ _default -_ (obj, ke
ac09970129df9c5292344287b04a1be143fac681
add diag openmp
tests/examples/openmp/diagnostics.py
tests/examples/openmp/diagnostics.py
Python
0.000146
@@ -0,0 +1,1409 @@ +# coding: utf-8%0A%0Aimport numpy as np%0Afrom matplotlib import pyplot as plt%0A%0Adef matrix_product():%0A procs = %5B1, 4, 8, 16, 28%5D%0A times = %5B1194.849, 305.231, 69.174,37.145, 22.731%5D%0A%0A n_groups = len(procs)%0A%0A #%C2%A0...%0A fig, ax = plt.subplots()%0A index = np.arange(n_groups)%0A bar_width = 0.2%0A opacity = 0.4%0A rects1 = plt.bar(index, times, bar_width,%0A alpha=opacity,%0A color='b',%0A label='OpenMP')%0A%0A plt.xlabel('Number of Processors')%0A plt.ylabel('CPU time')%0A plt.title('Weak scaling')%0A labels = %5Bstr(i) for i in procs%5D%0A plt.xticks(index + bar_width / 2, labels)%0A plt.legend()%0A%0A plt.tight_layout()%0A plt.savefig(%22matrix_product_scalability.png%22)%0A plt.clf()%0A #%C2%A0...%0A%0A #%C2%A0...%0A speedup = %5Btimes%5B0%5D/b for b in times%5B1:%5D%5D%0A n_groups = len(speedup)%0A%0A fig, ax = plt.subplots()%0A index = np.arange(n_groups)%0A bar_width = 0.2%0A opacity = 0.4%0A rects1 = plt.bar(index, speedup, bar_width,%0A alpha=opacity,%0A color='b',%0A label='OpenMP')%0A%0A plt.xlabel('Number of Processors')%0A plt.ylabel('Speedup')%0A plt.title('Speedup')%0A labels = %5Bstr(i) for i in procs%5B1:%5D%5D%0A plt.xticks(index + bar_width / 2, labels)%0A plt.legend()%0A%0A plt.tight_layout()%0A plt.savefig(%22matrix_product_speedup.png%22)%0A plt.clf()%0A #%C2%A0...%0A%0Amatrix_product()%0A
65f6f78008d4f961c9ebe5d8047b0f2c742fe15f
Add unittest for QInputDialog.getXXX() methods
tests/qtgui/qinputdialog_get_test.py
tests/qtgui/qinputdialog_get_test.py
Python
0
@@ -0,0 +1,677 @@ +import unittest%0A%0Afrom PySide import QtCore, QtGui%0Afrom helper import UsesQApplication, TimedQApplication%0A%0Aclass TestInputDialog(TimedQApplication):%0A%0A def testGetDouble(self):%0A QtGui.QInputDialog.getDouble(None, %22title%22, %22label%22)%0A%0A def testGetInt(self):%0A QtGui.QInputDialog.getInt(None, %22title%22, %22label%22)%0A%0A def testGetInteger(self):%0A QtGui.QInputDialog.getInteger(None, %22title%22, %22label%22)%0A%0A def testGetItem(self):%0A QtGui.QInputDialog.getItem(None, %22title%22, %22label%22, QtCore.QStringList(%5B%221%22, %222%22, %223%22%5D))%0A%0A def testGetText(self):%0A QtGui.QInputDialog.getText(None, %22title%22, %22label%22)%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A%0A
52189e2161e92b36df47a04c2150dff38f81f5e9
Add mocked tests for activation
tests/unit/tests/test_activations.py
tests/unit/tests/test_activations.py
Python
0
@@ -0,0 +1,1399 @@ +from unittest import mock%0Afrom django.test import TestCase%0A%0Afrom viewflow import activation, flow%0Afrom viewflow.models import Task%0A%0A%0Aclass TestActivations(TestCase):%0A def test_start_activation_lifecycle(self):%0A flow_task_mock = mock.Mock(spec=flow.Start())%0A%0A act = activation.StartActivation()%0A act.initialize(flow_task_mock)%0A act.prepare()%0A act.done()%0A%0A act.task.prepare.assert_called_once_with()%0A act.task.done.assert_called_once_with()%0A act.process.start.assert_called_once_with()%0A flow_task_mock.activate_next.assert_any_call(act)%0A%0A def test_view_activation_activate(self):%0A flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))%0A prev_activation_mock = mock.Mock(spec=activation.StartActivation())%0A%0A act = activation.ViewActivation.activate(flow_task_mock, prev_activation_mock)%0A%0A act.task.save.assert_has_calls(())%0A%0A def test_view_activation_lifecycle(self):%0A flow_task_mock = mock.Mock(spec=flow.View(lambda *args, **kwargs: None))%0A task_mock = mock.Mock(spec=Task())%0A%0A act = activation.ViewActivation()%0A act.initialize(flow_task_mock, task_mock)%0A act.prepare()%0A act.done()%0A%0A act.task.prepare.assert_called_once_with()%0A act.task.done.assert_called_once_with()%0A flow_task_mock.activate_next.assert_any_call(act)%0A
e0df929e07e30c514b2b39f515bfd3102d1ebfe7
Add annotate experiment
Source/Git/Experiments/git_annotate.py
Source/Git/Experiments/git_annotate.py
Python
0.000007
@@ -0,0 +1,340 @@ +#!/usr/bin/python3%0Aimport sys%0A%0Aimport git%0A%0Ar = git.Repo( sys.argv%5B1%5D )%0A%0A%0Anum = 0%0A%0Afor info in r.blame( 'HEAD', sys.argv%5B2%5D ):%0A num += 1%0A commit = info%5B0%5D%0A all_lines = info%5B1%5D%0A print( '%25s %256d:%25s' %25 (commit, num, all_lines%5B0%5D) )%0A%0A for line in all_lines%5B1:%5D:%0A num += 1%0A print( '%25*s %256d:%25s' %25 (40, '', num, line) )%0A
51d581c7bca0fcacf8604b898f96394847865e15
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/e1e64a45b138980a6d8c125bacc81f22142d2b53.
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT."""

load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")

def repo():
    """Imports TFRT."""

    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "0dcdcc3f57a96bc354e66f3805dff4f619e2b93f"
    TFRT_SHA256 = "940edcaf656cbbfee314689fd7e52aaa02bd07197bd4139f24aec64eee74c7a8"

    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),

        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
Python
0.000001
@@ -228,133 +228,133 @@ = %22 -0dcdcc3f57a96bc354e66f3805dff4f619e2b93f%22%0A TFRT_SHA256 = %22940edcaf656cbbfee314689fd7e52aaa02bd07197bd4139f24aec64eee74c7a8 +e1e64a45b138980a6d8c125bacc81f22142d2b53%22%0A TFRT_SHA256 = %225afd4500e88c75188e29e68273438b849d57d800ed982bbe292325148ad3e016 %22%0A%0A
5dd31aa3cfacb6bd157d50ac3d310b8064a46b80
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/5f6e52142a3592d0cfa058dbfd140cad49ed451a.
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT."""

load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")

def repo():
    """Imports TFRT."""

    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "736eeebfb56c6d0de138f4a29286140d8c26d927"
    TFRT_SHA256 = "b584ee5ce5ecaadf289b0997987dfb5eec6cf3623f30b83028923cad20914e61"

    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),

        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
Python
0
@@ -228,133 +228,133 @@ = %22 -736eeebfb56c6d0de138f4a29286140d8c26d927%22%0A TFRT_SHA256 = %22b584ee5ce5ecaadf289b0997987dfb5eec6cf3623f30b83028923cad20914e61 +5f6e52142a3592d0cfa058dbfd140cad49ed451a%22%0A TFRT_SHA256 = %228e1efbd7df0fdeb5186b178d7c8b90c33ba80cef54999e988097bd1ff0f4e8fe %22%0A%0A
0c13207eeda65754532bab5888cc33693fb06834
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/b87ea071c60db54775b92da8e0eed8477ab96a6a.
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT."""

load("//third_party:repo.bzl", "tf_http_archive")

def repo():
    """Imports TFRT."""

    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "75318fbce7817886508abd18dd5ea3b35d552372"
    TFRT_SHA256 = "233d123e6287e105acb2b464db68b753624dfe5c27f299ff6b2dbe29ef40e9e3"

    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = [
            "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
            "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
        ],
    )
Python
0.000003
@@ -210,132 +210,132 @@ = %22 -75318fbce7817886508abd18dd5ea3b35d552372%22%0A TFRT_SHA256 = %22233d123e6287e105acb2b464db68b753624dfe5c27f299ff6b2dbe29ef40e9e +b87ea071c60db54775b92da8e0eed8477ab96a6a%22%0A TFRT_SHA256 = %2261b8951d9236a82c54be8db871cd427013ec24ae17b0e681829a634e4f0388b 3%22%0A%0A
1eb980caefcbaaa4b29f7c3d92f27e490003e208
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/9562f24de39c95b4a076f7e0a0eb79cb980a9c72.
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT."""

load("//third_party:repo.bzl", "tf_http_archive")

def repo():
    """Imports TFRT."""

    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "de22adc4126843c3cf142e0a829d153dc94cdd73"
    TFRT_SHA256 = "e345d2ae1d385ebaf41531c831bb1025cab260fe20daa5b6024c1d07c1ebfd0c"

    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = [
            "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
            "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
        ],

        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
Python
0.000006
@@ -210,133 +210,133 @@ = %22 -de22adc4126843c3cf142e0a829d153dc94cdd73%22%0A TFRT_SHA256 = %22e345d2ae1d385ebaf41531c831bb1025cab260fe20daa5b6024c1d07c1ebfd0c +9562f24de39c95b4a076f7e0a0eb79cb980a9c72%22%0A TFRT_SHA256 = %226fda4b556e5100e83ba292b8907c82f152740bb9eb157dc64e9c01ed2c4536e8 %22%0A%0A
fd01a25c0f5cb9ba75b2a659d47d1d3902242c5e
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/c3e082762b7664bbc7ffd2c39e86464928e27c0c.
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT."""

load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")

def repo():
    """Imports TFRT."""

    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "5a604f55b0d725eb537fd1a7cb6a88fcc6fd9b73"
    TFRT_SHA256 = "004f312a2c65165e301b101add213013603c8822e479b4be63e2f95a3f972ebd"

    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),

        # A patch file can be provided for atomic commits to both TF and TFRT.
        # The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
        patch_file = None,
    )
Python
0.000003
@@ -228,133 +228,133 @@ = %22 -5a604f55b0d725eb537fd1a7cb6a88fcc6fd9b73%22%0A TFRT_SHA256 = %22004f312a2c65165e301b101add213013603c8822e479b4be63e2f95a3f972ebd +c3e082762b7664bbc7ffd2c39e86464928e27c0c%22%0A TFRT_SHA256 = %229b7fabe6e786e6437bb7cd1a4bed8416da6f08969266e57945805017092900c6 %22%0A%0A
06cfa4c7055ec997dcb3aec11732ee1be5330b75
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/285e48bc47db23a479637fd1e2767b9a35dc2c9b.
third_party/tf_runtime/workspace.bzl
third_party/tf_runtime/workspace.bzl
"""Provides the repository macro to import TFRT."""

load("//third_party:repo.bzl", "tf_http_archive")

def repo():
    """Imports TFRT."""

    # Attention: tools parse and update these lines.
    TFRT_COMMIT = "962d1c7a123f01ccdb39e0d1959794f432b0ffeb"
    TFRT_SHA256 = "ce0f2f86d19850e8951514b0e3f76950d07a8dc79d053de3d7a4cf402389351a"

    tf_http_archive(
        name = "tf_runtime",
        sha256 = TFRT_SHA256,
        strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
        urls = [
            "http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
            "https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
        ],
    )
Python
0.000004
@@ -210,133 +210,133 @@ = %22 -962d1c7a123f01ccdb39e0d1959794f432b0ffeb%22%0A TFRT_SHA256 = %22ce0f2f86d19850e8951514b0e3f76950d07a8dc79d053de3d7a4cf402389351a +285e48bc47db23a479637fd1e2767b9a35dc2c9b%22%0A TFRT_SHA256 = %226f0067d0cb7bb407caeef060603b6e33f1231cddf1ce4ce2ebce027dc418764f %22%0A%0A
27cb9279670bd513a1559f4865500d84869bb9f0
Test module for Predictor class.
tests/test_predictor.py
tests/test_predictor.py
Python
0
@@ -0,0 +1,2002 @@ +#! /usr/env/bin python%0Aimport numpy as np%0A%0Afrom pyboas import predictor, models%0A%0A# Build random 3-parameter normal posterior.%0Aposterior = np.random.randn(100, 3)%0A%0A%0Adef toy_model(param, time):%0A time = np.atleast_1d(time)%5B:, np.newaxis%5D%0A%0A a = param%5B:, 0%5D%0A b = param%5B:, 1%5D%0A c = param%5B:, 2%5D%0A%0A return a*time**2 + b*time + c%0A%0A%0Adef test_basic_shape():%0A %22%22%22Test basic shape conditions on output of predictions.%22%22%22%0A time = np.random.rand(4, )%0A%0A pred1 = predictor.GaussPredictor(posterior, toy_model)%0A%0A pred1.make_prediction(time)%0A%0A # Test shape of predictive distributions and x%0A assert pred1.x.shape == pred1.predictives.shape%0A # Test len of time array and predictives%0A assert len(time) == len(pred1.predictives)%0A%0A return%0A%0A%0Adef test_time_concatenation():%0A %22%22%22%0A Test feature to concatenate prediction times over make_prediction calls.%0A %22%22%22%0A%0A # Built random time array%0A time = np.random.rand(4,)%0A%0A pred1 = predictor.GaussPredictor(posterior, toy_model)%0A pred2 = predictor.GaussPredictor(posterior, toy_model)%0A%0A # Run first predictor with full time array%0A pred1.make_prediction(time)%0A%0A # Run second predictor twice%0A pred2.make_prediction(time%5B:2%5D)%0A pred2.make_prediction(time%5B2:%5D)%0A%0A assert np.allclose(pred1.predictives, pred2.predictives)%0A assert np.allclose(pred1.x, pred2.x)%0A%0A return%0A%0A%0Adef test_sample_draw():%0A # Built random time array%0A time = np.random.rand(4, )%0A%0A pred1 = predictor.GaussPredictor(posterior, toy_model)%0A%0A pred1.samplepredictive(time, 100)%0A%0A%0Adef ok():%0A print('%5C033%5B92mOK%5C033%5B0m')%0A%0A%0Adef failed():%0A print('%5C033%5B91mFAILED%5C033%5B0m')%0A%0A%0Adef test_all():%0A print('Testing basic functioning....%5Ct'),%0A try:%0A test_basic_shape()%0A ok()%0A except AssertionError:%0A failed()%0A%0A print('Testing time concatenation....%5Ct'),%0A try:%0A test_time_concatenation()%0A ok()%0A except AssertionError:%0A failed()%0A return%0A%0Aif __name__ == '__main__':%0A test_all()%0A
34d5b5cdc058f1c9055b82151b518251fa3b4f74
Add tool to create combined smart contract files
tools/join-contracts.py
tools/join-contracts.py
Python
0
@@ -0,0 +1,1749 @@ +import os%0A%0Aimport click%0Aimport re%0Afrom click.types import File%0A%0AIMPORT_RE = re.compile(r'%5Eimport +%5B%22%5C'%5D(?P%3Ccontract%3E%5B%5E%22%5C'%5D+.sol)%5B%22%5C'%5D;$')%0A%0A%22%22%22%0AUtility to join solidity contracts into a single output file by recursively%0Aresolving imports.%0A%0Aexample usage:%0A%0A$ cd raiden/smart_contracts%0A$ python ../../tools/join-contracts.py SomeContractWithImports.sol joined.sol%0A%0A%22%22%22%0A%0A%0Aclass ContractJoiner(object):%0A def __init__(self):%0A self.have_pragma = False%0A self.seen = set()%0A%0A def join(self, contract_file):%0A out = %5B%5D%0A if contract_file.name in self.seen:%0A print('Skipping duplicate %7B%7D'.format(contract_file.name))%0A return %5B%5D%0A%0A self.seen.add(contract_file.name)%0A print('Reading %7B%7D'.format(contract_file.name))%0A%0A for line in contract_file:%0A line = line.strip('%5Cr%5Cn')%0A stripped_line = line.strip()%0A if stripped_line.startswith('pragma'):%0A if not self.have_pragma:%0A self.have_pragma = True%0A out.append(line)%0A elif stripped_line.startswith('import'):%0A match = IMPORT_RE.match(stripped_line)%0A if match:%0A next_file = match.groupdict().get('contract')%0A if next_file and os.path.exists(next_file):%0A with open(next_file) as next_contract:%0A out.extend(self.join(next_contract))%0A else:%0A out.append(line)%0A return out%0A%0A%[email protected]()%[email protected]('contract', type=File())%[email protected]('output', type=File('w'))%0Adef main(contract, output):%0A output.write(%22%5Cn%22.join(ContractJoiner().join(contract)))%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
e06416a61826229ebd0cccdc519b6dc39d8a0fd9
Add migration to remove models.
server/migrations/0088_auto_20190304_1313.py
server/migrations/0088_auto_20190304_1313.py
Python
0
@@ -0,0 +1,1371 @@ +# Generated by Django 2.1.4 on 2019-03-04 18:13%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('server', '0087_auto_20190301_1424'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterUniqueTogether(%0A name='installedupdate',%0A unique_together=set(),%0A ),%0A migrations.RemoveField(%0A model_name='installedupdate',%0A name='machine',%0A ),%0A migrations.RemoveField(%0A model_name='pendingappleupdate',%0A name='machine',%0A ),%0A migrations.AlterUniqueTogether(%0A name='updatehistory',%0A unique_together=set(),%0A ),%0A migrations.RemoveField(%0A model_name='updatehistory',%0A name='machine',%0A ),%0A migrations.AlterUniqueTogether(%0A name='updatehistoryitem',%0A unique_together=set(),%0A ),%0A migrations.RemoveField(%0A model_name='updatehistoryitem',%0A name='update_history',%0A ),%0A migrations.DeleteModel(%0A name='InstalledUpdate',%0A ),%0A migrations.DeleteModel(%0A name='PendingAppleUpdate',%0A ),%0A migrations.DeleteModel(%0A name='UpdateHistory',%0A ),%0A migrations.DeleteModel(%0A name='UpdateHistoryItem',%0A ),%0A %5D%0A
04a4d7887664753f87d6ccd0921c87160d8ced26
Create 002_gen.py
002/002_gen.py
002/002_gen.py
Python
0.000005
@@ -0,0 +1,216 @@ +#!/usr/bin/env python %0A%0Adef fibonatti(n_max=4000000):%0A f1, f2 = 1, 1%0A while f2 %3C= n_max:%0A yield f2%0A f2 += f1%0A f1 = f2 - f1%0A%0Aanswer = sum(f for f in fibonatti() if f %25 2 == 0)%0Aprint(answer)%0A
92f88fb9021094f1429f5175d01a354c4ad35880
add initial gyp to build freetype lib (problems with cflags not showing up in xcode)
gyp/freetype.gyp
gyp/freetype.gyp
Python
0.000002
@@ -0,0 +1,2359 @@ +%7B%0A# 'includes': %5B%0A# 'common.gypi',%0A# %5D,%0A 'targets': %5B%0A %7B%0A 'target_name': 'skfreetype',%0A 'type': 'static_library',%0A 'sources': %5B%0A '../third_party/freetype/src/base/ftbbox.c',%0A '../third_party/freetype/src/base/ftbitmap.c',%0A '../third_party/freetype/src/base/ftglyph.c',%0A '../third_party/freetype/src/base/ftlcdfil.c',%0A '../third_party/freetype/src/base/ftstroke.c',%0A '../third_party/freetype/src/base/ftxf86.c',%0A '../third_party/freetype/src/base/ftbase.c',%0A '../third_party/freetype/src/base/ftsystem.c',%0A '../third_party/freetype/src/base/ftinit.c',%0A '../third_party/freetype/src/base/ftgasp.c',%0A '../third_party/freetype/src/base/ftfstype.c',%0A '../third_party/freetype/src/raster/raster.c',%0A '../third_party/freetype/src/sfnt/sfnt.c',%0A '../third_party/freetype/src/smooth/smooth.c',%0A '../third_party/freetype/src/autofit/autofit.c',%0A '../third_party/freetype/src/truetype/truetype.c',%0A '../third_party/freetype/src/cff/cff.c',%0A '../third_party/freetype/src/psnames/psnames.c',%0A '../third_party/freetype/src/pshinter/pshinter.c',%0A%0A# added for linker%0A '../third_party/freetype/src/lzw/ftlzw.c',%0A '../third_party/freetype/src/gzip/ftgzip.c',%0A '../third_party/freetype/src/cid/type1cid.c',%0A '../third_party/freetype/src/bdf/bdf.c',%0A '../third_party/freetype/src/psaux/psaux.c',%0A '../third_party/freetype/src/pcf/pcf.c',%0A '../third_party/freetype/src/pfr/pfr.c',%0A '../third_party/freetype/src/type1/type1.c',%0A '../third_party/freetype/src/type42/type42.c',%0A '../third_party/freetype/src/winfonts/winfnt.c',%0A %5D,%0A 'include_dirs': %5B%0A '../third_party/freetype/internal',%0A '../third_party/freetype/builds',%0A '../third_party/freetype/include',%0A '../third_party/freetype',%0A %5D,%0A 'cflags': %5B%0A '-W',%0A '-Wall',%0A '-fPIC',%0A '-DPIC',%0A '-DDARWIN_NO_CARBON',%0A '-DFT2_BUILD_LIBRARY',%0A %5D,%0A 'direct_dependent_settings': %7B%0A 'include_dirs': %5B%0A '../third_party/freetype/include', # For ft2build.h%0A %5D,%0A %7D,%0A %7D,%0A %5D,%0A%7D%0A%0A# Local Variables:%0A# tab-width:2%0A# indent-tabs-mode:nil%0A# End:%0A# vim: set expandtab tabstop=2 shiftwidth=2:%0A
66137a8710bf3b778c860af8d6278ee0c97bbab4
Add script to delete unused users on JupyterHub
scripts/delete-unused-users.py
scripts/delete-unused-users.py
Python
0
@@ -0,0 +1,1831 @@ +#!/usr/bin/env python3%0A%22%22%22%0ADelete unused users from a JupyterHub.%0A%0AJupyterHub performance sometimes scales with *total* number%0Aof users, rather than running number of users. While that should%0Abe fixed, we can work around it by deleting unused users once in%0Aa while. This script will delete anyone who hasn't registered%0Aany activity in a given period of time, double checking to%0Amake sure they aren't active right now. This will require users to%0Alog in again the next time they use the hub, but that's probably%0Aok.%0A%22%22%22%0Aimport argparse%0Afrom jhub_client.api import JupyterHubAPI%0Afrom dateutil.parser import parse%0Aimport asyncio%0Afrom datetime import timedelta, datetime%0A%0Aasync def main():%0A argparser = argparse.ArgumentParser()%0A argparser.add_argument(%0A 'hub_url',%0A help='Fully qualified URL to the JupyterHub'%0A )%0A args = argparser.parse_args()%0A%0A to_delete = %5B%5D%0A async with JupyterHubAPI(hub_url=args.hub_url) as hub:%0A users = await hub.list_users()%0A for user in users:%0A last_activity_str = user.get('last_activity', False)%0A if last_activity_str:%0A try:%0A last_activity = parse(user%5B'last_activity'%5D)%0A except:%0A print(user%5B'last_activity'%5D)%0A raise%0A if last_activity and datetime.now().astimezone() - last_activity %3C timedelta(hours=24) and user%5B'server'%5D is not None:%0A print(f%22Not deleting %7Buser%5B'name'%5D%7D%22)%0A else:%0A to_delete.append(user%5B'name'%5D)%0A print(f%22Deleting %7Buser%5B'name'%5D%7D%22)%0A%0A for i, username in enumerate(to_delete):%0A print(f'%7Bi+1%7D of %7Blen(to_delete)%7D: deleting %7Busername%7D')%0A await hub.delete_user(username)%0A%0Aif __name__ == '__main__':%0A asyncio.run(main())%0A
ad6aa623bbd8f316ab7fb8c389d1c9c74b17ae8c
add util module for converting an update job into xml
rpath_repeater/utils/update_job_formatter.py
rpath_repeater/utils/update_job_formatter.py
Python
0
@@ -0,0 +1,3128 @@ +#!/usr/bin/python%0A#%0A# Copyright (c) 2012 rPath, Inc.%0A#%0A# This program is distributed under the terms of the Common Public License,%0A# version 1.0. A copy of this license should have been distributed with this%0A# source file in a file called LICENSE. If it is not present, the license%0A# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# without any warranty; without even the implied warranty of merchantability%0A# or fitness for a particular purpose. See the Common Public License for%0A# full details.%0A#%0A%0Afrom xml.etree import cElementTree as etree%0A%0Aclass Formatter(object):%0A __slots__ = %5B 'jobs', 'root', 'changes' %5D%0A def __init__(self, updateJob):%0A self.jobs = %5B%5D%0A if updateJob is not None:%0A self.jobs = updateJob.getJobs()%0A self.root = None%0A self.changes = None%0A%0A def format(self):%0A self.root = etree.Element('preview')%0A self.changes = etree.SubElement(self.root, 'conary_package_changes')%0A for oneJob in self.jobs:%0A for j in oneJob:%0A self._formatJob(j)%0A%0A def toxml(self):%0A return etree.tostring(self.root)%0A%0A def _formatJob(self, job):%0A (name, (oldVer, oldFla), (newVer, newFla)) = job%5B:3%5D%0A if oldVer is None:%0A self._formatInstall(name, newVer, newFla)%0A elif newVer is None:%0A self._formatErase(name, oldVer, oldFla)%0A else:%0A self._formatUpdate(name, oldVer, oldFla, newVer, newFla)%0A%0A def _formatInstall(self, name, version, flavor):%0A node = self._newPackageChange('added')%0A self._packageSpec(node, 'added_conary_package', name, version, flavor)%0A%0A def _formatErase(self, name, version, flavor):%0A node = self._newPackageChange('removed')%0A self._packageSpec(node, 'removed_conary_package', name, version, flavor)%0A%0A def _formatUpdate(self, name, oldVersion, oldFlavor, newVersion, newFlavor):%0A node = self._newPackageChange('changed')%0A self._packageSpec(node, 'from', name, oldVersion, oldFlavor)%0A self._packageSpec(node, 'to', name, newVersion, newFlavor)%0A diff = etree.SubElement(node, 'conary_package_diff')%0A self._fieldDiff(diff, 'version', oldVersion, newVersion)%0A self._fieldDiff(diff, 'flavor', oldFlavor, newFlavor)%0A%0A def _newPackageChange(self, type):%0A node = etree.SubElement(self.changes, 'conary_package_change')%0A etree.SubElement(node, 'type').text = type%0A return node%0A%0A def _packageSpec(self, parent, tag, name, version, flavor):%0A node = etree.SubElement(parent, tag)%0A etree.SubElement(node, 'name').text = str(name)%0A etree.SubElement(node, 'version').text = str(version)%0A etree.SubElement(node, 'flavor').text = str(flavor)%0A return node%0A%0A def _fieldDiff(self, parent, tag, oldValue, newValue):%0A if oldValue == newValue:%0A return%0A node = etree.SubElement(parent, tag)%0A etree.SubElement(node, 'from').text = str(oldValue)%0A etree.SubElement(node, 'to').text = str(newValue)%0A%0A
7b14028f3796981974b6d01b98277326123c0395
add get_flatpage template tag
core/templatetags/get_flatpage.py
core/templatetags/get_flatpage.py
Python
0
@@ -0,0 +1,1709 @@ +from django import template%0Afrom django.core.exceptions import ObjectDoesNotExist%0Afrom django.conf import settings%0Afrom django.contrib.flatpages.models import FlatPage%0Afrom django.contrib.sites.models import get_current_site%0A%0A%0Aregister = template.Library()%0A%0A%0Aclass FlatpageNode(template.Node):%0A def __init__(self, context_name, url):%0A self.context_name = context_name%0A self.url = template.Variable(url)%0A%0A def render(self, context):%0A if 'request' in context:%0A site_pk = get_current_site(context%5B'request'%5D).pk%0A else:%0A site_pk = settings.SITE_ID%0A try:%0A flatpage = FlatPage.objects.get(sites__id=site_pk, url=self.url.resolve(context))%0A except ObjectDoesNotExist:%0A flatpage = FlatPage(url=self.url.resolve(context))%0A%0A context%5Bself.context_name%5D = flatpage%0A return ''%0A%0A%[email protected]%0Adef get_flatpage(parser, token):%0A %22%22%22%0A Retrieves the flatpage object for the specified url%0A Syntax::%0A %7B%25 get_flatpages %5B'url'%5D as context_name %25%7D%0A Example usage::%0A %7B%25 get_flatpages '/about/' as about_page %25%7D%0A %22%22%22%0A bits = token.split_contents()%0A syntax_message = (%22%25(tag_name)s expects a syntax of %25(tag_name)s %22%0A %22%5B'url'%5D as context_name%22 %25%0A dict(tag_name=bits%5B0%5D))%0A # Must have at 3-6 bits in the tag%0A if len(bits) == 4:%0A%0A # The very last bit must be the context name%0A if bits%5B-2%5D != 'as':%0A raise template.TemplateSyntaxError(syntax_message)%0A context_name = bits%5B-1%5D%0A%0A url = bits%5B1%5D%0A%0A return FlatpageNode(context_name, url)%0A else:%0A raise template.TemplateSyntaxError(syntax_message)%0A
b8a07ce36cfeb2679ace05b26d6adc1e525d6044
Add feature computation module
husc/features.py
husc/features.py
Python
0.000001
@@ -0,0 +1,2796 @@ +import functools as fun%0Aimport numpy as np%0Afrom scipy.stats.mstats import mquantiles%0Afrom scipy import ndimage as nd%0Afrom skimage import feature, color, io as imio, img_as_float, %5C%0A morphology as skmorph%0Afrom skimage import filter as imfilter, measure%0A%0A%0A%0Adef lab_hist(rgb_image, **kwargs):%0A return np.histogram(color.rgb2lab(rgb_image), **kwargs)%0A%0A%0A# threshold and labeling number of objects, statistics about object size and%0A# shape%0Adef intensity_object_features(im, adaptive_t_radius=51):%0A %22%22%22Segment objects based on intensity threshold and compute properties.%0A%0A Parameters%0A ----------%0A im : 2D np.ndarray of float or uint8.%0A The input image.%0A adaptive_t_radius : int, optional%0A The radius to calculate background with adaptive threshold.%0A%0A Returns%0A -------%0A f : 1D np.ndarray of float%0A The feature vector.%0A %22%22%22%0A tim1 = im %3E imfilter.threshold_otsu(im)%0A f1 = object_features(tim1, im)%0A tim2 = imfilter.threshold_adaptive(im, adaptive_t_radius)%0A f2 = object_features(tim2, im)%0A f = np.concatenate(%5Bf1, f2%5D)%0A return f%0A%0A%0Adef object_features(bin_im, im, erode=2):%0A %22%22%22Compute features about objects in a binary image.%0A%0A Parameters%0A ----------%0A bin_im : 2D np.ndarray of bool%0A The image of objects.%0A im : 2D np.ndarray of float or uint8%0A The actual image.%0A erode : int, optional%0A Radius of erosion of objects.%0A%0A Returns%0A -------%0A f : 1D np.ndarray of float%0A The feature vector.%0A %22%22%22%0A selem = skmorph.disk(erode)%0A if erode %3E 0:%0A bin_im = nd.binary_erosion(bin_im, selem)%0A lab_im, n_objs = nd.label(bin_im)%0A if erode %3E 0:%0A lab_im = nd.grey_dilate(lab_im, footprint=selem)%0A feats = measure.regionprops(lab_im,%0A %5B'Area', 'Eccentricity', 'EulerNumber',%0A 'Extent', 'MinIntensity', 'MeanIntensity',%0A 'MaxIntensity', 'Solidity'%5D,%0A intensity_image=im)%0A feats = np.array(%5Bprops.values() for props in feats%5D, np.float)%0A feature_quantiles = mquantiles(feats, %5B0.05, 0.25, 0.5, 0.75, 0.95%5D,%0A axis=0)%0A f = np.concatenate(%5Bnp.array(%5Bn_objs%5D, np.float),%0A feature_quantiles.ravel()%5D)%0A return f%0A%0A%0Afull_feature_list = %5C%0A %5Bfun.partial(np.histogram, bins=16, range=(0.0, 1.0)),%0A fun.partial(lab_hist, bins=16, range=(0.0, 1.0)),%0A feature.hog%0A %5D%0A # TO-DO: add segmentation features%0A%0A%0Adef image_feature_vector(im, feature_list=None):%0A if type(im) == str:%0A im = img_as_float(imio.imread(im))%0A if feature_list is None:%0A feature_list = full_feature_list%0A features = np.concatenate(%5Bf(im) for f in feature_list%5D)%0A return features%0A
f16a7e43ce4d9dc82fd4bfca34d80f0447bd57db
add isStaffOrReadOnly permissions
treeherder/webapp/api/permissions.py
treeherder/webapp/api/permissions.py
Python
0.00003
@@ -0,0 +1,466 @@ +from rest_framework.permissions import BasePermission%0Afrom rest_framework.permissions import SAFE_METHODS%0A%0A%0Aclass IsStaffOrReadOnly(BasePermission):%0A %22%22%22%0A The request is authenticated as an admin staff (eg. sheriffs), or is a read-only request.%0A %22%22%22%0A%0A def has_permission(self, request, view):%0A return (request.method in SAFE_METHODS or%0A request.user and%0A request.user.is_authenticated() and%0A request.user.is_staff)
5a77678a44ec9838e943b514a586dbd96b8bdfdc
Add migration for license change
modelview/migrations/0042_auto_20171215_0953.py
modelview/migrations/0042_auto_20171215_0953.py
Python
0
@@ -0,0 +1,659 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.5 on 2017-12-15 08:53%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('modelview', '0041_merge_20171211_1420'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='basicfactsheet',%0A name='license',%0A field=models.CharField(choices=%5B('MIT Licence', 'MIT Licence'), ('Apache Licence', 'Apache Licence'), ('GNU GPL Licence', 'GNU GPL Licence'), ('Other', 'Other'), ('Unknown', 'Unknown')%5D, default='Unknown', max_length=20, verbose_name='License'))%0A %5D%0A
5522285af9179441e56f65405037bb3a4c1c1274
Revert "Important fixes"
soccer/gameplay/plays/testing/triangle_pass.py
soccer/gameplay/plays/testing/triangle_pass.py
Python
0
@@ -0,0 +1,3063 @@ +import robocup%0Aimport play%0Aimport behavior%0Aimport skills.move%0Aimport skills.capture%0Aimport tactics.coordinated_pass%0Aimport constants%0Aimport main%0Aimport enum%0A%0A%0A## A demo play written during a teaching session to demonstrate play-writing%0A# Three robots form a triangle on the field and pass the ball A-%3EB-%3EC-%3EA and so on.%0Aclass TrianglePass(play.Play):%0A class State(enum.Enum):%0A ## 2 robots get on the corners of a triangle,%0A # while a third fetches the ball%0A setup = 1%0A%0A ## The robots continually pass to each other%0A passing = 2%0A%0A def __init__(self):%0A super().__init__(continuous=True)%0A%0A # register states - they're both substates of %22running%22%0A self.add_state(TrianglePass.State.setup,%0A behavior.Behavior.State.running)%0A self.add_state(TrianglePass.State.passing,%0A behavior.Behavior.State.running)%0A%0A self.add_transition(behavior.Behavior.State.start,%0A TrianglePass.State.setup, lambda: True,%0A 'immediately')%0A self.add_transition(TrianglePass.State.setup,%0A TrianglePass.State.passing,%0A lambda: self.all_subbehaviors_completed(),%0A 'all subbehaviors completed')%0A%0A self.triangle_points = %5B%0A robocup.Point(0, constants.Field.Length / 2.0),%0A robocup.Point(constants.Field.Width / 4,%0A constants.Field.Length / 4),%0A robocup.Point(-constants.Field.Width / 4,%0A constants.Field.Length / 4),%0A %5D%0A%0A def on_enter_setup(self):%0A closestPt = min(self.triangle_points,%0A key=lambda pt: pt.dist_to(main.ball().pos))%0A%0A otherPts = list(self.triangle_points)%0A otherPts.remove(closestPt)%0A%0A self.add_subbehavior(skills.move.Move(otherPts%5B0%5D), 'move1')%0A self.add_subbehavior(skills.move.Move(otherPts%5B1%5D), 'move2')%0A self.add_subbehavior(skills.capture.Capture(), 'capture')%0A%0A def on_exit_setup(self):%0A self.remove_all_subbehaviors()%0A%0A def execute_passing(self):%0A # If we had a pass in progress before and it finished, remove it%0A if self.has_subbehaviors():%0A if self.all_subbehaviors()%5B0%5D.is_done_running():%0A self.remove_all_subbehaviors()%0A%0A # if we're not currently passing, start a new pass%0A if not self.has_subbehaviors():%0A # pick pass from and to points%0A kickFrom = min(self.triangle_points,%0A key=lambda pt: pt.dist_to(main.ball().pos))%0A kickFromIdx = self.triangle_points.index(kickFrom)%0A kickToIdx = (kickFromIdx + 1) %25 len(self.triangle_points)%0A kickToPt = self.triangle_points%5BkickToIdx%5D%0A%0A # add the pass subbehavior%0A self.add_subbehavior(%0A tactics.coordinated_pass.CoordinatedPass(kickToPt), 'pass')%0A%0A def on_exit_passing(self):%0A self.remove_all_subbehaviors()%0A
0a4c100f9fb6e7540320fb7c55aeebdffe91c6d1
add primenumber.py
primenumber.py
primenumber.py
Python
0.998844
@@ -0,0 +1,251 @@ +lower = int(input(%22Enter lower range: %22))%0Aupper = int(input(%22Enter upper range: %22))%0A%0Afor num in range(lower,upper + 1):%0A if num %3E 1:%0A for i in range(2,num):%0A if (num %25 i) == 0:%0A break%0A else:%0A print(num)%0A
64ced324f05de20f839782913cfb13d147d49dd6
create a scheduler example file to test on live
code-samples/web_scraper/jared/scheduling_script.py
code-samples/web_scraper/jared/scheduling_script.py
Python
0
@@ -0,0 +1,1000 @@ +from time import sleep%0Afrom apscheduler.schedulers.background import BackgroundScheduler as Scheduler%0Aimport logging%0Aimport datetime%0A%0A# create a scheduler%0As = Scheduler()%0A%0A# This is what I want to happen%0Adef job():%0A%0A%09logging.basicConfig(filename='scheduled_task.log',level=logging.INFO,%0A %09%09%09%09%09format='%25(asctime)s %25(message)s line: %25(lineno)d')%0A%0A%09try:%0A%09%09logging.info( %22scheduled event%22)%0A%09except Exception as e:%0A%09%09print(%22open file failed%22)%0A%0Adef main():%0A%09newTime = datetime.datetime.now() + datetime.timedelta(seconds = 2)%0A%09s.add_job(job, 'cron', hour='0-23')%0A%09s.start()%0A%0A%09try:%0A # This is here to simulate application activity (which keeps the main thread alive).%0A%09%09while True:%0A%09%09%09sleep(2)%0A%0A%09except (KeyboardInterrupt, SystemExit):%0A # Not strictly necessary if daemonic mode is enabled but should be done if possible%0A%09%09scheduler.shutdown()%0A%0Aif __name__ == %22__main__%22:%0A%09main()%0A%0A%0A# Running a python script with python script & will fork that process immediately, so you can close the terminal.
a9a6a3dafc8901ffeeb89862fdc79f7099ba311a
Add UTF-8 test
test/test_utf8.py
test/test_utf8.py
Python
0.000569
@@ -0,0 +1,919 @@ +# -*- coding: utf-8 -*-%0A# Monary - Copyright 2011-2014 David J. C. Beach%0A# Please see the included LICENSE.TXT and NOTICE.TXT for licensing information.%0A%0Aimport pymongo%0A%0Aimport monary%0A%0Adef setup():%0A with pymongo.Connection(%22127.0.0.1%22) as c:%0A c.drop_database(%22monary_test%22)%0A c.monary_test.data.insert(%7B%22test%22 : u%22a%E3%81%82%22%7D)%0A c.monary_test.data.insert(%7B%22test%22 : u%22%C3%A2%C3%A9%C3%87%22%7D)%0A c.monary_test.data.insert(%7B%22test%22 : u%22%CE%B1%CE%BB%CE%A9%22%7D)%0A%0Adef teardown():%0A with pymongo.Connection(%22127.0.0.1%22) as c:%0A c.drop_database(%22monary_test%22)%0A%0Adef test_utf8():%0A with monary.Monary(%22127.0.0.1%22) as m:%0A %5Bdata%5D = m.query(%22monary_test%22,%0A %22data%22,%0A %7B%7D,%0A %5B%22test%22%5D,%0A %5B%22string:8%22%5D,%0A sort=%22sequence%22)%0A%0A expected = %5B%22a%E3%81%82%22, %22%C3%A2%C3%A9%C3%87%22, %22%CE%B1%CE%BB%CE%A9%22%5D%0A for x, y in zip(data, expected):%0A assert x == y%0A
6740c6192ab9bf37767230981b86e446486d4c43
implement basic plugin loader for laser
mythril/laser/ethereum/plugins/plugin_loader.py
mythril/laser/ethereum/plugins/plugin_loader.py
Python
0
@@ -0,0 +1,1092 @@ +from mythril.laser.ethereum.svm import LaserEVM%0Afrom mythril.laser.ethereum.plugins.plugin import LaserPlugin%0A%0A%0Aclass LaserPluginLoader:%0A %22%22%22%0A The LaserPluginLoader is used to abstract the logic relating to plugins.%0A Components outside of laser thus don't have to be aware of the interface that plugins provide%0A %22%22%22%0A def __init__(self, symbolic_vm: LaserEVM):%0A %22%22%22 Initializes the plugin loader%0A%0A :param symbolic_vm: symbolic virtual machine to load plugins for%0A %22%22%22%0A self.symbolic_vm = symbolic_vm%0A self.laser_plugins = %5B%5D%0A%0A def load(self, laser_plugin: LaserPlugin):%0A %22%22%22 Loads the plugin%0A%0A :param laser_plugin: plugin that will be loaded in the symbolic virtual machine%0A %22%22%22%0A laser_plugin.initialize(self.symbolic_vm)%0A self.laser_plugins.append(laser_plugin)%0A%0A def is_enabled(self, laser_plugin: LaserPlugin):%0A %22%22%22 Returns whether the plugin is loaded in the symbolic_vm%0A%0A :param laser_plugin: plugin that will be checked%0A %22%22%22%0A return laser_plugin in self.laser_plugins%0A
a01f4d47410ee1bf164d8b962f6337f8c39f0d16
add quicksort recursive
sort/quick_sort/python/quicksort-recusive.py
sort/quick_sort/python/quicksort-recusive.py
Python
0.000114
@@ -0,0 +1,449 @@ +%0Adef quickSort(arr):%0A sort(arr,0,len(arr)-1) %0A%0Adef sort(arr, low, high):%0A if (low %3C high):%0A p = partition(arr, low, high)%0A sort(arr, low, p - 1)%0A sort(arr, p + 1, high)%0A%0Adef partition(arr, low, high):%0A pivot = arr%5Bhigh%5D%0A i = (low - 1) %0A for j in range(low,high):%0A if (arr%5Bj%5D %3C= pivot):%0A i+= 1%0A arr%5Bi%5D,arr%5Bj%5D = arr%5Bj%5D,arr%5Bi%5D%0A arr%5Bi+1%5D,arr%5Bhigh%5D = arr%5Bhigh%5D,arr%5Bi+1%5D%0A return i + 1
e6b381a617808c500e115d5e3715dc2ae454e896
Add command line tool
src/psd_tools2/__main__.py
src/psd_tools2/__main__.py
Python
0.00002
@@ -0,0 +1,1856 @@ +from __future__ import unicode_literals%0Aimport logging%0Aimport docopt%0A%0Afrom psd_tools2 import PSDImage%0Afrom psd_tools2.version import __version__%0A%0Atry:%0A from IPython.lib.pretty import pprint%0Aexcept ImportError:%0A from pprint import pprint%0A%0Alogger = logging.getLogger(__name__)%0Alogger.addHandler(logging.StreamHandler())%0A%0A%0Adef main():%0A %22%22%22%0A psd-tools command line utility.%0A%0A Usage:%0A psd-tools export %3Cinput_file%3E %3Coutput_file%3E %5Boptions%5D%0A psd-tools show %3Cinput_file%3E %5Boptions%5D%0A psd-tools debug %3Cinput_file%3E %5Boptions%5D%0A psd-tools -h %7C --help%0A psd-tools --version%0A%0A Options:%0A -v --verbose Be more verbose.%0A%0A Example:%0A psd-tools show example.psd # Show the file content%0A psd-tools export example.psd example.png # Export as PNG%0A psd-tools export example.psd%5B0%5D example-0.png # Export layer as PNG%0A %22%22%22%0A%0A args = docopt.docopt(main.__doc__, version=__version__)%0A%0A if args%5B'--verbose'%5D:%0A logger.setLevel(logging.DEBUG)%0A else:%0A logger.setLevel(logging.INFO)%0A%0A if args%5B'export'%5D:%0A input_parts = args%5B'%3Cinput_file%3E'%5D.split('%5B')%0A input_file = input_parts%5B0%5D%0A if len(input_parts) %3E 1:%0A indices = %5Bint(x.rstrip('%5D')) for x in input_parts%5B1:%5D%5D%0A else:%0A indices = %5B%5D%0A layer = PSDImage.open(input_file)%0A for index in indices:%0A layer = layer%5Bindex%5D%0A if isinstance(layer, PSDImage) and layer.has_preview():%0A image = layer.topil()%0A else:%0A image = layer.compose()%0A image.save(args%5B'%3Coutput_file%3E'%5D)%0A%0A elif args%5B'show'%5D:%0A psd = PSDImage.open(args%5B'%3Cinput_file%3E'%5D)%0A pprint(psd)%0A%0A elif args%5B'debug'%5D:%0A psd = PSDImage.open(args%5B'%3Cinput_file%3E'%5D)%0A pprint(psd._psd)%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
bce910100fe0c3970b82d4f5544f11ce3392bc3c
Remove NoQueueMinCycleTime nonsense from sync worker
sync_worker.py
sync_worker.py
from datetime import datetime, timedelta
import os
print("Sync worker %s booting at %s" % (os.getpid(), datetime.now()))
from tapiriik.requests_lib import patch_requests_with_default_timeout, patch_requests_source_address
from tapiriik import settings
from tapiriik.database import db, close_connections
import time
import signal
import sys
import subprocess
import socket

Run = True
RecycleInterval = 2 # Time spent rebooting workers < time spent wrangling Python memory management.
NoQueueMinCycleTime = timedelta(seconds=30) # No need to hammer the database given the number of sync workers I have

oldCwd = os.getcwd()
WorkerVersion = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, cwd=os.path.dirname(__file__)).communicate()[0].strip()
os.chdir(oldCwd)

def sync_interrupt(signal, frame):
    global Run
    Run = False

signal.signal(signal.SIGINT, sync_interrupt)
signal.signal(signal.SIGUSR2, sync_interrupt)

def sync_heartbeat(state, user=None):
    db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"$set": {"Heartbeat": datetime.utcnow(), "State": state, "User": user}})

print("Sync worker " + str(os.getpid()) + " initialized at " + str(datetime.now()))
db.sync_workers.update({"Process": os.getpid(), "Host": socket.gethostname()}, {"Process": os.getpid(), "Heartbeat": datetime.utcnow(), "Startup": datetime.utcnow(), "Version": WorkerVersion, "Host": socket.gethostname(), "Index": settings.WORKER_INDEX, "State": "startup"}, upsert=True)
sys.stdout.flush()

patch_requests_with_default_timeout(timeout=60)

if isinstance(settings.HTTP_SOURCE_ADDR, list):
    settings.HTTP_SOURCE_ADDR = settings.HTTP_SOURCE_ADDR[settings.WORKER_INDEX % len(settings.HTTP_SOURCE_ADDR)]

patch_requests_source_address((settings.HTTP_SOURCE_ADDR, 0))

print(" -> Index %s\n -> Interface %s" % (settings.WORKER_INDEX, settings.HTTP_SOURCE_ADDR))

# We defer including the main body of the application till here so the settings aren't captured before we've set them up.
# The better way would be to defer initializing services until they're requested, but it's 10:30 and this will work just as well.
from tapiriik.sync import Sync

Sync.InitializeWorkerBindings()

while Run:
    cycleStart = datetime.utcnow() # Avoid having synchronization fall down during DST setback
    processed_user_count = Sync.PerformGlobalSync(heartbeat_callback=sync_heartbeat, version=WorkerVersion)
    RecycleInterval -= processed_user_count
    # When there's no queue, all the workers sit sending 1000s of the queries to the database server
    if processed_user_count == 0:
        # Put this before the recycle shutdown, otherwise it'll quit and get rebooted ASAP
        remaining_cycle_time = NoQueueMinCycleTime - (datetime.utcnow() - cycleStart)
        if remaining_cycle_time > timedelta(0):
            print("Pausing for %ss" % remaining_cycle_time.total_seconds())
            sync_heartbeat("idle-spin")
            time.sleep(remaining_cycle_time.total_seconds())
    if RecycleInterval <= 0:
        break
    sync_heartbeat("idle")

print("Sync worker shutting down cleanly")
db.sync_workers.remove({"Process": os.getpid(), "Host": socket.gethostname()})
print("Closing database connections")
close_connections()
sys.stdout.flush()
Python
0.000001
@@ -482,125 +482,8 @@ ent. -%0ANoQueueMinCycleTime = timedelta(seconds=30) # No need to hammer the database given the number of sync workers I have %0A%0Aol @@ -2100,16 +2100,41 @@ ings()%0A%0A +sync_heartbeat(%22ready%22)%0A%0A while Ru @@ -2339,16 +2339,16 @@ ersion)%0A + Recy @@ -2387,545 +2387,8 @@ unt%0A - # When there's no queue, all the workers sit sending 1000s of the queries to the database server%0A if processed_user_count == 0:%0A # Put this before the recycle shutdown, otherwise it'll quit and get rebooted ASAP%0A remaining_cycle_time = NoQueueMinCycleTime - (datetime.utcnow() - cycleStart)%0A if remaining_cycle_time %3E timedelta(0):%0A print(%22Pausing for %25ss%22 %25 remaining_cycle_time.total_seconds())%0A sync_heartbeat(%22idle-spin%22)%0A time.sleep(remaining_cycle_time.total_seconds())%0A
6d134c2a870150477ecc41edbab272e75462bbcd
Add benchmark script
tests/bench.py
tests/bench.py
Python
0.000001
@@ -0,0 +1,1260 @@ +%0Aimport os%0Aimport re%0Aimport time%0A%0Aroot = os.path.dirname(__file__)%0A%0Aknown = %5B%5D%0A%0A%0Adef listdir(folder):%0A folder = os.path.join(root, folder)%0A files = os.listdir(folder)%0A files = filter(lambda o: o.endswith('.text'), files)%0A return files%0A%0A%0Adef mistune_runner(content):%0A import mistune%0A return mistune.markdown(content)%0A%0A%0Adef misaka_runner(content):%0A import misaka%0A extensions = (%0A misaka.EXT_NO_INTRA_EMPHASIS %7C misaka.EXT_TABLES %7C%0A misaka.EXT_FENCED_CODE %7C misaka.EXT_AUTOLINK %7C%0A misaka.EXT_STRIKETHROUGH%0A )%0A md = misaka.Markdown(misaka.HtmlRenderer(), extensions=extensions)%0A return md.render(content)%0A%0A%0Adef bench(runner=None):%0A cases = %5B%5D%0A%0A for name in listdir('cases'):%0A with open(os.path.join(root, 'cases', name), 'r') as f:%0A cases.append(f.read())%0A%0A for name in listdir('extra'):%0A with open(os.path.join(root, 'extra', name), 'r') as f:%0A cases.append(f.read())%0A%0A if runner is None:%0A runner = mistune_runner%0A%0A begin = time.time()%0A count = 100%0A while count:%0A count -= 1%0A for text in cases:%0A runner(text)%0A%0A end = time.time()%0A return end - begin%0A%0Aprint('misaka', bench(misaka_runner))%0Aprint('mistune', bench())%0A
f5970d1488d28f27c5f20dd11619187d0c13c960
Add simple windows registry read/write functions
os/win_registry.py
os/win_registry.py
Python
0.000001
@@ -0,0 +1,706 @@ +import _winreg%0A%0AkeyName = %22myKey%22%0A%0A%0Adef write_to_registry():%0A try:%0A key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, %22Software%5C%5C%22 + keyName)%0A _winreg.SetValueEx(key, %22myVal%22, 0, _winreg.REG_SZ, %22This is a value.%22)%0A print(%22value created%22)%0A except Exception as e:%0A print(e)%0A%0A%0Adef read_from_registry():%0A try:%0A with _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, %22Software%5C%5C%22 + keyName, 0, _winreg.KEY_READ) as key:%0A if key:%0A data = _winreg.QueryValueEx(key, %22myVal%22)%0A print(%22Read from registry: %22, data)%0A except Exception as e:%0A print(e)%0A%0A%0Aif __name__ == '__main__':%0A write_to_registry()%0A read_from_registry()%0A