commit | subject | old_file | new_file | old_contents | lang | proba | diff |
---|---|---|---|---|---|---|---|
a37007e03747395c12cc4bc34c761aa3253f7599 | Add tests folder | tests/__init__.py | tests/__init__.py | Python | 0.000001 | @@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
|
|
c02a2ac7dc882b68f061f2caff1b4c1d91fce471 | Make Postgres the default test adapter (refs GH-2862) | src/sentry/utils/pytest.py | src/sentry/utils/pytest.py | from __future__ import absolute_import
import mock
import os
from django.conf import settings
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')

    os.environ.setdefault('RECAPTCHA_TESTING', 'True')
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')

    if not settings.configured:
        # only configure the db if its not already done
        test_db = os.environ.get('DB', 'sqlite')
        if test_db == 'mysql':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.mysql',
                'NAME': 'sentry',
                'USER': 'root',
            })
        elif test_db == 'postgres':
            settings.DATABASES['default'].update({
                'ENGINE': 'sentry.db.postgres',
                'USER': 'postgres',
                'NAME': 'sentry',
            })
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            })

    settings.TEMPLATE_DEBUG = True
    settings.SENTRY_DISALLOWED_IPS = ('127.0.0.1',)

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
        'tests',
    )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably, pbkdf2 is by design, slow.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    settings.SENTRY_OPTIONS['system.url-prefix'] = 'http://testserver'

    # enable draft features
    settings.SENTRY_ENABLE_EMAIL_REPLIES = True

    settings.SENTRY_ALLOW_ORIGIN = '*'

    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.RECAPTCHA_PUBLIC_KEY = 'a' * 40
    settings.RECAPTCHA_PRIVATE_KEY = 'b' * 40

    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }

    settings.SOUTH_TESTS_MIGRATE = bool(os.environ.get('USE_SOUTH'))

    if not hasattr(settings, 'SENTRY_OPTIONS'):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS['redis.clusters'] = {
        'default': {
            'hosts': {
                0: {
                    'db': 9,
                },
            },
        }
    }

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()

    from sentry.runner.initializer import initialize_receivers, fix_south, bind_cache_to_option_store

    fix_south(settings)
    bind_cache_to_option_store()
    initialize_receivers()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA


def pytest_runtest_teardown(item):
    from sentry.app import tsdb
    tsdb.flush()

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    from celery.task.control import discard_all
    discard_all()
| Python | 0 | @@ -478,22 +478,24 @@
('DB', '
-sqlite
+postgres
 ')
|
d046bc3be27c39ca70a45d92939a2aa2444f3195 | test examples | test/examples/test_examples.py | test/examples/test_examples.py | Python | 0 | @@ -0,0 +1,1750 @@
+"""
Runs all example scripts. Only tests whether examples can be executed.
"""
import pytest
import os
import subprocess
import glob
import sys

# set environment flag
# can be used in examples to reduce cpu cost
os.environ['THETIS_REGRESSION_TEST'] = "1"

exclude_files = [
    'baroclinic_eddies/diagnostics.py',
    'baroclinic_eddies/submitRuns.py',
    'bottomFriction/plot_results.py',
    'columbia_plume/atm_forcing.py',
    'columbia_plume/bathymetry.py',
    'columbia_plume/cre-plume.py',
    'columbia_plume/diagnostics.py',
    'columbia_plume/plot_elevation_ts.py',
    'columbia_plume/roms_forcing.py',
    'columbia_plume/test_bathy_smoothing.py',
    'columbia_plume/tidal_forcing.py',
    'columbia_plume/timeseries_forcing.py',
    'dome/diagnostics.py',
    'dome/dome_setup.py',
    'dome/plot_histogram.py',
    'katophillips/plot_results.py',
    'lockExchange/diagnostics.py',
    'lockExchange/plotting.py',
    'lockExchange/submitRuns.py',
    'tidalfarm/tidalfarm.py',
]

cwd = os.path.abspath(os.path.dirname(__file__))
examples_dir = os.path.abspath(os.path.join(cwd, '..', '..', 'examples'))

exclude_files = [os.path.join(examples_dir, f) for f in exclude_files]

all_examples = glob.glob(os.path.join(examples_dir, '*/*.py'))
all_examples = [f for f in all_examples if f not in exclude_files]


@pytest.fixture(params=all_examples,
                ids=lambda x: os.path.basename(x))
def example_file(request):
    return os.path.abspath(request.param)


def test_examples(example_file, tmpdir, monkeypatch):
    assert os.path.isfile(example_file), 'File not found {:}'.format(example_file)
    # change workdir to temporary dir
    monkeypatch.chdir(tmpdir)
    subprocess.check_call([sys.executable, example_file])
|
|
b872aaa2837e7cd72c36f2b3fd7679106fda57b4 | Add test cli | tests/test_cli.py | tests/test_cli.py | Python | 0 | @@ -0,0 +1,645 @@
+import unittest
import sys, os
import cli
from io import StringIO
io = StringIO()


class TestBuildInCommands(unittest.TestCase):

    def setUp(self):
        pass
    def tearDown(self):
        pass

    def test_normal(self):
        sys.stdout = io
        # $ iroha-ya-cli
        cli.main.main(['iroha-ya-cli'])
        sys.stdout = sys.__stdout__
        self.assertTrue('Iroha-mizuki-cli' in io.getvalue())

    def test_config(self):
        sys.stdout = io
        # $ iroha-ya-cli config
        cli.main.main(['iroha-ya-cli','config'])
        sys.stdout = sys.__stdout__
        self.assertTrue('Iroha-mizuki-cli' in io.getvalue())
|
|
3d7bb0dfcbfda9c99ee2372394959667c76bb83f | Add first .py file to project | main.py | main.py | Python | 0 | @@ -0,0 +1,16 @@
+
print("Hello!")
|
|
59a57a25ff925bd1ce6d467d316ec478847b58ad | Create combinations.py | combinations.py | combinations.py | Python | 0 | @@ -0,0 +1,595 @@
+#!/usr/bin/env python

from string import uppercase, lowercase, maketrans
import math, sys


class combinations():

    def combs(self, total, choice):
        return (math.factorial(total)/(math.factorial(choice)*math.factorial(total-choice)))


if __name__ == '__main__':
    try:
        total = sys.argv[1]
        choice = sys.argv[2]
        total = int(total, 0)
        choice = int(choice, 0)
        ops = combinations()
        result = ops.combs(total, choice)
        print result
    except IndexError:
        print('Usage: combinations.py <int of total> <int to choice>')
|
|
2c63d77428b84c7d1be1c861079d39d641d51fcf | add script to scrap stock data and save them locally | stock_scraping/stock_price_scraping_to_local.py | stock_scraping/stock_price_scraping_to_local.py | Python | 0 | @@ -0,0 +1,2169 @@
+'''
This script helps you scrap stock data avaliable on Bloomberg Finance
and store them locally.

Please obey applicable local and federal laws and applicable API term of use
when using this scripts. I, the creater of this script, will not be responsible
for any legal issues resulting from the use of this script.

@author Gan Tu
@version python 2 or python 3

[HOW TO CHANGE PYTHON VERSION]

This script by default should be run by Python 2.
To use this in Python 3, change the followings:

1) change ALL occurrences of "urllib" to "urllib.request".
'''

import urllib
import re
import json
import os


# Stock Symbols Initialization
# Feel free to modify the file source to contain stock symbols you plan to scrap fro
stocks = open("nasdaq_symbols.txt", "r").read().split("\n")

# URL Initialization
urlPrefix = "http://www.bloomberg.com/markets/api/bulk-time-series/price/"
urlAffix = "%3AUS?timeFrame="

# Only four of these are valid options for now
# 1_Day will scrap minute by minute data for one day, while others will be daily close price
# Feel free to modify them for your own need
options = ["1_DAY", "1_MONTH", "1_YEAR", "5_YEAR"]

def setup():
    try:
        os.mkdir("data")
    except Exception as e:
        pass
    for option in options:
        try:
            os.mkdir("data/" + option + "/")
        except Exception as e:
            pass

def scrap():
    i = 0
    while i < len(stocks):
        for option in options:
            file = open("data/" + option + "/" + stocks[i] + ".txt", "w")
            file.close()
            htmltext = urllib.urlopen(urlPrefix + stocks[i] + urlAffix + option)
            try:
                data = json.load(htmltext)[0]["price"]
                key = "date"
                if option == "1_DAY":
                    key = "dateTime"
                file = open("data/" + option + "/" + stocks[i] + ".txt", "a")
                for price in data:
                    file.write(stocks[i] + "," + price[key] + "," + str(price["value"]) + "\n")
                file.close()
            except Exception as e:
                pass
        i += 1


if __name__ == "__main__":
    setup()
    scrap()
|
|
ba06683866ce8e4e3bccd4acebd6ec2278acfeaa | Add Litecoin testnet, and Litecoin BIP32 prefixes. | pycoin/networks.py | pycoin/networks.py | from collections import namedtuple
from .serialize import h2b
NetworkValues = namedtuple('NetworkValues',
    ('network_name', 'subnet_name', 'code', 'wif', 'address',
     'pay_to_script', 'prv32', 'pub32'))

NETWORKS = (
    NetworkValues("Bitcoin", "mainnet", "BTC", b'\x80', b'\0', b'\5', h2b("0488ADE4"), h2b("0488B21E")),
    NetworkValues("Bitcoin", "testnet3", "XTN", b'\xef', b'\x6f', b'\xc4',
                  h2b("04358394"), h2b("043587CF")),
    NetworkValues("Litecoin", "mainnet", "LTC", b'\xb0', b'\x30', None, None, None),
    NetworkValues("Dogecoin", "mainnet", "DOGE", b'\x9e', b'\x1e', b'\x16',
                  h2b("02fda4e8"), h2b("02fda923")),

    # BlackCoin: unsure about bip32 prefixes; assuming will use Bitcoin's
    NetworkValues("Blackcoin", "mainnet", "BLK", b'\x99', b'\x19', None, h2b("0488ADE4"), h2b("0488B21E")),
)

# Map from short code to details about that network.
NETWORK_NAME_LOOKUP = dict((i.code, i) for i in NETWORKS)

# All network names, return in same order as list above: for UI purposes.
NETWORK_NAMES = [i.code for i in NETWORKS]

DEFAULT_NETCODES = NETWORK_NAMES


def _lookup(netcode, property):
    # Lookup a specific value needed for a specific network
    network = NETWORK_NAME_LOOKUP.get(netcode)
    if network:
        return getattr(network, property)
    return None


def network_name_for_netcode(netcode):
    return _lookup(netcode, "network_name")


def subnet_name_for_netcode(netcode):
    return _lookup(netcode, "subnet_name")


def full_network_name_for_netcode(netcode):
    network = NETWORK_NAME_LOOKUP[netcode]
    if network:
        return "%s %s" % (network.network_name, network.subnet_name)


def wif_prefix_for_netcode(netcode):
    return _lookup(netcode, "wif")


def address_prefix_for_netcode(netcode):
    return _lookup(netcode, "address")


def pay_to_script_prefix_for_netcode(netcode):
    return _lookup(netcode, "pay_to_script")


def prv32_prefix_for_netcode(netcode):
    return _lookup(netcode, "prv32")


def pub32_prefix_for_netcode(netcode):
    return _lookup(netcode, "pub32")
| Python | 0 | @@ -569,26 +569,155 @@
, None,
-None, None
+h2b('019d9cfe'), h2b('019da462')),
+    NetworkValues("Litecoin", "testnet", "XLT", b'\xb1', b'\x31', None, h2b('0436ef7d'), h2b('0436f6e1')
 ),
 N
@@ -1025,16 +1025,17 @@
 "))
 )
 
+
# Map fr
|
1de668219f618a0632fac80fd892a0a229b8fa05 | Solve Code Fights addition without carrying problem | CodeFights/additionWithoutCarrying.py | CodeFights/additionWithoutCarrying.py | Python | 0.000002 | @@ -0,0 +1,1013 @@
+#!/usr/local/bin/python
# Code Fights Addition Without Carrying Problem


def additionWithoutCarrying(param1, param2):
    s1, s2 = str(param1), str(param2)
    shorter = s1 if len(s1) < len(s2) else s2
    longer = s2 if shorter == s1 else s1
    if len(shorter) < len(longer):
        shorter = shorter.zfill(len(longer))
    return int(''.join([str(int(a) + int(b))[-1] for (a, b) in
                        zip(shorter, longer)]))


def main():
    tests = [
        [456, 1734, 1180],
        [99999, 0, 99999],
        [999, 999, 888],
        [0, 0, 0],
        [54321, 54321, 8642]
    ]

    for t in tests:
        res = additionWithoutCarrying(t[0], t[1])
        ans = t[2]
        if ans == res:
            print("PASSED: additionWithoutCarrying({}, {}) returned {}"
                  .format(t[0], t[1], res))
        else:
            print("FAILED: additionWithoutCarrying({}, {}) returned {},"
                  "answer: {}".format(t[0], t[1], res, ans))


if __name__ == '__main__':
    main()
|
|
2620ca08397258ec207a52fe186d28d4ce7e9bd4 | fix for Python2.6 | python/qisys/ui.py | python/qisys/ui.py | ## Copyright (c) 2012 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
# Colorized output to console code inspired by
# pygments (http://pygments.org/) BSD License.
""" Tools for a nice user interface
"""
import sys
import os
import datetime
import functools
# Try using pyreadline so that we can
# have colors on windows, too.
_console = None
HAS_PYREADLINE = True
if os.name == 'nt':
    try:
        # pylint: disable-msg=F0401
        from pyreadline.console import Console
        _console = Console()
    except ImportError:
        HAS_PYREADLINE = False
# ANSI color codes, as classes,
# so that we can use ::
#
# qisys.ui.msg(qisys.ui.bold, "This is bold", qisys.ui.reset)
class _Color:
    def __init__(self, code, modifier=None):
        self.code = '\033[%d' % code
        if modifier is not None:
            self.code += ';%dm' % modifier
        else:
            self.code += 'm'
reset = _Color(0)
bold = _Color(1)
faint = _Color(2)
standout = _Color(3)
underline = _Color(4)
blink = _Color(5)
overline = _Color(6)
black = _Color(30)
darkred = _Color(31)
darkgreen = _Color(32)
brown = _Color(33)
darkblue = _Color(34)
purple = _Color(35)
teal = _Color(36)
lightgray = _Color(37)
darkgray = _Color(30, 1)
red = _Color(31, 1)
green = _Color(32, 1)
yellow = _Color(33, 1)
blue = _Color(34, 1)
fuchsia = _Color(35, 1)
turquoise = _Color(36, 1)
white = _Color(37, 1)
darkteal = turquoise
darkyellow = brown
fuscia = fuchsia
# Global variable to store qisys.ui configuration
# Useful settings when running qibuild on a buildfarm:
# CONFIG['timestamps'] = True
# CONFIG['interative'] = False
CONFIG = {
"verbose" : False,
"quiet" : False,
"color" : True,
"timestamp" : False,
"interactive" : True,
}
def _msg(*tokens, **kwargs):
""" Helper method for error, warning, info, debug
"""
fp = kwargs.get("fp", sys.stdout)
sep = kwargs.get("sep", " ")
end = kwargs.get("end", "\n")
with_color = CONFIG["color"]
if os.name == 'nt' and not HAS_PYREADLINE or not fp.isatty():
with_color = False
if CONFIG["timestamp"]:
now = datetime.datetime.now()
res = now.strftime("[%Y-%m-%d %H:%M:%S] ")
else:
res = ""
for i, token in enumerate(tokens):
if not token:
continue
if isinstance(token, _Color):
if with_color:
res += token.code
else:
if sep == " " and token == "\n":
res += "\n"
else:
res += str(token)
res += sep
# always reset:
if with_color:
res += reset.code
res += end
if _console and with_color:
_console.write_color(res)
else:
fp.write(res)
fp.flush()
def error(*tokens, **kwargs):
""" Print an error message """
tokens = [bold, red, "[ERROR]: "] + list(tokens)
kwargs["fp"] = sys.stderr
_msg(*tokens, **kwargs)
def warning(*tokens, **kwargs):
""" Print a warning message """
tokens = [brown, "[WARN ]: "] + list(tokens)
kwargs["fp"] = sys.stderr
_msg(*tokens, **kwargs)
def info(*tokens, **kwargs):
""" Print an informative message """
if CONFIG["quiet"]:
return
_msg(*tokens, **kwargs)
def debug(*tokens, **kwargs):
""" Print a debug message """
if not CONFIG["verbose"]:
return
tokens = [blue, "[DEBUG]: "] + list(tokens)
_msg(*tokens, **kwargs)
class timer:
""" To be used as a decorator,
or as a with statement:
>>> @timer("something")
def do_something():
foo()
bar()
# Or:
>>> with timer("something")
foo()
bar()
This will print:
'something took 2h 33m 42s'
"""
def __init__(self, description):
self.description = description
self.start_time = None
self.stop_time = None
self.elapsed_time = None
def __call__(self, func, *args, **kwargs):
@functools.wraps(func)
def res(*args, **kwargs):
self.start()
func(*args, **kwargs)
self.stop()
return res
def __enter__(self):
self.start()
return self
def __exit__(self, *unused):
self.stop()
def start(self):
""" Start the timer """
self.start_time = datetime.datetime.now()
def stop(self):
""" Stop the timer and emit a nice log """
end_time = datetime.datetime.now()
elapsed_time = end_time - self.start_time
elapsed_seconds = elapsed_time.total_seconds()
hours, remainder = divmod(int(elapsed_seconds), 3600)
minutes, seconds = divmod(remainder, 60)
as_str = "%sh %sm %ss" % (hours, minutes, seconds)
if CONFIG['timestamp']:
info("%s took %s" % (self.description, as_str))
| Python | 0 | @@ -4738,23 +4738,15 @@
ime.
-total_
seconds
-()
%0A
|
ed5c27623711a7f3b798aed9c0f7cdbdcebc0dcd | test python interpreter | test/test_interpreter_layer.py | test/test_interpreter_layer.py | Python | 0.00001 | @@ -0,0 +1,422 @@
+# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#


import cocos
from cocos.director import director
import pyglet


if __name__ == "__main__":
    director.init()
    interpreter_layer = cocos.layer.InterpreterLayer()
    main_scene = cocos.scene.Scene(interpreter_layer)
    director.run(main_scene)
|
|
7f4bd900d1e647fe017ce4c01e279dd41a71a349 | Add management command to set SoftwareSecure verification status. | lms/djangoapps/verify_student/management/commands/set_software_secure_status.py | lms/djangoapps/verify_student/management/commands/set_software_secure_status.py | Python | 0 | @@ -0,0 +1,2193 @@
+"""
Manually set Software Secure verification status.
"""

import sys

from django.core.management.base import BaseCommand
from verify_student.models import (
    SoftwareSecurePhotoVerification, VerificationCheckpoint, VerificationStatus
)


class Command(BaseCommand):
    """
    Command to trigger the actions that would normally follow Software Secure
    returning with the results of a photo verification.
    """

    args = "<{approved, denied}, SoftwareSecurePhotoVerification id, [reason_for_denial]>"

    def handle(self, *args, **kwargs):  # pylint: disable=unused-argument
        from verify_student.views import _set_user_requirement_status

        status_to_set = args[0]
        receipt_id = args[1]

        try:
            attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
        except SoftwareSecurePhotoVerification.DoesNotExist:
            self.stderr.write(
                'SoftwareSecurePhotoVerification with id {id} could not be found.\n'.format(id=receipt_id)
            )
            sys.exit(1)

        if status_to_set == 'approved':
            self.stdout.write('Approving verification for {id}.\n'.format(id=receipt_id))
            attempt.approve()
            _set_user_requirement_status(attempt, 'reverification', 'satisfied')

        elif status_to_set == 'denied':
            self.stdout.write('Denying verification for {id}.\n'.format(id=receipt_id))
            if len(args) >= 3:
                reason_for_denial = args[2]
            else:
                reason_for_denial = 'Denied via management command.'
            attempt.deny(reason_for_denial)
            _set_user_requirement_status(attempt, 'reverification', 'failed', reason_for_denial)

        else:
            self.stdout.write('Cannot set id {id} to unrecognized status {status}'.format(
                id=receipt_id, status=status_to_set
            ))
            sys.exit(1)

        checkpoints = VerificationCheckpoint.objects.filter(photo_verification=attempt).all()
        VerificationStatus.add_status_from_checkpoints(
            checkpoints=checkpoints,
            user=attempt.user,
            status=status_to_set
        )
|
|
4dd66150c922e1c700fad74727955ef72c045f37 | Add Find Command MCEdit filter | minecraft/FindCommand.py | minecraft/FindCommand.py | Python | 0 | @@ -0,0 +1,738 @@
+# MCEdit filter

from albow import alert

displayName = "Find Command"

inputs = (
    ("Command:", ("string", "value=")),
)

def perform(level, box, options):
    command = options["Command:"]
    n = 0
    result = ""
    for (chunk, slices, point) in level.getChunkSlices(box):
        for e in chunk.TileEntities:
            x = e["x"].value
            y = e["y"].value
            z = e["z"].value
            if (x, y, z) in box:
                t = e["id"].value
                if t == "Control":
                    c = e["Command"].value
                    if c.find(command) >= 0:
                        n += 1
                        result += "(%d, %d, %d) %s\n" % (x, y, z, c)
    result += "(%d)" % n
    alert(result)
|
|
5e4ef4737c78b6154596ab8c76c4e60bd840453c | Add component.navbar | src/penn_chime_dash/app/components/navbar.py | src/penn_chime_dash/app/components/navbar.py | Python | 0 | @@ -0,0 +1,1274 @@
+# components/navbar.py
import dash_bootstrap_components as dbc
import dash_html_components as html
import dash_core_components as dcc
from ..config import Config

cfg = Config()

navbar = dbc.NavbarSimple(
    brand='Penn Med CHIME',  # Browser window title
    brand_href='/',  # index page
    children=[

        html.Link(
            key='penn-med-header',
            rel="stylesheet",
            href=cfg.PENN_HEADER,
        ),

        dbc.NavItem(
            dcc.Link(
                'Model',
                href='/CHIME',
                className='nav-link'
            )
        ),

        dbc.NavItem(
            dcc.Link(
                'Contribute',
                href='https://codeforphilly.github.io/chime/',
                className='nav-link'
            )
        ),

        dbc.NavItem(
            dcc.Link(
                'Resources',
                href='/resources',
                className='nav-link'
            )
        ),

        dbc.NavItem(
            dcc.Link(
                'Contact',
                href=cfg.PENN_MED_URL,
                className='nav-link'
            )
        ),
    ],

    sticky='top',
    color='primary',
    light=True,
    dark=False
)
|
|
eea33e6207da7446e1713eb4d78b76d37ae5eaf2 | Add sample of scheduler using celery | with_celery.py | with_celery.py | Python | 0 | @@ -0,0 +1,285 @@
+from celery import Celery

# The host in which RabbitMQ is running
HOST = 'amqp://guest@localhost'

app = Celery('pages_celery', broker=HOST)

@app.task
def work(msg):
    print msg

# To execute the task:
#
# $ python
# >>> from with_celery import work
# >>> work.delay('Hi there!!')
|
|
7ca1f6c5d51f5e2fc582603012c3ca5a053ee4eb | Add BLT package (#19410) | var/spack/repos/builtin/packages/blt/package.py | var/spack/repos/builtin/packages/blt/package.py | Python | 0 | @@ -0,0 +1,1262 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Blt(Package):
    """BLT is a streamlined CMake-based foundation for Building, Linking and
    Testing large-scale high performance computing (HPC) applications."""

    homepage = "https://github.com/LLNL/blt"
    url = "https://github.com/LLNL/blt/archive/v0.3.6.tar.gz"
    git = "https://github.com/LLNL/blt.git"

    maintainers = ['davidbeckingsale']

    version('develop', branch='develop')
    version('main', branch='main')
    version('0.3.6', sha256='6276317c29e7ff8524fbea47d9288ddb40ac06e9f9da5e878bf9011e2c99bf71')
    version('0.3.5', sha256='68a1c224bb9203461ae6f5ab0ff3c50b4a58dcce6c2d2799489a1811f425fb84')
    version('0.3.0', sha256='bb917a67cb7335d6721c997ba9c5dca70506006d7bba5e0e50033dd0836481a5')
    version('0.2.5', sha256='3a000f60194e47b3e5623cc528cbcaf88f7fea4d9620b3c7446ff6658dc582a5')
    version('0.2.0', sha256='c0cadf1269c2feb189e398a356e3c49170bc832df95e5564e32bdbb1eb0fa1b3')

    depends_on('cmake', type='run')

    def install(self, spec, prefix):
        install_tree('.', prefix)
|
|
4537ab84bb87eeae6b6865b7b9140d5324384e4a | add test cases for address operations | test/test-rpc/TestCase/Account/test_address.py | test/test-rpc/TestCase/Account/test_address.py | Python | 0 | @@ -0,0 +1,1337 @@
+import random
from TestCase.MVSTestCase import *

class TestAccount(MVSTestCaseBase):
    roles = (Alice,)
    need_mine = False

    def test_0_new_address(self):
        #password error
        ec, message = mvs_rpc.new_address(Alice.name, Alice.password+'1')
        self.assertEqual(ec, 1000, message)

        #check address_count
        ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 0)
        self.assertEqual(ec, 4004, message)

        ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 0x00100000)
        self.assertEqual(ec, 4004, message)

        ec, message = mvs_rpc.new_address(Alice.name, Alice.password, 11)
        self.assertEqual(ec, 0, message)

    def test_1_list_addresses(self):
        # password error
        ec, message = mvs_rpc.list_addresses(Alice.name, Alice.password + '1')
        self.assertEqual(ec, 1000, message)

        ec, addresses = mvs_rpc.list_addresses(Alice.name, Alice.password)
        self.assertEqual(ec, 0, addresses)
        addresses.sort()
        alice_addresses = Alice.addresslist[:]
        alice_addresses.sort()
        self.assertEqual(addresses, alice_addresses)

    def test_2_check_address(self):
        for address in Alice.addresslist:
            ec, message = mvs_rpc.check_address(address)
            self.assertEqual(ec, 0, message)
|
|
83cdd840979dc452f444914a0c40d077e6917c38 | Add DB connector class. | DBConnection.py | DBConnection.py | Python | 0 | @@ -0,0 +1,21 @@
+__author__ = 'David'
|
|
4ff0e6a4d190d8c1f60903d18dcdaac1edeace8a | Create test.py | test.py | test.py | Python | 0.000005 | @@ -0,0 +1,294 @@
+import unittest
from mock import patch
import RedDefineBot

class TestBot(unittest.TestCase):
    def test_auth_called(self,mock):
        self.assertTrue(mock.called)
    def test_auth_notcalled(self,mock):
        self.assertFalse(mock.called)

if __name__ == '__main__':
    unittest.main()
|
|
00b04f773b9e2018b08776c5d53ff3dad7ed00d1 | Create test.py | test.py | test.py | Python | 0.000005 | @@ -0,0 +1,36 @@
+"""test.py
"""

print "Hello world"
|
|
b4b2b80cb1d0c0729e8e98085c2cfc3bc55ddda3 | Solve the Longest Lines challenge using Python3 | LongestLines.py | LongestLines.py | Python | 0.999972 | @@ -0,0 +1,579 @@
+# Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys


input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
    number_of_lines_to_print = int(test_cases.readline().rstrip())
    lines = []
    for case in test_cases:
        lines.append(case.rstrip())

lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
    print(l)
|
|
37e674f05547c7b6b93f447477443644865975d1 | Bring back the Root URL config | urls.py | urls.py | Python | 0 | @@ -0,0 +1,909 @@
+__author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'upload.views.home', name='home'),

    url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
    url(r'^upload/', include('fileupload.urls')),
    url(r'^plots/', include('plots.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
)

import os
urlpatterns += patterns('',
    (r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
|
|
b733f433d797b302c46cb71cf0230b986f630d26 | Create w3_1.py | w3_1.py | w3_1.py | Python | 0.000482 | @@ -0,0 +1,15 @@
+print("你教得真好")
|
|
d23461fb7b81f70c919fb028eb22009deaae13da | Generate posterior information | main.py | main.py | import math
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
def generate_sample(mean, cov_matrix):
    '''generate_sample: Generate sample function output from a mean and covariance matrix.'''
    cholesky_decomp = tf.cholesky(cov_matrix)
    cov_shape = tf.shape(cov_matrix)
    result_shape = [cov_shape[0], 1]
    uniform_gaussian_distribution = tf.random_normal(result_shape, mean=0.0, stddev=1.0, \
        dtype=tf.float64)
    return mean + tf.matmul(cholesky_decomp, uniform_gaussian_distribution)


if __name__ == "__main__":
    # generate sample data
    x_data = np.linspace(-math.pi, math.pi, 10)
    y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)

    plt.plot(x_data, y_data, 'o')
    plt.show()

    mean_est = 0.0
    length_scale = 1.5

    # Use squared exponential covariance matrix
    x_rows, x_cols = tf.meshgrid(x_data, x_data)
    # Covariance defined as $exp(-0.5*(x_i-x_j)^2/l^2)$ where l is the length-scale
    covariance_est = tf.exp(tf.scalar_mul(-0.5, \
        tf.squared_difference(x_cols, x_rows)/length_scale))

    sess = tf.Session()

    # print prior samples
    num_samples = 0
    while num_samples < 5:
        prior_sample = sess.run(generate_sample(mean_est, covariance_est))
        plt.plot(x_data, prior_sample)
        plt.title('Prior Samples')
        num_samples = num_samples + 1
    plt.show()
| Python | 1 | @@ -529,16 +529,1238 @@
 ution)
 
+def solve_posterior(x_data, y_data, cov_matrix, sigma, test_data):
+    '''solve_posterior: Generate the mean, variance and log marginal likelihood from
+    sample data.'''
+    cholesky_decomp = tf.cholesky(cov_matrix + math.pow(sigma, 2)*tf.eye(tf.shape(cov_matrix)[0], dtype=tf.float64))
+    alpha = tf.cholesky_solve(cholesky_decomp, y_data)
+    star_X_rows, star_X_cols = tf.meshgrid(x_data, test_data)
+    K_star_X = tf.exp(tf.scalar_mul(-0.5,
+        tf.squared_difference(star_X_cols, star_X_rows)/length_scale))
+    mean = tf.matmul(K_star_X, alpha)
+    star_rows, star_cols = tf.meshgrid(test_data, test_data)
+    K_star_star = tf.exp(tf.scalar_mul(-0.5,
+        tf.squared_difference(star_cols, star_rows)/length_scale))
+    X_star_rows, X_star_cols = tf.meshgrid(test_data, x_data)
+    K_X_star = tf.exp(tf.scalar_mul(-0.5,
+        tf.squared_difference(X_star_cols, X_star_rows)/length_scale))
+    variance = K_star_star - tf.matmul(K_star_X, tf.cholesky_solve(cholesky_decomp, K_X_star))
+    log_marg_likelihood = -0.5*tf.transpose(y_data)*alpha \
+        - tf.reduce_sum(tf.log(tf.diag_part(cholesky_decomp))) \
+        - (x_data.size / 2) * math.log(math.pi)
+    return mean, variance, log_marg_likelihood
 if __nam
@@ -1928,58 +1928,8 @@
 e
 
-    plt.plot(x_data, y_data, 'o')
-    plt.show()
-
@@ -2018,57 +2018,8 @@
 rix
-    x_rows, x_cols = tf.meshgrid(x_data, x_data)
@@ -2098,16 +2098,65 @@
 h-scale
+    x_rows, x_cols = tf.meshgrid(x_data, x_data)
 cova
@@ -2565,10 +2565,522 @@
 show()
 
+    x_test = np.linspace(-math.pi, math.pi, 100)
+
+    mean, variance, log_marg_likelihood = sess.run(solve_posterior(x_data,
+        tf.reshape(y_data, [y_data.size, 1]), covariance_est, 0.1, x_test))
+    mean = mean.flatten()
+
+    variance_diag = np.diagonal(variance)
+
+    mean_plus_variance = mean + variance_diag
+    mean_minus_variance = mean - variance_diag
+
+    plt.plot(x_data, y_data, 'o')
+    plt.plot(x_test, mean)
+    plt.fill_between(x_test, mean_minus_variance, mean_plus_variance)
+    plt.show()
%0A%0A
|
73afce309f0e73b441c0ade49849397cba0fb0c2 | update spec runner to work with invoke's boolean flags to run specs untranslated | tasks/specs.py | tasks/specs.py | from invoke import task, run as run_
from .base import BaseTest
class Rubyspecs(BaseTest):
    def __init__(self, files, options, translated=True):
        super(Rubyspecs, self).__init__()
        self.exe = "`pwd`/bin/%s" % ("topaz" if translated else "topaz_untranslated.py")
        self.files = files
        self.options = options
        self.download_mspec()
        self.download_rubyspec()

    def mspec(self, args):
        run_("../mspec/bin/mspec %s -t %s --config=topaz.mspec %s" % (args, self.exe, self.files))

    def run(self):
        self.mspec("run -G fails %s" % self.options)

    def tag(self):
        self.mspec("tag --add fails -G fails -f spec %s" % self.options)

    def untag(self):
        self.mspec("tag --del fails -g fails -f spec %s" % self.options)


def generate_spectask(taskname):
    def spectask(files="", options="", translated=True):
        runner = Rubyspecs(files, options, translated=(translated != "False"))
        getattr(runner, taskname)()
    spectask.__name__ = taskname
    return task(spectask)


run = generate_spectask("run")
tag = generate_spectask("tag")
untag = generate_spectask("untag")
| Python | 0 | @@ -118,32 +118,34 @@
 files, options, 
+un
 translated=True)
@@ -131,35 +131,36 @@
 s, untranslated=
-Tru
+Fals
 e):
     supe
@@ -236,13 +236,31 @@
 opaz
+_untranslated.py
 " if 
+un
 tran
@@ -277,32 +277,16 @@
 e "topaz
-_untranslated.py
 ")
@@ -863,16 +863,18 @@
 ons="", 
+un
 translat
@@ -880,11 +880,12 @@
 ted=
-Tru
+Fals
 e):
@@ -927,16 +927,18 @@
 ptions, 
+un
 translat
@@ -940,17 +940,18 @@
 nslated=
-(
+un
 translat
@@ -956,20 +956,8 @@
 ated
- != "False")
 )
|
90399f50a3f50d9193ae1e6b2042215fb388230f | Create Video Stream program for webcam | VideoStream.py | VideoStream.py | Python | 0 | @@ -0,0 +1,428 @@
+import cv2
import numpy as np

cap = cv2.VideoCapture(0)

print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())

while True:

    ret, frame = cap.read()
    gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    cv2.imshow('frame', gray_image)

    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

# Release the capture
cap.release()
cv2.destroyAllWindows()
|
|
1437bb868844731d3fdb13c6dd52dfd706df6f63 | Add a new script to clean up a habitica user given user email | bin/ext_service/clean_habitica_user.py | bin/ext_service/clean_habitica_user.py | Python | 0 | @@ -0,0 +1,942 @@
+import argparse
import sys
import logging

import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)

    parser = argparse.ArgumentParser()
    parser.add_argument("user_email",
        help="the email address of the user whose habitica account you want to clean up")

    args = parser.parse_args()
    del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
    logging.debug("Found uuid %s" % del_uuid)
    del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
    logging.debug("del_habitica_creds = %s" % del_habitica_creds)
    del_result = proxy.habiticaProxy(del_uuid, "DELETE",
                                     "/api/v3/user",
                                     {'password': del_habitica_creds['habitica_password']})
    logging.debug("delete result = %s" % del_result)
|
|
a9dd25c825bacd03ae358cc153c94ce3960ec0cf | Add serializers | chipy_org/apps/meetings/serializers.py | chipy_org/apps/meetings/serializers.py | Python | 0.000005 | @@ -0,0 +1,814 @@
+from rest_framework import serializers

from .models import Meeting, Topic, Presentor


class PresentorSerializer(serializers.ModelSerializer):
    class Meta:
        model = Presentor
        fields = ('name', 'release')


class TopicSerializer(serializers.ModelSerializer):
    presentor = PresentorSerializer()

    class Meta:
        model = Topic
        fields = (
            'title',
            'presentor',
            'length',
            'description',
            'embed_video',
            'slides_link',
            'start_time',
            'approved'
        )
        depth = 1


class MeetingSerializer(serializers.ModelSerializer):
    topics = TopicSerializer(many=True)

    class Meta:
        model = Meeting
        fields = ('when', 'where', 'live_stream', 'topics')
        depth = 2
|
|
55185a7a7402c9d0ce2677b00a329aa4197556c3 | add mediator | Mediator.py | Mediator.py | Python | 0.001741 | @@ -0,0 +1,1822 @@
+# -*- coding: utf-8 -*-

"""
Mediator pattern
"""


class AbstractColleague(object):
    """
    AbstractColleague
    """

    def __init__(self, mediator):
        self.mediator = mediator


class ConcreteColleague(AbstractColleague):
    """
    ConcreteColleague
    """
    def __init__(self, name, mediator):
        self.name = name
        AbstractColleague.__init__(self, mediator)

    def send(self, message, receiver=None):
        self.mediator.send(message, self, receiver)

    @staticmethod
    def notify(name, message, sender):
        print u'From: {} To: {} -- {}'.format(name, sender.name, message)


class AbstractMediator(object):
    """
    AbstractMediator
    """
    def send(self, message, colleague):
        pass


class ConcreteMediator(AbstractMediator):

    def __init__(self, name):
        self.name = name

        self.colleagues = []

    def register(self, colleague):
        self.colleagues.append(colleague)

    def send(self, message, colleague, receiver=None):
        if receiver:
            receiver.notify(colleague.name, message, receiver)
        else:
            for _ in self.colleagues:
                if _ != colleague:
                    _.notify(colleague.name, message, _)


if __name__ == '__main__':
    mediator = ConcreteMediator(u'UN')

    USA = ConcreteColleague(u'USA', mediator)
    mediator.register(USA)
    Japan = ConcreteColleague(u'Japan', mediator)
    mediator.register(Japan)
    Iraq = ConcreteColleague(u'Iraq', mediator)
    mediator.register(Iraq)
    UK = ConcreteColleague(u'UK', mediator)
    mediator.register(UK)

    USA.send(u"I'm the boss, bitch！")
    Japan.send(u'Emm...', receiver=USA)
    Iraq.send(u'A ha!', receiver=USA)
    UK.send(u"Reversed?")
    UK.send(u"My litter brother send that, boss...Trust me!", receiver=USA)
|
|
290f990e31a5f732fb054846caea9346946778df | enable import as module | __init__.py | __init__.py | Python | 0.000001 | @@ -0,0 +1,218 @@
+"""
.. module:: lmtscripts
    :platform: Unix
    :synopsis: useful scripts for EHT observations at LMT

.. moduleauthor:: Lindy Blackburn <[email protected]>
.. moduleauthor:: Katie Bouman <[email protected]>

"""
|
|
19a4c4364d1629cd6bfd7ca27ae4e6441f13747e | Make mygmm a module | __init__.py | __init__.py | Python | 0.000018 | @@ -0,0 +1,26 @@
+from .mygmm.mygmm import *
|
|
a7f4d96becfd1a58794a4dbedb9e9c8f6ac8c1a6 | Create acceptor.py | acceptor.py | acceptor.py | Python | 0.000001 | @@ -0,0 +1,818 @@
+#! /usr/bin/env python

import message

import logging

class Acceptor(message.MessageListener):
    def __init__(self, config, network):
        message.MessageListener.__init__(self,
            name = 'AcceptorListenser',
            mapping = {
                message.MSG_PROPOSAL_REQ : self.on_proposal_request,
                message.MSG_ACCEPT_REQ : self.on_accept_request
            })

        self.network = network
        self.config = config
        self.promised_id = 0
        self.accepted_id = 0
        self.accepted_values= []

    def on_proposal_request(self, pkg, msg):
        logging.debug('process proposal request')
        return False

    def on_accept_request(self, pkg, msg):
        logging.debug('process accept request')
        return False
|
|
5bb387947ac13bcd3949c6b17839033231c05e2d | Add unittests for cupy.testing.array | tests/cupy_tests/testing_tests/test_array.py | tests/cupy_tests/testing_tests/test_array.py | Python | 0.000001 | @@ -0,0 +1,2670 @@
+import copy
import unittest

import numpy
import six

import cupy
from cupy import testing


@testing.parameterize(
    *testing.product({
        'assertion': ['assert_allclose', 'assert_array_almost_equal',
                      'assert_array_almost_equal_nulp',
                      'assert_array_max_ulp', 'assert_array_equal'],
        'array_module_x': [numpy, cupy],
        'array_module_y': [numpy, cupy]
    })
)
@testing.gpu
class TestEqualityAssertion(unittest.TestCase):

    def setUp(self):
        self.assertion = getattr(testing, self.assertion)
        val = numpy.random.uniform(-1, 1, (2, 3))
        self.x = self.array_module_x.array(val, val.dtype, copy=True)
        self.y = self.array_module_y.array(val, val.dtype, copy=True)

    def test_equality(self):
        self.assertion(self.x, self.y)

    def test_inequality(self):
        self.y += 1
        with self.assertRaises(AssertionError):
            self.assertion(self.x, self.y)


def _convert_array(xs, array_module):
    if array_module == 'all_numpy':
        return xs
    elif array_module == 'all_cupy':
        return cupy.asarray(xs)
    else:
        return [cupy.asarray(x) if numpy.random.random_integers(0, 1)
                else x for x in xs]


@testing.parameterize(
    *testing.product({
        'array_module_x': ['all_numpy', 'all_cupy', 'random'],
        'array_module_y': ['all_numpy', 'all_cupy', 'random']
    })
)
@testing.gpu
class TestListEqualityAssertion(unittest.TestCase):

    def setUp(self):
        xs = [numpy.random.uniform(-1, 1, (2, 3)) for _ in six.moves.range(10)]
        ys = copy.deepcopy(xs)
        self.xs = _convert_array(xs, self.array_module_x)
        self.ys = _convert_array(ys, self.array_module_y)

    def test_equality_numpy(self):
        testing.assert_array_list_equal(self.xs, self.ys)

    def test_inequality_numpy(self):
        self.xs[0] += 1
        with self.assertRaises(AssertionError):
            testing.assert_array_list_equal(self.xs, self.ys)


@testing.parameterize(
    *testing.product({
        'array_module_x': [numpy, cupy],
        'array_module_y': [numpy, cupy]
    })
)
@testing.gpu
class TestLessAssertion(unittest.TestCase):

    def setUp(self):
        val = numpy.random.uniform(-1, 1, (2, 3))
        self.x = self.array_module_x.array(val, val.dtype, copy=True)
        self.y = self.array_module_y.array(val + 1, val.dtype, copy=True)

    def test_equality_numpy(self):
        testing.assert_array_less(self.x, self.y)

    def test_inequality_numpy(self):
        self.x[0] += 100
        with self.assertRaises(AssertionError):
            testing.assert_array_less(self.x, self.y)
|
|
dafa89d52bff2bef6768160af306ab40173e8a8e | Replace get_user_profile_by_email with example_user. | zerver/tests/test_outgoing_webhook_system.py | zerver/tests/test_outgoing_webhook_system.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import mock
from typing import Any, Union, Mapping, Callable
from zerver.lib.test_helpers import get_user_profile_by_email
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import (
    get_realm_by_email_domain,
    UserProfile,
    Recipient,
    Service,
)
from zerver.lib.outgoing_webhook import do_rest_call
from zerver.lib.actions import do_create_user

import requests

rest_operation = {'method': "POST",
                  'relative_url_path': "",
                  'request_kwargs': {},
                  'base_url': ""}


class ResponseMock(object):
    def __init__(self, status_code, data, content):
        # type: (int, Any, str) -> None
        self.status_code = status_code
        self.data = data
        self.content = content


def request_exception_error(http_method, final_url, data, **request_kwargs):
    # type: (Any, Any, Any, Any) -> Any
    raise requests.exceptions.RequestException


def timeout_error(http_method, final_url, data, **request_kwargs):
    # type: (Any, Any, Any, Any) -> Any
    raise requests.exceptions.Timeout


class DoRestCallTests(ZulipTestCase):

    @mock.patch('zerver.lib.outgoing_webhook.succeed_with_message')
    def test_successful_request(self, mock_succeed_with_message):
        # type: (mock.Mock) -> None
        response = ResponseMock(200, {"message": "testing"}, '')
        with mock.patch('requests.request', return_value=response):
            do_rest_call(rest_operation, None, None)
        self.assertTrue(mock_succeed_with_message.called)

    @mock.patch('zerver.lib.outgoing_webhook.request_retry')
    def test_retry_request(self, mock_request_retry):
        # type: (mock.Mock) -> None
        response = ResponseMock(500, {"message": "testing"}, '')
        with mock.patch('requests.request', return_value=response):
            do_rest_call(rest_operation, None, None)
        self.assertTrue(mock_request_retry.called)

    @mock.patch('zerver.lib.outgoing_webhook.fail_with_message')
    def test_fail_request(self, mock_fail_with_message):
        # type: (mock.Mock) -> None
        response = ResponseMock(400, {"message": "testing"}, '')
        with mock.patch('requests.request', return_value=response):
            do_rest_call(rest_operation, None, None)
        self.assertTrue(mock_fail_with_message.called)

    @mock.patch('logging.info')
    @mock.patch('requests.request', side_effect=timeout_error)
    @mock.patch('zerver.lib.outgoing_webhook.request_retry')
    def test_timeout_request(self, mock_request_retry, mock_requests_request, mock_logger):
        # type: (mock.Mock, mock.Mock, mock.Mock) -> None
        do_rest_call(rest_operation, {"command": "", "service_name": ""}, None)
        self.assertTrue(mock_request_retry.called)

    @mock.patch('logging.exception')
    @mock.patch('requests.request', side_effect=request_exception_error)
    @mock.patch('zerver.lib.outgoing_webhook.fail_with_message')
    def test_request_exception(self, mock_fail_with_message, mock_requests_request, mock_logger):
        # type: (mock.Mock, mock.Mock, mock.Mock) -> None
        do_rest_call(rest_operation, {"command": ""}, None)
        self.assertTrue(mock_fail_with_message.called)


class TestMentionMessageTrigger(ZulipTestCase):

    def check_values_passed(self, queue_name, trigger_event, x):
        # type: (Any, Union[Mapping[Any, Any], Any], Callable[[Any], None]) -> None
        self.assertEqual(queue_name, "outgoing_webhooks")
        self.assertEqual(trigger_event['user_profile_id'], self.bot_profile.id)
        self.assertEqual(trigger_event['trigger'], "mention")
        self.assertEqual(trigger_event["message"]["sender_email"], self.user_profile.email)
        self.assertEqual(trigger_event["message"]["content"], self.content)
        self.assertEqual(trigger_event["message"]["type"], Recipient._type_names[Recipient.STREAM])
        self.assertEqual(trigger_event["message"]["display_recipient"], "Denmark")

    @mock.patch('zerver.lib.actions.queue_json_publish')
    def test_mention_message_event_flow(self, mock_queue_json_publish):
        # type: (mock.Mock) -> None
        self.user_profile = get_user_profile_by_email("[email protected]")
        self.bot_profile = do_create_user(email="[email protected]",
                                          password="test",
                                          realm=get_realm_by_email_domain("zulip.com"),
                                          full_name="FooBot",
                                          short_name="foo-bot",
                                          bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
                                          bot_owner=self.user_profile)
        self.content = u'@**FooBot** foo bar!!!'
        mock_queue_json_publish.side_effect = self.check_values_passed

        # TODO: In future versions this won't be required
        self.subscribe_to_stream(self.bot_profile.email, "Denmark")
        self.send_message(self.user_profile.email, "Denmark", Recipient.STREAM, self.content)
        self.assertTrue(mock_queue_json_publish.called)
| Python | 0 | @@ -161,70 +161,8 @@
 le
 
-from zerver.lib.test_helpers import get_user_profile_by_email
 from
@@ -4199,52 +4199,34 @@
 e =
-get_user_profile_by_email("[email protected]
+self.example_user("othello
 ")
|
6891c9e635cbe9ba663ac7f72bdff653bb8c8220 | make sure we can call commit | netforce_general/netforce_general/controllers/root.py | netforce_general/netforce_general/controllers/root.py | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.controller import Controller
from netforce import config
from netforce.database import get_connection
from netforce import access
from netforce.model import get_model
class Root(Controller):
_path="/"
def get(self):
url=None
db=get_connection()
try:
if db:
res=db.get("SELECT root_url FROM settings WHERE id=1")
url=res.root_url
if url:
self.redirect(url)
return
user_id=access.get_active_user()
action=None
if user_id:
user=get_model("base.user").browse(user_id)
profile=user.profile_id
action=profile.home_action
if action:
self.redirect("/ui#name=%s"%action)
return
self.redirect("/ui#name=login")
finally:
db.commit()
Root.register()
| Python | 0 | @@ -1992,16 +1992,39 @@
 inally:
+            if db:
|
21d7e6f83f34e66167d7452998f2c7622a90e46c | Create test_parser.py | test_parser.py | test_parser.py | Python | 0.000008 | @@ -0,0 +1,1173 @@
+import os%0Aimport csv%0Aimport json%0Aimport collections%0Afrom collections import defaultdict%0A%0Afilename = %22C:/Users/zeffi/Documents/Export_482016.csv%22%0Asome_dict = defaultdict(list)%0A%0Adef sanedate(date):%0A MM, DD, YYYY = date.split('/')%0A return '/'.join(%5BDD, MM, YYYY%5D)%0A%0Adef formatted_time(gtime):%0A HH, MM, SS = gtime.split(':')%0A return ':'.join(%5BHH, MM%5D)%0A%0Adef open_csv_test(filename):%0A #csvfile = open(filename, 'r', encoding='ISO-8859-15', newline='')%0A csvfile = open(filename, 'r', newline='')%0A ofile = csv.reader(csvfile, delimiter=',')%0A%0A # skip the first 7 lines (OneTouch uses an odd csv format)%0A for i in range(6):%0A next(ofile)%0A%0A for row in ofile:%0A try:%0A print(row)%0A date, gtime, gvalue = row%5B1:4%5D%0A date = date + '__' + sanedate(date)%0A gtime = formatted_time(gtime)%0A some_dict%5Bdate%5D.append(%7B'time': gtime, 'value': float(gvalue)%7D)%0A except:%0A print(%22failed at%22)%0A print(row)%0A%0A with open('C:/Users/zeffi/Documents/some_constructed.json', 'w') as wfile:%0A wfile.write(json.dumps(some_dict, sort_keys=True, indent=4))%0A%0A%0A%0Aopen_csv_test(filename)%0A
|
|
0c49c3dcd168e01512deb72bfbeed1438430abe4 | remove duplicate error messages before displaying, issue 486 | src/robotide/context/logger.py | src/robotide/context/logger.py | # Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import wx
class Logger(object):
empty_suite_init_file_warn = re.compile("Test suite directory initialization "
"file '.*' contains no test data.")
def __init__(self):
self._messages = []
def report_parsing_errors(self):
errors = [m[0] for m in self._messages]
if errors:
dlg = ErrorMessageDialog('Parsing errors',
'\n'.join(self._format_parsing_error_line(line)
for line in errors))
dlg.ShowModal()
dlg.Destroy()
self._messages = []
def _format_parsing_error_line(self, line):
if ':' not in line:
return line
index = line.index(':') + 1
return line[:index] + '\n\t' + line[index:]
def warn(self, msg=''):
self._write(msg, 'WARN')
def error(self, msg=''):
self._write(msg, 'ERROR')
def message(self, msg):
message, level = msg.message, msg.level.upper()
if self._is_logged(level):
self._messages.append((message, level))
def _write(self, msg, level):
level = level.upper()
if self._is_logged(level) and not self._is_ignored_warning(msg):
self._show_message(msg, level)
def _is_logged(self, level):
return level.upper() in ['ERROR', 'WARN']
def _is_ignored_warning(self, msg):
return self.empty_suite_init_file_warn.search(msg)
def _show_message(self, msg, level):
try:
icon = level == 'ERROR' and wx.ICON_ERROR or wx.ICON_WARNING
wx.MessageBox(msg, level, icon)
except wx.PyNoAppError:
sys.stderr.write('%s: %s\n' % (level, msg))
class ErrorMessageDialog(wx.Dialog):
def __init__(self, title, message):
wx.Dialog.__init__(self, None, size=(700, 400), title=title,
style=wx.DEFAULT_FRAME_STYLE)
area = wx.TextCtrl(self, size=(700,400), style=wx.TE_MULTILINE|wx.TE_DONTWRAP|wx.TE_READONLY)
area.SetValue(message)
| Python | 0 | @@ -975,16 +975,180 @@
errors:%0A
+ # Warnings from robot.variables.Variables.set_from_variable_table%0A # are present multiple times, issue 486.%0A errors = set(errors)%0A
|
fabf4e8bd93155101d459716b35c10b32a3dfd16 | add tests/utils.py | tests/utils.py | tests/utils.py | Python | 0.000001 | @@ -0,0 +1,1613 @@
+import sys%0D%0Aimport yappi%0D%0Aimport unittest%0D%0A%0D%0Aclass YappiUnitTestCase(unittest.TestCase):%0D%0A def setUp(self):%0D%0A if yappi.is_running():%0D%0A yappi.stop()%0D%0A yappi.clear_stats()%0D%0A yappi.set_clock_type('cpu') # reset to default clock type%0D%0A%0D%0A def tearDown(self):%0D%0A fstats = yappi.get_func_stats()%0D%0A if not fstats._debug_check_sanity():%0D%0A sys.stdout.write(%22ERR: Duplicates found in Func stats%5Cr%5Cn%22)%0D%0A %0D%0A fstats.debug_print()%0D%0A for fstat in fstats:%0D%0A if not fstat.children._debug_check_sanity():%0D%0A sys.stdout.write(%22ERR: Duplicates found in ChildFunc stats%5Cr%5Cn%22)%0D%0A fstat.children.print_all()%0D%0A tstats = yappi.get_func_stats()%0D%0A if not tstats._debug_check_sanity():%0D%0A sys.stdout.write(%22ERR: Duplicates found in Thread stats%5Cr%5Cn%22)%0D%0A tstats.print_all()%0D%0A %0D%0Adef assert_raises_exception(func):%0D%0A try:%0D%0A _run(func)%0D%0A assert 0 == 1%0D%0A except:%0D%0A pass%0D%0A %0D%0Adef run_with_yappi(func, *args, **kwargs):%0D%0A yappi.start()%0D%0A func(*args, **kwargs)%0D%0A yappi.stop()%0D%0A%0D%0Adef run_and_get_func_stats(func, *args, **kwargs):%0D%0A run_with_yappi(func, *args, **kwargs)%0D%0A return yappi.get_func_stats()%0D%0A%0D%0Adef run_and_get_thread_stats(func, *args, **kwargs):%0D%0A run_with_yappi(func, *args, **kwargs)%0D%0A return yappi.get_thread_stats()%0D%0A %0D%0Adef is_py3x():%0D%0A return sys.version_info %3E (3, 0)%0D%0A %0D%0Adef find_stat_by_name(stats, name):%0D%0A for stat in stats:%0D%0A if stat.name == name:%0D%0A return stat%0D%0A
|
|
21a9ca4487d0d3ef9f2aa2ba5909b37c735c18e6 | Fix linter errors in test_tftrt.py | tensorflow/contrib/tensorrt/test/test_tftrt.py | tensorflow/contrib/tensorrt/test/test_tftrt.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import tensorflow.contrib.tensorrt as trt
import numpy as np
def getSimpleGraphDef():
    """Create a simple graph and return its graph_def"""
    g = tf.Graph()
    with g.as_default():
        A = tf.placeholder(dtype=tf.float32, shape=(None, 24, 24, 2), name="input")
        e = tf.constant(
            [[[[1., 0.5, 4., 6., 0.5, 1.], [1., 0.5, 1., 1., 0.5, 1.]]]],
            name="weights",
            dtype=tf.float32)
        conv = tf.nn.conv2d(
            input=A, filter=e, strides=[1, 2, 2, 1], padding="SAME", name="conv")
        b = tf.constant([4., 1.5, 2., 3., 5., 7.], name="bias", dtype=tf.float32)
        t = tf.nn.bias_add(conv, b, name="biasAdd")
        relu = tf.nn.relu(t, "relu")
        idty = tf.identity(relu, "ID")
        v = tf.nn.max_pool(
            idty, [1, 2, 2, 1], [1, 2, 2, 1], "VALID", name="max_pool")
        out = tf.squeeze(v, name="output")
    return g.as_graph_def()


def runGraph(gdef, dumm_inp):
    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.50)
    tf.reset_default_graph()
    g = tf.Graph()
    with g.as_default():
        inp, out = tf.import_graph_def(
            graph_def=gdef, return_elements=["input", "output"])
        inp = inp.outputs[0]
        out = out.outputs[0]
    with tf.Session(
            config=tf.ConfigProto(gpu_options=gpu_options), graph=g) as sess:
        val = sess.run(out, {inp: dumm_inp})
    return val


if "__main__" in __name__:
    inpDims = (100, 24, 24, 2)
    dummy_input = np.random.random_sample(inpDims)
    gdef = getSimpleGraphDef()
    trt_graph = trt.create_inference_graph(gdef, ["output"],
                                           inpDims[0])  # Get optimized graph
    o1 = runGraph(gdef, dummy_input)
    o2 = runGraph(trt_graph, dummy_input)
    assert (np.array_equal(o1, o2))
    print("Pass")
| Python | 0.000027 | @@ -839,16 +839,35 @@
nction%0A%0A
+import numpy as np%0A
import t
@@ -924,35 +924,16 @@
as trt%0A
-import numpy as np%0A
%0A%0Adef ge
@@ -929,36 +929,39 @@
rt%0A%0A%0Adef get
-S
+_s
imple
-G
+_g
raph
-D
+_d
ef():%0A %22%22%22C
@@ -1053,17 +1053,17 @@
():%0A
-A
+a
= tf.pl
@@ -1309,17 +1309,17 @@
input=
-A
+a
, filter
@@ -1662,22 +1662,16 @@
ol%22)%0A
- out =
tf.sque
@@ -1723,25 +1723,26 @@
()%0A%0A%0Adef run
-G
+_g
raph(gdef, d
@@ -2212,17 +2212,18 @@
_:%0A inp
-D
+_d
ims = (1
@@ -2280,17 +2280,18 @@
mple(inp
-D
+_d
ims)%0A g
@@ -2303,25 +2303,52 @@
get
-S
+_s
imple
-G
+_g
raph
-D
+_d
ef()%0A
+ # Get optimized graph%0A
tr
@@ -2405,84 +2405,21 @@
t%22%5D,
-%0A
inp
-D
+_d
ims%5B0%5D)
- # Get optimized graph
%0A o
@@ -2421,25 +2421,26 @@
)%0A o1 = run
-G
+_g
raph(gdef, d
@@ -2461,17 +2461,18 @@
o2 = run
-G
+_g
raph(trt
@@ -2501,17 +2501,16 @@
assert
-(
np.array
@@ -2523,17 +2523,16 @@
(o1, o2)
-)
%0A print
|
8cd02f550634ea7ae5f75531a49986e099ddf957 | Fix Markdown syntax of bulleted list. | tensorflow/python/ops/distributions/uniform.py | tensorflow/python/ops/distributions/uniform.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Uniform distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.util.tf_export import tf_export
@tf_export("distributions.Uniform")
class Uniform(distribution.Distribution):
"""Uniform distribution with `low` and `high` parameters.
#### Mathematical Details
The probability density function (pdf) is,
```none
pdf(x; a, b) = I[a <= x < b] / Z
Z = b - a
```
where:
* `low = a`,
* `high = b`,
* `Z` is the normalizing constant, and,
* `I[predicate]` is the [indicator function](
https://en.wikipedia.org/wiki/Indicator_function) for `predicate`.
The parameters `low` and `high` must be shaped in a way that supports
broadcasting (e.g., `high - low` is a valid operation).
#### Examples
```python
# Without broadcasting:
u1 = Uniform(low=3.0, high=4.0) # a single uniform distribution [3, 4]
u2 = Uniform(low=[1.0, 2.0],
high=[3.0, 4.0]) # 2 distributions [1, 3], [2, 4]
u3 = Uniform(low=[[1.0, 2.0],
[3.0, 4.0]],
high=[[1.5, 2.5],
[3.5, 4.5]]) # 4 distributions
```
```python
# With broadcasting:
u1 = Uniform(low=3.0, high=[5.0, 6.0, 7.0]) # 3 distributions
```
"""
def __init__(self,
low=0.,
high=1.,
validate_args=False,
allow_nan_stats=True,
name="Uniform"):
"""Initialize a batch of Uniform distributions.
Args:
low: Floating point tensor, lower boundary of the output interval. Must
have `low < high`.
high: Floating point tensor, upper boundary of the output interval. Must
have `low < high`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Raises:
InvalidArgumentError: if `low >= high` and `validate_args=False`.
"""
parameters = locals()
with ops.name_scope(name, values=[low, high]):
with ops.control_dependencies([
check_ops.assert_less(
low, high, message="uniform not defined when low >= high.")
] if validate_args else []):
self._low = array_ops.identity(low, name="low")
self._high = array_ops.identity(high, name="high")
check_ops.assert_same_float_dtype([self._low, self._high])
super(Uniform, self).__init__(
dtype=self._low.dtype,
reparameterization_type=distribution.FULLY_REPARAMETERIZED,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
parameters=parameters,
graph_parents=[self._low,
self._high],
name=name)
@staticmethod
def _param_shapes(sample_shape):
return dict(
zip(("low", "high"),
([ops.convert_to_tensor(sample_shape, dtype=dtypes.int32)] * 2)))
@property
def low(self):
"""Lower boundary of the output interval."""
return self._low
@property
def high(self):
"""Upper boundary of the output interval."""
return self._high
def range(self, name="range"):
"""`high - low`."""
with self._name_scope(name):
return self.high - self.low
def _batch_shape_tensor(self):
return array_ops.broadcast_dynamic_shape(
array_ops.shape(self.low),
array_ops.shape(self.high))
def _batch_shape(self):
return array_ops.broadcast_static_shape(
self.low.get_shape(),
self.high.get_shape())
def _event_shape_tensor(self):
return constant_op.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
def _sample_n(self, n, seed=None):
shape = array_ops.concat([[n], self.batch_shape_tensor()], 0)
samples = random_ops.random_uniform(shape=shape,
dtype=self.dtype,
seed=seed)
return self.low + self.range() * samples
def _log_prob(self, x):
return math_ops.log(self._prob(x))
def _prob(self, x):
broadcasted_x = x * array_ops.ones(self.batch_shape_tensor())
return array_ops.where(
math_ops.is_nan(broadcasted_x),
broadcasted_x,
array_ops.where(
math_ops.logical_or(broadcasted_x < self.low,
broadcasted_x >= self.high),
array_ops.zeros_like(broadcasted_x),
array_ops.ones_like(broadcasted_x) / self.range()))
def _log_cdf(self, x):
return math_ops.log(self.cdf(x))
def _cdf(self, x):
broadcast_shape = array_ops.broadcast_dynamic_shape(
array_ops.shape(x), self.batch_shape_tensor())
zeros = array_ops.zeros(broadcast_shape, dtype=self.dtype)
ones = array_ops.ones(broadcast_shape, dtype=self.dtype)
broadcasted_x = x * ones
result_if_not_big = array_ops.where(
x < self.low, zeros, (broadcasted_x - self.low) / self.range())
return array_ops.where(x >= self.high, ones, result_if_not_big)
def _entropy(self):
return math_ops.log(self.range())
def _mean(self):
return (self.low + self.high) / 2.
def _variance(self):
return math_ops.square(self.range()) / 12.
def _stddev(self):
return self.range() / math.sqrt(12.)
| Python | 0.999999 | @@ -1623,13 +1623,13 @@
here
-:%0A *
+%0A%0A -
%60lo
@@ -1638,17 +1638,17 @@
= a%60,%0A
-*
+-
%60high =
@@ -1654,17 +1654,17 @@
= b%60,%0A
-*
+-
%60Z%60 is
@@ -1696,13 +1696,12 @@
and
-,
%0A
-*
+-
%60I%5B
|
a333a5c15ffd2b775ad4d854c7accd32b898d2fb | Add encryptor_python3.py compatible with Python 3 | encryptor_python3.py | encryptor_python3.py | Python | 0.000669 | @@ -0,0 +1,346 @@
+from __future__ import print_function%0A%0A__author__ = 'Samuel Gratzl'%0A%0Aif __name__ == '__main__':%0A import uuid%0A import hashlib%0A%0A password = input('enter password: ').encode('utf-8')%0A salt = uuid.uuid4().hex.encode('utf-8')%0A hashed_password = hashlib.sha512(password + salt).hexdigest()%0A print(password)%0A print(salt)%0A print(hashed_password)%0A
|
|
a7b25e343623f41b0466c8cea852ecc07ffab359 | Create marsLanderLevelTwo.py | Codingame/Python/Medium/marsLanderLevelTwo.py | Codingame/Python/Medium/marsLanderLevelTwo.py | Python | 0.000045 | @@ -0,0 +1,1933 @@
+import sys%0Aimport math%0A%0A# Auto-generated code below aims at helping you parse%0A# the standard input according to the problem statement.%0A%0Asurface_n = int(input()) # the number of points used to draw the surface of Mars.%0Asurface = %5B%5D%0Afor i in range(surface_n):%0A # land_x: X coordinate of a surface point. (0 to 6999)%0A # land_y: Y coordinate of a surface point. By linking all the points together in a sequential fashion, you form the surface of Mars.%0A land_x, land_y = %5Bint(j) for j in input().split()%5D%0A surface.append(%5Bland_x,land_y%5D)%0A %0Aminx = 0%0Amaxx = 0%0Alast = 0%0A%0Afor x in range(1, len(surface)):%0A if surface%5Bx%5D%5B1%5D == surface%5Blast%5D%5B1%5D:%0A minx = last%0A maxx = x%0A last = x%0A%0Aly = surface%5Bminx%5D%5B1%5D%0Aminx = surface%5Bminx%5D%5B0%5D%0Amaxx = surface%5Bmaxx%5D%5B0%5D%0A%0A# game loop%0Awhile 1:%0A # h_speed: the horizontal speed (in m/s), can be negative.%0A # v_speed: the vertical speed (in m/s), can be negative.%0A # fuel: the quantity of remaining fuel in liters.%0A # rotate: the rotation angle in degrees (-90 to 90).%0A # power: the thrust power (0 to 4).%0A x, y, h_speed, v_speed, fuel, rotate, power = %5Bint(i) for i in input().split()%5D%0A if h_speed %3C -60 or h_speed %3E 60:%0A d = (-45,45)%5Bh_speed %3E 60%5D%0A p = 4%0A elif x %3C maxx and x %3E minx:%0A if h_speed %3C -20:%0A d = -60%0A p = 4%0A elif h_speed %3E 20:%0A d = 60%0A p = 4%0A else:%0A if maxx - x %3C 200 and h_speed %3E 0:%0A d = 15%0A elif minx - x %3E -200 and h_speed %3C 0:%0A d = -15%0A else:%0A d = 0%0A p = (3,4)%5Bmath.sqrt(v_speed**2+((y-ly)*2*(4-3.711))) %3C -38%5D%0A else:%0A d = (30,-30)%5Bx %3C minx%5D%0A p = 4%0A # Write an action using print%0A # To debug: print(%22Debug messages...%22, file=sys.stderr)%0A%0A # rotate power. rotate is the desired rotation angle. power is the desired thrust power.%0A print(d,%224%22)%0A
|
|
ac2f517f15816277dd808ac473c4581212b8e841 | add migration for meta | Seeder/www/migrations/0004_auto_20170223_1457.py | Seeder/www/migrations/0004_auto_20170223_1457.py | Python | 0 | @@ -0,0 +1,493 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.4 on 2017-02-23 14:57%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('www', '0003_auto_20170216_2204'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='topiccollection',%0A options=%7B'ordering': %5B'id'%5D, 'verbose_name': 'Topic collection', 'verbose_name_plural': 'Topic collections'%7D,%0A ),%0A %5D%0A
|
|
0a8af4a4f5e9fa711e9e4b1b14cc639d5ff166a0 | Create beta_dog_recommendation_system.py | Solutions/beta/beta_dog_recommendation_system.py | Solutions/beta/beta_dog_recommendation_system.py | Python | 0.000007 | @@ -0,0 +1,428 @@
+from itertools import takewhile%0A%0Adef find_similar_dogs(breed):%0A compare = dogs%5Bbreed%5D%0A scores = sorted((%0A %5B%0A dog,%0A sum(1 if q in compare else 0 for q in dogs%5Bdog%5D)%0A %5D%0A for dog in dogs if dog != breed%0A ), key = lambda x: x%5B1%5D, reverse=True)%0A max_score = scores%5B0%5D%5B1%5D%0A return %7Bs%5B0%5D for s in takewhile(lambda x: x%5B1%5D==max_score, scores)%7D%0A
|
|
8a293ddc633730a6c2323392b1ac9083e5a45ad4 | Create lora_test_recv.py | device/src/test/lora_test_recv.py | device/src/test/lora_test_recv.py | Python | 0.000002 | @@ -0,0 +1,1156 @@
+# lora_test_recv.py%0A#Communication module: LoRa.%0A#Communication method with gateway via LoRa.%0A#Uart port drive LoRa module.%0A#Parse JSON between device and gateway via LoRa channel.%0A#LoRa module: E32-TTL-100%0A#Pin specification:%0A#Module MCU%0A#M0(IN) %3C--%3E GPIO(X3)(OUT) #mode setting, can not hang%0A#M1(IN) %3C--%3E GPIO(X4)(OUT) #mode setting, can not hang%0A#RXD(IN) %3C--%3E X1(TX)(OUT) #UART4%0A#TXD(OUT) %3C--%3E X2(RX)(IN) #UART4%0A#AUX(OUT) %3C--%3E GPIO/INT(IN) #module status detecting%0A#VCC%0A#GND%0A%0A#Communication mode is 0, need to set M0 and M1 to 0.%0A%0A#JSON data format:%0A#%7BID:123,CMD:heartbeat,DATA:hello,SEQUENCE:123%7D%0A%0Afrom pyb import Pin%0Afrom pyb import UART %0Afrom pyb import Timer%0Aimport time%0A%0A#LED shining regularly(using timer) to indicate the program is running correctly%0Atim1 = Timer(1, freq=1)%0Atim1.callback(lambda t: pyb.LED(1).toggle())%0A%0AM0 = Pin('X3', Pin.OUT_PP)%0AM1 = Pin('X4', Pin.OUT_PP)%0AM0.low()%0AM1.low()%0A%0Au4 = UART(4,9600) %0Au4.init(9600, bits=8, parity=None, stop=1) %0Au4.write('%7BID:1,CMD:OnLine,DATA:TYPBoard1,SEQ:0%7D')%0A%0Aif __name__=='__main__':%0A while True:%0A len = u4.any()%0A if(len %3E 0): %0A print(u4.read())%0A%0A
|
|
d19a36fda0bfc9d221d65bde1612ff6181fca66d | add proposed setup.py file | setup.py | setup.py | Python | 0 | @@ -0,0 +1,299 @@
+from distutils.core import setup%0A%0Asetup(%0A name='vectortween',%0A version='0.0.1',%0A packages=%5B'vectortween'%5D,%0A url='',%0A license='MIT',%0A author='stefaan himpe',%0A author_email='[email protected]',%0A description='some tweening for use with libraries like gizeh and moviepy'%0A)%0A
|
|
581c21af4248429dfbdd507ffe072d6620f3b115 | Version bump. | setup.py | setup.py |
from setuptools import setup, find_packages
setup(
name="filebrowser_safe",
version="0.1.2",
description="A snapshot of the filebrowser_3 branch of django-filebrowser, "
"packaged as a dependency for the Mezzanine CMS for Django.",
long_description=open("README.rst").read(),
author="Patrick Kranzlmueller, Axel Swoboda (vonautomatisch)",
author_email="[email protected]",
maintainer="Stephen McDonald",
maintainer_email="[email protected]",
url="http://github.com/stephenmcd/filebrowser-safe",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
| Python | 0 | @@ -97,9 +97,9 @@
0.1.
-2
+3
%22,%0A
|
a57d39e7f63e6c034644a158aabb5ff6e6f04ae9 | add response test to testing module | oct/testing/response.py | oct/testing/response.py | Python | 0 | @@ -0,0 +1,499 @@
+# This file is fit for containing basic response status check%0A# All functions have to take a response object in param%0A%0A%0Adef check_response_status(resp, status):%0A %22%22%22%0A This will check is the response_code is equal to the status%0A%0A :param resp: a response object%0A :param status: the expected status%0A :type status: int%0A :return: None%0A :raise: AssertionError%0A %22%22%22%0A assert(resp.code == status), %22Bad Response: HTTP %25s, expected %25s, URL : %25s%22 %25 (resp.code, status, resp.geturl())
|
|
f37bcfdae9bfc14bacccdcba325d2b8fb1284d32 | set keystone user name to user's email address | planetstack/observer/steps/sync_user_deployments.py | planetstack/observer/steps/sync_user_deployments.py | import os
import base64
import hashlib
from collections import defaultdict
from django.db.models import F, Q
from planetstack.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.site import SiteDeployments, Deployment
from core.models.user import User, UserDeployments
from util.logger import Logger, logging
logger = Logger(level=logging.INFO)
class SyncUserDeployments(OpenStackSyncStep):
provides=[User, UserDeployments]
requested_interval=0
def fetch_pending(self):
# user deployments are not visible to users. We must ensure
        # users are deployed at all deployments available to their sites.
deployments = Deployment.objects.all()
site_deployments = SiteDeployments.objects.all()
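        # map each site to its deployments so we can tell where each
        # user account is expected to exist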
site_deploy_lookup = defaultdict(list)
for site_deployment in site_deployments:
site_deploy_lookup[site_deployment.site].append(site_deployment.deployment)
user_deployments = UserDeployments.objects.all()
user_deploy_lookup = defaultdict(list)
for user_deployment in user_deployments:
user_deploy_lookup[user_deployment.user].append(user_deployment.deployment)
for user in User.objects.all():
if user.is_admin:
# admins should have an account at all deployments
expected_deployments = deployments
else:
# normal users should have an account at their site's deployments
expected_deployments = site_deploy_lookup[user.site]
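            # create any missing user deployment records so they are picked
            # up by the enacted/updated query below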
for expected_deployment in expected_deployments:
if expected_deployment not in user_deploy_lookup[user]:
ud = UserDeployments(user=user, deployment=expected_deployment)
ud.save()
        # now we can return all user deployments that need to be enacted
return UserDeployments.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, user_deployment):
logger.info("sync'ing user %s at deployment %s" % (user_deployment.user, user_deployment.deployment.name))
name = user_deployment.user.email[:user_deployment.user.email.find('@')]
user_fields = {'name': name,
'email': user_deployment.user.email,
'password': hashlib.md5(user_deployment.user.password).hexdigest()[:6],
'enabled': True}
driver = self.driver.admin_driver(deployment=user_deployment.deployment.name)
if not user_deployment.kuser_id:
keystone_user = driver.create_user(**user_fields)
user_deployment.kuser_id = keystone_user.id
else:
driver.update_user(user_deployment.kuser_id, user_fields)
# setup user deployment site roles
if user_deployment.user.site:
site_deployments = SiteDeployments.objects.filter(site=user_deployment.user.site,
deployment=user_deployment.deployment)
if site_deployments:
# need the correct tenant id for site at the deployment
tenant_id = site_deployments[0].tenant_id
driver.add_user_role(user_deployment.kuser_id,
tenant_id, 'user')
if user_deployment.user.is_admin:
driver.add_user_role(user_deployment.kuser_id, tenant_id, 'admin')
else:
# may have admin role so attempt to remove it
driver.delete_user_role(user_deployment.kuser_id, tenant_id, 'admin')
if user_deployment.user.public_key:
            user_driver = driver.client_driver(caller=user_deployment.user,
                                               tenant=user_deployment.user.site.login_base,
                                               deployment=user_deployment.deployment.name)
key_fields = {'name': user_deployment.user.keyname,
'public_key': user_deployment.user.public_key}
user_driver.create_keypair(**key_fields)
user_deployment.save()
| Python | 0.000008 | @@ -2256,20 +2256,42 @@
'name':
-name
+user_deployment.user.email
,%0A
|
2f1b12a6f173c01f9631d0ad5a4d3c3f411983cb | add file notification platform | homeassistant/components/notify/file.py | homeassistant/components/notify/file.py | Python | 0 | @@ -0,0 +1,2841 @@
+%22%22%22%0Ahomeassistant.components.notify.file%0A~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~%0A%0AFile notification service.%0A%0AConfiguration:%0A%0ATo use the File notifier you will need to add something like the following%0Ato your config/configuration.yaml%0A%0Anotify:%0A platform: file%0A path: PATH_TO_FILE%0A filename: FILENAME%0A timestamp: 1 or 0%0A%0AVariables:%0A%0Apath%0A*Required%0APath to the directory that contains your file. You need to have write%0Apermission for that directory. The directory will be created if it doesn't%0Aexist.%0A%0Afilename%0A*Required%0AName of the file to use. The file will be created if it doesn't exist.%0A%0Adate%0A*Required%0AAdd a timestamp to the entry, valid entries are 1 or 0.%0A%22%22%22%0Aimport logging%0Afrom pathlib import (Path, PurePath)%0A%0Aimport homeassistant.util.dt as dt_util%0Afrom homeassistant.helpers import validate_config%0Afrom homeassistant.components.notify import (%0A DOMAIN, ATTR_TITLE, BaseNotificationService)%0A%0A_LOGGER = logging.getLogger(__name__)%0A%0A%0Adef get_service(hass, config):%0A %22%22%22 Get the file notification service. %22%22%22%0A%0A if not validate_config(config,%0A %7BDOMAIN: %5B'path',%0A 'filename',%0A 'timestamp'%5D%7D,%0A _LOGGER):%0A return None%0A%0A path = config%5BDOMAIN%5D%5B'path'%5D%0A filename = config%5BDOMAIN%5D%5B'filename'%5D%0A filepath = Path(path, filename)%0A%0A # pylint: disable=no-member%0A if not filepath.parent.exists():%0A try:%0A filepath.parent.mkdir(parents=True)%0A filepath.touch(mode=0o644, exist_ok=True)%0A except:%0A _LOGGER.exception(%22No write permission to given location.%22)%0A # raise PermissionError('') from None%0A # raise FileNotFoundError('') from None%0A return None%0A%0A return FileNotificationService(filepath, config%5BDOMAIN%5D%5B'timestamp'%5D)%0A%0A%0A# pylint: disable=too-few-public-methods%0Aclass FileNotificationService(BaseNotificationService):%0A %22%22%22 Implements notification service for the File service. %22%22%22%0A%0A # pylint: disable=no-member%0A def __init__(self, filepath, add_timestamp):%0A self._filepath = str(PurePath(filepath))%0A self._add_timestamp = add_timestamp%0A%0A def send_message(self, message=%22%22, **kwargs):%0A %22%22%22 Send a message to a file. %22%22%22%0A%0A file = open(self._filepath, 'a')%0A if not Path(self._filepath).stat().st_size:%0A title = '%7B%7D notifications (Log started: %7B%7D)%5Cn%7B%7D%5Cn'.format(%0A kwargs.get(ATTR_TITLE),%0A dt_util.strip_microseconds(dt_util.utcnow()),%0A '-'*80)%0A file.write(title)%0A%0A if self._add_timestamp == 1:%0A text = '%7B%7D %7B%7D%5Cn'.format(dt_util.utcnow(), message)%0A file.write(text)%0A else:%0A text = '%7B%7D%5Cn'.format(message)%0A file.write(text)%0A%0A file.close()%0A
|
|
64b4c8f43a501f005f60a3bf3159d29722af3d94 | Fix deprecation warning (#19882) | homeassistant/helpers/aiohttp_client.py | homeassistant/helpers/aiohttp_client.py | """Helper for aiohttp webclient stuff."""
import asyncio
import sys
import aiohttp
from aiohttp.hdrs import USER_AGENT, CONTENT_TYPE
from aiohttp import web
from aiohttp.web_exceptions import HTTPGatewayTimeout, HTTPBadGateway
import async_timeout
from homeassistant.core import callback
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE, __version__
from homeassistant.loader import bind_hass
from homeassistant.util import ssl as ssl_util
DATA_CONNECTOR = 'aiohttp_connector'
DATA_CONNECTOR_NOTVERIFY = 'aiohttp_connector_notverify'
DATA_CLIENTSESSION = 'aiohttp_clientsession'
DATA_CLIENTSESSION_NOTVERIFY = 'aiohttp_clientsession_notverify'
SERVER_SOFTWARE = 'HomeAssistant/{0} aiohttp/{1} Python/{2[0]}.{2[1]}'.format(
__version__, aiohttp.__version__, sys.version_info)
@callback
@bind_hass
def async_get_clientsession(hass, verify_ssl=True):
"""Return default aiohttp ClientSession.
This method must be run in the event loop.
"""
if verify_ssl:
key = DATA_CLIENTSESSION
else:
key = DATA_CLIENTSESSION_NOTVERIFY
if key not in hass.data:
hass.data[key] = async_create_clientsession(hass, verify_ssl)
return hass.data[key]
@callback
@bind_hass
def async_create_clientsession(hass, verify_ssl=True, auto_cleanup=True,
**kwargs):
"""Create a new ClientSession with kwargs, i.e. for cookies.
If auto_cleanup is False, you need to call detach() after the session
returned is no longer used. Default is True, the session will be
automatically detached on homeassistant_stop.
This method must be run in the event loop.
"""
connector = _async_get_connector(hass, verify_ssl)
clientsession = aiohttp.ClientSession(
loop=hass.loop,
connector=connector,
headers={USER_AGENT: SERVER_SOFTWARE},
**kwargs
)
if auto_cleanup:
_async_register_clientsession_shutdown(hass, clientsession)
return clientsession
@bind_hass
async def async_aiohttp_proxy_web(hass, request, web_coro,
buffer_size=102400, timeout=10):
"""Stream websession request to aiohttp web response."""
try:
with async_timeout.timeout(timeout, loop=hass.loop):
req = await web_coro
except asyncio.CancelledError:
# The user cancelled the request
return
except asyncio.TimeoutError as err:
# Timeout trying to start the web request
raise HTTPGatewayTimeout() from err
except aiohttp.ClientError as err:
# Something went wrong with the connection
raise HTTPBadGateway() from err
try:
return await async_aiohttp_proxy_stream(
hass,
request,
req.content,
req.headers.get(CONTENT_TYPE)
)
finally:
req.close()
@bind_hass
async def async_aiohttp_proxy_stream(hass, request, stream, content_type,
buffer_size=102400, timeout=10):
"""Stream a stream to aiohttp web response."""
response = web.StreamResponse()
response.content_type = content_type
await response.prepare(request)
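    # relay the stream in buffer_size chunks, bounding each read by timeout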
try:
while True:
with async_timeout.timeout(timeout, loop=hass.loop):
data = await stream.read(buffer_size)
if not data:
break
await response.write(data)
except (asyncio.TimeoutError, aiohttp.ClientError):
# Something went wrong fetching data, closed connection
pass
return response
@callback
def _async_register_clientsession_shutdown(hass, clientsession):
"""Register ClientSession close on Home Assistant shutdown.
This method must be run in the event loop.
"""
@callback
def _async_close_websession(event):
"""Close websession."""
clientsession.detach()
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_CLOSE, _async_close_websession)
@callback
def _async_get_connector(hass, verify_ssl=True):
"""Return the connector pool for aiohttp.
This method must be run in the event loop.
"""
key = DATA_CONNECTOR if verify_ssl else DATA_CONNECTOR_NOTVERIFY
if key in hass.data:
return hass.data[key]
if verify_ssl:
ssl_context = ssl_util.client_context()
else:
ssl_context = False
connector = aiohttp.TCPConnector(loop=hass.loop, ssl=ssl_context)
hass.data[key] = connector
@callback
def _async_close_connector(event):
"""Close connector pool."""
connector.close()
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_CLOSE, _async_close_connector)
return connector
| Python | 0 | @@ -4468,37 +4468,29 @@
nector%0A%0A
-@callback%0A
+async
def _async_
@@ -4549,32 +4549,38 @@
ool.%22%22%22%0A
+await
connector.close(
|
a9893fc562c9131fdaebaa842f587f415b7fdfda | Add second test. | oommfmif/test_basics.py | oommfmif/test_basics.py | import oommfmif as o
def test_get_oommf_version():
assert isinstance(o.get_version(), str)
| Python | 0.000007 | @@ -42,16 +42,28 @@
_version
+_return_type
():%0A
@@ -102,8 +102,82 @@
), str)%0A
+%0A%0Adef test_get_oommf_version():%0A assert o.get_version()%5B0:4%5D == %221.2.%22%0A
|
99389c1f863592c8c56c8dca415155536abbd0fd | Create new.py | simple_mqtt/new.py | simple_mqtt/new.py | Python | 0.000001 | @@ -0,0 +1 @@
+%0A
|
|
0ec30eb8bcf0e7688182f827bea24fd0ceb33501 | add models | models.py | models.py | Python | 0 | @@ -0,0 +1,323 @@
+from peewee import *%0Afrom config import db%0A%0Aclass BaseModel(Model):%0A class Meta:%0A database = db%0A%0Aclass HistoricalTrainPosition(BaseModel):%0A cars = IntegerField()%0A line_code = CharField()%0A next_station = CharField()%0A dest_station = CharField()%0A time = IntegerField()%0A timestamp = DateTimeField()%0A
|
|
929dd804dbbd5935910cc434f5646da3de8a3a94 | Fix LogCounter on py26 | tornado/test/runtests.py | tornado/test/runtests.py | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function, with_statement
import gc
import locale # system locale module, not tornado.locale
import logging
import operator
import textwrap
import sys
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop
from tornado.netutil import Resolver
from tornado.options import define, options, add_parse_callback
from tornado.test.util import unittest
try:
reduce # py2
except NameError:
from functools import reduce # py3
TEST_MODULES = [
'tornado.httputil.doctests',
'tornado.iostream.doctests',
'tornado.util.doctests',
'tornado.test.auth_test',
'tornado.test.concurrent_test',
'tornado.test.curl_httpclient_test',
'tornado.test.escape_test',
'tornado.test.gen_test',
'tornado.test.httpclient_test',
'tornado.test.httpserver_test',
'tornado.test.httputil_test',
'tornado.test.import_test',
'tornado.test.ioloop_test',
'tornado.test.iostream_test',
'tornado.test.locale_test',
'tornado.test.netutil_test',
'tornado.test.log_test',
'tornado.test.options_test',
'tornado.test.process_test',
'tornado.test.simple_httpclient_test',
'tornado.test.stack_context_test',
'tornado.test.tcpclient_test',
'tornado.test.template_test',
'tornado.test.testing_test',
'tornado.test.twisted_test',
'tornado.test.util_test',
'tornado.test.web_test',
'tornado.test.websocket_test',
'tornado.test.wsgi_test',
]
def all():
return unittest.defaultTestLoader.loadTestsFromNames(TEST_MODULES)
class TornadoTextTestRunner(unittest.TextTestRunner):
def run(self, test):
result = super(TornadoTextTestRunner, self).run(test)
if result.skipped:
skip_reasons = set(reason for (test, reason) in result.skipped)
self.stream.write(textwrap.fill(
"Some tests were skipped because: %s" %
", ".join(sorted(skip_reasons))))
self.stream.write("\n")
return result
class LogCounter(logging.Filter):
"""Counts the number of WARNING or higher log records."""
def __init__(self, *args, **kwargs):
super(LogCounter, self).__init__(*args, **kwargs)
self.warning_count = self.error_count = 0
def filter(self, record):
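        # tally WARNING and ERROR records without suppressing them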
if record.levelno >= logging.ERROR:
self.error_count += 1
elif record.levelno >= logging.WARNING:
self.warning_count += 1
return True
def main():
# The -W command-line option does not work in a virtualenv with
# python 3 (as of virtualenv 1.7), so configure warnings
# programmatically instead.
import warnings
# Be strict about most warnings. This also turns on warnings that are
# ignored by default, including DeprecationWarnings and
# python 3.2's ResourceWarnings.
warnings.filterwarnings("error")
# setuptools sometimes gives ImportWarnings about things that are on
# sys.path even if they're not being used.
warnings.filterwarnings("ignore", category=ImportWarning)
# Tornado generally shouldn't use anything deprecated, but some of
# our dependencies do (last match wins).
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("error", category=DeprecationWarning,
module=r"tornado\..*")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("error", category=PendingDeprecationWarning,
module=r"tornado\..*")
# The unittest module is aggressive about deprecating redundant methods,
# leaving some without non-deprecated spellings that work on both
# 2.7 and 3.2
warnings.filterwarnings("ignore", category=DeprecationWarning,
message="Please use assert.* instead")
# unittest2 0.6 on py26 reports these as PendingDeprecationWarnings
# instead of DeprecationWarnings.
warnings.filterwarnings("ignore", category=PendingDeprecationWarning,
message="Please use assert.* instead")
logging.getLogger("tornado.access").setLevel(logging.CRITICAL)
define('httpclient', type=str, default=None,
callback=lambda s: AsyncHTTPClient.configure(
s, defaults=dict(allow_ipv6=False)))
define('ioloop', type=str, default=None)
define('ioloop_time_monotonic', default=False)
define('resolver', type=str, default=None,
callback=Resolver.configure)
define('debug_gc', type=str, multiple=True,
help="A comma-separated list of gc module debug constants, "
"e.g. DEBUG_STATS or DEBUG_COLLECTABLE,DEBUG_OBJECTS",
callback=lambda values: gc.set_debug(
reduce(operator.or_, (getattr(gc, v) for v in values))))
define('locale', type=str, default=None,
callback=lambda x: locale.setlocale(locale.LC_ALL, x))
def configure_ioloop():
kwargs = {}
if options.ioloop_time_monotonic:
from tornado.platform.auto import monotonic_time
if monotonic_time is None:
raise RuntimeError("monotonic clock not found")
kwargs['time_func'] = monotonic_time
if options.ioloop or kwargs:
IOLoop.configure(options.ioloop, **kwargs)
add_parse_callback(configure_ioloop)
log_counter = LogCounter()
add_parse_callback(
lambda: logging.getLogger().handlers[0].addFilter(log_counter))
import tornado.testing
kwargs = {}
if sys.version_info >= (3, 2):
# HACK: unittest.main will make its own changes to the warning
# configuration, which may conflict with the settings above
# or command-line flags like -bb. Passing warnings=False
# suppresses this behavior, although this looks like an implementation
# detail. http://bugs.python.org/issue15626
kwargs['warnings'] = False
kwargs['testRunner'] = TornadoTextTestRunner
try:
tornado.testing.main(**kwargs)
finally:
# The tests should run clean; consider it a failure if they logged
# any warnings or errors. We'd like to ban info logs too, but
# we can't count them cleanly due to interactions with LogTrapTestCase.
if log_counter.warning_count > 0 or log_counter.error_count > 0:
logging.error("logged %d warnings and %d errors",
log_counter.warning_count, log_counter.error_count)
sys.exit(1)
if __name__ == '__main__':
main()
| Python | 0 | @@ -2204,31 +2204,103 @@
-super(LogCounter, self)
+# Can't use super() because logging.Filter is an old-style class in py26%0A logging.Filter
.__i
@@ -2305,16 +2305,22 @@
_init__(
+self,
*args, *
|
5c02d902753327b3413e994d6edc089b8ca72749 | Add create_flipper step | dbaas/workflow/steps/create_flipper.py | dbaas/workflow/steps/create_flipper.py | Python | 0.000001 | @@ -0,0 +1,1522 @@
+# -*- coding: utf-8 -*-%0Aimport logging%0Afrom base import BaseStep%0Afrom dbaas_flipper.provider import FlipperProvider%0A%0A%0ALOG = logging.getLogger(__name__)%0A%0A%0Aclass CreateFlipper(BaseStep):%0A%0A def __unicode__(self):%0A return %22Creating Flipper%22%0A%0A def do(self, workflow_dict):%0A try:%0A if workflow_dict%5B'qt'%5D==1:%0A return True%0A flipper = FlipperProvider()%0A LOG.info(%22Creating Flipper...%22)%0A flipper.create_flipper_dependencies(%0A masterpairname=workflow_dict%5B'names'%5D%5B'infra'%5D,%0A hostname1=workflow_dict%5B%0A 'hosts'%5D%5B0%5D.address,%0A writeip=workflow_dict%5B%0A 'databaseinfraattr'%5D%5B0%5D.ip,%0A readip=workflow_dict%5B%0A 'databaseinfraattr'%5D%5B1%5D.ip,%0A hostname2=workflow_dict%5B%0A 'hosts'%5D%5B1%5D.address,%0A environment=workflow_dict%5B'environment'%5D)%0A%0A return True%0A except Exception, e:%0A print e%0A return False%0A%0A def undo(self, workflow_dict):%0A try:%0A if workflow_dict%5B'qt'%5D==1:%0A return True%0A LOG.info(%22Destroying Flipper...%22)%0A FlipperProvider(%0A ).destroy_flipper_dependencies(masterpairname=workflow_dict%5B'databaseinfra'%5D.name,%0A environment=workflow_dict%5B'environment'%5D)%0A%0A return True%0A except Exception, e:%0A print e%0A return False%0A
|
|
cf99929c923cb31782a192f108c735bfcc9cde2f | Add render module, this will be the interface to manage rendering state files into high state data | salt/render.py | salt/render.py | Python | 0 | @@ -0,0 +1,340 @@
+'''%0ARender is a module used to parse the render files into high salt state data%0Astructures.%0A%0AThe render system uses render modules which are plugable interfaces under the%0Arender directory.%0A'''%0A# Import salt modules%0Aimport salt.loader%0A%0Aclass Render(object):%0A '''%0A Render state files.%0A '''%0A def __init__(self, opts):%0A pass%0A
|
|
c735935c983cc7ccd72b2c71733e6f785a8a3ae3 | Create urls.py | Assessment/urls.py | Assessment/urls.py | Python | 0.000017 | @@ -0,0 +1,252 @@
+from django.conf.urls import url%0A%0Afrom . import views%0A%0Aurlpatterns = %5B%0A url(r'%5EgetAssignmentByCode', views.getAssignmentByCode, name='getAssignmentByCode'),%0A url(r'%5EretrieveAssignments', views.retrieveAssignments, name='retrieveAssignments'),%0A%0A%5D%0A
|
|
83d4ac6c3565044727c9b3fcbada9966d529a80e | Add forgotten font leader lib | lib/font_loader.py | lib/font_loader.py | Python | 0 | @@ -0,0 +1,2129 @@
+import os%0Aimport sys%0Aimport logging%0A%0AFONT_FILE_NAME_LIST = (%0A %22fontawesome-webfont.ttf%22,%0A )%0A%0AFONT_DIRECTORY = %22share%22%0AFONT_DIRECTORY_SYSTEM = %22/usr/share/fonts%22%0AFONT_DIRECTORY_USER = os.path.join(os.environ%5B'HOME'%5D, %22.local/share/fonts%22)%0A%0Aclass FontLoader:%0A def __init__(self):%0A self.fonts_loaded = %5B%5D%0A self.logger = logging.getLogger('FontLoader')%0A%0A def load(self):%0A for font_file_name in FONT_FILE_NAME_LIST:%0A # check if font is in the project font directory%0A font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)%0A if not os.path.isfile(font_source_path):%0A raise IOError(%22Font '%7B%7D' not found in project directories%22.format(%0A font_file_name%0A ))%0A%0A # check if the font is installed at system level%0A if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):%0A self.logger.debug(%22Font '%7B%7D' found in system directory%22.format(%0A font_file_name%0A ))%0A%0A continue%0A%0A # check if the font is installed at user level%0A if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):%0A self.logger.debug(%22Font '%7B%7D' found in user directory%22.format(%0A font_file_name%0A ))%0A%0A continue%0A%0A # if the font is not installed%0A font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)%0A os.symlink(%0A os.path.join(os.getcwd(), font_source_path),%0A font_target_path%0A )%0A%0A self.fonts_loaded.append(font_target_path)%0A self.logger.debug(%22Font '%7B%7D' loaded in user directory: '%7B%7D'%22.format(%0A font_file_name,%0A font_target_path%0A ))%0A%0A def unload(self):%0A for font_path in self.fonts_loaded:%0A os.unlink(font_path)%0A self.logger.debug(%22Font '%7B%7D' unloaded%22.format(%0A font_path%0A ))%0A%0A self.fonts_loaded = %5B%5D%0A%0A%0A
|
|
d8f7cb58e7f760ccbb839aafeda4dbf7204d7d82 | Add r_latestagecapitalism | channels/r_latestagecapitalism/app.py | channels/r_latestagecapitalism/app.py | Python | 0.999492 | @@ -0,0 +1,157 @@
+#encoding:utf-8%0A%0Asubreddit = 'latestagecapitalism'%0At_channel = '@r_latestagecapitalism'%0A%0A%0Adef send_post(submission, r2t):%0Areturn r2t.send_simple(submission)%0A
|
|
151e8fc71e5ef2e31db13730bff57bc8fd915c30 | Add test case for list invoice | paystackapi/tests/test_invoice.py | paystackapi/tests/test_invoice.py | Python | 0.000001 | @@ -0,0 +1,1331 @@
+import httpretty%0A%0Afrom paystackapi.tests.base_test_case import BaseTestCase%0Afrom paystackapi.invoice import Invoice%0A%0A%0Aclass TestInvoice(BaseTestCase):%0A%0A @httpretty.activate%0A def test_create_invoice(self):%0A %22%22%22Method defined to test create Invoice.%22%22%22%0A httpretty.register_uri(%0A httpretty.POST,%0A self.endpoint_url(%22/paymentrequest%22),%0A content_type='text/json',%0A body='%7B%22status%22: true, %22message%22: %22Invoice created%22%7D',%0A status=201,%0A )%0A%0A response = Invoice.create(%0A customer=%22CUS_je02lbimlqixzax%22,%0A amount=42000,%0A due_date=%222019-05-08T00:00:00.000Z%22%0A )%0A self.assertTrue(response%5B'status'%5D)%0A%0A @httpretty.activate%0A def test_list_invoice(self):%0A %22%22%22Method defined to test list Invoice.%22%22%22%0A httpretty.register_uri(%0A httpretty.GET,%0A self.endpoint_url(%22/paymentrequest%22),%0A content_type='text/json',%0A body='%7B%22status%22: true, %22message%22: %22Invoice retrieved%22%7D',%0A status=201,%0A )%0A%0A response = Invoice.list(%0A customer=%22CUS_je02lbimlqixzax%22,%0A status=%22pending%22,%0A currency=%22NGN%22,%0A paid=%22false%22,%0A include_archive=%22true%22%0A )%0A self.assertTrue(response%5B'status'%5D)%0A
|
|
dcd1d962feec4f3cd914677545f74924ad9e6351 | Add test for file creation of low level library | testing/test_direct_wrapper.py | testing/test_direct_wrapper.py | Python | 0 | @@ -0,0 +1,296 @@
+import os%0A%0Afrom cffitsio._cfitsio import ffi, lib%0A%0A%0Adef test_create_file(tmpdir):%0A filename = str(tmpdir.join('test.fits'))%0A f = ffi.new('fitsfile **')%0A status = ffi.new('int *')%0A lib.fits_create_file(f, filename, status)%0A assert status%5B0%5D == 0%0A assert os.path.isfile(filename)%0A
|
|
2ccd94f9fb6f4a64976124ca82ac4c5ef585d64b | add serializer field | djbitcoin/serializers.py | djbitcoin/serializers.py | Python | 0 | @@ -0,0 +1,466 @@
+from django.utils.translation import ugettext_lazy as _%0A%0Afrom rest_framework import serializers%0A%0Afrom .utils import is_bitcoin_address_valid%0A%0A%0Aclass BitcoinAddressField(serializers.CharField):%0A default_error_messages = %7B%0A 'invalid': _('Invalid bitcoin address.')%0A %7D%0A%0A def to_internal_value(self, data):%0A data = super().to_internal_value(data)%0A if not is_bitcoin_address_valid(data):%0A self.fail('invalid')%0A return data%0A
|
|
dddc76173a5150939535b2c506aa967fe17ee000 | Fix #12 : env implementation | elevator/env.py | elevator/env.py | Python | 0.000001 | @@ -0,0 +1,2872 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Afrom ConfigParser import ConfigParser%0A%0Afrom utils.patterns import Singleton%0Afrom utils.decorators import lru_cache%0Afrom utils.snippets import items_to_dict%0A%0A%0Aclass Environment(object):%0A %22%22%22%0A Unix shells like environment class. Implements add,%0A get, load, flush methods. Handles lists of values too.%0A Basically Acts like a basic key/value store.%0A %22%22%22%0A __metaclass__ = Singleton%0A%0A SEQ_DELIMITER = ','%0A%0A def __init__(self, env_file=''):%0A self.attributes = set() # Stores manually added attributes%0A if env_file:%0A self.load(env_file=env_file) # Has to be called last!%0A%0A def add(self, name, value):%0A %22%22%22Adds a key/value to env%22%22%22%0A setattr(self, name, value)%0A self.attributes.add(name)%0A%0A @lru_cache(maxsize=1024)%0A def get(self, name):%0A %22%22%22Cached env key fetch%22%22%22%0A var = getattr(self, name)%0A%0A if ',' in var:%0A return var.split(',')%0A%0A return var%0A%0A def append(self, var, value):%0A %22%22%22%0A %60value%60 can either be a (name, value) tuple/list pair,%0A or a value string. If a pair is given, the method%0A will consider that the var to append to is a dict%0A and will try to add the name/value to it.%0A If it is a String, it will try to automatically transform%0A the pointed var to a sequence and add the value to it.%0A %22%22%22%0A env_var = getattr(self, var)%0A env_var_type = type(env_var)%0A%0A if ((isinstance(value, tuple) or isinstance(value, list)) and %5C%0A len(value) == 2):%0A key, value = value%0A env_var.update(%7Bkey: value%7D)%0A elif isinstance(value, str):%0A if env_var_type != list:%0A env_var = %5Benv_var%5D%0A env_var.append(value)%0A setattr(self, var, env_var)%0A else:%0A err_msg = %22Env value has to wether be iterable sequence or str%22%0A raise TypeError(err_msg)%0A%0A self.attributes.add(var)%0A%0A def load(self, env_file):%0A %22%22%22Loads an ini file containing the env description : key/value%22%22%22%0A config = ConfigParser()%0A config.read(env_file)%0A%0A for section in config.sections():%0A setattr(self, section, items_to_dict(config.items(section)))%0A self.attributes.add(section)%0A for k, v in getattr(self, section).iteritems():%0A if self.CONFIG_SEQ_DELIMITER in v:%0A splitted = %5Be for e in v.split(self.SEQ_DELIMITER) if e%5D%0A getattr(self, section)%5Bk%5D = splitted%0A%0A def reload(self, env_file=''):%0A self.flush(env_file)%0A self.load(env_file)%0A%0A def flush(self):%0A %22%22%22%0A Flushes the environment from it's manually%0A set attributes.%0A %22%22%22%0A for attr in self.attributes:%0A delattr(self, attr)%0A
|
|
01b42c531f7ab0ca81768b6e9833062f9e31ba95 | Update train_tagger script | examples/training/train_tagger.py | examples/training/train_tagger.py | Python | 0 | @@ -0,0 +1,2323 @@
+%22%22%22A quick example for training a part-of-speech tagger, without worrying%0Aabout the tokenization, or other language-specific customizations.%22%22%22%0A%0Afrom __future__ import unicode_literals%0Afrom __future__ import print_function%0A%0Aimport plac%0Afrom pathlib import Path%0A%0Afrom spacy.vocab import Vocab%0Afrom spacy.tagger import Tagger%0Afrom spacy.tokens import Doc%0Aimport random%0A%0A%0A# You need to define a mapping from your data's part-of-speech tag names to the%0A# Universal Part-of-Speech tag set, as spaCy includes an enum of these tags.%0A# See here for the Universal Tag Set:%0A# http://universaldependencies.github.io/docs/u/pos/index.html%0A# You may also specify morphological features for your tags, from the universal%0A# scheme.%0ATAG_MAP = %7B%0A 'N': %7B%22pos%22: %22NOUN%22%7D,%0A 'V': %7B%22pos%22: %22VERB%22%7D,%0A 'J': %7B%22pos%22: %22ADJ%22%7D%0A %7D%0A%0A# Usually you'll read this in, of course. Data formats vary.%0A# Ensure your strings are unicode.%0ADATA = %5B%0A (%0A %5B%22I%22, %22like%22, %22green%22, %22eggs%22%5D,%0A %5B%22N%22, %22V%22, %22J%22, %22N%22%5D%0A ),%0A (%0A %5B%22Eat%22, %22blue%22, %22ham%22%5D,%0A %5B%22V%22, %22J%22, %22N%22%5D%0A )%0A%5D%0A %0Adef ensure_dir(path):%0A if not path.exists():%0A path.mkdir()%0A%0A%0Adef main(output_dir=None):%0A if output_dir is not None:%0A output_dir = Path(output_dir)%0A ensure_dir(output_dir)%0A ensure_dir(output_dir / %22pos%22)%0A ensure_dir(output_dir / %22vocab%22)%0A %0A vocab = Vocab(tag_map=TAG_MAP)%0A # The default_templates argument is where features are specified. See%0A # spacy/tagger.pyx for the defaults.%0A tagger = Tagger.blank(vocab, Tagger.default_templates())%0A%0A for i in range(5):%0A for words, tags in DATA:%0A doc = Doc(vocab, orths_and_spaces=zip(words, %5BTrue%5D * len(words)))%0A tagger.update(doc, tags)%0A random.shuffle(DATA)%0A tagger.model.end_training()%0A doc = Doc(vocab, orths_and_spaces=zip(%5B%22I%22, %22like%22, %22blue%22, %22eggs%22%5D, %5BTrue%5D*4))%0A tagger(doc)%0A for word in doc:%0A print(word.text, word.tag_, word.pos_)%0A if output_dir is not None:%0A tagger.model.dump(str(output_dir / 'pos' / 'model'))%0A with (output_dir / 'vocab' / 'strings.json').open('wb') as file_:%0A tagger.vocab.strings.dump(file_)%0A%0A%0Aif __name__ == '__main__':%0A plac.call(main)%0A # I V VERB%0A # like V VERB%0A # blue N NOUN%0A # eggs N NOUN%0A
|
|
8fc4fdc96c07432f87b49676b4ba9ca92a0f3385 | Add tool.parser module | grab/tools/parser.py | grab/tools/parser.py | Python | 0.000001 | @@ -0,0 +1,94 @@
+def parse_int(val):%0A if val is None:%0A return None%0A else:%0A return int(val)%0A
|
|
e426afbe9ccbc72a1aa0d00032144e8b9b2b8cdc | Implement utility for colored, tabular output using fabric's color controls. | gusset/colortable.py | gusset/colortable.py | Python | 0 | @@ -0,0 +1,2786 @@
+%22%22%22%0APretty table generation.%0A%22%22%22%0Afrom itertools import cycle%0Afrom string import capwords%0Afrom fabric.colors import red, green, blue, magenta, white, yellow%0A%0A%0Aclass ColorRow(dict):%0A %22%22%22%0A Ordered collection of column values.%0A %22%22%22%0A def __init__(self, table, **kwargs):%0A super(ColorRow, self).__init__(self)%0A self.table = table%0A for column in self.table.columns:%0A self%5Bcolumn%5D = kwargs.get(column)%0A%0A def __str__(self):%0A %22%22%22%0A Generate a formatted and colored string for this row.%0A %22%22%22%0A def format_cell(color, item):%0A column, value = item%0A return color(%22 %7B%7D%22.format(value).ljust(1 + self.table.column_widths%5Bcolumn%5D))%0A%0A # get items in column order%0A items = %5B(column, self%5Bcolumn%5D) for column in self.table.columns%5D%0A # format cells with color and length%0A cells = %5Bformat_cell(color, item) for color, item in zip(cycle(self.table.colors), items)%5D%0A return %22 %22.join(cells)%0A%0A%0Aclass ColorTable(object):%0A %22%22%22%0A Simple row/column table.%0A %22%22%22%0A%0A def __init__(self, *columns, **kwargs):%0A %22%22%22%0A Create a table with fixed columns.%0A%0A :param columns: *args style list of column names%0A :param kwargs: additional options, including %60sort_key%60 and %60colors%60%0A %22%22%22%0A self.columns = columns%0A self.sort_key = kwargs.get(%22sort_key%22)%0A self.colors = kwargs.get(%22colors%22, %5Bred, green, blue, magenta, white, yellow%5D)%0A self.header = ColorRow(self, **dict(%5B(column, capwords(column)) for column in self.columns%5D))%0A # initialize column widths based on header%0A self.column_widths = dict(%5B(column, len(self.header%5Bcolumn%5D)) for column in self.columns%5D)%0A self.rows = %5B%5D%0A%0A @property%0A def separator(self):%0A %22%22%22%0A Generate a separator row using current column widths.%0A %22%22%22%0A cells = dict(%5B(column, %22-%22 * self.column_widths%5Bcolumn%5D) for column in self.columns%5D)%0A return ColorRow(self, **cells)%0A%0A def add(self, **kwargs):%0A row = ColorRow(self, **kwargs)%0A%0A # update column widths%0A for column in self.columns:%0A self.column_widths%5Bcolumn%5D = max(self.column_widths%5Bcolumn%5D, len(row%5Bcolumn%5D))%0A%0A self.rows.append(row)%0A%0A def __str__(self):%0A %22%22%22%0A Generate a colored table.%0A %22%22%22%0A rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows%0A return %22%5Cn%22.join(map(str, %5Bself.header, self.separator%5D + rows))%0A%0A%0Aif __name__ == '__main__':%0A table = ColorTable(%22first%22, %22last%22, sort_key=lambda row: (row%5B%22last%22%5D, row%5B%22first%22%5D))%0A table.add(first=%22George%22, last=%22Washington%22)%0A table.add(first=%22John%22, last=%22Adams%22)%0A table.add(first=%22Thomas%22, last=%22Jefferson%22)%0A print table%0A
|
|
af61c9a44871b1da8a939470492c18a45ab373e1 | Create lineValueDisp.py | lineValueDisp.py | lineValueDisp.py | Python | 0.000009 | @@ -0,0 +1,337 @@
+%0Aimport TCP%0Aimport Motor%0Aimport Steering%0Aimport Status%0Aimport time%0Aimport Cameras%0Aimport Lights%0Aimport Modes%0Aimport os%0A%0Atry:%0A trip_meter = Motor.TripMeter()%0A motors = Motor.Motor(trip_meter)%0A follow_line = Steering.FollowLine(motors, start_speed = 0)%0A%0A while True:%0A time.sleep(10)%0Aexcept:%0A motors.turn_off()%0A follow_line.stop()%0A
|
|
3660c183ba1ddec8033ceae21b1b06fd0ab9a8b7 | Add Signal class | phasortoolbox/signal.py | phasortoolbox/signal.py | Python | 0 | @@ -0,0 +1,37 @@
+class Signal(object):%0A run = False
|
|
23b40c583f3713d77f59e8a48f1499d702d29b32 | use pattern to exclude dirs | flexget/plugins/filter/exists_movie.py | flexget/plugins/filter/exists_movie.py | from __future__ import unicode_literals, division, absolute_import
import os
import re
import logging
from path import path
from flexget import plugin
from flexget.event import event
from flexget.config_schema import one_or_more
from flexget.plugin import get_plugin_by_name
from flexget.utils.tools import TimedDict
log = logging.getLogger('exists_movie')
class FilterExistsMovie(object):
"""
Reject existing movies.
Example::
exists_movie: /storage/movies/
"""
schema = {
'anyOf': [
one_or_more({'type': 'string', 'format': 'path'}),
{
'type': 'object',
'properties': {
'path': one_or_more({'type': 'string', 'format': 'path'}),
'allow_different_qualities': {'enum': ['better', True, False], 'default': False},
'type': {'enum': ['files', 'dirs'], 'default': 'dirs'},
'imdb_lookup': {'type': 'boolean', 'default': False}
},
'required': ['path'],
'additionalProperties': False
}
]
}
skip = ['cd1', 'cd2', 'subs', 'sample']
pattern = re.compile('\.(avi|mkv|mp4|mpg|webm)$',re.IGNORECASE)
def __init__(self):
self.cache = TimedDict(cache_time='1 hour')
def prepare_config(self, config):
# if config is not a dict, assign value to 'path' key
if not isinstance(config, dict):
config = { 'path': config }
if not config.get('type'):
config['type'] = 'dirs'
        # if only a single path is passed, turn it into a one-element list
if isinstance(config['path'], basestring):
config['path'] = [config['path']]
return config
@plugin.priority(-1)
def on_task_filter(self, task, config):
if not task.accepted:
log.debug('nothing accepted, aborting')
return
config = self.prepare_config(config)
imdb_lookup = plugin.get_plugin_by_name('imdb_lookup').instance
incompatible_files = 0
incompatible_entries = 0
count_entries = 0
count_files = 0
# list of imdb ids gathered from paths / cache
qualities = {}
for folder in config['path']:
folder = path(folder).expanduser()
# see if this path has already been scanned
if folder in self.cache:
log.verbose('Using cached scan for %s ...' % folder)
qualities.update(self.cache[folder])
continue
path_ids = {}
if not folder.isdir():
log.critical('Path %s does not exist' % folder)
continue
log.verbose('Scanning path %s ...' % folder)
# Help debugging by removing a lot of noise
#logging.getLogger('movieparser').setLevel(logging.WARNING)
#logging.getLogger('imdb_lookup').setLevel(logging.WARNING)
# scan through
items = []
if config.get('type') == 'dirs':
for d in folder.walkdirs(errors='ignore'):
if d.name.lower() in self.skip:
continue
items.append(d.name)
elif config.get('type') == 'files':
for f in folder.walkfiles(errors='ignore'):
if not self.pattern.search(f.name):
continue
items.append(f.name)
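            # parse every collected name into a movie title and quality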
for item in items:
count_files += 1
movie = get_plugin_by_name('parsing').instance.parse_movie(item)
if config.get('imdb_lookup'):
try:
imdb_id = imdb_lookup.imdb_id_lookup(movie_title=movie.name,
raw_title=item,
session=task.session)
if imdb_id in path_ids:
log.trace('duplicate %s' % item)
continue
if imdb_id is not None:
log.trace('adding: %s' % imdb_id)
path_ids[imdb_id] = movie.quality
except plugin.PluginError as e:
log.trace('%s lookup failed (%s)' % (item, e.value))
incompatible_files += 1
else:
path_ids[movie.name] = movie.quality
log.debug(movie.name)
            # store to cache and merge into the collected list
self.cache[folder] = path_ids
qualities.update(path_ids)
log.debug('-- Start filtering entries ----------------------------------')
# do actual filtering
for entry in task.accepted:
count_entries += 1
if config.get('imdb_lookup'):
key = 'imdb_id'
if not entry.get('imdb_id', eval_lazy=False):
try:
imdb_lookup.lookup(entry)
except plugin.PluginError as e:
log.trace('entry %s imdb failed (%s)' % (entry['title'], e.value))
incompatible_entries += 1
continue
else:
key = 'movie_name'
if not entry.get('movie_name', eval_lazy=False):
movie = get_plugin_by_name('parsing').instance.parse_movie(entry['title'])
entry['movie_name'] = movie.name
# actual filtering
if entry[key] in qualities:
if config.get('allow_different_qualities') == 'better':
if entry['quality'] > qualities[entry[key]]:
log.trace('better quality')
continue
elif config.get('allow_different_qualities'):
if entry['quality'] != qualities[entry[key]]:
log.trace('wrong quality')
continue
entry.reject('movie exists')
if incompatible_files or incompatible_entries:
            log.verbose('There were some incompatible items. %s of %s entries '
                        'and %s of %s items could not be verified.' %
                        (incompatible_entries, count_entries, incompatible_files, count_files))
log.debug('-- Finished filtering entries -------------------------------')
@event('plugin.register')
def register_plugin():
plugin.register(FilterExistsMovie, 'exists_movie', groups=['exists'], api_ver=2)
| Python | 0 | @@ -1135,52 +1135,86 @@
-skip = %5B'cd1', 'cd2', 'subs', 'sample'%5D%0A
+dir_pattern = re.compile('%5Cb(cd.%5Cd%7Csubs?%7Csamples?)%5Cb',re.IGNORECASE)%0A file_
patt
@@ -3198,35 +3198,39 @@
if
-d.name.lower() in self.skip
+self.dir_pattern.search(d.name)
:%0A
@@ -3441,16 +3441,21 @@
ot self.
+file_
pattern.
|
e0c05bb4af8ab1df2d91050863206d164df616e5 | Allow a few more built-ins to be used from if plugin. | flexget/plugins/filter/if_condition.py | flexget/plugins/filter/if_condition.py | import logging
from flexget.feed import Feed
from flexget.plugin import register_plugin, get_plugins_by_phase, get_plugin_by_name, priority
import re
log = logging.getLogger('if')
def safer_eval(statement, locals):
"""A safer eval function. Does not allow __ or try statements, does not include any globals in the namespace."""
if re.search(r'__|try\s*:', statement):
raise ValueError('\'__\' or try blocks not allowed in if statements.')
return eval(statement, {'__builtins__': None}, locals)
class FilterIf(object):
"""Can run actions on entries that satisfy a given condition.
Actions include accept, reject, and fail, as well as the ability to run other filter plugins on the entries."""
def validator(self):
from flexget import validator
root = validator.factory('list')
key_validator = validator.factory('regexp_match',
message='If statements cannot contain \'__\' or \'try\' statements')
key_validator.reject(r'.*?(__|try\s*:)')
key_validator.accept('.')
action = root.accept('dict').accept_valid_keys('root', key_validator=key_validator)
action.accept('choice').accept_choices(['accept', 'reject', 'fail'])
filter_action = action.accept('dict')
# Get a list of apiv2 input plugins, make sure to exclude self
valid_filters = [plugin for plugin in get_plugins_by_phase('filter')
if plugin.api_ver > 1 and plugin.name != 'if']
# Build a dict validator that accepts the available filter plugins and their settings
for plugin in valid_filters:
if hasattr(plugin.instance, 'validator'):
validator = plugin.instance.validator()
if validator.name == 'root':
# If a root validator is returned, grab the list of child validators
filter_action.valid[plugin.name] = validator.valid
else:
filter_action.valid[plugin.name] = [plugin.instance.validator()]
else:
filter_action.valid[plugin.name] = [validator.factory('any')]
return root
@priority(80)
def on_feed_filter(self, feed, config):
entry_actions = {
'accept': feed.accept,
'reject': feed.reject,
'fail': feed.fail}
for entry in feed.entries:
eval_locals = {
'has_field': lambda f: entry.has_key(f),
'True': True,
'False': False}
eval_locals.update(entry)
for item in config:
requirement, action = item.items()[0]
try:
# Restrict eval namespace to have no globals and locals only from eval_locals
passed = safer_eval(requirement, eval_locals)
except ValueError, e:
log.error(e)
except SyntaxError, e:
log.exception('There is a syntax error in if statement, traceback follows')
except NameError, e:
# Extract the name that did not exist
missing_field = e.message.split('\'')[1]
log.debug('%s does not contain the field %s' % (entry['title'], missing_field))
else:
if passed:
log.debug('%s matched requirement %s' % (entry['title'], requirement))
if isinstance(action, basestring):
# Simple entry action (accept, reject or fail) was specified as a string
entry_actions[action](entry, 'Matched requirement: %s' % requirement)
else:
# Other filters were specified to run on this entry
fake_feed = Feed(feed.manager, feed.name, feed.config)
fake_feed.session = feed.session
fake_feed.entries = [entry]
try:
for filter_name, filter_config in action.iteritems():
filter = get_plugin_by_name(filter_name)
method = filter.phase_handlers['on_feed_filter']
method(fake_feed, filter_config)
except Exception:
raise
else:
# Populate changes from the fake feed to the real one
for e in fake_feed.accepted:
feed.accept(e, e.get('reason'))
for e in fake_feed.rejected:
feed.reject(e, e.get('reason'))
for e in fake_feed.failed:
feed.fail(e, e.get('reason'))
register_plugin(FilterIf, 'if', api_ver=2)
| Python | 0.000033 | @@ -291,57 +291,200 @@
ts,
-does not include any globals in the namespace.%22%22%22
+only includes certain 'safe' builtins.%22%22%22%0D%0A allowed_builtins = %5B'True', 'False', 'len', 'any'%5D%0D%0A for name in allowed_builtins:%0D%0A locals%5Bname%5D = globals()%5B'__builtins__'%5D.get(name)
%0D%0A
@@ -2617,144 +2617,147 @@
-eval_locals = %7B%0D%0A 'has_field': lambda f: entry.has_key(f),%0D%0A 'True': True,%0D%0A 'False': False
+# Make entry fields and other utilities available in the eval namespace%0D%0A eval_locals = %7B'has_field': lambda f: entry.has_key(f)
%7D%0D%0A
|
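The character-level hunks above are hard to read, so here is a sketch of the patched safer_eval reconstructed from them (the diff uses CRLF endings and elides indentation; standard four-space indents are assumed):

    def safer_eval(statement, locals):
        """A safer eval function. Does not allow __ or try statements, only includes certain 'safe' builtins."""
        allowed_builtins = ['True', 'False', 'len', 'any']
        for name in allowed_builtins:
            locals[name] = globals()['__builtins__'].get(name)
        if re.search(r'__|try\s*:', statement):
            raise ValueError('\'__\' or try blocks not allowed in if statements.')
        return eval(statement, {'__builtins__': None}, locals)

The second hunk is the matching cleanup: the hand-written 'True'/'False' entries are dropped from eval_locals, since safer_eval now injects the whitelisted builtins itself.
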
913bb348938c2b54ab7a76c7e16ce9b3fb999dbe | Copy fail. | judge/management/commands/render_pdf.py | judge/management/commands/render_pdf.py | import os
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from django.utils import translation
from judge.models import Problem, ProblemTranslation
from judge.pdf_problems import WebKitPdfMaker
class Command(BaseCommand):
help = 'renders a PDF file of a problem'
def add_arguments(self, parser):
parser.add_argument('code', help='code of problem to render')
parser.add_argument('directory', nargs='?', help='directory to store temporaries')
parser.add_argument('-l', '--language', default=settings.LANGUAGE_CODE,
help='language to render PDF in')
def handle(self, *args, **options):
try:
problem = Problem.objects.get(code=options['code'])
except Problem.DoesNotExist:
print 'Bad problem code'
return
problem = self.get_object()
try:
trans = problem.translations.get(language=options['language'])
except ProblemTranslation.DoesNotExist:
trans = None
directory = options['directory']
with WebKitPdfMaker(directory, clean_up=directory is None) as maker, \
translation.override(options['language']):
maker.html = get_template('problem/raw.jade').render(Context({
'problem': problem,
'problem_name': problem.name if trans is None else trans.name,
'description': problem.description if trans is None else trans.description,
})).replace('"//', '"http://').replace("'//", "'http://")
for file in ('style.css', 'pygment-github.css'):
maker.load(file, os.path.join(settings.DMOJ_RESOURCES, file))
maker.make(debug=True)
if not maker.success:
print>>sys.stderr, maker.log
elif directory is None:
os.rename(maker.pdffile, problem.code + '.pdf')
| Python | 0 | @@ -953,44 +953,8 @@
rn%0A%0A
- problem = self.get_object()%0A
|
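Context for the hunk above: it deletes the stray second assignment, problem = self.get_object(), left over from a copy-paste. Django management commands have no get_object(), so the line would raise AttributeError immediately after the successful Problem.objects.get lookup. The patched start of handle(), per old_contents minus the removed line:

    def handle(self, *args, **options):
        try:
            problem = Problem.objects.get(code=options['code'])
        except Problem.DoesNotExist:
            print 'Bad problem code'
            return

        try:
            trans = problem.translations.get(language=options['language'])
        except ProblemTranslation.DoesNotExist:
            trans = None
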
b31e15d12dbff8eaab71ec523ec16d5f1afe908b | add sharpen pic tool | sharpen_pic.py | sharpen_pic.py | Python | 0 | @@ -0,0 +1,966 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A#function: %E9%94%90%E5%8C%96%E5%9B%BE%E5%83%8F%0Aimport os%0Aimport os.path%0Aimport sys, getopt, argparse%0Afrom PIL import Image, ImageEnhance%0A%0Adef sharpenPic(filein,fileout):%0A%0A%09im02 =Image.open(filein)%0A%09%0A%09im_30 =ImageEnhance.Sharpness (im02).enhance(2.0)%0A%0A%09im_30.save(fileout)%0A%0Adef main():%0A%09argc = len(sys.argv)%0A%09cmdargs = str(sys.argv)%0A%09parser = argparse.ArgumentParser(description=%22Tool for sharp the image%22)%0A%09parser.add_argument('-f', '--fromdir', required=True, help='the directory path of the input file')%0A%09parser.add_argument('-d', '--outdir', required=True, help='the directory of the output file')%0A%0A%09args = parser.parse_args()%0A%09fromdir = args.fromdir%0A%09outdir = args.outdir%0A%0A%09for file in os.listdir(fromdir):%0A%09%09if file == %22desktop.ini%22:%0A%09%09%09continue%0A%09%09filein = os.path.join(fromdir, file)%0A%09%09fileout = os.path.join(outdir, file)%0A%09%09try:%0A%09%09%09sharpenPic(filein, fileout)%0A%09%09except Exception as e:%0A%09%09%09print(e)%0A%09%09%09continue%0Aif __name__ == '__main__':%0A%09main()
|
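For readability, the core of the %-escaped file above decodes roughly to the following (a transliteration of the record, with the Chinese comment translated, not an improved version):

    #!/usr/bin/env python
    # -*- coding: utf-8 -*-

    # function: sharpen the image
    from PIL import Image, ImageEnhance

    def sharpenPic(filein, fileout):
        im02 = Image.open(filein)
        im_30 = ImageEnhance.Sharpness(im02).enhance(2.0)
        im_30.save(fileout)

The rest of the file is an argparse-driven main() that walks os.listdir(fromdir), skips desktop.ini, and sharpens each file into outdir, swallowing per-file exceptions.
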
|
338a23f907a82d821844639128d070385138af80 | Simple https server. requires key and cert files | server.py | server.py | Python | 0.999993 | @@ -0,0 +1,380 @@
+import SimpleHTTPServer%0Aimport SocketServer%0Aimport BaseHTTPServer, SimpleHTTPServer%0Aimport ssl%0A%0A%0Ahttpd = BaseHTTPServer.HTTPServer(('localhost', 4443), SimpleHTTPServer.SimpleHTTPRequestHandler)%0Ahttpd.socket = ssl.wrap_socket (httpd.socket, certfile='/Users/jtep/Code/own/audioviz/cert.pem', keyfile='/Users/jtep/Code/own/audioviz/key.pem',server_side=True)%0Ahttpd.serve_forever()%0A
|
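The record above relies on ssl.wrap_socket, which was deprecated in Python 3.7 and removed in 3.12, and on Python 2 module names. A sketch of the same one-file HTTPS server on the SSLContext API (the certificate paths are placeholders, not from the record):

    import http.server
    import ssl

    httpd = http.server.HTTPServer(('localhost', 4443),
                                   http.server.SimpleHTTPRequestHandler)
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain(certfile='cert.pem', keyfile='key.pem')
    httpd.socket = context.wrap_socket(httpd.socket, server_side=True)
    httpd.serve_forever()
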
|
e9518383da031d03a3c70f1a670bcf522cfa4d57 | Fix gaiohttp tests for Python 2.5 | tests/test_009-gaiohttp.py | tests/test_009-gaiohttp.py | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import t
import os
import sys
import unittest
if sys.version_info[0] < 3:
raise unittest.SkipTest("gaiohttp requires Python 3.3+")
try:
import aiohttp
except ImportError:
raise unittest.SkipTest("gaiohttp requires aiohttp")
from aiohttp.wsgi import WSGIServerHttpProtocol
import asyncio
from gunicorn.workers import gaiohttp
from gunicorn.config import Config
from unittest import mock
class WorkerTests(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
self.worker = gaiohttp.AiohttpWorker('age',
'ppid',
'sockets',
'app',
'timeout',
Config(),
'log')
def tearDown(self):
self.loop.close()
@mock.patch('gunicorn.workers.gaiohttp.asyncio')
def test_init_process(self, m_asyncio):
try:
self.worker.init_process()
except TypeError:
            # to mask incomplete initialization of AiohttpWorker instance:
# we pass invalid values for ctor args
pass
self.assertTrue(m_asyncio.get_event_loop.return_value.close.called)
self.assertTrue(m_asyncio.new_event_loop.called)
self.assertTrue(m_asyncio.set_event_loop.called)
@mock.patch('gunicorn.workers.gaiohttp.asyncio')
def test_run(self, m_asyncio):
self.worker.loop = mock.Mock()
self.worker.run()
self.assertTrue(m_asyncio.async.called)
self.assertTrue(self.worker.loop.run_until_complete.called)
self.assertTrue(self.worker.loop.close.called)
def test_factory(self):
self.worker.wsgi = mock.Mock()
self.worker.loop = mock.Mock()
self.worker.log = mock.Mock()
self.worker.cfg = mock.Mock()
f = self.worker.factory(
self.worker.wsgi, 'localhost', 8080)
self.assertIsInstance(f, WSGIServerHttpProtocol)
@mock.patch('gunicorn.workers.gaiohttp.asyncio')
def test__run(self, m_asyncio):
self.worker.ppid = 1
self.worker.alive = True
self.worker.servers = []
sock = mock.Mock()
sock.cfg_addr = ('localhost', 8080)
self.worker.sockets = [sock]
self.worker.wsgi = mock.Mock()
self.worker.log = mock.Mock()
self.worker.notify = mock.Mock()
loop = self.worker.loop = mock.Mock()
loop.create_server.return_value = asyncio.Future(loop=self.loop)
loop.create_server.return_value.set_result(sock)
self.loop.run_until_complete(self.worker._run())
self.assertTrue(self.worker.log.info.called)
self.assertTrue(self.worker.notify.called)
def test__run_connections(self):
conn = mock.Mock()
self.worker.ppid = 1
self.worker.alive = False
self.worker.servers = [mock.Mock()]
self.worker.connections = {1: conn}
self.worker.sockets = []
self.worker.wsgi = mock.Mock()
self.worker.log = mock.Mock()
self.worker.loop = self.loop
self.worker.loop.create_server = mock.Mock()
self.worker.notify = mock.Mock()
def _close_conns():
self.worker.connections = {}
self.loop.call_later(0.1, _close_conns)
self.loop.run_until_complete(self.worker._run())
self.assertTrue(self.worker.log.info.called)
self.assertTrue(self.worker.notify.called)
self.assertFalse(self.worker.servers)
self.assertTrue(conn.closing.called)
@mock.patch('gunicorn.workers.gaiohttp.os')
@mock.patch('gunicorn.workers.gaiohttp.asyncio.sleep')
def test__run_exc(self, m_sleep, m_os):
m_os.getpid.return_value = 1
m_os.getppid.return_value = 1
self.worker.servers = [mock.Mock()]
self.worker.ppid = 1
self.worker.alive = True
self.worker.sockets = []
self.worker.log = mock.Mock()
self.worker.loop = mock.Mock()
self.worker.notify = mock.Mock()
slp = asyncio.Future(loop=self.loop)
slp.set_exception(KeyboardInterrupt)
m_sleep.return_value = slp
self.loop.run_until_complete(self.worker._run())
self.assertTrue(m_sleep.called)
self.assertTrue(self.worker.servers[0].close.called)
def test_close_wsgi_app(self):
self.worker.ppid = 1
self.worker.alive = False
self.worker.servers = [mock.Mock()]
self.worker.connections = {}
self.worker.sockets = []
self.worker.log = mock.Mock()
self.worker.loop = self.loop
self.worker.loop.create_server = mock.Mock()
self.worker.notify = mock.Mock()
self.worker.wsgi = mock.Mock()
self.worker.wsgi.close.return_value = asyncio.Future(loop=self.loop)
self.worker.wsgi.close.return_value.set_result(1)
self.loop.run_until_complete(self.worker._run())
self.assertTrue(self.worker.wsgi.close.called)
self.worker.wsgi = mock.Mock()
self.worker.wsgi.close.return_value = asyncio.Future(loop=self.loop)
self.worker.wsgi.close.return_value.set_exception(ValueError())
self.loop.run_until_complete(self.worker._run())
self.assertTrue(self.worker.wsgi.close.called)
def test_wrp(self):
conn = object()
tracking = {}
meth = mock.Mock()
wrp = gaiohttp._wrp(conn, meth, tracking)
wrp()
self.assertIn(id(conn), tracking)
self.assertTrue(meth.called)
meth = mock.Mock()
wrp = gaiohttp._wrp(conn, meth, tracking, False)
wrp()
self.assertNotIn(1, tracking)
self.assertTrue(meth.called)
| Python | 0.000019 | @@ -132,228 +132,62 @@
ort
-t%0Aimport os%0Aimport sys%0Aimport unittest%0Aif sys.version_info%5B0%5D %3C 3:%0A raise unittest.SkipTest(%22gaiohttp requires Python 3.3+%22)%0A%0Atry:%0A import aiohttp%0Aexcept ImportError:%0A raise unittest.SkipTest(%22gaiohttp requires
+unittest%0Aimport pytest%0Aaiohttp = pytest.importorskip(%22
aioh
|
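The hunk above collapses the manual version and ImportError guards into pytest. Reconstructed module top after the patch:

    import unittest
    import pytest

    aiohttp = pytest.importorskip("aiohttp")

pytest.importorskip skips the whole module at collection time whenever aiohttp cannot be imported. That also covers Python 2.5, where the old guard could not even run: unittest.SkipTest only exists from Python 2.7 onward, so raising it on 2.5 failed before the skip took effect.
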
0882c8885b88618ea55b97ace256cdf833a1547d | Add tests for pylama isort | tests/test_pylama_isort.py | tests/test_pylama_isort.py | Python | 0 | @@ -0,0 +1,541 @@
+import os%0A%0Afrom isort.pylama_isort import Linter%0A%0A%0Aclass TestLinter:%0A instance = Linter()%0A%0A def test_allow(self):%0A assert not self.instance.allow(%22test_case.pyc%22)%0A assert not self.instance.allow(%22test_case.c%22)%0A assert self.instance.allow(%22test_case.py%22)%0A%0A def test_run(self, src_dir, tmpdir):%0A assert not self.instance.run(os.path.join(src_dir, %22isort.py%22))%0A%0A incorrect = tmpdir.join(%22incorrect.py%22)%0A incorrect.write(%22import b%5Cnimport a%5Cn%22)%0A assert self.instance.run(str(incorrect))%0A
|
|
8a7ea0e8d29d443676c8893790625cbeb9d973ad | Test addByUniqueID Survey model | tests/test_survey_model.py | tests/test_survey_model.py | Python | 0 | @@ -0,0 +1,1438 @@
+# Copyright (C) 2016 University of Zurich. All rights reserved.%0A#%0A# This file is part of MSRegistry Backend.%0A#%0A# MSRegistry Backend is free software: you can redistribute it and/or%0A# modify it under the terms of the version 3 of the GNU Affero General%0A# Public License as published by the Free Software Foundation, or any%0A# other later version.%0A#%0A# MSRegistry Backend is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version%0A# 3 of the GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the version 3 of the GNU Affero%0A# General Public License along with MSRegistry Backend. If not, see %0A# %3Chttp://www.gnu.org/licenses/%3E.%0A%0A__author__ = %22Filippo Panessa %[email protected]%3E%22%0A__copyright__ = (%22Copyright (c) 2016 S3IT, Zentrale Informatik,%22%0A%22 University of Zurich%22)%0A%0A%0Aimport unittest%0A%0Afrom app import create_app%0Afrom app.models import Survey%0A%0A%0Aclass SurveyModelTestCase(unittest.TestCase):%0A uniqueID = 'd4c74594d841139328695756648b6bd6'%0A %0A def setUp(self):%0A self.app = create_app('TESTING')%0A self.app_context = self.app.app_context()%0A self.app_context.push()%0A%0A def tearDown(self):%0A self.app_context.pop()%0A%0A def test_addByUniqueID(self):%0A u = Survey()%0A self.assertTrue(u.addByUniqueID(self.uniqueID, %7B%7D))%0A%0A
|
|
280e72331d99a8c49783196951287627a933a659 | Add py solution for 459. Repeated Substring Pattern | py/repeated-substring-pattern.py | py/repeated-substring-pattern.py | Python | 0.00038 | @@ -0,0 +1,316 @@
+class Solution(object):%0A def repeatedSubstringPattern(self, s):%0A %22%22%22%0A :type s: str%0A :rtype: bool%0A %22%22%22%0A for i in xrange(1, len(s) / 2 + 1):%0A if len(s) %25 i == 0 and len(set(s%5Bj:j+i%5D for j in xrange(0, len(s), i))) == 1:%0A return True%0A return False%0A
|
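The stored solution tries every divisor length i and checks that all i-sized chunks are identical. A common compact alternative (not part of the record) is the rotation trick: s is built from a repeated substring exactly when s occurs inside s + s with the first and last characters removed.

    class Solution(object):
        def repeatedSubstringPattern(self, s):
            return s in (s + s)[1:-1]
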
|
75dc32ef71fd32c7728269b01a74faf840690473 | Add a slow bot to test timeout feature | examples/too_slow_bot.py | examples/too_slow_bot.py | Python | 0 | @@ -0,0 +1,578 @@
+import random%0Aimport asyncio%0A%0Aimport sc2%0Afrom sc2 import Race, Difficulty%0Afrom sc2.constants import *%0Afrom sc2.player import Bot, Computer%0A%0Afrom proxy_rax import ProxyRaxBot%0A%0Aclass SlowBot(ProxyRaxBot):%0A async def on_step(self, state, iteration):%0A await asyncio.sleep(random.random())%0A await super().on_step(state, iteration)%0A%0Adef main():%0A sc2.run_game(sc2.maps.get(%22Abyssal Reef LE%22), %5B%0A Bot(Race.Terran, SlowBot()),%0A Computer(Race.Protoss, Difficulty.Medium)%0A %5D, realtime=False, step_time_limit=0.2)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
7c865c63d5debcf7463ad1b81470d2f044ec4738 | Add lab result models | radar/patients/lab_results/models.py | radar/patients/lab_results/models.py | Python | 0 | @@ -0,0 +1,1884 @@
+from sqlalchemy import Column, Integer, String, ForeignKey, Numeric, Date, Boolean%0Afrom sqlalchemy.orm import relationship%0Afrom radar.database import db%0Afrom radar.models import PatientMixin, UnitMixin, CreatedModifiedMixin, DataSource%0A%0A%0Aclass LabOrderDefinition(db.Model):%0A __tablename__ = 'lab_order_definitions'%0A%0A id = Column(Integer, primary_key=True)%0A%0A code = Column(String, nullable=False)%0A description = Column(String, nullable=False)%0A%0A pre_post = Column(Boolean, nullable=False)%0A%0A%0Aclass LabResultDefinition(db.Model):%0A __tablename__ = 'lab_result_definitions'%0A%0A id = Column(Integer, primary_key=True)%0A%0A lab_order_definition_id = Column(Integer, ForeignKey('lab_order_definitions.id'), nullable=False)%0A%0A code = Column(String, nullable=False)%0A description = Column(String, nullable=False)%0A%0A%0Aclass LabOrder(DataSource, PatientMixin, UnitMixin, CreatedModifiedMixin):%0A __tablename__ = 'lab_orders'%0A%0A id = Column(Integer, ForeignKey('lab_orders.id'), primary_key=True)%0A%0A lab_order_definition_id = Column(Integer, ForeignKey('lab_order_definitions.id'), nullable=False)%0A lab_order_definition = relationship('LabOrderDefinition')%0A%0A date = Column(Date, nullable=False)%0A pre_post = Column(String)%0A%0A lab_results = relationship('LabResult', cascade='all, delete-orphan')%0A%0A __mapper_args__ = %7B%0A 'polymorphic_identity': 'hospitalisations',%0A %7D%0A%0A%0Aclass LabResult(db.Model):%0A __tablename__ = 'lab_results'%0A%0A id = Column(Integer, primary_key=True)%0A%0A lab_order_id = Column(Integer, ForeignKey('lab_orders.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=False)%0A lab_order = relationship('LabOrder')%0A%0A lab_result_definition_id = Column(Integer, ForeignKey('lab_result_definitions.id'), nullable=False)%0A lab_result_definition = relationship('LabResultDefinition')%0A%0A value = Column(Numeric, nullable=False)
|
|
36d8a0e091ec1dd4ff451031810c75cd0431ac44 | add admins.py file | aligot/admin.py | aligot/admin.py | Python | 0.000001 | @@ -0,0 +1,223 @@
+# coding: utf-8%0A%0Afrom django.contrib import admin%0A%0Afrom .models import Note, NoteBook, NoteRevision, User%0A%0Aadmin.site.register(User)%0Aadmin.site.register(NoteBook)%0Aadmin.site.register(Note)%0Aadmin.site.register(NoteRevision)%0A
|
|
cf93f84dd794b63dd373cf59d802000799e32232 | Create main.py | example/main.py | example/main.py | Python | 0.000001 | @@ -0,0 +1 @@
+%0A
|
|
4efc45499d1736933691b9de39090b86526ea4e1 | Create 217_contain_duplicates.py | 217_contain_duplicates.py | 217_contain_duplicates.py | Python | 0.000607 | @@ -0,0 +1,743 @@
+%22%22%22%0Ahttps://leetcode.com/problems/contains-duplicate/description/%0A%0AGiven an array of integers, find if the array contains any duplicates. %0AYour function should return true if any value appears at least twice in the array, and it should return false if every element%0Ais distinct.%0A%22%22%22%0Aclass Solution(object):%0A def containsDuplicate(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: bool%0A %22%22%22%0A dictionary = %7B%7D%0A duplicates = %5B%5D%0A is_duplicate = False%0A for num in nums:%0A if num in dictionary:%0A return True%0A is_duplicate = True%0A break%0A else:%0A dictionary%5Bnum%5D = 1%0A return is_duplicate%0A %0A %0A
|
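In the solution above, the two statements after the inner return True are unreachable and the duplicates list is never used; the function still answers correctly. A common one-line alternative (not part of the record):

    def containsDuplicate(nums):
        return len(set(nums)) < len(nums)
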
|
7ab2298f22de79cd14fae9f3add1417a76bcbcd0 | Add package file. | app/__init__.py | app/__init__.py | Python | 0 | @@ -0,0 +1,274 @@
+#!/usr/bin/env python%0Afrom pkgutil import extend_path%0A%0A__path__ = extend_path(__path__, __name__)%0A%0A__all__ = %5B%0A 'data_visualization',%0A 'knn_prediction',%0A 'load_dataset',%0A 'model_visualization',%0A 'select_model',%0A 'svm_prediction',%0A 'validate_dataset',%0A%5D%0A
|
|
f622255dc2c6695b785213c8d69cb57ae5d8a5e9 | Add pebble sdk version for detecting sdk features | waftools/pebble_sdk_version.py | waftools/pebble_sdk_version.py | Python | 0 | @@ -0,0 +1,600 @@
+from waflib.Configure import conf%0A%0A%0A@conf%0Adef compare_sdk_version(ctx, platform, version):%0A target_env = ctx.all_envs%5Bplatform%5D if platform in ctx.all_envs else ctx.env%0A target_version = (int(target_env.SDK_VERSION_MAJOR or 0x5) * 0xff +%0A int(target_env.SDK_VERSION_MINOR or 0x19))%0A other_version = int(version%5B0%5D) * 0xff + int(version%5B1%5D)%0A diff_version = target_version - other_version%0A return 0 if diff_version == 0 else diff_version / abs(diff_version)%0A%0A%0A@conf%0Adef supports_bitmap_resource(ctx):%0A return (ctx.compare_sdk_version('aplite', %5B0x5, 0x48%5D) %3E= 0)%0A
|
|
79fdfaceee84321bb802f9f99ee500f400f38780 | Add admin to credentials | dbaas/integrations/credentials/admin/__init__.py | dbaas/integrations/credentials/admin/__init__.py | Python | 0 | @@ -0,0 +1,177 @@
+# -*- coding:utf-8 -*-%0Afrom django.contrib import admin%0Afrom .. import models%0A%0Aadmin.site.register(models.IntegrationType, )%0Aadmin.site.register(models.IntegrationCredential, )%0A
|
|
58627bd4cbe100a7cbd526be38cd69e8605984cd | Add json-encoder example | examples/json-encoder.py | examples/json-encoder.py | Python | 0.000082 | @@ -0,0 +1,866 @@
+#!/usr/bin/env python3%0Afrom pycnic.core import WSGI, Handler%0Aimport datetime%0Aimport json%0A%0Aclass DateTimeEncoder(json.JSONEncoder):%0A def default(self, o):%0A if isinstance(o, datetime.datetime):%0A return o.isoformat()%0A%0A return json.JSONEncoder.default(self, o)%0A%0Aclass Hello(Handler):%0A%0A def get(self, name=%22World%22):%0A return %7B %0A %22message%22: %22Hello, %7Bname%7D!%22.format(name=name),%0A %22date%22: datetime.datetime.now()%0A %7D%0A%0A%0Aclass app(WSGI):%0A debug = True%0A json_cls = DateTimeEncoder%0A routes = %5B%0A (%22/%22, Hello()),%0A (%22/(%5B%5Cw%5D+)%22, Hello())%0A %5D%0A%0Aif __name__ == %22__main__%22:%0A from wsgiref.simple_server import make_server%0A try:%0A print(%22Serving on 0.0.0.0:8080...%22)%0A make_server('0.0.0.0', 8080, app).serve_forever()%0A except KeyboardInterrupt:%0A pass%0A print(%22Done%22)%0A%0A
|
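The encoder in the record is the standard json.JSONEncoder extension point, so it plugs into the stock library the same way pycnic's json_cls uses it. A minimal usage sketch, assuming the DateTimeEncoder class from the record above:

    import datetime
    import json

    payload = {"now": datetime.datetime(2020, 1, 2, 3, 4, 5)}
    print(json.dumps(payload, cls=DateTimeEncoder))
    # {"now": "2020-01-02T03:04:05"}
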
|
ebfe2faa5fcf66f3f1ece597922d4a72b59c3e43 | Create B_Averages_ocean.py | Cas_1/B_Averages_ocean.py | Cas_1/B_Averages_ocean.py | Python | 0.001485 | @@ -0,0 +1,1918 @@
+#Averages of U,V,W,T,S and ETA%0A%0Aimport numpy as np %0Aimport matplotlib.pyplot as plt%0A%0Afrom xmitgcm import open_mdsdataset%0A %0Adir0 = '/homedata/bderembl/runmit/test_southatlgyre' #Case 1 : 38 iterations%0A%0Ads0 = open_mdsdataset(dir0,prefix=%5B'Eta','U','V','W','T','S'%5D)%0A%0Aprint(ds0)%0A%0AAverage_ETA = ds0%5B'Eta'%5D.mean().values%0Aprint('Average of Ocean Surface Height Anomaly ')%0Aprint(Average_ETA,'m')%0A#Average_ETA_mask = ds0.Eta.where(ds0.hFacC%3E0).mean().values%0A#print('Average of Ocean Surface Height Anomaly without continents')%0A#print(Average_ETA_mask,'m')%0A%0AAverage_T = ds0%5B'T'%5D.mean().values%0Aprint('Average of Ocean Temperature')%0Aprint(Average_T,'%C2%B0C')%0A#Average_T_mask = ds0%5B'T'%5D.where(ds0.hFacC%3E0).mean().values%0A#print('Average of Ocean Temperature without continents')%0A#print(Average_T_mask,'%C2%B0C')%0A%0AAverage_S = ds0%5B'S'%5D.mean().values%0Aprint('Average of Ocean Salinity')%0Aprint(Average_S,'psu')%0A#Average_S_mask = ds0.S.where(ds0.hFacC%3E0).mean().values%0A#print('Average of Ocean Salinity without continents')%0A#print(Average_S_mask,'psu')%0A%0AAverage_U = ds0%5B'U'%5D.mean().values%0Aprint('Average of Meridional component of Ocean Velocity')%0Aprint(Average_U,'m/s')%0A#Average_U_mask = ds0.U.where(ds0.hFacW%3E0).mean().values%0A#print('Average of Meridional component of Ocean Velocity without continents')%0A#print(Average_U_mask,'m/s')%0A%0AAverage_V = ds0%5B'V'%5D.mean().values%0Aprint('Average of Zonal component of Ocean Velocity')%0Aprint(Average_V,'m/s')%0A#Average_V_mask = ds0.V.where(ds0.hFacS%3E0).mean().values%0A#print('Average of Meridional component of Ocean Velocity without continents')%0A#print(Average_V_mask,'m/s')%0A%0AAverage_W = ds0%5B'W'%5D.mean().values%0Aprint('Average of Vertical component of Ocean Velocity')%0Aprint(Average_W,'m/s')%0A#Average_W_mask = ds0.W.where(ds0.hFacS%3E0).mean().values%0A#print('Average of Vertical component of Ocean Velocity without continents')%0A#print(Average_W_mask,'m/s')%0A
|
|
8ada83d3140b871d7699988996ff7427c0526c9b | Remove extraneous logging from benchmarks | tensorflow/python/data/benchmarks/benchmark_base.py | tensorflow/python/data/benchmarks/benchmark_base.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test utilities for tf.data benchmarking functionality."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.platform import test
# TODO(b/119837791): Add eager benchmarks.
class DatasetBenchmarkBase(test.Benchmark):
"""Base class for dataset benchmarks."""
def run_benchmark(self, dataset, num_elements, iters=1):
"""Benchmarks the dataset.
Runs the dataset `iters` times. In each iteration, the benchmark measures
the time it takes to go through `num_elements` elements of the dataset.
Args:
dataset: Dataset to benchmark.
num_elements: Number of dataset elements to iterate through each benchmark
iteration.
iters: Number of times to repeat the timing.
Returns:
A float, representing the per-element wall time of the dataset in seconds.
This is the median time (with respect to `iters`) it takes for the dataset
to go through `num_elements` elements, divided by `num_elements.`
"""
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
dataset = dataset.with_options(options)
# NOTE: We use `dataset.skip()` to perform the iterations in C++, avoiding
# the overhead of multiple `session.run()` calls. Note that this relies on
# the underlying implementation of `skip`: if it is optimized in the future,
# we will have to change this code.
dataset = dataset.skip(num_elements - 1)
iterator = dataset_ops.make_initializable_iterator(dataset)
next_element = iterator.get_next()
next_element = nest.flatten(next_element)[0]
deltas = []
for _ in range(iters):
with session.Session() as sess:
# Run once to warm up the session caches.
sess.run(iterator.initializer)
sess.run(next_element)
sess.run(iterator.initializer)
start = time.time()
sess.run(next_element.op)
end = time.time()
deltas.append(end - start)
return np.median(deltas) / float(num_elements)
def run_and_report_benchmark(self,
dataset,
num_elements,
name,
iters=5,
extras=None):
# Measure the per-element wall time.
wall_time = self.run_benchmark(dataset, num_elements, iters)
if extras is None:
extras = {}
extras["elements_per_second"] = 1 / wall_time
extras["num_elements"] = num_elements
# 'mode' represents the mechanism used for iterating over dataset elements.
name = "%s_mode_cpp" % name
self.report_benchmark(
wall_time=wall_time, iters=iters, name=name, extras=extras)
| Python | 0.000001 | @@ -3344,58 +3344,8 @@
%7B%7D%0A
- extras%5B%22elements_per_second%22%5D = 1 / wall_time%0A
@@ -3466,40 +3466,8 @@
ts.%0A
- name = %22%25s_mode_cpp%22 %25 name%0A
|
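The two hunks above drop the derived elements_per_second extra and the "_mode_cpp" name suffix. The patched tail of run_and_report_benchmark, reconstructed from old_contents minus the removed lines (indentation follows the record's two-space style):

    if extras is None:
      extras = {}
    extras["num_elements"] = num_elements
    self.report_benchmark(
        wall_time=wall_time, iters=iters, name=name, extras=extras)
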
9afe19676cbb87985939bd0099301a7003a38b7f | check for monitoring file and directory count | samples/folder_check.py | samples/folder_check.py | Python | 0 | @@ -0,0 +1,1738 @@
+#!/usr/bin/env python%0A%0Aimport json,os,time%0A%0APLUGIN_VERSION=%221%22%0A%0AHEARTBEAT=%22true%22%0A%0A#set this value to 1 if the file count needs to be recursive%0AINCLUDE_RECURSIVE_FILES=None%0A%0AFOLDER_NAME=%22/%22%0A%0ATHRESHOLD_COUNT=10%0A%0Adef get_data():%0A folder_checks_data = %7B%7D%0A folder_checks_data%5B'plugin_version'%5D = PLUGIN_VERSION%0A folder_checks_data%5B'heartbeat_required'%5D = HEARTBEAT%0A try:%0A if INCLUDE_RECURSIVE_FILES:%0A file_count = sum(%5Blen(files) for r, d, files in os.walk(FOLDER_NAME)%5D)%0A directory_count = sum(%5Blen(d) for r, d, files in os.walk(FOLDER_NAME)%5D)%0A else:%0A path, dirs, files = next(os.walk(FOLDER_NAME))%0A file_count = len(files)%0A directory_count = len(dirs)%0A folder_checks_data%5B'file_count'%5D = file_count%0A folder_checks_data%5B'directory_count'%5D = directory_count%0A %0A #logical conditions%0A if file_count %3E THRESHOLD_COUNT:%0A folder_checks_data%5B'status'%5D=0%0A folder_checks_data%5B'msg'%5D='File Count Exceeds the threshold'%0A return folder_checks_data%0A%0A if directory_count %3E THRESHOLD_COUNT:%0A folder_checks_data%5B'status'%5D=0%0A folder_checks_data%5B'msg'%5D='Directory Count Exceeds the threshold' %0A return folder_checks_data%0A%0A if file_count %3E THRESHOLD_COUNT and directory_count %3E THRESHOLD_COUNT:%0A folder_checks_data%5B'status'%5D=0%0A folder_checks_data%5B'msg'%5D='Folder / Directory Counts Exceeded the threshold'%0A %0A except Exception as e:%0A folder_checks_data%5B'status'%5D=0%0A folder_checks_data%5B'msg'%5D=str(e)%0A return folder_checks_data%0A%0Aif __name__ == %22__main__%22:%0A data = get_data()%0A print(json.dumps(data,indent=4))
|
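A note on the threshold logic decoded from the hunk above: the combined file-and-directory branch sits after two early returns, so it can never fire. A sketch that keeps the record's messages but makes the combined case reachable (a reconstruction; the dictionary and variable names come from the record):

    if file_count > THRESHOLD_COUNT and directory_count > THRESHOLD_COUNT:
        folder_checks_data['status'] = 0
        folder_checks_data['msg'] = 'Folder / Directory Counts Exceeded the threshold'
    elif file_count > THRESHOLD_COUNT:
        folder_checks_data['status'] = 0
        folder_checks_data['msg'] = 'File Count Exceeds the threshold'
    elif directory_count > THRESHOLD_COUNT:
        folder_checks_data['status'] = 0
        folder_checks_data['msg'] = 'Directory Count Exceeds the threshold'
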
|
d881ee2866bb422a266871c1b426d76c669025da | Test for CASSANDRA-8741 | nodetool_test.py | nodetool_test.py | Python | 0 | @@ -0,0 +1,1046 @@
+from ccmlib.node import NodetoolError%0Afrom dtest import Tester%0Afrom tools import require%0A%0A%0Aclass TestNodetool(Tester):%0A%0A @require(%228741%22)%0A def test_decommission_after_drain_is_invalid(self):%0A %22%22%22%0A @jira_ticket CASSANDRA-8741%0A%0A Running a decommission after a drain should generate%0A an unsupported operation message and exit with an error%0A code (which we receive as a NodetoolError exception).%0A %22%22%22%0A cluster = self.cluster%0A cluster.populate(%5B3%5D).start()%0A version = cluster.version()%0A%0A node = cluster.nodelist()%5B0%5D%0A node.drain(block_on_log=True)%0A%0A try:%0A node.decommission()%0A self.assertFalse(%22Expected nodetool error%22)%0A except NodetoolError as e:%0A if version %3E= %222.1%22:%0A self.assertEqual('', e.stderr)%0A self.assertTrue('Unsupported operation' in e.stdout)%0A else:%0A self.assertEqual('', e.stdout)%0A self.assertTrue('Unsupported operation' in e.stderr)%0A
|
|
125b9ce4bc5b5966f3730f3d99dba122b1d295eb | use session.request instead of session.{method} | src/sentry/http.py | src/sentry/http.py | """
sentry.utils.http
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import sentry
import six
import socket
import requests
import warnings
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from ipaddr import IPNetwork
from requests.adapters import HTTPAdapter
from requests.exceptions import SSLError
# In case SSL is unavailable (light builds) we can't import this here.
try:
from OpenSSL.SSL import ZeroReturnError
except ImportError:
class ZeroReturnError(Exception):
pass
from urlparse import urlparse
USER_AGENT = 'sentry/{version} (https://getsentry.com)'.format(
version=sentry.VERSION,
)
DISALLOWED_IPS = set((IPNetwork(i) for i in settings.SENTRY_DISALLOWED_IPS))
def get_server_hostname():
# TODO(dcramer): Ideally this would parse at runtime, but we currently
# change the URL prefix when runner initializes which may be post-import
return urlparse(settings.SENTRY_URL_PREFIX).hostname
def is_valid_url(url):
"""
Tests a URL to ensure it doesn't appear to be a blacklisted IP range.
"""
parsed = urlparse(url)
if not parsed.hostname:
return False
server_hostname = get_server_hostname()
if parsed.hostname == server_hostname:
return True
try:
ip_address = socket.gethostbyname(parsed.hostname)
except socket.gaierror:
return False
if ip_address == server_hostname:
return True
ip_network = IPNetwork(ip_address)
for addr in DISALLOWED_IPS:
if ip_network in addr:
return False
return True
class BlacklistAdapter(HTTPAdapter):
def send(self, request, *args, **kwargs):
if not is_valid_url(request.url):
raise SuspiciousOperation('%s matches the URL blacklist' % (request.url,))
return super(BlacklistAdapter, self).send(request, *args, **kwargs)
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
session.mount('https://', BlacklistAdapter())
session.mount('http://', BlacklistAdapter())
return session
def safe_urlopen(url, method=None, params=None, data=None, json=None,
headers=None, allow_redirects=False, timeout=30,
verify_ssl=True, user_agent=None):
"""
    A slightly safer version of ``urllib2.urlopen`` which prevents redirection
and ensures the URL isn't attempting to hit a blacklisted IP range.
"""
if user_agent is not None:
warnings.warn('user_agent is no longer used with safe_urlopen')
session = build_session()
kwargs = {}
if json:
kwargs['json'] = json
if not headers:
headers = {}
headers.setdefault('Content-Type', 'application/json')
if data:
kwargs['data'] = data
if params:
kwargs['params'] = params
if headers:
kwargs['headers'] = headers
if method is None:
method = 'POST' if (data or json) else 'GET'
try:
response = getattr(session, method.lower())(
url,
allow_redirects=allow_redirects,
timeout=timeout,
verify=verify_ssl,
**kwargs
)
# Our version of requests does not transform ZeroReturnError into an
# appropriately generically catchable exception
except ZeroReturnError as exc:
import sys
exc_tb = sys.exc_info()[2]
six.reraise(SSLError, exc, exc_tb)
del exc_tb
# requests' attempts to use chardet internally when no encoding is found
# and we want to avoid that slow behavior
if not response.encoding:
response.encoding = 'utf-8'
return response
def safe_urlread(response):
return response.content
| Python | 0 | @@ -3166,41 +3166,51 @@
e =
-getattr(session, method.lower())(
+session.request(%0A method=method,
%0A
@@ -3218,16 +3218,20 @@
+url=
url,%0A
|
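The hunk above replaces the dynamic getattr(session, method.lower())(...) dispatch with the equivalent explicit requests.Session.request call. The patched call, per the hunk:

        response = session.request(
            method=method,
            url=url,
            allow_redirects=allow_redirects,
            timeout=timeout,
            verify=verify_ssl,
            **kwargs
        )
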
72cbdd0c1cf804eecb8f503f86e6be237719bf99 | add echo client for testing | network/echo-server/echo-client/main.py | network/echo-server/echo-client/main.py | Python | 0.000003 | @@ -0,0 +1,1729 @@
+#!/usr/bin/env python%0A# -*- coding: UTF-8 -*-%0A#%0A# Copyright (c) 2016 ASMlover. All rights reserved.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions%0A# are met:%0A#%0A# * Redistributions of source code must retain the above copyright%0A# notice, this list ofconditions and the following disclaimer.%0A#%0A# * Redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in%0A# the documentation and/or other materialsprovided with the%0A# distribution.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS%0A# %22AS IS%22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT%0A# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS%0A# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE%0A# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,%0A# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,%0A# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER%0A# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT%0A# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN%0A# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE%0A# POSSIBILITY OF SUCH DAMAGE.%0Aimport socket%0A%0Adef main():%0A client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0A client_socket.connect(('127.0.0.1', 5555))%0A client_socket.send('This is a echo test')%0A data = client_socket.recv(4096)%0A if data:%0A print 'got data:', data%0A client_socket.close()%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
be96a2f7e3aeb59727ba88913cc6fda97bf8a423 | Add some unit tests | InvenTree/company/test_views.py | InvenTree/company/test_views.py | Python | 0.000001 | @@ -0,0 +1,1680 @@
+%22%22%22 Unit tests for Company views (see views.py) %22%22%22%0A%0A# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.test import TestCase%0Afrom django.urls import reverse%0Afrom django.contrib.auth import get_user_model%0A%0Afrom .models import SupplierPart%0A%0A%0Aclass CompanyViewTest(TestCase):%0A%0A fixtures = %5B%0A 'category',%0A 'part',%0A 'location',%0A 'company',%0A 'supplier_part',%0A %5D%0A%0A def setUp(self):%0A super().setUp()%0A%0A # Create a user%0A User = get_user_model()%0A User.objects.create_user('username', '[email protected]', 'password')%0A%0A self.client.login(username='username', password='password')%0A%0A def test_supplier_part_delete(self):%0A %22%22%22 Test the SupplierPartDelete view %22%22%22%0A%0A url = reverse('supplier-part-delete')%0A%0A # Get form using 'part' argument%0A response = self.client.get(url, %7B'part': '1'%7D, HTTP_X_REQUESTED_WITH='XMLHttpRequest')%0A self.assertEqual(response.status_code, 200)%0A%0A # Get form using 'parts' argument%0A response = self.client.get(url + '?parts%5B%5D=1&parts%5B%5D=2', HTTP_X_REQUESTED_WITH='XMLHttpRequest')%0A self.assertEqual(response.status_code, 200)%0A%0A # POST to delete two parts%0A n = SupplierPart.objects.count()%0A response = self.client.post(%0A url,%0A %7B%0A 'supplier-part-2': 'supplier-part-2',%0A 'supplier-part-3': 'supplier-part-3',%0A 'confirm_delete': True%0A %7D,%0A HTTP_X_REQUESTED_WITH='XMLHttpRequest')%0A %0A self.assertEqual(response.status_code, 200)%0A%0A self.assertEqual(n - 2, SupplierPart.objects.count())%0A
|
|
725832be85b7b0455cb735ce8a054007209d9645 | test scan scraper | src/hsimage.py | src/hsimage.py | Python | 0.000001 | @@ -0,0 +1,1247 @@
+import sys%0Afrom PIL import Image%0A%0Aimg = Image.open(sys.argv%5B1%5D)%0Awidth, height = img.size%0A%0Axblock = 5%0Ayblock = 5%0A%0Aw_width = width / xblock%0Aw_height = height / yblock%0A%0Ablockmap = %5B(xb*w_width, yb*w_height, (xb+1)*w_width, (yb+1)*w_height)%0A for xb in xrange(xblock) for yb in xrange(yblock)%5D%0A%0Anewblockmap = list(blockmap)%0A%0Anewblockmap%5B0%5D = blockmap%5B14%5D%0Anewblockmap%5B1%5D = blockmap%5B13%5D%0Anewblockmap%5B2%5D = blockmap%5B12%5D%0Anewblockmap%5B3%5D = blockmap%5B11%5D%0Anewblockmap%5B4%5D = blockmap%5B10%5D%0Anewblockmap%5B5%5D = blockmap%5B24%5D%0Anewblockmap%5B6%5D = blockmap%5B23%5D%0Anewblockmap%5B7%5D = blockmap%5B22%5D%0Anewblockmap%5B8%5D = blockmap%5B21%5D%0Anewblockmap%5B9%5D = blockmap%5B20%5D%0Anewblockmap%5B10%5D = blockmap%5B4%5D%0Anewblockmap%5B11%5D = blockmap%5B3%5D%0Anewblockmap%5B12%5D = blockmap%5B2%5D%0Anewblockmap%5B13%5D = blockmap%5B1%5D%0Anewblockmap%5B14%5D = blockmap%5B0%5D%0Anewblockmap%5B15%5D = blockmap%5B19%5D%0Anewblockmap%5B16%5D = blockmap%5B18%5D%0Anewblockmap%5B17%5D = blockmap%5B17%5D%0Anewblockmap%5B18%5D = blockmap%5B16%5D%0Anewblockmap%5B19%5D = blockmap%5B15%5D%0Anewblockmap%5B20%5D = blockmap%5B9%5D%0Anewblockmap%5B21%5D = blockmap%5B8%5D%0Anewblockmap%5B22%5D = blockmap%5B7%5D%0Anewblockmap%5B23%5D = blockmap%5B6%5D%0Anewblockmap%5B24%5D = blockmap%5B5%5D%0A%0Aresult = Image.new(img.mode, (width, height))%0Afor box, sbox in zip(blockmap, newblockmap):%0A c = img.crop(sbox)%0A result.paste(c, box)%0Aresult.save(sys.argv%5B1%5D)%0A
|
|
dede46a2d5ad1504991b05b8edab4d1ffd781f46 | fix out of range error in tracker remover plugin | searx/plugins/tracker_url_remover.py | searx/plugins/tracker_url_remover.py | '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <[email protected]>
'''
from flask_babel import gettext
import re
from searx.url_utils import urlunparse, parse_qsl, urlencode
regexes = {re.compile(r'utm_[^&]+'),
re.compile(r'(wkey|wemail)[^&]*'),
re.compile(r'&$')}
name = gettext('Tracker URL remover')
description = gettext('Remove trackers arguments from the returned URL')
default_on = True
preference_section = 'privacy'
def on_result(request, search, result):
if 'parsed_url' not in result:
return True
query = result['parsed_url'].query
if query == "":
return True
parsed_query = parse_qsl(query)
changed = False
for i, (param_name, _) in enumerate(list(parsed_query)):
for reg in regexes:
if reg.match(param_name):
parsed_query.pop(i)
changed = True
break
if changed:
result['parsed_url'] = result['parsed_url']._replace(query=urlencode(parsed_query))
result['url'] = urlunparse(result['parsed_url'])
return True
| Python | 0 | @@ -1269,17 +1269,13 @@
ange
-d = False
+s = 0
%0A
@@ -1432,16 +1432,26 @@
ry.pop(i
+ - changes
)%0A
@@ -1470,60 +1470,19 @@
ange
-d = True
+s += 1
%0A
- break%0A%0A if changed:%0A
@@ -1577,32 +1577,36 @@
y))%0A
+
result%5B'url'%5D =
@@ -1637,16 +1637,38 @@
d_url'%5D)
+%0A break
%0A%0A re
|
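Reconstruction of the patched filter loop from the hunks above. Popping by the index from the original enumeration is the out-of-range bug named in the subject; the fix counts how many parameters were already removed and offsets the pop index, updating the URL as it goes (layout is approximate, since the diff records character offsets rather than lines):

    parsed_query = parse_qsl(query)
    changes = 0
    for i, (param_name, _) in enumerate(list(parsed_query)):
        for reg in regexes:
            if reg.match(param_name):
                parsed_query.pop(i - changes)
                changes += 1
                result['parsed_url'] = result['parsed_url']._replace(query=urlencode(parsed_query))
                result['url'] = urlunparse(result['parsed_url'])
                break

    return True
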
a6fb8c86e14722527ff004ca1378458df252f8c0 | add doxygen module | modules/doxygen.py | modules/doxygen.py | Python | 0.000001 | @@ -0,0 +1,555 @@
+%22%22%22Doxygen module.%0A%0ACreate project's documentation.%0A%0AWebsite: http://www.doxygen.org%0A%22%22%22%0Aimport os%0Aimport shlex%0A%0Adef doxygen(loader, variant=None, *args):%0A if len(args) == 1:%0A args = shlex.split(args%5B0%5D)%0A%0A if variant is None:%0A variant = os.environ.get('PROJECT_VARIANT',%0A loader.config.get('default_variant'))%0A%0A config = loader.config.get('configuration', %7B%7D)%0A config = config.get(variant, %7B%7D)%0A%0A binargs = %5B'doxygen', config%5B'doxygen'%5D%5B'config_file'%5D%5D%0A os.execvp(binargs%5B0%5D, binargs)%0A%0A%0Acommands = (doxygen,)%0A
|
|
7a74f85fc76af2df62bb92ff2997ab1b84caa3a0 | Test dummy IRC bot | tests/test_irc_bot_dummy.py | tests/test_irc_bot_dummy.py | Python | 0.000001 | @@ -0,0 +1,862 @@
+%22%22%22%0A:Copyright: 2007-2021 Jochen Kupperschmidt%0A:License: MIT, see LICENSE for details.%0A%22%22%22%0A%0Aimport pytest%0A%0Afrom syslog2irc.irc import create_bot, IrcChannel, IrcConfig%0Afrom syslog2irc.signals import irc_channel_joined%0A%0A%[email protected]%0Adef config():%0A channels = %7BIrcChannel('#one'), IrcChannel('#two')%7D%0A%0A return IrcConfig(%0A server=None,%0A nickname='nick',%0A realname='Nick',%0A channels=channels,%0A )%0A%0A%[email protected]%0Adef bot(config):%0A bot = create_bot(config)%0A%0A yield bot%0A%0A bot.disconnect('Done.')%0A%0A%0Adef test_fake_channel_joins(bot):%0A received_signal_data = %5B%5D%0A%0A @irc_channel_joined.connect%0A def handle_irc_channel_joined(sender, **data):%0A received_signal_data.append(data)%0A%0A bot.start()%0A%0A assert received_signal_data == %5B%0A %7B'channel_name': '#one'%7D,%0A %7B'channel_name': '#two'%7D,%0A %5D%0A
|
|
2283c4851c628b24036edd029594c3bda115ae71 | Fix collisions with footer logo | generate_quote_images.py | generate_quote_images.py | #!/usr/bin/env python
# -*- coding: utf8 -*-
import json
import os
import textwrap
import HTMLParser
from typogrify.filters import smartypants
from PIL import Image, ImageDraw, ImageFont
OUT_DIR = 'www/quote-images'
CANVAS_WIDTH = 640
CANVAS_HEIGHT = 640
TEXT_MARGIN = (40, 40, 200, 40)
TEXT_MAX_WIDTH = CANVAS_WIDTH - (TEXT_MARGIN[1] + TEXT_MARGIN[3])
SIZE_MIN = 16
SIZE_MAX = 64
SIZE_DELTA = 4
LINE_MIN = 16
LINE_MAX = 50
LINE_DELTA = 2
LOGO = Image.open('www/assets/npr-white.png')
fonts = {}
fonts['book'] = {}
fonts['bold'] = {}
quote_width = {}
def compute_size(lines, fontsize):
font = fonts['bold'][fontsize]
width = 0
height = 0
for line in lines:
x, y = font.getsize(line)
width = max((width, x))
height += y
return width, height
def optimize_text(text, max_height):
permutations = {}
for size in fonts['bold'].keys():
for wrap_count in xrange(LINE_MIN, LINE_MAX + 1, LINE_DELTA):
lines = textwrap.wrap(text, wrap_count)
width, height = compute_size(lines, size)
# Throw away any that exceed canvas space
if width > TEXT_MAX_WIDTH - quote_width[size]:
continue
if height > max_height:
continue
permutations[(size, wrap_count)] = (width, height)
optimal = (0, 0)
# Find the largest font size that's in the butter zone
for k, v in permutations.items():
size, wrap_count = k
width, height = v
if size > optimal[0]:
optimal = k
elif size == optimal[0] and wrap_count > optimal[1]:
optimal = k
return optimal
def render(quote, name, slug, mug_src):
img = Image.new('RGB', (640, 640), (17, 17, 17))
draw = ImageDraw.Draw(img)
text_margin = TEXT_MARGIN
parse = HTMLParser.HTMLParser()
text = u'“%s”' % quote
text = parse.unescape(text)
if mug_src != '' and os.path.exists('www/assets/mugs/%s' % mug_src):
text_margin = (230, 40, 200, 40)
mask = Image.open('www/assets/mug-mask.png')
mask = mask.resize((150,150),1)
mug = Image.open('www/assets/mugs/%s' % mug_src)
mug = mug.resize((150,150),1)
mug_xy = (
(CANVAS_WIDTH / 2) - mug.size[0] / 2,
40
)
img.paste(mug, mug_xy, mask)
max_height = CANVAS_WIDTH - (text_margin[0] + text_margin[2])
size, wrap_count = optimize_text(text, max_height)
font = fonts['bold'][size]
lines = textwrap.wrap(text, wrap_count)
y = text_margin[0]
for i, line in enumerate(lines):
x = text_margin[1]
if i > 0:
x += quote_width[size]
draw.text((x, y), line, font=fonts['bold'][size], fill=(255, 255, 255))
y += size
y += size
text = u'— %s' % name
size = min(size, 32)
font = fonts['book'][size]
width, height = font.getsize(text)
x = (CANVAS_WIDTH - text_margin[1]) - width
draw.text((x, y), text, font=font, fill=(255, 255, 255))
logo_xy = (
(CANVAS_WIDTH - 40) - LOGO.size[0],
(CANVAS_HEIGHT - 40) - LOGO.size[1]
)
img.paste(LOGO, logo_xy)
img.save('%s/%s.png' % (OUT_DIR, slug), 'PNG')
def main():
for size in xrange(SIZE_MIN, SIZE_MAX + 1, SIZE_DELTA):
fonts['book'][size] = ImageFont.truetype('www/assets/Gotham-Book.otf', size)
fonts['bold'][size] = ImageFont.truetype('www/assets/Gotham-Bold.otf', size)
quote_width[size] = fonts['bold'][size].getsize(u'“')[0]
with open('www/static-data/data.json') as f:
data = json.load(f)
if not os.path.exists(OUT_DIR):
os.mkdir(OUT_DIR)
for speech in data:
print speech['slug']
if speech['money_quote']:
render(speech['money_quote'], speech['name'], speech['slug'], speech['img'])
if __name__ == '__main__':
main()
| Python | 0.000002 | @@ -1021,16 +1021,17 @@
_count)%0A
+%0A
@@ -1075,16 +1075,62 @@
s, size)
+%0A height += height / len(lines) * 2
%0A%0A
@@ -2852,16 +2852,22 @@
+= size
+ * 1.2
%0A%0A y
@@ -2928,10 +2928,10 @@
ze,
-3
2
+4
)%0A
|
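How the hunks above keep the quote block off the footer logo: the per-line advance in render gains a 1.2 leading factor, the attribution font cap drops from 32 to 24, and optimize_text pads the measured height by two line-heights so oversized permutations get filtered out. The affected lines after the patch (an approximate reconstruction):

    # in optimize_text:
    width, height = compute_size(lines, size)
    height += height / len(lines) * 2

    # in render:
    draw.text((x, y), line, font=fonts['bold'][size], fill=(255, 255, 255))
    y += size * 1.2
    ...
    size = min(size, 24)
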
a8ec11719ccc158fd457ed02f2b8459d1b452975 | Create tweets.py | tweets.py | tweets.py | Python | 0.000008 | @@ -0,0 +1,566 @@
+import sqlite3%0A%0Adef main(cursor):%0A cursor.execute(%22select * from tweets%22)%0A for tweet in cursor.fetchall():%0A tid = tweet%5B0%5D%0A tdate = tweet%5B1%5D%0A text = tweet%5B2%5D%0A geo = tweet%5B3%5D%0A t = str(tdate + text + geo)%0A %0A print '-----tweet: %25s ' %25 text%0A print '------date: %25s' %25 tdate%0A print '-------geo: %25s' %25 geo%0A print '----length: %25s' %25 len(text)%0A print '%5Cn'%0A%0Aif __name__ == '__main__':%0A conn = sqlite3.connect('tweets.db')%0A conn.text_factory = str%0A cur = conn.cursor()%0A main(cur)%0A
|