Dataset schema:
  commit        stringlengths   40 .. 40
  subject       stringlengths   1 .. 3.25k
  old_file      stringlengths   4 .. 311
  new_file      stringlengths   4 .. 311
  old_contents  stringlengths   0 .. 26.3k
  lang          stringclasses   3 values
  proba         float64         0 .. 1
  diff          stringlengths   0 .. 7.82k
f3f363e8911d3a635d68c7dbe767ee2585ed4f36
Check for duplicates based on coordinates and select only one database (EU/NASA)
checkDuplicates.py
checkDuplicates.py
Python
0
@@ -0,0 +1,2209 @@
+import pandas as pd
from astropy import coordinates as coord
from astropy import units as u


class Sweetcat:
    """Load SWEET-Cat database"""
    def __init__(self):

        self.fname_sc = 'WEBSITE_online_EU-NASA_full_database.rdb'

        # Loading the SweetCat database
        self.readSC()

    def readSC(self):
        # TODO: Use the ra and dec, and match with coordinates instead of name
        # stored in self.coordinates.

        # Read the current version of SWEET-Cat
        names_ = ['name', 'hd', 'ra', 'dec', 'V', 'Verr', 'p', 'perr',
                  'pflag', 'Teff', 'Tefferr', 'logg', 'logger',
                  'n1', 'n2', 'vt', 'vterr', 'feh', 'feherr', 'M', 'Merr',
                  'author', 'link', 'source', 'update', 'comment', 'database',
                  'n3']

        # SC = pd.read_csv('WEBSITE_online.rdb', delimiter='\t', names=names_)
        SC = pd.read_csv(self.fname_sc, delimiter='\t', names=names_)

        # Clean star names
        self.sc_names = [x.lower().replace(' ', '').replace('-', '') for x in SC.name]
        self.sc_names = list(map(str.strip, self.sc_names))
        # Original star names
        self.sc_names_orig = [x.strip() for x in SC.name]
        # Coordinates of the stars in SWEET-Cat
        self.coordinates = SC.loc[:, ['ra', 'dec']]
        # SWEET-Cat (used to automatically update the database label)
        self.SC = SC


if __name__ == '__main__':

    # Loading SWEET Cat
    sc = Sweetcat()

    # Check for duplicates, subset of columns can be changed
    print(sc.SC[sc.SC.duplicated(['ra', 'dec'], keep=False)])

    # Indexes of the duplicates
    indexes = sc.SC[sc.SC.duplicated(['ra', 'dec'], keep=False)].index

    # Remove a row
    # new_sc = sc.SC.drop([2728])
    # new_sc.to_csv('WEBSITE_online_EU-NASA_full_database_minusHD21749.rdb',
    #               sep='\t', index=False, header=False)

    # Select only the EU data
    sc_EU = new_sc[new_sc['database'].str.contains('EU')]
    # Drop the database column
    sc_like_old = sc_EU.drop(columns=['database'])
    #sc_like_old.to_csv('WEBSITE_online_EU-updated_04-03-2020.rdb',
    #                   sep='\t', index=False, header=False)
485bbe732dfb8539ffaf017f3a005896a7f3e503
create subhash module
iscc_bench/imageid/subhash.py
iscc_bench/imageid/subhash.py
Python
0.000001
@@ -0,0 +1,209 @@
+# -*- coding: utf-8 -*-
"""Test strategy with hashing mutiple shift invariant aligned patches

See: https://stackoverflow.com/a/20316789/51627
"""

def main():
    pass


if __name__ == '__main__':
    main()
1742beec320d40e7859ea6f3b72e5fb3a7d1a51e
add flask hello world
hello.py
hello.py
Python
0.999928
@@ -0,0 +1,145 @@
+from flask import Flask
app = Flask(__name__)


@app.route("/")
def hello():
    return "Hello World!"

if __name__ == "__main__":
    app.run()
f2b329d5ab98cfd1c1e9a9c28e373e1411a78967
Convert text/plain to multipart/alternative
home/bin/parse_mail.py
home/bin/parse_mail.py
Python
0.999999
@@ -0,0 +1,2616 @@
+#!/usr/bin/python3

import email
from email import policy
import pypandoc
import fileinput
import subprocess
from email import charset

# use 8bit encoded utf-8 when applicable
charset.add_charset('utf-8', charset.SHORTEST, '8bit')

# read email
stdin_lines = []
with fileinput.input(["-"]) as stdin:
    msg = email.message_from_string("".join(list(stdin)), policy=policy.SMTP)

# determine conversion
convert_simple = all([
    not msg.is_multipart(),
    msg.get_content_type() == "text/plain",
    msg.get_content_disposition() == "inline",
])
convert_multi = all([
    msg.get_content_type() == "multipart/mixed",
    not any([part.is_multipart() for part in list(msg.walk())[1:]]),
    len([part for part in msg.walk() if part.get_content_disposition() == "inline" and part.get_content_type() == "text/plain"]) == 1,
])
convert = any([convert_simple, convert_multi])

if convert:
    # extract attachments
    attachments = []
    for part in msg.walk():
        if part.is_multipart():
            continue
        elif part.get_content_disposition() == "inline" and part.get_content_type() == "text/plain":
            inline = part.get_payload()
        else:
            attachments.append(part)
    # copy headers
    headers = [
        "Date",
        "From",
        "To",
        "CC",
        "Subject",
        "Message-ID",
    ]
    new_msg = email.message.EmailMessage(policy=policy.SMTP)
    for header in headers:
        if msg[header]:
            new_msg[header] = msg[header]
    new_msg.add_header("MIME-Version", "1.0")
    # make plain and html parts
    text_plain = email.message.MIMEPart(policy=policy.SMTP)
    text_plain.set_content(inline)
    text_html = email.message.MIMEPart(policy=policy.SMTP)
    text_html.set_content(pypandoc.convert_text(inline, "html", format="md"), subtype="html")
    # attach attachments
    if convert_simple:
        new_msg.make_alternative()
        new_msg.attach(text_plain)
        new_msg.attach(text_html)
    elif convert_multi:
        new_msg.make_mixed()
        alternative = email.message.EmailMessage(policy=policy.SMTP)
        alternative.add_header("MIME-Version", "1.0")
        alternative.make_alternative()
        alternative.add_header("Content-Disposition", "inline")
        alternative.attach(text_plain)
        alternative.attach(text_html)
        new_msg.attach(alternative)
    for part in attachments:
        new_msg.attach(part)
    out_msg = new_msg
else:
    out_msg = msg

# send
subprocess.run(["/usr/bin/msmtp", "--read-recipients", "-a", "AAU"], input=out_msg.as_bytes())
#print(out_msg.as_string())
ef192ebd7679b96317cc6d878fb82c925787710d
Add Pattern based filterer.
source/bark/filterer/pattern.py
source/bark/filterer/pattern.py
Python
0
@@ -0,0 +1,1740 @@
+# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

import re

from .base import Filterer


class Pattern(Filterer):
    '''Filter logs using pattern matching.'''

    INCLUDE, EXCLUDE = ('include', 'exclude')

    def __init__(self, pattern, key='name', mode=INCLUDE):
        '''Initialise filterer with *pattern* and *key* to test.

        If *pattern* is a string it will be converted to a compiled regular
        expression instance.

        *mode* can be either 'exclude' or 'include'. If set to 'exclude'
        then any log matching the pattern will be filtered. Conversely, if set
        to 'include' then any log not matching the pattern will be filtered.

        '''
        super(Pattern, self).__init__()
        self.pattern = pattern
        if isinstance(self.pattern, basestring):
            self.pattern = re.compile(self.pattern)

        self.key = key
        self.mode = mode

    def filter(self, log):
        '''Filter *log* based on pattern matching.

        If the log does not have the key to test against it will pass the
        filter successfully. If the key is present, but not a string then the
        log will be filtered.

        '''
        # If key was not present then pass filter
        if self.key not in log:
            return False

        value = log[self.key]

        # If not a string then can't test pattern against it so fail filter.
        if not isinstance(value, basestring):
            return True

        matched = self.pattern.search(value)
        if matched and self.mode == self.EXCLUDE:
            return True

        if not matched and self.mode == self.INCLUDE:
            return True

        return False
bcd485f240a7eb6373f847d6cc9dd07ebd2c3ef2
add test case for redeem of default coupon (user limit=1, not bound to user)
coupons/tests/test_use_cases.py
coupons/tests/test_use_cases.py
Python
0
@@ -0,0 +1,721 @@
+from datetime import datetime

from django.contrib.auth.models import User
from django.utils import timezone
from django.test import TestCase

from coupons.forms import CouponForm
from coupons.models import Coupon


class DefaultCouponTestCase(TestCase):
    def setUp(self):
        self.user = User.objects.create(username="user1")
        self.coupon = Coupon.objects.create_coupon('monetary', 100)

    def test_redeem(self):
        self.coupon.redeem(self.user)
        self.assertTrue(self.coupon.is_redeemed)
        self.assertEquals(self.coupon.users.count(), 1)
        self.assertIsInstance(self.coupon.users.first().redeemed_at, datetime)
        self.assertEquals(self.coupon.users.first().user, self.user)
424d7107944f3ecb8ebf78a62dc35428952b380b
add reindex script
contrib/reindex.py
contrib/reindex.py
Python
0.000001
@@ -0,0 +1,797 @@
+#!/bin/python
# coding: utf-8

import signal
import argparse
from datetime import datetime

argparser = argparse.ArgumentParser()
argparser.add_argument("--database-type", "-T", choices=["nic", "ipam"],
                       default="nic")
argparser.add_argument("database")
args = argparser.parse_args()

if args.database_type == "nic":
    import lglass_sql.nic
    db = lglass_sql.nic.NicDatabase(args.database)
elif args.database_type == "ipam":
    import lipam.sql
    db = lipam.sql.IPAMDatabase(args.database)

n = 0
start = datetime.now()

def sigusr1(*args):
    global n
    print("Processed {} objects in {}".format(n, datetime.now() - start))

signal.signal(signal.SIGUSR1, sigusr1)

with db.session() as sess:
    for obj in sess.find():
        n += 1
        sess.reindex(obj)
    sess.commit()
65a1c06b6e5d7ec37ac232ab048b3cc541b75a45
refactor Coupon
customermanage/models/Coupon.py
customermanage/models/Coupon.py
Python
0.999424
@@ -0,0 +1,546 @@
+from django.db import models
from storemanage.models.Currency import Currency
from storemanage.models.Ticket import Ticket
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField
# Create your models here.

class Coupon(models.Model):
    ticket = models.ForeignKey(Ticket, on_delete=models.CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    remaining_date = models.DateTimeField(null=True)
    active = models.BooleanField(default=True)
    attribute = JSONField(default = dict())
837dc69a430161f6b942b629793ec1d37db780d4
Create virtool.db.settings
virtool/db/settings.py
virtool/db/settings.py
Python
0.000001
@@ -0,0 +1,928 @@
+import logging
import pymongo.errors

logger = logging.getLogger(__name__)


async def initialize(db):
    try:
        await db.settings.insert_one({
            "_id": "settings",
            "enable_sentry": {"type": "boolean", "default": True},
            "sample_group": "none",
            "sample_group_read": True,
            "sample_group_write": False,
            "sample_all_read": True,
            "sample_all_write": False,
            "sample_unique_names": True,
            "hmm_slug": "virtool/virtool-hmm",
            "software_channel": "stable",
            "minimum_password_length": 8,
            "default_source_types": ["isolate", "strain"]
        })
    except pymongo.errors.DuplicateKeyError:
        logger.debug("Settings collection already initialized.")


async def update(db, updates):
    return await db.settings.find_one_and_update({"_id": "settings"}, {
        "$set": updates
    })
094020855126721827342da98992a8c057d1a135
fix memory benchmark for reference builds.
tools/perf/perf_tools/memory_benchmark.py
tools/perf/perf_tools/memory_benchmark.py
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import multi_page_benchmark

MEMORY_HISTOGRAMS = [
    {'name': 'V8.MemoryExternalFragmentationTotal', 'units': 'percent'},
    {'name': 'V8.MemoryHeapSampleTotalCommitted', 'units': 'kb'},
    {'name': 'V8.MemoryHeapSampleTotalUsed', 'units': 'kb'}]

class MemoryBenchmark(multi_page_benchmark.MultiPageBenchmark):
  def __init__(self):
    super(MemoryBenchmark, self).__init__('stress_memory')

  def CustomizeBrowserOptions(self, options):
    options.AppendExtraBrowserArg('--dom-automation')
    # For a hard-coded set of Google pages (such as GMail), we produce custom
    # memory histograms (V8.Something_gmail) instead of the generic histograms
    # (V8.Something), if we detect that a renderer is only rendering this page
    # and no other pages. For this test, we need to disable histogram
    # customizing, so that we get the same generic histograms produced for all
    # pages.
    options.AppendExtraBrowserArg('--disable-histogram-customizer')

  def CanRunForPage(self, page):
    return hasattr(page, 'stress_memory')

  def MeasurePage(self, page, tab, results):
    for histogram in MEMORY_HISTOGRAMS:
      name = histogram['name']
      data = tab.runtime.Evaluate(
          'window.domAutomationController.getHistogram("%s")' % name)
      results.Add(name.replace('.', '_'), histogram['units'], data,
                  data_type='histogram')
Python
0.000017
@@ -1366,16 +1366,75 @@
 aluate(
+          'window.domAutomationController.getHistogram ? '
@@ -1485,16 +1485,21 @@
 am("%s")
+ : ""
 ' % name
@@ -1500,16 +1500,33 @@
 % name)
+      if data:
   re
@@ -1585,16 +1585,18 @@
 , data,
+  
4e797dd9c8b43ab62f70b0515dee9e6b5c17d043
Create secret.py
propalyzer_site/propalyzer_site/secret.py
propalyzer_site/propalyzer_site/secret.py
Python
0.000032
@@ -0,0 +1,34 @@
+class Secret():
    SECRET_KEY = ''
3d8fe5cfc64c3667f938fa221353489846a9aeb0
Add test of F.diagonal
tests/chainer_tests/functions_tests/array_tests/test_diagonal.py
tests/chainer_tests/functions_tests/array_tests/test_diagonal.py
Python
0.999693
@@ -0,0 +1,2524 @@
+import unittest

import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr


@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 4, 6), 'args': (1, 2, 0)},
        {'shape': (2, 4, 6), 'args': (-1, 2, 0)},
        {'shape': (2, 4, 6), 'args': (0, -1, -2)},
        {'shape': (2, 4, 6), 'args': (0, -1, 1)},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestDiagonal(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
        self.y_expected = self.x.diagonal(*self.args)
        self.y_shape = self.y_expected.shape
        self.gy = numpy.random.uniform(-1, 1, self.y_shape).astype(self.dtype)
        self.ggx = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)

        self.check_double_backward_options = {'atol': 1e-3, 'rtol': 1e-2}
        if self.dtype == numpy.float16:
            self.check_double_backward_options.update(dtype=numpy.float64)

    def check_forward(self, x_data):
        x = chainer.Variable(x_data)
        y = functions.diagonal(x, *self.args)
        testing.assert_allclose(y.data, self.y_expected)

    def test_forward_cpu(self):
        self.check_forward(self.x)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x))

    def check_backward(self, x_data, y_grad):
        gradient_check.check_backward(
            lambda x: functions.diagonal(x, *self.args),
            x_data, y_grad, dtype=numpy.float64)

    def test_backward_cpu(self):
        self.check_backward(self.x, self.gy)

    @attr.gpu
    def test_backward_gpu(self):
        self.check_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy))

    def check_double_backward(self, x_data, y_grad, x_grad_grad):
        def f(x):
            x = functions.diagonal(x, *self.args)
            return x * x

        gradient_check.check_double_backward(
            f, x_data, y_grad, x_grad_grad,
            **self.check_double_backward_options)

    def test_double_backward_cpu(self):
        self.check_double_backward(self.x, self.gy, self.ggx)

    @attr.gpu
    def test_double_backward_gpu(self):
        self.check_double_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy), cuda.to_gpu(self.ggx))


testing.run_module(__name__, __file__)
254564ceb905dc512693febed44e908c27f249ce
Add tests for cupyx.scipy.ndimage.label
tests/cupyx_tests/scipy_tests/ndimage_tests/test_measurements.py
tests/cupyx_tests/scipy_tests/ndimage_tests/test_measurements.py
Python
0
@@ -0,0 +1,1923 @@
+import unittest

import numpy

from cupy import testing
import cupyx.scipy.ndimage  # NOQA

try:
    import scipy.ndimage  # NOQA
except ImportError:
    pass


def _generate_binary_structure(rank, connectivity):
    if connectivity < 1:
        connectivity = 1
    if rank < 1:
        return numpy.array(True, dtype=bool)
    output = numpy.fabs(numpy.indices([3] * rank) - 1)
    output = numpy.add.reduce(output, 0)
    return output <= connectivity


@testing.parameterize(*testing.product({
    'ndim': [1, 2, 3, 4],
    'size': [50, 100],
    'density': [0.2, 0.3, 0.4],
    'connectivity': [None, 2, 3],
    'x_dtype': [bool, numpy.int8, numpy.int32, numpy.int64,
                numpy.float32, numpy.float64],
    'output': [None, numpy.int32, numpy.int64],
    'o_type': [None, 'ndarray']
}))
@testing.gpu
@testing.with_requires('scipy')
class TestLabel(unittest.TestCase):

    @testing.numpy_cupy_array_equal(scipy_name='scp')
    def test_label(self, xp, scp):
        size = int(pow(self.size, 1 / self.ndim))
        x_shape = range(size, size + self.ndim)
        x = xp.zeros(x_shape, dtype=self.x_dtype)
        # x[numpy.where(testing.shaped_random(x_shape, xp) < self.density)] = 1
        x[testing.shaped_random(x_shape, xp) < self.density] = 1
        if self.connectivity is None:
            structure = None
        else:
            structure = _generate_binary_structure(self.ndim,
                                                   self.connectivity)
        if self.o_type == 'ndarray' and self.output is not None:
            output = xp.empty(x_shape, dtype=self.output)
            num_features = scp.ndimage.label(x, structure=structure,
                                             output=output)
            return output
        labels, num_features = scp.ndimage.label(x, structure=structure,
                                                 output=self.output)
        return labels
2a6907ddf9c7b5df2e1b59c8feeb0fa4bd4b5752
add rudimentary validation tests for azure
tests/validation/cattlevalidationtest/core/test_machine_azure.py
tests/validation/cattlevalidationtest/core/test_machine_azure.py
Python
0
@@ -0,0 +1,3343 @@
+import logging

from common_fixtures import *  # NOQA

DEFAULT_TIMEOUT = 900

subscription_id = os.environ.get('AZURE_SUBSCRIPTION_ID')
subscription_cert = os.environ.get('AZURE_SUBSCRIPTION_CERT')

# Use azure settings from environment variables , if set
i = 'b39f27a8b8c64d52b05eac6a62ebad85__'
i = i + 'Ubuntu-14_04_1-LTS-amd64-server-20140927-en-us-30GB'
image = os.environ.get('AZURE_IMAGE', i)
location = os.environ.get('AZURE_LOCATION', "West US")
username = os.environ.get('AZURE_USERNAME', "")
password = os.environ.get('AZURE_PASSWORD', "")
size = os.environ.get('AZURE_SIZE', "Small")

if_machine_azure = pytest.mark.skipif(
    not os.environ.get('AZURE_SUBSCRIPTION_ID') or
    not os.environ.get('AZURE_SUBSCRIPTION_CERT'),
    reason='Azure SubscriptionId/SubscriptionCert/AuthToken is not set')


# Get logger
logger = logging.getLogger(__name__)


@pytest.fixture(scope='session', autouse=True)
def register_host(admin_client):
    test_url = cattle_url()
    start = test_url.index("//") + 2
    api_host = test_url[start:]
    admin_client.create_setting(name="api.host", value=api_host)


@if_machine_azure
def test_azure_machine_all_params(client):
    name = random_str()
    create_args = {"name": name,
                   "azureConfig": {"subscriptionId": subscription_id,
                                   "subscriptionCert": subscription_cert,
                                   "image": image,
                                   "location": location,
                                   "username": username,
                                   "password": password,
                                   "size": size}}
    expected_values = {"subscriptionId": subscription_id,
                       "subscriptionCert": subscription_cert,
                       "image": image,
                       "location": location,
                       "username": username,
                       "password": password,
                       "size": size}
    azure_machine_life_cycle(client, create_args, expected_values)


def azure_machine_life_cycle(client, configs, expected_values):
    machine = client.create_machine(**configs)

    machine = client.wait_success(machine, timeout=DEFAULT_TIMEOUT)
    assert machine.state == 'active'

    # Wait until host shows up with some physicalHostId
    machine = wait_for_host(client, machine)
    host = machine.hosts()[0]
    assert host.state == 'active'
    assert machine.accountId == host.accountId
    # Remove the machine and make sure that the host
    # and the machine get removed

    machine = client.wait_success(machine.remove())
    assert machine.state == 'removed'

    host = client.reload(machine.hosts()[0])
    assert host.state == 'removed'


def wait_for_host(client, machine):
    wait_for_condition(client,
                       machine,
                       lambda x: len(x.hosts()) == 1,
                       lambda x: 'Number of hosts associated with machine ' +
                                 str(len(x.hosts())),
                       DEFAULT_TIMEOUT)

    host = machine.hosts()[0]
    host = wait_for_condition(client,
                              host,
                              lambda x: x.state == 'active',
                              lambda x: 'Host state is ' + x.state
                              )
    return machine
e9091be4ae9ddf0cb83bd7535c4ced5bb2d691d2
add config_edit.py
castiron/lib/castiron/actions/config_edit.py
castiron/lib/castiron/actions/config_edit.py
Python
0.000004
@@ -0,0 +1,2453 @@
+from castiron.tools import Action, register_actions

import os
import re

class G:
    all_edits = []

def _file_contains_re(runner, path, contains_re):
    real_path = os.path.realpath(os.path.expanduser(path))
    if os.path.exists(real_path):
        with open(real_path) as f:
            for line in f:
                if contains_re.search(line.rstrip()):
                    return True
    return False

def _append_text(runner, path, text):
    real_path = os.path.realpath(os.path.expanduser(path))
    with open(real_path, 'a' if os.path.exists(real_path) else 'w') as f:
        f.write('\n')
        f.write(text)
        if not text.endswith('\n'):
            f.write('\n')

class EditBase(object):
    def __init__(self, path):
        self.path = path

class Inject(EditBase):
    '''
    Append to existing file or create new file.
    '''
    def __init__(self, path, skip_if, text):
        '''
        path is the file to edit or create.
        text is the text to inject.
        skip_if skips the edit when a line matches a regex pattern.
        '''
        super(Inject, self).__init__(path)
        self.skip_if_re = re.compile(skip_if)
        self.text = text
        self.needed = False

    def check(self, runner):
        return _file_contains_re(runner, self.path, self.skip_if_re)

    def perform(self, runner):
        if _file_contains_re(runner, self.path, self.skip_if_re):
            _append_text(runner, self.path, self.text)

def edits(*edits):
    G.all_edits.extend(edits)

class ConfigEditAction(Action):

    description = 'edit configuration files'
    enabled = True

    def __init__(self):
        super(ConfigEditAction, self).__init__()
        class CheckedEdit(object):
            def __init__(self, edit):
                self.edit = edit
                self.needed = False
        self.checked_edits = [CheckedEdit(edit) for edit in G.all_edits]

    def check(self, runner):
        okay = False
        for checked_edit in self.checked_edits:
            if runner.call(checked_edit.edit.check):
                okay = checked_edit.needed = True
        return okay

    def perform(self, runner, needed):
        for checked_edit in self.checked_edits:
            if checked_edit.needed:
                runner.call(checked_edit.edit.perform)
            else:
                print 'Configuration file was already changed: %s' % checked_edit.edit.path

register_actions(ConfigEditAction)
bba34d3bf938bcf2a69a69b7c9e4993e19fc7002
Add missed ProductMapping.is_empty new method.
src/python/twitter/pants/goal/products.py
src/python/twitter/pants/goal/products.py
from collections import defaultdict

class Products(object):
  class ProductMapping(object):
    """Maps products of a given type by target. Each product is a map from basedir to a list of
    files in that dir.
    """

    def __init__(self, typename):
      self.typename = typename
      self.by_target = defaultdict(lambda: defaultdict(list))

    def add(self, target, basedir, product_paths=None):
      """
        Adds a mapping of products for the given target, basedir pair.
        If product_paths are specified, these will over-write any existing mapping for this target.
        If product_paths is omitted, the current mutable list of mapped products for this target
        and basedir is returned for appending.
      """
      if product_paths is not None:
        self.by_target[target][basedir].extend(product_paths)
      else:
        return self.by_target[target][basedir]

    def has(self, target):
      """Returns whether we have a mapping for the specified target."""
      return target in self.by_target

    def get(self, target):
      """
        Returns the product mapping for the given target as a map of <basedir> -> <products list>,
        or None if no such product exists.
      """
      return self.by_target.get(target)

    def __getitem__(self, target):
      """
        Support for subscripting into this mapping. Returns the product mapping for the given
        target as a map of <basedir> -> <products list>.
        If no mapping exists, returns an empty map whose values default to empty lists. So you
        can use the result without checking for None.
      """
      return self.by_target[target]

    def __getitem__(self, target):
      """
        Support for subscripting into this mapping. Returns the product mapping for the given
        target as a map of <basedir> -> <products list>.
        If no mapping exists, returns an empty map whose values default to empty lists. So you
        can use the result without checking for None.
      """
      return self.by_target[target]

    def itermappings(self):
      """
        Returns an iterable over all pairs (target, product) in this mapping.
        Each product is itself a map of <basedir> -> <products list>.
      """
      return self.by_target.iteritems()

    def keys_for(self, basedir, file):
      """Returns the set of keys the given mapped product is registered under."""
      keys = set()
      for key, mappings in self.by_target.items():
        for mapped in mappings.get(basedir, []):
          if file == mapped:
            keys.add(key)
            break
      return keys

    def __repr__(self):
      return 'ProductMapping(%s) {\n  %s\n}' % (self.typename, '\n  '.join(
        '%s => %s\n    %s' % (str(target), basedir, outputs)
        for target, outputs_by_basedir in self.by_target.items()
        for basedir, outputs in outputs_by_basedir.items()))

  def __init__(self):
    self.products = {}  # type -> ProductMapping instance.
    self.predicates_for_type = defaultdict(list)

    self.data_products = {}  # type -> arbitrary object.
    self.required_data_products = set()

  def require(self, typename, predicate=None):
    """Registers a requirement that file products of the given type by mapped.

    If a target predicate is supplied, only targets matching the predicate are mapped.
    """
    if predicate:
      self.predicates_for_type[typename].append(predicate)
    return self.products.setdefault(typename, Products.ProductMapping(typename))

  def isrequired(self, typename):
    """Returns a predicate that selects targets required for the given type if mappings
    are required. Otherwise returns None.
    """
    if typename not in self.products:
      return None
    def combine(first, second):
      return lambda target: first(target) or second(target)
    return reduce(combine, self.predicates_for_type[typename], lambda target: False)

  def get(self, typename):
    """Returns a ProductMapping for the given type name."""
    return self.require(typename)

  def require_data(self, typename):
    """ Registers a requirement that data produced by tasks is required.

    typename: the name of a data product that should be generated.
    """
    self.required_data_products.add(typename)

  def is_required_data(self, typename):
    """ Checks if a particular data product is required by any tasks."""
    return typename in self.required_data_products

  def get_data(self, typename):
    """ Returns a data product, or None if the product isn't found."""
    return self.data_products.get(typename)

  def set_data(self, typename, data):
    """ Stores a required data product.

    If the product already exists, the value is replaced.
    """
    self.data_products[typename] = data
Python
0
@@ -30,16 +30,17 @@
 ltdict

+
 class Pr
@@ -343,16 +343,75 @@
 list))

+    def empty(self):
      return len(self.by_target) == 0

     def
@@ -1191,37 +1191,35 @@
 et as a 
-map
+tuple
 of 
-<
+(
 basedir
-> -> <
+,
 products
@@ -1227,10 +1227,10 @@
 list
->,
+).
 
@@ -1238,41 +1238,67 @@
-or None if no such product exists
+Can return None if there is no mapping for the given target
 .
99430e9f51eccb79f32af49bedfb28ba5f39cd09
update : minor changes
ptop/plugins/system_sensor.py
ptop/plugins/system_sensor.py
Python
0.000001
@@ -0,0 +1,778 @@
+'''
    System sensor plugin

    Generates the basic system info
'''
from ptop.core import Plugin
import psutil, socket, getpass
import datetime, time

class SystemSensor(Plugin):
    def __init__(self,**kwargs):
        super(SystemSensor,self).__init__(**kwargs)

    # overriding the update method
    def update(self):
        # only text part for the system info
        self.currentValue['text'] = {}
        # updating values
        self.currentValue['text']['user'] = getpass.getuser()
        self.currentValue['text']['host_name'] = socket.gethostname()
        self.currentValue['text']['running_time'] = datetime.timedelta(seconds=int(time.time() - psutil.boot_time()))


system_sensor = SystemSensor(name='System',sensorType='text',interval=1)
ded21520c1fde89336480b48387d383a2e449c2a
Write test for array
tests/chainer_tests/utils_tests/test_array.py
tests/chainer_tests/utils_tests/test_array.py
Python
0.000234
@@ -0,0 +1,755 @@
+import unittest

import numpy

from chainer import cuda
from chainer.utils import array
from chainer.testing import attr


class TestFullLike(unittest.TestCase):

    def test_full_like_cpu(self):
        x = numpy.array([1, 2], numpy.float32)
        y = array.full_like(x, 3)
        self.assertIsInstance(y, numpy.ndarray)
        self.assertEqual(y.shape, (2,))
        self.assertEqual(y[0], 3)
        self.assertEqual(y[1], 3)

    @attr.gpu
    def test_full_like_gpu(self):
        x = cuda.cupy.array([1, 2], numpy.float32)
        y = array.full_like(x, 3)
        self.assertIsInstance(y, cuda.cupy.ndarray)
        y = cuda.to_cpu(y)
        self.assertEqual(y.shape, (2,))
        self.assertEqual(y[0], 3)
        self.assertEqual(y[1], 3)
f4260ad3e652a09922395e64d29bcf8f96ee12bc
Add test_colormap.py
tests/test_colormap.py
tests/test_colormap.py
Python
0.000003
@@ -0,0 +1,1419 @@
+# -*- coding: utf-8 -*-
""""
Folium Colormap Module
----------------------
"""
import folium.colormap as cm

def test_simple_step():
    step = cm.StepColormap(['green','yellow','red'], vmin=3., vmax=10., index=[3,4,8,10], caption='step')
    step = cm.StepColormap(['r','y','g','c','b','m'])
    step._repr_html_()

def test_simple_linear():
    linear = cm.LinearColormap(['green','yellow','red'], vmin=3., vmax=10.)
    linear = cm.LinearColormap(['red','orange', 'yellow','green'], index=[0,0.1,0.9,1.])
    linear._repr_html_()

def test_linear_to_step():
    some_list = [30.6, 50, 51, 52, 53, 54, 55, 60, 70, 100]
    lc = cm.linear.YlOrRd
    lc.to_step(n=12)
    lc.to_step(index=[0,2,4,6,8,10])
    lc.to_step(data=some_list, n=12)
    lc.to_step(data=some_list, n=12, method='linear')
    lc.to_step(data=some_list, n=12, method='log')
    lc.to_step(data=some_list, n=30, method='quantiles')
    lc.to_step(data=some_list, quantiles=[0,0.3,0.7,1])
    lc.to_step(data=some_list, quantiles=[0,0.3,0.7,1], round_method='int')
    lc.to_step(data=some_list, quantiles=[0,0.3,0.7,1], round_method='log10')

def test_step_to_linear():
    step = cm.StepColormap(['green','yellow','red'], vmin=3., vmax=10., index=[3,4,8,10], caption='step')
    step.to_linear()

def test_linear_object():
    cm.linear.OrRd._repr_html_()
    cm.linear.PuBu.to_step(12)
    cm.linear.YlGn.scale(3,12)
    cm.linear._repr_html_()
dd65fb84e41b11f8d97e3862d00137969589ab4b
integrate greenify
tests/test_greenify.py
tests/test_greenify.py
Python
0.000475
@@ -0,0 +1,595 @@
+from __future__ import absolute_import
import sys
import time
import greenify
greenify.greenify()
import pylibmc
import random
from tornado.ioloop import IOLoop
from tornado.gen import coroutine
from gtornado import green

greenify.patch_lib("/usr/lib/x86_64-linux-gnu/libmemcached.so")

def call_mc(i):
    mc = pylibmc.Client(["localhost"])
    mc.get_stats()
    mc.disconnect_all()

@coroutine
def use_greenlet():
    s = time.time()
    yield [green.spawn(call_mc, i) for i in range(1000)]
    print(time.time() - s)

if __name__ == "__main__":
    IOLoop.instance().run_sync(use_greenlet)
7401d1ecd6b3323b266cf02eabd42a2c4e40d988
Add initial tests for test module
tests/test_test.py
tests/test_test.py
Python
0
@@ -0,0 +1,1649 @@
+"""tests/test_test.py.

Test to ensure basic test functionality works as expected.

Copyright (C) 2019 Timothy Edmund Crosley

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

"""
import pytest

import hug

api = hug.API(__name__)


def test_cli():
    """Test to ensure the CLI tester works as intended to allow testing CLI endpoints"""
    @hug.cli()
    def my_cli_function():
        return 'Hello'

    assert hug.test.cli(my_cli_function) == 'Hello'
    assert hug.test.cli('my_cli_function', api=api) == 'Hello'

    # Shouldn't be able to specify both api and module.
    with pytest.raises(ValueError):
        assert hug.test.cli('my_method', api=api, module=hug)
925da64adf0b74ba18eb78acd9127e3a6dc6f903
Add test cases for reported issues
tests/test_reported.py
tests/test_reported.py
Python
0
@@ -0,0 +1,561 @@
+# -*- coding: utf-8 -*-


from pyrql import parse


CMP_OPS = ['eq', 'lt', 'le', 'gt', 'ge', 'ne']


class TestReportedErrors:

    def test_like_with_string_parameter(self):
        expr = 'like(name,*new jack city*)'
        rep = {'name': 'like', 'args': ['name', '*new jack city*']}

        pd = parse(expr)
        assert pd == rep

    def test_like_with_string_encoded_parameter(self):
        expr = 'like(name,*new%20jack%20city*)'
        rep = {'name': 'like', 'args': ['name', '*new jack city*']}

        pd = parse(expr)
        assert pd == rep
e61c6eb5b5a9f6f70df036dcfedf552325a6e9bd
move unit test syn import to pytest fixture
tests/unit/conftest.py
tests/unit/conftest.py
Python
0
@@ -0,0 +1,437 @@
+import logging

import pytest

from synapseclient import Synapse
from synapseclient.core.logging_setup import SILENT_LOGGER_NAME

"""
pytest unit test session level fixtures
"""


@pytest.fixture(scope="session")
def syn():
    """
    Create a Synapse instance that can be shared by all tests in the session.
    """
    syn = Synapse(debug=False, skip_checks=True)
    syn.logger = logging.getLogger(SILENT_LOGGER_NAME)
    return syn
182762812cb1945dd2b50c21b34609be00b7bf45
Create wordlist_add_digits.py
wordlist_add_digits.py
wordlist_add_digits.py
Python
0.000575
@@ -0,0 +1,859 @@
+#!/usr/bin/env python
#Adds 4digits to the end of the common word lists
import os, sys


class Wordlist_Add_Digits():

	def add_digits(self, wordlist, outfile):
		#File to start with
		file=wordlist
		#Output file
		out=open(outfile, 'w')
		#Start loop of 0000-9999 added to each word
		with open(file) as f:
			content = f.read().splitlines()
			for x in content:
				for a in range(10):
					x0=x+str(a)
					for b in range(10):
						x1=x0+str(b)
						for c in range (10):
							x2=x1+str(c)
							for d in range (10):
								x3=x2+str(d)
								# print final combo
								out.write(str(x3)+"\n")


if __name__ == '__main__':
	try:
		wordlist = sys.argv[1]
		outfile = sys.argv[2]
		wordz = Wordlist_Add_Digits()
		wordz.add_digits(wordlist, outfile)
	except IndexError:
		print('Usage: wordlist_add_digits.py wordlist.txt output.txt')
		sys.exit(1)
da3e9d5f7ffeae68ef7ae3b07247a9f6cb16d40d
Create get_user_statuses.py
src/Python/get_user_statuses.py
src/Python/get_user_statuses.py
Python
0.000003
@@ -0,0 +1,1450 @@
+import sys
import urllib2
import time
import re
from lxml import html

def get_user_statuses(userid):

    reached_end = False
    i = 1
    saying_list = []
    while not reached_end:
        page_url = "http://www.douban.com/people/%s/statuses?p=%d" % (userid, i)

        # TODO: User login. Results limited to the first 10 pages without login
        response = urllib2.urlopen(page_url)
        page_html = response.read()
        tree = html.fromstring(page_html)

        statuses_element_list = tree.xpath('//*[@class="status-item"]')
        if len(statuses_element_list) < 20:
            reached_end = True
        print len(statuses_element_list)

        for s in range(len(statuses_element_list)):
            author_element = statuses_element_list[s].findall('.//*[@class="hd"]/*[@class="text"]/a')[0]
            author_link = author_element.get('href')
            author_id=None
            if re.search(r".*people/(.+?)/", author_link):
                author_id=re.search(r".*people/(.+?)/", author_link).group(1)
            if author_id == userid:
                blockquote_element_list = statuses_element_list[s].findall('.//*[@class="status-saying"]/blockquote')
                if blockquote_element_list:
                    content='\n'.join([p.text for p in blockquote_element_list[0].findall('p')])
                    saying_list.append(content)
        i += 1
        time.sleep(1)
    return saying_list

if __name__ == "__main__":
    userid = sys.argv[1]
    result_list = get_user_statuses( userid )
    for i in result_list:
        print i
c0637f482a95dd7ec02bb7b85bc8d164c0a80585
add missing check_headers tool
tools/check_headers.py
tools/check_headers.py
Python
0.000001
@@ -0,0 +1,1267 @@
+#!/usr/bin/env python2

import sys
from os import unlink
from os.path import exists

HEADERS = ('Content-Disposition', 'Content-Length', 'Content-Type',
           'ETag', 'Last-Modified')

def is_sig_header(header):
    header = header.lower()
    for s in HEADERS:
        if header.startswith(s.lower()):
            return True

def do():
    headers_fn = sys.argv[1]
    signature_fn = sys.argv[2]

    # first, get all the headers from the latest request
    with open(headers_fn) as fd:
        headers = [line.strip() for line in fd.readlines()]

    last_index = 0
    for index, header in enumerate(headers):
        if header.startswith('HTTP/1.'):
            last_index = index
    headers = headers[last_index:]

    # select few headers for the signature
    headers = [header for header in headers if is_sig_header(header)]
    signature = '\n'.join(headers)

    # read the original signature
    if exists(signature_fn):
        with open(signature_fn) as fd:
            original_signature = fd.read()
        if original_signature == signature:
            return 0
        unlink(signature_fn)

    if signature:
        with open(signature_fn, 'w') as fd:
            fd.write(signature)

try:
    ret = do()
except:
    ret = 1

sys.exit(ret)
3e5105218976549a0a782f179bb358edfd4e89c9
Add load_tests / __init__.py to the azure/cli/tests module to allow for simpler unit test discovery
src/azure/cli/tests/__init__.py
src/azure/cli/tests/__init__.py
Python
0.000001
@@ -0,0 +1,305 @@
+from .test_argparse import Test_argparse

from unittest import TestSuite

test_cases = [Test_argparse]

def load_tests(loader, tests, pattern):
    suite = TestSuite()
    for testclass in test_cases:
        tests = loader.loadTestsFromTestCase(testclass)
        suite.addTests(tests)
    return suite
543190dfb0b467127841b5960d2c1deaf9eff9ea
add hosts and interface_addr to sonos config
homeassistant/components/media_player/sonos.py
homeassistant/components/media_player/sonos.py
""" homeassistant.components.media_player.sonos ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Provides an interface to Sonos players (via SoCo) For more details about this platform, please refer to the documentation at https://home-assistant.io/components/media_player.sonos/ """ import logging import datetime from homeassistant.components.media_player import ( MediaPlayerDevice, SUPPORT_PAUSE, SUPPORT_SEEK, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_MUTE, SUPPORT_PREVIOUS_TRACK, SUPPORT_NEXT_TRACK, MEDIA_TYPE_MUSIC) from homeassistant.const import ( STATE_IDLE, STATE_PLAYING, STATE_PAUSED, STATE_UNKNOWN) REQUIREMENTS = ['SoCo==0.11.1'] _LOGGER = logging.getLogger(__name__) # The soco library is excessively chatty when it comes to logging and # causes a LOT of spam in the logs due to making a http connection to each # speaker every 10 seconds. Quiet it down a bit to just actual problems. _SOCO_LOGGER = logging.getLogger('soco') _SOCO_LOGGER.setLevel(logging.ERROR) _REQUESTS_LOGGER = logging.getLogger('requests') _REQUESTS_LOGGER.setLevel(logging.ERROR) SUPPORT_SONOS = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE |\ SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_SEEK # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """ Sets up the Sonos platform. """ import soco if discovery_info: add_devices([SonosDevice(hass, soco.SoCo(discovery_info))]) return True players = soco.discover() if not players: _LOGGER.warning('No Sonos speakers found.') return False add_devices(SonosDevice(hass, p) for p in players) _LOGGER.info('Added %s Sonos speakers', len(players)) return True # pylint: disable=too-many-instance-attributes, too-many-public-methods # pylint: disable=abstract-method class SonosDevice(MediaPlayerDevice): """ Represents a Sonos device. """ # pylint: disable=too-many-arguments def __init__(self, hass, player): self.hass = hass super(SonosDevice, self).__init__() self._player = player self.update() @property def should_poll(self): return True def update_sonos(self, now): """ Updates state, called by track_utc_time_change. """ self.update_ha_state(True) @property def name(self): """ Returns the name of the device. """ return self._name @property def unique_id(self): """ Returns a unique id. """ return "{}.{}".format(self.__class__, self._player.uid) @property def state(self): """ Returns the state of the device. """ if self._status == 'PAUSED_PLAYBACK': return STATE_PAUSED if self._status == 'PLAYING': return STATE_PLAYING if self._status == 'STOPPED': return STATE_IDLE return STATE_UNKNOWN def update(self): """ Retrieve latest state. """ self._name = self._player.get_speaker_info()['zone_name'].replace( ' (R)', '').replace(' (L)', '') self._status = self._player.get_current_transport_info().get( 'current_transport_state') self._trackinfo = self._player.get_current_track_info() @property def volume_level(self): """ Volume level of the media player (0..1). """ return self._player.volume / 100.0 @property def is_volume_muted(self): return self._player.mute @property def media_content_id(self): """ Content ID of current playing media. """ return self._trackinfo.get('title', None) @property def media_content_type(self): """ Content type of current playing media. """ return MEDIA_TYPE_MUSIC @property def media_duration(self): """ Duration of current playing media in seconds. 
""" dur = self._trackinfo.get('duration', '0:00') # If the speaker is playing from the "line-in" source, getting # track metadata can return NOT_IMPLEMENTED, which breaks the # volume logic below if dur == 'NOT_IMPLEMENTED': return None return sum(60 ** x[0] * int(x[1]) for x in enumerate(reversed(dur.split(':')))) @property def media_image_url(self): """ Image url of current playing media. """ if 'album_art' in self._trackinfo: return self._trackinfo['album_art'] @property def media_title(self): """ Title of current playing media. """ if 'artist' in self._trackinfo and 'title' in self._trackinfo: return '{artist} - {title}'.format( artist=self._trackinfo['artist'], title=self._trackinfo['title'] ) if 'title' in self._status: return self._trackinfo['title'] @property def supported_media_commands(self): """ Flags of media commands that are supported. """ return SUPPORT_SONOS def turn_off(self): """ Turn off media player. """ self._player.pause() def volume_up(self): """ Volume up media player. """ self._player.volume += 1 def volume_down(self): """ Volume down media player. """ self._player.volume -= 1 def set_volume_level(self, volume): """ Set volume level, range 0..1. """ self._player.volume = str(int(volume * 100)) def mute_volume(self, mute): """ Mute (true) or unmute (false) media player. """ self._player.mute = mute def media_play(self): """ Send paly command. """ self._player.play() def media_pause(self): """ Send pause command. """ self._player.pause() def media_next_track(self): """ Send next track command. """ self._player.next() def media_previous_track(self): """ Send next track command. """ self._player.previous() def media_seek(self, position): """ Send seek command. """ self._player.seek(str(datetime.timedelta(seconds=int(position)))) def turn_on(self): """ Turn the media player on. """ self._player.play()
Python
0
@@ -1373,16 +1373,34 @@
 ort soco
+
    import socket
 

    if
@@ -1523,22 +1523,374 @@
 s = 
-soco.discover(
+None
    hosts = config.get('hosts', None)
    if hosts:
        players = []
        for host in hosts.split(","):
            host = socket.gethostbyname(host)
            players.append(soco.SoCo(host))

    if not players:
        players = soco.discover(interface_addr=config.get('interface_addr',
                                                          None)
 )
f91d666cc06f5db48bea43de29ca4153e58c473d
add test for os platform check
check.py
check.py
Python
0
@@ -0,0 +1,325 @@
+#!/bin/py

import os
import sys

def osCheck():
    """ Check if OS is 'UNIX-like' """
    if not sys.platform.startswith('linux') or sys.platform.startswith('darwin'):
        # if not sys.platform.startswith('darwin'):
        print("This program was designed for UNIX-like systems. Exiting.")
        sys.exit()

osCheck()
cafb802c51e0c0b8ff58cb749fa30b99cd7182b4
Fix versions script to accept versions without -ce suffix
scripts/versions.py
scripts/versions.py
import operator
import re
from collections import namedtuple

import requests

base_url = 'https://download.docker.com/linux/static/{0}/x86_64/'
categories = [
    'edge',
    'stable',
    'test'
]

STAGES = ['tp', 'beta', 'rc']


class Version(namedtuple('_Version', 'major minor patch stage edition')):

    @classmethod
    def parse(cls, version):
        edition = None
        version = version.lstrip('v')
        version, _, stage = version.partition('-')
        if stage:
            if not any(marker in stage for marker in STAGES):
                edition = stage
                stage = None
            elif '-' in stage:
                edition, stage = stage.split('-')

        major, minor, patch = version.split('.', 3)
        return cls(major, minor, patch, stage, edition)

    @property
    def major_minor(self):
        return self.major, self.minor

    @property
    def order(self):
        """Return a representation that allows this object to be sorted
        correctly with the default comparator.
        """
        # non-GA releases should appear before GA releases
        # Order: tp -> beta -> rc -> GA
        if self.stage:
            for st in STAGES:
                if st in self.stage:
                    stage = (STAGES.index(st), self.stage)
                    break
        else:
            stage = (len(STAGES),)

        return (int(self.major), int(self.minor), int(self.patch)) + stage

    def __str__(self):
        stage = '-{}'.format(self.stage) if self.stage else ''
        edition = '-{}'.format(self.edition) if self.edition else ''
        return '.'.join(map(str, self[:3])) + edition + stage


def main():
    results = set()
    for url in [base_url.format(cat) for cat in categories]:
        res = requests.get(url)
        content = res.text
        versions = [
            Version.parse(
                v.strip('"').lstrip('docker-').rstrip('.tgz').rstrip('-x86_64')
            ) for v in re.findall(
                r'"docker-[0-9]+\.[0-9]+\.[0-9]+-.*tgz"', content
            )
        ]
        sorted_versions = sorted(
            versions, reverse=True, key=operator.attrgetter('order')
        )
        latest = sorted_versions[0]
        results.add(str(latest))

    print(' '.join(results))


if __name__ == '__main__':
    main()
Python
0
@@ -2018,16 +2018,17 @@
 .[0-9]+-
+?
 .*tgz"',
6c7f241656a2ae3059af27f91799a8247d84cfe7
fix score.py for factored models
nematus/score.py
nematus/score.py
""" Given a parallel corpus of sentence pairs: with one-to-one of target and source sentences, produce the score, and optionally alignment for each pair. """ import sys import argparse import tempfile import numpy import json from data_iterator import TextIterator from util import load_dict, load_config from alignment_util import * from compat import fill_options from theano_util import (load_params, init_theano_params) from nmt import (pred_probs, build_model, prepare_data, init_params) from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams import theano def rescore_model(source_file, target_file, saveto, models, options, b, normalize, verbose, alignweights): trng = RandomStreams(1234) fs_log_probs = [] for model, option in zip(models, options): # load model parameters and set theano shared variables param_list = numpy.load(model).files param_list = dict.fromkeys([key for key in param_list if not key.startswith('adam_')], 0) params = load_params(model, param_list) tparams = init_theano_params(params) trng, use_noise, \ x, x_mask, y, y_mask, \ opt_ret, \ cost = \ build_model(tparams, option) inps = [x, x_mask, y, y_mask] use_noise.set_value(0.) if alignweights: sys.stderr.write("\t*** Save weight mode ON, alignment matrix will be saved.\n") outputs = [cost, opt_ret['dec_alphas']] f_log_probs = theano.function(inps, outputs) else: f_log_probs = theano.function(inps, cost) fs_log_probs.append(f_log_probs) def _score(pairs, alignweights=False): # sample given an input sequence and obtain scores scores = [] alignments = [] for i, f_log_probs in enumerate(fs_log_probs): score, alignment = pred_probs(f_log_probs, prepare_data, options[i], pairs, normalize=normalize, alignweights = alignweights) scores.append(score) alignments.append(alignment) return scores, alignments pairs = TextIterator(source_file.name, target_file.name, options[0]['dictionaries'][:-1], options[0]['dictionaries'][1], n_words_source=options[0]['n_words_src'], n_words_target=options[0]['n_words'], batch_size=b, maxlen=float('inf'), sort_by_length=False) #TODO: sorting by length could be more efficient, but we'd want to resort after scores, alignments = _score(pairs, alignweights) source_file.seek(0) target_file.seek(0) source_lines = source_file.readlines() target_lines = target_file.readlines() for i, line in enumerate(target_lines): score_str = ' '.join(map(str,[s[i] for s in scores])) if verbose: saveto.write('{0} '.format(line.strip())) saveto.write('{0}\n'.format(score_str)) ### optional save weights mode. if alignweights: ### writing out the alignments. temp_name = saveto.name + ".json" with tempfile.NamedTemporaryFile(prefix=temp_name) as align_OUT: for line in all_alignments: align_OUT.write(line + "\n") ### combining the actual source and target words. 
combine_source_target_text_1to1(source_file, target_file, saveto.name, align_OUT) def main(models, source_file, nbest_file, saveto, b=80, normalize=False, verbose=False, alignweights=False): # load model model_options options = [] for model in models: options.append(load_config(model)) fill_options(options[-1]) rescore_model(source_file, nbest_file, saveto, models, options, b, normalize, verbose, alignweights) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-b', type=int, default=80, help="Minibatch size (default: %(default)s))") parser.add_argument('-n', action="store_true", help="Normalize scores by sentence length") parser.add_argument('-v', action="store_true", help="verbose mode.") parser.add_argument('--models', '-m', type=str, nargs = '+', required=True, help="model to use. Provide multiple models (with same vocabulary) for ensemble decoding") parser.add_argument('--source', '-s', type=argparse.FileType('r'), required=True, metavar='PATH', help="Source text file") parser.add_argument('--target', '-t', type=argparse.FileType('r'), required=True, metavar='PATH', help="Target text file") parser.add_argument('--output', '-o', type=argparse.FileType('w'), default=sys.stdout, metavar='PATH', help="Output file (default: standard output)") parser.add_argument('--walign', '-w',required = False,action="store_true", help="Whether to store the alignment weights or not. If specified, weights will be saved in <target>.alignment") args = parser.parse_args() main(args.models, args.source, args.target, args.output, b=args.b, normalize=args.n, verbose=args.v, alignweights=args.walign)
Python
0
@@ -2235,16 +2235,17 @@
 aries'][
+-
 1],
ba82331fa694ec26c7f0108451abf3912b5a37ff
Reimplement deprecated (1.6) _is_ignorable_404
opbeat/contrib/django/middleware/__init__.py
opbeat/contrib/django/middleware/__init__.py
""" opbeat.contrib.django.middleware ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011-2012 Opbeat Large portions are :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.middleware.common import _is_ignorable_404 from opbeat.contrib.django.models import client import threading import logging class Opbeat404CatchMiddleware(object): def process_response(self, request, response): if response.status_code != 404 or _is_ignorable_404(request.get_full_path()): return response data = client.get_data_from_request(request) data.update({ 'level': logging.INFO, 'logger': 'http404', }) result = client.capture('Message', param_message={'message':'Page Not Found: %s','params':[request.build_absolute_uri()]}, data=data) request.opbeat = { 'app_id': data.get('app_id', client.app_id), 'id': client.get_ident(result), } return response class OpbeatResponseErrorIdMiddleware(object): """ Appends the X-Opbeat-ID response header for referencing a message within the Opbeat datastore. """ def process_response(self, request, response): if not getattr(request, 'opbeat', None): return response response['X-Opbeat-ID'] = request.opbeat['id'] return response class OpbeatLogMiddleware(object): # Create a threadlocal variable to store the session in for logging thread = threading.local() def process_request(self, request): self.thread.request = request
Python
0.000001
@@ -284,17 +284,16 @@
 _import
-
 from dja
@@ -300,50 +300,28 @@
 ngo.
-middleware.common import _is_ignorable_404
+conf import settings
 
fro
@@ -382,16 +382,16 @@
 reading
-
import l
@@ -398,16 +398,222 @@
 ogging

+def _is_ignorable_404(self, uri):
    """
    Returns True if the given request *shouldn't* notify the site managers.
    """
    return any(pattern.search(uri) for pattern in settings.IGNORABLE_404_URLS)


 
class O
92b572004264c69baed5cce721e20e1a830514f8
add 'is_changed' filter
filter_plugins/is_changed.py
filter_plugins/is_changed.py
Python
0.000036
@@ -0,0 +1,679 @@
+class FilterModule(object):
    ''' A comment '''

    def filters(self):
        return {
            'is_changed': self.is_changed,
        }

    def is_changed(self, input_value, key, value):
        if type(input_value) is not dict:
            raise TypeError, u"{} must be dict (got {})".format(input_value, str(type(input_value)))

        if input_value.has_key('results'):
            res = input_value['results']
        else:
            res = [input_value]

        for item in res:
            if item.has_key(key) and item.has_key('changed'):
                if item[key] == value and item['changed'] == True:
                    return True

        return False
82cab3f91df9b4bb9f60e553d6b9e4ef431cb6ae
Add __init__.py
eppconvert/__init__.py
eppconvert/__init__.py
Python
0.006636
@@ -0,0 +1,822 @@ +#%0A# Copyright (c) 2017 Ralf Horstmann %[email protected]%3E%0A#%0A# Permission to use, copy, modify, and distribute this software for any%0A# purpose with or without fee is hereby granted, provided that the above%0A# copyright notice and this permission notice appear in all copies.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22 AND THE AUTHOR DISCLAIMS ALL WARRANTIES%0A# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF%0A# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR%0A# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES%0A# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN%0A# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF%0A# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.%0A%0A__all__ = %5B'eppformat', 'eppread', 'gpx2epp'%5D%0A%0A
2f9699d5088266aaa76dad1742f2432d78da9d3b
add validator class
biothings_explorer/resolve_ids/validator.py
biothings_explorer/resolve_ids/validator.py
Python
0.000001
@@ -0,0 +1,2670 @@ +from collections import defaultdict%0A%0Afrom ..config_new import ID_RESOLVING_APIS%0Afrom ..exceptions.id_resolver import InvalidIDResolverInputError%0Afrom ..utils.common import getPrefixFromCurie%0A%0A%0Aclass Validator:%0A def __init__(self, user_input):%0A self.__user_input = user_input%0A self.__valid = defaultdict(list)%0A self.__invalid = defaultdict(list)%0A%0A def get_valid_inputs(self):%0A return self.__valid%0A%0A def get_invalid_inputs(self):%0A return self.__invalid%0A%0A def _validate_if_input_is_dict(self, user_input):%0A if not isinstance(user_input, dict):%0A raise InvalidIDResolverInputError(%0A user_input,%0A message=%22Your Input to ID Resolver is Invalid. It should be a dictionary!%22,%0A )%0A%0A def _validate_if_values_of_input_is_list(self, user_input):%0A for k, v in user_input.items():%0A if not isinstance(v, list):%0A raise InvalidIDResolverInputError(%0A user_input,%0A message=%22Your Input to ID Resolver is Invalid. All values of your input dictionary should be a list!%22,%0A )%0A%0A def _validate_if_each_item_in_input_values_is_curie(self, user_input):%0A for k, v in user_input.items():%0A for _v in v:%0A if not isinstance(_v, str) or %22:%22 not in _v:%0A raise InvalidIDResolverInputError(%0A user_input,%0A message=%22Your Input to ID Resolver is Invalid. Each item in the values of your input dictionary should be a curie. Spotted %7B%7D is not a curie%22.format(%0A _v%0A ),%0A )%0A%0A def _check_if_semantic_type_can_be_resolved(self, user_input):%0A res = %7B%7D%0A for k, v in user_input.items():%0A if k not in ID_RESOLVING_APIS:%0A self.__invalid%5Bk%5D = v%0A else:%0A res%5Bk%5D = v%0A return res%0A%0A def _check_if_prefix_can_be_resolved(self, user_input):%0A for k, v in user_input.items():%0A for _v in v:%0A if getPrefixFromCurie(_v) not in ID_RESOLVING_APIS%5Bk%5D%5B%22mapping%22%5D:%0A self.__invalid%5Bk%5D.append(_v)%0A else:%0A self.__valid%5Bk%5D.append(_v)%0A%0A def validate(self):%0A self._validate_if_input_is_dict(self.__user_input)%0A self._validate_if_values_of_input_is_list(self.__user_input)%0A self._validate_if_each_item_in_input_values_is_curie(self.__user_input)%0A tmp_valid_res = self._check_if_semantic_type_can_be_resolved(self.__user_input)%0A self._check_if_prefix_can_be_resolved(tmp_valid_res)%0A
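The Validator above works in stages: shape checks that raise InvalidIDResolverInputError, then semantic-type and prefix checks that sort curies into valid and invalid buckets. The curie shape test it applies is simple enough to show standalone with a toy input:

# toy input in the shape the Validator expects: {semantic_type: [curie, ...]}
user_input = {'Gene': ['NCBIGene:1017', 'not-a-curie']}

def looks_like_curie(value):
    # mirrors _validate_if_each_item_in_input_values_is_curie
    return isinstance(value, str) and ':' in value

for semantic_type, curies in user_input.items():
    for curie in curies:
        print(curie, looks_like_curie(curie))
# NCBIGene:1017 True
# not-a-curie False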
c69fdba07aa4228f3e708b49e7fef4d0143e7a13
Add missing stats.py
vpr/tests/api_stats.py
vpr/tests/api_stats.py
Python
0.000095
@@ -0,0 +1,266 @@ +from django.db import connection%0A%0ASQL_COUNT = 'select count(id) from vpr_api_apirecord where %25s=%25s;'%0A%0Adef countValue(field, value, time_start=None, time_end=None):%0A cur = connection.cursor()%0A cur.execute(SQL_COUNT %25 (field, value))%0A return cur.fetchone()%0A%0A%0A
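countValue above interpolates both the column name and the value straight into the SQL text, so a string value yields invalid SQL and any untrusted value is an injection risk. A hedged sketch of the usual split: interpolate only a whitelisted column name, and hand the value to the driver as a bound parameter:

# sketch only; cur stands in for a Django connection cursor
def count_value(cur, field, value):
    # field must come from a trusted whitelist; the value goes through
    # the driver's parameter binding instead of %-formatting
    cur.execute(
        'select count(id) from vpr_api_apirecord where %s = %%s' % field,
        [value],
    )
    return cur.fetchone()[0]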
13addaf6e5a0423b632efcc4d16e3e5d864fdac3
Create validate_csv_wd.py
validate_csv_wd.py
validate_csv_wd.py
Python
0.001169
@@ -0,0 +1,542 @@ +#!/usr/bin/env python3.5%0A%0Aimport sys%0Aimport re%0Aimport os%0Aimport csv%0A%0Adef read_file(fname):%0A f = open(fname, 'r')%0A csv_reader = csv.reader(f, delimiter='~')%0A no_rows = 0%0A for row in csv_reader:%0A no_rows += 1%0A no_cols = len(row)%0A print(%22Row %25d: columns = %25d%22 %25 (no_rows, no_cols))%0A f.close()%0A print(%22.........%22)%0A print(%22Number of records in csv file: %25d%22 %25 no_rows)%0A%0Aif __name__ == '__main__':%0A args = sys.argv%5B1:%5D%0A for fl in args:%0A print(%22File : %25s%22 %25 fl)%0A print(%22..................................%22)%0A read_file(fl)%0A
6c4ef8298bbdf48f82d13fb25a0f3958237392f2
Add nova client for retrieving instance information
novajoin/nova.py
novajoin/nova.py
Python
0
@@ -0,0 +1,1420 @@ +# Copyright 2016 Red Hat, Inc.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0A%22%22%22Handle communication with Nova.%22%22%22%0A%0Afrom novaclient import client%0Afrom oslo_config import cfg%0Afrom oslo_log import log as logging%0A%0Afrom novajoin import keystone_client%0A%0A%0ACONF = cfg.CONF%0A%0ALOG = logging.getLogger(__name__)%0A%0ANOVA_APIVERSION = 2.1%0A%0A%0Aclass NovaClient(object):%0A %22%22%22Wrapper around nova client.%22%22%22%0A%0A def __init__(self):%0A%0A self.version = NOVA_APIVERSION%0A self.client = self._nova_client()%0A%0A def _nova_client(self):%0A %22%22%22Instantiate a new novaclient.Client object.%22%22%22%0A%0A session = keystone_client.get_session()%0A return client.Client(str(self.version), session=session)%0A%0A%0Adef get_instance(instance_id):%0A novaclient = NovaClient()%0A try:%0A return novaclient.client.servers.get(instance_id)%0A except novaclient.exceptions.NotFound:%0A return None%0A
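One wrinkle in get_instance above: the except clause reaches for novaclient.exceptions through the local variable novaclient, which at that point is a NovaClient instance (it shadows the imported module), so a missing server would raise AttributeError instead of returning None. A sketch of the intended shape with the exceptions module imported explicitly (the session argument is assumed to be an authenticated keystone session):

from novaclient import client, exceptions

def get_instance(instance_id, session, version='2.1'):
    nova = client.Client(version, session=session)
    try:
        return nova.servers.get(instance_id)
    except exceptions.NotFound:
        return None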
d53358a6a0a564a5b4982f7f3dfdfd1163d6a295
Add test covering no RunStop for v2.
databroker/tests/test_v2/test_no_run_stop.py
databroker/tests/test_v2/test_no_run_stop.py
Python
0
@@ -0,0 +1,985 @@ +# This is a special test because we corrupt the generated data.%0A# That is why it does not reuse the standard fixures.%0A%0Aimport tempfile%0Afrom suitcase.jsonl import Serializer%0Afrom bluesky import RunEngine%0Afrom bluesky.plans import count%0Afrom ophyd.sim import det%0Afrom databroker._drivers.jsonl import BlueskyJSONLCatalog%0A%0A%0Adef test_no_stop_document(RE, tmpdir):%0A %22%22%22%0A When a Run has no RunStop document, whether because it does not exist yet%0A or because the Run was interrupted in a critical way and never completed,%0A we expect the field for 'stop' to contain None.%0A %22%22%22%0A directory = str(tmpdir)%0A%0A serializer = Serializer(directory)%0A%0A def insert_all_except_stop(name, doc):%0A if name != 'stop':%0A serializer(name, doc)%0A%0A RE(count(%5Bdet%5D), insert_all_except_stop)%0A serializer.close()%0A catalog = BlueskyJSONLCatalog(f'%7Bdirectory%7D/*.jsonl')%0A assert catalog%5B-1%5D.metadata%5B'start'%5D is not None%0A assert catalog%5B-1%5D.metadata%5B'stop'%5D is None%0A
ace782a3f4c616f9e22e1a1ce29f053b71391845
Add missing migration for column description.
cms/migrations/0002_update_template_field.py
cms/migrations/0002_update_template_field.py
Python
0
@@ -0,0 +1,564 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('cms', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='page',%0A name='template',%0A field=models.CharField(help_text='Templates are used to render the layout of a page.', max_length=100, verbose_name='template', choices=%5B(b'test-template.html', 'Test Template')%5D),%0A preserve_default=True,%0A ),%0A %5D%0A
c1e76dbdf07e67d98814d6f357a70c692af3a31d
Add first pass at db router
osf/db/router.py
osf/db/router.py
Python
0
@@ -0,0 +1,2134 @@ +from django.conf import settings%0Aimport psycopg2%0A%0ACACHED_MASTER = None%0A%0A%0Aclass PostgreSQLFailoverRouter(object):%0A %22%22%22%0A 1. CHECK MASTER_SERVER_DSN @ THREAD LOCAL%0A 2. THERE?, GOTO 9%0A 3. GET RANDOM_SERVER FROM %60settings.DATABASES%60%0A 4. CONNECT TO RANDOM_SERVER%0A 5. IS MASTER SERVER?%0A 6. YES? GOTO 8%0A 7. NO?, %60exit()%60%0A 8. STOR MASTER_SERVER_DSN @ THREAD_LOCAL%0A 9. PROFIT%0A Number of servers can be assumed to be %3E 1 but shouldn't assume 2 max.%0A Might be nice to keep track of the servers that have been tried from settings.DATABASES so we don't get into a loop.%0A %22%22%22%0A DSNS = dict()%0A%0A def __init__(self):%0A self._get_dsns()%0A global CACHED_MASTER%0A if not CACHED_MASTER:%0A CACHED_MASTER = self._get_master()%0A%0A def _get_master(self):%0A for name, dsn in self.DSNS.iteritems():%0A conn = self._get_conn(dsn)%0A cur = conn.cursor()%0A cur.execute('SELECT pg_is_in_recovery();')%0A row = cur.fetchone()%0A if not row%5B0%5D:%0A cur.close()%0A conn.close()%0A return name%0A cur.close()%0A conn.close()%0A return None%0A%0A def _get_dsns(self):%0A template = '%7Bprotocol%7D://%7BUSER%7D:%7BPASSWORD%7D@%7BHOST%7D:%7BPORT%7D/%7BNAME%7D'%0A for name, db in settings.DATABASES.iteritems():%0A if 'postgresql' in db%5B'ENGINE'%5D:%0A db%5B'protocol'%5D = 'postgres'%0A # db.setdefault('protocol', 'postgres')%0A else:%0A raise Exception('PostgreSQLFailoverRouter only works with PostgreSQL... ... ...')%0A self.DSNS%5Bname%5D = template.format(**db)%0A%0A def _get_conn(self, dsn):%0A return psycopg2.connect(dsn)%0A%0A def db_for_read(self, model, **hints):%0A if not CACHED_MASTER:%0A exit()%0A return CACHED_MASTER%0A%0A def db_for_write(self, model, **hints):%0A if not CACHED_MASTER:%0A exit()%0A return CACHED_MASTER%0A%0A def allow_relation(self, obj1, obj2, **hints):%0A return None%0A%0A def allow_migrate(self, db, app_label, model_name=None, **hints):%0A return None%0A
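A router only takes effect once Django is told about it; a minimal settings sketch wiring in the class above (hostnames and credentials hypothetical). pg_is_in_recovery() returns true on streaming-replication standbys, which is how the router identifies the primary among the configured entries:

# settings.py (sketch)
DATABASE_ROUTERS = ['osf.db.router.PostgreSQLFailoverRouter']
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'osf', 'USER': 'postgres', 'PASSWORD': 'secret',
        'HOST': 'pg-primary.example', 'PORT': '5432',
    },
    'standby': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'osf', 'USER': 'postgres', 'PASSWORD': 'secret',
        'HOST': 'pg-standby.example', 'PORT': '5432',
    },
}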
aca6b8b4cd221efca6d3a5f59f96b73d70e65714
test integration against scipy
gary/integrate/tests/test_1d.py
gary/integrate/tests/test_1d.py
Python
0.000001
@@ -0,0 +1,1434 @@ +# coding: utf-8%0A%0Afrom __future__ import division, print_function%0A%0A__author__ = %22adrn %[email protected]%3E%22%0A%0A# Standard library%0Aimport os%0Aimport time%0Aimport logging%0A%0A# Third-party%0Aimport numpy as np%0Afrom astropy import log as logger%0Afrom scipy.integrate import simps%0A%0A# Project%0Afrom ..simpsgauss import simpson%0A%0Alogger.setLevel(logging.DEBUG)%0A%0Aplot_path = %22plots/tests/TODO%22%0Aif not os.path.exists(plot_path):%0A os.makedirs(plot_path)%0A%0Adef test_simpson():%0A ncalls = 10%0A func = lambda x: np.sin(x - 0.2414)*x + 2.%0A%0A x = np.linspace(0, 10, 250001)%0A y = func(x)%0A%0A t0 = time.time()%0A for i in range(ncalls):%0A s1 = simpson(y, dx=x%5B1%5D-x%5B0%5D)%0A print(%22cython (odd): %7B0%7D sec for %7B1%7D calls%22.format(time.time() - t0,ncalls))%0A%0A t0 = time.time()%0A for i in range(ncalls):%0A s2 = simps(y, x=x)%0A print(%22python (odd): %7B0%7D sec for %7B1%7D calls%22.format(time.time() - t0,ncalls))%0A np.testing.assert_allclose(s1, s2)%0A%0A # -----------------------------------------------------%0A print()%0A x = np.linspace(0, 10, 250000)%0A y = func(x)%0A t0 = time.time()%0A for i in range(ncalls):%0A s1 = simpson(y, dx=x%5B1%5D-x%5B0%5D)%0A print(%22cython (even): %7B0%7D sec for %7B1%7D calls%22.format(time.time() - t0,ncalls))%0A%0A t0 = time.time()%0A for i in range(ncalls):%0A s2 = simps(y, x=x)%0A print(%22python (even): %7B0%7D sec for %7B1%7D calls%22.format(time.time() - t0,ncalls))%0A%0A np.testing.assert_allclose(s1, s2)%0A
003cb0478a7fcbc7fb9b3521c174397a9dd9a318
Use sources.list instead of sources.all.
zilencer/lib/stripe.py
zilencer/lib/stripe.py
from functools import wraps import logging import os from typing import Any, Callable, TypeVar from django.conf import settings from django.utils.translation import ugettext as _ import stripe from zerver.lib.exceptions import JsonableError from zerver.lib.logging_util import log_to_file from zerver.models import Realm, UserProfile from zilencer.models import Customer from zproject.settings import get_secret STRIPE_SECRET_KEY = get_secret('stripe_secret_key') STRIPE_PUBLISHABLE_KEY = get_secret('stripe_publishable_key') stripe.api_key = STRIPE_SECRET_KEY BILLING_LOG_PATH = os.path.join('/var/log/zulip' if not settings.DEVELOPMENT else settings.DEVELOPMENT_LOG_DIRECTORY, 'billing.log') billing_logger = logging.getLogger('zilencer.stripe') log_to_file(billing_logger, BILLING_LOG_PATH) log_to_file(logging.getLogger('stripe'), BILLING_LOG_PATH) CallableT = TypeVar('CallableT', bound=Callable[..., Any]) class StripeError(JsonableError): pass def catch_stripe_errors(func: CallableT) -> CallableT: @wraps(func) def wrapped(*args: Any, **kwargs: Any) -> Any: if STRIPE_PUBLISHABLE_KEY is None: # Dev-only message; no translation needed. raise StripeError( "Missing Stripe config. In dev, add to zproject/dev-secrets.conf .") try: return func(*args, **kwargs) except stripe.error.StripeError as e: billing_logger.error("Stripe error: %d %s", e.http_status, e.__class__.__name__) if isinstance(e, stripe.error.CardError): raise StripeError(e.json_body.get('error', {}).get('message')) else: raise StripeError( _("Something went wrong. Please try again or email us at %s.") % (settings.ZULIP_ADMINISTRATOR,)) except Exception as e: billing_logger.exception("Uncaught error in Stripe integration") raise return wrapped # type: ignore # https://github.com/python/mypy/issues/1927 @catch_stripe_errors def count_stripe_cards(realm: Realm) -> int: try: customer_obj = Customer.objects.get(realm=realm) cards = stripe.Customer.retrieve(customer_obj.stripe_customer_id).sources.all(object="card") return len(cards["data"]) except Customer.DoesNotExist: return 0 @catch_stripe_errors def save_stripe_token(user: UserProfile, token: str) -> int: """Returns total number of cards.""" # The card metadata doesn't show up in Dashboard but can be accessed # using the API. card_metadata = {"added_user_id": user.id, "added_user_email": user.email} try: customer_obj = Customer.objects.get(realm=user.realm) customer = stripe.Customer.retrieve(customer_obj.stripe_customer_id) billing_logger.info("Adding card on customer %s: source=%r, metadata=%r", customer_obj.stripe_customer_id, token, card_metadata) card = customer.sources.create(source=token, metadata=card_metadata) customer.default_source = card.id customer.save() return len(customer.sources.all(object="card")["data"]) except Customer.DoesNotExist: customer_metadata = {"string_id": user.realm.string_id} # Description makes it easier to identify customers in Stripe dashboard description = "{} ({})".format(user.realm.name, user.realm.string_id) billing_logger.info("Creating customer: source=%r, description=%r, metadata=%r", token, description, customer_metadata) customer = stripe.Customer.create(source=token, description=description, metadata=customer_metadata) card = customer.sources.all(object="card")["data"][0] card.metadata = card_metadata card.save() Customer.objects.create(realm=user.realm, stripe_customer_id=customer.id) return 1
Python
0.000001
@@ -3244,35 +3244,36 @@ ustomer.sources. -all +list (object=%22card%22)%5B @@ -3911,35 +3911,36 @@ ustomer.sources. -all +list (object=%22card%22)%5B
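The change swaps the deprecated sources.all(...) for sources.list(...); in the stripe-python client both return the same paginated card listing. Isolated, the replacement call looks like this (key and customer id hypothetical):

import stripe

stripe.api_key = 'sk_test_xxx'                  # hypothetical test key
customer = stripe.Customer.retrieve('cus_123')  # hypothetical customer id
cards = customer.sources.list(object='card')
print(len(cards['data']))                       # number of cards on file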
833fcc808ab856720072b164a130e98433dffccc
Set procpool debug status to False by default.
contrib/procpools/python_procpool_plugin.py
contrib/procpools/python_procpool_plugin.py
""" Python ProcPool plugin Evennia contribution - Griatch 2012 This is a plugin for the Evennia services. It will make the service and run_async in python_procpool.py available to the system. To activate, add the following line to your settings file: SERVER_SERVICES_PLUGIN_MODULES.append("contrib.procpools.python_procpool_plugin") Next reboot the server and the new service will be available. If you want to adjust the defaults, copy this file to game/gamesrc/conf/ and re-point settings.SERVER_SERVICES_PLUGINS_MODULES to that file instead. This is to avoid clashes with eventual upstream modifications to this file. It is not recommended to use this with an SQLite3 database, at least if you plan to do many out-of-process database write - SQLite3 does not work very well with a high frequency of off-process writes due to file locking clashes. """ import os from django.conf import settings # Process Pool setup # convenient flag to turn off process pool without changing settings PROCPOOL_ENABLED = True # relay process stdout to log (debug mode, very spammy) PROCPOOL_DEBUG = True # max/min size of the process pool. Will expand up to max limit on demand. PROCPOOL_MIN_NPROC = 5 PROCPOOL_MAX_NPROC = 20 # after sending a command, this is the maximum time in seconds the process # may run without returning. After this time the process will be killed. This # can be seen as a fallback; the run_async method takes a keyword proc_timeout # that will override this value on a per-case basis. PROCPOOL_TIMEOUT = 10 # maximum time (seconds) a process may idle before being pruned from pool (if pool bigger than minsize) PROCPOOL_IDLETIME = 20 # only change if the port clashes with something else on the system PROCPOOL_PORT = 5001 # 0.0.0.0 means listening to all interfaces PROCPOOL_INTERFACE = '0.0.0.0' # user-id and group-id to run the processes as (for OS:es supporting this). # If you plan to run unsafe code one could experiment with setting this # to an unprivileged user. PROCPOOL_UID = None PROCPOOL_GID = None # real path to a directory where all processes will be run. If # not given, processes will be executed in game/. PROCPOOL_DIRECTORY = None # don't need to change normally SERVICE_NAME = "PythonProcPool" # plugin hook def start_plugin_services(server): """ This will be called by the Evennia Server when starting up. server - the main Evennia server application """ if not PROCPOOL_ENABLED: return # terminal output print ' amp (Process Pool): %s' % PROCPOOL_PORT from contrib.procpools.ampoule import main as ampoule_main from contrib.procpools.ampoule import service as ampoule_service from contrib.procpools.ampoule import pool as ampoule_pool from contrib.procpools.ampoule.main import BOOTSTRAP as _BOOTSTRAP from contrib.procpools.python_procpool import PythonProcPoolChild # for some reason absolute paths don't work here, only relative ones. 
apackages = ("twisted", os.path.join(os.pardir, "contrib", "procpools", "ampoule"), os.path.join(os.pardir, "ev"), os.path.join(os.pardir)) aenv = {"DJANGO_SETTINGS_MODULE":"settings", "DATABASE_NAME":settings.DATABASES.get("default", {}).get("NAME") or settings.DATABASE_NAME} if PROCPOOL_DEBUG: _BOOTSTRAP = _BOOTSTRAP % "log.startLogging(sys.stderr)" else: _BOOTSTRAP = _BOOTSTRAP % "" procpool_starter = ampoule_main.ProcessStarter(packages=apackages, env=aenv, path=PROCPOOL_DIRECTORY, uid=PROCPOOL_UID, gid=PROCPOOL_GID, bootstrap=_BOOTSTRAP, childReactor=os.name == 'nt' and "select" or "epoll") procpool = ampoule_pool.ProcessPool(name=SERVICE_NAME, min=PROCPOOL_MIN_NPROC, max=PROCPOOL_MAX_NPROC, recycleAfter=500, timeout=PROCPOOL_TIMEOUT, maxIdle=PROCPOOL_IDLETIME, ampChild=PythonProcPoolChild, starter=procpool_starter) procpool_service = ampoule_service.AMPouleService(procpool, PythonProcPoolChild, PROCPOOL_PORT, PROCPOOL_INTERFACE) procpool_service.setName(SERVICE_NAME) # add the new services to the server server.services.addService(procpool_service)
Python
0
@@ -742,18 +742,18 @@ se write - - +s. SQLite3 @@ -849,16 +849,51 @@ clashes. + Test what works with your mileage. %0A%0A%22%22%22%0Aim @@ -1123,19 +1123,20 @@ DEBUG = -Tru +Fals e%0A# max/ @@ -1251,16 +1251,145 @@ OC = 20%0A +# maximum time (seconds) a process may idle before being pruned from%0A# pool (if pool bigger than minsize)%0APROCPOOL_IDLETIME = 20%0A # after @@ -1665,16 +1665,16 @@ basis.%0A + PROCPOOL @@ -1691,135 +1691,8 @@ 10%0A -# maximum time (seconds) a process may idle before being pruned from pool (if pool bigger than minsize)%0APROCPOOL_IDLETIME = 20%0A # on
ca002a18b7e392bbdca9d7e0ed8c39739dc5b4a3
Add code to get 99th percentile absolute pointing for POG
pog_absolute_pointing.py
pog_absolute_pointing.py
Python
0
@@ -0,0 +1,678 @@ +import numpy as np%0Afrom Chandra.Time import DateTime%0Aimport plot_aimpoint%0A%0A# Get 99th percential absolute pointing radius%0A%0Aplot_aimpoint.opt = plot_aimpoint.get_opt()%0Aasols = plot_aimpoint.get_asol()%0A# Last six months of data%0Aasols = asols%5Basols%5B'time'%5D %3E DateTime(-183).secs%5D%0A# center of box of range of data%0Amid_dy = (np.max(asols%5B'dy'%5D) + np.min(asols%5B'dy'%5D)) / 2.%0Amid_dz = (np.max(asols%5B'dz'%5D) + np.min(asols%5B'dz'%5D)) / 2.%0A# radius of each delta in mm (asol dy dz in mm)%0Adr = np.sqrt((asols%5B'dy'%5D - mid_dy) ** 2 + (asols%5B'dz'%5D - mid_dz) ** 2)%0Adr_99 = np.percentile(dr, 99)%0Adr_99_arcsec = dr_99 * 20%0Aprint %2299th percentile radius of 6m data is %7B%7D arcsec%22.format(dr_99_arcsec)%0A
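The same radius computation on synthetic data, to make the units explicit: the aimpoint deltas are in mm, and the factor of 20 arcsec/mm used in the script converts the 99th-percentile radius to a sky angle:

import numpy as np

rng = np.random.default_rng(0)
dy = rng.normal(0.0, 0.05, 10000)  # synthetic offsets in mm
dz = rng.normal(0.0, 0.05, 10000)

mid_dy = (dy.max() + dy.min()) / 2.0
mid_dz = (dz.max() + dz.min()) / 2.0
dr = np.sqrt((dy - mid_dy) ** 2 + (dz - mid_dz) ** 2)

dr_99_arcsec = np.percentile(dr, 99) * 20  # 20 arcsec per mm
print('99th percentile radius: %.2f arcsec' % dr_99_arcsec)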
228a643dd54cefec82fdbca0d637deb7e1ea302c
Fix quoting in templates
graphitepager/description.py
graphitepager/description.py
from jinja2 import Template from urllib import urlencode from graphitepager.level import Level ALERT_MISSING_TEMPLATE = r"""{{level}} alert for "{{alert.get('name')}}". Go to {{graph_url}}. {% if docs_url %}Documentation: {{docs_url}}{% endif %}. """ HTML_ALERT_MISSING_TEMPLATE = r"""{{level}} alert for "{{alert.get('name')}}". Go to <a href="{{graph_url}}">the graph</a>. {% if docs_url %}<a href="{{docs_url}}">Documentation</a>{% endif %}. """ STDOUT_MISSING_TEMPLATE = r"""{{level}} alert for "{{alert.get('name')}}". Go to {{graph_url}}. """ ALERT_TEMPLATE = r"""{{level}} alert for {{alert.get('name')}} {{record.target}}. The current value is {{current_value}} which passes the {{threshold_level|lower}} value of {{threshold_value}}. Go to {{graph_url}}. {% if docs_url %}Documentation: {{docs_url}}{% endif %}. """ HTML_ALERT_TEMPLATE = r"""{{level}} alert for {{alert.get('name')}} {{record.target}}. The current value is {{current_value}} which passes the {{threshold_level|lower}} value of {{threshold_value}}. Go to <a href="{{graph_url}}">the graph</a>. {% if docs_url %}<a href="{{docs_url}}">Documentation</a>{% endif %}. """ STDOUT_TEMPLATE = r"""{{level}} alert for {{alert.get('name')}} {{record.target}}. The current value is {{current_value}} which passes the {{threshold_level|lower}} value of {{threshold_value}}. """ class Description(object): def __init__(self, template, graphite_url, alert, record, level, value): self.template = template self.graphite_url = graphite_url self.alert = alert self.record = record self.level = level self.value = value def __str__(self): return self.description_for_alert( self.template, self.graphite_url, self.alert, self.record, self.level, self.value, ) def stdout(self): template = STDOUT_TEMPLATE if self.level == Level.NO_DATA: template = STDOUT_MISSING_TEMPLATE return self.description_for_alert( template, self.graphite_url, self.alert, self.record, self.level, self.value, ) def description_for_alert(self, template, graphite_url, alert, record, level, current_value): context = dict(locals()) context['graphite_url'] = graphite_url if type(record) == str: context['docs_url'] = alert.documentation_url() else: context['docs_url'] = alert.documentation_url(record.target) url_params = ( ('width', 586), ('height', 308), ('target', alert.get('target')), ('target', 'threshold({},"Warning")'.format( alert.get('warning'))), ('target', 'threshold({},"Critical")'.format( alert.get('critical'))), ('from', '-20mins'), ) url_args = urlencode(url_params) url = '{}/render/?{}'.format(graphite_url, url_args) context['graph_url'] = url.replace('https', 'http') context['threshold_value'] = alert.value_for_level(level) if level == Level.NOMINAL: context['threshold_level'] = 'warning' else: context['threshold_level'] = level return Template(template).render(context) def _get_descriptions(graphite_url, alert, record, alert_level, value, alert_template, html_alert_template): description = Description( alert_template, graphite_url, alert, record, alert_level, value ) html_description = Description( html_alert_template, graphite_url, alert, record, alert_level, value ) return description, html_description def get_descriptions(graphite_url, alert, record, alert_level, value): return _get_descriptions(graphite_url, alert, record, alert_level, value, ALERT_TEMPLATE, HTML_ALERT_TEMPLATE) def missing_target_descriptions(graphite_url, alert, record, alert_level, value): return _get_descriptions(graphite_url, alert, record, alert_level, value, ALERT_MISSING_TEMPLATE, HTML_ALERT_MISSING_TEMPLATE)
Python
0.000001
@@ -131,33 +131,32 @@ vel%7D%7D alert for%0A -%22 %7B%7Balert.get('nam @@ -152,33 +152,50 @@ rt.get('name')%7D%7D -%22 + %7B%7Brecord.target%7D%7D . Go to %7B%7Bgraph_ @@ -309,33 +309,32 @@ vel%7D%7D alert for%0A -%22 %7B%7Balert.get('nam @@ -338,17 +338,34 @@ name')%7D%7D -%22 + %7B%7Brecord.target%7D%7D .%0AGo to @@ -528,17 +528,16 @@ ert for%0A -%22 %7B%7Balert. @@ -549,17 +549,34 @@ name')%7D%7D -%22 + %7B%7Brecord.target%7D%7D . Go to
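The fix drops the stray quotes around the alert name and adds {{record.target}} to the plain-text templates. Rendered through Jinja2 the corrected template behaves like this (values hypothetical):

from jinja2 import Template

template = Template("{{level}} alert for {{alert.get('name')}} {{record.target}}. Go to {{graph_url}}.")
record = type('Record', (), {'target': 'web-1.load'})  # object exposing .target
print(template.render(level='CRITICAL', alert={'name': 'cpu'}, record=record,
                      graph_url='http://graphite/render/?target=web-1.load'))
# CRITICAL alert for cpu web-1.load. Go to http://graphite/render/?target=web-1.load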
fa28e80dc7aeed1eb4fb0a18126a2f8105d5a5d2
Create Cleverbot.py
Cleverbot.py
Cleverbot.py
Python
0
@@ -0,0 +1,1144 @@ +mport re%0Aimport cleverbot%0Aimport traceback%0A%0AWORDS = %5B%22CLEVERBOT%22, %22BOT%22%5D%0APATTERN = r%22%5Cb(cleverbot%7Cbot)%5Cb%22%0A%0Adef handle(text, mic, profile):%0A %22%22%22%0A Responds to user-input, typically speech text, starting a conversation with cleverbot%0A%0A Arguments:%0A text -- user-input, typically transcribed speech%0A mic -- used to interact with the user (for both input and output)%0A profile -- contains information related to the user (e.g., phone number)%0A %22%22%22%0A mic.say('Starting clever bot')%0A exit = False%0A bot = cleverbot.Cleverbot()%0A errors = 0%0A while not exit:%0A try:%0A question = mic.activeListen()%0A if is_exit(question):%0A break%0A answer = bot.ask(question)%0A mic.say(answer)%0A except Exception as e:%0A mic.say('Oops')%0A print traceback.format_exc()%0A errors += 1%0A if errors %3E 5:%0A break%0A%0A mic.say('Stopping clever bot')%0A%0A%0Adef is_exit(text):%0A return bool(re.search(r%22(exit%7Cquit%7Cstop)%22, text, re.IGNORECASE))%0A%0Adef isValid(text):%0A return bool(re.search(PATTERN, text, re.IGNORECASE))%0A
405dfc9a0a814001961e4090be83a3da4a4d4369
Copy in constants file from master
cea/technologies/constants.py
cea/technologies/constants.py
Python
0
@@ -0,0 +1,730 @@ +%22%22%22%0AConstants used throughout the cea.technologies package.%0A%0AHistory lesson: This is a first step at removing the %60cea.globalvars.GlobalVariables%60 object.%0A%22%22%22%0A%0A# Heat Exchangers%0AU_cool = 2500.0 # W/m2K%0AU_heat = 2500.0 # W/m2K%0AdT_heat = 5.0 # K - pinch delta at design conditions%0AdT_cool = 2.0 # K - pinch delta at design conditions%0A%0A# Specific heat%0Arho_W = 998.0 # %5Bkg/m%5E3%5D density of Water%0Acp = 4185.0 # %5BJ/kg K%5D%0A%0A%0A# Substation data%0Aroughness = 0.02 / 1000 # roughness coefficient for heating network pipe in m (for a steel pipe, from Li &%0ANetworkDepth = 1 # m%0A%0A# Initial Diameter guess%0AREDUCED_TIME_STEPS = 50 # number of time steps of maximum demand which are evaluated as an initial guess of the edge diameters
656cf2955510151675dfb4acae4e92e21021a6b5
Add the Course of LiaoXueFeng
LiaoXueFeng/function.py
LiaoXueFeng/function.py
Python
0.000001
@@ -0,0 +1,86 @@ +def fact(n):%0A if n==1:%0A return 1%0A return n * fact(n - 1)%0A%0Aprint fact(10)%0A
7a8250e6640c8ebf36cd159607da24b095cf708e
Create Fibonacci.py
Fibonacci.py
Fibonacci.py
Python
0.999733
@@ -0,0 +1,1712 @@ +#Author-Michael Aubry%0A#Description-This script outputs a spiraling fibinacci sequence onto a Fusion 360 sketch%0A%0Aimport adsk.core, adsk.fusion%0A%0Aapp= adsk.core.Application.get()%0Adesign = app.activeProduct%0Aui = app.userInterface;%0A%0A#**User Inputs**%0ASteps = 15 #How many steps of Fibonacci would you like to plot?%0ALength = 2 #How long is the first segment? (cm)%0A%0A#Get root component%0ArootComp = design.rootComponent%0A#Create a new sketch on XY plane%0Asketch = rootComp.sketches.add(rootComp.xYConstructionPlane)%0A%0A# Create an object collection for the points.%0Apoints = adsk.core.ObjectCollection.create()%0A%0A# R = total steps to be run thru the For loop%0AR = Steps - 2%0A%0A#starting x and y coordiantes%0Ax = 0%0Ay = 0%0A%0A#Create 1st coordinate%0Apoints.add(adsk.core.Point3D.create(x,y,0))%0A%0A#starting values for sequence%0Afib = 1%0Afib1 = 1%0A%0A#1st fib number%0A#print str(fib)%0A%0A#Create 2nd coordinate%0Ax = 1 * Length%0Apoints.add(adsk.core.Point3D.create(x,y,0))%0A%0A#bins for shifting x and y coordinates%0ABin1 = range(0,R,4)%0ABin2 = range(1,R,4)%0ABin3 = range(2,R,4)%0ABin4 = range(3,R,4)%0A%0Afor i in range(R):%0A fib2 = fib + fib1%0A fib = fib1%0A fib1 = fib2%0A fibLength = fib*Length #adds the scalar component to coordinates%0A%0A if i in Bin1:%0A x = x%0A y = y + fibLength%0A points.add(adsk.core.Point3D.create(x,y,0))%0A if i in Bin2:%0A x = x - fibLength%0A y = y%0A points.add(adsk.core.Point3D.create(x,y,0))%0A if i in Bin3:%0A x = x%0A y = y - fibLength%0A points.add(adsk.core.Point3D.create(x,y,0))%0A if i in Bin4:%0A x = x + fibLength%0A y = y%0A points.add(adsk.core.Point3D.create(x,y,0))%0A%0A# Create the spline.%0Asketch.sketchCurves.sketchFittedSplines.add(points)%0A
e52b6eed224274ee58bd2284382cf45b4f78864e
Support for django 1.3 Response.templates attribute is added
django_webtest/__init__.py
django_webtest/__init__.py
# -*- coding: utf-8 -*- from django.conf import settings from django.contrib.auth.models import User from django.core import signals from django.test.signals import template_rendered from django.core.handlers.wsgi import WSGIHandler from django.core.servers.basehttp import AdminMediaHandler from django.db import close_connection from django.http import HttpResponseServerError from django.test import TestCase from django.test.client import store_rendered_templates from django.utils.functional import curry from webtest import TestApp, TestRequest class DjangoWsgiFix(object): """Django closes the database connection after every request; this breaks the use of transactions in your tests. This wraps around Django's WSGI interface and will disable the critical signal handler for every request served. Note that we really do need to do this individually a every request, not just once when our WSGI hook is installed, since Django's own test client does the same thing; it would reinstall the signal handler if used in combination with us. From django-test-utils. """ def __init__(self, app): self.app = app def __call__(self, environ, start_response): signals.request_finished.disconnect(close_connection) try: return self.app(environ, start_response) finally: signals.request_finished.connect(close_connection) class DjangoTestApp(TestApp): def __init__(self, extra_environ=None, relative_to=None): super(DjangoTestApp, self).__init__(self.get_wsgi_handler(), extra_environ, relative_to) def get_wsgi_handler(self): return DjangoWsgiFix(AdminMediaHandler(WSGIHandler())) def _update_environ(self, environ, user): if user: environ = environ or {} if isinstance(user, User): environ['REMOTE_USER'] = str(user.username) else: environ['REMOTE_USER'] = user return environ def do_request(self, req, status, expect_errors): # Curry a data dictionary into an instance of the template renderer # callback function. data = {} on_template_render = curry(store_rendered_templates, data) template_rendered.connect(on_template_render) response = super(DjangoTestApp, self).do_request(req, status, expect_errors) # Add any rendered template detail to the response. # If there was only one template rendered (the most likely case), # flatten the list to a single element. for detail in ('template', 'context'): if data.get(detail): if len(data[detail]) == 1: setattr(response, detail, data[detail][0]); else: setattr(response, detail, data[detail]) else: setattr(response, detail, None) return response def get(self, url, params=None, headers=None, extra_environ=None, status=None, expect_errors=False, user=None, auto_follow=False): extra_environ = self._update_environ(extra_environ, user) response = super(DjangoTestApp, self).get( url, params, headers, extra_environ, status, expect_errors) is_redirect = lambda r: r.status_int >= 300 and r.status_int < 400 while auto_follow and is_redirect(response): response = response.follow() return response def post(self, url, params='', headers=None, extra_environ=None, status=None, upload_files=None, expect_errors=False, content_type=None, user=None): extra_environ = self._update_environ(extra_environ, user) return super(DjangoTestApp, self).post( url, params, headers, extra_environ, status, upload_files, expect_errors, content_type) class WebTest(TestCase): extra_environ = {} csrf_checks = True setup_auth = True def _patch_settings(self): ''' Patch settings to add support for REMOTE_USER authorization and (optional) to disable CSRF checks ''' self._DEBUG_PROPAGATE_EXCEPTIONS = settings.DEBUG_PROPAGATE_EXCEPTIONS self._MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES[:] self._AUTHENTICATION_BACKENDS = settings.AUTHENTICATION_BACKENDS[:] settings.MIDDLEWARE_CLASSES = list(settings.MIDDLEWARE_CLASSES) settings.AUTHENTICATION_BACKENDS = list(settings.AUTHENTICATION_BACKENDS) settings.DEBUG_PROPAGATE_EXCEPTIONS = True if not self.csrf_checks: self._disable_csrf_checks() if self.setup_auth: self._setup_auth() def _unpatch_settings(self): ''' Restore settings to before-patching state ''' settings.MIDDLEWARE_CLASSES = self._MIDDLEWARE_CLASSES settings.AUTHENTICATION_BACKENDS = self._AUTHENTICATION_BACKENDS settings.DEBUG_PROPAGATE_EXCEPTIONS = self._DEBUG_PROPAGATE_EXCEPTIONS def _setup_auth(self): ''' Setup REMOTE_USER authorization ''' self._setup_remote_user_middleware() self._setup_remote_user_backend() def _disable_csrf_checks(self): disable_csrf_middleware = 'django_webtest.middleware.DisableCSRFCheckMiddleware' if not disable_csrf_middleware in settings.MIDDLEWARE_CLASSES: settings.MIDDLEWARE_CLASSES = [disable_csrf_middleware] + settings.MIDDLEWARE_CLASSES def _setup_remote_user_middleware(self): remote_user_middleware = 'django.contrib.auth.middleware.RemoteUserMiddleware' if not remote_user_middleware in settings.MIDDLEWARE_CLASSES: settings.MIDDLEWARE_CLASSES += [remote_user_middleware] def _setup_remote_user_backend(self): auth_backends = settings.AUTHENTICATION_BACKENDS try: index = auth_backends.index('django.contrib.auth.backends.ModelBackend') auth_backends[index] = 'django.contrib.auth.backends.RemoteUserBackend' except ValueError: auth_backends.append('django.contrib.auth.backends.RemoteUserBackend') settings.AUTHENTICATION_BACKENDS = auth_backends def __call__(self, result=None): self._patch_settings() self.app = DjangoTestApp(extra_environ=self.extra_environ) res = super(WebTest, self).__call__(result) self._unpatch_settings() return res
Python
0
@@ -2573,44 +2573,27 @@ -for detail in ('template', 'context' +def flattend(detail ):%0A @@ -2610,24 +2610,30 @@ if -data.get(detail) +len(data%5Bdetail%5D) == 1 :%0A @@ -2638,39 +2638,39 @@ -if len( +return data%5Bdetail%5D) == @@ -2657,39 +2657,35 @@ urn data%5Bdetail%5D -) == 1: +%5B0%5D %0A @@ -2685,206 +2685,410 @@ - setattr(response, detail, data%5Bdetail%5D%5B0%5D);%0A else:%0A setattr(response, detail, data%5Bdetail%5D)%0A else:%0A setattr(response, detail, None +return data%5Bdetail%5D%0A%0A response.context = None%0A response.template = None%0A response.templates = data.get('templates', None)%0A%0A if data.get('context'):%0A response.context = flattend('context')%0A%0A if data.get('template'):%0A response.template = flattend('template')%0A elif data.get('templates'):%0A response.template = flattend('templates' )%0A%0A
b777872d1b06714f538dc8fb21b790de822b5a66
Update Example folder
examples/listing_instruments.py
examples/listing_instruments.py
Python
0
@@ -0,0 +1,59 @@ +import visa%0Arm = visa.ResourceManager()%0Arm.list_resources()
735c55d68d4831137255808042684733f93d5c18
add iconv clone
iconv.py
iconv.py
Python
0
@@ -0,0 +1,1156 @@ +#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0Aimport sys%0Aimport locale%0Aimport argparse%0Aimport fileinput%0A%0Apreferredenc = locale.getpreferredencoding()%0A%0Aparser = argparse.ArgumentParser(%0A description=%22Convert encoding of given files from one encoding to another.%22)%0Aparser.add_argument(%0A %22-f%22, %22--from-code%22, metavar='NAME', default=preferredenc,%0A help=%22encoding of original text (locale default: %25s)%22 %25 preferredenc)%0Aparser.add_argument(%0A %22-t%22, %22--to-code%22, metavar='NAME', default=preferredenc,%0A help=%22encoding for output (locale default: %25s)%22 %25 preferredenc)%0Aparser.add_argument(%0A %22-c%22, metavar='errors', nargs='?', default='strict', const='ignore',%0A help=%22set error handling scheme (default: 'strict', omitted: 'ignore')%22)%0Aparser.add_argument(%22-o%22, metavar='FILE', help=%22output file%22)%0Aparser.add_argument(%22FILE%22, nargs='*', help=%22input file%22)%0Aargs = parser.parse_args()%0A%0Aif args.o:%0A wstream = open(args.o, 'wb')%0Aelse:%0A wstream = sys.stdout.buffer%0A%0Awith fileinput.input(args.FILE, mode='rb') as f, wstream:%0A for line in f:%0A wstream.write(%0A line.decode(args.from_code, args.c).encode(args.to_code, args.c))%0A
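The core of the script above is a decode/encode round-trip with a selectable error handler; isolated from the argparse and fileinput plumbing it reduces to:

raw = 'naïve café'.encode('utf-8')

# strict (the default) raises when bytes don't fit the source codec
try:
    raw.decode('ascii', 'strict')
except UnicodeDecodeError as exc:
    print('strict failed:', exc.reason)

# ignore (what a bare -c selects) silently drops undecodable bytes
print(raw.decode('ascii', 'ignore'))  # nave caf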
6a13295ea0e3e763683ec2317502141e4913935b
Make prowl debug output actually go to debug log
flexget/plugins/output/prowl.py
flexget/plugins/output/prowl.py
from __future__ import unicode_literals, division, absolute_import import logging from requests import RequestException from flexget.plugin import register_plugin, priority from flexget.utils.template import RenderError __version__ = 0.1 log = logging.getLogger('prowl') headers = {'User-Agent': 'FlexGet Prowl plugin/%s' % str(__version__)} class OutputProwl(object): """ Send prowl notifications Example:: prowl: apikey: xxxxxxx [application: application name, default FlexGet] [event: event title, default New Release] [priority: -2 - 2 (2 = highest), default 0] [description: notification to send] Configuration parameters are also supported from entries (eg. through set). """ def validator(self): from flexget import validator config = validator.factory('dict') config.accept('text', key='apikey', required=True) config.accept('text', key='application') config.accept('text', key='event') config.accept('integer', key='priority') config.accept('text', key='description') return config def prepare_config(self, config): if isinstance(config, bool): config = {'enabled': config} config.setdefault('apikey', '') config.setdefault('application', 'FlexGet') config.setdefault('event', 'New release') config.setdefault('priority', 0) return config # Run last to make sure other outputs are successful before sending notification @priority(0) def on_task_output(self, task, config): config = self.prepare_config(config) for entry in task.accepted: # get the parameters apikey = entry.get('apikey', config['apikey']) application = entry.get('application', config['application']) event = entry.get('event', config['event']) priority = entry.get('priority', config['priority']) description = config.get('description', entry['title']) # If event has jinja template, render it try: event = entry.render(event) except RenderError as e: log.error('Error rendering jinja event: %s' % e) # If description has jinja template, render it try: description = entry.render(description) except RenderError as e: description = entry['title'] log.error('Error rendering jinja description: %s' % e) url = 'https://prowl.weks.net/publicapi/add' data = {'priority': priority, 'application': application, 'apikey': apikey, 'event': event, 'description': description} if task.manager.options.test: log.info('Would send prowl message about: %s', entry['title']) log.verbose('options: %s' % data) continue try: response = task.requests.post(url, headers=headers, data=data, raise_status=False) except RequestException as e: log.error('Error with request: %s' % e) continue # Check if it succeeded request_status = response.status_code # error codes and messages from http://prowl.weks.net/api.php if request_status == 200: log.debug("Prowl message sent") elif request_status == 400: log.error("Bad request, the parameters you provided did not validate") elif request_status == 401: log.error("Not authorized, the API key given is not valid, and does not correspond to a user.") elif request_status == 406: log.error("Not acceptable, your IP address has exceeded the API limit.") elif request_status == 500: log.error("Internal server error, something failed to execute properly on the Prowl side.") else: log.error("Unknown error when sending Prowl message") register_plugin(OutputProwl, 'prowl', api_ver=2)
Python
0.000001
@@ -2894,15 +2894,13 @@ log. -verbose +debug ('op
72dcd6857f5f895f0fb9325681302f5875bc50ec
Add a new user-defined file
profile_collection/startup/31-capillaries.py
profile_collection/startup/31-capillaries.py
Python
0
@@ -0,0 +1,798 @@ +#6.342 mm apart%0A%0A#6.074%0A%0Adef capillary6_in():%0A mov(diff.xh,12.41)%0A mov(diff.yh,-12.58)%0A%0Adef capillary7_in():%0A mov(diff.xh,6.075) %0A mov(diff.yh,-12.58)%0A%0Adef capillary8_in():%0A mov(diff.xh,-.26695) %0A mov(diff.yh,-12.58)%0A%0Adef capillary9_in():%0A mov(diff.xh,-6.609) %0A mov(diff.yh,-12.58)%0A%0Adef capillary10_in():%0A mov(diff.xh,-12.951) %0A mov(diff.yh,-12.58)%0A%0A%0A'''%0Acommands to move capillaries%0Acapillary6_in()%0Asam = Sample(%22YT-11%22)%0A%0Acapillary7_in()%0Asam = Sample(%22YT-28%22)%0A%0Acapillary8_in()%0Asam = Sample(%22YT-47%22) %0A%0Acapillary9_in()%0Asam = Sample(%22YT-306%22)%0A%0Acapillary10_in()%0Asam = Sample(%22YT-51%22)%0A'''%0A%0A''' Steps for capillary measurements:%0A1. Rename %22Name Pattern%22 to sample name (YT-3 for example)%0A2. type command capillary6_in() (or the number)%0A3. move and measure%0A'''%0A
f1ee6ce108626342b42a2d2a7b5aa4779af87e6c
Add python code to plot the histogram
plot-histogram.py
plot-histogram.py
Python
0.00008
@@ -0,0 +1,203 @@ +import matplotlib.pyplot as plt%0Aimport sys%0A%0Aif __name__ == %22__main__%22:%0A with open(sys.argv%5B1%5D) as f:%0A data = map(float, f.readlines())%0A plt.hist(list(data), 100)%0A plt.show()%0A %0A
c9f64c0e61fb08c43b1c8cb93ec6f9c389b9c31c
delete finished pods from cluster
XcScripts/deletePods.py
XcScripts/deletePods.py
Python
0
@@ -0,0 +1,1148 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport os%0Aimport sys%0Aimport json%0Aimport shutil%0Aimport subprocess%0Aimport time%0A%0Adef ReadPodsToBeDeleted(fname):%0A %22%22%22%0A self explanatory%0A %22%22%22%0A%0A listPods = %5B%5D%0A%0A with open(fname,'r') as f:%0A for line in f:%0A listPods.append(line.rstrip('%5Cn'))%0A%0A return listPods%0A%0Adef main(pods_fname):%0A %22%22%22%0A This method takes list of pods and delte them all,%0A one by one%0A %22%22%22%0A %0A pods = ReadPodsToBeDeleted(pods_fname)%0A%0A print(%22To remove PODs: %7B0%7D%22.format(len(pods)))%0A %0A for pod in pods:%0A cmd = %22kubectl delete pod %22 + pod%0A rc = 0%0A for k in range(0, 12): # several attempts to make a pod%0A rc = subprocess.call(cmd, shell=True)%0A if rc == 0:%0A break%0A%0A if rc != 0:%0A print(%22Cannot delete pod %7B0%7D%22.format(pod))%0A sys.exit(1)%0A%0Aif __name__ =='__main__':%0A nof_args = len(sys.argv)%0A%0A if nof_args == 1:%0A print(%22Use:deletePods list_of_PODs%22)%0A sys.exit(1)%0A%0A pods_fname = %22%22%0A if nof_args %3E= 2:%0A pods_fname = sys.argv%5B1%5D%0A%0A main(pods_fname)%0A%0A sys.exit(0)%0A%0A
93c828b7c94004321a3801c2b53ba692532d1c79
Update TwitterSearchSensor to retrieve and store last_id in the datastore.
packs/twitter/sensors/twitter_search_sensor.py
packs/twitter/sensors/twitter_search_sensor.py
from TwitterSearch import TwitterSearch from TwitterSearch import TwitterSearchOrder from st2reactor.sensor.base import PollingSensor BASE_URL = 'https://twitter.com' class TwitterSearchSensor(PollingSensor): def __init__(self, sensor_service, config=None, poll_interval=None): super(TwitterSearchSensor, self).__init__(sensor_service=sensor_service, config=config, poll_interval=poll_interval) self._trigger_ref = 'twitter.matched_tweet' self._logger = self._sensor_service.get_logger(__name__) def setup(self): self._client = TwitterSearch( consumer_key=self._config['consumer_key'], consumer_secret=self._config['consumer_secret'], access_token=self._config['access_token'], access_token_secret=self._config['access_token_secret'] ) self._last_id = None def poll(self): tso = TwitterSearchOrder() tso.set_keywords([self._config['query']]) language = self._config.get('language', None) if language: tso.set_language(language) tso.set_result_type('recent') tso.set_count(self._config.get('count', 30)) tso.set_include_entities(False) if self._last_id: tso.set_since_id(self._last_id) try: tweets = self._client.search_tweets(tso) tweets = tweets['content']['statuses'] except Exception as e: self._logger.exception('Polling Twitter failed: %s' % (str(e))) return tweets = list(reversed(tweets)) if tweets: self._last_id = tweets[-1]['id'] for tweet in tweets: self._dispatch_trigger_for_tweet(tweet=tweet) def cleanup(self): # TODO: Persist state (id) so we avoid duplicate events pass def add_trigger(self, trigger): pass def update_trigger(self, trigger): pass def remove_trigger(self, trigger): pass def _dispatch_trigger_for_tweet(self, tweet): trigger = self._trigger_ref url = '%s/%s/status/%s' % (BASE_URL, tweet['user']['screen_name'], tweet['id']) payload = { 'id': tweet['id'], 'created_at': tweet['created_at'], 'lang': tweet['lang'], 'place': tweet['place'], 'retweet_count': tweet['retweet_count'], 'favorite_count': tweet['favorite_count'], 'user': { 'screen_name': tweet['user']['screen_name'], 'name': tweet['user']['name'], 'location': tweet['user']['location'], 'description': tweet['user']['description'], }, 'text': tweet['text'], 'url': url } self._sensor_service.dispatch(trigger=trigger, payload=payload)
Python
0
@@ -129,16 +129,57 @@ Sensor%0A%0A +__all__ = %5B%0A 'TwitterSearchSensor'%0A%5D%0A%0A BASE_URL @@ -1366,17 +1366,50 @@ -if self._ +last_id = self._get_last_id()%0A%0A if last @@ -1446,22 +1446,20 @@ _id( -self._ +int( last_id) %0A%0A @@ -1454,16 +1454,17 @@ last_id) +) %0A%0A @@ -1779,26 +1779,36 @@ self._ +set_ last_id - = +(last_id= tweets%5B- @@ -1815,16 +1815,17 @@ 1%5D%5B'id'%5D +) %0A%0A @@ -1933,72 +1933,8 @@ f):%0A - # TODO: Persist state (id) so we avoid duplicate events%0A @@ -2077,32 +2077,32 @@ self, trigger):%0A - pass%0A%0A @@ -2091,32 +2091,443 @@ :%0A pass%0A%0A + def _get_last_id(self):%0A if not self._last_id and hasattr(self._sensor_service, 'get_value'):%0A self._last_id = self._sensor_service.get_value(name='last_id')%0A%0A return self._last_id%0A%0A def _set_last_id(self, last_id):%0A self._last_id = last_id%0A%0A if hasattr(self._sensor_service, 'set_value'):%0A self._sensor_service.set_value(name='last_id', value=last_id)%0A%0A def _dispatc
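The pattern introduced above, lazily reading last_id from the sensor service's key/value store and writing it back after each poll, can be sketched without st2 by faking the service object:

class FakeSensorService(object):
    # stand-in for the st2 sensor_service key/value API
    def __init__(self):
        self._kv = {}
    def get_value(self, name):
        return self._kv.get(name)
    def set_value(self, name, value):
        self._kv[name] = value

service = FakeSensorService()
print(service.get_value('last_id'))      # None on the first poll
service.set_value('last_id', 987654321)  # persist the newest tweet id
print(service.get_value('last_id'))      # available on the next poll cycle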
7da94fd5576f4c052e79a8068164c101054d5ae7
Add Python / `requests` example
python/simple.py
python/simple.py
Python
0.000024
@@ -0,0 +1,612 @@ +import requests # http://python-requests.org/%0A%0A# Premium user authentication process and API access example%0Ar = requests.post('https://api.masterleague.net/auth/token/', data=%7B'username': 'user', 'password': '12345'%7D)%0A%0Aif 'token' not in r.json():%0A print(r.text)%0A raise ValueError(%22Unable to extract authentication token!%22)%0A%0Atoken = r.json()%5B'token'%5D%0A%0As = requests.Session()%0As.headers.update(%7B'Authorization': 'Token ' + token%7D)%0A%0Ar = s.get('https://api.masterleague.net/heroes.json')%0Aprint(r.text)%0A%0A# Anonymous user access example%0Ar = requests.get('https://api.masterleague.net/heroes.json')%0Aprint(r.text)%0A
b4f2c7b8bde0d28f7d1b61718eb7cd0b9159f507
add __version__
epistasis/__version__.py
epistasis/__version__.py
Python
0.000984
@@ -0,0 +1,22 @@ +__version__ = %220.6.4%22%0A
9498ac9ec27bbef1725b92e84a3b0d4c9e967aa6
add ex14
lpthw/ex14.py
lpthw/ex14.py
Python
0.99846
@@ -0,0 +1,590 @@ +#!/usr/bin/env python%0A%0A# Exercise 14: Prompting and Passing%0A%0Afrom sys import argv%0A%0Ascript, user_name = argv%0Aprompt = '%3E '%0A%0Aprint %22Hi %25s, I'm the %25s script.%22 %25 (user_name, script)%0Aprint %22I'd like to ask you a few questions.%22%0Aprint %22Do you like me %25s?%22 %25 user_name%0Alikes = raw_input(prompt)%0A%0Aprint %22Where do you live %25s?%22 %25 user_name%0Alives = raw_input(prompt)%0A%0Aprint %22What kind of computer do you have?%22%0Acomputer = raw_input(prompt)%0A%0Aprint %22%22%22%0AAlright, so you said %25r about liking me.%0AYou live in %25r. Not sure where that is.%0AAnd you have a %25r computer. Nice.%0A%22%22%22 %25 (likes, lives, computer)%0A
1c8fd79c783ba6f21140b4c08bbf648bf5989dd4
Add main module
core/hybra.py
core/hybra.py
Python
0.000001
@@ -0,0 +1,1579 @@ +import data_loader%0Aimport descriptives%0Aimport network%0Aimport timeline%0Aimport wordclouds%0A%0A%0Adef load_data( terms = %5B%5D, data_folder = '' ):%0A if data_folder == '':%0A return load_all_data( terms )%0A else:%0A if '/' not in data_folder:%0A data_folder += '/'%0A loader = data_folder.split( '/' )%5B0%5D%0A return load_data_from_folder( terms, loader, data_folder )%0A%0A%0Adef load_all_data( terms ):%0A data = %7B%7D%0A%0A for function_name in dir( data_loader ):%0A if 'load_' in function_name:%0A if len( terms ) == 0:%0A f = getattr( data_loader, function_name )%0A else:%0A f = getattr( data_loader, function_name )( *terms )%0A data%5Bfunction_name%5D = f()%0A%0A return data%0A%0Adef load_data_from_folder( terms, loader, data_folder ):%0A data = %5B%5D%0A%0A for function_name in dir( data_loader ):%0A if loader in function_name:%0A if len( terms ) == 0:%0A data += getattr( data_loader, function_name )( data_folder = data_folder )%0A else:%0A data += getattr( data_loader, function_name)( terms, data_folder )%0A%0A return data%0A%0Adef describe( data ):%0A if isinstance( data, dict ):%0A for loader in data:%0A print loader%0A descriptives.describe( data%5Bloader%5D )%0A print '%5Cn'%0A else:%0A descriptives.describe( data )%0A%0Adef create_timeline( data ):%0A timeline.create_timeline( data )%0A%0Adef create_network( data ):%0A network.create_network( data )%0A%0Adef create_wordcloud( data ):%0A wordclouds.create_wordcloud( data )%0A
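load_all_data above dispatches by reflection: every attribute of data_loader whose name contains 'load_' is fetched with getattr and called. The same idiom in isolation, with a throwaway module standing in for data_loader:

import types

data_loader = types.ModuleType('data_loader')  # throwaway stand-in
data_loader.load_media = lambda: ['article-1']
data_loader.load_facebook = lambda: ['post-1']

data = {}
for function_name in dir(data_loader):
    if 'load_' in function_name:
        data[function_name] = getattr(data_loader, function_name)()
print(data)  # {'load_facebook': ['post-1'], 'load_media': ['article-1']}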
0ec0398f8e50ed0adca426f9c468fd5154603941
add mmd matrix example
open_spiel/python/examples/mmd_matrix_example.py
open_spiel/python/examples/mmd_matrix_example.py
Python
0
@@ -0,0 +1,1363 @@ +%22%22%22 Example of using MMD with dilated entropy%0A to solve for QRE in a Matrix Game %22%22%22%0A%0Afrom absl import app%0Afrom absl import flags%0A%0Afrom open_spiel.python.algorithms import mmd_dilated%0Aimport pyspiel%0A%0AFLAGS = flags.FLAGS%0A%0Aflags.DEFINE_integer(%22iterations%22, 1000, %22Number of iterations%22)%0Aflags.DEFINE_float(%22alpha%22, 0.1, %22QRE parameter, larger value amounts to more regularization%22)%0Aflags.DEFINE_integer(%22print_freq%22, 100, %22How often to print the gap%22)%0A%0A# create pyspiel perturbed RPS matrix game%0A%0Agame = pyspiel.create_matrix_game(%5B%5B0, -1, 3%5D,%0A %5B1, 0, -3%5D,%0A %5B-3, 3, 0%5D%5D,%0A %5B%5B0, 1, -3%5D,%0A %5B-1, 0, 3%5D,%0A %5B3, -3, 0%5D%5D)%0A%0Agame = pyspiel.convert_to_turn_based(game)%0A%0Adef main(_):%0A mmd = mmd_dilated.MMDDilatedEnt(game, FLAGS.alpha)%0A for i in range(FLAGS.iterations):%0A mmd.update_sequences()%0A if i %25 FLAGS.print_freq == 0:%0A conv = mmd.get_gap()%0A print(%22Iteration %7B%7D gap %7B%7D%22.format(i, conv))%0A%0A # Extract policies for both players%0A print(mmd.get_policies().action_probability_array)%0A # Note the sequence form and behavioural-form coincide%0A # for a normal-form game (sequence form has extra root value of 1)%0A print(mmd.current_sequences())%0A%0Aif __name__ == %22__main__%22:%0A app.run(main)
3425d265c32d33c189710bcffd1d0df62ce27b3a
update model
model.py
model.py
class User(dict): """ Every user must have keys for a username, name, passphrase (this is a md5 hash of the password), groups, and an email address. They can be blank or None, but the keys must exist. """ def __init__(self, dict=None): for key in ['username', 'name', 'passphrase', 'email']: self[key] = '' for key in ['groups']: self[key] = [] if dict: for key in dict: self[key] = dict[key] def __getattr__(self, attr): return None
Python
0.000001
@@ -97,11 +97,14 @@ s a -md5 +bcrypt has @@ -122,16 +122,22 @@ ssword), + salt, groups, @@ -158,16 +158,19 @@ address. +%0A They c @@ -174,20 +174,16 @@ y can be -%0A blank o @@ -307,16 +307,24 @@ phrase', + 'salt', 'email'
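The diff above replaces the md5 passphrase with a bcrypt hash and adds a salt key. A hedged sketch with the bcrypt package (note bcrypt embeds the salt inside the hash it returns, so keeping a separate salt key is redundant but matches the model's keys):

import bcrypt  # pip install bcrypt

password = b'correct horse battery staple'
salt = bcrypt.gensalt()
user = {'username': 'alice', 'name': 'Alice', 'email': '[email protected]',
        'groups': [], 'salt': salt, 'passphrase': bcrypt.hashpw(password, salt)}

# verification re-hashes the candidate using the salt stored in the hash
print(bcrypt.hashpw(password, user['passphrase']) == user['passphrase'])  # True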
b4eb3a55be9e753496c5fd12a89ef85d6a904c09
Annotate zerver/management/commands/realm_emoji.py.
zerver/management/commands/realm_emoji.py
zerver/management/commands/realm_emoji.py
from __future__ import absolute_import from __future__ import print_function from argparse import RawTextHelpFormatter from typing import Any from argparse import ArgumentParser from django.core.management.base import BaseCommand from zerver.models import Realm, get_realm from zerver.lib.actions import check_add_realm_emoji, do_remove_realm_emoji import sys import six class Command(BaseCommand): help = """Manage emoji for the specified realm Example: python manage.py realm_emoji --realm=zulip.com --op=add robotheart \\ https://humbug-user-avatars.s3.amazonaws.com/95ffa70fe0e7aea3c052ba91b38a28d8779f5705 Example: python manage.py realm_emoji --realm=zulip.com --op=remove robotheart Example: python manage.py realm_emoji --realm=zulip.com --op=show """ # Fix support for multi-line usage def create_parser(self, *args, **kwargs): parser = super(Command, self).create_parser(*args, **kwargs) parser.formatter_class = RawTextHelpFormatter return parser def add_arguments(self, parser): # type: (ArgumentParser) -> None parser.add_argument('-r', '--realm', dest='domain', type=str, required=True, help='The name of the realm.') parser.add_argument('--op', dest='op', type=str, default="show", help='What operation to do (add, show, remove).') parser.add_argument('name', metavar='<name>', type=str, nargs='?', default=None, help="name of the emoji") parser.add_argument('img_url', metavar='<image url>', type=str, nargs='?', help="URL of image to display for the emoji") def handle(self, *args, **options): # type: (*Any, **str) -> None realm = get_realm(options["domain"]) if options["op"] == "show": for name, url in six.iteritems(realm.get_emoji()): print(name, url) sys.exit(0) name = options['name'] if name is None: self.print_help("python manage.py", "realm_emoji") sys.exit(1) if options["op"] == "add": img_url = options['img_url'] if img_url is None: self.print_help("python manage.py", "realm_emoji") sys.exit(1) check_add_realm_emoji(realm, name, img_url) sys.exit(0) elif options["op"] == "remove": do_remove_realm_emoji(realm, name) sys.exit(0) else: self.print_help("python manage.py", "realm_emoji") sys.exit(1)
Python
0
@@ -224,16 +224,31 @@ eCommand +, CommandParser %0Afrom ze @@ -862,24 +862,71 @@ **kwargs):%0A + # type: (*Any, **Any) -%3E CommandParser%0A pars
2a33ad8317b1642f4ff30b7466b83bbaf85299df
Remove unused var
resources/site-packages/quasar/navigation.py
resources/site-packages/quasar/navigation.py
import os import sys import urllib2 import xbmc import xbmcgui import xbmcplugin import socket from quasar.config import QUASARD_HOST from quasar.addon import ADDON, ADDON_ID from quasar.util import notify, GetLocalizedString try: import simplejson as json except ImportError: import json HANDLE = int(sys.argv[1]) class closing(object): def __init__(self, thing): self.thing = thing def __enter__(self): return self.thing def __exit__(self, *exc_info): self.thing.close() class NoRedirectHandler(urllib2.HTTPRedirectHandler): def http_error_302(self, req, fp, code, msg, headers): import urllib infourl = urllib.addinfourl(fp, headers, headers["Location"]) infourl.status = code infourl.code = code return infourl http_error_300 = http_error_302 http_error_301 = http_error_302 http_error_303 = http_error_302 http_error_307 = http_error_302 def _json(url): with closing(urllib2.urlopen(url)) as response: if response.code >= 300 and response.code <= 307: item = xbmcgui.ListItem( path=response.geturl(), label=xbmc.getInfoLabel("ListItem.Label"), label2=xbmc.getInfoLabel("ListItem.label2"), thumbnailImage=xbmc.getInfoLabel("ListItem.Art(thumb)")) _infoLabels = { "Title": xbmc.getInfoLabel("ListItem.Title"), "OriginalTitle": xbmc.getInfoLabel("ListItem.OriginalTitle"), "TVShowTitle": xbmc.getInfoLabel("ListItem.TVShowTitle"), "Season": xbmc.getInfoLabel("ListItem.Season"), "Episode": xbmc.getInfoLabel("ListItem.Episode"), # "Date": xbmc.getInfoLabel("ListItem.Date"), "VideoCodec": xbmc.getInfoLabel("ListItem.VideoCodec"), "VideoResolution": xbmc.getInfoLabel("ListItem.VideoResolution"), "VideoAspect": xbmc.getInfoLabel("ListItem.VideoAspect"), "DBID": xbmc.getInfoLabel("ListItem.DBID"), "DBTYPE": xbmc.getInfoLabel("ListItem.DBTYPE"), } infoLabels = {} for key, value in _infoLabels.iteritems(): if value: infoLabels[key] = value item.setInfo(type='Video', infoLabels=infoLabels) xbmcplugin.setResolvedUrl(HANDLE, True, item) return payload = response.read() if payload: return json.loads(payload) def run(url_suffix=""): if not os.path.exists(os.path.join(xbmc.translatePath(ADDON.getAddonInfo("path")), ".firstrun")): notify(ADDON.getLocalizedString(30101).encode('utf-8')) return socket.setdefaulttimeout(300) urllib2.install_opener(urllib2.build_opener(NoRedirectHandler())) url = sys.argv[0].replace("plugin://%s" % ADDON_ID, QUASARD_HOST + url_suffix) + sys.argv[2] xbmc.log(url) try: data = _json(url) except urllib2.HTTPError, e: return if not data: return if data["content_type"]: xbmcplugin.addSortMethod(HANDLE, xbmcplugin.SORT_METHOD_UNSORTED) xbmcplugin.addSortMethod(HANDLE, xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE) xbmcplugin.addSortMethod(HANDLE, xbmcplugin.SORT_METHOD_DATE) xbmcplugin.addSortMethod(HANDLE, xbmcplugin.SORT_METHOD_GENRE) xbmcplugin.setContent(HANDLE, data["content_type"]) listitems = range(len(data["items"])) for i, item in enumerate(data["items"]): # Translate labels if item["label"][0:8] == "LOCALIZE": item["label"] = GetLocalizedString(item["label"]) if item["label2"][0:8] == "LOCALIZE": item["label2"] = GetLocalizedString(item["label2"]) listItem = xbmcgui.ListItem(label=item["label"], label2=item["label2"], iconImage=item["icon"], thumbnailImage=item["thumbnail"]) if item.get("info"): listItem.setInfo("video", item["info"]) if item.get("stream_info"): for type_, values in item["stream_info"].items(): listItem.addStreamInfo(type_, values) if item.get("art"): listItem.setArt(item["art"]) if item.get("context_menu"): # Translate context menus for m, menu in enumerate(item["context_menu"]): if menu[0][0:8] == "LOCALIZE": menu[0] = GetLocalizedString(menu[0]) listItem.addContextMenuItems(item["context_menu"]) listItem.setProperty("isPlayable", item["is_playable"] and "true" or "false") if item.get("properties"): for k, v in item["properties"].items(): listItem.setProperty(k, v) listitems[i] = (item["path"], listItem, not item["is_playable"]) xbmcplugin.addDirectoryItems(HANDLE, listitems, totalItems=len(listitems)) xbmcplugin.endOfDirectory(HANDLE, succeeded=True, updateListing=False, cacheToDisc=True)
Python
0.000001
@@ -3031,11 +3031,8 @@ rror -, e :%0A
f95d7011ff89badfadbd07da0226f67f6dbd27a5
Remove unused `organizations:new-tracebacks` flag. (#4083)
src/sentry/features/__init__.py
src/sentry/features/__init__.py
from __future__ import absolute_import

from .base import *  # NOQA
from .handler import *  # NOQA
from .manager import *  # NOQA

default_manager = FeatureManager()  # NOQA

default_manager.add('auth:register')
default_manager.add('organizations:api-keys', OrganizationFeature)  # NOQA
default_manager.add('organizations:create')
default_manager.add('organizations:sso', OrganizationFeature)  # NOQA
default_manager.add('organizations:onboarding', OrganizationFeature)  # NOQA
default_manager.add('organizations:callsigns', OrganizationFeature)  # NOQA
default_manager.add('organizations:new-tracebacks', OrganizationFeature)  # NOQA
default_manager.add('organizations:reports:prepare', OrganizationFeature)  # NOQA
default_manager.add('organizations:reports:deliver', OrganizationFeature)  # NOQA
default_manager.add('projects:global-events', ProjectFeature)  # NOQA
default_manager.add('projects:quotas', ProjectFeature)  # NOQA
default_manager.add('projects:plugins', ProjectPluginFeature)  # NOQA

# expose public api
add = default_manager.add
get = default_manager.get
has = default_manager.has
Python
0
@@ -551,89 +551,8 @@ OQA%0A -default_manager.add('organizations:new-tracebacks', OrganizationFeature) # NOQA%0A defa
bbfcddbb21a6b6f40fafe8c88ca76ab4a0b4667b
add script to analyze the flow map
FlowNet/flowAnalysis.py
FlowNet/flowAnalysis.py
Python
0
@@ -0,0 +1,2716 @@ +# When the movement of the objects in the video is not distinct to be%0A# captured by optical flow algorithm, training this %22noisy%22 flow map%0A# against the ground truth labeling is risky. In this code, we would%0A# like to iterate through all the generated flow videos, and filter%0A# out the noisy flow map.%0A#%0A#%0A# Contact: Chih-Yao Ma at [email protected]%0A# Last update: 05/17/2016%0A%0Aimport time%0Aimport numpy as np%0Aimport cv2%0Aimport matplotlib.pyplot as plt%0A%0A# cap = cv2.VideoCapture('v_HandStandPushups_g01_c04_flow.avi')%0Acap = cv2.VideoCapture('v_HandStandPushups_g12_c06_flow.avi')%0A%0A%0A# information of the video%0A# property identifier:%0A# 1: ?; 2: s/frame; 3: width; 4: height; 6: ?; 7: ?%0AFr = round(1 / cap.get(2))%0AWd = int(cap.get(3))%0AHt = int(cap.get(4))%0A%0A# Define the codec and create VideoWriter object%0A# fourcc = cv2.cv.CV_FOURCC('X', 'V', 'I', 'D') # opencv 2.4%0Afourcc = cv2.VideoWriter_fourcc(*'XVID') # opencv 3.0%0Aout = cv2.VideoWriter('out_flow.avi', fourcc, Fr, (Wd, Ht))%0A%0AindFrame = 1%0A%0Adef close_event():%0A plt.close() #timer calls this function after 3 seconds and closes the window %0A%0A%0Awhile(cap.isOpened):%0A # Capture frame-by-frame%0A ret, frame = cap.read()%0A%0A if ret == True:%0A%0A print('--------------------------------------')%0A print('Frame # ', indFrame)%0A%0A # convert back to HSV%0A hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)%0A # extract the channels and flat them%0A channel_0 = hsv%5B..., 0%5D.flatten()%0A channel_1 = hsv%5B..., 1%5D.flatten()%0A channel_2 = hsv%5B..., 2%5D.flatten()%0A%0A # out.write(frame)%0A # Display the resulting frame%0A cv2.imshow('Processed frame', frame)%0A%0A # plot histogram for each channel %0A fig, (ax0, ax1, ax2) = plt.subplots(ncols=3, figsize=(12, 4))%0A %0A ax0.hist(channel_0, 20, normed=1, histtype='bar', facecolor='r', alpha=0.75)%0A ax0.set_title('Channel #0')%0A ax1.hist(channel_1, 20, normed=1, histtype='bar', facecolor='g', alpha=0.75)%0A ax1.set_title('Channel #1')%0A ax2.hist(channel_2, 20, normed=1, histtype='bar', facecolor='b', alpha=0.75)%0A ax2.set_title('Channel #2')%0A%0A # plot the figure for a short time%0A plt.tight_layout()%0A%0A timer = fig.canvas.new_timer(interval = 4000) #creating a timer object and setting an interval of 3000 milliseconds%0A timer.add_callback(close_event)%0A timer.start()%0A plt.show()%0A # fname = 'histogramFrame_' + str(indFrame)%0A # plt.savefig(fname)%0A%0A if cv2.waitKey(1) & 0xFF == ord('q'):%0A break %0A%0A else:%0A break%0A indFrame = indFrame + 1%0A%0A# When everything done, release the capture%0Acap.release()%0Aout.release()%0Acv2.destroyAllWindows()%0A
629ccdc27d2eb3522def903cc42606e43c3f816b
Add script to write network related files
AdaptivePELE/analysis/writeNetworkFiles.py
AdaptivePELE/analysis/writeNetworkFiles.py
Python
0
@@ -0,0 +1,2832 @@ +import os%0Aimport sys%0Aimport argparse%0Afrom AdaptivePELE.utilities import utilities%0Aimport matplotlib.pyplot as plt%0A%0A%0Adef parseArguments():%0A desc = %22Write the information related to the conformation network to file%5Cn%22%0A parser = argparse.ArgumentParser(description=desc)%0A parser.add_argument(%22clusteringObject%22, type=str, help=%22Path to the clustering object%22)%0A parser.add_argument(%22suffix%22, type=str, help=%22Suffix to append to file names%22)%0A parser.add_argument(%22metricCol%22, type=int, help=%22Column of the metric of interest%22)%0A parser.add_argument(%22-o%22, type=str, default=None, help=%22Output path where to write the files%22)%0A args = parser.parse_args()%0A return args.clusteringObject, args.suffix, args.metricCol, args.o%0A%0A%0Aif __name__ == %22__main__%22:%0A clusteringObject, suffix, metricCol, outputPath = parseArguments()%0A if outputPath is not None:%0A outputPath = os.path.join(outputPath, %22%22)%0A else:%0A outputPath = %22%22%0A if not os.path.exists(outputPath):%0A os.makedirs(outputPath)%0A sys.stderr.write(%22Reading clustering object...%5Cn%22)%0A cl = utilities.readClusteringObject(clusteringObject)%0A optimalCluster = cl.getOptimalMetric()%0A pathway = cl.createPathwayToCluster(optimalCluster)%0A sys.stderr.write(%22Writing conformation network...%5Cn%22)%0A cl.writeConformationNetwork(outputPath+%22conformationNetwork%25s.edgelist%22 %25 suffix)%0A sys.stderr.write(%22Writing FDT...%5Cn%22)%0A cl.writeFDT(outputPath+%22FDT%25s.edgelist%22 %25 suffix)%0A sys.stderr.write(%22Writing pathway to optimal cluster...%5Cn%22)%0A # cl.writePathwayOptimalCluster(outputPath+%22pathwayFDT%25s.pdb%22 %25 suffix)%0A cl.writePathwayTrajectory(pathway, outputPath+%22pathwayFDT%25s.pdb%22 %25 suffix)%0A sys.stderr.write(%22Writing nodes population...%5Cn%22)%0A cl.writeConformationNodePopulation(outputPath+%22nodesPopulation%25s.txt%22 %25 suffix)%0A sys.stderr.write(%22Writing nodes metrics...%5Cn%22)%0A cl.writeConformationNodeMetric(outputPath+%22nodesMetric%25s.txt%22 %25 suffix, metricCol)%0A sys.stderr.write(%22Writing metastability indeces...%5Cn%22)%0A metInd = cl.calculateMetastabilityIndex()%0A cl.writeMetastabilityIndex(outputPath+%22nodesMetIndex%25s.txt%22 %25 suffix)%0A plt.figure()%0A plt.plot(pathway, %5Bcl.clusters.clusters%5Bi%5D.getMetricFromColumn(5) for i in pathway%5D)%0A plt.xlabel(%22Cluster number%22)%0A plt.ylabel(%22Binding energy(kcal/mol)%22)%0A plt.savefig(outputPath+%22bindingEnergy_%25s.png%22 %25 suffix)%0A plt.figure()%0A plt.plot(pathway, %5Bcl.clusters.clusters%5Bi%5D.getMetricFromColumn(3) for i in pathway%5D)%0A plt.xlabel(%22Cluster number%22)%0A plt.ylabel(%22Energy(kcal/mol)%22)%0A plt.savefig(outputPath+%22totalEnergy_%25s.png%22 %25 suffix)%0A plt.figure()%0A plt.plot(pathway, %5BmetInd%5Bi%5D for i in pathway%5D)%0A plt.xlabel(%22Cluster number%22)%0A plt.ylabel(%22Metastability index%22)%0A plt.savefig(outputPath+%22metIndex_%25s.png%22 %25 suffix)%0A plt.show()%0A
daa4565abe4059e8588ddf374fde0f51d9ec784e
Create a skeleton for node propagation integration tests
test/integration/test_node_propagation.py
test/integration/test_node_propagation.py
Python
0.000001
@@ -0,0 +1,366 @@ +class TestPropagation(object):%0A def test_node_propagation(self):%0A %22%22%22%0A Tests that check node propagation%0A%0A 1) Spin up four servers.%0A 2) Make the first one send a sync request to all three others.%0A 3) Count the numbers of requests made.%0A 4) Check databases to see that they all know each other.%0A%0A %22%22%22%0A%0A pass%0A
5b9b27d98cad06f0bbd67026b6533dee7c218df7
Update Series server code shifted from custom script to py file
setup/doctype/update_series/update_series.py
setup/doctype/update_series/update_series.py
Python
0
@@ -0,0 +1,2318 @@ +# Please edit this list and import only required elements%0Aimport webnotes%0A%0Afrom webnotes.utils import add_days, add_months, add_years, cint, cstr, date_diff, default_fields, flt, fmt_money, formatdate, generate_hash, getTraceback, get_defaults, get_first_day, get_last_day, getdate, has_common, month_name, now, nowdate, replace_newlines, sendmail, set_default, str_esc_quote, user_format, validate_email_add%0Afrom webnotes.model import db_exists%0Afrom webnotes.model.doc import Document, addchild, removechild, getchildren, make_autoname, SuperDocType%0Afrom webnotes.model.doclist import getlist, copy_doclist%0Afrom webnotes.model.code import get_obj, get_server_obj, run_server_obj, updatedb, check_syntax%0Afrom webnotes import session, form, is_testing, msgprint, errprint%0A%0Aset = webnotes.conn.set%0Asql = webnotes.conn.sql%0Aget_value = webnotes.conn.get_value%0Ain_transaction = webnotes.conn.in_transaction%0Aconvert_to_lists = webnotes.conn.convert_to_lists%0A%0A# -----------------------------------------------------------------------------------------%0A%0Aclass DocType:%0A def __init__(self, doc, doclist=%5B%5D):%0A self.doc = doc%0A self.doclist = doclist%0A%0A def update_series(self):%0A series = sql(%22select name,current from %60tabSeries%60 where name = %25s%22, self.doc.prefix,as_dict = 1)%0A if series:%0A msgprint(%22This is going to update Series with Prefix : %22 + series%5B0%5D%5B'name'%5D + %22 from Current : %22 + cstr(series%5B0%5D%5B'current'%5D) + %22 to Current : %22+ cstr(self.doc.current))%0A sql(%22update %60tabSeries%60 set current = '%25s' where name = '%25s'%22 %25 (self.doc.current,series%5B0%5D%5B'name'%5D))%0A msgprint(%22Series Updated Successfully%22)%0A else:%0A msgprint(%22Please Check Prefix as there is no such Prefix : %22+ self.doc.prefix +%22 Or Try Insert Button%22)%0A%0A def insert_series(self):%0A #sql(%22start transaction%22)%0A series = sql(%22select name,current from %60tabSeries%60 where name = %25s%22, self.doc.prefix, as_dict = 1)%0A if series:%0A msgprint(%22Series with Prefix : %22 + series%5B0%5D%5B'name'%5D + %22already in the system . Try Update Button%22)%0A else:%0A msgprint(%22This is going to Insert Series with Prefix : %22 + cstr(self.doc.prefix) + %22 Current: %22 + cstr(self.doc.current))%0A sql(%22insert into %60tabSeries%60 (name,current) values ('%25s','%25s')%22 %25 (self.doc.prefix, self.doc.current))%0A msgprint(%22Series Inserted Successfully%22)%0A
4eab434002c99daf9c302cb1007e7ec384453aae
Fix cherrypy example
examples/cherrypysample.py
examples/cherrypysample.py
Python
0.000045
@@ -0,0 +1,197 @@ +#! /usr/bin/env python%0A# -*- coding: utf-8 -*-%0A# vim:fenc=utf-8%0A%0Aimport bottle%0A%0A%[email protected]('/')%0Adef index():%0A return %7B'key': 'value'%7D%0A%0Abottle.run(port=8080, host=%220.0.0.0%22, server=%22cherrypy%22)%0A
93e07841d961fb7956612339f13dfd4e8ddd8bac
Create RPi_Final.py
RPi_Final.py
RPi_Final.py
Python
0.000001
@@ -0,0 +1,21 @@ +from random import *%0A
2eba3f5072b547829964eac9d2d5b03076a49faf
add firmwareupdate sample
examples/firmwareupdate.py
examples/firmwareupdate.py
Python
0
@@ -0,0 +1,266 @@ +from sakuraio.hardware.rpi import SakuraIOGPIO%0A#from sakuraio.hardware.rpi import SakuraIOSMBus%0Aimport time%0A%0Asakuraio = SakuraIOGPIO()%0A#sakuraio = SakuraIOSMBus()%0A%0Asakuraio.unlock()%0Atime.sleep(1)%0Asakuraio.update_firmware()%0A%0A#print(sakuraio.get_firmware_version())%0A%0A%0A
16c57e5f3bd63667c7ca0b828e1f0fcd85d64b76
Create SecureMSG.py
SecureMSG.py
SecureMSG.py
Python
0.000001
@@ -0,0 +1,853 @@ +#!/usr/python%0A#%0A# I dedicate this application for my best friend, Robert Niemiec :)%0A#%0A# Copyright (c) 2015 Dawid Wiktor%0A# This app is writed for all whistleblowers, journalists and %0A# cryptoanarchists. Use it when you need. Be carefull! NSA watchin'%0A# %0A# This is the Open Source Software. You can freely use it, edit code, and %0A# ditribute. But you should respect Attribution.%0A%0Adef encryption():%0A%09key = input(%22Please, input a number here to be used as the key.%5Cn%22)%0A%09key = int(key)%0A%09dummy = 0%0A%09rawData = input(%22Enter string here.%5Cn%22)%0A%09rawlist = list(rawData)%0A%09data = rawlist%5B0 + dummy%5D%0A%09number = len(rawlist)%0A%0A%09while dummy != number:%0A%09%09data = ord(data)%0A%09%09data = data + key%0A%09%09print(data)%0A%09%09dummy = dummy + 1%0A%09%09data = rawlist%5B0 + dummy%5D%0A%0Arun = %22y%22%0Awhile run == %22y%22:%0A%09encryption()%0A%09run = input(%22Do you want to encrypt this? (y/n)%5Cn%22)%0A%0Aif run !=%22y%22:%0A%09exit()%0A
522fb2e4b9fdf46abed3b5ca8ba43758b22253a1
add missing file
addons/web/ir_module.py
addons/web/ir_module.py
Python
0.000001
@@ -0,0 +1,485 @@ +from openerp.osv import osv%0Aimport openerp.wsgi.core as oewsgi%0A%0Afrom common.http import Root%0A%0Aclass ir_module(osv.Model):%0A _inherit = 'ir.module.module'%0A%0A def update_list(self, cr, uid, context=None):%0A result = super(ir_module, self).update_list(cr, uid, context=context)%0A%0A if tuple(result) != (0, 0):%0A for handler in oewsgi.module_handlers:%0A if isinstance(handler, Root):%0A handler._load_addons()%0A%0A return result%0A
380a87e71c347eab5d9c5d22a255753e62e1d739
Add the original game code to the files to show progress made during the week using classes and other skills
Original_python_game.py
Original_python_game.py
Python
0
@@ -0,0 +1,1656 @@ +import random%0A%0AGuessesTaken = 0%0A%0Aprint (%22Hello and welcome to my higher or lower number guessing game.%22)%0Aprint (%22Whats your name?%22)%0AmyName = input()%0A%0Anumber = random.randint(1, 20)%0Anumber1 = random.randint(1, 20)%0Anumber2 = random.randint(1, 20)%0Anumber3 = random.randint(1, 20)%0Anumber4 = random.randint(1, 20)%0Anumber5 = random.randint(1, 20)%0Anumber6 = random.randint(1, 20)%0Anumber7 = random.randint(1, 20)%0Anumber8 = random.randint(1, 20)%0Anumber9 = random.randint(1, 20)%0Anumber10 = random.randint(1, 20)%0Anumber11 = random.randint(1, 20)%0Anumber12 = random.randint(1, 20)%0Anumber13 = random.randint(1, 20)%0Anumber14 = random.randint(1, 20)%0Anumber15 = random.randint(1, 20)%0Anumber16 = random.randint(1, 20)%0Anumber17 = random.randint(1, 20)%0Anumber18 = random.randint(1, 20)%0Anumber19 = random.randint(1, 20)%0Anumber20 = random.randint(1, 20)%0A%0Aprint (%22So, your names %22 + myName + %22 Hmmmmmmm%22)%0Aprint (%22Ok %22 + myName + %22 here is your first number%22)%0Aprint (%22%22)%0Aprint (number)%0Aprint (%22%22)%0Aprint (%22Also keep in mind that the numbers range from 1 to 20%22)%0Aprint (%22%22)%0Aprint (%22So will the next number be higher or lower?%22)%0Aprint (%22%22)%0Aprint (%22%22)%0Aprint (%22Use h to guess Higher and use l to guess Lower.%22)%0A%0Aguess = input('Enter either h or l: ')%0A%0Aif number %3E number1 and guess == %22l%22:%0A print (%22Well done the number was %22 + number1 + %22 Now onto stage 2%22)%0Aelif number %3E number1 and guess == %22h%22:%0A print (%22Incorrect the number was %22 + number1 + %22GAME OVER%22)%0Aelif number %3C number1 and guess == %22h%22:%0A print (%22Well done the number was %22 + number1 + %22 Now onto stage 2%22)%0Aelif number %3C number1 and guess == %22l%22:%0A print (%22Incorrect the number was %22 + number1 + %22 GAME OVER%22)
958e6ca0ba5be68802e61a450aeb2bf39ea5d5ba
Create psf2pdb.py
psf2pdb.py
psf2pdb.py
Python
0.000012
@@ -0,0 +1,733 @@ +import sys%0A%0Apdbfile = open(sys.argv%5B1%5D,'r')%0Apsfile = open(sys.argv%5B2%5D,'r')%0A%0Ainline = pdbfile.readline()%0Aoutput = ''%0Awhile inline != 'END%5Cn':%0A output = output + inline%0A inline = pdbfile.readline()%0A if inline == '': #sanity check%0A print %22Error%22%0A exit()%0A%0Ainline = psfile.readline().split()%0Awhile inline%5B1%5D != '!NBOND:':%0A inline = psfile.readline().split()%0A while len(inline)%3C1:%0A inline = psfile.readline().split()%0A%0Abondlist = psfile.readline().split()%0Afor i in range(int(inline%5B0%5D)):%0A new = bondlist.pop(0)%0A output = output + 'CONECT '+new+' '+bondlist.pop(0)+'%5Cn'%0A if len(bondlist)==0:%0A bondlist = psfile.readline().split()%0A%0Aoutfile = open(sys.argv%5B3%5D,'w')%0Aoutfile.write(output)%0A
e73d16d4051c6bc66daf415d2da4e8d204a97004
Add rainbow function
rainbow.py
rainbow.py
Python
0
@@ -0,0 +1,1694 @@ +import re%0Aimport colorsys%0Afrom pymol import cmd%0A%0A%0Adef rainbow(range_string):%0A %22%22%22%0ADESCRIPTION%0A%0A Colors rainbow spectrum for a selection given in range string.%0A%0A The difference between coloring in rainbow with built-in 'spectrum' is that%0A this relies on the segment order in range string (not alphabetically%0A sorted), so it can handle multiple chain domain as in insulin where usually%0A chain B should be before chain A in many cases.%0A%0AUSAGE%0A%0A rainbow range_string%0A%0AARGUMENTS%0A%0A range_string = 'B:2-29,A:1-21'%0A %22%22%22%0A seg_ptn = re.compile(r'(%5BA-Za-z0-9%5D%7B1%7D):(-?%5B0-9%5D+%5BA-Z%5D?)-(-?%5B0-9%5D+%5BA-Z%5D?)')%0A all_resi = %5B%5D%0A for seg in seg_ptn.finditer(range_string):%0A chain = seg.group(1)%0A local_space = %7B'resnums' : %5B%5D, 'chain': chain%7D%0A groups = list(seg.groups())%0A for i in %5B1, 2%5D:%0A # excape minus index%0A if groups%5Bi%5D.startswith('-'):%0A groups%5Bi%5D = '%5C%5C' + groups%5Bi%5D%0A cmd.iterate('c. %25s and i. %25s-%25s and n. CA' %25 seg.groups(),%0A 'resnums.append(resi)', space=local_space)%0A all_resi.append(local_space)%0A%0A total = reduce(lambda x, y: x + len(y%5B'resnums'%5D), all_resi, 0)%0A%0A cnt = 0%0A for seg in all_resi:%0A chain = seg%5B'chain'%5D%0A for i in seg%5B'resnums'%5D:%0A hue = colorsys.TWO_THIRD - colorsys.TWO_THIRD * cnt / (total - 1)%0A red, green, blue = colorsys.hsv_to_rgb(hue, 1, 1)%0A hexcolor = hex((int(red * 255) %3C%3C 16) + (int(green * 255) %3C%3C 8) +%0A int(blue * 255))%0A cmd.color(hexcolor, 'c. %25s and i. %25s' %25 (chain, i))%0A cnt += 1%0A%0Aif __name__ != %22rainbow%22:%0A cmd.extend('rainbow', rainbow)%0A
cbaed7d194f4a91198fc097d4657ad327819af4b
Add new migration.
invite/migrations/0004_auto_20191126_1740.py
invite/migrations/0004_auto_20191126_1740.py
Python
0
@@ -0,0 +1,1795 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.22 on 2019-11-26 17:40%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0Aimport uuid%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('invite', '0003_abstract_invitation_auto_now_add'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='invitation',%0A name='activation_code',%0A field=models.CharField(default=uuid.uuid4, editable=False, help_text='unique id, generated on email submission', max_length=36, unique=True),%0A ),%0A migrations.AlterField(%0A model_name='invitation',%0A name='date_invited',%0A field=models.DateField(auto_now_add=True, help_text='the day on which the superuser invited the potential member'),%0A ),%0A migrations.AlterField(%0A model_name='invitation',%0A name='email',%0A field=models.EmailField(help_text=%22the potential member's email address%22, max_length=41),%0A ),%0A migrations.AlterField(%0A model_name='passwordresetinvitation',%0A name='activation_code',%0A field=models.CharField(default=uuid.uuid4, editable=False, help_text='unique id, generated on email submission', max_length=36, unique=True),%0A ),%0A migrations.AlterField(%0A model_name='passwordresetinvitation',%0A name='date_invited',%0A field=models.DateField(auto_now_add=True, help_text='the day on which the superuser invited the potential member'),%0A ),%0A migrations.AlterField(%0A model_name='passwordresetinvitation',%0A name='email',%0A field=models.EmailField(help_text=%22the potential member's email address%22, max_length=41),%0A ),%0A %5D%0A
47044317e4067fb38bf9e0fdb2e9c5f9ccb78053
add migration
pokemon_v2/migrations/0006_auto_20200725_2205.py
pokemon_v2/migrations/0006_auto_20200725_2205.py
Python
0.000001
@@ -0,0 +1,691 @@ +from django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22pokemon_v2%22, %220005_auto_20200709_1930%22),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name=%22pokemon%22,%0A name=%22height%22,%0A field=models.IntegerField(blank=True, null=True),%0A ),%0A migrations.AlterField(%0A model_name=%22pokemon%22,%0A name=%22weight%22,%0A field=models.IntegerField(blank=True, null=True),%0A ),%0A migrations.AlterField(%0A model_name=%22pokemon%22,%0A name=%22base_experience%22,%0A field=models.IntegerField(blank=True, null=True),%0A ),%0A %5D%0A
bad97abfe7fd93cefac10d46b5434b63cc7e3d2b
add line to end of file
keras_contrib/constraints.py
keras_contrib/constraints.py
from __future__ import absolute_import
from . import backend as K
from keras.utils.generic_utils import get_from_module
from keras.constraints import *


class Clip(Constraint):
    """Clips weights to [-c, c].

    # Arguments
        c: Clipping parameter.
    """

    def __init__(self, c=0.01):
        self.c = c

    def __call__(self, p):
        return K.clip(p, -self.c, self.c)

    def get_config(self):
        return {'name': self.__class__.__name__,
                'c': self.c}


# Aliases.

clip = Clip
Python
0.000001
@@ -508,12 +508,13 @@ %0Aclip = Clip +%0A
d558ed9875cf99ebdf6915e7acd877fc7fae69f3
Add missing migration
candidates/migrations/0028_auto_20160411_1055.py
candidates/migrations/0028_auto_20160411_1055.py
Python
0.0002
@@ -0,0 +1,1172 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('candidates', '0027_create_standard_complex_fields'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='complexpopolofield',%0A name='info_type_key',%0A field=models.CharField(help_text=%22Name of the field in the array that stores the type ('note' for links, 'contact_type' for contacts, 'scheme' for identifiers)%22, max_length=100),%0A ),%0A migrations.AlterField(%0A model_name='complexpopolofield',%0A name='info_value_key',%0A field=models.CharField(help_text=%22Name of the field in the array that stores the value, e.g 'url' for links, 'value' for contact_type, 'identifier' for identifiers%22, max_length=100),%0A ),%0A migrations.AlterField(%0A model_name='complexpopolofield',%0A name='old_info_type',%0A field=models.CharField(help_text=%22Used for supporting info_types that have been renamed. As such it's rarely used.%22, max_length=100, blank=True),%0A ),%0A %5D%0A
d663893c857605a53372d30395d3ca86f89bd201
FIX default attribute value
test/acceptance/commons/constants.py
test/acceptance/commons/constants.py
__author__ = 'arobres'

#AUTHENTICATION CONSTANTS

AUTH = u'auth'
TENANT_NAME = u'tenantName'
USERNAME = u'username'
PASSWORD = u'password'
ACCESS = u'access'
TOKEN = u'token'
TENANT = u'tenant'
ID = u'id'

#PRODUCT_PROPERTIES
PRODUCT_NAME = u'name'
PRODUCT_DESCRIPTION = u'description'
PRODUCT = u'product'
PRODUCTS = u'products'
PRODUCT_ATTRIBUTES = u'attributes'
PRODUCT_METADATAS = u'metadatas'
METADATA = u'metadata'
ATTRIBUTE = u'attribute'
KEY = u'key'
VALUE = u'value'
DESCRIPTION = u'description'
ATTRIBUTE_TYPE = u'type'
ATTRIBUTE_TYPE_PLAIN = u'Plain'

#HEADERS
CONTENT_TYPE = u'content-type'
CONTENT_TYPE_JSON = u'application/json'
CONTENT_TYPE_XML = u'application/xml'
AUTH_TOKEN_HEADER = u'X-Auth-Token'
TENANT_ID_HEADER = u'Tenant-Id'
ACCEPT_HEADER = u'Accept'
ACCEPT_HEADER_XML = u'application/xml'
ACCEPT_HEADER_JSON = u'application/json'

#PRODUCT RELEASE
PRODUCT_RELEASE = u'productRelease'
PRODUCT_RELEASE_LIST = u'productReleases'
VERSION = u'version'

#INCORRECT PARAMETERS
LONG_ID = 'long' * 64 + 'a'  #STRING WITH 257 characters

#DEFAULT_METADATA
NUMBER_OF_DEFAULT_SDC_METADATA = 6
DEFAULT_METADATA = {"metadata": [{"key": "image", "value": "df44f62d-9d66-4dc5-b084-2d6c7bc4cfe4"},
                                 {"key": "cookbook_url", "value": ''},
                                 {"key": "cloud", "value": "yes"},
                                 {"key": "installator", "value": "chef"},
                                 {"key": "open_ports", "value": "80 22"}]}

DEFAULT_ATTRIBUTE = {"attribute": [{"key": "custom_att_01", "value": "att_01_default"},
                                   {"key": "custom_att_02", "value": "att_02_default"}]}

PRODUCT_RELEASE_WITHOUT_RELEASES_RESPONSE = u'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' \
                                            u'<productReleases></productReleases>'

# FABRIC AND PRODUCT INSTALLATION
FABRIC_RESULT_EXECUTE = u'<local-only>'
PRODUCT_FILE_NAME_FORMAT = u'{product_name}_{product_version}_{installator}'
PRODUCT_INSTALLATION_FILE_CONTENT = u'Operation: install; Product: {product_name}; Version: {product_version}; Att01: {att_01}; Att02: {att_02}'
PRODUCT_INSTALLATION_ATT1_DEFAULT = u'att_01_default'
PRODUCT_INSTALLATION_ATT2_DEFAULT = u'att_02_default'

#PRODUCT_INSTALLATION_PARAMETERS
PRODUCT_INSTANCE_LIST = u'productInstances'
PRODUCT_INSTANCE = u'productInstanceDto'
PRODUCT_INSTANCE_RES = u'productInstance'
PRODUCT_INSTANCE_NAME = u'name'
PRODUCT_INSTANCE_STATUS = u'status'
PRODUCT_INSTANCE_VM = u'vm'
PRODUCT_INSTANCE_VM_IP = u'ip'
PRODUCT_INSTANCE_VM_FQN = u'fqn'
PRODUCT_INSTANCE_VM_OSTYPE = u'osType'
PRODUCT_INSTANCE_VM_HOSTNAME = u'hostname'
PRODUCT_INSTANCE_ATTRIBUTES = u'attributes'

# METADATAS VALUES
INSTALLATOR = u'installator'
INSTALLATOR_VALUE = (u'puppet', u'chef')
METADATA_TENANT_ID = u'tenant_id'

#TASKS
TASK = u'task'
TASK_HREF = u'href'
TASK_STARTTIME = u'startTime'
TASK_STATUS = u'status'
TASK_DESCRIPTION = u'description'
TASK_VDC = u'vdc'
TASK_ERROR = u'error'
TASK_ERROR_MINOR_CODE = u'minorErrorCode'
TASK_URL = u'@href'
STATUS = u'status'
STATUS_XML = u'@status'
VDC = u'vdc'
TASK_STATUS_VALUE_RUNNING = u'RUNNING'
TASK_STATUS_VALUE_SUCCESS = u'SUCCESS'
TASK_STATUS_VALUE_ERROR = u'ERROR'
TASK_STATUS_VALUE_INSTALLED = u'INSTALLED'
TASK_STATUS_VALUE_UNINSTALLED = u'UNINSTALLED'

#PRODUCTANDRELEASE VALUES
PRODUCTANDRELEASE_LIST = u'productAndReleaseDtoes'
PRODUCTANDRELEASE = u'productAndReleaseDto'

#ATTRIBUTE FROM CONFIG FILE (for loading values from config_file)
CONFIG_FILE = u'${CONFIG_FILE}'
Python
0.000001
@@ -1467,28 +1467,19 @@ %22key%22: %22 -custom_ att -_0 1%22, %22val @@ -1487,27 +1487,40 @@ e%22: %22att -_01_default +1_value%22, %22type%22: %22Plain %22%7D,%0A @@ -1563,20 +1563,11 @@ %22: %22 -custom_ att -_0 2%22, @@ -1579,27 +1579,40 @@ e%22: %22att -_02_default +2_value%22, %22type%22: %22Plain %22%7D%5D%7D%0A%0APR
90a5242a93beda053ad91adca0728995232e23d2
Create toggle_editor_text_console.py
cg/blender/scripts/toggle_editor_text_console.py
cg/blender/scripts/toggle_editor_text_console.py
Python
0
@@ -0,0 +1,395 @@ +import bpy%0A%0Akeyconfig = bpy.context.window_manager.keyconfigs.user%0A%0Aargs = ('wm.context_set_enum', 'ESC', 'PRESS')%0Akwargs = %7B'shift':True%7D%0A%0Afor source, destination in (('Console', 'TEXT_EDITOR'), ('Text', 'CONSOLE')): %0A kmi = keyconfig.keymaps%5Bsource%5D.keymap_items.new(*args, **kwargs)%0A properties = kmi.properties%0A properties.data_path = 'area.type'%0A properties.value = destination%0A
cb505bd4c86c39bd7ce575a7d72e4a3d33875b93
Create polyDataMake.py
figureCode/polyDataMake.py
figureCode/polyDataMake.py
Python
0
@@ -0,0 +1,1478 @@ +import numpy as np%0Afrom random import seed, getstate, setstate%0A%0Adef polyDataMake(n=21,deg=3,sampling='sparse'):%0A old_state = getstate()%0A seed(0)%0A%0A if sampling == 'irregular':%0A xtrain = np.array(%5Bnp.linspace(-1,-.5,6),np.linspace(3,3.5,6)%5D).reshape(-1,1)%0A elif sampling == 'sparse':%0A xtrain = np.array(%5B-3, -2, 0, 2, 3%5D)%0A elif sampling == 'dense':%0A xtrain = np.array(np.arange(-5,5,.6))%0A elif sampling == 'thibaux':%0A xtrain = np.linspace(0,20,n)%0A else:%0A raise ValueError('Unrecognized sampling provided.')%0A %0A if sampling == 'thibaux':%0A seed(654321)%0A xtest = np.linspace(0,20,201)%0A sigma2 = 4%0A w = np.array(%5B-1.5,1.0/9.0%5D).T%0A def fun(x):%0A return w%5B0%5D*x + w%5B1%5D*(x**2)%0A else:%0A xtest = np.linspace(-7,7,141)%0A if deg == 2:%0A def fun(x):%0A return 10 + x + x**2%0A elif deg == 3 :%0A def fun(x):%0A return 10 + x + x**3%0A else:%0A raise ValueError('Unrecognized degree.')%0A sigma2 = 25%0A %0A ytrain = fun(xtrain) + np.random.normal(size=xtrain.shape%5B0%5D)*np.sqrt(sigma2)%0A ytestNoisefree = fun(xtest)%0A ytestNoisy = ytestNoisefree + np.random.normal(size=xtest.shape%5B0%5D)*np.sqrt(sigma2)%0A %0A def shp(x):%0A return np.asarray(x).reshape(-1,1)%0A %0A setstate(old_state)%0A return shp(xtrain), shp(ytrain), shp(xtest), shp(ytestNoisefree), shp(ytestNoisy), sigma2%0A
f0fcfa4df3ce4e09e712a3f0a0fe4013fda10ca5
make optimize_png_images.py platform-agnostic.
Source/devtools/scripts/optimize_png_images.py
Source/devtools/scripts/optimize_png_images.py
#!/usr/bin/env python
# Copyright (c) 2014 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#    * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#    * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import devtools_file_hashes
import glob
import hashlib
import os
import os.path
import re
import subprocess
import sys

try:
    import json
except ImportError:
    import simplejson as json

scripts_path = os.path.dirname(os.path.abspath(__file__))
devtools_path = os.path.dirname(scripts_path)
blink_source_path = os.path.dirname(devtools_path)
blink_path = os.path.dirname(blink_source_path)
chromium_src_path = os.path.dirname(os.path.dirname(blink_path))

devtools_frontend_path = devtools_path + "/front_end"
images_path = devtools_frontend_path + "/Images"
image_sources_path = images_path + "/src"
hashes_file_name = "optimize_png.hashes"
hashes_file_path = image_sources_path + "/" + hashes_file_name

file_names = os.listdir(image_sources_path)
svg_file_paths = [image_sources_path + "/" + file_name for file_name in file_names if file_name.endswith(".svg")]
svg_file_paths_to_optimize = devtools_file_hashes.files_with_invalid_hashes(hashes_file_path, svg_file_paths)
svg_file_names = [re.sub(".svg$", "", re.sub(".*/", "", file_path)) for file_path in svg_file_paths_to_optimize]

optimize_script_path = "tools/resources/optimize-png-files.sh"


def check_installed(app_name, package, how_to):
    proc = subprocess.Popen("which %s" % app_name, stdout=subprocess.PIPE, shell=True)
    proc.communicate()
    if proc.returncode != 0:
        print "This script needs \"%s\" to be installed." % app_name
        if how_to:
            print how_to
        else:
            print "To install execute the following command: sudo apt-get install %s" % package
        sys.exit(1)

check_installed("pngcrush", "pngcrush", None)
check_installed("optipng", "optipng", None)
check_installed("advdef", "advancecomp", None)
check_installed("pngout", None, "Utility can be downloaded here: http://www.jonof.id.au/kenutils")


def optimize_png(file_name):
    png_full_path = images_path + "/" + file_name + ".png"
    optimize_command = "bash %s -o2 %s" % (optimize_script_path, png_full_path)
    proc = subprocess.Popen(optimize_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, cwd=chromium_src_path)
    return proc

if len(svg_file_names):
    print "%d unoptimized png files found." % len(svg_file_names)
else:
    print "All png files are already optimized."
    sys.exit()

processes = {}
for file_name in svg_file_names:
    name = re.sub(".svg$", "", file_name)
    name2x = name + "_2x"
    processes[name] = optimize_png(name)
    processes[name2x] = optimize_png(name2x)

for file_name, proc in processes.items():
    (optimize_out, _) = proc.communicate()
    print("Optimization of %s finished: %s" % (file_name, optimize_out))

devtools_file_hashes.update_file_hashes(hashes_file_path, svg_file_paths)
Python
0.000009
@@ -2022,32 +2022,45 @@ frontend_path = +os.path.join( devtools_path + @@ -2056,21 +2056,19 @@ ols_path - + +, %22 -/ front_en @@ -2069,16 +2069,17 @@ ont_end%22 +) %0Aimages_ @@ -2085,16 +2085,29 @@ _path = +os.path.join( devtools @@ -2124,20 +2124,19 @@ path - + +, %22 -/ Images%22 +) %0Aima @@ -2145,32 +2145,45 @@ _sources_path = +os.path.join( images_path + %22/ @@ -2181,17 +2181,16 @@ path - + +, %22 -/ src%22 +) %0Ahas @@ -2242,24 +2242,37 @@ file_path = +os.path.join( image_source @@ -2277,24 +2277,17 @@ ces_path - + %22/%22 + +, hashes_ @@ -2295,16 +2295,17 @@ ile_name +) %0A%0Afile_n @@ -2360,16 +2360,29 @@ aths = %5B +os.path.join( image_so @@ -2387,32 +2387,25 @@ sources_path - + %22/%22 + +, file_name f @@ -2402,16 +2402,17 @@ ile_name +) for fil @@ -2615,26 +2615,25 @@ %22%22, -re.sub(%22.*/%22, %22%22, +os.path.basename( file @@ -2713,15 +2713,31 @@ h = +os.path.join( %22tools -/ +%22, %22 reso @@ -2741,17 +2741,20 @@ esources -/ +%22, %22 optimize @@ -2767,16 +2767,17 @@ iles.sh%22 +) %0A%0A%0Adef c @@ -3489,16 +3489,29 @@ _path = +os.path.join( images_p @@ -3517,16 +3517,9 @@ path - + %22/%22 + +, fil @@ -3533,16 +3533,17 @@ + %22.png%22 +) %0A opt
53dc0a5a1e8cc94dd23f6b6cfa1997f7b8b6f926
call FSL NIDM export from command line
nidm-results_fsl.py
nidm-results_fsl.py
Python
0
@@ -0,0 +1,920 @@ +#!/usr/bin/python%0A%22%22%22%0AExport neuroimaging results created with FSL feat following NIDM-Results %0Aspecification. The path to feat directory must be passed as first argument.%0A%0A@author: Camille Maumet %[email protected]%3E%0A@copyright: University of Warwick 2013-2014%0A%22%22%22%0A%0Aimport sys%0Aimport os%0Afrom fsl_exporter.fsl_exporter import FSLtoNIDMExporter%0A%0Aif __name__ == %22__main__%22:%0A # Remove first argument (script name)%0A num_args = len(sys.argv)-1%0A sys.argv.pop(0)%0A args = sys.argv%0A%0A usage = %22Usage: python nidm-results_fsl.py path/to/feat/dir%22%0A%0A if num_args != 1:%0A raise Exception(usage)%0A%0A feat_dir = args%5B0%5D%0A if not os.path.isdir(feat_dir):%0A raise Exception(%22Unknown directory: %22+str(feat_dir))%0A%0A fslnidm = FSLtoNIDMExporter(feat_dir=feat_dir, version=%220.2.0%22)%0A fslnidm.parse()%0A fslnidm.export()%0A%0A print 'NIDM export available at: '+str(os.path.join(feat_dir, %22nidm%22))%0A
6e2fb17f191047553d81c22d189d494d9d80faac
Fix KeyError on 'title' when title is empty
homeassistant/components/media_player/mpd.py
homeassistant/components/media_player/mpd.py
""" homeassistant.components.media_player.mpd ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Provides functionality to interact with a Music Player Daemon. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/media_player.mpd/ """ import logging import socket try: import mpd except ImportError: mpd = None from homeassistant.const import ( STATE_PLAYING, STATE_PAUSED, STATE_OFF) from homeassistant.components.media_player import ( MediaPlayerDevice, SUPPORT_PAUSE, SUPPORT_VOLUME_SET, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_PREVIOUS_TRACK, SUPPORT_NEXT_TRACK, MEDIA_TYPE_MUSIC) _LOGGER = logging.getLogger(__name__) REQUIREMENTS = ['python-mpd2==0.5.4'] SUPPORT_MPD = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_TURN_OFF | \ SUPPORT_TURN_ON | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """ Sets up the MPD platform. """ daemon = config.get('server', None) port = config.get('port', 6600) location = config.get('location', 'MPD') password = config.get('password', None) global mpd # pylint: disable=invalid-name if mpd is None: import mpd as mpd_ mpd = mpd_ # pylint: disable=no-member try: mpd_client = mpd.MPDClient() mpd_client.connect(daemon, port) if password is not None: mpd_client.password(password) mpd_client.close() mpd_client.disconnect() except socket.error: _LOGGER.error( "Unable to connect to MPD. " "Please check your settings") return False except mpd.CommandError as error: if "incorrect password" in str(error): _LOGGER.error( "MPD reported incorrect password. " "Please check your password.") return False else: raise add_devices([MpdDevice(daemon, port, location, password)]) class MpdDevice(MediaPlayerDevice): """ Represents a MPD server. """ # MPD confuses pylint # pylint: disable=no-member, abstract-method def __init__(self, server, port, location, password): self.server = server self.port = port self._name = location self.password = password self.status = None self.currentsong = None self.client = mpd.MPDClient() self.client.timeout = 10 self.client.idletimeout = None self.update() def update(self): try: self.status = self.client.status() self.currentsong = self.client.currentsong() except mpd.ConnectionError: self.client.connect(self.server, self.port) if self.password is not None: self.client.password(self.password) self.status = self.client.status() self.currentsong = self.client.currentsong() @property def name(self): """ Returns the name of the device. """ return self._name @property def state(self): """ Returns the media state. """ if self.status['state'] == 'play': return STATE_PLAYING elif self.status['state'] == 'pause': return STATE_PAUSED else: return STATE_OFF @property def media_content_id(self): """ Content ID of current playing media. """ return self.currentsong['id'] @property def media_content_type(self): """ Content type of current playing media. """ return MEDIA_TYPE_MUSIC @property def media_duration(self): """ Duration of current playing media in seconds. """ # Time does not exist for streams return self.currentsong.get('time') @property def media_title(self): """ Title of current playing media. """ name = self.currentsong.get('name', None) title = self.currentsong['title'] if name is None: return title else: return '{}: {}'.format(name, title) @property def media_artist(self): """ Artist of current playing media. 
(Music track only) """ return self.currentsong.get('artist') @property def media_album_name(self): """ Album of current playing media. (Music track only) """ return self.currentsong.get('album') @property def volume_level(self): return int(self.status['volume'])/100 @property def supported_media_commands(self): """ Flags of media commands that are supported. """ return SUPPORT_MPD def turn_off(self): """ Service to send the MPD the command to stop playing. """ self.client.stop() def turn_on(self): """ Service to send the MPD the command to start playing. """ self.client.play() def set_volume_level(self, volume): """ Sets volume """ self.client.setvol(int(volume * 100)) def volume_up(self): """ Service to send the MPD the command for volume up. """ current_volume = int(self.status['volume']) if current_volume <= 100: self.client.setvol(current_volume + 5) def volume_down(self): """ Service to send the MPD the command for volume down. """ current_volume = int(self.status['volume']) if current_volume >= 0: self.client.setvol(current_volume - 5) def media_play(self): """ Service to send the MPD the command for play/pause. """ self.client.pause(0) def media_pause(self): """ Service to send the MPD the command for play/pause. """ self.client.pause(1) def media_next_track(self): """ Service to send the MPD the command for next track. """ self.client.next() def media_previous_track(self): """ Service to send the MPD the command for previous track. """ self.client.previous()
Python
0.002891
@@ -4022,17 +4022,27 @@ song -%5B +.get( 'title' -%5D +, None) %0A%0A @@ -4095,32 +4095,112 @@ e%0A else:%0A + if title is None:%0A return name%0A else:%0A retu
88fe28ea1bca1f0f0784828592c2414e85e5ceb9
add update service
homeassistant/components/sensor/speedtest.py
homeassistant/components/sensor/speedtest.py
""" homeassistant.components.sensor.speedtest ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Speedtest.net sensor based on speedtest-cli. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.speedtest/ """ import logging import sys import re from datetime import timedelta from subprocess import check_output from homeassistant.util import Throttle from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import track_time_change REQUIREMENTS = ['speedtest-cli==0.3.4'] _LOGGER = logging.getLogger(__name__) _SPEEDTEST_REGEX = re.compile(r'Ping:\s(\d+\.\d+)\sms\nDownload:\s(\d+\.\d+)' r'\sMbit/s\nUpload:\s(\d+\.\d+)\sMbit/s\n') CONF_MONITORED_CONDITIONS = 'monitored_conditions' CONF_MINUTE = 'minute' CONF_HOUR = 'hour' CONF_DAY = 'day' SENSOR_TYPES = { 'ping': ['Ping', 'ms'], 'download': ['Download', 'Mbit/s'], 'upload': ['Upload', 'Mbit/s'], } # Return cached results if last scan was less then this time ago MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) def setup_platform(hass, config, add_devices, discovery_info=None): """ Setup the Speedtest sensor. """ data = SpeedtestData(hass, config) dev = [] for sensor in config[CONF_MONITORED_CONDITIONS]: if sensor not in SENSOR_TYPES: _LOGGER.error('Sensor type: "%s" does not exist', sensor) else: dev.append(SpeedtestSensor(data, sensor)) add_devices(dev) # pylint: disable=too-few-public-methods class SpeedtestSensor(Entity): """ Implements a speedtest.net sensor. """ def __init__(self, speedtest_data, sensor_type): self._name = SENSOR_TYPES[sensor_type][0] self.speedtest_client = speedtest_data self.type = sensor_type self._state = None self._unit_of_measurement = SENSOR_TYPES[self.type][1] @property def name(self): return '{} {}'.format('Speedtest', self._name) @property def state(self): """ Returns the state of the device. """ return self._state @property def unit_of_measurement(self): """ Unit of measurement of this entity, if any. """ return self._unit_of_measurement def update(self): """ Gets the latest data from Forecast.io and updates the states. """ data = self.speedtest_client.data if data is not None: if self.type == 'ping': self._state = data['ping'] elif self.type == 'download': self._state = data['download'] elif self.type == 'upload': self._state = data['upload'] class SpeedtestData(object): """ Gets the latest data from speedtest.net. """ def __init__(self, hass, config): self.data = None self.hass = hass self.path = hass.config.path track_time_change(self.hass, self.update, minute=config.get(CONF_MINUTE, 0), hour=config.get(CONF_HOUR, None), day=config.get(CONF_DAY, None)) @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self, now): """ Gets the latest data from speedtest.net. """ _LOGGER.info('Executing speedtest') re_output = _SPEEDTEST_REGEX.split( check_output([sys.executable, self.path( 'lib', 'speedtest_cli.py'), '--simple']).decode("utf-8")) self.data = {'ping': round(float(re_output[1]), 2), 'download': round(float(re_output[2]), 2), 'upload': round(float(re_output[3]), 2)}
Python
0
@@ -508,16 +508,107 @@ e_change +%0Afrom homeassistant.components.sensor import DOMAIN%0Aimport homeassistant.util.dt as dt_util %0A%0AREQUIR @@ -1330,17 +1330,16 @@ config)%0A -%0A dev @@ -1596,16 +1596,249 @@ s(dev)%0A%0A + def update(call=None):%0A %22%22%22 Update service for manual updates. %22%22%22%0A data.update(dt_util.now())%0A for sensor in dev:%0A sensor.update()%0A%0A hass.services.register(DOMAIN, 'update_speedtest', update)%0A%0A %0A# pylin
cfb3384ee31945d0afef6c558b873d956247e791
Add link to docs
homeassistant/components/switch/tellstick.py
homeassistant/components/switch/tellstick.py
""" homeassistant.components.switch.tellstick ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Support for Tellstick switches. Because the tellstick sends its actions via radio and from most receivers it's impossible to know if the signal was received or not. Therefore you can configure the switch to try to send each signal repeatedly with the config parameter signal_repetitions (default is 1). signal_repetitions: 3 """ import logging from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, ATTR_FRIENDLY_NAME) from homeassistant.helpers.entity import ToggleEntity import tellcore.constants as tellcore_constants from tellcore.library import DirectCallbackDispatcher SINGAL_REPETITIONS = 1 REQUIREMENTS = ['tellcore-py==1.1.2'] # pylint: disable=unused-argument def setup_platform(hass, config, add_devices_callback, discovery_info=None): """ Find and return Tellstick switches. """ try: import tellcore.telldus as telldus except ImportError: logging.getLogger(__name__).exception( "Failed to import tellcore") return core = telldus.TelldusCore(callback_dispatcher=DirectCallbackDispatcher()) signal_repetitions = config.get('signal_repetitions', SINGAL_REPETITIONS) switches_and_lights = core.devices() switches = [] for switch in switches_and_lights: if not switch.methods(tellcore_constants.TELLSTICK_DIM): switches.append( TellstickSwitchDevice(switch, signal_repetitions)) def _device_event_callback(id_, method, data, cid): """ Called from the TelldusCore library to update one device """ for switch_device in switches: if switch_device.tellstick_device.id == id_: switch_device.update_ha_state() break callback_id = core.register_device_event(_device_event_callback) def unload_telldus_lib(event): """ Un-register the callback bindings """ if callback_id is not None: core.unregister_callback(callback_id) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, unload_telldus_lib) add_devices_callback(switches) class TellstickSwitchDevice(ToggleEntity): """ Represents a Tellstick switch. """ last_sent_command_mask = (tellcore_constants.TELLSTICK_TURNON | tellcore_constants.TELLSTICK_TURNOFF) def __init__(self, tellstick_device, signal_repetitions): self.tellstick_device = tellstick_device self.state_attr = {ATTR_FRIENDLY_NAME: tellstick_device.name} self.signal_repetitions = signal_repetitions @property def should_poll(self): """ Tells Home Assistant not to poll this entity. """ return False @property def name(self): """ Returns the name of the switch if any. """ return self.tellstick_device.name @property def state_attributes(self): """ Returns optional state attributes. """ return self.state_attr @property def is_on(self): """ True if switch is on. """ last_command = self.tellstick_device.last_sent_command( self.last_sent_command_mask) return last_command == tellcore_constants.TELLSTICK_TURNON def turn_on(self, **kwargs): """ Turns the switch on. """ for _ in range(self.signal_repetitions): self.tellstick_device.turn_on() self.update_ha_state() def turn_off(self, **kwargs): """ Turns the switch off. """ for _ in range(self.signal_repetitions): self.tellstick_device.turn_off() self.update_ha_state()
Python
0
@@ -118,300 +118,141 @@ s.%0A%0A -Because the tellstick sends its actions via radio and from most%0Areceivers it's impossible to know if the signal w +For more details about this platform, ple as +e re -ceived or not.%0ATherefore you can configure the switch to try to send each signal repeatedly%0Awith the config parameter signal_repetitions (default is 1).%0Asignal_repetitions: 3 +fer to the documentation at%0Ahttps://home-assistant.io/components/switch.tellstick.html %0A%22%22%22
775a86179c321ac3cab73c9556edaa798f4273fd
add PassiveTotal OneShotAnalytics
plugins/analytics/passive_total.py
plugins/analytics/passive_total.py
Python
0.000001
@@ -0,0 +1,1545 @@ +import requests%0Aimport json%0Afrom datetime import datetime%0A%0Afrom core.analytics import OneShotAnalytics%0Afrom core.observables import Observable, Hostname%0A%0A%0Aclass PassiveTotal(OneShotAnalytics):%0A default_values = %7B%0A %22name%22: %22PassiveTotal Passive DNS%22,%0A %22description%22: %22Perform passive DNS (reverse) lookups on domain names or IP addresses.%22%0A %7D%0A%0A settings = %7B%0A %22passivetotal_api_key%22: %7B%0A %22name%22: %22PassiveTotal API Key%22,%0A %22description%22: %22API Key provided by PassiveTotal.%22%0A %7D%0A %7D%0A%0A ACTS_ON = %5B%22Hostname%22, %22Ip%22%5D%0A API_URL = 'https://api.passivetotal.org/api/v1/passive'%0A%0A @staticmethod%0A def analyze(observable, settings):%0A links = set()%0A%0A params = %7B%0A 'api_key': settings%5B'passivetotal_api_key'%5D,%0A 'query': observable.value%0A %7D%0A%0A r = requests.get(PassiveTotal.API_URL, params=params)%0A r.raise_for_status()%0A result = json.loads(r.content)%0A%0A for record in result%5B'results'%5D%5B'records'%5D:%0A first_seen = datetime.strptime(record%5B'firstSeen'%5D, %22%25Y-%25m-%25d %25H:%25M:%25S%22)%0A last_seen = datetime.strptime(record%5B'lastSeen'%5D, %22%25Y-%25m-%25d %25H:%25M:%25S%22)%0A%0A new = Observable.add_text(record%5B'resolve'%5D)%0A if isinstance(observable, Hostname):%0A links.update(observable.link_to(new, %22A record%22, 'PassiveTotal', first_seen, last_seen))%0A else:%0A links.update(new.link_to(observable, %22A record%22, 'PassiveTotal', first_seen, last_seen))%0A%0A return links%0A
24aad104e2cdc8e37e66c4d87401b30619c8cd97
Fix code style
chainer/functions/softplus.py
chainer/functions/softplus.py
import numpy

from chainer import cuda
from chainer import function
from chainer.utils import type_check


class Softplus(function.Function):

    """Softplus function."""

    def __init__(self, beta=1.0):
        self.beta = numpy.float32(beta)
        self.beta_inv = numpy.float32(1.0 / beta)

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)
        x_type, = in_types
        type_check.expect(
            x_type.dtype == numpy.float32,
        )

    def check_type_backward(self, in_types, out_types):
        type_check.expect(
            in_types.size() == 1,
            out_types.size() == 1,
        )
        x_type, = in_types
        g_type, = out_types
        type_check.expect(
            g_type.dtype == numpy.float32,
            x_type.shape == g_type.shape,
        )

    def forward_cpu(self, inputs):
        x, = inputs
        # y = log(1 + exp(beta * x)) / beta
        bx = self.beta * x
        y = (numpy.fmax(bx, numpy.float32(0.0)) +
             numpy.log1p(numpy.exp(-numpy.fabs(bx)))) * self.beta_inv
        return y,

    def forward_gpu(self, inputs):
        x, = inputs
        y = cuda.empty(x.shape)
        cuda.elementwise(
            'float* y, const float* x, float beta, float beta_inv',
            '''
            float bx = beta * x[i];
            y[i] = (max(bx, 0.f) + log1pf(__expf(-fabsf(bx)))) * beta_inv;
            ''',
            'softplus'
        )(y, x, self.beta, self.beta_inv)
        return y,

    def backward_cpu(self, inputs, grads):
        x, = inputs
        g, = grads
        return (1 - 1 / (1 + numpy.exp(self.beta * x))) * g,

    def backward_gpu(self, inputs, grads):
        x, = inputs
        g, = grads
        gx = cuda.empty(x.shape, numpy.float32)
        cuda.elementwise(
            'float* gx, const float* x, const float* g, float beta',
            'gx[i] = (1.f - 1.f / (1.f + __expf(beta * x[i]))) * g[i];',
            'softplus_backward'
        )(gx, x, g, self.beta)
        return gx,


def softplus(x, beta=1.0):
    """Elementwise softplus function.

    This function is expressed as
    :math:`f(x) = \\frac{1}{\\beta}\\log(1 + \\exp(\\beta x))`, where
    :math:`\\beta` is a parameter.

    Args:
        x (~chainer.Variable): Input variable.
        beta (float): Parameter :math:`\\beta`.

    Returns:
        ~chainer.Variable: Output variable.

    """
    return Softplus(beta=beta)(x)
Python
0.000169
@@ -2200,20 +2200,19 @@ ))%60, +%0A where -%0A :mat
2bd913c6cad94f3bc244d92a1ae1caffda82dcf8
Add humble plugin
plugins/humble.py
plugins/humble.py
Python
0
@@ -0,0 +1,1370 @@ +import lxml.html%0Aimport requests%0A%0Afrom smartbot import utils%0A%0Aclass Plugin:%0A def __call__(self, bot):%0A bot.on_respond(r%22humble( weekly)?( sale)?%22, self.on_respond)%0A bot.on_help(%22humble%22, self.on_help)%0A%0A def on_respond(self, bot, msg, reply):%0A page = requests.get(%22https://www.humblebundle.com/weekly%22)%0A tree = lxml.html.fromstring(page.text)%0A try:%0A title = tree.cssselect(%22title%22)%5B0%5D.text_content().strip()%0A clock = tree.cssselect(%22#heading-time-remaining .mini-digit-holder%22)%5B0%5D%0A c0 = clock.cssselect(%22.c0 .heading-num%22)%5B0%5D.text_content()%0A c1 = clock.cssselect(%22.c1 .heading-num%22)%5B0%5D.text_content()%0A c2 = clock.cssselect(%22.c2 .heading-num%22)%5B0%5D.text_content()%0A c3 = clock.cssselect(%22.c3 .heading-num%22)%5B0%5D.text_content()%0A c4 = clock.cssselect(%22.c4 .heading-num%22)%5B0%5D.text_content()%0A c5 = clock.cssselect(%22.c5 .heading-num%22)%5B0%5D.text_content()%0A c6 = clock.cssselect(%22.c6 .heading-num%22)%5B0%5D.text_content()%0A c7 = clock.cssselect(%22.c7 .heading-num%22)%5B0%5D.text_content()%0A reply(%22%7B0%7D - %7B1%7D%7B2%7D:%7B3%7D%7B4%7D:%7B5%7D%7B6%7D:%7B7%7D%7B8%7D left%22.format(title, c0, c1, c2, c3, c4, c5, c6, c7))%0A except IndexError:%0A reply(%22No weekly sale.%22)%0A%0A def on_help(self, bot, msg, reply):%0A reply(%22Syntax: humble %5Bweekly%5D %5Bdeal%5D%22)%0A
bd97873ea6b3b1de572027a8a13b895c84bdf7bf
version bump for 0.25.1.1.
oneflow/__init__.py
oneflow/__init__.py
VERSION = '0.25.1'
Python
0
@@ -11,11 +11,13 @@ '0.25.1 +.1 '%0A%0A
4d3dd9752681f1194241e6bf5c90dda34b600014
version bump for 0.88.8.
oneflow/__init__.py
oneflow/__init__.py
VERSION = '0.88.7'
Python
0
@@ -14,8 +14,8 @@ .88. -7 +8 '%0A%0A
6076340cf3b9f186b944423dd8d64d8d5d29b268
version bump for 0.25.11.21.
oneflow/__init__.py
oneflow/__init__.py
VERSION = '0.25.11.20'
Python
0
@@ -14,12 +14,12 @@ .25.11.2 -0 +1 '%0A%0A
48951aa7c2c82ca03e801e1bfce09be5492ce27b
Add python_analytics package
python_analytics/__init__.py
python_analytics/__init__.py
Python
0.000031
@@ -0,0 +1,244 @@ +import logging%0Atry: # pragma: no cover%0A from ._version import full_version as __version__%0Aexcept ImportError: # pragma: no cover%0A __version__ = %22not-built%22%0A%0A%0Alogger = logging.getLogger(__name__)%0Alogger.addHandler(logging.NullHandler())%0A
9fa9b339cb0da0ae6a4318288afd8c75e6890e4e
prepare for provider
flask_oauthlib/provider.py
flask_oauthlib/provider.py
Python
0
@@ -0,0 +1,141 @@ +# coding: utf-8%0A%22%22%22%0AFlask-OAuthlib%0A--------------%0A%0AImplemnts OAuth2 provider support for Flask.%0A%0A:copyright: (c) 2013 by Hsiaoming Yang.%0A%22%22%22%0A
f8e64d26c86e84ce9efe36db1155fdf5a4c6d5f8
Add example to show off icons.
flexx/ui/examples/icons.py
flexx/ui/examples/icons.py
Python
0
@@ -0,0 +1,1244 @@ +# doc-export: Icons%0A%0A%22%22%22%0AThis example demonstrates the use of icons in Flexx.%0A%22%22%22%0A%0Aimport os%0A%0Aimport flexx%0Afrom flexx import app, ui%0A%0A# todo: support icons in widgets like Button, TabWidget, etc.%0A# todo: support fontawesome icons%0A%0A%0Aclass Icons(ui.Widget):%0A %0A def init(self):%0A %0A ui.Button(text='Not much to see here yet')%0A%0A%0Aif __name__ == '__main__':%0A %0A fname = os.path.join(os.path.dirname(flexx.__file__), 'resources', 'flexx.ico')%0A black_png = ('iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAIUlEQVR42mNgY'%0A 'GD4TyEeTAacOHGCKDxqwKgBtDVgaGYmAD/v6XAYiQl7AAAAAElFTkSuQmCC')%0A %0A # Select application icon. Can be a url, a relative url to a shared asset,%0A # a base64 encoded image, or a local filename. Note that the local filename%0A # works for setting the aplication icon in a desktop-like app, but not for%0A # a web app. File types can be ico or png.%0A %0A icon = None # use default%0A # icon = 'https://assets-cdn.github.com/favicon.ico'%0A # icon = app.assets.add_shared_asset('ico.icon', open(fname, 'rb'))%0A # icon = 'data:image/png;base64,' + black_png%0A # icon = fname%0A %0A m = app.App(Icons, title='Icon demo', icon=icon).launch('firefox-browser')%0A app.start()%0A