column        dtype          length range / values
commit        stringlengths  40 to 40
subject       stringlengths  1 to 3.25k
old_file      stringlengths  4 to 311
new_file      stringlengths  4 to 311
old_contents  stringlengths  0 to 26.3k
lang          stringclasses  3 values
proba         float64        0 to 1
diff          stringlengths  0 to 7.82k
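
Each record below carries these eight fields. As a minimal sketch only (not an official loader), assuming the dump was exported from a Hugging Face-style dataset: the dataset path used here is a placeholder, load_dataset and unquote are the standard datasets/urllib calls, and the percent-decoding step applies only if the stored diff values keep the %0A/%22 escaping visible in this dump.

# Minimal sketch, not an official loader. Assumptions: the dump behaves like a
# Hugging Face dataset, and "user/commit-diffs" is a placeholder path, not the
# real dataset name.
from urllib.parse import unquote

from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")  # hypothetical path

for row in ds:
    # Fields per the schema above: commit (40-char SHA), subject, old_file,
    # new_file, old_contents (empty for newly added files), lang, proba, diff.
    if row["lang"] != "Python":
        continue
    # The diff cells in this dump appear percent-encoded (%0A, %22, ...); if the
    # stored values keep that encoding, unquote() recovers the raw diff text.
    diff_text = unquote(row["diff"])
    added = sum(1 for line in diff_text.splitlines() if line.startswith("+"))
    print(row["commit"][:8], row["new_file"], f"(+{added} lines):", row["subject"])
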
5e574a24d95e686bc2592af439e148e68036c61d
Add unit test for nova connector
tests/unit/cloud/clouds/nova_test.py
tests/unit/cloud/clouds/nova_test.py
Python
0
@@ -0,0 +1,1612 @@ +# -*- coding: utf-8 -*-%0A'''%0A :codeauthor: :email:%60Bo Maryniuk %[email protected]%3E%60%0A'''%0A%0A# Import Python libs%0Afrom __future__ import absolute_import%0A%0A# Import Salt Testing Libs%0Afrom salttesting import TestCase%0Afrom salt.cloud.clouds import nova%0Afrom salttesting.mock import MagicMock, patch%0Afrom tests.unit.cloud.clouds import _preferred_ip%0A%0A%0Aclass NovaTestCase(TestCase):%0A '''%0A Test case for openstack%0A '''%0A PRIVATE_IPS = %5B'0.0.0.0', '1.1.1.1', '2.2.2.2'%5D%0A%0A @patch('salt.cloud.clouds.nova.show_instance',%0A MagicMock(return_value=%7B'state': 'ACTIVE',%0A 'public_ips': %5B%5D,%0A 'addresses': %5B%5D,%0A 'private_ips': PRIVATE_IPS%7D))%0A @patch('salt.cloud.clouds.nova.rackconnect', MagicMock(return_value=False))%0A @patch('salt.cloud.clouds.nova.rackconnectv3', MagicMock(return_value=%7B'mynet': %5B'1.1.1.1'%5D%7D))%0A @patch('salt.cloud.clouds.nova.cloudnetwork', MagicMock(return_value=False))%0A @patch('salt.cloud.clouds.nova.managedcloud', MagicMock(return_value=False))%0A @patch('salt.cloud.clouds.nova.preferred_ip', _preferred_ip(PRIVATE_IPS, %5B'0.0.0.0'%5D))%0A @patch('salt.cloud.clouds.nova.ssh_interface', MagicMock(return_value='public_ips'))%0A def test_query_node_data_filter_preferred_ip_addresses(self):%0A '''%0A Test if query node data is filtering out unpreferred IP addresses.%0A '''%0A nova.__opts__ = %7B%7D%0A%0A vm = %7B'name': None%7D%0A data = MagicMock()%0A data.public_ips = %5B%5D%0A%0A assert nova._query_node_data(vm, data).public_ips == %5B'0.0.0.0'%5D%0A
2b8ff3b38e4f8bdc9da30c7978062174b0259f76
Add lc0068_text_justification.py
lc0068_text_justification.py
lc0068_text_justification.py
Python
0.000002
@@ -0,0 +1,2240 @@ +%22%22%22Leetcode 68. Text Justification%0AHard%0A%0AURL: https://leetcode.com/problems/text-justification/%0A%0AGiven an array of words and a width maxWidth, format the text such that each line has%0Aexactly maxWidth characters and is fully (left and right) justified.%0A%0AYou should pack your words in a greedy approach; that is, pack as many words as you%0Acan in each line. Pad extra spaces ' ' when necessary so that each line has exactly%0AmaxWidth characters.%0A%0AExtra spaces between words should be distributed as evenly as possible. If the number%0Aof spaces on a line do not divide evenly between words, the empty slots on the left%0Awill be assigned more spaces than the slots on the right.%0A%0AFor the last line of text, it should be left justified and no extra space is inserted%0Abetween words.%0A%0ANote:%0A- A word is defined as a character sequence consisting of non-space characters only.%0A- Each word's length is guaranteed to be greater than 0 and not exceed maxWidth.%0A- The input array words contains at least one word.%0A%0AExample 1:%0AInput:%0Awords = %5B%22This%22, %22is%22, %22an%22, %22example%22, %22of%22, %22text%22, %22justification.%22%5D%0AmaxWidth = 16%0AOutput:%0A%5B%0A %22This is an%22,%0A %22example of text%22,%0A %22justification. %22%0A%5D%0A%0AExample 2:%0AInput:%0Awords = %5B%22What%22,%22must%22,%22be%22,%22acknowledgment%22,%22shall%22,%22be%22%5D%0AmaxWidth = 16%0AOutput:%0A%5B%0A %22What must be%22,%0A %22acknowledgment %22,%0A %22shall be %22%0A%5D%0AExplanation: Note that the last line is %22shall be %22 instead of %22shall be%22,%0A because the last line must be left-justified instead of fully-justified.%0A Note that the second line is also left-justified becase it contains only%0A one word.%0A%0AExample 3:%0AInput:%0Awords = %5B%22Science%22,%22is%22,%22what%22,%22we%22,%22understand%22,%22well%22,%22enough%22,%22to%22,%22explain%22,%0A %22to%22,%22a%22,%22computer.%22,%22Art%22,%22is%22,%22everything%22,%22else%22,%22we%22,%22do%22%5D%0AmaxWidth = 20%0AOutput:%0A%5B%0A %22Science is what we%22,%0A %22understand well%22,%0A %22enough to explain to%22,%0A %22a computer. Art is%22,%0A %22everything else we%22,%0A %22do %22%0A%5D%0A%22%22%22%0A%0Aclass Solution(object):%0A def fullJustify(self, words, maxWidth):%0A %22%22%22%0A :type words: List%5Bstr%5D%0A :type maxWidth: int%0A :rtype: List%5Bstr%5D%0A %22%22%22%0A pass%0A%0A%0Adef main():%0A pass%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
8fe5e768f20abfdd790870075950b6537c5cad6a
Add class containing test state and report + print methods
ptest.py
ptest.py
Python
0
@@ -0,0 +1,1191 @@ +#!/usr/bin/python3%0A%0Afrom sys import exit%0A%0Aclass Ptest(object):%0A%0A def __init__(self, module_name):%0A self.module_name = module_name%0A self.passed = 0%0A self.failed = 0%0A%0A print('%5CnRunning tests for module %22', module_name, '%22', sep='')%0A%0A def report(self, test_name, test_result):%0A if test_result not in (True, False):%0A print('Invalid report argument for test %22', test_name, '%22', sep='')%0A exit(1)%0A%0A NORMAL = '%5Cx1B%5B0m'%0A RED = '%5Cx1B%5B31m'%0A GREEN = '%5Cx1B%5B32m'%0A%0A if test_result:%0A self.passed += 1%0A print('%5B', GREEN, 'PASSED', NORMAL, '%5D ', test_name, sep='')%0A else:%0A self.failed += 1%0A print('%5B', RED, 'FAILED', NORMAL, '%5D ', test_name, sep='')%0A%0A def print_statistics(self):%0A test_count = self.passed + self.failed%0A if test_count == 0:%0A print('No tests yet...')%0A return%0A pass_rate = 0%0A if self.passed != 0:%0A pass_rate = round(float(self.passed) / float(test_count), 3) * 100%0A print('Passed: ', self.passed, '/', test_count,%0A ' (', pass_rate, '%25)', sep='', end='%5Cn%5Cn')%0A%0A
1669f9a3a9fabc2ded8fa92542dca65036c201e5
Create sizes.py
plantcv/plantcv/visualize/sizes.py
plantcv/plantcv/visualize/sizes.py
Python
0.000001
@@ -0,0 +1,2512 @@ +# Visualize an annotated image with object sizes%0A%0Aimport os%0Aimport cv2%0Aimport random%0Aimport numpy as np%0Afrom plantcv.plantcv import params%0Afrom plantcv.plantcv import plot_image%0Afrom plantcv.plantcv import print_image%0Afrom plantcv.plantcv import find_objects%0Afrom plantcv.plantcv import color_palette%0A%0A%0Adef sizes(img, mask, num_objects=100):%0A %22%22%22 Visualize an RGB image in all potential colorspaces%0A%0A Inputs:%0A img = RGB or grayscale image data%0A mask = Binary mask made from selected contours%0A num_objects = Optional parameter to limit the number of objects that will get annotated.%0A%0A Returns:%0A plotting_img = Plotting image containing the original image and L,A,B,H,S, and V colorspaces%0A%0A :param img: numpy.ndarray%0A :param mask: numpy.ndarray%0A :param num_objects: int%0A :return plotting_img: numpy.ndarray%0A %22%22%22%0A%0A%0A plotting_img = np.copy(img)%0A%0A # Store debug%0A debug = params.debug%0A params.debug = None%0A%0A id_objects, obj_hierarchy = find_objects(img=img, mask=mask)%0A rand_color = color_palette(num=len(id_objects), saved=False)%0A random.shuffle(rand_color)%0A%0A label_coord_x = %5B%5D%0A label_coord_y = %5B%5D%0A area_vals = %5B%5D%0A%0A for i, cnt in enumerate(id_objects):%0A # Calculate geodesic distance, divide by two since cv2 seems to be taking the perimeter of the contour%0A area_vals.append(cv2.contourArea(cnt))%0A cv2.drawContours(plotting_img, id_objects, i, rand_color%5Bi%5D, thickness=-1)%0A # Store coordinates for labels%0A label_coord_x.append(id_objects%5Bi%5D%5B0%5D%5B0%5D%5B0%5D)%0A label_coord_y.append(id_objects%5Bi%5D%5B0%5D%5B0%5D%5B1%5D)%0A%0A segment_ids = %5B%5D%0A # Put labels of length%0A for c, value in enumerate(area_vals):%0A text = %22%7B:.0f%7D%22.format(value)%0A w = label_coord_x%5Bc%5D%0A h = label_coord_y%5Bc%5D%0A if c %3C int(num_objects):%0A cv2.putText(img=plotting_img, text=text, org=(w, h), fontFace=cv2.FONT_HERSHEY_SIMPLEX,%0A fontScale=params.text_size, color=(150, 150, 150), thickness=params.text_thickness)%0A else:%0A print(%22There were %22 + str(len(area_vals)-num_objects) + %22 objects not annotated.%22)%0A break%0A%0A # Auto-increment device%0A params.device += 1%0A # Reset debug mode%0A params.debug = debug%0A%0A if params.debug == 'print':%0A print_image(plotting_img, os.path.join(params.debug_outdir, str(params.device) + '_object_sizes.png'))%0A elif params.debug == 'plot':%0A plot_image(plotting_img)%0A%0A return plotting_img%0A
e03ecf68055e820106172413967713f98f7905ac
copy api_util to client to make it self-contained
net/client/api_util.py
net/client/api_util.py
Python
0.000001
@@ -0,0 +1,142 @@ +import simplejson%0A%0Adef json2python(json):%0A%09try:%0A%09%09return simplejson.loads(json)%0A%09except:%0A%09%09pass%0A%09return None%0A%0Apython2json = simplejson.dumps%0A%0A
addc7f33af75070333369a01c71e8acd231376ba
Add FilterNotifier for keyword based notification filtering
reconbot/notifiers/filter.py
reconbot/notifiers/filter.py
Python
0
@@ -0,0 +1,530 @@ +class FilterNotifier:%0A %22%22%22 Filters notifications based on their type or keywords %22%22%22%0A def __init__(self, notifier, keywords=%5B%5D, ignore=%5B%5D):%0A self.notifier = notifier%0A self.keywords = keywords%0A self.ignore = ignore%0A%0A def notify(self, text, options=%7B%7D):%0A if len(self.ignore) %3E 0 and any(keyword in text for keyword in self.ignore):%0A return False%0A%0A if len(self.keywords) == 0 or any(keyword in text for keyword in self.keywords):%0A self.notifier.notify(text, options)%0A
b09b11de1a025196cceb1c8fd71bda5515437a10
Add max31855 example driver
sw/examples/drivers/max31855.py
sw/examples/drivers/max31855.py
Python
0
@@ -0,0 +1,1403 @@ +#!/usr/bin/env python%0A%0A#%0A# SPI example (using the STM32F407 discovery board)%0A#%0A%0Aimport sys%0Aimport time%0Aimport ctypes%0Afrom silta import stm32f407%0A%0Adef bytes_to_int(byte_list):%0A num = 0%0A%0A for byte in range(len(byte_list)):%0A num += byte_list%5Bbyte%5D %3C%3C ((len(byte_list) - 1 - byte) * 8)%0A%0A return num%0A%0Aclass MAX31855(object):%0A def __init__(self, bridge, cs_pin):%0A self.bridge = bridge%0A self.cs_pin = cs_pin%0A self.last_fault = 0%0A%0A # Set the CS line as an output%0A self.bridge.gpiocfg(self.cs_pin, 'output')%0A%0A # Configure ~1.05MHz clock with CPOL=0,CPHA=0%0A self.bridge.spicfg(10500000, 0, 0)%0A%0A # CS is active low in this case%0A self.bridge.gpio(self.cs_pin, 1)%0A%0A def read(self):%0A # Read 32 bits%0A txbuff = %5B0x00, 0x00, 0x00, 0x00%5D%0A%0A rval = self.bridge.spi(self.cs_pin, txbuff)%0A%0A if isinstance(rval, list):%0A reg = bytes_to_int(rval)%0A%0A fault = ((reg %3E%3E 16) & 1) == 1%0A%0A if fault:%0A temperature = None%0A last_fault = reg & 0x7%0A else:%0A temperature = ctypes.c_int16((reg %3E%3E 16) & 0xFFFC).value %3E%3E 2%0A temperature = temperature * 0.25%0A%0A return temperature%0A else:%0A print('SPI Error: ' + str(rval))%0A return None%0A%0A def get_last_fault(self):%0A return last_fault%0A
a15e363718ab41c5e02b9eaa919fb689cd266af6
Add common module for our tests
nose2/tests/_common.py
nose2/tests/_common.py
Python
0
@@ -0,0 +1,994 @@ +%22%22%22Common functionality.%22%22%22%0D%0Aimport os.path%0D%0Aimport tempfile%0D%0Aimport shutil%0D%0Aimport sys%0D%0A%0D%0Aclass TestCase(unittest2.TestCase):%0D%0A %22%22%22TestCase extension.%0D%0A %0D%0A If the class variable _RUN_IN_TEMP is True (default: False), tests will be%0D%0A performed in a temporary directory, which is deleted afterwards.%0D%0A %22%22%22%0D%0A _RUN_IN_TEMP = False%0D%0A%0D%0A def setUp(self):%0D%0A super(TestCase, self).setUp()%0D%0A %0D%0A if self._RUN_IN_TEMP:%0D%0A self.__orig_dir = os.getcwd()%0D%0A work_dir = self.__work_dir = tempfile.mkdtemp()%0D%0A os.chdir(self.__work_dir)%0D%0A # Make sure it's possible to import modules from current directory%0D%0A sys.path.insert(0, work_dir)%0D%0A%0D%0A def tearDown(self):%0D%0A super(TestCase, self).tearDown()%0D%0A%0D%0A if self._RUN_IN_TEMP:%0D%0A os.chdir(self.__orig_dir)%0D%0A shutil.rmtree(self.__work_dir, ignore_errors=True)%0D%0A%0D%0A%0D%0Aclass _FakeEventBase(object):%0D%0A %22%22%22Baseclass for fake Events.%22%22%22%0D%0A
b802f1d5453840ea4b16113d5d03f6c27224ce0c
Add try/except example.
examples/try.py
examples/try.py
Python
0
@@ -0,0 +1,904 @@ +# Honeybadger for Python%0A# https://github.com/honeybadger-io/honeybadger-python%0A#%0A# This file is an example of how to catch an exception in Python and report it%0A# to Honeybadger without re-raising. To run this example:%0A%0A# $ pip install honeybadger%0A# $ HONEYBADGER_API_KEY=your-api-key python try.py%0Afrom honeybadger import honeybadger%0A%0A# Uncomment the following line or use the HONEYBADGER_API_KEY environment%0A# variable to configure the API key for your Honeybadger project:%0A# honeybadger.configure(api_key='your api key')%0A%0Aimport logging%0Alogging.getLogger('honeybadger').addHandler(logging.StreamHandler())%0A%0Adef method_two():%0A mydict = dict(a=1)%0A try:%0A print mydict%5B'b'%5D%0A except KeyError, exc:%0A honeybadger.notify(exc, context=%7B'foo': 'bar'%7D)%0A%0Adef method_one():%0A method_two()%0A%0Aif __name__ == '__main__':%0A honeybadger.set_context(user_email=%[email protected]%22)%0A method_one()%0A
6bbef11c982ddee4981318e6bca9fa85610f1cc8
Increase revision content length
src/ggrc/migrations/versions/20170112112254_177a979b230a_update_revision_content_field.py
src/ggrc/migrations/versions/20170112112254_177a979b230a_update_revision_content_field.py
Python
0
@@ -0,0 +1,949 @@ +# Copyright (C) 2017 Google Inc.%0A# Licensed under http://www.apache.org/licenses/LICENSE-2.0 %3Csee LICENSE file%3E%0A%0A%22%22%22Update revision content field.%0A%0ACreate Date: 2017-01-12 11:22:54.998164%0A%22%22%22%0A# disable Invalid constant name pylint warning for mandatory Alembic variables.%0A# pylint: disable=invalid-name%0A%0A%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import mysql%0Afrom alembic import op%0A%0A# revision identifiers, used by Alembic.%0Arevision = '177a979b230a'%0Adown_revision = '275cd0dcaea'%0A%0A%0Adef upgrade():%0A %22%22%22Upgrade database schema and/or data, creating a new revision.%22%22%22%0A op.alter_column(%0A %22revisions%22,%0A %22content%22,%0A existing_type=sa.Text(),%0A type_=mysql.LONGTEXT,%0A nullable=False%0A )%0A%0A%0Adef downgrade():%0A %22%22%22Downgrade database schema and/or data back to the previous revision.%22%22%22%0A op.alter_column(%0A %22revisions%22,%0A %22content%22,%0A existing_type=mysql.LONGTEXT,%0A type_=sa.Text(),%0A nullable=False%0A )%0A
eb4fbb28ed06b223282b02bb31f5f91e1eeb3f9f
Add RenormalizeWeight callback
seya/callbacks.py
seya/callbacks.py
Python
0
@@ -0,0 +1,511 @@ +import numpy as np%0Afrom keras.callbacks import Callback%0A%0A%0Aclass RenormalizeWeight(Callback):%0A def __init__(self, W):%0A Callback.__init__(self)%0A self.W = W%0A self.W_shape = self.W.get_value().shape%0A%0A def on_batch_start(self, batch, logs=%7B%7D):%0A W = self.W.get_value()%0A if self.W_shape == 4:%0A W = W.reshape((self.W_shape%5B0%5D, -1))%0A norm = np.sqrt((W**2).sum(axis=-1))%0A W /= norm%5B:, None%5D%0A W = W.reshape(self.W_shape)%0A self.W.set_value(W)%0A
1f1d2df36a16b80c770974a9ac2bf48ccbebc3ab
add callable list
jasily/collection/funcs.py
jasily/collection/funcs.py
Python
0.000002
@@ -0,0 +1,773 @@ +# -*- coding: utf-8 -*-%0A#%0A# Copyright (c) 2018~2999 - Cologler %[email protected]%3E%0A# ----------%0A#%0A# ----------%0A%0Afrom functools import partial%0A%0Aclass CallableList(list):%0A '''%0A a simple callable list.%0A '''%0A%0A def __call__(self):%0A ret = None%0A for func in self:%0A ret = func()%0A return ret%0A%0A def append_func(self, func, *args, **kwargs):%0A '''%0A append func with given arguments and keywords.%0A '''%0A wraped_func = partial(func, *args, **kwargs)%0A self.append(wraped_func)%0A%0A def insert_func(self, index, func, *args, **kwargs):%0A '''%0A insert func with given arguments and keywords.%0A '''%0A wraped_func = partial(func, *args, **kwargs)%0A self.insert(index, wraped_func)%0A
a44821de91b5ac3a082b2253ec8e695a69ccf2be
Fix GlobalStepTests to specify the collection
tensorflow/python/training/training_util_test.py
tensorflow/python/training/training_util_test.py
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for training_util.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.training import monitored_session from tensorflow.python.training import training_util @test_util.run_v1_only('b/120545219') class GlobalStepTest(test.TestCase): def _assert_global_step(self, global_step, expected_dtype=dtypes.int64): self.assertEqual('%s:0' % ops.GraphKeys.GLOBAL_STEP, global_step.name) self.assertEqual(expected_dtype, global_step.dtype.base_dtype) self.assertEqual([], global_step.get_shape().as_list()) def test_invalid_dtype(self): with ops.Graph().as_default() as g: self.assertIsNone(training_util.get_global_step()) variables.Variable( 0.0, trainable=False, dtype=dtypes.float32, name=ops.GraphKeys.GLOBAL_STEP) self.assertRaisesRegex(TypeError, 'does not have integer type', training_util.get_global_step) self.assertRaisesRegex(TypeError, 'does not have integer type', training_util.get_global_step, g) def test_invalid_shape(self): with ops.Graph().as_default() as g: self.assertIsNone(training_util.get_global_step()) variables.VariableV1( [0], trainable=False, dtype=dtypes.int32, name=ops.GraphKeys.GLOBAL_STEP) self.assertRaisesRegex(TypeError, 'not scalar', training_util.get_global_step) self.assertRaisesRegex(TypeError, 'not scalar', training_util.get_global_step, g) def test_create_global_step(self): self.assertIsNone(training_util.get_global_step()) with ops.Graph().as_default() as g: global_step = training_util.create_global_step() self._assert_global_step(global_step) self.assertRaisesRegex(ValueError, 'already exists', training_util.create_global_step) self.assertRaisesRegex(ValueError, 'already exists', training_util.create_global_step, g) self._assert_global_step(training_util.create_global_step(ops.Graph())) def test_get_global_step(self): with ops.Graph().as_default() as g: self.assertIsNone(training_util.get_global_step()) variables.VariableV1( 0, trainable=False, dtype=dtypes.int32, name=ops.GraphKeys.GLOBAL_STEP) self._assert_global_step( training_util.get_global_step(), expected_dtype=dtypes.int32) self._assert_global_step( training_util.get_global_step(g), expected_dtype=dtypes.int32) def test_get_or_create_global_step(self): with ops.Graph().as_default() as g: self.assertIsNone(training_util.get_global_step()) self._assert_global_step(training_util.get_or_create_global_step()) self._assert_global_step(training_util.get_or_create_global_step(g)) @test_util.run_v1_only('b/120545219') class GlobalStepReadTest(test.TestCase): def test_global_step_read_is_none_if_there_is_no_global_step(self): with 
ops.Graph().as_default(): self.assertIsNone(training_util._get_or_create_global_step_read()) training_util.create_global_step() self.assertIsNotNone(training_util._get_or_create_global_step_read()) def test_reads_from_cache(self): with ops.Graph().as_default(): training_util.create_global_step() first = training_util._get_or_create_global_step_read() second = training_util._get_or_create_global_step_read() self.assertEqual(first, second) def test_reads_before_increments(self): with ops.Graph().as_default(): training_util.create_global_step() read_tensor = training_util._get_or_create_global_step_read() inc_op = training_util._increment_global_step(1) inc_three_op = training_util._increment_global_step(3) with monitored_session.MonitoredTrainingSession() as sess: read_value, _ = sess.run([read_tensor, inc_op]) self.assertEqual(0, read_value) read_value, _ = sess.run([read_tensor, inc_three_op]) self.assertEqual(1, read_value) read_value = sess.run(read_tensor) self.assertEqual(4, read_value) if __name__ == '__main__': test.main()
Python
0.000069
@@ -919,58 +919,8 @@ ops%0A -from tensorflow.python.framework import test_util%0A from @@ -959,16 +959,16 @@ riables%0A + from ten @@ -1119,46 +1119,8 @@ l%0A%0A%0A -@test_util.run_v1_only('b/120545219')%0A clas @@ -1584,16 +1584,18 @@ Variable +V1 (%0A @@ -1694,32 +1694,83 @@ Keys.GLOBAL_STEP +,%0A collections=%5Bops.GraphKeys.GLOBAL_STEP%5D )%0A self.ass @@ -2276,32 +2276,83 @@ Keys.GLOBAL_STEP +,%0A collections=%5Bops.GraphKeys.GLOBAL_STEP%5D )%0A self.ass @@ -3391,16 +3391,67 @@ BAL_STEP +,%0A collections=%5Bops.GraphKeys.GLOBAL_STEP%5D )%0A @@ -3946,46 +3946,8 @@ )%0A%0A%0A -@test_util.run_v1_only('b/120545219')%0A clas
44b6b0ff5efc6d9fcda4f886640663b68e7d6c14
Add initial code for getting batting stats over a specified timeframe
pybaseball/league_batting_stats.py
pybaseball/league_batting_stats.py
Python
0
@@ -0,0 +1,1315 @@ +%0A%22%22%22%0ATODO%0Apull batting stats over specified time period %0Aallow option to get stats for full seasons instead of ranges%0A%22%22%22%0A%0Aimport requests%0Aimport pandas as pd%0Afrom bs4 import BeautifulSoup%0A%0Adef get_soup(start_dt, end_dt):%0A%09# get most recent standings if date not specified%0A%09if((start_dt is None) or (end_dt is None)):%0A%09%09print('Error: a date range needs to be specified')%0A%09%09return None%0A%09url = %22http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=b&lastndays=7&dates=fromandto&fromandto=%7B%7D.%7B%7D&level=mlb&franch=&stat=&stat_value=0%22.format(start_dt, end_dt)%0A%09s=requests.get(url).content%0A%09return BeautifulSoup(s, %22html.parser%22)%0A%0Adef get_table(soup):%0A%09table = soup.find_all('table')%5B0%5D%0A%09data = %5B%5D%0A%09headings = %5Bth.get_text() for th in table.find(%22tr%22).find_all(%22th%22)%5D%5B1:%5D%0A%09data.append(headings)%0A%09table_body = table.find('tbody')%0A%09rows = table_body.find_all('tr')%0A%09for row in rows:%0A%09 cols = row.find_all('td')%0A%09 cols = %5Bele.text.strip() for ele in cols%5D%0A%09 data.append(%5Bele for ele in cols if ele%5D)%0A%09data = pd.DataFrame(data)%0A%09data = data.rename(columns=data.iloc%5B0%5D)%0A%09data = data.reindex(data.index.drop(0))%0A%09return data%0A%0Adef batting_stats(start_dt=None, end_dt=None):%0A%09# retrieve html from baseball reference%0A%09soup = get_soup(start_dt, end_dt)%0A%09table = get_table(soup)%0A%09return table%0A%0A%0A
072423365ad1c03dd593f5b8528a7b60c0c9bee9
Add AuctionHouse table.
pydarkstar/tables/auction_house.py
pydarkstar/tables/auction_house.py
Python
0
@@ -0,0 +1,960 @@ +%22%22%22%0A.. moduleauthor:: Adam Gagorik %[email protected]%3E%0A%22%22%22%0Afrom sqlalchemy import Column, Integer, SmallInteger, String, text%0Afrom pydarkstar.tables.base import Base%0A%0Aclass AuctionHouse(Base):%0A __tablename__ = 'auction_house'%0A%0A id = Column(Integer, primary_key=True)%0A itemid = Column(SmallInteger, nullable=False, index=True, server_default=text(%22'0'%22))%0A stack = Column(Integer, nullable=False, server_default=text(%22'0'%22))%0A seller = Column(Integer, nullable=False, server_default=text(%22'0'%22))%0A seller_name = Column(String(15))%0A date = Column(Integer, nullable=False, server_default=text(%22'0'%22))%0A price = Column(Integer, nullable=False, server_default=text(%22'0'%22))%0A buyer_name = Column(String(15))%0A sale = Column(Integer, nullable=False, server_default=text(%22'0'%22))%0A sell_date = Column(Integer, nullable=False, server_default=text(%22'0'%22))%0A%0Aif __name__ == '__main__':%0A pass
f85f6ba07c47a6ccbd38a9e7bc2e9a2c69ebd09a
read sensor values from rpi
pythonLib/ArduinoMoistureSensor.py
pythonLib/ArduinoMoistureSensor.py
Python
0.000001
@@ -0,0 +1,177 @@ +import smbus%0Aimport time%0A%0Abus = smbus.SMBus(1)%0Aaddress = int(sys.argv%5B1%5D)%0A%0Adata = bus.read_i2c_block_data(address,0)%0A%0Afor i in range (0,6):%0A%09print (data%5B2*i%5D %3C%3C 8)+ data%5B2*i+1%5D%0A
47d7cfcd9db1a54e52532819895060527e1988b9
update qlcoder
qlcoder/scheme_study/functional.py
qlcoder/scheme_study/functional.py
Python
0.000001
@@ -0,0 +1,109 @@ +if __name__ == '__main__':%0A my_arr = %5BNone%5D * 7654321%0A for i in range(0, 7654321):%0A my_arr%5Bi%5D=i%0A
7618697cdb892388d7c5ddb731f5b9f138389ca4
add A4
A4/TestHashtable.py
A4/TestHashtable.py
Python
0.999996
@@ -0,0 +1,2077 @@ +#!/usr/bin/env python2%0A%0Afrom hashtable import Hashtable, LinkedList, hashFunction%0Aimport unittest%0Aimport collections%0A%0A%0Aclass TestHashtable(unittest.TestCase):%0A%0A def setUp(self):%0A buildings = %7B%0A %22CSCI%22 : %22McGlothlin-Street%22,%0A %22GSWS%22 : %22Tucker%22,%0A %22ENGL%22 : %22Tucker%22,%0A %22LING%22 : %22Tyler%22,%0A %22GERM%22 : %22Washington%22,%0A %7D%0A%0A def testWithoutFunction(self):%0A testingFunction = lambda key, numBuckets: sum(map(ord, key)) %25 numBuckets%0A q = Hashtable(testingFunction, 1000)%0A for key, value in buildings.items:%0A q%5Bkey%5D = value%0A for key, expected in buildings.items:%0A observed = q%5Bkey%5D%0A self.assertEquals(observed, expected, %22small hashtable without your hash function: value changed after being added!%5Cnkey:%7B%7D%5Cnexpected value:%7B%7D%5Cnobserved value:%7B%7D%22.format(key, value, q%5Bkey%5D))%0A%0A def testWithFunction(self):%0A q = Hashtable(hashFunction, 1000)%0A for key, value in buildings.items:%0A q%5Bkey%5D = value%0A for key, expected in buildings.items:%0A observed = q%5Bkey%5D%0A self.assertEquals(observed, expected, %22small hashtable with your hash function: value changed after being added! check __getitem__/__setitem__%5Cnkey:%7B%7D%5Cnexpected value:%7B%7D%5Cnobserved value:%7B%7D%22.format(key, value, q%5Bkey%5D))%0A%0A def testContains(self):%0A q = Hashtable(hashFunction, 1000)%0A for key, value in buildings.items:%0A q%5Bkey%5D = value%0A for key in buildings.keys:%0A self.assertIn(key, q, %22membership in small hashtable: %60in%60 keyword didn't work! check __contains__.%5Cnkey:%7B%7D%22.format(key,))%0A%0A def testLen(self):%0A q = Hashtable(hashFunction, 1000)%0A for key, value in buildings.items:%0A q%5Bkey%5D = value%0A self.assertLessEqual(len(q), len(buildings), %22length: %7B%7D items is too many! check __len__.%22.format(len(q)))%0A self.assertGreaterEqual(len(q), len(buildings), %22length: %7B%7D items is not enough! check __len__.%22.format(len(q)))%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
d635a60140c11c64db4ac887bc79396484bb55e3
Add model_utils.print_graph_layer_shapes to handle Graph models. Also handle Merge layers
keras/utils/model_utils.py
keras/utils/model_utils.py
from __future__ import print_function import numpy as np import theano def print_layer_shapes(model, input_shape): """ Utility function that prints the shape of the output at each layer. Arguments: model: An instance of models.Model input_shape: The shape of the input you will provide to the model. """ # This is to handle the case where a model has been connected to a previous # layer (and therefore get_input would recurse into previous layer's # output). if hasattr(model.layers[0], 'previous'): # TODO: If the model is used as a part of another model, get_input will # return the input of the whole model and this won't work. So this is # not handled yet raise Exception("This function doesn't work on model used as subparts " " for other models") input_var = model.get_input(train=False) input_tmp = np.zeros(input_shape, dtype=np.float32) print("input shape : ", input_shape) for l in model.layers: shape_f = theano.function([input_var], l.get_output(train=False).shape) out_shape = shape_f(input_tmp) print('shape after', l.get_config()['name'], ":", out_shape)
Python
0
@@ -65,17 +65,16 @@ theano%0A%0A -%0A def prin @@ -74,16 +74,996 @@ ef print +_graph_layer_shapes(graph, input_shapes):%0A %22%22%22%0A Utility function to print the shape of the output at each layer of a Graph%0A%0A Arguments:%0A graph: An instance of models.Graph%0A input_shapes: A dict that gives a shape for each input to the Graph%0A %22%22%22%0A input_vars = %5Bgraph.inputs%5Bname%5D.input%0A for name in graph.input_order%5D%0A output_vars = %5Bgraph.outputs%5Bname%5D.get_output()%0A for name in graph.output_order%5D%0A input_dummy = %5Bnp.zeros(input_shapes%5Bname%5D, dtype=np.float32)%0A for name in graph.input_order%5D%0A%0A print(%22input shapes : %22, input_shapes)%0A for name, l in graph.nodes.items():%0A shape_f = theano.function(input_vars,%0A l.get_output(train=False).shape,%0A on_unused_input='ignore')%0A out_shape = shape_f(*input_dummy)%0A print('shape after', l.get_config()%5B'name'%5D, %22(%22, name, %22) :%22, out_shape)%0A%0Adef print_model _layer_s @@ -1086,16 +1086,17 @@ ut_shape +s ):%0A %22 @@ -1304,16 +1304,92 @@ model.%0A + Either a tuple (for a single input) or a list of tuple%0A %22%22%22%0A @@ -1911,16 +1911,197 @@ dels%22)%0A%0A + # We allow the shortcut input_shapes=(1, 1, 28) instead of%0A # input_shapes=%5B(1, 1, 28)%5D.%0A if not isinstance(input_shapes%5B0%5D, tuple):%0A input_shapes = %5Binput_shapes%5D%0A inpu @@ -2105,16 +2105,17 @@ nput_var +s = model @@ -2146,59 +2146,228 @@ -input_tmp = np.zeros(input_shape, dtype=np.float32) +# theano.function excepts a list of variables%0A if not isinstance(input_vars, list):%0A input_vars = %5Binput_vars%5D%0A input_dummy = %5Bnp.zeros(shape, dtype=np.float32)%0A for shape in input_shapes%5D%0A %0A @@ -2385,16 +2385,17 @@ ut shape +s : %22, in @@ -2399,24 +2399,25 @@ input_shape +s )%0A for l @@ -2467,17 +2467,16 @@ unction( -%5B input_va @@ -2476,18 +2476,52 @@ nput_var -%5D, +s,%0A l.get_o @@ -2578,17 +2578,20 @@ e_f( +* input_ -tmp +dummy )%0A
f379160e56a94359d9571ea1b1db1f7544677a57
Fix reference to `latestEvent` in tests.
tests/sentry/api/serializers/test_grouphash.py
tests/sentry/api/serializers/test_grouphash.py
from __future__ import absolute_import from sentry.api.serializers import serialize from sentry.models import Event, GroupHash from sentry.testutils import TestCase class GroupHashSerializerTest(TestCase): def test_no_latest_event(self): user = self.create_user() group = self.create_group() hash = GroupHash.objects.create( project=group.project, group=group, hash='xyz', ) result = serialize(hash, user=user) assert result['latest_event'] is None def test_missing_latest_event(self): user = self.create_user() group = self.create_group() hash = GroupHash.objects.create( project=group.project, group=group, hash='xyz', ) GroupHash.record_last_processed_event_id( group.project_id, [hash.id], ['invalid'], ) result = serialize(hash, user=user) assert result['latest_event'] is None def test_mismatched_latest_event(self): user = self.create_user() group = self.create_group() hash = GroupHash.objects.create( project=group.project, group=group, hash='xyz', ) event = self.create_event(group=self.create_group()) GroupHash.record_last_processed_event_id( group.project_id, [hash.id], event.event_id, ) result = serialize(hash, user=user) assert result['latest_event'] is None def test_valid_latest_event(self): user = self.create_user() group = self.create_group() hash = GroupHash.objects.create( project=group.project, group=group, hash='xyz', ) event = Event.objects.get(id=self.create_event(group=group).id) GroupHash.record_last_processed_event_id( group.project_id, [hash.id], event.event_id, ) result = serialize(hash, user=user) assert result['latest_event'] == serialize(event, user=user)
Python
0
@@ -509,34 +509,33 @@ t result%5B'latest -_e +E vent'%5D is None%0A%0A @@ -985,34 +985,33 @@ t result%5B'latest -_e +E vent'%5D is None%0A%0A @@ -1532,26 +1532,25 @@ sult%5B'latest -_e +E vent'%5D is No @@ -2081,26 +2081,25 @@ sult%5B'latest -_e +E vent'%5D == se
91e04b558b95aa21d5f7c730fc8355e5413ab83c
Use values and values_list in API closes #433
judge/views/api.py
judge/views/api.py
from operator import attrgetter from django.db.models import Prefetch from django.http import JsonResponse, Http404 from django.shortcuts import get_object_or_404 from dmoj import settings from judge.models import Contest, Problem, Profile, Submission, ContestTag def sane_time_repr(delta): days = delta.days hours = delta.seconds / 3600 minutes = (delta.seconds % 3600) / 60 return '%02d:%02d:%02d' % (days, hours, minutes) def api_contest_list(request): contests = {} for c in Contest.objects.filter(is_public=True, is_private=False).prefetch_related( Prefetch('tags', queryset=ContestTag.objects.only('name'), to_attr='tag_list')): contests[c.key] = { 'name': c.name, 'start_time': c.start_time.isoformat(), 'end_time': c.end_time.isoformat(), 'time_limit': c.time_limit and sane_time_repr(c.time_limit), 'labels': map(attrgetter('name'), c.tag_list), } return JsonResponse(contests) def api_problem_list(request): qs = Problem.objects.filter(is_public=True) if settings.ENABLE_FTS and 'search' in request.GET: query = ' '.join(request.GET.getlist('search')).strip() if query: qs = qs.search(query) problems = {} for p in qs: problems[p.code] = { 'points': p.points, 'partial': p.partial, 'name': p.name, 'group': p.group.full_name } return JsonResponse(problems) def api_problem_info(request, problem): p = get_object_or_404(Problem, code=problem) if not p.is_accessible_by(request.user): raise Http404() return JsonResponse({ 'name': p.name, 'authors': list(p.authors.values_list('user__username', flat=True)), 'types': list(p.types.values_list('full_name', flat=True)), 'group': p.group.full_name, 'time_limit': p.time_limit, 'memory_limit': p.memory_limit, 'points': p.points, 'partial': p.partial, 'languages': list(p.allowed_languages.values_list('key', flat=True)), }) def api_user_list(request): users = {} for p in Profile.objects.select_related('user').only('user__username', 'name', 'points', 'display_rank'): users[p.user.username] = { 'display_name': p.name, 'points': p.points, 'rank': p.display_rank } return JsonResponse(users) def api_user_info(request, user): p = get_object_or_404(Profile, user__username=user) return JsonResponse({ 'display_name': p.name, 'points': p.points, 'rank': p.display_rank, 'solved_problems': [], # TODO }) def api_user_submissions(request, user): p = get_object_or_404(Profile, user__username=user) subs = Submission.objects.filter(user=p, problem__is_public=True).select_related('problem', 'language') \ .only('id', 'problem__code', 'time', 'memory', 'points', 'language__key', 'status', 'result') data = {} for s in subs: data[s.id] = { 'problem': s.problem.code, 'time': s.time, 'memory': s.memory, 'points': s.points, 'language': s.language.key, 'status': s.status, 'result': s.result } return JsonResponse(data)
Python
0
@@ -668,16 +668,37 @@ _list')) +.defer('description') :%0A @@ -1308,15 +1308,117 @@ for -p in qs +code, points, partial, name, group in qs.values_list('code', 'points', 'partial', 'name', 'group__full_name') :%0A @@ -1432,18 +1432,16 @@ roblems%5B -p. code%5D = @@ -1456,34 +1456,32 @@ 'points': -p. points,%0A @@ -1487,34 +1487,32 @@ 'partial': -p. partial,%0A @@ -1516,34 +1516,32 @@ 'name': -p. name,%0A @@ -1551,33 +1551,21 @@ group': -p. group -.full_name %0A @@ -2268,56 +2268,67 @@ for -p in Profile.objects.select_related('user').only +username, name, points, rank in Profile.objects.values_list ('us @@ -2395,15 +2395,8 @@ ers%5B -p.user. user @@ -2425,34 +2425,32 @@ 'display_name': -p. name,%0A @@ -2453,34 +2453,32 @@ 'points': -p. points,%0A @@ -2485,34 +2485,24 @@ 'rank': -p.display_ rank%0A @@ -2964,62 +2964,48 @@ rue) -.select_related('problem', 'language') %5C%0A .only +%0A data = %7B%7D%0A%0A for s in subs.values ('id @@ -3092,41 +3092,8 @@ lt') -%0A data = %7B%7D%0A%0A for s in subs :%0A @@ -3108,11 +3108,14 @@ ta%5Bs -.id +%5B'id'%5D %5D = @@ -3144,21 +3144,25 @@ ': s -. +%5B' problem -. +__ code +'%5D ,%0A @@ -3180,21 +3180,24 @@ time': s -. +%5B' time +'%5D ,%0A @@ -3213,23 +3213,26 @@ mory': s -. +%5B' memory +'%5D ,%0A @@ -3248,23 +3248,26 @@ ints': s -. +%5B' points +'%5D ,%0A @@ -3289,17 +3289,18 @@ ': s -. +%5B' language .key @@ -3295,20 +3295,23 @@ language -. +__ key +'%5D ,%0A @@ -3331,15 +3331,18 @@ ': s -. +%5B' status +'%5D ,%0A @@ -3366,15 +3366,19 @@ ': s -. +%5B' result +'%5D, %0A
db13f88055d5ea2357ecc4b996f80d3392655516
Create parse.py
parse.py
parse.py
Python
0.00002
@@ -0,0 +1,2934 @@ +__version__ = %221.0%22%0A%0Aimport os%0Afrom ciscoconfparse import CiscoConfParse%0A%0A# -----------------------------------------------%0A# Create the db dictionary to store all records%0A# -----------------------------------------------%0Adb = %7B%7D%0A# ----------------------------------------------------------------%0A# Update the dictionary below to search for new search parameters%0A# ----------------------------------------------------------------%0Adata_to_search = %7B%22NTP%22 : r%22ntp server%22,%0A %22SNMP%22 : r%22snmp server%22,%0A %22USERNAME%22 : r%22username%22,%0A %22AAA%22 : r%22aaa%22,%0A %22VERSION%22 : r%22System image file%22%7D%0A%0Aprint (%22--------------------------------------------------------------------%22)%0Aprint (%22 Searching current directory and sub-directories for .txt files....%22)%0Aprint (%22--------------------------------------------------------------------%22)%0Afor path, dirs, files in os.walk(%22.%22):%0A for f in files:%0A if f.endswith('.txt'):%0A hostname = f.replace(%22.txt%22,%22%22)%0A print (%22Reading data from: %7B%7D%22.format(os.path.join(path, f)))%0A%0A # Create an entry for the devices based on the hostname%0A db%5Bhostname%5D = %7B%7D%0A for search_parameter in data_to_search:%0A db%5Bhostname%5D%5Bsearch_parameter%5D = %5B%5D%0A%0A # Read the configuration file%0A parse = CiscoConfParse(os.path.join(path, f))%0A%0A #----------------------------------------------------------%0A # Search for all relevant items and store findings in the%0A # db dictionary so that we can use later on%0A #----------------------------------------------------------%0A for search_parameter in data_to_search:%0A for obj in parse.find_objects(data_to_search%5Bsearch_parameter%5D):%0A db%5Bhostname%5D%5Bsearch_parameter%5D.append(obj.text)%0A%0Aprint (%22-----------------------%22)%0Aprint (%22 Configuration snapshot%22)%0Aprint (%22-----------------------%22)%0A%0A# Cycle through all the devices in the database%0Afor device in sorted(db):%0A print (%22%5B%7B%7D%5D%22.format(device))%0A # Cycle through each item in data_to_search%0A for search_parameter in data_to_search:%0A # If there is a value then print it%0A if db%5Bdevice%5D%5Bsearch_parameter%5D:%0A for line in db%5Bdevice%5D%5Bsearch_parameter%5D:%0A print (%22 %7B%7D: %7B%7D%22.format(search_parameter.ljust(10),line))%0A # Otherwise print that nothing was found%0A else:%0A print (%22 %7B%7D: NOT FOUND%22.format(search_parameter.ljust(10)))%0A print (%22%22)%0A%0Aprint (%22-------------------------------%22)%0Aprint (%22 Devices with missing entries %22)%0Aprint (%22-------------------------------%22)%0A%0Afor device in sorted(db):%0A for entry in data_to_search:%0A if not db%5Bdevice%5D%5Bentry%5D:%0A print (%22%5B%7B%7D%5D has no entry defined for '%7B%7D'%22.format(device.ljust(25),entry))%0A%0A%0A%0A
bee35885bb845ea77aa4586bca33da3e54b92ed2
Add `albumtypes` plugin
beetsplug/albumtypes.py
beetsplug/albumtypes.py
Python
0.000001
@@ -0,0 +1,2108 @@ +# -*- coding: utf-8 -*-%0A%0A# This file is part of beets.%0A# Copyright 2021, Edgars Supe.%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining%0A# a copy of this software and associated documentation files (the%0A# %22Software%22), to deal in the Software without restriction, including%0A# without limitation the rights to use, copy, modify, merge, publish,%0A# distribute, sublicense, and/or sell copies of the Software, and to%0A# permit persons to whom the Software is furnished to do so, subject to%0A# the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be%0A# included in all copies or substantial portions of the Software.%0A%0A%22%22%22Adds an album template field for formatted album types.%22%22%22%0A%0Afrom __future__ import division, absolute_import, print_function%0A%0Afrom beets.autotag.mb import VARIOUS_ARTISTS_ID%0Afrom beets.library import Album%0Afrom beets.plugins import BeetsPlugin%0A%0A%0Aclass AlbumTypesPlugin(BeetsPlugin):%0A %22%22%22Adds an album template field for formatted album types.%22%22%22%0A%0A def __init__(self):%0A %22%22%22Init AlbumTypesPlugin.%22%22%22%0A super(AlbumTypesPlugin, self).__init__()%0A self.album_template_fields%5B'atypes'%5D = self._atypes%0A%0A def _atypes(self, item: Album):%0A self.config.add(%7B%0A 'types': %5B%5D,%0A 'ignore_va': %5B%5D,%0A 'brackets': '%5B%5D'%0A %7D)%0A types = self.config%5B'types'%5D.as_pairs()%0A ignore_va = self.config%5B'ignore_va'%5D.as_str_seq()%0A bracket = self.config%5B'bracket'%5D.as_str()%0A%0A # Assign a left and right bracket or leave blank if argument is empty.%0A if len(bracket) == 2:%0A bracket_l = bracket%5B0%5D%0A bracket_r = bracket%5B1%5D%0A else:%0A bracket_l = u''%0A bracket_r = u''%0A%0A res = ''%0A albumtypes = item.albumtypes.split('; ')%0A is_va = item.mb_albumartistid == VARIOUS_ARTISTS_ID%0A for type in types:%0A if type%5B0%5D in albumtypes and type%5B1%5D:%0A if not is_va or (not type%5B0%5D in ignore_va and is_va):%0A res += bracket_l + type%5B1%5D + bracket_r%0A%0A return res%0A
f859eb67fdc66b930c3664a3586c454f5c9afe87
Add files via upload
subunits/blink.py
subunits/blink.py
Python
0
@@ -0,0 +1,514 @@ +from nanpy import ArduinoApi%0D%0Afrom nanpy import SerialManager%0D%0Afrom time import sleep%0D%0A%0D%0Alink = SerialManager(device='/dev/ttyACM0')%0D%0AA = ArduinoApi(connection=link)%0D%0A%0D%0Aled = 13%0D%0A%0D%0A# SETUP:%0D%0AA.pinMode(led, A.OUTPUT)%0D%0A%0D%0A# LOOP:%0D%0Awhile True:%0D%0A A.digitalWrite(led, A.HIGH) # turn the LED on (HIGH is the voltage level)%0D%0A print %22blink on%22%0D%0A sleep(1) # use Python sleep instead of arduino delay%0D%0A A.digitalWrite(led, A.LOW) # turn the LED off by making the voltage LOW%0D%0A print %22blink off%22%0D%0A sleep(1)%0D%0A
7d258bdb68119ad54a69e92ac7c7c1c2fc51e087
Create scrap.py
scrap.py
scrap.py
Python
0.000001
@@ -0,0 +1,222 @@ +#!usr/bin/env python %0A%0Aimport requests%0Afrom bs4 import BeautifulSoup%0A%0Auri = requests.get(%22http://video9.in/english/%22)%0Asoup=BeautifulSoup(url.text)%0Afor link in soup.find_all(%22div%22,%7B%22class%22: %22updates%22%7D):%0A print link.text%0A
2aae4701fd98f560e7e112084f47f66515f6f574
Add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,458 @@ +from setuptools import setup, find_packages%0Aimport go_nogo_rig%0A%0A%0Asetup(%0A name='Go-NoGo',%0A version=go_nogo_rig.__version__,%0A packages=find_packages(),%0A install_requires=%5B'moa', 'pybarst', 'moadevs'%5D,%0A author='Matthew Einhorn',%0A author_email='[email protected]',%0A url='https://cpl.cornell.edu/',%0A license='MIT',%0A description='Go/NoGo experiment.',%0A entry_points=%7B'console_scripts': %5B'go_nogo=go_nogo_rig.main:run_app'%5D%7D,%0A )%0A
a903268692cef9054de9281622423234714d4596
update FIXME to reflect partial progress
jsonrpc_http/Engine.py
jsonrpc_http/Engine.py
import inspect # import tabular_predDB.cython_code.State as State import tabular_predDB.python_utils.sample_utils as su def int_generator(start=0): next_i = start while True: yield next_i next_i += 1 class Engine(object): def __init__(self, seed=0): self.seed_generator = int_generator(seed) def get_next_seed(self): return self.seed_generator.next() def initialize(self, M_c, M_r, T, initialization='from_the_prior'): # FIXME: why is M_r passed? SEED = self.get_next_seed() p_State = State.p_State(M_c, T, initialization=initialization, SEED=SEED) X_L = p_State.get_X_L() X_D = p_State.get_X_D() return M_c, M_r, X_L, X_D def analyze(self, M_c, T, X_L, X_D, kernel_list, n_steps, c, r, max_iterations, max_time): SEED = self.get_next_seed() p_State = State.p_State(M_c, T, X_L, X_D, SEED=SEED) # FIXME: actually pay attention to c, r, max_time p_State.transition(kernel_list, n_steps, c, r, max_iterations, max_time) X_L_prime = p_State.get_X_L() X_D_prime = p_State.get_X_D() return X_L_prime, X_D_prime def simple_predictive_sample(self, M_c, X_L, X_D, Y, Q, n=1): if type(X_L) == list: assert type(X_D) == list samples = su.simple_predictive_sample_multistate(M_c, X_L, X_D, Y, Q, self.get_next_seed, n) else: samples = su.simple_predictive_sample(M_c, X_L, X_D, Y, Q, self.get_next_seed, n) return samples def simple_predictive_probability(self, M_c, X_L, X_D, Y, Q, n): p = None return p def impute(self, M_c, X_L, X_D, Y, Q, n): e = su.impute(M_c, X_L, X_D, Y, Q, n, self.get_next_seed) return e def impute_and_confidence(self, M_c, X_L, X_D, Y, Q, n): if type(X_L) == list: assert type(X_D) == list # TODO: multistate impute doesn't exist yet e,confidence = su.impute_and_confidence_multistate(M_c, X_L, X_D, Y, Q, n, self.get_next_seed) else: e,confidence = su.impute_and_confidence(M_c, X_L, X_D, Y, Q, n, self.get_next_seed) return (e,confidence) def conditional_entropy(M_c, X_L, X_D, d_given, d_target, n=None, max_time=None): e = None return e def predictively_related(self, M_c, X_L, X_D, d, n=None, max_time=None): m = [] return m def contextual_structural_similarity(self, X_D, r, d): s = [] return s def structural_similarity(self, X_D, r): s = [] return s def structural_anomalousness_columns(self, X_D): a = [] return a def structural_anomalousness_rows(self, X_D): a = [] return a def predictive_anomalousness(self, M_c, X_L, X_D, T, q, n): a = [] return a # helper functions get_name = lambda x: getattr(x, '__name__') get_Engine_attr = lambda x: getattr(Engine, x) is_Engine_method_name = lambda x: inspect.ismethod(get_Engine_attr(x)) # def get_method_names(): return filter(is_Engine_method_name, dir(Engine)) # def get_method_name_to_args(): method_names = get_method_names() method_name_to_args = dict() for method_name in method_names: method = Engine.__dict__[method_name] arg_str_list = inspect.getargspec(method).args[1:] method_name_to_args[method_name] = arg_str_list return method_name_to_args
Python
0
@@ -974,20 +974,30 @@ tion to -c, r +max_iterations , max_ti
7ae1d4b99e2354f76bed894493281d4885d97f34
Add newer template rendering code
cms/test_utils/project/placeholderapp/views.py
cms/test_utils/project/placeholderapp/views.py
from django.http import HttpResponse from django.shortcuts import render from django.template import RequestContext from django.template.base import Template from django.views.generic import DetailView from cms.test_utils.project.placeholderapp.models import ( Example1, MultilingualExample1, CharPksExample) from cms.utils import get_language_from_request from cms.utils.compat import DJANGO_1_7 def example_view(request): context = {} context['examples'] = Example1.objects.all() return render(request, 'placeholderapp.html', context) def _base_detail(request, instance, template_name='detail.html', item_name="char_1", template_string='',): context = {} context['instance'] = instance context['instance_class'] = instance.__class__() context['item_name'] = item_name if hasattr(request, 'toolbar'): request.toolbar.set_object(instance) if template_string: template = Template(template_string) if DJANGO_1_7: return HttpResponse(template.render(RequestContext(request=request, dict_=context))) else: from django.template.context import make_context context = make_context(context, request) return HttpResponse(template.render(context)) else: return render(request, template_name, context) def list_view_multi(request): context = {} context['examples'] = MultilingualExample1.objects.language( get_language_from_request(request)).all() context['instance_class'] = MultilingualExample1 return render(request, 'list.html', context) def detail_view_multi(request, pk, template_name='detail_multi.html', item_name="char_1", template_string='',): instance = MultilingualExample1.objects.language( get_language_from_request(request)).get(pk=pk) return _base_detail(request, instance, template_name, item_name, template_string) def detail_view_multi_unfiltered(request, pk, template_name='detail_multi.html', item_name="char_1", template_string='',): instance = MultilingualExample1.objects.get(pk=pk) return _base_detail(request, instance, template_name, item_name, template_string) def list_view(request): context = {} context['examples'] = Example1.objects.all() context['instance_class'] = Example1 return render(request, 'list.html', context) def detail_view(request, pk, template_name='detail.html', item_name="char_1", template_string='',): if request.user.is_staff and request.toolbar: instance = Example1.objects.get(pk=pk) else: instance = Example1.objects.get(pk=pk, publish=True) return _base_detail(request, instance, template_name, item_name, template_string) def detail_view_char(request, pk, template_name='detail.html', item_name="char_1", template_string='',): instance = CharPksExample.objects.get(pk=pk) return _base_detail(request, instance, template_name, item_name, template_string) class ClassDetail(DetailView): model = Example1 template_name = "detail.html" template_string = '' def render_to_response(self, context, **response_kwargs): if self.template_string: template = Template(self.template_string) if DJANGO_1_7: return HttpResponse(template.render( RequestContext(request=self.request, dict_=context) )) else: from django.template.context import make_context context = make_context(context, self.request) return HttpResponse(template.render(context)) else: return super(ClassDetail, self).render_to_response(context, **response_kwargs) def get_context_data(self, **kwargs): context = super(ClassDetail, self).get_context_data(**kwargs) context['instance_class'] = self.model return context
Python
0
@@ -614,33 +614,16 @@ l.html', -%0A item_na @@ -626,32 +626,49 @@ m_name=%22char_1%22, +%0A template_string
aef67e19a3494880620fd87a68ff581edaa9ce81
Add unittest for madx.evaluate
test/test_madx.py
test/test_madx.py
Python
0.000001
@@ -0,0 +1,395 @@ +import unittest%0Afrom cern.madx import madx%0Afrom math import pi%0A%0Aclass TestMadX(unittest.TestCase):%0A %22%22%22Test methods of the madx class.%22%22%22%0A%0A def setUp(self):%0A self.madx = madx()%0A%0A def tearDown(self):%0A del self.madx%0A%0A def testEvaluate(self):%0A self.madx.command(%22FOO = PI*3;%22)%0A val = self.madx.evaluate(%221/FOO%22)%0A self.assertAlmostEqual(val, 1/(3*pi))%0A
0a55f6f2bf49c679a422d44007df3f66c323e719
mask unit test
test/test_mask.py
test/test_mask.py
Python
0
@@ -0,0 +1,531 @@ +import numpy as np%0Afrom minimask.mask import Mask%0Afrom minimask.spherical_poly import spherical_polygon%0A%0A%0Adef test_mask_sample():%0A %22%22%22 %22%22%22%0A vertices = %5B%5B0,0%5D,%5B10,0%5D,%5B10,10%5D,%5B0,10%5D%5D%0A S = spherical_polygon(vertices)%0A%0A M = Mask(polys=%5BS%5D, fullsky=False)%0A%0A x,y = M.sample(100)%0A%0A assert len(x) == 1000%0A assert len(y) == 1000%0A %0A assert np.abs(x.min()) %3C 1%0A assert np.abs(y.min()) %3C 1%0A assert np.abs(x.max() - 10) %3C 1%0A assert np.abs(y.max() - 10) %3C 1%0A%0A r = M.contains(x, y)%0A assert np.sum(r) == 0
25495d675c44a75d7dedfe123f30a858f9cd60be
Add minimal (no asserts) test for play plugin
test/test_play.py
test/test_play.py
Python
0.000001
@@ -0,0 +1,852 @@ +# -*- coding: utf-8 -*-%0A%0A%22%22%22Tests for the play plugin%22%22%22%0A%0Afrom __future__ import (division, absolute_import, print_function,%0A unicode_literals)%0A%0Afrom mock import patch, Mock%0A%0Afrom test._common import unittest%0Afrom test.helper import TestHelper%0A%0Afrom beetsplug.play import PlayPlugin%0A%0A%0Aclass PlayPluginTest(unittest.TestCase, TestHelper):%0A def setUp(self):%0A self.setup_beets()%0A self.load_plugins('play')%0A self.add_item(title='aNiceTitle')%0A%0A def tearDown(self):%0A self.teardown_beets()%0A self.unload_plugins()%0A%0A @patch('beetsplug.play.util.interactive_open', Mock())%0A def test_basic(self):%0A self.run_command('play', 'title:aNiceTitle')%0A%0Adef suite():%0A return unittest.TestLoader().loadTestsFromName(__name__)%0A%0Aif __name__ == b'__main__':%0A unittest.main(defaultTest='suite')%0A
b3ad7a7735d55a91682ea6798e6ebcfcf94b1969
287. Find the Duplicate Number. Brent's
p287_brent.py
p287_brent.py
Python
0.999999
@@ -0,0 +1,1130 @@ +import unittest%0A%0A%0Aclass Solution(object):%0A def findDuplicate(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: int%0A %22%22%22%0A t = nums%5B0%5D%0A h = nums%5Bt%5D%0A max_loop_length = 1%0A loop_length = 1%0A%0A while t != h:%0A if loop_length == max_loop_length:%0A t = h%0A max_loop_length %3C%3C= 1%0A loop_length = 0%0A h = nums%5Bh%5D%0A loop_length += 1%0A%0A t = 0%0A h = 0%0A for i in xrange(loop_length):%0A h = nums%5Bh%5D%0A%0A while t != h:%0A t = nums%5Bt%5D%0A h = nums%5Bh%5D%0A%0A return t%0A%0A%0Aclass Test(unittest.TestCase):%0A def test(self):%0A self._test(%5B1, 2, 3, 4, 4, 5%5D, 4)%0A self._test(%5B5, 1, 3, 4, 2, 4%5D, 4)%0A self._test(%5B1, 2, 3, 4, 5, 5, 6%5D, 5)%0A self._test(%5B1, 3, 4, 5, 6, 6, 6%5D, 6)%0A self._test(%5B1, 3, 4, 5, 6, 6, 6, 7%5D, 6)%0A self._test(%5B1, 3, 4, 2, 1%5D, 1)%0A%0A def _test(self, nums, expected):%0A actual = Solution().findDuplicate(nums)%0A self.assertEqual(actual, expected)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
83d00fea8adf611984c3b56a63f080f144612c69
Create data_tool.py
data_tool.py
data_tool.py
Python
0.000001
@@ -0,0 +1,823 @@ +#!/usr/bin/python%0A# -*- coding:utf-8 -*-%0A%0Aimport pickle%0Aimport random%0A%0Adef load_data():%0A with open('dataset.pkl', 'r') as file:%0A data_set = pickle.load(file)%0A return data_set%0A%0Adef feature_format(data_set):%0A features = %5B%5D%0A labels = %5B%5D%0A for item in data_set:%0A features.append(item%5B:-1%5D)%0A labels.append(item%5B-1%5D)%0A return features, labels%0A%0Adef train_test_split(features, test_rate):%0A random.shuffle(features)%0A total_number = len(features)%0A test_number = int(round(len(features) * test_rate))%0A train_data = features%5B0:-test_number%5D%0A test_data = features%5B-test_number:total_number%5D%0A features_train, labels_train = feature_format(train_data)%0A features_test, labels_test = feature_format(test_data)%0A return features_train, labels_train, features_test, labels_test%0A
c488e446aee3d28fa84bb24d446ca22af20e461c
Add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,492 @@ +#!/usr/bin/env python%0Afrom setuptools import setup, find_packages%0A%0A%0Adef lt27():%0A import sys%0A v = sys.version_info%0A return (v%5B0%5D, v%5B1%5D) %3C (2, 7)%0A %0A%0Atests_require = %5B%0A 'nose%3E=1.0',%0A 'mock',%0A%5D%0A%0A%0Aif lt27():%0A tests_require.append('unittest2')%0A%0A%0Asetup(%0A name='dynsupdate',%0A description='Dynamic DNS update like nsupdate',%0A install_requires=%5B%0A 'dnspython',%0A %5D,%0A tests_require=tests_require,%0A packages=find_packages(),%0A test_suite=%22nose.collector%22%0A)%0A%0A
6d3a9f41bec03405fa648ce169b9565f937e4598
add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1065 @@ +from setuptools import setup%0A%0A%0Asetup(%0A name=%22timekeeper%22,%0A version=%220.1.0%22,%0A description=%22Send runtime measurements of your code to InfluxDB%22,%0A author=%22Torsten Rehn%22,%0A author_email=%[email protected]%22,%0A license=%22ISC%22,%0A url=%22https://github.com/trehn/timekeeper%22,%0A keywords=%5B%22profiling%22, %22profile%22, %22metrics%22, %22instrumentation%22, %22measure%22, %22influxdb%22%5D,%0A classifiers=%5B%0A %22Development Status :: 4 - Beta%22,%0A %22Intended Audience :: Developers%22,%0A %22License :: OSI Approved :: ISC License (ISCL)%22,%0A %22Operating System :: MacOS :: MacOS X%22,%0A %22Operating System :: POSIX%22,%0A %22Operating System :: Unix%22,%0A %22Programming Language :: Python :: 2.7%22,%0A %22Programming Language :: Python :: 3.2%22,%0A %22Programming Language :: Python :: 3.3%22,%0A %22Programming Language :: Python :: 3.4%22,%0A %22Topic :: Software Development :: Libraries :: Python Modules%22,%0A %22Topic :: System :: Monitoring%22,%0A %5D,%0A install_requires=%5B%0A %22influxdb %3E= 2.0.0%22,%0A %5D,%0A py_modules=%5B'timekeeper'%5D,%0A)%0A
bc9401da60e8f10827f37772af937d4fb11ca248
Add PyPI setup.py file
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,617 @@ +try:%0A from setuptools import setup%0Aexcept ImportError:%0A from distutils.core import setup%0A%0A%0Asetup(%0A name='component',%0A author='Daniel Chatfield',%0A author_email='[email protected]',%0A version='0.0.1',%0A url='http://github.com/import/component',%0A py_modules=%5B'component'%5D,%0A description='A python library that makes component(1) play nicely with python.',%0A zip_safe=False,%0A classifiers=%5B%0A 'License :: OSI Approved :: MIT License',%0A 'Programming Language :: Python',%0A 'Programming Language :: Python :: 2',%0A 'Programming Language :: Python :: 3',%0A %5D,%0A)
1d7fa31d9f4ce42586fb33bea98d5af87bd95f3a
Allow setup.py install
setup.py
setup.py
Python
0
@@ -0,0 +1,361 @@ +from setuptools import setup%0A%0Asetup(name='multifil',%0A version='0.2',%0A description='A spatial half-sarcomere model and the means to run it',%0A url='https://github.com/cdw/multifil',%0A author='C David Williams',%0A author_email='[email protected]',%0A license='MIT',%0A packages=%5B'multifil'%5D,%0A install_requires=%5B'numpy', 'boto'%5D%0A )%0A
30220f57bc5052cb05ed5c7e3dc01c763152d175
Add setup for python installation
setup.py
setup.py
Python
0
@@ -0,0 +1,217 @@ +#!/usr/bin/env python%0A%0Afrom distutils.core import setup%0A%0Asetup(name='lqrrt',%0A version='1.0',%0A description='Kinodynamic RRT Implementation',%0A author='Jason Nezvadovitz',%0A packages=%5B'lqrrt'%5D,%0A )%0A
0c7ec853c97a71eacc838be925c46ac0c26d1518
Create setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,498 @@ +from distutils.core import setup%0Asetup(%0A name = 'ratio-merge',%0A packages = %5B'ratio-merge'%5D,%0A version = '0.1',%0A description = 'A small utility function for merging two lists by some ratio',%0A author = 'Adam Lev-Libfeld',%0A author_email = '[email protected]',%0A url = 'https://github.com/daTokenizer/ratio-merge-python',%0A download_url = 'https://github.com/daTokenizer/ratio-merge-python/archive/0.1.tar.gz',%0A keywords = %5B'merge', 'ratio', 'lists'%5D, # arbitrary keywords%0A classifiers = %5B%5D,%0A)%0A
a29b7195af2550e5646f3aac581cbaf47244e8f4
Create setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1063 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A# setup.py%0A%22%22%22%0ASetup files%0A%0ACopyright (c) 2020, David Hoffman%0A%22%22%22%0A%0Aimport setuptools%0A%0A# read in long description%0Awith open(%22README.md%22, %22r%22) as fh:%0A long_description = fh.read()%0A%0A# get requirements%0Awith open(%22requirements.txt%22, %22r%22) as fh:%0A requirements = %5Bline.strip() for line in fh%5D%0A%0Asetuptools.setup(%0A name=%22py-otf%22,%0A version=%220.0.1%22,%0A author=%22David Hoffman%22,%0A author_email=%[email protected]%22,%0A description=%22A python library for simulating and analyzing microscope point spread functions (PSFs)%22,%0A long_description=long_description,%0A long_description_content_type=%22text/markdown%22,%0A packages=setuptools.find_packages(),%0A classifiers=%5B%0A %22Development Status :: Alpha%22,%0A %22Programming Language :: Python :: 3%22,%0A %22License :: OSI Approved :: Apache License%22,%0A %22Natural Language :: English%22,%0A %22Operating System :: OS Independent%22,%0A %22Topic :: Scientific/Engineering%22,%0A %5D,%0A python_requires=%22%3E=3%22,%0A install_requires=requirements,%0A)%0A
45d734cb495e7f61c5cbbac2958e220868033a9d
Add setup.py for RTD
setup.py
setup.py
Python
0
@@ -0,0 +1,683 @@ +from distutils.core import setup%0A%0Asetup(%0A name='mayatools',%0A version='0.1-dev',%0A description='Collection of general tools and utilities for working in and with Maya.',%0A url='https://github.com/westernx/mayatools',%0A %0A packages=%5B'mayatools'%5D,%0A %0A author='Mike Boers',%0A author_email='[email protected]',%0A license='BSD-3',%0A %0A classifiers=%5B%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: BSD License',%0A 'Natural Language :: English',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python :: 2',%0A 'Topic :: Software Development :: Libraries :: Python Modules',%0A %5D,%0A)%0A
1ac147a2a9f627cccd917006f61cdda7b25ccc06
Add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,531 @@ +from distutils.core import setup%0A%0Asetup(%0A name='applied-sims',%0A version='0.1',%0A classifiers=%5B%0A 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',%0A 'Programming Language :: Python :: 3',%0A 'Topic :: Scientific/Engineering :: Physics',%0A 'Intended Audience :: Other Audience',%0A %5D,%0A packages=%5B'polymer_states'%5D,%0A url='http://github.com/szabba/applied-sims',%0A license='MPL-2.0',%0A author='Karol Marcjan',%0A author_email='[email protected]',%0A description=''%0A)%0A
e6e96d9fa725ec28028b090c900086474e69cdb8
Add basic setup.py
setup.py
setup.py
Python
0.000002
@@ -0,0 +1,328 @@ +%0Afrom distutils.core import setup%0A%0Asetup(%0A name='litemap',%0A version='1.0a',%0A description='Mapping class which stores in SQLite database.',%0A url='http://github.com/mikeboers/LiteMap',%0A py_modules=%5B'litemap'%5D,%0A %0A author='Mike Boers',%0A author_email='[email protected]',%0A license='New BSD License',%0A)%0A
479ff810c07ebe5c309bb4c9f712e689e831945e
Add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1233 @@ +import os%0Afrom setuptools import setup%0A%0Athis_dir = os.path.dirname(__file__)%0Along_description = %22%5Cn%22 + open(os.path.join(this_dir, 'README.rst')).read()%0A%0Asetup(%0A name='ansible_role_apply',%0A version='0.0.0',%0A description='Apply a single Ansible role to host(s) easily',%0A long_description=long_description,%0A keywords='ansible',%0A author='Marc Abramowitz',%0A author_email='[email protected]',%0A url='https://github.com/msabramo/ansible-role-apply',%0A py_modules=%5B'ansible-role-apply'%5D,%0A zip_safe=False,%0A install_requires=%5B%0A 'ansible',%0A 'click',%0A %5D,%0A entry_points=%22%22%22%5C%0A %5Bconsole_scripts%5D%0A ansible-role-apply = ansible_role_apply:ansible_role_apply%0A %22%22%22,%0A license='MIT',%0A classifiers=%5B%0A 'Programming Language :: Python :: 2',%0A 'Programming Language :: Python :: 2.6',%0A 'Programming Language :: Python :: 2.7',%0A 'Programming Language :: Python :: 3',%0A 'Programming Language :: Python :: 3.2',%0A 'Programming Language :: Python :: 3.3',%0A 'Programming Language :: Python :: 3.4',%0A 'Topic :: Software Development :: Testing',%0A 'Natural Language :: English',%0A 'Intended Audience :: Developers',%0A %5D,%0A)%0A
9d12617170982fc1b6b01d109d986f5cd45e0552
Update setup.py.
setup.py
setup.py
from setuptools import setup,find_packages setup ( name = 'pymatgen', version = '1.0.1', packages = find_packages(), # Declare your packages' dependencies here, for eg: install_requires = ['numpy','matplotlib','pymongo','PyCIFRW','psycopg2'], author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Dan Gunter', author_email = '[email protected], [email protected], [email protected], [email protected]', summary = 'The Materials Project Python Library', url = 'www.materialsproject.org', license = '', long_description= 'pymatgen is a Python library for the Materials Project. It includes core structure definition and utilities, electronic structure objects, database access APIs, and convenient IO from VASP and CIF files.', # could also include long_description, download_url, classifiers, etc. )
Python
0
@@ -204,16 +204,24 @@ numpy',' +scipy',' matplotl @@ -229,42 +229,20 @@ b',' -pymongo','PyCIFRW','psycopg2 +PyCIFRW '%5D,%0A%0A -%0A au @@ -665,30 +665,8 @@ cts, - database access APIs, and
4a7234d4592166a1a13bc6b8e8b3b201019df23b
Create prims_minimum_spanning.py
algorithms/graph/prims_minimum_spanning.py
algorithms/graph/prims_minimum_spanning.py
Python
0.000041
@@ -0,0 +1,738 @@ +import heapq # for priority queue%0A%0A# input number of nodes and edges in graph%0An, e = map (int,input().split())%0A%0A# initializing empty graph as a dictionary (of the form %7Bint:list%7D)%0Ag = dict (zip (%5Bi for i in range(1,n+1)%5D,%5B%5B%5D for i in range(n)%5D))%0A%0A# input graph data%0Afor i in range(e):%0A a, b, c = map (int,input().split())%0A g%5Ba%5D.append(%5Bc,b%5D)%0A g%5Bb%5D.append(%5Bc,a%5D)%0A %0Avis = %5B%5D%0As = %5B%5B0,1%5D%5D%0Aprim = %5B%5D%0Amincost = 0%0A%0A# prim's algo. to find weight of minimum spanning tree%0Awhile (len(s)%3E0):%0A v = heapq.heappop(s)%0A x = v%5B1%5D%0A if (x in vis):%0A continue%0A%0A mincost += v%5B0%5D%0A prim.append(x)%0A vis.append(x)%0A%0A for j in g%5Bx%5D:%0A i = j%5B-1%5D%0A if(i not in vis):%0A heapq.heappush(s,j)%0A%0Aprint(mincost)%0A
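The diff above reads the graph from stdin and runs Prim's algorithm with a binary heap. As a hedged aside, the same idea packaged as a reusable function with a hard-coded example graph (1-based node numbering as in the script; assumes the graph is connected):

import heapq

def prim_mst_cost(n, edges):
    # edges: iterable of (u, v, w) with nodes numbered 1..n; returns the total MST weight.
    adj = {i: [] for i in range(1, n + 1)}
    for u, v, w in edges:
        adj[u].append((w, v))
        adj[v].append((w, u))
    visited = set()
    heap = [(0, 1)]  # (edge weight, node), seeded with node 1 at cost 0
    total = 0
    while heap and len(visited) < n:
        w, u = heapq.heappop(heap)
        if u in visited:
            continue
        visited.add(u)
        total += w
        for item in adj[u]:
            if item[1] not in visited:
                heapq.heappush(heap, item)
    return total

# Small example: the MST keeps the edges of weight 1, 2 and 3, so the total is 6.
print(prim_mst_cost(4, [(1, 2, 1), (2, 3, 2), (1, 3, 4), (3, 4, 3)]))
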
a879ac0db99ba45ff3b6d453795a85635243fa11
version change
setup.py
setup.py
from distutils.core import setup setup(name='jieba', version='0.22', description='Chinese Words Segementation Utilities', author='Sun, Junyi', author_email='[email protected]', url='http://github.com/fxsjy', packages=['jieba'], package_dir={'jieba':'jieba'}, package_data={'jieba':['*.*','finalseg/*','analyse/*','posseg/*']} )
Python
0.000001
@@ -68,17 +68,17 @@ ion='0.2 -2 +3 ', %0A
b12072d9245aaec0d242ee1d78118e9ad8d0e93a
Build documentation before install
setup.py
setup.py
#!/usr/bin/python # # This file is part of Plinth. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """ Plinth setup file """ from distutils import log from distutils.command.install_data import install_data import glob import os from setuptools import find_packages, setup import shutil from plinth import __version__ DIRECTORIES_TO_CREATE = [ '/var/lib/plinth', '/var/lib/plinth/sessions', '/var/log/plinth', ] DIRECTORIES_TO_COPY = [ ('/usr/share/plinth/static', 'static'), ('/usr/share/doc/plinth/', 'doc'), ] class CustomInstallData(install_data): """Override install command to allow directory creation""" def run(self): """Execute install command""" install_data.run(self) # Old style base class # Create empty directories for directory in DIRECTORIES_TO_CREATE: if not os.path.exists(directory): log.info("creating directory '%s'", directory) os.makedirs(directory) # Recursively copy directories for target, source in DIRECTORIES_TO_COPY: if not os.path.exists(target): log.info("recursive copy '%s' to '%s'", source, target) shutil.copytree(source, target) setup( name='Plinth', version=__version__, description='A web front end for administering FreedomBox', url='http://freedomboxfoundation.org', packages=find_packages(), scripts=['bin/plinth'], test_suite='plinth.tests.TEST_SUITE', license='COPYING', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: End Users/Desktop', 'License :: DFSG approved', 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Natural Language :: English', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Unix Shell', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'Topic :: System :: Systems Administration', ], install_requires=[ 'cherrypy >= 3.0', 'django >= 1.6.0', 'django-bootstrap-form', 'withsqlite', ], package_data={'plinth': ['modules/enabled/*', 'templates/*', 'modules/*/templates/*']}, data_files=[('/etc/init.d', ['data/etc/init.d/plinth']), ('/usr/lib/freedombox/setup.d/', ['data/usr/lib/freedombox/setup.d/86_plinth']), ('/usr/lib/freedombox/first-run.d', ['data/usr/lib/freedombox/first-run.d/90_firewall']), ('/etc/apache2/sites-available', ['data/etc/apache2/sites-available/plinth.conf', 'data/etc/apache2/sites-available/plinth-ssl.conf']), ('/etc/sudoers.d', ['data/etc/sudoers.d/plinth']), ('/usr/share/plinth/actions', glob.glob(os.path.join('actions', '*'))), ('/usr/share/man/man1', ['doc/plinth.1']), ('/etc/plinth', ['data/etc/plinth/plinth.config'])], cmdclass={'install_data': CustomInstallData}, )
Python
0
@@ -892,16 +892,34 @@ t shutil +%0Aimport subprocess %0A%0Afrom p @@ -1162,17 +1162,16 @@ c'),%0A%5D%0A%0A -%0A class Cu @@ -1260,16 +1260,25 @@ creation + and copy %22%22%22%0A @@ -1326,24 +1326,78 @@ command%22%22%22%0A + subprocess.check_call(%5B'make', '-C', 'doc'%5D)%0A%0A inst
24f6cbdcf2f4261a651d058934c65c3696988586
add setup.py to document deps
setup.py
setup.py
Python
0
@@ -0,0 +1,314 @@ +from setuptools import setup%0A%0Asetup(%0A name='gentle',%0A version='0.1',%0A description='Robust yet lenient forced-aligner built on Kaldi.',%0A url='http://lowerquality.com/gentle',%0A author='Robert M Ochshorn',%0A license='MIT',%0A packages=%5B'gentle'%5D,%0A install_requires=%5B'twisted'%5D,%0A)%0A
7f90879651bb3d0403db3a503ca53041aab1fefb
version bump
setup.py
setup.py
from setuptools import setup, find_packages setup( name='sendgrid', version='0.2.3', author='Yamil Asusta', author_email='[email protected]', url='https://github.com/sendgrid/sendgrid-python/', packages=find_packages(), license='MIT', description='SendGrid library for Python', long_description=open('./README.rst').read(), install_requires=[ 'requests', 'smtpapi' ], )
Python
0.000001
@@ -91,9 +91,9 @@ 0.2. -3 +4 ',%0D%0A
654bd46a8226ea97000a1263132a37f7bf130718
ADD setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,435 @@ +#!/usr/bin/env python%0A%0Afrom distutils.core import setup%0A%0Asetup(name='kernel_regression',%0A version='1.0',%0A description='Implementation of Nadaraya-Watson kernel regression with automatic bandwidth selection compatible with sklearn.',%0A author='Jan Hendrik Metzen',%0A author_email='[email protected]',%0A url='https://github.com/jmetzen/kernel_regression',%0A py_modules = %5B'kernel_regression'%5D%0A )%0A
1707306cdee6442e78fe9eaee1d472a0248f75d5
make license consistent
setup.py
setup.py
# -*- coding: utf-8 -*- """ argcomplete ~~~~ Argcomplete provides easy and extensible automatic tab completion of arguments and options for your Python script. It makes two assumptions: - You're using bash as your shell - You're using argparse to manage your command line options See AUTODOCS_LINK for more info. """ from setuptools import setup, find_packages setup( name='argcomplete', version='0.1.0', url='https://github.com/kislyuk/argcomplete', license='BSD', author='Andrey Kislyuk', author_email='[email protected]', description='Bash tab completion for argparse', long_description=__doc__, packages = find_packages(), scripts = ['scripts/register-python-argcomplete'], zip_safe=False, include_package_data=True, platforms=['MacOS X', 'Posix'], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
Python
0.000004
@@ -480,11 +480,11 @@ se=' -BSD +GPL ',%0A
1f1096046e11067c4d42235d3b1aadbfec869bff
Remove setuptools from install_requires
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages from os import path import codecs import os import re import sys def read(*parts): file_path = path.join(path.dirname(__file__), *parts) return codecs.open(file_path, encoding='utf-8').read() def find_version(*parts): version_file = read(*parts) version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return str(version_match.group(1)) raise RuntimeError("Unable to find version string.") setup( name='django-polymorphic', version=find_version('polymorphic', '__init__.py'), license='BSD', description='Seamless Polymorphic Inheritance for Django Models', long_description=read('README.rst'), url='https://github.com/django-polymorphic/django-polymorphic', author='Bert Constantin', author_email='[email protected]', maintainer='Christopher Glass', maintainer_email='[email protected]', packages=find_packages(), package_data={ 'polymorphic': [ 'templates/admin/polymorphic/*.html', ], }, install_requires=['setuptools'], test_suite='runtests', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Framework :: Django', 'Framework :: Django :: 1.4', 'Framework :: Django :: 1.5', 'Framework :: Django :: 1.6', 'Framework :: Django :: 1.7', 'Framework :: Django :: 1.8', 'Topic :: Software Development :: Libraries :: Python Modules', ] )
Python
0
@@ -1128,45 +1128,8 @@ %7D,%0A%0A - install_requires=%5B'setuptools'%5D,%0A
260911a0a46601092aa75882c806ca921a0cbf6d
Add setup.py file so we can install
setup.py
setup.py
Python
0
@@ -0,0 +1,1540 @@ +from __future__ import with_statement%0Aimport sys%0A%0Afrom setuptools import setup, find_packages%0Afrom setuptools.command.test import test as TestCommand%0A%0Aversion = %220.0.1-dev%22%0A%0A%0Adef readme():%0A with open('README.md') as f:%0A return f.read()%0A%0Areqs = %5Bline.strip() for line in open('requirements.txt')%5D%0A%0A%0Aclass PyTest(TestCommand):%0A def finalize_options(self):%0A TestCommand.finalize_options(self)%0A self.test_args = %5B%5D%0A self.test_suite = True%0A%0A def run_tests(self):%0A import pytest%0A errno = pytest.main(self.test_args)%0A sys.exit(errno)%0A%0Asetup(%0A name = %22pyaxiom%22,%0A version = version,%0A description = %22A library to manage various Axiom assets using Python%22,%0A long_description = readme(),%0A license = 'LGPLv3',%0A author = %22Kyle Wilcox%22,%0A author_email = %[email protected]%22,%0A url = %22https://git.axiom/axiom/pyncml%22,%0A packages = find_packages(),%0A install_requires = reqs,%0A tests_require = %5B'pytest'%5D,%0A cmdclass = %7B'test': PyTest%7D,%0A classifiers = %5B%0A 'Development Status :: 3 - Alpha',%0A 'Intended Audience :: Developers',%0A 'Intended Audience :: Science/Research',%0A 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',%0A 'Operating System :: POSIX :: Linux',%0A 'Programming Language :: Python',%0A 'Topic :: Scientific/Engineering',%0A %5D,%0A include_package_data = True,%0A)%0A
b1d87a8f96fb6a019bc7ebab71fe8e0c5921d80f
Include setup.py
setup.py
setup.py
Python
0
@@ -0,0 +1,474 @@ +from setuptools import find_packages%0Afrom setuptools import setup%0A%0AREQUIRED_PACKAGES = %5B'distance', 'tensorflow', 'numpy', 'six'%5D%0A%0Asetup(%0A name='attentionocr',%0A url='https://github.com/emedvedev/attention-ocr',%0A author_name='Ed Medvedev',%0A version='0.1',%0A install_requires=REQUIRED_PACKAGES,%0A packages=find_packages(),%0A include_package_data=True,%0A description='''Optical character recognition model%0A for Tensorflow based on Visual Attention.'''%0A)%0A
6ded510fa9c694e8a836302131157604859d40b1
add setup settings
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,355 @@ +from setuptools import setup%0A%0Asetup(name='uc-numero-alumno',%0A version='0.1.0',%0A description='Valida un n%C3%BAmero de alumno de la UC ',%0A url='https://github.com/mrpatiwi/uc-numero-alumno-python',%0A author='Patricio L%C3%B3pez',%0A author_email='[email protected]',%0A license='MIT',%0A packages=%5B'ucnumber'%5D,%0A zip_safe=False)%0A
414c5d0f9e7e92772cf65be976791889e96e2799
Package with setuptools
setup.py
setup.py
Python
0
@@ -0,0 +1,963 @@ +#!/usr/bin/env python%0A%0Afrom setuptools import setup, find_packages%0A%0Aclassifiers = %5B%0A 'Development Status :: 5 - Production/Stable',%0A 'Framework :: Twisted',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: MIT License',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python',%0A 'Programming Language :: Python :: 2.7',%0A 'Natural Language :: English',%0A 'Topic :: Software Development :: Libraries :: Python Modules'%0A%5D%0A%0Asetup(%0A name='npyscreenreactor',%0A version='1.1',%0A license='MIT',%0A classifiers=classifiers,%0A author='Mark Tearle',%0A author_email='[email protected]',%0A description = 'Twisted reactor for npyscreen',%0A long_description = 'npyscreenreactor is a Twisted reactor for the npyscreen curses library',%0A url='https://github.com/mtearle/npyscreenreactor',%0A packages=find_packages(),%0A keywords=%5B'npyscreen', 'twisted'%5D,%0A install_requires=%5B'twisted', 'npyscreen'%5D%0A)%0A
8aada38d951d039e11e03a6bae9445c784bb4cce
Write a brief demo using nltk
parse-demo.py
parse-demo.py
Python
0.000001
@@ -0,0 +1,720 @@ +#!/usr/bin/python3%0A%0Aimport sys, os%0Aimport nltk%0A%0Aif len(sys.argv) %3C 2:%0A print(%22Please supply a filename.%22)%0A sys.exit(1)%0A%0Afilename = sys.argv%5B1%5D%0A%0Awith open(filename, 'r') as f:%0A data = f.read()%0A%0A# Break the input down into sentences, then into words, and position tag%0A# those words.%0Asentences = %5Bnltk.pos_tag(nltk.word_tokenize(sentence)) %5C%0A for sentence in nltk.sent_tokenize(data)%5D%0A%0A# Define a grammar, and identify the noun phrases in the sentences.%0Achunk_parser = nltk.RegexpParser(r%22NP: %7B%3CDT%3E?%3CJJ%3E*%3CNN%3E%7D%22)%0A%0Atrees = %5Bchunk_parser.parse(sentence) for sentence in sentences%5D%0A%0Afor tree in trees:%0A print(tree)%0A #for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):%0A #print(subtree)%0A
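A compact variant of the demo above, run on one hard-coded sentence instead of a file; it assumes the NLTK punkt and averaged_perceptron_tagger data packages are already downloaded:

import nltk

sentence = "The quick brown fox jumps over the lazy dog"
tagged = nltk.pos_tag(nltk.word_tokenize(sentence))
# Same noun-phrase grammar as the demo: optional determiner, any adjectives, a noun.
chunker = nltk.RegexpParser(r"NP: {<DT>?<JJ>*<NN>}")
print(chunker.parse(tagged))  # prints the chunked parse tree
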
89fa937d218bef113d2bcc681cb4dbd547940c45
Add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,549 @@ +from distutils.core import setup%0Asetup(%0A name = 'koofr',%0A packages = %5B'koofr'%5D, # this must be the same as the name above%0A install_requires=%5B'requests'%5D,%0A version = '0.1',%0A description = 'Python SDK for Koofr',%0A author = 'Andraz Vrhovec',%0A author_email = '[email protected]',%0A url = 'https://github.com/koofr/python-koofr', # use the URL to the github repo%0A download_url = 'https://github.com/koofr/python-koofr/tarball/0.1', # I'll explain this in a second%0A keywords = %5B'api', 'koofr', 'cloud'%5D, # arbitrary keywords%0A classifiers = %5B%5D,%0A)%0A
8ecfe73916fbca42b9a1b47fb2758bb561b76eec
Remove print.
setup.py
setup.py
import os from setuptools import setup, find_packages README = os.path.join(os.path.dirname(__file__), 'README.md') long_description = open(README).read() + '\n\n' print find_packages() setup ( name = 'pymatgen', version = '1.2.4', packages = find_packages(), install_requires = ['numpy', 'scipy', 'matplotlib', 'PyCIFRW'], package_data = {'pymatgen.core': ['*.json'], 'pymatgen.io': ['*.cfg']}, author = 'Shyue Ping Ong, Anubhav Jain, Michael Kocher, Geoffroy Hautier, Will Richards, Dan Gunter, Vincent L Chevrier, Rickard Armiento', author_email = '[email protected], [email protected], [email protected], [email protected], [email protected], [email protected], [email protected], [email protected]', maintainer = 'Shyue Ping Ong', url = 'https://github.com/CederGroupMIT/pymatgen_repo/', license = 'MIT', description = "pymatgen is the Python library powering the Materials Project (www.materialsproject.org).", long_description = long_description, keywords = ["vasp", "materials", "project", "electronic", "structure"], classifiers = [ "Programming Language :: Python :: 2.7", "Development Status :: 4 - Beta", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Scientific/Engineering :: Information Analysis", "Topic :: Scientific/Engineering :: Physics", "Topic :: Scientific/Engineering :: Chemistry", "Topic :: Software Development :: Libraries :: Python Modules", ], download_url = "https://github.com/CederGroupMIT/pymatgen_repo/tarball/master", test_suite = 'nose.collector', test_requires = ['nose'] )
Python
0.000001
@@ -164,31 +164,8 @@ n'%0A%0A -print find_packages()%0A%0A setu
004f720bd064c12f63aaf1dca44ee2da365a7365
version change
setup.py
setup.py
from distutils.core import setup setup(name='jieba', version='0.11', description='Chinese Words Segementation Utilities', author='Sun, Junyi', author_email='[email protected]', url='http://github.com/fxsjy', packages=['jieba'], package_dir={'jieba':'jieba'}, package_data={'jieba':['*.*','finalseg/*']} )
Python
0.000001
@@ -68,17 +68,17 @@ ion='0.1 -1 +4 ', %0A
fe8cc65832b389314ee6e83c76371809e40cc5d1
Bump to 0.1.1
setup.py
setup.py
from ez_setup import use_setuptools use_setuptools() from setuptools import setup setup( name='Kivy Garden', version='0.1', license='MIT', packages=['garden'], scripts=['bin/garden', 'bin/garden.bat'], install_requires=['requests'], )
Python
0.000683
@@ -132,16 +132,18 @@ ion='0.1 +.1 ',%0A
9a0d2a8d207d9f8a105795eb97bdeaac0c30ddec
add ping_interval property
aiohttp_sse/__init__.py
aiohttp_sse/__init__.py
import asyncio from aiohttp import hdrs from aiohttp.protocol import Response as ResponseImpl from aiohttp.web import StreamResponse from aiohttp.web import HTTPMethodNotAllowed __version__ = '0.0.1' __all__ = ['EventSourceResponse'] class EventSourceResponse(StreamResponse): PING_TIME = 15 def __init__(self, *, status=200, reason=None, headers=None): super().__init__(status=status, reason=reason) if headers is not None: self.headers.extend(headers) self.headers['Content-Type'] = 'text/event-stream' self.headers['Cache-Control'] = 'no-cache' self.headers['Connection'] = 'keep-alive' self._loop = None self._finish_fut = None self._ping_task = None def send(self, data, id=None, event=None, retry=None): if id is not None: self.write('id: {0}\n'.format(id).encode('utf-8')) if event is not None: self.write('event: {0}\n'.format(event).encode('utf-8')) for chunk in data.split('\n'): self.write('data: {0}\n'.format(chunk).encode('utf-8')) if retry is not None: self.write('retry: {0}\n'.format(retry).encode('utf-8')) self.write(b'\n') def start(self, request): if request.method != 'GET': raise HTTPMethodNotAllowed() self._loop = request.app.loop self._finish_fut = asyncio.Future(loop=self._loop) self._finish_fut.add_done_callback(self._cancel_ping) resp_impl = self._start_pre_check(request) if resp_impl is not None: return resp_impl self._req = request self._keep_alive = True resp_impl = self._resp_impl = ResponseImpl( request._writer, self._status, request.version, not self._keep_alive, self._reason) self._copy_cookies() if self._compression: if (self._compression_force or 'deflate' in request.headers.get( hdrs.ACCEPT_ENCODING, '')): resp_impl.add_compression_filter() if self._chunked: resp_impl.enable_chunked_encoding() if self._chunk_size: resp_impl.add_chunking_filter(self._chunk_size) headers = self.headers.items() for key, val in headers: resp_impl.add_header(key, val) resp_impl.send_headers() self._ping_task = asyncio.Task(self._ping(), loop=self._loop) return resp_impl def _cancel_ping(self, fut): self._ping_task.cancel() def wait(self): if not self._finish_fut: raise RuntimeError('Response is not started') return self._finish_fut def stop_streaming(self): if not self._finish_fut: raise RuntimeError('Response is not started') self._finish_fut.set_result(None) @asyncio.coroutine def _ping(self): while True: yield from asyncio.sleep(self.PING_TIME, loop=self._loop) if self._finish_fut.done(): break self.write(b':ping\n\n')
Python
0.000001
@@ -278,25 +278,37 @@ ):%0A%0A -PING_TIME +DEFAULT_PING_INTERVAL = 15%0A%0A @@ -713,32 +713,89 @@ nish_fut = None%0A + self._ping_interval = self.DEFAULT_PING_INTERVAL%0A self._pi @@ -1401,16 +1401,39 @@ Allowed( +request.method, %5B'GET'%5D )%0A%0A @@ -2634,24 +2634,391 @@ resp_impl%0A%0A + @property%0A def ping_interval(self):%0A return self._ping_interval%0A%0A @ping_interval.setter%0A def ping_interval(self, value):%0A%0A if not isinstance(value, int):%0A raise TypeError(%22ping interval must be int%22)%0A if value %3C 0:%0A raise ValueError(%22ping interval must be greater then 0%22)%0A%0A self._ping_interval = value%0A%0A def _can @@ -3490,17 +3490,22 @@ elf. -PING_TIME +_ping_interval , lo
b35affdf2183fa81e628f03a904ce80beb165de2
Fix quote output
bertil.py
bertil.py
# -*- coding: utf-8 -*- import sys import datetime import time import urllib import json import socket import re import random from slackbot.bot import Bot, listen_to, respond_to from tinydb import TinyDB db = TinyDB('/home/simon/bertil/quotes.json') def get_food(day): # Get JSON URL = 'http://www.hanssonohammar.se/veckansmeny.json' response = urllib.urlopen(URL) data = json.loads(response.read().decode('utf-8')) if day not in data: return "(no mat " + str(day) + ")" mat_today = data[day][0] if 'IKSU' not in mat_today: return "(no IKSU today)" return "\n".join(mat_today['IKSU']) @listen_to(r'^compile (.*)$') def compile(message, code): message.reply(u"Jag klarar inte av sånt längre :'(") @listen_to(r'^run (.*)$') def run(message, code): message.reply(u"Jag klarar inte av sånt längre :'(") @listen_to(r'^mat(\+*)$') def mat(message, plus): date = datetime.date.fromtimestamp(time.time() + (86400 * len(plus))) try: message.reply(u"```IKSU - {}\n{}```".format(str(date), get_food(str(date)))) except Exception as e: message.reply(u"Kom inte åt maten 😞 ({what})".format(what=e.message)) @listen_to(ur'^[e\u00E4\u00C4]r.*fredag.*\?', re.IGNORECASE) def fredag(message): if datetime.datetime.today().weekday() == 4: message.reply(u"Japp, idag är det fredag! :kreygasm:") else: message.reply(u"Nej, idag är det INTE fredag! :qq::gun:") @listen_to(r'^temp(\+*)$') def temp(message, plus): if len(plus) > 0: message.reply(u"Jag kan inte se in i framtiden... :qq::gun:") else: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect(('temp.acc.umu.se', 2345)) tmp = s.recv(1024) s.close() time, temp = tmp[:len(tmp) - 1].split('=') message.reply(u"{} C klockan {}".format(temp, time)) @listen_to(r'^quote add (.*)$') def quote_add(message, quote): db.insert({'quote': quote}) message.reply(u"Quote inlagd!") @listen_to(r'^quote$') def quote(message): quotes = db.all() if len(quotes) == 0: message.reply(u"Inga quotes inlagda...") else: quote = random.choice(quotes) message.reply(u">{}".format(quote['quote'])) def main(): bot = Bot() bot.run() if __name__ == '__main__': main()
Python
0.999424
@@ -2225,17 +2225,16 @@ reply(u%22 -%3E %7B%7D%22.form
ce7914dd35e66820248cb82760b50a31bc8a625b
Add setup.py script to install whip-neustar cli script
setup.py
setup.py
Python
0
@@ -0,0 +1,231 @@ +from setuptools import setup%0A%0Asetup(%0A name='whip-neustar',%0A version='0.1',%0A packages=%5B'whip_neustar'%5D,%0A entry_points=%7B%0A 'console_scripts': %5B%0A 'whip-neustar = whip_neustar.cli:main',%0A %5D,%0A %7D%0A)%0A
a72bc73aab4b696113bee16f5f7f9da1540bc02f
Create playerlist.py
playerlist.py
playerlist.py
Python
0.000001
@@ -0,0 +1,937 @@ +import config%0Aclass players:%0A def __init__(self):%0A self.path=config.install_path+%22reg%5C%5CN_NOW_RUNNING%5C%5CPLAYERS%5C%5CLIST.nreg%22%0A def get_names_str(self,level):%0A a=open(self.path,%22r%22)%0A b=a.readlines()%0A string=%22%22%0A for i in b:%0A string=string+i%0A a.close()%0A return string%0A def get_names_list(self, level):%0A a=open(self.path,%22r%22)%0A b=a.readlines()%0A string=%5B%5D%0A for i in b:%0A string.append(i)%0A return string%0A def add(self, name, uuid, level, entity_id):%0A a=open(self.path,%22a%22)%0A a.write(name+%22;%22+uuid+%22;%22+entity_id+%22;%22+level)%0A a.close()%0A def remove(self, name, uuid, level, entity_id):%0A a=open(self.path, %22r%22)%0A b=a.readlines()%0A b.remove(name+%22;%22+uuid+%22;%22+str(entity_id)+%22;%22+str(level))%0A a=open(self.path,%22w%22)%0A for i in b:%0A a.write(i)%0A a.close()%0A del b%0A
7fa6d8beb2637bed6b31cf1cea5fdafffc6049bf
add tests
tests/test_dfg.py
tests/test_dfg.py
Python
0.000001
@@ -0,0 +1,2839 @@ +#!/usr/bin/env python%0A%0Aimport logging%0Aimport time%0Aimport sys%0A%0Afrom os.path import join, dirname, realpath%0A%0Al = logging.getLogger(%22angr.tests.test_dfg%22)%0Al.setLevel(logging.DEBUG)%0A%0Aimport nose%0Aimport angr%0Aimport pyvex%0A%0Atest_location = str(join(dirname(realpath(__file__)), %22../../binaries/tests%22))%0A%0Adef perform_one(binary_path):%0A proj = angr.Project(join(test_location, binary_path),%0A load_options=%7B'auto_load_libs': False%7D,%0A )%0A start = time.time()%0A cfg = proj.analyses.CFG(context_sensitivity_level=2)%0A end = time.time()%0A duration = end - start%0A l.info(%22CFG generated in %25f seconds.%22, duration)%0A%0A dfg = proj.analyses.DFG(cfg=cfg)%0A nose.tools.assert_true(len(dfg.dfgs) %3C= len(cfg.nodes()))%0A for addr, d in dfg.dfgs.items():%0A nose.tools.assert_true(cfg.get_any_node(addr) is not None)%0A # We check there is not node that we ignored%0A for n in d.nodes():%0A nose.tools.assert_not_equal(n.tag, 'Ist_IMark')%0A nose.tools.assert_not_equal(n.tag, 'Ist_AbiHint')%0A nose.tools.assert_not_equal(n.tag, 'Ist_Exit')%0A if n.tag == 'Ist_Put':%0A nose.tools.assert_not_equal(n.offset, n.arch.ip_offset)%0A%0A for (a, b) in d.edges():%0A if isinstance(a, pyvex.IRExpr.IRExpr):%0A # We check that there is no edge between two expressions/const%0A nose.tools.assert_false(isinstance(b, pyvex.IRExpr.IRExpr))%0A%0A # If there is an edge coming from an expr/const it should be in%0A # the dependencies of the other node%0A # FIXME%0A # Impossible to check because of the Unop optimization in the%0A # DFG...%0A # nose.tools.assert_true(a in b.expressions)%0A elif hasattr(a, 'tmp'):%0A # If there is an edge between a tmp and another node%0A # be sure that this tmp is in the dependencies of this node%0A tmps = %5B %5D%0A for e in b.expressions:%0A if hasattr(e, 'tmp'):%0A tmps.append(e.tmp)%0A%0A nose.tools.assert_true(a.tmp in tmps)%0A%0A%0Adef test_dfg_isalnum():%0A perform_one(%22i386/isalnum%22)%0A%0A%0Adef test_dfg_counter():%0A perform_one(%22i386/counter%22)%0A%0A%0Adef test_dfg_cfg_0():%0A perform_one(%22x86_64/cfg_0%22)%0A%0A%0Adef test_dfg_fauxware():%0A perform_one(%22mips/fauxware%22)%0A%0A%0Adef run_all():%0A functions = globals()%0A all_functions = dict(filter((lambda (k, v): k.startswith('test_') and hasattr(v, '__call__')), functions.items()))%0A for f in sorted(all_functions.keys()):%0A all_functions%5Bf%5D()%0A%0A%0Aif __name__ == %22__main__%22:%0A logging.getLogger(%22angr.analyses.dfg%22).setLevel(logging.DEBUG)%0A%0A if len(sys.argv) %3E 1:%0A globals()%5B'test_' + sys.argv%5B1%5D%5D()%0A else:%0A run_all()%0A
504612eb0c3c6ec210dd6e555941c13523333f12
install without cython
setup.py
setup.py
from setuptools import setup, Extension from Cython.Build import cythonize from glob import glob library = ('primesieve', dict( sources=glob("lib/primesieve/src/primesieve/*.cpp"), include_dirs=["lib/primesieve/include"], language="c++", )) extension = Extension( "primesieve", ["primesieve/primesieve.pyx"], include_dirs = ["lib/primesieve/include"], language="c++", ) setup( name='primesieve', url = "https://github.com/hickford/primesieve-python", license = "MIT", libraries = [library], ext_modules = cythonize(extension), )
Python
0
@@ -37,43 +37,8 @@ ion%0A -from Cython.Build import cythonize%0A from @@ -217,16 +217,102 @@ ))%0A%0A +try:%0A from Cython.Build import cythonize%0Aexcept ImportError:%0A cythonize = None%0A%0A extensio @@ -385,16 +385,64 @@ ve.pyx%22%5D + if cythonize else %5B%22primesieve/primesieve.cpp%22%5D ,%0A @@ -521,16 +521,68 @@ )%0A%0A +if cythonize:%0A extension = cythonize(extension)%0A%0A setup(%0A @@ -728,26 +728,17 @@ dules = -cythonize( +%5B extensio @@ -738,13 +738,13 @@ xtension -) +%5D ,%0A)%0A
42ca323888dc13246fa7f6a01a6e29efcdb2d5c5
Add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1293 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport os%0Afrom setuptools import setup%0A%0Aimport molvs%0A%0A%0Aif os.path.exists('README.rst'):%0A long_description = open('README.rst').read()%0Aelse:%0A long_description = ''''''%0A%0Asetup(%0A name='MolVS',%0A version=molvs.__version__,%0A author=molvs.__author__,%0A author_email=molvs.__email__,%0A license=molvs.__license__,%0A url='https://github.com/mcs07/MolVS',%0A packages=%5B'molvs'%5D,%0A description='',%0A long_description=long_description,%0A keywords='chemistry cheminformatics rdkit',%0A zip_safe=False,%0A test_suite='nose.collector',%0A entry_points=%7B'console_scripts': %5B'molvs = molvs.cli:main'%5D%7D,%0A classifiers=%5B%0A 'Environment :: Console',%0A 'Intended Audience :: Developers',%0A 'Intended Audience :: Healthcare Industry',%0A 'Intended Audience :: Science/Research',%0A 'License :: OSI Approved :: MIT License',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python :: 2',%0A 'Programming Language :: Python :: 2.7',%0A 'Topic :: Scientific/Engineering',%0A 'Topic :: Scientific/Engineering :: Bio-Informatics',%0A 'Topic :: Scientific/Engineering :: Chemistry',%0A 'Topic :: Software Development :: Libraries :: Python Modules',%0A %5D,%0A)%0A
e91b1c56b252ddc3073a15209e38e73424911b62
Remove unused import.
setup.py
setup.py
#!/usr/bin/env python # # Copyright 2014 Quantopian, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from setuptools import setup, find_packages, Extension from Cython.Build import cythonize import numpy as np ext_modules = [ Extension( 'zipline.assets._assets', ['zipline/assets/_assets.pyx'], include_dirs=[np.get_include()], ), Extension( 'zipline.lib.adjusted_array', ['zipline/lib/adjusted_array.pyx'], include_dirs=[np.get_include()], ), Extension( 'zipline.lib.adjustment', ['zipline/lib/adjustment.pyx'], include_dirs=[np.get_include()], ), Extension( 'zipline.data.ffc.loaders._us_equity_pricing', ['zipline/data/ffc/loaders/_us_equity_pricing.pyx'], include_dirs=[np.get_include()], ), ] setup( name='zipline', version='0.8.0rc1', description='A backtester for financial algorithms.', author='Quantopian Inc.', author_email='[email protected]', packages=['zipline'], ext_modules=cythonize(ext_modules), scripts=['scripts/run_algo.py'], include_package_data=True, license='Apache 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Topic :: Office/Business :: Financial', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: System :: Distributed Computing', ], install_requires=[ 'Logbook', 'pytz', 'requests', 'numpy', 'pandas', 'six', 'Cython', ], extras_require={ 'talib': ["talib"], }, url="http://zipline.io" )
Python
0
@@ -631,23 +631,8 @@ tup, - find_packages, Ext
52cd79d7045a69ff5073af7ed14e9ed774de7a39
Add setup.py.
setup.py
setup.py
Python
0
@@ -0,0 +1,1026 @@ +from setuptools import setup%0A%0Asetup(%0A name='pySUMO',%0A version='0.0.0a1',%0A description='A graphical IDE for Ontologies written in SUO-Kif',%0A long_description='A graphical IDE for Ontologies written in SUO-Kif',%0A url='',%0A author='',%0A author_email='',%0A license='',%0A classifiers=%5B'Development Status :: 3 - Alpha',%0A 'Intended Audience :: Developers',%0A 'License :: ',%0A 'Programming Language :: Python :: 3.4',%0A %5D,%0A keywords='SUMO Ontologies SUO-Kif',%0A package_dir=%7B'':'src'%7D,%0A packages=%5B'pysumo', 'pysumo.logger', 'pySUMOQt', 'pySUMOQt.Designer', 'pySUMOQt.Widget'%5D,%0A install_requires=%5B'pyside'%5D,%0A extras_require=%7B'test' : %5B'pytest'%5D%7D,%0A data_files=%5B('data', %5B'data/Merge.kif', 'data/MILO.kif'%5D),%0A ('data/wordnet', %5B''.join(%5B'data/wordnet/sdata.', x%5D) for x in%0A %5B'adj', 'adv', 'noun', 'verb'%5D%5D),%5D,%0A entry_points=%7B'gui_scripts': %5B'pySUMOQt = pySUMOQt.MainWindow:main'%5D%7D,%0A)%0A%0A
7354dc674a4551169fb55bfcec208256e956d14e
Add skeleton class for conditions
components/condition.py
components/condition.py
Python
0
@@ -0,0 +1,766 @@ +%22%22%22A class to store conditions (eg. WHERE %5Bcond%5D).%22%22%22%0A%0A%0Aclass SgConditionSimple:%0A %22%22%22%0A A class to store a simple condition.%0A A simple condition is composed of 2 operands and 1 operator.%0A %22%22%22%0A %0A def __init__(self, operand-l, operator, operand-r):%0A self._op-l = operand-l%0A self._op = operator%0A self._op-r = operand-r%0A%0A%0Aclass SgCondition:%0A %22%22%22A class to store a (complex) condition.%22%22%22%0A %0A def __init__(self, expr):%0A self._expr = expr%0A self._conds = %5B%5D # simple conditions%0A self._conns = %5B%5D # connectors (eg. and, or)%0A # TODO(lnishan): parse expr into _conds and _conns.%0A %0A def Evaluate(self, fields, row):%0A # TODO(lnishan): Evaluate the (complex) condition.%0A return True%0A
8e3de37e14013dc371064eec5102f682b32d0cfc
modify cwd so setup.py can be run from anywhere
setup.py
setup.py
#!/usr/bin/python import os # old crosscat setup.py from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext # venture setup.py # from distutils.core import setup, Extension def generate_sources(dir_files_tuples): sources = [] for dir, files in dir_files_tuples: full_files = [ os.path.join(dir, file) for file in files ] sources.extend(full_files) return sources # locations pyx_src_dir = 'crosscat/cython_code' cpp_src_dir = 'cpp_code/src' include_dirs = ['cpp_code/include/CrossCat'] # specify sources ContinuousComponentModel_pyx_sources = ['ContinuousComponentModel.pyx'] ContinuousComponentModel_cpp_sources = [ 'utils.cpp', 'numerics.cpp', 'RandomNumberGenerator.cpp', 'ComponentModel.cpp', 'ContinuousComponentModel.cpp', ] ContinuousComponentModel_sources = generate_sources([ (pyx_src_dir, ContinuousComponentModel_pyx_sources), (cpp_src_dir, ContinuousComponentModel_cpp_sources), ]) # MultinomialComponentModel_pyx_sources = ['MultinomialComponentModel.pyx'] MultinomialComponentModel_cpp_sources = [ 'utils.cpp', 'numerics.cpp', 'RandomNumberGenerator.cpp', 'ComponentModel.cpp', 'MultinomialComponentModel.cpp', ] MultinomialComponentModel_sources = generate_sources([ (pyx_src_dir, MultinomialComponentModel_pyx_sources), (cpp_src_dir, MultinomialComponentModel_cpp_sources), ]) # State_pyx_sources = ['State.pyx'] State_cpp_sources = [ 'utils.cpp', 'numerics.cpp', 'RandomNumberGenerator.cpp', 'DateTime.cpp', 'View.cpp', 'Cluster.cpp', 'ComponentModel.cpp', 'MultinomialComponentModel.cpp', 'ContinuousComponentModel.cpp', 'State.cpp', ] State_sources = generate_sources([ (pyx_src_dir, State_pyx_sources), (cpp_src_dir, State_cpp_sources), ]) # create exts ContinuousComponentModel_ext = Extension( "crosscat.cython_code.ContinuousComponentModel", libraries = ['boost_random'], extra_compile_args = [], sources=ContinuousComponentModel_sources, include_dirs=include_dirs, language="c++") MultinomialComponentModel_ext = Extension( "crosscat.cython_code.MultinomialComponentModel", libraries = ['boost_random'], extra_compile_args = [], sources=MultinomialComponentModel_sources, include_dirs=include_dirs, language="c++") State_ext = Extension( "crosscat.cython_code.State", libraries = ['boost_random'], extra_compile_args = [], sources=State_sources, include_dirs=include_dirs, language="c++") # ext_modules = [ ContinuousComponentModel_ext, MultinomialComponentModel_ext, State_ext, ] packages = ['crosscat', 'crosscat.utils', 'crosscat.convergence_analysis', 'crosscat.jsonrpc_http'] setup( name='CrossCat', version='0.1', author='MIT.PCP', url='TBA', long_description='TBA.', packages=packages, package_dir={'crosscat':'crosscat/'}, ext_modules=ext_modules, cmdclass = {'build_ext': build_ext} )
Python
0
@@ -11,18 +11,16 @@ /python%0A -%0A%0A import o @@ -24,33 +24,8 @@ t os -%0A%0A# old crosscat setup.py %0Afro @@ -140,74 +140,8 @@ xt%0A%0A -# venture setup.py%0A# from distutils.core import setup, Extension%0A%0A %0Adef @@ -405,16 +405,141 @@ urces%0A%0A%0A +# make sure cwd is correct%0Athis_file = os.path.abspath(__file__)%0Athis_dir = os.path.split(this_file)%5B0%5D%0Aos.chdir(this_dir)%0A%0A%0A # locati
a0607d0f9b7c08ddcf81459868b33761d8ed5bb2
Set up the dependency
setup.py
setup.py
Python
0.000008
@@ -0,0 +1,1614 @@ +# Copyright 2021 The KerasNLP Authors%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# https://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22Setup script.%22%22%22%0A%0Afrom setuptools import find_packages%0Afrom setuptools import setup%0A%0Asetup(%0A name=%22keras-nlp%22,%0A description=%22High-level NLP libraries based on Keras%22,%0A url=%22https://github.com/keras-team/keras-nlp%22,%0A author=%22Keras team%22,%0A author_email=%[email protected]%22,%0A license=%22Apache License 2.0%22,%0A # tensorflow isn't a dependency because it would force the%0A # download of the gpu version or the cpu version.%0A # users should install it manually.%0A install_requires=%5B%22packaging%22, %22tensorflow%22, %22numpy%22%5D,%0A extras_require=%7B%22tests%22: %5B%22flake8%22, %22isort%22, %22black%22,%5D,%7D,%0A classifiers=%5B%0A %22Programming Language :: Python%22,%0A %22Programming Language :: Python :: 3.7%22,%0A %22Operating System :: Unix%22,%0A %22Operating System :: Microsoft :: Windows%22,%0A %22Operating System :: MacOS%22,%0A %22Intended Audience :: Science/Research%22,%0A %22Topic :: Scientific/Engineering%22,%0A %22Topic :: Software Development%22,%0A %5D,%0A packages=find_packages(exclude=(%22tests%22,)),%0A)
d9be2b8a61a88f0ee228c08d1f277770602840b1
Add python version for compress
compression/compress.py
compression/compress.py
Python
0.000002
@@ -0,0 +1,486 @@ +%0A%0A%0Adef compress(uncompressed):%0A count = 1%0A compressed = %22%22%0A if not uncompressed:%0A return compressed%0A letter = uncompressed%5B0%5D%0A for nx in uncompressed%5B1:%5D:%0A if letter == nx:%0A count = count + 1%0A else:%0A compressed += %22%7B%7D%7B%7D%22.format(letter, count)%0A count = 1%0A letter = nx%0A compressed += %22%7B%7D%7B%7D%22.format(letter, count)%0A return compressed%0A %0Aif __name__ == %22__main__%22:%0A print(compress(%22aaabbbccccd%22))
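The compress above emits each run as a character followed by its count ("aaabbbccccd" becomes "a3b3c4d1"). For illustration, a sketch of the inverse transform, assuming the compressed characters are never digits:

import re

def decompress(compressed):
    # Rebuild each run from its (character, count) pair; counts may be multi-digit.
    return "".join(ch * int(count)
                   for ch, count in re.findall(r"(\D)(\d+)", compressed))

print(decompress("a3b3c4d1"))  # aaabbbccccd
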
d480c2738bb4d0ae72643fc9bc1f911cb630539c
add 12-list.py
python/12-list.py
python/12-list.py
Python
0.000003
@@ -0,0 +1,429 @@ +#!/usr/bin/env python%0A%0Aimport math%0A%0Alist = %5B'physics', 'chemistry', 1997, 2001%5D;%0A%0Aprint %22list%5B2%5D = %22, list%5B2%5D%0Aprint %22list%5B1:3%5D = %22, list%5B1:3%5D%0A%0Alist%5B2%5D = %22math%22;%0A%0A%0Aprint %22update, list%5B2%5D = %22, list%5B2%5D%0A%0Adel list%5B2%5D%0Aprint %22delete, list%5B2%5D = %22, list%5B2%5D%0A%0Aprint %22length of delete:%22, len(list)%0A%0Aif ('physics' in list):%0A print %22physics is in list%22%0Aelse:%0A print %22physics is not in list%22%0A%0A%0Afor elem in list:%0A print %22elem :%22, elem%0A%0A
240b22d0b078951b7d1f0df70156b6e2041a530f
fix setup.py dor pypi.
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2009 Benoit Chesneau <[email protected]> # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. import os import sys from setuptools import setup data_files = [] root_dir = os.path.dirname(__file__) if root_dir != '': os.chdir(root_dir) for dirpath, dirnames, filenames in os.walk('app-template'): for i, dirname in enumerate(dirnames): if dirname.startswith('.'): del dirnames[i] data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]]) setup( name = 'Couchapp', version = '0.1.4', url = 'http://github.com/benoitc/couchapp/tree/master', license = 'Apache License 2', author = 'Benoit Chesneau', author_email = '[email protected]', description = 'Standalone CouchDB Application Development Made Simple.', long_description = """CouchApp is a set of helpers and a jQuery plugin that conspire to get you up and running on CouchDB quickly and correctly. It brings clarity and order to the freedom of CouchDB’s document-based approach.""", keywords = 'couchdb couchapp', platforms = 'any', zip_safe = False, packages= ['couchapp'], package_dir={'couchapp': 'python/couchapp'}, data_files = data_files, include_package_data = True, scripts = ['python/couchapp/bin/couchapp'], classifiers = [ 'License :: OSI Approved :: Apache Software License', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Development Status :: 4 - Beta', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Topic :: Database', 'Topic :: Utilities', ], setup_requires = [ 'setuptools>=0.6c9', 'couchdb>=0.5dev', ] )
Python
0
@@ -1132,9 +1132,9 @@ chDB -%E2%80%99 +' s%0A @@ -1884,24 +1884,16 @@ chdb -%3E=0.5dev ',%0A %5D %0A)%0A%0A @@ -1888,12 +1888,13 @@ ',%0A %5D +, %0A)%0A%0A
3ada80358a059b3a5ee4dd4ceed572f933a1ec67
Create setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1795 @@ +from setuptools import setup, find_packages%0A# To use a consistent encoding%0Afrom codecs import open%0Afrom os import path%0A%0Ahere = path.abspath(path.dirname(__file__))%0A%0A# Get the long description from the README file%0Awith open(path.join(here, 'README.rst'), encoding='utf-8') as f:%0A long_description = f.read()%0A%0Asetup(%0A name='compare-with-remote',%0A%0A version='0.1',%0A%0A description=' Compare local script output with remote script output',%0A long_description=long_description,%0A%0A url='https://github.com/guettli/compare-with-remote/',%0A%0A author='Thomas Guettler',%0A author_email='[email protected]',%0A%0A license='Apache2',%0A%0A # See https://pypi.python.org/pypi?%253Aaction=list_classifiers%0A classifiers=%5B%0A # How mature is this project? Common values are%0A # 3 - Alpha%0A # 4 - Beta%0A # 5 - Production/Stable%0A 'Development Status :: 3 - Alpha',%0A%0A # Indicate who your project is intended for%0A 'Intended Audience :: Developers',%0A%0A # Pick your license as you wish (should match %22license%22 above)%0A 'License :: OSI Approved :: Apache2',%0A%0A # Specify the Python versions you support here. In particular, ensure%0A # that you indicate whether you support Python 2, Python 3 or both.%0A 'Programming Language :: Python :: 2',%0A 'Programming Language :: Python :: 2.6',%0A 'Programming Language :: Python :: 2.7',%0A 'Programming Language :: Python :: 3',%0A 'Programming Language :: Python :: 3.2',%0A 'Programming Language :: Python :: 3.3',%0A 'Programming Language :: Python :: 3.4',%0A %5D,%0A%0A entry_points=%7B%0A 'console_scripts': %5B%0A 'sample=compare-with-remote:compare_with_remote/compare_with_remote:main',%0A %5D,%0A %7D,%0A)%0A
c5b12c2f93c5e1da1fa5011ecc089f46febe11d1
version bump
setup.py
setup.py
from setuptools import setup, find_packages setup( name='chromote', version='0.1.1', description="Python Wrapper for the Google Chrome Remote Debugging Protocol", author='Chris Seymour', packages=find_packages(), install_requires=['requests', 'websocket-client'] )
Python
0.000001
@@ -83,17 +83,17 @@ on='0.1. -1 +2 ',%0A d
606853d904c1967b41b30d828940c4aa7ab4c0ab
add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1062 @@ +#!/usr/bin/env python%0A#%0A# Licensed to the Apache Software Foundation (ASF) under one%0A# or more contributor license agreements. See the NOTICE file%0A# distributed with this work for additional information%0A# regarding copyright ownership. The ASF licenses this file%0A# to you under the Apache License, Version 2.0 (the%0A# %22License%22); you may not use this file except in compliance%0A# with the License. You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing,%0A# software distributed under the License is distributed on an%0A# %22AS IS%22 BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY%0A# KIND, either express or implied. See the License for the%0A# specific language governing permissions and limitations%0A# under the License.%0A#%0Afrom distutils.core import setup%0A%0Asetup(name=%22fusion%22,%0A version=%220.1.0%22,%0A author=%22kgiusti%22,%0A author_email=%[email protected]%22,%0A packages=%5B%22fusion%22%5D,%0A package_dir=%7B%22fusion%22: %22python%22%7D,%0A license=%22Apache Software License%22)%0A
90ec011ebec93f4c0b0e93fc831b0f782be1b13e
Add the setup.py PIP install config file.
setup.py
setup.py
Python
0
@@ -0,0 +1,214 @@ +from setuptools import setup%0A%0Asetup(%0A name='SedLex',%0A version='0.1',%0A install_requires=%5B%0A 'html5lib',%0A 'beautifulsoup4',%0A 'requests',%0A 'jinja2',%0A 'python-gitlab'%0A %5D%0A)%0A
fa88dac9c35fc473ebfea05926e0200926251d9d
Create setup.py
setup.py
setup.py
Python
0
@@ -0,0 +1,564 @@ +#!/usr/bin/env python%0A%0Afrom distutils.core import setup%0A%0Asetup(name='RPiProcessRig',%0A version='1.0',%0A description='A simple industrial rig that can be used for experimentation with a variety of different control algortithms',%0A author='Alexander Leech',%0A author_email='[email protected]',%0A license = 'MIT',%0A keywords = %22Raspberry Pi Process Control Industrial Rig Hardware Experimentation%22,%0A url='https://github.com/FlaminMad/RPiProcessRig',%0A packages=%5B'yaml', 'pymodbus','spidev','RPi.GPIO'%5D,%0A py_modules=%0A )%0A
c0989ce01ee62367a92eb48855a42c3c4986de84
Add setup.py.
setup.py
setup.py
Python
0
@@ -0,0 +1,1331 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Aimport codecs%0Aimport os%0A%0Afrom setuptools import find_packages, setup%0A%0A%0Adef read(file_name):%0A file_path = os.path.join(os.path.dirname(__file__), file_name)%0A return codecs.open(file_path, encoding='utf-8').read()%0A%0A%0APACKAGE = %22add_another%22%0ANAME = %22django-add-another%22%0ADESCRIPTION = %22'Add another' functionality outside Django admin%22%0AAUTHOR = %22Karim Amzil%22%0AAUTHOR_EMAIL = %[email protected]%22%0AURL = %22https://github.com/djkartsa/django-add-another%22%0AVERSION = __import__(PACKAGE).__version__%0A%0A%0Asetup(%0A name=NAME,%0A version=VERSION,%0A description=DESCRIPTION,%0A long_description=read(%22README.md%22),%0A author=AUTHOR,%0A author_email=AUTHOR_EMAIL,%0A license=%22LICENSE.txt%22,%0A url=URL,%0A packages=find_packages(),%0A include_package_data=True,%0A classifiers=%5B%0A %22Development Status :: 3 - Alpha%22,%0A %22Environment :: Web Environment%22,%0A %22Intended Audience :: Developers%22,%0A %22License :: OSI Approved :: MIT License%22,%0A %22Operating System :: OS Independent%22,%0A %22Programming Language :: Python%22,%0A %22Programming Language :: Python :: 2%22,%0A %22Programming Language :: Python :: 3%22,%0A %22Framework :: Django%22,%0A %5D,%0A install_requires=%5B%0A 'Django',%0A %5D,%0A zip_safe=False,%0A)%0A
7634b58b1bd0fc2eee121bad2a20b61077a48d7b
Update setup.py
setup.py
setup.py
#!/usr/bin/env python import sys from distutils.core import setup try: import fontTools except: print "*** Warning: defcon requires FontTools, see:" print " fonttools.sf.net" try: import robofab except: print "*** Warning: defcon requires RoboFab, see:" print " robofab.com" #if "sdist" in sys.argv: # import os # import subprocess # import shutil # docFolder = os.path.join(os.getcwd(), "documentation") # # remove existing # doctrees = os.path.join(docFolder, "build", "doctrees") # if os.path.exists(doctrees): # shutil.rmtree(doctrees) # # compile # p = subprocess.Popen(["make", "html"], cwd=docFolder) # p.wait() # # remove doctrees # shutil.rmtree(doctrees) setup(name="defconAppKit", version="0.1", description="A set of interface objects for working with font data.", author="Tal Leming", author_email="[email protected]", url="http://code.typesupply.com", license="MIT", packages=[ "defconAppKit", "defconAppKit.controls", "defconAppKit.representationFactories", "defconAppKit.tools", "defconAppKit.windows" ], package_dir={"":"Lib"} )
Python
0
@@ -939,30 +939,46 @@ http +s :// -code.typesupply.com +github.com/typesupply/defconAppKit %22,%0A @@ -1211,8 +1211,9 @@ %22Lib%22%7D%0A) +%0A
3c5802bda34ed9c772f7bb2e33b29f265440f286
Add a simple setup.py.
setup.py
setup.py
Python
0
@@ -0,0 +1,947 @@ +import os%0Afrom setuptools import setup, find_packages%0A%0A%0AREADME_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'README.markdown')%0Adescription = 'django-goodfields makes creating good form fields easy.'%0Along_description = os.path.exists(README_PATH) and open(README_PATH).read() or description%0A%0Asetup(%0A name='django-goodfields',%0A version='0.0.1',%0A description=description,%0A long_description=long_description,%0A author='Steve Losh',%0A author_email='[email protected]',%0A url='http://bitbucket.org/dwaiter/django-goodfields/',%0A packages=find_packages(),%0A classifiers=%5B%0A 'Development Status :: 4 - Beta',%0A 'Environment :: Web Environment',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved',%0A 'Operating System :: OS Independent',%0A 'Framework :: Django',%0A 'Programming Language :: Python',%0A 'Programming Language :: Python :: 2.6',%0A %5D,%0A)%0A
26cc1c4ff2b5c0de8b83bb9bd088d80f5650dda1
Create setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,369 @@ +__author__ = 'Alumne'%0A%0Afrom distutils.core import setup%0A%0Asetup(name='PEACHESTORE',%0A version='python 3',%0A author='albert cuesta',%0A author_email='[email protected]',%0A url='https://github.com/albertcuesta/PEACHESTORE',%0A description='es una tienda online de aplicaciones moviles similar a google play',%0A packager=%5B'PEACHSTORE'%5D%0A )%0A
d64367eda03772997af21792e82a2825848c1ae6
add tests for splat utils
astroquery/splatalogue/tests/test_utils.py
astroquery/splatalogue/tests/test_utils.py
Python
0
@@ -0,0 +1,939 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst%0Afrom ... import splatalogue%0Afrom astropy import units as u%0Aimport numpy as np%0Afrom .test_splatalogue import patch_post%0Afrom .. import utils%0A%0Adef test_clean(patch_post):%0A x = splatalogue.Splatalogue.query_lines(114*u.GHz,116*u.GHz,chemical_name=' CO ')%0A c = utils.clean_column_headings(x)%0A assert 'Resolved QNs' not in c.colnames%0A assert 'QNs' in c.colnames%0A%0Adef test_merge(patch_post):%0A x = splatalogue.Splatalogue.query_lines(114*u.GHz,116*u.GHz,chemical_name=' CO ')%0A c = utils.merge_frequencies(x)%0A assert 'Freq' in c.colnames%0A assert np.all(c%5B'Freq'%5D %3E 0)%0A%0Adef test_minimize(patch_post):%0A x = splatalogue.Splatalogue.query_lines(114*u.GHz,116*u.GHz,chemical_name=' CO ')%0A c = utils.minimize_table(x)%0A%0A assert 'Freq' in c.colnames%0A assert np.all(c%5B'Freq'%5D %3E 0)%0A assert 'Resolved QNs' not in c.colnames%0A assert 'QNs' in c.colnames%0A
b72f8a9b0d9df7d42c43c6a294cc3aab2cb91641
Add missing migrations for limit_choices_to on BlogPage.author
blog/migrations/0002_auto_20190605_1104.py
blog/migrations/0002_auto_20190605_1104.py
Python
0.000001
@@ -0,0 +1,675 @@ +# Generated by Django 2.2.2 on 2019-06-05 08:04%0A%0Aimport blog.abstract%0Afrom django.conf import settings%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('blog', '0001_squashed_0006_auto_20180206_2239'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='blogpage',%0A name='author',%0A field=models.ForeignKey(blank=True, limit_choices_to=blog.abstract.limit_author_choices, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='author_pages', to=settings.AUTH_USER_MODEL, verbose_name='Author'),%0A ),%0A %5D%0A
7fdf796440c3a4ed84ffcb4343cd92f0013c8b1f
add current client, supports basic chatting
slack.py
slack.py
Python
0
@@ -0,0 +1,156 @@ +from slackclient import SlackClient%0A%0Adef get_client(token='4577027817.4577075131'):%0A return SlackClient(token)%0A%0Aprint get_client().api_call('api.test')%0A%0A
f1c1206af29ee0f7be8b7477cd409f2844c816b3
add Todo generator
todo/generator.py
todo/generator.py
Python
0
@@ -0,0 +1,1643 @@ +# coding=utf8%0A%0A%22%22%22%0AGenerator from todo object to todo format string%0A%22%22%22%0A%0Afrom models import Task%0Afrom models import Todo%0A%0A%0Aclass Generator(object):%0A %22%22%22%0A Generator from todo object to readable string.%0A %22%22%22%0A%0A newline = %22%5Cn%22%0A%0A def gen_task_id(self, task_id):%0A %22%22%22%0A int =%3E str e.g. 12 =%3E '12.'%0A %22%22%22%0A return str(task_id) + %22.%22%0A%0A def gen_task_done(self, done):%0A %22%22%22%0A boolen =%3E str e.g. True =%3E '%5Bx%5D'%0A %22%22%22%0A if done is True:%0A return '%5Bx%5D'%0A else:%0A return ' '%0A%0A def gen_task_content(self, content):%0A %22%22%22%0A str =%3E str%0A %22%22%22%0A return content%0A%0A def gen_name(self, name):%0A %22%22%22%0A str =%3E str e.g. 'name' =%3E 'name%5Cn------'%0A %22%22%22%0A return name + self.newline + '-' * len(name)%0A%0A def gen_task(self, task):%0A %22%22%22%0A Task =%3E str%0A e.g. Task(1, %22Write email%22, True) =%3E '1. %5Bx%5D Write email'%0A %22%22%22%0A lst = %5B%5D%0A lst.append(self.gen_task_id(task.id))%0A lst.append(self.gen_task_done(task.done))%0A lst.append(self.gen_task_content(task.content))%0A return %22 %22.join(lst)%0A%0A def generate(self, todo):%0A %22%22%22%0A Generate todo object to string.%0A%0A e.g. Todo(name, tasks) =%3E %221. (x) do something...%22%0A %22%22%22%0A lst = %5B%5D%0A%0A if todo.name:%0A head = self.gen_name(todo.name)%0A else:%0A head = %22%22%0A%0A lst.append(head)%0A%0A for task in todo.tasks:%0A lst.append(self.gen_task(task))%0A%0A return self.newline.join(lst)%0A%0A%0Agenerator = Generator() # build generator%0A
3c290803bbd6d7401903506b3a27cf2c9ebad0b4
Add ChatInfoFormatter
bot/action/standard/info/formatter/chat.py
bot/action/standard/info/formatter/chat.py
Python
0
@@ -0,0 +1,2510 @@ +from bot.action.standard.info.formatter import ApiObjectInfoFormatter%0Afrom bot.action.util.format import ChatFormatter%0Afrom bot.api.api import Api%0Afrom bot.api.domain import ApiObject%0A%0A%0Aclass ChatInfoFormatter(ApiObjectInfoFormatter):%0A def __init__(self, api: Api, chat: ApiObject, bot_user: ApiObject, user: ApiObject):%0A super().__init__(api, chat)%0A self.bot_user = bot_user%0A self.user = user%0A%0A def format(self, full_info: bool = False):%0A %22%22%22%0A :param full_info: If True, adds more info about the chat. Please, note that this additional info requires%0A to make THREE synchronous api calls.%0A %22%22%22%0A chat = self.api_object%0A if full_info:%0A self.__format_full(chat)%0A else:%0A self.__format_simple(chat)%0A%0A def __format_full(self, chat: ApiObject):%0A chat = self.api.getChat(chat_id=chat.id)%0A description = chat.description%0A invite_link = self._invite_link(chat.invite_link)%0A pinned_message = self._pinned_message(chat.pinned_message)%0A sticker_set_name = self._group_sticker_set(chat.sticker_set_name)%0A member_count = self.api.getChatMembersCount(chat_id=chat.id)%0A admins = self.api.getChatAdministrators(chat_id=chat.id)%0A admin_count = len(admins)%0A me_admin = self._yes_no(self._is_admin(self.bot_user, admins))%0A you_admin = self._yes_no(self._is_admin(self.user, admins))%0A self.__format_simple(chat)%0A self._add_info(%22Description%22, description)%0A self._add_info(%22Invite link%22, invite_link)%0A self._add_info(%22Pinned message%22, pinned_message)%0A self._add_info(%22Group sticker set%22, sticker_set_name)%0A self._add_info(%22Members%22, member_count)%0A self._add_info(%22Admins%22, admin_count, %22(not counting other bots)%22)%0A self._add_info(%22Am I admin%22, me_admin)%0A self._add_info(%22Are you admin%22, you_admin)%0A%0A def __format_simple(self, chat: ApiObject):%0A full_data = ChatFormatter(chat).full_data%0A title = chat.title%0A username = self._username(chat.username)%0A _type = chat.type%0A _id = chat.id%0A all_members_are_admins = self._yes_no(chat.all_members_are_administrators)%0A self._add_title(full_data)%0A self._add_empty()%0A self._add_info(%22Title%22, title)%0A self._add_info(%22Username%22, username)%0A self._add_info(%22Type%22, _type)%0A self._add_info(%22Id%22, _id)%0A self._add_info(%22All members are admins%22, all_members_are_admins)%0A
1ad56e631c29869d127931b555d0b366f7e75641
Add test for fftpack.
numpy/fft/tests/test_fftpack.py
numpy/fft/tests/test_fftpack.py
Python
0
@@ -0,0 +1,267 @@ +import sys%0Afrom numpy.testing import *%0Aset_package_path()%0Afrom numpy.fft import *%0Arestore_path()%0A%0Aclass test_fftshift(NumpyTestCase):%0A def check_fft_n(self):%0A self.failUnlessRaises(ValueError,fft,%5B1,2,3%5D,0)%0A%0Aif __name__ == %22__main__%22:%0A NumpyTest().run()%0A
ab6fa9717b092f3b8eea4b70920a1d7cef042b69
Restore missing __main__
certchecker/__main__.py
certchecker/__main__.py
Python
0.000169
@@ -0,0 +1,290 @@ +import click%0A%0Afrom certchecker import CertChecker%0A%[email protected]()%[email protected](%0A '--profile',%0A default='default',%0A help=%22Section name in your boto config file%22%0A)%0Adef main(profile):%0A cc = CertChecker(profile)%0A print(cc.result)%0A%0Aif __name__ == %22__main__%22:%0A print(main())%0A
b9feeb2a37f0596b48f9582e8953d29485167fc8
Add an event-driven recording tool
tools/sofa-edr.py
tools/sofa-edr.py
Python
0.000002
@@ -0,0 +1,1975 @@ +#!/usr/bin/env python3%0Aimport subprocess%0Aimport time%0Aimport argparse%0A%0Aif __name__ == '__main__':%0A bwa_is_recorded = False%0A smb_is_recorded = False%0A htvc_is_recorded = False%0A%0A parser = argparse.ArgumentParser(description='A SOFA wrapper which supports event-driven recording.')%0A parser.add_argument('--trace-points', default='', metavar='Comma-sperated string list for interested keywords, e.g., %22keyword1,keyword2%22')%0A args = parser.parse_args()%0A%0A while True:%0A time.sleep(3)%0A print(time.time())%0A with open('/home/ubuntu/pbrun_error.log') as f:%0A lines = f.readlines()%0A lc = 0 %0A for line in lines:%0A #print('Line%25d'%25lc, line)%0A lc = lc + 1%0A if lc %3C 6:%0A continue%0A if line.find('BWA') != -1 and not smb_is_recorded:%0A bwa_is_recorded = True%0A print('BWA begins at ', time.time()) %0A time.sleep(120)%0A subprocess.call('sofa record %22sleep 20%22 --profile_all_cpus --logdir=sofalog-bwa ', shell=True)%0A break%0A if line.find('BQSR') != -1 and not smb_is_recorded:%0A smb_is_recorded = True%0A print('SMB begins at ', time.time()) %0A time.sleep(120)%0A subprocess.call('sofa record %22sleep 20%22 --profile_all_cpus --logdir=sofalog-smb ', shell=True)%0A break%0A if line.find('HaplotypeCaller') != -1 and not htvc_is_recorded: %0A htvc_is_recorded = True%0A print('HTVC begins at ', time.time()) %0A time.sleep(120)%0A subprocess.call('sofa record %22sleep 20%22 --profile_all_cpus --logdir=sofalog-htvc ', shell=True)%0A break%0A if bwa_is_recorded and smb_is_recorded and htvc_is_recorded:%0A print(%22Tracing is done.%22) %0A break%0A
42ab52b6d077443fac20ea872b503589f6ddb3f7
Create pyPostings.py
pyPostings.py
pyPostings.py
Python
0
@@ -0,0 +1,913 @@ +import re%0Aimport string%0A%0Adef posting(corpus):%0A posting = %5B%5D%0A %0A tokens = tokenize(corpus)%0A for index, token in enumerate(tokens):%0A posting.append(%5Btoken, (index+1)%5D)%0A%0A return posting%0A%0Adef posting_list(corpus):%0A posting_list = %7B%7D%0A %0A tokens = tokenize(corpus)%0A for index, token in enumerate(tokens):%0A if token not in posting_list:%0A posting_list%5Btoken%5D = %5B(index + 1)%5D%0A else:%0A posting_list%5Btoken%5D.append(index + 1)%0A %0A return posting_list%0A%0A%0Adef tokenize(corpus):%0A assert type(corpus) is str, 'Corpus must be a string of characters.'%0A # split%0A tokenized = corpus.split()%0A # normalize%0A for index, token in enumerate(tokenized):%0A tokenized%5Bindex%5D = re.sub('%5CW%5CZ', '', tokenized%5Bindex%5D)%0A tokenized%5Bindex%5D = re.sub('%5CA%5CW', '', tokenized%5Bindex%5D)%0A return tokenized%0A%0Adef not_string(a):%0A return a != %22 %22 and a != %22%22%0A
ee39e69fe5d6e93844f47eaff0d9547622600fa7
make parsing times easier
py/phlsys_strtotime.py
py/phlsys_strtotime.py
Python
0.000002
@@ -0,0 +1,2836 @@ +#!/usr/bin/env python%0A# encoding: utf-8%0A%0A%22%22%22A poor substitute for PHP's strtotime function.%22%22%22%0A%0Aimport datetime%0A%0A%0Adef describeDurationStringToTimeDelta():%0A return str('time can be specified like %225 hours 20 minutes%22, use '%0A 'combinations of seconds, minutes, hours, days, weeks. '%0A 'each unit should only appear once. you may use floating '%0A 'point numbers and negative numbers. '%0A 'e.g. %221 weeks -1.5 days%22.')%0A%0A%0Adef durationStringToTimeDelta(s):%0A %22%22%22Return a datetime.timedelta based on the supplied string 's'.%0A%0A Usage examples:%0A %3E%3E%3E str(durationStringToTimeDelta(%221 seconds%22))%0A '0:00:01'%0A%0A %3E%3E%3E str(durationStringToTimeDelta(%222 minutes%22))%0A '0:02:00'%0A%0A %3E%3E%3E str(durationStringToTimeDelta(%222 hours 2 minutes%22))%0A '2:02:00'%0A%0A %3E%3E%3E str(durationStringToTimeDelta(%221 days 2 hours 2 minutes%22))%0A '1 day, 2:02:00'%0A%0A %3E%3E%3E str(durationStringToTimeDelta(%221.5 days%22))%0A '1 day, 12:00:00'%0A%0A %3E%3E%3E str(durationStringToTimeDelta(%221 days -1 hours%22))%0A '23:00:00'%0A%0A %3E%3E%3E str(durationStringToTimeDelta(%221 milliseconds%22))%0A '0:00:00.001000'%0A%0A :s: a string in the appropriate time format%0A :returns: a datetime.timedelta%0A%0A %22%22%22%0A clauses = s.split()%0A if len(clauses) %25 2:%0A raise ValueError(%22odd number of clauses: %22 + s)%0A pairs = zip(clauses%5B::2%5D, clauses%5B1::2%5D)%0A d = %7Bp%5B1%5D: float(p%5B0%5D) for p in pairs%7D%0A if len(d) != len(pairs):%0A raise ValueError(%22duplicated clauses: %22 + s)%0A return datetime.timedelta(**d)%0A%0A%0A#------------------------------------------------------------------------------%0A# Copyright (C) 2012 Bloomberg L.P.%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to%0A# deal in the Software without restriction, including without limitation the%0A# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or%0A# sell copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING%0A# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS%0A# IN THE SOFTWARE.%0A#------------------------------- END-OF-FILE ----------------------------------%0A
2cf2a89bf3c7ccf667e4bcb623eeb6d0e1ea37bb
print something pr1
python/py1.py
python/py1.py
Python
0.999948
@@ -0,0 +1,349 @@ +#!/usr/bin/env python3%0A%0A%22%22%22%0AIf we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.%0A%0AFind the sum of all the multiples of 3 or 5 below 1000.%0A%22%22%22%0A%0Athing = %5B%5D%0A%0Afor urmom in range(1,1000):%0A if urmom %25 5 == 0 or urmom %25 3 == 0:%0A thing.append(urmom)%0A%0Aprint(sum(thing))%0A%0A
784fd8b08ee0f268350a2003a9c06522c0678874
Add python code for doing tensor decomposition with scikit-tensor.
python/run.py
python/run.py
Python
0
@@ -0,0 +1,1093 @@ +import logging%0Aimport numpy%0Afrom numpy import genfromtxt%0Afrom sktensor import sptensor, cp_als%0A# Set logging to DEBUG to see CP-ALS information%0Alogging.basicConfig(level=logging.DEBUG)%0A%0Adata = genfromtxt('../datasets/movielens-synthesized/ratings-synthesized-50k.csv', delimiter=',')%0A%0A# we need to convert data into two lists; subscripts/coordinates and values%0An = len(data)%0A%0Asubs_1 = numpy.append(data%5B:,:2%5D, numpy.zeros((n, 1)), 1)%0Asubs_2 = numpy.append(data%5B:,:2%5D, numpy.ones((n, 1)), 1)%0A%0Asubs = numpy.vstack(%5Bsubs_1, subs_2%5D)%0Asubs = subs.astype(int)%0A%0Avals = numpy.hstack(%5Bdata%5B:,2%5D, data%5B:, 3%5D%5D)%0Avals = vals.flatten()%0A%0A# convert subs tuple of arrays (rows, cols, tubes)%0Asubs = (subs%5B:,0%5D, subs%5B:,1%5D, subs%5B:,2%5D)%0A%0A# load into sparse tensor%0AT = sptensor(subs, vals)%0A%0A# Decompose tensor using CP-ALS%0AP, fit, itr, exectimes = cp_als(T, 500, init='random')%0A%0AP = P.totensor()%0A%0Aprint P%5B1,1193,0%5D # 5%0Aprint P%5B1,661, 0%5D # 3%0Aprint P%5B1,594, 1%5D # 1.6%0Aprint P%5B1,1193, 1%5D # 2.2%0A%0A%0A%0A#print numpy.allclose(T, P)%0A#print P.U%5B0%5D.shape%0A#print %22-------%22%0A##print P.U%5B1%5D.shape%0A#print %22-------%22%0A#print P.U%5B2%5D.shape%0A
00413958a12607aab942c98581b1a9e6d682ef28
Create Single-Prime.py
python/Single-Prime.py
python/Single-Prime.py
Python
0.000003
@@ -0,0 +1,878 @@ +#By Isabelle.%0A#Checks a single number and lists all of its factors (except 1 and itself)%0A%0Aimport math%0A%0Anum = int(input(%22Pick a number to undergo the primality test!%5Cn%22))%0Aroot = int(round(math.sqrt(num)))%0Aprime = True%0Afor looper in range(2,root + 1): #53225 should normally be 3%0A%09if num %25 2 == 0 or num %25 3 == 0 or num %25 5 == 0: #End if number is even%0A%09%09print(%22%7B%7D is divisible by a prime number from 2 and 5. Silly you, stop wasting my time.%22.format(num))%0A%09%09prime = False%0A%09%09break%0A%09elif looper %25 2 == 0 or looper %25 3 == 0 or looper %25 5 == 0:%0A%09%09continue%0A%09else:%0A%09%09if num %25 looper == 0:%0A%09%09%09print(%22%7B%7D can be divided by %7B%7D.%22.format(num, looper))%0A%09%09%09looper += 1%0A%09%09%09prime = False%0A%09%09%09break%0A%09%09else:%0A%09%09%09print(%22%7B%7D cannot be divided by %7B%7D.%22.format(num, looper)) #delete%0A%09%09%09looper += 1%0A%09 %0Aif prime == True:%0A%09print(%22%7B%7D is prime%22.format(num))%0Aelse:%0A%09print(%22%7B%7D is not prime.%22.format(num))%0A
1cb8df64d4f6f257d0bd03caaaddb33ad11a5c2c
Add or_gate
python/ch02/or_gate.py
python/ch02/or_gate.py
Python
0.000001
@@ -0,0 +1,342 @@ +import numpy as np%0A%0A%0Adef OR(x1, x2):%0A x = np.array(%5Bx1, x2%5D)%0A w = np.array(%5B0.5, 0.5%5D)%0A b = -0.2%0A tmp = np.sum(w * x) + b%0A if tmp %3C= 0:%0A return 0%0A else:%0A return 1%0A%0A%0Aif __name__ == '__main__':%0A for xs in %5B(0, 0), (1, 0), (0, 1), (1, 1)%5D:%0A y = OR(xs%5B0%5D, xs%5B1%5D)%0A print(str(xs) + %22 -%3E %22 + str(y))%0A