Dataset schema (one record per commit; per-column types and value ranges):

    column         type             min    max
    ------         ----             ---    ---
    commit         stringlengths    40     40
    subject        stringlengths    1      3.25k
    old_file       stringlengths    4      311
    new_file       stringlengths    4      311
    old_contents   stringlengths    0      26.3k
    lang           stringclasses    3 values
    proba          float64          0      1
    diff           stringlengths    0      7.82k

Each record below follows this column order: commit hash, subject, old_file, new_file, old_contents, lang, proba, diff.
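Since the header above pins down the exact shape of every record, a typed container makes the layout concrete. This is a minimal sketch only: the dump does not name a loader or a record class, so CommitRecord and its validation below are illustrative assumptions, written in the same Python as the records that follow.

    from dataclasses import dataclass

    @dataclass
    class CommitRecord:
        """One row of the dump, mirroring the schema table (class name is assumed)."""
        commit: str        # 40-character git SHA-1 hex digest
        subject: str       # commit message subject, 1 to ~3.25k chars
        old_file: str      # file path before the change
        new_file: str      # file path after the change
        old_contents: str  # file body before the change, up to ~26.3k chars
        lang: str          # one of 3 language classes (every record below is "Python")
        proba: float       # score in [0, 1]
        diff: str          # char-offset diff against old_contents, up to ~7.82k chars

        def validate(self) -> None:
            # Enforce the ranges stated in the schema header.
            assert len(self.commit) == 40
            assert 0.0 <= self.proba <= 1.0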
f51369999441cb85ed730488e943580d707e8856
use relative imports in parser/__init__.py
rflint/parser/__init__.py
rflint/parser/__init__.py
from parser import ResourceFile, SuiteFile, RobotFileFactory, Testcase, Keyword, Row, Statement
from tables import DefaultTable, SettingTable, UnknownTable, VariableTable, MetadataTable, RobotTable
Python
0.000015
@@ -1,13 +1,14 @@ from +. parser i @@ -12,16 +12,30 @@ r import + (SuiteFolder, Resourc @@ -62,20 +62,16 @@ botF -ileF actory, Test @@ -66,16 +66,38 @@ actory, +%0A Testcase @@ -125,14 +125,45 @@ ment +, TestcaseTable, KeywordTable) %0Afrom +. tabl
650dae4ce3bd154dba442cf0476651e0e949b092
add default
controller/management/commands/2xmp.py
controller/management/commands/2xmp.py
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from progressbar import ProgressBar, SimpleProgress
import os, sys, subprocess
from django.conf import settings
from emma.core.metadata import Metadata


class Command(BaseCommand):
    """
    Migrates keywords from the IPTC framework to the XMP framework.
    Also removes iptc keywords. When -keywords is requested, by exiftool,
    only -xmp:keywords will be returned.
    """
    exiftool_args_path = os.path.join(settings.APP_ROOT,
        'project/script/exiftool/args/iptckw2xmpkw.arg')

    option_list = BaseCommand.option_list + (
        make_option('-r', '--for-real',
            action='store_true',
            dest='action',
            default=False,
            help='Do the action.'),
        make_option('-p', '--path',
            dest='path',
            help='Enter path'),
    )

    def handle(self, *args, **options):
        action = options.get('action', False)
        path = options.get('path', settings.APP_CONTENT_ROOT)
        if not action:
            print 'this is a dry run, only the subprocess command will be printed.'
        errors = []
        for root, dirs, files in os.walk(path):
            for f in files:
                p = os.path.join(root, f)
                arg = self.exiftool_args_path
                # enter keyowrds migration script, and remove iptc keywords
                cmd = ['exiftool', '-overwrite_original_in_place', '-@', arg, p]
                if action:
                    r = subprocess.call(cmd)
                    # the call should return 0, if not write to errors list.
                    if r:
                        errors.append(p)
                        print 'error for file %s' % p
                    else:
                        print 'successfully migrated keywords for %s' % p
                else:
                    print cmd
        if errors:
            print errors
            return 'process complete, but with errors'
        else:
            return 'complete'
Python
0.000002
@@ -962,16 +962,67 @@ 'path',%0A + default=settings.APP_CONTENT_ROOT,%0A
b133d76627cb95b08a1c4e3cd357ea7204dfb5a2
Mark correct function as online
sunpy/lightcurve/tests/test_goes.py
sunpy/lightcurve/tests/test_goes.py
""" GOES LightCurve Tests """ from __future__ import absolute_import import pytest import sunpy.lightcurve from sunpy.time import TimeRange class TestGOESLightCurve(object): @pytest.fixture def timerange_a(self): return TimeRange('2008/06/01', '2008/06/02') @pytest.fixture def timerange_b(self): return TimeRange('1995/06/03', '1995/06/04') @pytest.mark.online def test_goes_range(self, timerange_a): """Test creation with two times""" lc1 = sunpy.lightcurve.GOESLightCurve.create(timerange_a.start, timerange_a.end) assert isinstance(lc1, sunpy.lightcurve.GOESLightCurve) @pytest.mark.online def test_goes_timerange(self, timerange_a): """Test creation with a TimeRange""" lc1 = sunpy.lightcurve.GOESLightCurve.create(timerange_a) assert isinstance(lc1, sunpy.lightcurve.GOESLightCurve) @pytest.mark.online def test_goes_default(self): """Test creation with no input""" lc1 = sunpy.lightcurve.GOESLightCurve.create() assert isinstance(lc1, sunpy.lightcurve.GOESLightCurve) @pytest.mark.online def test_data(self, timerange_a, timerange_b): """Test presence of data""" lc1 = sunpy.lightcurve.GOESLightCurve.create(timerange_b) lc2 = sunpy.lightcurve.GOESLightCurve.create(timerange_a) assert lc1.data.empty == False assert lc2.data.empty == False @pytest.mark.online def test_header(self, timerange_a, timerange_b): """Test presence of GOES satellite number in header""" lc1 = sunpy.lightcurve.GOESLightCurve.create(timerange_b) lc2 = sunpy.lightcurve.GOESLightCurve.create(timerange_a) assert lc1.header['TELESCOP'] == 'GOES 7' assert lc2.header['TELESCOP'] == 'GOES 10' def test_goes_url(self): """Test creation with url""" url = 'http://umbra.nascom.nasa.gov/goes/fits/1995/go07950603.fits' lc1 = sunpy.lightcurve.GOESLightCurve.create(url) assert isinstance(lc1, sunpy.lightcurve.GOESLightCurve) @pytest.mark.online def compare(self, lc1, lc2): try: (lc1.data == lc2.data) except: raise Exception @pytest.mark.online def test_filename(self, timerange_a, timerange_b): """Compare data from two different time ranges to make sure they are not the same""" lc1 = sunpy.lightcurve.GOESLightCurve.create(timerange_a) lc2 = sunpy.lightcurve.GOESLightCurve.create(timerange_b) # If the dataframes are non-idential it raises an error, if they are # identical it returns True with pytest.raises((Exception)): self.compare(lc1, lc2) def test_goes_sat_numbers(self, timerange_a, timerange_b): """Test the ability to return GOES satellite availability""" g = sunpy.lightcurve.GOESLightCurve assert g._get_goes_sat_num(timerange_a.start, timerange_a.end) == [10] assert g._get_goes_sat_num(timerange_b.start, timerange_b.end) == [7] def test_get_url(self, timerange_a, timerange_b): """Test the getting of urls""" g = sunpy.lightcurve.GOESLightCurve # time ranges create urls with either 4 digit or 2 digit years assert g._get_url_for_date_range(timerange_b) == 'http://umbra.nascom.nasa.gov/goes/fits/1995/go07950603.fits' assert g._get_url_for_date_range(timerange_a) == 'http://umbra.nascom.nasa.gov/goes/fits/2008/go1020080601.fits'
Python
0.999994
@@ -1802,16 +1802,40 @@ ES 10'%0A%0A + @pytest.mark.online%0A def
c984add3003b2970465d06556f8ec9366efc46d0
write more data
disk_balance_test.py
disk_balance_test.py
import os
import os.path

from dtest import DISABLE_VNODES, Tester, create_ks
from tools.assertions import assert_almost_equal
from tools.data import create_c1c2_table, insert_c1c2, query_c1c2
from tools.decorators import since
from tools.jmxutils import (JolokiaAgent, make_mbean,
                            remove_perf_disable_shared_mem)
from tools.misc import new_node


@since('3.2')
class TestDiskBalance(Tester):
    """
    @jira_ticket CASSANDRA-6696
    """

    def disk_balance_stress_test(self):
        cluster = self.cluster
        cluster.set_configuration_options(values={'allocate_tokens_for_keyspace': 'keyspace1'})
        if not DISABLE_VNODES:
            cluster.set_configuration_options(values={'num_tokens': 256})
        cluster.populate(4).start(wait_for_binary_proto=True)
        node1 = cluster.nodes['node1']
        node1.stress(['write', 'n=10k', 'no-warmup', '-rate', 'threads=100', '-schema', 'replication(factor=2)'])
        cluster.flush()
        # make sure the data directories are balanced:
        for node in cluster.nodelist():
            self.assert_balanced(node)

    def disk_balance_bootstrap_test(self):
        cluster = self.cluster
        if not DISABLE_VNODES:
            cluster.set_configuration_options(values={'num_tokens': 256})
        # apparently we have legitimate errors in the log when bootstrapping (see bootstrap_test.py)
        self.allow_log_errors = True
        cluster.set_configuration_options(values={'allocate_tokens_for_keyspace': 'keyspace1'})
        cluster.populate(4).start(wait_for_binary_proto=True)
        node1 = cluster.nodes['node1']
        node1.stress(['write', 'n=50k', 'no-warmup', '-rate', 'threads=100', '-schema', 'replication(factor=3)', 'compaction(strategy=SizeTieredCompactionStrategy,enabled=false)'])
        cluster.flush()
        node5 = new_node(cluster)
        node5.start(wait_for_binary_proto=True)
        self.assert_balanced(node5)

    def disk_balance_decommission_test(self):
        cluster = self.cluster
        if not DISABLE_VNODES:
            cluster.set_configuration_options(values={'num_tokens': 256})
        cluster.set_configuration_options(values={'allocate_tokens_for_keyspace': 'keyspace1'})
        cluster.populate(4).start(wait_for_binary_proto=True)
        node1 = cluster.nodes['node1']
        node4 = cluster.nodes['node4']
        node1.stress(['write', 'n=50k', 'no-warmup', '-rate', 'threads=100', '-schema', 'replication(factor=2)', 'compaction(strategy=SizeTieredCompactionStrategy,enabled=false)'])
        cluster.flush()
        node4.decommission()
        for node in cluster.nodelist():
            node.nodetool('relocatesstables')
        for node in cluster.nodelist():
            self.assert_balanced(node)

    def blacklisted_directory_test(self):
        cluster = self.cluster
        cluster.set_datadir_count(3)
        cluster.populate(1)
        [node] = cluster.nodelist()
        remove_perf_disable_shared_mem(node)
        cluster.start(wait_for_binary_proto=True)

        session = self.patient_cql_connection(node)
        create_ks(session, 'ks', 1)
        create_c1c2_table(self, session)
        insert_c1c2(session, n=10000)
        node.flush()
        for k in xrange(0, 10000):
            query_c1c2(session, k)

        node.compact()
        mbean = make_mbean('db', type='BlacklistedDirectories')
        with JolokiaAgent(node) as jmx:
            jmx.execute_method(mbean, 'markUnwritable', [os.path.join(node.get_path(), 'data0')])

        for k in xrange(0, 10000):
            query_c1c2(session, k)

        node.nodetool('relocatesstables')

        for k in xrange(0, 10000):
            query_c1c2(session, k)

    def alter_replication_factor_test(self):
        cluster = self.cluster
        if not DISABLE_VNODES:
            cluster.set_configuration_options(values={'num_tokens': 256})
        cluster.set_configuration_options(values={'allocate_tokens_for_keyspace': 'keyspace1'})
        cluster.populate(3).start(wait_for_binary_proto=True)
        node1 = cluster.nodes['node1']
        node1.stress(['write', 'n=1', 'no-warmup', '-rate', 'threads=100', '-schema', 'replication(factor=1)'])
        cluster.flush()
        session = self.patient_cql_connection(node1)
        session.execute("ALTER KEYSPACE keyspace1 WITH replication = {'class':'SimpleStrategy', 'replication_factor':2}")
        node1.stress(['write', 'n=100k', 'no-warmup', '-rate', 'threads=100'])
        cluster.flush()
        for node in cluster.nodelist():
            self.assert_balanced(node)

    def assert_balanced(self, node):
        sums = []
        for sstabledir in node.get_sstables_per_data_directory('keyspace1', 'standard1'):
            sum = 0
            for sstable in sstabledir:
                sum = sum + os.path.getsize(sstable)
            sums.append(sum)
        assert_almost_equal(*sums, error=0.1, error_message=node.name)
Python
0.000003
@@ -871,17 +871,17 @@ te', 'n= -1 +5 0k', 'no @@ -945,17 +945,84 @@ (factor= -2 +3)', 'compaction(strategy=SizeTieredCompactionStrategy,enabled=false )'%5D)%0A
aae0dbb42360727c8dc63a668c8f75dbdb1a0bb3
Remove exec permissions from condconfigparser/tests/test_condconfig.py
condconfigparser/tests/test_condconfig.py
condconfigparser/tests/test_condconfig.py
#! /usr/bin/env python3
# -*- coding: utf-8 -*-

# test_condconfig.py --- Automated tests for condconfigparser.condconfig
#
# Copyright (c) 2014, 2015, Florent Rougon
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of the CondConfigParser Project.

import os
import unittest

# With absolute imports (e.g., 'from condconfigparser import InTestTypeError'),
# one can run the test suite for an installed package like this:
#
#   ( cd base_dir/lib/python3.4/site-packages/condconfigparser && \
#     python3 -m unittest )
#
# instead of:
#
#   ( cd base_dir/lib/python3.4/site-packages/condconfigparser && \
#     python3 -m unittest discover -v -t .. )
from .. import InTestTypeError, RawConditionalConfig

# Hook doctest-based tests into the test discovery mechanism
import doctest
from .. import condconfig

def load_tests(loader, tests, ignore):
    tests.addTests(doctest.DocTestSuite(condconfig))
    return tests


def loadCfgFile(cfgFile, context):
    cfgfilePath = os.path.join(os.path.dirname(__file__), "data", cfgFile)
    with open(cfgfilePath, "r", encoding="utf-8") as f:
        config = RawConditionalConfig(f, context.keys())
    return config


class TestRawConditionalConfigEval(unittest.TestCase):
    sampleContext = {"aircraft": "c172p",
                     "airport": "LFPG",
                     "parking": "XYZ0",
                     "scenarios": ["nimitz_demo", "clemenceau_demo", "balloon_demo"]}

    def testEvalOnComplexCfgFile(self):
        """Process a complex configuration file with RawConditionalConfig.eval()"""
        from .data.config1 import variables, rawConfigLines
        config = loadCfgFile("config1", self.sampleContext)
        self.assertEqual(config.eval(self.sampleContext),
                         (variables, rawConfigLines))

    def testEvalOnFileWithParenthesizedOrTestBinOpOperands(self):
        """Process a file with parenthesized orTest binop operands with \
RawConditionalConfig.eval()

        The file has expressions of the form

          ( orTest ) == ( orTest )

        and analogue with !=.

        """
        from .data.config1 import variables, rawConfigLines
        config = loadCfgFile("config1", self.sampleContext)
        self.assertEqual(config.eval(self.sampleContext),
                         (variables, rawConfigLines))

    def testInTestWithMismatchingTypes(self):
        """Test that InTestTypeError is raised when performing invalid \
membership tests"""
        config = RawConditionalConfig('{ var = [] in "abcd" }\n', extvars=())
        with self.assertRaises(InTestTypeError):
            config.eval({})

        config2 = RawConditionalConfig('{ s = "abcd"\n'
                                       '  var = [] in s }\n', extvars=())
        with self.assertRaises(InTestTypeError):
            config2.eval({})

        config3 = RawConditionalConfig('{ l = []\n'
                                       '  var = l in "abcd" }\n', extvars=())
        with self.assertRaises(InTestTypeError):
            config3.eval({})

    def testOrderOfVariableAssignments(self):
        """Test that variable assignments are carried out in the proper order"""
        config = RawConditionalConfig("""
{ a = "abc"
  b = a
  a = [b] }\n""", extvars=())
        self.assertEqual(config.eval({}), ({"a": ["abc"], "b": "abc"}, [[]]))


if __name__ == "__main__":
    unittest.main()
Python
0
42df834e1b259322703ae50e79bb3d8a93a4a296
Add destroy_all and update_all for updating/deleting list of objects
restlib/controller.py
restlib/controller.py
#
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#

import re
import urllib


class _Controller(object):
    modelName = None
    processSuburls = False
    urls = None
    modelRegex = '[^/]*'

    def __init__(self, parent, path, subhandlerParams=None):
        self.parent = parent
        if parent:
            self._url = '%s/%s' % (parent._url, path)
            self.root = parent.root
        else:
            self._url = ''
            self.root = self
        if self.modelName:
            self.baseMethods = dict(POST='create',
                                    GET='index')
            self.modelMethods = dict(POST='process',
                                     GET='get',
                                     PUT='update',
                                     DELETE='destroy')
        else:
            self.baseMethods = dict(POST='process',
                                    GET='index',
                                    PUT='update',
                                    DELETE='destroy')
        if not subhandlerParams:
            subhandlerParams = []
        self.urls = self._initializeUrls(self.urls, subhandlerParams)

    def _initializeUrls(self, urls, subhandlerParams):
        if not urls:
            return {}
        newUrls = {}
        for key, handler in urls.items():
            if isinstance(handler, str):
                newUrls[key] = getattr(self, handler)
            elif isinstance(handler, dict):
                d = {}
                for method, handlerStr in handler.iteritems():
                    d[method] = getattr(self, handlerStr)
                newUrls[key] = d
            else:
                newUrls[key] = handler(self, key, *subhandlerParams)
        return newUrls

    def url(self, request, location, *args):
        return request.url(location, *args)

    def splitId(self, url):
        match = re.match('/?(%s)/?(.*|)' % self.modelRegex, urllib.unquote(url))
        if match:
            return match.groups()
        raise NotImplementedError

    def splitSubdir(self, url):
        if url[0] == '/':
            url = url[1:]
        parts = url.split('/', 1)
        return (parts + [''])[0:2]

    def getView(self, method, url):
        # FIXME: this should separate into one part that dispatches
        # to the right depth (getController)
        # and a second part that assumes that the current controller
        # is the right one (getView body)
        return self.getController(method, url, (), {})

    def getController(self, method, url, args, kwargs):
        if not url or url == '/':
            viewFnName = self.baseMethods.get(method, None)
            if viewFnName:
                viewFnName = getattr(self, viewFnName)
                return viewFnName, '', args, kwargs
            else:
                raise NotImplementedError
        elif self.modelName:
            modelId, newUrl = self.splitId(url)
            kwargs[self.modelName] = modelId
            return self.getControllerWithId(method, newUrl, args, kwargs)
        else:
            subDir, newUrl = self.splitSubdir(url)
            return self.getNextController(method, subDir, newUrl, args, kwargs)

    def getControllerWithId(self, method, url, args, kwargs):
        controller = None
        if url:
            subDir, newUrl = self.splitSubdir(url)
            if self.urls and subDir in self.urls:
                controller = self.urls[subDir]
                if isinstance(controller, dict):
                    if method in controller:
                        controller = controller[method]
                    else:
                        raise NotImplementedError
                if hasattr(controller, 'getController'):
                    return controller.getController(method, newUrl, args, kwargs)
                return controller, newUrl, args, kwargs
        if not url or self.processSuburls:
            controller = self.modelMethods.get(method, None)
            if controller:
                controller = getattr(self, controller)
                return controller, url, args, kwargs
        raise NotImplementedError

    def getNextController(self, method, subDir, url, args, kwargs):
        if self.urls and subDir in self.urls:
            controller = self.urls[subDir]
            if isinstance(controller, dict):
                if method in controller:
                    controller = controller[method]
            if hasattr(controller, 'getController'):
                return controller.getController(method, url, args, kwargs)
            if hasattr(controller, '__call__'):
                return controller, url, args, kwargs
        raise NotImplementedError


class RestController(_Controller):
    """
    @cvar urls: A dictionary of controllers for the I{head} part of the URL.
        If the controller is a string, the handler's attribute is returned;
        otherwise (assuming it's a class), a controller object is
        instantiated.
    @type urls: C{dict}
    """

    def handle(self, url, request):
        # FIXME: this is not used in live code anymore.
        method = request.method
        controller, remainder, args, kw = self.getController(method, url, (), {})
        # NOTE: the above could be cached pretty easily.
        request.unparsedPath = remainder
        return controller(request, *args, **kw)

    def index(self, request, *args, **kwargs):
        raise NotImplementedError

    def process(self, request, *args, **kwargs):
        raise NotImplementedError

    def create(self, request, *args, **kwargs):
        raise NotImplementedError

    def get(self, request, *args, **kwargs):
        raise NotImplementedError

    def destroy(self, request, *args, **kwargs):
        raise NotImplementedError

    def update(self, request, *args, **kwargs):
        raise NotImplementedError
Python
0
@@ -1058,24 +1058,136 @@ T='create',%0A + DELETE='destroy_all',%0A PUT='update_all',%0A
2653c537d8142bea9f7e68553a149cbcc0620895
Simplify get_dependency
injector.py
injector.py
import collections


class InjectorException(Exception):
    pass


class BadNameException(InjectorException):
    pass


class DuplicateNameException(InjectorException):
    pass


class MissingDependencyException(InjectorException):
    pass


class CircularDependencyException(InjectorException):
    pass


def has_missing_dependencies(dependency_graph):
    """
    Checks to see if the graph contains any references to nodes that don't
    exist.

    dependency_graph - a graph of the form {name: [children names]}

    Returns True if there are missing dependencies.
    """
    for dependencies in dependency_graph.values():
        for dependency in dependencies:
            if dependency not in dependency_graph:
                return True
    return False


def has_circular_dependencies(dependency_graph):
    """
    Checks to see if the graph contains any cycles.

    dependency_graph - a graph of the form {name: [children names]}

    Returns True if there is a cycle.
    """
    dep_counts = {
        name: len(dependencies)
        for name, dependencies in dependency_graph.items()
    }
    depends_on = collections.defaultdict(set)
    for name, dependencies in dependency_graph.items():
        for dependency in dependencies:
            depends_on[dependency].add(name)

    deps_met = collections.deque(
        name
        for name, dependencies in dependency_graph.items()
        if len(dependencies) == 0
    )
    num_removed = 0
    while deps_met:
        num_removed += 1
        done = deps_met.pop()
        for name in depends_on[done]:
            dep_counts[name] -= 1
            if dep_counts[name] == 0:
                deps_met.add(name)

    return num_removed < len(dependency_graph)


class Dependencies(object):
    """
    A factory for setting up and building an Injector instance.
    """
    def __init__(self):
        self._factories = dict()

    def _add_item(self, kind, name, value, dependencies):
        self._names_used.add(name)

    def register_value(self, name, value):
        """
        Bind a value to a name. The Injector will always return the value as-is.
        """
        self.register_factory(name, lambda: value)

    def register_factory(self, name, factory, dependencies=None):
        """
        Binds a factory to a name. The injector will call the factory function
        once (if the name is ever used), and always return the value that the
        factory returns.

        The factory will be called with the dependencies (if any listed) as
        arguments.
        """
        if not name or not isinstance(name, str):
            raise BadNameException("Bad name: {!r}".format(name))
        if name in self._factories:
            raise DuplicateNameException("Duplicate name: {}".format(name))
        self._factories[name] = (factory, dependencies)

    def _make_dependency_graph(self):
        return {
            name: dependencies or []
            for name, (_, dependencies) in self._factories.items()
        }

    def build_injector(self):
        """
        Builds an injector instance that can be used to inject dependencies.

        Also checks for common errors (missing dependencies and circular
        dependencies).
        """
        graph = self._make_dependency_graph()
        if has_missing_dependencies(graph):
            raise MissingDependencyException()
        if has_circular_dependencies(graph):
            raise CircularDependencyException()
        return Injector(self._factories)


class Injector(object):
    def __init__(self, factories):
        """
        Create an Injector. The prefered way to create an Injector is with
        `Dependencies.build_injector()`.
        """
        self._factories = factories
        self._value_cache = {}

    def has_dependency(self, name):
        """
        Check if the Injector has a dependency
        """
        return name in self._factories

    def get_dependency(self, name):
        """
        Get the value of a dependency.

        name - The name of the requested dependency

        Returns the value of the dependency
        """
        if not self.has_dependency(name):
            raise MissingDependencyException("Missing dependency name: {}".format(name))
        if name not in self._value_cache:
            (factory, dependencies) = self._factories[name]
            args = map(self.get_dependency, dependencies) if dependencies else []
            self._value_cache[name] = factory(*args)
        return self._value_cache[name]

    def inject(self, fn, dependencies):
        """
        Calls the function with the value of the listed dependencies

        fn - function that will be called
        dependencies - list of names of dependencies to inject

        Returns the result of calling the function.
        """
        args = map(self.get_dependency, dependencies) if dependencies else []
        return fn(*args)
Python
0.000003
@@ -4195,189 +4195,68 @@ -(factory, dependencies) = self._factories%5Bname%5D%0A args = map(self.get_dependency, dependencies) if dependencies else %5B%5D%0A self._value_cache%5Bname%5D = factory(*args +self._value_cache%5Bname%5D = self.inject(*self._factories%5Bname%5D )%0A
780f28cd91f92fea0dddee2b62bc659d244a8270
Change create sample code to select indexes by eval set
create_sample.py
create_sample.py
# importing modules/ libraries
import pandas as pd
import random
import numpy as np

# create a sample of prior orders
orders_df = pd.read_csv("Data/orders.csv")
s = round(3214874 * 0.1)
i = sorted(random.sample(range(1,3214874), s))
orders_df.loc[i,:].to_csv("Data/orders_prior_sample.csv", index = False)

# create a sample of train orders
s = round(131209 * 0.1)
j = sorted(random.sample(range(1,131209), s))
orders_df.loc[j,:].to_csv("Data/orders_train_sample.csv", index = False)

# create a sample of test orders
s = round(75000 * 0.1)
k = sorted(random.sample(range(1,75000), s))
orders_df.loc[k,:].to_csv("Data/orders_test_sample.csv", index = False)

# create a sample of prior order products
order_products_prior_df = pd.read_csv('Data/order_products__prior.csv', index_col = 'order_id')
order_products_prior_df.loc[orders_df.loc[i,:]['order_id'],:].to_csv("Data/order_products_prior_sample.csv", index = False)

# create a sample of train order products
order_products_train_df = pd.read_csv('Data/order_products__train.csv', index_col = 'order_id')
order_products_train_df.loc[orders_df.loc[j,:]['order_id'],:].to_csv("Data/order_products_train_sample.csv", index = False)
Python
0
@@ -209,23 +209,60 @@ ple( -range(1,3214874 +list(orders_df%5Borders_df%5B%22eval_set%22%5D==%22prior%22%5D.index ), s @@ -425,22 +425,60 @@ ple( -range(1,131209 +list(orders_df%5Borders_df%5B%22eval_set%22%5D==%22train%22%5D.index ), s @@ -639,21 +639,59 @@ ple( -range(1,75000 +list(orders_df%5Borders_df%5B%22eval_set%22%5D==%22test%22%5D.index ), s
fee30c4017da4d41a9487d961ba543d2d1e20e85
Add explicit Note join relationship on NoteContent model. (also remove extraneous comments on old date format)
tuhi_flask/models.py
tuhi_flask/models.py
# Copyright 2015 icasdri
#
# This file is part of tuhi-flask.
#
# tuhi-flask is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tuhi-flask is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with tuhi-flask. If not, see <http://www.gnu.org/licenses/>.

from flask import current_app as app
from sqlalchemy import Column, Integer, String, CHAR, Text, Boolean, DateTime, ForeignKey
from werkzeug.security import generate_password_hash, check_password_hash
from tuhi_flask.database import Base


class User(Base):
    __tablename__ = 'users'
    user_id = Column(Integer, primary_key=True)
    username = Column(String, unique=True, index=True)
    password_hash = Column(String)

    def __init__(self, username, password):
        self.username = username
        self.set_password(password)

    def set_password(self, password):
        self.password_hash = generate_password_hash(password,
                                                    method=app.config['PASSWORD_HASH_METHOD'],
                                                    salt_length=app.config['PASSWORD_SALT_LENGTH'])

    def check_password(self, password):
        return check_password_hash(self.password_hash, password)


class Note(Base):
    __tablename__ = 'notes'
    note_id = Column(CHAR(36), primary_key=True)
    user_id = Column(Integer, ForeignKey('users.user_id'), index=True)
    title = Column(String)
    deleted = Column(Boolean, default=False)
    date_modified = Column(Integer, index=True)  # Seconds from epoch
    # date_modified = Column(DateTime)  # May need to use Integer from epoch here


class NoteContent(Base):
    __tablename__ = 'note_contents'
    note_content_id = Column(CHAR(36), primary_key=True)
    note_id = Column(CHAR(36), ForeignKey('notes.note_id'), index=True)
    data = Column(Text)
    date_created = Column(Integer, index=True)  # Seconds from epoch
    # date_created = Column(DateTime)  # May need to use Integer from epoch here
Python
0
@@ -834,16 +834,56 @@ eignKey%0A +from sqlalchemy.orm import relationship%0A from wer @@ -2000,90 +2000,8 @@ och%0A - # date_modified = Column(DateTime) # May need to use Integer from epoch here%0A %0A%0Acl @@ -2285,86 +2285,38 @@ och%0A +%0A -# date_created = Column(DateTime) # May need to use Integer from epoch here +note = relationship(%22Note%22) %0A%0A
0eba8c87373d44b1f7bec2afc8d4ee62d5abd2cb
Refactor debug interface to adapt connection
scripts/debug_serial.py
scripts/debug_serial.py
#!/usr/bin/python3
import struct
import subprocess
from enum import Enum
import time
import logging
from functools import partial
from typing import Callable

import colorlog

from meshnet.serio.connection import LegacyConnection, MessageHandler, MessageWriter
from meshnet.serio.messages import MessageType, SerialMessageConsumer, SerialMessage

logger = logging.getLogger(__name__)

KEY = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'


def build_fakeio(port_a, port_b):
    return subprocess.Popen(["socat",
                             "PTY,link={:s},raw,wait-slave".format(port_a),
                             "PTY,link={:s},raw,wait-slave".format(port_b)])


class FakeState(Enum):
    new = 0
    configured = 1


class FakeDeviceMessage(SerialMessage):
    def _serio_header(self):
        return struct.pack(">HB", self.sender, self.msg_type.value)


class FakeRouter(MessageHandler):
    def __init__(self, tty_dev: str, key: bytes):
        self.key = key


        self.conn = LegacyConnection(tty_dev)
        self.conn.register_handler(self)
        self.consumer = SerialMessageConsumer()

        self.devices = {}

    def register_device(self, device: 'FakeNode'):
        self.devices[device.node_id] = device

    def write_packet(self, writer: MessageWriter, message: SerialMessage):
        logger.debug("Write message: %s", message)
        writer.put_packet(message, self.key)

    def on_message(self, message: SerialMessage, writer: MessageWriter):
        sender = message.receiver
        message.receiver = message.sender
        message.sender = sender

        if not message.verify(KEY):
            logger.warning("cannot verify checksum")
            return

        if message.receiver not in self.devices:
            logger.warning("Unknown node id: %x", message.receiver)
            return

        dev = self.devices[message.receiver]
        dev.on_message(message, partial(self.write_packet, writer))

    def on_connect(self, writer: MessageWriter):
        for dev in self.devices.values():
            dev.on_connect(partial(self.write_packet, writer))

    def on_disconnect(self):
        pass

    def run(self):
        self.conn.connect()

        while True:
            if not self.conn.read(self.consumer):
                time.sleep(0.3)


class FakeDevice(object):
    def __init__(self, dev_type):
        self.dev_type = dev_type


class FakeNode(object):
    def __init__(self, node_id: int):
        self.state = FakeState.new
        self.session = 0x12
        self.counter = 0
        self.node_id = node_id
        self.items = []

        self.handlers = {
            MessageType.booted: self._dummy_handler,
            MessageType.configure: self._dummy_handler,
            MessageType.configured: self._dummy_handler,
            MessageType.set_state: self._dummy_handler,
            MessageType.get_state: self._dummy_handler,
            MessageType.reading: self._dummy_handler,
            MessageType.ping: self._dummy_handler,
            MessageType.pong: self._dummy_handler,
            MessageType.reset: self._dummy_handler,
        }

    def _dummy_handler(self, message: SerialMessage, write_func: Callable[[SerialMessage], None]):
        logger.warning("No actual handler for message %s defined.", message.msg_type.name)
        write_func(self.make_packet(MessageType.pong, b'1234'))

    def _config_handler(self):
        pass

    def on_message(self, message: SerialMessage, writer: FakeRouter):
        logger.info("Node %d got message %s", self.node_id, message)
        actual_handler = self.handlers.get(message.msg_type, self._dummy_handler)
        actual_handler(message, writer)

    def on_connect(self, write_func: Callable[[SerialMessage], None]):
        write_func(self.make_packet(MessageType.booted, b'1234'))

    def make_packet(self, msg_type: MessageType, payload: bytes) -> SerialMessage:
        cnt = self.counter
        self.counter += 1
        return FakeDeviceMessage(self.node_id, 0, msg_type,
                                 counter=cnt, payload=payload, session=self.session)


if __name__ == "__main__":
    handler = colorlog.StreamHandler()
    handler.setFormatter(colorlog.ColoredFormatter(
        '%(log_color)s%(levelname)s:%(name)s:%(message)s'))
    logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG, handlers=[handler])

    proc = build_fakeio("/tmp/ttyS0", "/tmp/ttyS1")
    time.sleep(1)

    router = FakeRouter("/tmp/ttyS0", KEY)
    router.register_device(FakeNode(1))

    router.run()
Python
0
@@ -931,22 +931,8 @@ elf, - tty_dev: str, key @@ -968,97 +968,8 @@ key%0A -%0A%0A self.conn = LegacyConnection(tty_dev)%0A self.conn.register_handler(self)%0A @@ -1012,17 +1012,16 @@ sumer()%0A -%0A @@ -2052,159 +2052,8 @@ ss%0A%0A - def run(self):%0A self.conn.connect()%0A%0A while True:%0A if not self.conn.read(self.consumer):%0A time.sleep(0.3)%0A%0A %0Acla @@ -3825,16 +3825,126 @@ sion)%0A%0A%0A +def loop(conn):%0A conn.connect()%0A%0A while True:%0A if not conn.read():%0A time.sleep(0.3)%0A%0A%0A if __nam @@ -4295,101 +4295,181 @@ -router = FakeRouter(%22/tmp/ttyS0%22, KEY)%0A router.register_device(FakeNode(1))%0A%0A router.run( +connection = LegacyConnection(%22/tmp/ttyS0%22)%0A%0A router = FakeRouter(KEY)%0A router.register_device(FakeNode(1))%0A%0A connection.register_handler(router)%0A%0A loop(connection )%0A
ed08ebd627846b8e4a3b3734a605495e64254a28
Fix a minor pylint warning
swift_scality_backend/http_utils.py
swift_scality_backend/http_utils.py
# Copyright (c) 2014 Scality
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

'''HTTP client utilities'''

import errno
import httplib
import socket

from swift_scality_backend.exceptions import InvariantViolation


class SomewhatBufferedFileObject(socket._fileobject):
    '''A 'somewhat buffered' file-like object

    This works similar to `socket._fileobject`, which is what you get when
    calling `socket.socket.makefile`, but this one has a couple of
    differences:

    - All `write`-related methods are removed (raise `NotImplementedError`)
    - It's possible to retrieve the content of the buffer *once*
    - Once the content of the backing buffer has been retrieved, any
      `read`-related method will fail (raise `InvariantViolation`)
    '''

    def get_buffered(self):
        '''Retrieve the buffered data'''
        if self._rbuf is None:
            raise InvariantViolation('Using `get_buffered` more than once')

        value = self._rbuf.getvalue()
        self._rbuf = None
        return value

    def flush(self):
        # We can't raise NotImplementedError here because when an HTTPResponse
        # object is closed, flush() is called
        pass

    def write(self, data):
        raise NotImplementedError

    def writelines(self, list):
        raise NotImplementedError

    def read(self, size=-1):
        if self._rbuf is None:
            raise InvariantViolation('Using `read` after `get_buffered`')
        return socket._fileobject.read(self, size)

    def readline(self, size=-1):
        if self._rbuf is None:
            raise InvariantViolation('Using `readline` after `get_buffered`')
        return socket._fileobject.readline(self, size)

    def readlines(self, sizehint=0):
        if self._rbuf is None:
            raise InvariantViolation('Using `readllines` after `get_buffered`')
        return socket._fileobject.readlines(self, sizehint)

    def next(self):
        if self._rbuf is None:
            raise InvariantViolation('Using `next` after `get_buffered`')
        return socket._fileobject.next(self)


class SomewhatBufferedHTTPConnection(httplib.HTTPConnection):
    '''A somewhat buffered HTTP connection

    The response type used by this class wraps the underlying socket in a
    `SomewhatBufferedFileObject`.
    '''

    class HTTPResponse(httplib.HTTPResponse):
        '''Like `httplib.HTTPResponse, but with its `fp` attribute wrapped in
        a `SomewhatBufferedFileObject`
        '''
        def __init__(self, sock, debuglevel=0, strict=0, method=None,
                     buffering=False):
            httplib.HTTPResponse.__init__(self, sock, debuglevel=debuglevel,
                                          strict=strict, method=method,
                                          buffering=buffering)

            # Fetching in chunks of 1024 bytes seems like a sensible value,
            # since we want to retrieve as little more than the HTTP headers as
            # possible.
            self.fp = SomewhatBufferedFileObject(sock, 'rb', 1024)

    response_class = HTTPResponse

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()


def stream(fp, chunksize=(1024 * 64)):
    '''Yield blocks of data from a file-like object using a given chunk size

    This generator yield blocks from the given file-like object `fp` by
    calling its `read` method repeatedly, stopping the loop once a
    zero-length value is returned.

    Any `OSError` or `IOError` with `errno` equal to `errno.EINTR` will be
    caught and the loop will continue to run.
    '''

    while True:
        try:
            chunk = fp.read(chunksize)
        except (OSError, IOError) as exc:
            if getattr(exc, 'errno', None) == errno.EINTR:
                continue
            else:
                raise

        if len(chunk) != 0:
            yield chunk
        else:
            break
Python
0.000001
@@ -1764,18 +1764,19 @@ self, li +ne s -t ):%0A
ee76ae4f41be17a0f6a482273e99783df8212004
Reconfigure key repeat (should change to be configurable)
riker/worker/utils.py
riker/worker/utils.py
from logging import getLogger
import tempfile
from threading import Thread

import lirc
from django.conf import settings

from systemstate.models import RemoteButton
from systemstate.utils import push_button

LOGGER = getLogger(__name__)

LIRCRC_TEMPLATE = '''
begin
        prog = {lirc_name}
        button = {key_name}
        config = {key_name}
end

'''


class LircListener(Thread):
    def __init__(self, lirc_name):
        self.lirc_name = lirc_name
        self.lircrc_filename = create_lircrc_tempfile()
        super(LircListener, self).__init__()

    def run(self):
        lirc.init(self.lirc_name, self.lircrc_filename)
        listen(self.lirc_name, self.lircrc_filename)


def listen(lirc_name, lircrc_filename, callback=None):
    lirc.init(lirc_name, lircrc_filename)
    callback = callback or push_button
    while True:
        for key_code in lirc.nextcode():
            LOGGER.warning(key_code)
            callback(key_code)


def create_lircrc_tempfile(lirc_name):
    buttons = RemoteButton.objects.all().values_list('lirc_code', flat=True)
    with tempfile.NamedTemporaryFile(delete=False) as lircrc_file:
        lircrc_file.write(generate_lircrc(lirc_name, buttons).encode('ascii'))
    return lircrc_file.name


def generate_lircrc(name, buttons):
    return '\n'.join(
        LIRCRC_TEMPLATE.format(
            lirc_name=name,
            key_name=button,
        ) for button in buttons
    )
Python
0
@@ -347,16 +347,53 @@ y_name%7D%0A + repeat = 2%0A delay = 3%0A end%0A%0A'''
2e99e481e36b4634859e4071984aa5e1ae440945
fix bug
deepchem/molnet/load_function/clearance_datasets.py
deepchem/molnet/load_function/clearance_datasets.py
""" clearance dataset loader. """ import os import logging import deepchem logger = logging.getLogger(__name__) DEFAULT_DIR = deepchem.utils.data_utils.get_data_dir() CLEARANCE_URL = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/clearance.csv" def load_clearance(featurizer='ECFP', split='random', reload=True, move_mean=True, data_dir=None, save_dir=None, **kwargs): """Load clearance datasets.""" # Featurize clearance dataset logger.info("About to featurize clearance dataset.") logger.info("About to load clearance dataset.") if data_dir is None: data_dir = DEFAULT_DIR if save_dir is None: save_dir = DEFAULT_DIR clearance_tasks = ['exp'] if reload: save_folder = os.path.join(save_dir, "clearance-featurized") if not move_mean: save_folder = os.path.join(save_folder, str(featurizer) + "_mean_unmoved") else: save_folder = os.path.join(save_folder, str(featurizer)) if featurizer == "smiles2img": img_spec = kwargs.get("img_spec", "std") save_folder = os.path.join(save_folder, img_spec) save_folder = os.path.join(save_folder, str(split)) loaded, all_dataset, transformers = deepchem.utils.data_utils.load_dataset_from_disk( save_folder) if loaded: return clearance_tasks, all_dataset, transformers dataset_file = os.path.join(data_dir, "clearance.csv") if not os.path.exists(dataset_file): deepchem.utils.data_utils.download_url(url=CLEARANCE_URL, dest_dir=data_dir) if featurizer == 'ECFP': featurizer = deepchem.feat.CircularFingerprint(size=1024) elif featurizer == 'GraphConv': featurizer = deepchem.feat.ConvMolFeaturizer() elif featurizer == 'Weave': featurizer = deepchem.feat.WeaveFeaturizer() elif featurizer == 'Raw': featurizer = deepchem.feat.RawFeaturizer() elif featurizer == "smiles2img": img_spec = kwargs.get("img_spec", "std") img_size = kwargs.get("img_size", 80) featurizer = deepchem.feat.SmilesToImage( img_size=img_size, img_spec=img_spec) loader = deepchem.data.CSVLoader( tasks=clearance_tasks, smiles_field="smiles", featurizer=featurizer) dataset = loader.featurize(dataset_file, shard_size=8192) if split is None: # Initialize transformers transformers = [ deepchem.trans.NormalizationTransformer( transform_y=True, dataset=dataset, move_mean=move_mean) ] logger.info("Split is None, about to transform data") for transformer in transformers: dataset = transformer.transform(dataset) return clearance_tasks, (dataset, None, None), transformers splitters = { 'index': deepchem.splits.IndexSplitter(), 'random': deepchem.splits.RandomSplitter(), 'scaffold': deepchem.splits.ScaffoldSplitter(), 'stratified': deepchem.splits.SingletaskStratifiedSplitter() } splitter = splitters[split] logger.info("About to split data with {} splitter.".format(split)) frac_train = kwargs.get("frac_train", 0.8) frac_valid = kwargs.get('frac_valid', 0.1) frac_test = kwargs.get('frac_test', 0.1) train, valid, test = splitter.train_valid_test_split( dataset, frac_train=frac_train, frac_valid=frac_valid, frac_test=frac_test) transformers = [ deepchem.trans.NormalizationTransformer( transform_y=True, dataset=train, move_mean=move_mean) ] logger.info("About to transform data") for transformer in transformers: train = transformer.transform(train) valid = transformer.transform(valid) test = transformer.transform(test) if reload: deepchem.utils.data_utils.save_dataset_to_disk(save_folder, train, valid, test, transformers) return clearance_tasks, (train, valid, test), transformers
Python
0.000005
@@ -817,11 +817,14 @@ = %5B' -exp +target '%5D%0D%0A
0827911184bf43a6dd50712444d3f9385a64eb31
support combining bigrams
constraintWriterTool.py
constraintWriterTool.py
#!/usr/bin/env python
from autosuggest import *
import os, sys
from sys import argv, exit

def printUsage():
	print("Usage: constraintWriterTool action [options]\nActions:\n\tsuggest\t\tbigramfile word\n\tsuggestPfx\t\tbigramfile word prefix\n\tinWhitelist\tbigramfile word\n\tinBlacklist\tbigramfile word\n\tcompile\t\tcorpus bigramfile\n\tcompileMulti\tbigramfile corpus [corpus_2 ... corpus_n]\n")
	exit(1)

if len(argv)<4:
	printUsage()

world={}

if argv[1] in ["suggest", "suggestPfx", "inWhitelist", "inBlacklist"]:
	def inBlacklist(world, word):
		return checkWhiteList(world, word, True)
	def pfx(world, word):
		return bigramSuggestPfx(world, word, argv[4])
	funcs={"suggest":bigramSuggest, "inWhitelist":checkWhiteList, "inBlacklist":inBlacklist, "suggestPfx":pfx}
	world=loadBigrams(argv[2])
	print(funcs[argv[1]](world, argv[3]))
	exit(0)
elif argv[1]=="compile":
	with open(argv[2], 'r') as f:
		saveBigrams(corpus2bigrams(f.read()), argv[3])
elif argv[1]=="compileMulti":
	corpora=[]
	for fname in argv[3:]:
		with open(fname, 'r') as f:
			corpora.append(f.read())
	saveBigrams(corpus2bigrams("\n".join(corpora)), argv[2])
Python
0
@@ -210,18 +210,16 @@ gestPfx%5C -t%5C tbigramf @@ -387,16 +387,66 @@ corpus_n +%5D%5Cn%5Ctcombine%5Ct%5Ctbigramfile_out %5Bbigramfile_in ... %5D%5Cn%22)%0A%09e @@ -1181,10 +1181,326 @@ rgv%5B2%5D)%0A +elif argv%5B1%5D==%22combine%22:%0A%09bigrams=%7B%7D%0A%09for fname in argv%5B3:%5D:%0A%09%09world=loadBigrams(fname)%0A%09%09for w1 in world.keys():%0A%09%09%09if not (w1 in bigrams):%0A%09%09%09%09bigrams%5Bw1%5D=%7B%7D%0A%09%09%09for w2 in world%5Bw1%5D.keys():%0A%09%09%09%09if not w2 in bigrams%5Bw1%5D:%0A%09%09%09%09%09bigrams%5Bw1%5D%5Bw2%5D=0%0A%09%09%09%09bigrams%5Bw1%5D%5Bw2%5D+=world%5Bw1%5D%5Bw2%5D%0A%09saveBigrams(bigrams, argv%5B2%5D)%0A%0A%09%09%09%09%0A %09%0A
a1e468f7bf22f46de7ec0d41e37d75299cc205a7
Allow email-src and email-dst as input for apivoid module
misp_modules/modules/expansion/apivoid.py
misp_modules/modules/expansion/apivoid.py
import json
import requests
from . import check_input_attribute, standard_error_message
from pymisp import MISPAttribute, MISPEvent, MISPObject

misperrors = {'error': 'Error'}
mispattributes = {'input': ['domain', 'hostname', 'email'], 'format': 'misp_standard'}
moduleinfo = {'version': '0.2', 'author': 'Christian Studer',
              'description': 'On demand query API for APIVoid.',
              'module-type': ['expansion', 'hover']}
moduleconfig = ['apikey']


class APIVoidParser():
    def __init__(self, attribute):
        self.misp_event = MISPEvent()
        self.attribute = MISPAttribute()
        self.attribute.from_dict(**attribute)
        self.misp_event.add_attribute(**self.attribute)
        self.url = 'https://endpoint.apivoid.com/{}/v1/pay-as-you-go/?key={}&'

    def get_results(self):
        if hasattr(self, 'result'):
            return self.result
        event = json.loads(self.misp_event.to_json())
        results = {key: event[key] for key in ('Attribute', 'Object')}
        return {'results': results}

    def parse_domain(self, apikey):
        feature = 'dnslookup'
        if requests.get(f'{self.url.format(feature, apikey)}stats').json()['credits_remained'] < 0.13:
            self.result = {'error': 'You do not have enough APIVoid credits to proceed your request.'}
            return
        mapping = {'A': 'resolution-of', 'MX': 'mail-server-of', 'NS': 'server-name-of'}
        dnslookup = requests.get(f'{self.url.format(feature, apikey)}action=dns-any&host={self.attribute.value}').json()
        for item in dnslookup['data']['records']['items']:
            record_type = item['type']
            try:
                relationship = mapping[record_type]
            except KeyError:
                continue
            self._handle_dns_record(item, record_type, relationship)
        ssl = requests.get(f'{self.url.format("sslinfo", apikey)}host={self.attribute.value}').json()
        self._parse_ssl_certificate(ssl['data']['certificate'])

    def handle_email(self, apikey):
        feature = 'emailverify'
        if requests.get(f'{self.url.format(feature, apikey)}stats').json()['credits_remained'] < 0.06:
            self.result = {'error': 'You do not have enough APIVoid credits to proceed your request.'}
            return
        emaillookup = requests.get(f'{self.url.format(feature, apikey)}email={self.attribute.value}').json()
        email_verification = MISPObject('apivoid-email-verification')
        boolean_attributes = ['valid_format', 'suspicious_username', 'suspicious_email', 'dirty_words_username',
                              'suspicious_email', 'valid_tld', 'disposable', 'has_a_records', 'has_mx_records',
                              'has_spf_records', 'is_spoofable', 'dmarc_configured', 'dmarc_enforced', 'free_email',
                              'russian_free_email', 'china_free_email', 'suspicious_domain', 'dirty_words_domain',
                              'domain_popular', 'risky_tld', 'police_domain', 'government_domain',
                              'educational_domain', 'should_block']
        for boolean_attribute in boolean_attributes:
            email_verification.add_attribute(boolean_attribute,
                                             **{'type': 'boolean', 'value': emaillookup['data'][boolean_attribute]})
        email_verification.add_attribute('email', **{'type': 'email', 'value': emaillookup['data']['email']})
        email_verification.add_attribute('username', **{'type': 'text', 'value': emaillookup['data']['username']})
        email_verification.add_attribute('role_address',
                                         **{'type': 'boolean', 'value': emaillookup['data']['role_address']})
        email_verification.add_attribute('domain', **{'type': 'domain', 'value': emaillookup['data']['domain']})
        email_verification.add_attribute('score', **{'type': 'float', 'value': emaillookup['data']['score']})
        email_verification.add_reference(self.attribute['uuid'], 'related-to')
        self.misp_event.add_object(email_verification)

    def _handle_dns_record(self, item, record_type, relationship):
        dns_record = MISPObject('dns-record')
        dns_record.add_attribute('queried-domain', type='domain', value=item['host'])
        attribute_type, feature = ('ip-dst', 'ip') if record_type == 'A' else ('domain', 'target')
        dns_record.add_attribute(f'{record_type.lower()}-record', type=attribute_type, value=item[feature])
        dns_record.add_reference(self.attribute.uuid, relationship)
        self.misp_event.add_object(**dns_record)

    def _parse_ssl_certificate(self, certificate):
        x509 = MISPObject('x509')
        fingerprint = 'x509-fingerprint-sha1'
        x509.add_attribute(fingerprint, type=fingerprint, value=certificate['fingerprint'])
        x509_mapping = {'subject': {'name': ('text', 'subject')},
                        'issuer': {'common_name': ('text', 'issuer')},
                        'signature': {'serial': ('text', 'serial-number')},
                        'validity': {'valid_from': ('datetime', 'validity-not-before'),
                                     'valid_to': ('datetime', 'validity-not-after')}}
        certificate = certificate['details']
        for feature, subfeatures in x509_mapping.items():
            for subfeature, mapping in subfeatures.items():
                attribute_type, relation = mapping
                x509.add_attribute(relation, type=attribute_type, value=certificate[feature][subfeature])
        x509.add_reference(self.attribute.uuid, 'seen-by')
        self.misp_event.add_object(**x509)


def handler(q=False):
    if q is False:
        return False
    request = json.loads(q)
    if not request.get('config', {}).get('apikey'):
        return {'error': 'An API key for APIVoid is required.'}
    if not request.get('attribute') or not check_input_attribute(request['attribute']):
        return {'error': f'{standard_error_message}, which should contain at least a type, a value and an uuid.'}
    attribute = request['attribute']
    if attribute['type'] not in mispattributes['input']:
        return {'error': 'Unsupported attribute type.'}
    apikey = request['config']['apikey']
    apivoid_parser = APIVoidParser(attribute)
    if attribute['type'] in ['domain', 'hostname']:
        apivoid_parser.parse_domain(apikey)
    else:
        apivoid_parser.handle_email(apikey)
    return apivoid_parser.get_results()


def introspection():
    return mispattributes


def version():
    moduleinfo['config'] = moduleconfig
    return moduleinfo
Python
0
@@ -227,16 +227,42 @@ 'email' +, 'email-src', 'email-dst' %5D, 'form
8ec51a310324006b46d5bdfe1adfcef00f461d02
update construct images
construct_mockfields.py
construct_mockfields.py
import os
import sys
import numpy as np
import glob
import gfs_sublink_utils as gsu
import shutil
import math
import astropy
import astropy.io.fits as pyfits
#import matplotlib
#import matplotlib.pyplot as plt
import scipy
import scipy.ndimage
#import make_color_image
import numpy.random as random
import congrid
import tarfile
import string
import astropy.io.ascii as ascii

#construct real illustris lightcones from individual images
#in parallel, produce estimated Hydro-ART surveys based on matching algorithms -- high-res?

def build_lightcone_images(image_info_file,run_type='images'):

    data=ascii.read(image_info_file)
    print(data)

    full_npix=data['full_npix'][0]

    #get expected shape
    test_file=os.path.join(data['run_dir'][0],'broadbandz.fits')
    tfo =pyfits.open(test_file)
    print(tfo.info())
    cube=tfo['CAMERA0-BROADBAND-NONSCATTER'].data
    cubeshape=cube.shape
    print(cubeshape)

    auxcube=tfo['CAMERA0-AUX'].data

    filters_hdu = tfo['FILTERS']

    lightcone_dir=os.path.abspath(os.path.dirname(image_info_file))
    print('Constructing lightcone data from: ', lightcone_dir)

    output_dir = os.path.join(lightcone_dir,os.path.basename(image_info_file).rstrip('.txt'))
    print('Saving lightcone outputs in: ', output_dir)
    if not os.path.lexists(output_dir):
        os.mkdir(output_dir)

    success_catalog=os.path.join(output_dir,os.path.basename(image_info_file).rstrip('.txt')+'_success.txt'

    image_filelabel='lightcone_image'

    N_filters = cubeshape[0]
    N_aux=auxcube.shape[0]

    image_cube = np.zeros((N_filters,full_npix,full_npix),dtype=np.float64)
    aux_cube = np.zeros((N_aux,full_npix,full_npix),dtype=np.float64)

    success=[]

    for origin_i,origin_j,run_dir,this_npix in zip(data['origin_i'],data['origin_j'],data['run_dir'],data['this_npix']):
        try:
            bblist=pyfits.open(os.path.join(run_dir,'broadbandz.fits'))
            this_cube = bblist['CAMERA0-BROADBAND-NONSCATTER'].data
            bblist.close()
            success.append(True)
        except:
            print('Missing file, ', run_dir)
            success.append(False)
            continue

        i_tc=0
        j_tc=0
        i_tc1=this_npix
        j_tc1=this_npix

        if origin_i < 0:
            i0=0
            i_tc=-1*origin_i
        else:
            i0=origin_i

        if origin_j < 0:
            j0=0
            j_tc=-1*origin_j
        else:
            j0=origin_j

        if i0+this_npix > full_npix:
            i1=full_npix
            i_tc1= full_npix-i0 #this_npix - (i0+this_npix-full_npix)
        else:
            i1=i0+this_npix-i_tc

        if j0+this_npix > full_npix:
            j1=full_npix
            j_tc1= full_npix-j0
        else:
            j1=j0+this_npix-j_tc

        sub_cube1=image_cube[:,i0:i1,j0:j1]
        this_subcube=this_cube[:,i_tc:i_tc1,j_tc:j_tc1]
        print(origin_i,origin_j,this_cube.shape, this_npix, sub_cube1.shape, this_subcube.shape)

        image_cube[:,i0:i1,j0:j1] = sub_cube1 + this_subcube

    success=np.asarray(success)
    data.add_column(data=success,name='success')
    ascii.write(data,output=success_catalog)

    filters_data=filters_hdu.data

    for i,filname in enumerate(filters_data['filter']):
        print(filname)
        outname=os.path.join(output_dir,image_filelabel+'_'+filname.replace('/','-')+'.fits')
        print('saving:', outname)
        primary_hdu=pyfits.PrimaryHDU(image_cube[i,:,:])
        output_list=pyfits.HDUList([primary_hdu])
        output_list.writeto(outname,overwrite=True)
        output_list.close()

    #convert units before saving.. or save both?

    return
Python
0.000009
@@ -1449,16 +1449,17 @@ ess.txt' +) %0A%0A im
23939ace63c12391dc07a3419a55ca573ee5dd73
Update debug output and remove unnecessary assignment
righteous/api/server_template.py
righteous/api/server_template.py
import re
from urllib import urlencode
from logging import getLogger

import omnijson as json

from .. import config
from .base import _request, debug

log = getLogger(__name__)


def list_server_templates():
    """
    Lists ServerTemplates

    :return: list of dicts of server information with the following keys:

    ::

        [u'description', u'is_head_version', u'created_at', u'updated_at',
         u'href', u'version', u'nickname']
    """
    response = _request('/server_templates.js')
    return json.loads(response.content)


def _extract_template_id(template_href):
    """
    Returns the template id from an href

    :param template_href: String representing the server template href
    :return: String of the template_id or None
    """
    result = re.match(config.account_url + config.settings.account_id +
                      '/ec2_server_templates/(\d+)', template_href)
    if result:
        return result.groups()[0]
    return None


def server_template_info(template_href):
    """
    Details ServerTemplate information

    :param template_href: String representing the server template href
    :return: dict of server template information, with the following keys:

    ::

        [u'description', u'is_head_version', u'created_at', u'updated_at',
         u'href', u'version', u'nickname']
    """
    response = _request('/server_templates/%s.js' % _extract_template_id(template_href))
    template = json.loads(response.content)
    if template:
        return template
    else:
        return None


def create_server_template(nickname, description, multi_cloud_image_href):
    """
    Create a new ServerTemplate

    Returns a tuple of operation status, href of the created, started server

    :param nickname: String of the template nickname
    :param description: String describing the ServerTemplate
    :param multi_cloud_image_href: String of the template image href
    :return: tuple of operation success and new server template href
    """
    location = None
    success = False
    create_data = {
        'server_template[nickname]': nickname,
        'server_template[description]': description,
        'server_template[multi_cloud_image_href]': multi_cloud_image_href,
    }
    response = _request('/server_templates', method='POST',
                        body=urlencode(create_data))
    success = response.status_code == 201
    if success:
        location = response.headers.get('location')
    debug('Created ServerTemplate %s: %s (%s:%s)' % (nickname, location,
          response.status_code, response.content))
    # TODO: error responses
    return success, location


def delete_server_template(server_template_href):
    """
    Deletes a ServerTemplate

    :param server_template_href: String of the ServerTemplate to delete
    :return: `requests.Response`
    """
    return _request('/server_templates/%s.js' %
                    _extract_template_id(server_template_href),
                    method='DELETE').status_code == 200
Python
0
@@ -2110,28 +2110,8 @@ one%0A - success = False%0A @@ -2554,23 +2554,24 @@ Created -S +s erver -T + t emplate
5a3d991d53e35ba7b307b53998d38e7382b5ec61
Add SOFTMAT to instrument list
scripts/set_instrument_list.py
scripts/set_instrument_list.py
# This file is part of the ISIS IBEX application.
# Copyright (C) 2012-2016 Science & Technology Facilities Council.
# All rights reserved.
#
# This program is distributed in the hope that it will be useful.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution.
# EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
# AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
# You should have received a copy of the Eclipse Public License v1.0
# along with this program; if not, you can obtain a copy from
# https://www.eclipse.org/org/documents/epl-v10.php or
# http://opensource.org/licenses/eclipse-1.0.php

import json
import zlib
import sys
import os

try:
    from server_common.channel_access import ChannelAccess as ca
except ImportError:
    sys.path.append(os.path.join(os.path.dirname(sys.path[0])))  # to allow server common from dir below
    from server_common.channel_access import ChannelAccess as ca


def compress_and_hex(value):
    compr = zlib.compress(value)
    return compr.encode('hex')


def dehex_and_decompress(value):
    return zlib.decompress(value.decode("hex"))


def set_env():
    epics_ca_addr_list = "EPICS_CA_ADDR_LIST"
    """ If we're not in an EPICS terminal, add the address list to the set of environment keys """
    if not epics_ca_addr_list in os.environ.keys():
        os.environ[epics_ca_addr_list] = "127.255.255.255 130.246.51.255"
    print(epics_ca_addr_list + " = " + str(os.environ.get(epics_ca_addr_list)))


def inst_dictionary(instrument_name, hostname_prefix="NDX", hostname=None, pv_prefix=None, is_scheduled=True):
    """
    Generate the instrument dictionary for the instrument list
    Args:
        instrument_name: instrument name
        hostname_prefix: prefix for hostname (defaults to NDX)
        hostname: whole host name overrides prefix, defaults to hostname_prefix + instrument name
        pv_prefix: the pv prefeix; default to IN:instrument_name
        is_scheduled: whether the instrument has scheduled users and so should have user details written to it; default to True

    Returns: dictionary for instrument

    """
    if hostname is not None:
        hostname_to_use = hostname
    else:
        hostname_to_use = hostname_prefix + instrument_name
    if pv_prefix is not None:
        pv_prefix_to_use = pv_prefix
    else:
        pv_prefix_to_use = "IN:{0}:".format(instrument_name)
    return {"name": instrument_name,
            "hostName": hostname_to_use,
            "pvPrefix": pv_prefix_to_use,
            "isScheduled": is_scheduled}


if __name__ == "__main__":
    set_env()

    # The PV address list
    pv_address = "CS:INSTLIST"

    # instrument list values to set (uses utility to return the dictionary but you can use a dictionary directly)
    instruments_list = [
        inst_dictionary("LARMOR"),
        inst_dictionary("ALF"),
        inst_dictionary("DEMO", is_scheduled=False),
        inst_dictionary("IMAT"),
        inst_dictionary("MUONFE", hostname_prefix="NDE", is_scheduled=False),
        inst_dictionary("ZOOM"),
        inst_dictionary("IRIS"),
        inst_dictionary("IRIS_SETUP", pv_prefix="IN:IRIS_S29:", is_scheduled=False),
        inst_dictionary("ENGINX_SETUP", pv_prefix="IN:ENGINX49:", is_scheduled=False),
        inst_dictionary("HRPD"),
        inst_dictionary("POLARIS"),
        inst_dictionary("VESUVIO"),
        inst_dictionary("ENGINX"),
        inst_dictionary("MERLIN"),
        inst_dictionary("RIKENFE", is_scheduled=False),
        inst_dictionary("SELAB", is_scheduled=False),
        inst_dictionary("EMMA-A", is_scheduled=False),
        inst_dictionary("SANDALS"),
        inst_dictionary("GEM"),
        inst_dictionary("MAPS"),
        inst_dictionary("OSIRIS"),
        inst_dictionary("INES"),
        inst_dictionary("TOSCA"),
        inst_dictionary("LOQ"),
        inst_dictionary("LET"),
        inst_dictionary("MARI"),
        inst_dictionary("CRISP", is_scheduled=False),
    ]
    new_value = json.dumps(instruments_list)
    new_value_compressed = compress_and_hex(new_value)

    ca.caput(pv_address, str(new_value_compressed), True)

    result_compr = ca.caget(pv_address, True)
    result = dehex_and_decompress(result_compr)
    print(result)

    if result != new_value:
        print("Warning! Entered value does not match new value.")
        print("Entered value: " + new_value)
        print("Actual value: " + result)
    else:
        print("Success! The PV now reads: {0}".format(result))
Python
0
@@ -4166,24 +4166,80 @@ led=False),%0A + inst_dictionary(%22SOFTMAT%22, is_scheduled=False),%0A %5D%0A%0A n
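The compress_and_hex / dehex_and_decompress pair in the record above is a plain zlib round trip, which the script verifies after the caput/caget. A minimal self-contained sketch of the same idea, ported to Python 3 (the record's code targets Python 2, where str carries bytes and supports .encode('hex')):

import json
import zlib

def compress_and_hex(value):
    # zlib-compress the UTF-8 bytes, then render them as a hex string
    return zlib.compress(value.encode("utf-8")).hex()

def dehex_and_decompress(value):
    # reverse path: hex string -> compressed bytes -> original text
    return zlib.decompress(bytes.fromhex(value)).decode("utf-8")

instruments = [{"name": "DEMO", "hostName": "NDXDEMO",
                "pvPrefix": "IN:DEMO:", "isScheduled": False}]
payload = json.dumps(instruments)
blob = compress_and_hex(payload)
assert dehex_and_decompress(blob) == payload  # lossless round trip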
1f19fa52e40db1f28d620aa8bf75745e814c0f81
Remove unused import
cogs/fun.py
cogs/fun.py
import random import discord from discord.ext import commands from utils.messages import ColoredEmbed class Fun: def __init__(self, bot): self.bot = bot @commands.command() async def xkcd(self, ctx): """See the latest XKCD comic.""" async with self.bot.session.get('https://xkcd.com/info.0.json') as r: if r.status == 200: json = await r.json() embed = ColoredEmbed(title=json['title'], description=json['alt']) embed.set_image(url=json['img']) await ctx.send(embed=embed) @commands.command() async def lenny(self, ctx): """( ͡° ͜ʖ ͡°)""" await ctx.send('( ͡° ͜ʖ ͡°)') def setup(bot): bot.add_cog(Fun(bot))
Python
0.000001
@@ -1,18 +1,4 @@ -import random%0A impo
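The diff above only deletes an import that nothing references. One hypothetical way (my own sketch, not part of the repo) to detect such dead imports mechanically with the standard library's ast module:

import ast

source = "import random\nimport discord\nprint(discord.__name__)\n"
tree = ast.parse(source)
imported = {alias.asname or alias.name.split(".")[0]
            for node in ast.walk(tree) if isinstance(node, ast.Import)
            for alias in node.names}
used = {node.id for node in ast.walk(tree) if isinstance(node, ast.Name)}
print(imported - used)  # {'random'} -- the name this commit removes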
6725788d8bbd64cbfd251036787125c07a50b32c
convert camera parameter value to string
ros3ddevcontroller/controller.py
ros3ddevcontroller/controller.py
# # Copyright (c) 2015 Open-RnD Sp. z o.o. # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, copy, # modify, merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Core Ros3D controller logic""" from __future__ import absolute_import import logging import datetime from ros3ddevcontroller.param.store import ParametersStore, ParameterSnapshotter from ros3ddevcontroller.param.backends import FileSnapshotBackend from ros3ddevcontroller.bus import servo from ros3ddevcontroller.util import make_dir from threading import Lock class Controller(object): def __init__(self): self.logger = logging.getLogger(__name__) self.servo = None self.camera = None self.snapshots_location = None self.snapshots_backend = None self.lock = Lock() def set_servo(self, servo): """Servo interface""" self.logger.debug('setting servo to %s', servo) self.servo = servo def set_camera(self, camera): self.logger.debug('setting camera to %s', camera) self.camera = camera def set_snapshots_location(self, loc): """Set location of snapshots""" self.snapshots_location = loc make_dir(self.snapshots_location) # update snapshots backend self.snapshots_backend = FileSnapshotBackend(self.snapshots_location) @classmethod def is_servo_parameter(cls, param): """Return true if parameter is applicable to servo""" return ParametersStore.is_servo_parameter(param.name) def apply_servo_parameter(self, param): """Apply parameter to servo :param param Parameter: parameter to apply :rtype: bool :return: True if successful""" value = param.value name = param.name self.logger.debug('set servo param') try: if self.servo.is_active(): res = self.servo.change_param(name, value) self.logger.debug('apply result: %s', res) return res else: return self.apply_other_parameter(param) except servo.ParamApplyError: self.logger.exception('error when applying a parameter') return False @classmethod def is_camera_parameter(cls, param): """Return True if parameter is applicable to camera""" return ParametersStore.is_camera_parameter(param.name) def apply_camera_parameter(self, param): """Apply camera parameter :param param Parameter: parameter to apply :rtype: bool :return: True if successful""" value, name = param.value, param.name try: if self.camera.is_active(): res = self.camera.set_param(name, value) self.logger.debug('apply result: %s', res) return res else: return self.apply_other_parameter(param) except Exception: self.logger.exception('unexpected error when setting camera parameter') return False @classmethod def is_parameter_writable(cls, param): """Return True if parameter is applicable to camera""" return 
ParametersStore.is_read_only(param.name) == False def apply_other_parameter(self, param): """Apply parameter directly in parameter store, i.e. skipping any interaction with external devices. :param param Parameter: :rtype: bool :return: True""" value = param.value name = param.name ParametersStore.set(name, value) return True def apply_single_parameter(self, param): """Apply single parameter :param param Parameter: parameter descriptor""" if not self.is_parameter_writable(param): self.logger.warning('parameter %s is read-only, skipping', param.name) status = False elif self.is_servo_parameter(param): status = self.apply_servo_parameter(param) elif self.is_camera_parameter(param): status = self.apply_camera_parameter(param) else: status = self.apply_other_parameter(param) return status def apply_parameters(self, params): """Apply a parameter set :param params list of ParamDesc: list of parameters to apply :rtype: list(Parameter) :return: list of parameters applied""" # record changed parameter descriptors changed_params = [] # apply parameters serially, note that if any parameter takes # longer to aplly this will contribute to wait time of the # whole request for param in params: applied = self.apply_single_parameter(param) # param validation was successful and was applied to servo if applied: par = ParametersStore.get(param.name) changed_params.append(par) return changed_params def get_parameters(self): """Return a dict with all parameters in the system""" return ParametersStore.parameters_as_dict() def _record_timestamp(self): """Helper for updating current timestamp in parameters""" now = datetime.datetime.now() ParametersStore.set('record_date', now.strftime('%Y-%m-%d'), notify=False) ParametersStore.set('record_time', now.strftime('%H:%M:%S'), notify=False) def take_snapshot(self): """Record a snapshot of current parameter set :return: ID of snapshot """ with self.lock: # record timestamp self._record_timestamp() return ParameterSnapshotter.save(self.snapshots_backend) def list_snapshots(self): """List IDs of available snapshots :rtype list(int): :return: list of snapshot IDs """ with self.lock: snapshots = self.snapshots_backend.list_snapshots() self.logger.debug('snapshots: %s', snapshots) return snapshots def get_snapshot(self, snapshot_id): """Obtain data of snapshot `snapshot_id`. Returns a dictionary with parameters stored in the snapshot :param snapshot_id int: ID of snapshot :rtype dict: """ with self.lock: self.logger.debug('load snapshot %d', snapshot_id) sdata = self.snapshots_backend.load(snapshot_id) if not sdata: self.logger.error('failed to load snapshot data for ID %d', snapshot_id) return sdata def delete_snapshot(self, snapshot_id): """Remove snapshot snapshot `snapshot_id`. :param snapshot_id int: ID of snapshot :return: ID of removed snapshot """ with self.lock: self.logger.debug('delete snapshot %d', snapshot_id) return self.snapshots_backend.delete(snapshot_id) def delete_all(self): """Remove all snapshots. :return: list of removed snapshot IDs """ with self.lock: return self.snapshots_backend.delete_all()
Python
0
@@ -3662,38 +3662,43 @@ set_param(name, +str( value) +) %0A
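The one-line fix wraps the parameter value in str() before handing it to the camera. A toy reproduction of the failure mode it guards against (FakeCamera is a stand-in of mine; the real camera bus API is not shown in the record):

class FakeCamera:
    """Stand-in device that, like many text protocols, only takes strings."""
    def set_param(self, name, value):
        if not isinstance(value, str):
            raise TypeError("camera protocol expects string values")
        return True

camera = FakeCamera()
value = 2.8                               # e.g. an aperture arriving as a float
camera.set_param("aperture", str(value))  # the fix: coerce before sending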
14eaff694912320296412f2e4ca51072c5dddf49
add unit_testing_only decorator
corehq/apps/userreports/dbaccessors.py
corehq/apps/userreports/dbaccessors.py
from django.conf import settings from dimagi.utils.couch.database import iter_docs from corehq.apps.domain.dbaccessors import get_docs_in_domain_by_class from corehq.apps.domain.models import Domain def get_number_of_report_configs_by_data_source(domain, data_source_id): """ Return the number of report configurations that use the given data source. """ from corehq.apps.userreports.models import ReportConfiguration return ReportConfiguration.view( 'userreports/report_configs_by_data_source', reduce=True, key=[domain, data_source_id] ).one()['value'] def get_all_report_configs(): assert settings.UNIT_TESTING all_domains = Domain.get_all() for domain in all_domains: for report_config in get_report_configs_for_domain(domain.name): yield report_config def get_report_configs_for_domain(domain): from corehq.apps.userreports.models import ReportConfiguration return sorted( get_docs_in_domain_by_class(domain, ReportConfiguration), key=lambda report: report.title, )
Python
0
@@ -1,89 +1,4 @@ -from django.conf import settings%0A%0Afrom dimagi.utils.couch.database import iter_docs%0A%0A from @@ -109,16 +109,69 @@ Domain%0A +from corehq.util.test_utils import unit_testing_only%0A %0A%0Adef ge @@ -570,16 +570,35 @@ lue'%5D%0A%0A%0A +@unit_testing_only%0A def get_ @@ -623,42 +623,8 @@ ():%0A - assert settings.UNIT_TESTING%0A%0A
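The commit swaps an inline assert settings.UNIT_TESTING for a reusable @unit_testing_only decorator. A minimal sketch of how such a decorator can work (the real corehq.util.test_utils implementation may differ):

import functools

UNIT_TESTING = True  # stand-in for settings.UNIT_TESTING

def unit_testing_only(fn):
    """Refuse to run the wrapped function outside the test environment."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        assert UNIT_TESTING, "%s may only be called in tests" % fn.__name__
        return fn(*args, **kwargs)
    return wrapper

@unit_testing_only
def get_all_report_configs():
    return []

print(get_all_report_configs())  # runs only while UNIT_TESTING is True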
e595d823e303a6db0a9c7e24f6a9d1644615009c
Bump version of CaptchaService.py
module/plugins/internal/CaptchaService.py
module/plugins/internal/CaptchaService.py
# -*- coding: utf-8 -*- from module.plugins.internal.Captcha import Captcha class CaptchaService(Captcha): __name__ = "CaptchaService" __type__ = "captcha" __version__ = "0.34" __status__ = "stable" __description__ = """Base anti-captcha service plugin""" __license__ = "GPLv3" __authors__ = [("Walter Purcaro", "[email protected]")] def init(self): self.key = None #: Last key detected #@TODO: Recheck in 0.4.10 def retrieve_key(self, data): if self.detect_key(data): return self.key else: self.fail(_("%s key not found") % self.__name__) def retrieve_data(self): return self.pyfile.plugin.data or self.pyfile.plugin.last_html or "" def detect_key(self, data=None): raise NotImplementedError def challenge(self, key=None, data=None): raise NotImplementedError def result(self, server, challenge): raise NotImplementedError
Python
0.003249
@@ -188,17 +188,17 @@ _ = %220.3 -4 +5 %22%0A __
06c72a654abf915c1ddbe8a58153a712de41eb69
fix defaults
rootpy/root2matplotlib.py
rootpy/root2matplotlib.py
from .plotting.hist import _HistBase from .plotting import HistStack from math import sqrt import matplotlib.pyplot as plt __all__ = [ 'hist', 'histstack', 'bar', 'barstack', 'errorbar', ] def _set_defaults(h, kwargs, types=['common']): defaults = {} for key in types: if key == 'common': defaults['facecolor'] = h.GetFillColor('mpl') defaults['edgecolor'] = h.GetLineColor('mpl') defaults['linestyle'] = h.GetLineStyle('mpl') defaults['ecolor'] = h.GetMarkerColor('mpl') defaults['label'] = h.GetTitle() defaults['visible'] = h.visible elif key == 'fill': defaults['facecolor'] = h.GetFillColor('mpl') defaults['hatch'] = h.GetFillStyle('mpl') for key, value in defaults.items(): if key not in kwargs: kwargs[key] = value def _set_bounds(h, was_empty): if was_empty: plt.ylim(ymax=h.maximum() * 1.1) plt.xlim([h.xedges[0], h.xedges[-1]]) else: ymin, ymax = plt.ylim() plt.ylim(ymax=max(ymax, h.maximum() * 1.1)) xmin, xmax = plt.xlim() plt.xlim([min(xmin, h.xedges[0]), max(xmax, h.xedges[-1])]) def maybe_reversed(x, reverse=False): if reverse: return reversed(x) return x def hist(hists, stacked=True, reverse=False, **kwargs): """ Make a matplotlib 'step' hist plot. *hists* may be a single :class:`rootpy.plotting.hist.Hist` object or a :class:`rootpy.plotting.hist.HistStack`. The *histtype* will be set automatically to 'step' or 'stepfilled' for each object based on its FillStyle. All additional keyword arguments will be passed to :func:`matplotlib.pyplot.hist`. Keyword arguments: *stacked*: If *True*, the hists will be stacked with the first hist on the bottom. If *False*, the hists will be overlaid with the first hist in the background. *reverse*: If *True*, the stacking order will be reversed. """ was_empty = plt.ylim()[1] == 1. returns = [] if isinstance(hists, _HistBase): # This is a single histogram. returns = _hist(hists, **kwargs) elif stacked: for i in range(len(hists)): if reverse: hsum = sum(hists[i:]) print hsum.GetFillColor() elif i: hsum = sum(reversed(hists[:-i])) else: hsum = sum(reversed(hists)) # Plot the fill with no edge. returns.append(_hist(hsum, **kwargs)) # Plot the edge with no fill. plt.hist(hsum.xcenters, weights=hsum, bins=hsum.xedges, histtype='step', edgecolor=hsum.GetLineColor()) else: for h in maybe_reversed(hists, reverse): returns.append(_hist(h, **kwargs)) _set_bounds(max(hists), was_empty) return returns def _hist(h, **kwargs): _set_defaults(h, kwargs, ['common', 'fill']) kwargs['histtype'] = h.GetFillStyle('root') and 'stepfilled' or 'step' return plt.hist(h.xcenters, weights=h, bins=h.xedges, **kwargs) def bar(hists, stacked=True, reverse=False, yerr=False, rwidth=0.8, **kwargs): """ Make a matplotlib bar plot. *hists* may be a single :class:`rootpy.plotting.hist.Hist` object or a :class:`rootpy.plotting.hist.HistStack`. All additional keyword arguments will be passed to :func:`matplotlib.pyplot.bar`. Keyword arguments: *stacked*: If *True*, the hists will be stacked with the first hist on the bottom. If *False*, the hists will be overlaid with the first hist in the background. If 'cluster', then the bars will be arranged side-by-side. *reverse*: If *True*, the stacking order is reversed. *yerr*: If *False*, no errors are displayed. If *True*, an individual error will be displayed for each hist in the stack. If 'linear' or 'quadratic', a single error bar will be displayed with either the linear or quadratic sum of the individual errors. *rwidth*: The relative width of the bars as a fraction of the bin width. """ was_empty = plt.ylim()[1] == 1. 
returns = [] nhists = len(hists) if isinstance(hists, _HistBase): # This is a single histogram. returns = _bar(hists, yerr, **kwargs) elif stacked == 'cluster': hlist = maybe_reversed(hists, reverse) contents = [list(h) for h in hlist] xcenters = [h.xcenters for h in hlist] for i, h in enumerate(hlist): width = rwidth/nhists offset = (1 - rwidth) / 2 + i * width returns.append(_bar(h, offset, width, yerr, **kwargs)) elif stacked is True: hlist = maybe_reversed(hists, reverse) bottom, toterr = None, None if yerr == 'linear': toterr = [sum([h.GetBinError(i + 1) for h in hists]) for i in range(len(hists[0]))] elif yerr == 'quadratic': toterr = [sqrt(sum([h.GetBinError(i + 1)**2 for h in hists])) for i in range(len(hists[0]))] for i, h in enumerate(hlist): err = None if yerr is True: err = True elif yerr and i == (nhists - 1): err = toterr returns.append(_bar(h, yerr=err, bottom=bottom, **kwargs)) if bottom: bottom += h else: bottom = h else: for h in hlist: returns.append(_bar(h, yerr=bool(yerr), **kwargs)) _set_bounds(sum(hists), was_empty) return returns def _bar(h, roffset=0., rwidth=1., yerr=None, **kwargs): if yerr is True: yerr = list(h.yerrors()) _set_defaults(h, kwargs) width = [x * rwidth for x in h.xwidths] left = [h.xedges[i] + h.xwidths[i] * roffset for i in range(len(h))] height = h return plt.bar(left, height, width, yerr=yerr, **kwargs) def _errorbar(h, **kwargs): _set_defaults(h, kwargs) return plt.bar(left=h.xedges[:-1], height=h, width=h.xwidths, **kwargs) def errorbar(h, **kwargs): was_empty = plt.ylim()[1] == 1. defaults = {'color': h.linecolor, 'label': h.GetTitle(), 'visible': h.visible, 'fmt': h.markerstyle, 'capsize': 0, 'label': h.GetTitle(), } for key, value in defaults.items(): if key not in kwargs: kwargs[key] = value r = plt.errorbar(h.xcenters, h, yerr=list(h.yerrors()), xerr=list(h.xerrors()), **kwargs) _set_bounds(h, was_empty) return r
Python
0.000002
@@ -6271,17 +6271,27 @@ : h. -linec +GetLineC olor +('mpl') ,%0A @@ -6394,19 +6394,29 @@ : h. -m +GetM arker -s +S tyle +('mpl') ,%0A
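Both before and after this fix, the module merges per-histogram defaults into matplotlib keyword arguments with an explicit only-if-absent loop (see _set_defaults in the record); dict.setdefault expresses the same rule more compactly:

def merge_defaults(kwargs, defaults):
    # keep caller-supplied values, fill in the rest from the histogram style
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    return kwargs

style = merge_defaults({"color": "red"}, {"color": "black", "capsize": 0})
print(style)  # {'color': 'red', 'capsize': 0} -- the caller's color wins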
5efc40cd9be0c212f142d7469a9bf6f44da0827a
add story support in client with -s boolean operator
instapy_cli/__main__.py
instapy_cli/__main__.py
import sys from platform import python_version from instapy_cli.cli import InstapyCli as client from optparse import OptionParser import pkg_resources # part of setuptools version = pkg_resources.require('instapy_cli')[0].version ''' TODO: - use instapy_cli.media to download image link and use it for upload and configure_photo - rewrite main to support file and links for media ''' def main(args=None): welcome_msg = 'instapy-cli' print('instapy ' + version + ' | python ' + python_version()) # cli = client() # cli.loop(args) parser = OptionParser(usage="usage: %prog [options]") parser.add_option('-u', dest='username', help='username') parser.add_option('-p', dest='password', help='password') parser.add_option('-f', dest='file', help='file path or url') parser.add_option('-t', dest='caption', help='caption text') # parser.add_option('-h', dest='help', help='help') (options, args) = parser.parse_args(args) if args is None or ( not options.username and not options.password and not options.file and not options.caption ): print('[USE] instapy -u USR -p PSW -f FILE/LINK -t \'TEXT CAPTION\'') print('\nFor other reference go to >> https://github.com/b3nab/instapy-cli') return if not options.username: parser.error('Username is required') password = options.password if not options.password: import getpass password = getpass.getpass() if not options.file: parser.error('File path or url link is required to create a media to upload') with client(options.username, password) as cli: text = options.caption or '' cli.upload(options.file, text) if __name__ == '__main__': main()
Python
0
@@ -229,216 +229,29 @@ on%0A%0A -%0A'''%0ATODO:%0A- use instapy_cli.media to download image link and use it for upload and configure_photo%0A- rewrite main to support file and links for media%0A'''%0Adef main(args=None):%0A%0A welcome_msg = 'instapy-cli' +def main(args=None):%0A %0A @@ -265,16 +265,20 @@ 'instapy +-cli ' + ver @@ -324,51 +324,8 @@ ))%0A%0A - # cli = client()%0A # cli.loop(args)%0A%0A @@ -633,16 +633,104 @@ text')%0A + parser.add_option('-s', dest='story', action='store_true', help='publish to story')%0A # pa @@ -948,32 +948,33 @@ and%0A not +( options.caption%0A @@ -972,16 +972,34 @@ .caption + or options.story) %0A ):%0A @@ -1317,16 +1317,18 @@ :%0A + import g @@ -1334,16 +1334,18 @@ getpass%0A + pa @@ -1481,16 +1481,82 @@ upload') +%0A story = options.story%0A if not story:%0A story = False %0A%0A wi @@ -1675,16 +1675,23 @@ le, text +, story )%0A%0Aif __
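The new -s flag uses optparse's store_true action, which leaves the destination as None when the flag is absent — hence the story = options.story; if not story: story = False guard in the diff. In isolation:

from optparse import OptionParser

parser = OptionParser(usage="usage: %prog [options]")
parser.add_option('-s', dest='story', action='store_true',
                  help='publish to story')

options, _ = parser.parse_args(['-s'])
print(options.story)   # True when -s is passed
options, _ = parser.parse_args([])
print(options.story)   # None when absent, so the code normalises it to False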
6341f351aab0ff510fcf1d9ce135be680763a971
Fix comments in backtracking/coloring.py (#4857)
backtracking/coloring.py
backtracking/coloring.py
""" Graph Coloring also called "m coloring problem" consists of coloring given graph with at most m colors such that no adjacent vertices are assigned same color Wikipedia: https://en.wikipedia.org/wiki/Graph_coloring """ def valid_coloring( neighbours: list[int], colored_vertices: list[int], color: int ) -> bool: """ For each neighbour check if coloring constraint is satisfied If any of the neighbours fail the constraint return False If all neighbours validate constraint return True >>> neighbours = [0,1,0,1,0] >>> colored_vertices = [0, 2, 1, 2, 0] >>> color = 1 >>> valid_coloring(neighbours, colored_vertices, color) True >>> color = 2 >>> valid_coloring(neighbours, colored_vertices, color) False """ # Does any neighbour not satisfy the constraints return not any( neighbour == 1 and colored_vertices[i] == color for i, neighbour in enumerate(neighbours) ) def util_color( graph: list[list[int]], max_colors: int, colored_vertices: list[int], index: int ) -> bool: """ Pseudo-Code Base Case: 1. Check if coloring is complete 1.1 If complete return True (meaning that we successfully colored graph) Recursive Step: 2. Itterates over each color: Check if current coloring is valid: 2.1. Color given vertex 2.2. Do recursive call check if this coloring leads to solving problem 2.4. if current coloring leads to solution return 2.5. Uncolor given vertex >>> graph = [[0, 1, 0, 0, 0], ... [1, 0, 1, 0, 1], ... [0, 1, 0, 1, 0], ... [0, 1, 1, 0, 0], ... [0, 1, 0, 0, 0]] >>> max_colors = 3 >>> colored_vertices = [0, 1, 0, 0, 0] >>> index = 3 >>> util_color(graph, max_colors, colored_vertices, index) True >>> max_colors = 2 >>> util_color(graph, max_colors, colored_vertices, index) False """ # Base Case if index == len(graph): return True # Recursive Step for i in range(max_colors): if valid_coloring(graph[index], colored_vertices, i): # Color current vertex colored_vertices[index] = i # Validate coloring if util_color(graph, max_colors, colored_vertices, index + 1): return True # Backtrack colored_vertices[index] = -1 return False def color(graph: list[list[int]], max_colors: int) -> list[int]: """ Wrapper function to call subroutine called util_color which will either return True or False. If True is returned colored_vertices list is filled with correct colorings >>> graph = [[0, 1, 0, 0, 0], ... [1, 0, 1, 0, 1], ... [0, 1, 0, 1, 0], ... [0, 1, 1, 0, 0], ... [0, 1, 0, 0, 0]] >>> max_colors = 3 >>> color(graph, max_colors) [0, 1, 0, 2, 0] >>> max_colors = 2 >>> color(graph, max_colors) [] """ colored_vertices = [-1] * len(graph) if util_color(graph, max_colors, colored_vertices, 0): return colored_vertices return []
Python
0
@@ -74,16 +74,18 @@ oloring +a given gr @@ -158,16 +158,20 @@ ssigned +the same col @@ -370,32 +370,36 @@ ghbour check if +the coloring constra @@ -473,24 +473,24 @@ eturn False%0A - If all n @@ -508,16 +508,20 @@ alidate +the constrai @@ -1247,16 +1247,20 @@ colored +the graph)%0A%0A @@ -1288,17 +1288,16 @@ 2. It -t erates o @@ -1325,24 +1325,28 @@ Check if +the current colo @@ -1430,16 +1430,17 @@ ive call +, check i @@ -1468,23 +1468,18 @@ to +a sol -ving problem +ution %0A @@ -1521,16 +1521,18 @@ eads to +a solution
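Pulled out of the record for quick experimentation, the valid_coloring predicate rejects a colour as soon as any adjacent, already-coloured vertex holds it (the two calls reproduce the doctests in the record):

def valid_coloring(neighbours, colored_vertices, color):
    # True unless some adjacent (== 1) vertex already wears this colour
    return not any(
        neighbour == 1 and colored_vertices[i] == color
        for i, neighbour in enumerate(neighbours)
    )

print(valid_coloring([0, 1, 0, 1, 0], [0, 2, 1, 2, 0], 1))  # True
print(valid_coloring([0, 1, 0, 1, 0], [0, 2, 1, 2, 0], 2))  # False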
70076f43b3d430dbbe8a4106c53526a4dbacdfed
fix mention logging
cogs/msg.py
cogs/msg.py
import discord import logging import re from .utils import config from .utils.allmsgs import quickcmds, custom from .utils.checks import permEmbed, me from datetime import datetime from discord import utils log = logging.getLogger('LOG') class OnMessage: def __init__(self, bot): self.bot = bot self.config = config.Config('config.json') self.logging = config.Config('log.json') async def on_message(self, message): # Increase Message Count if hasattr(self.bot, 'message_count'): self.bot.message_count += 1 # Custom commands if me(message): if hasattr(self.bot, 'icount'): self.bot.icount += 1 prefix = '' for i in self.config.get('prefix', []): if message.content.startswith(i): prefix = i break if prefix is not '': response = custom(prefix, message.content) if response is None: pass else: if response[0] == 'embed': if permEmbed(message): await message.channel.send(content='%s' % response[2], embed=discord.Embed(colour=0x9b59b6).set_image(url=response[1])) else: await message.channel.send('{0}\n{1}'.format(response[2], response[1])) else: await message.channel.send('{0}\n{1}'.format(response[2], response[1])) self.bot.commands_triggered[response[3]] += 1 destination = None if isinstance(message.channel, discord.DMChannel): destination = 'Private Message' else: destination = '#{0.channel.name},({0.guild.name})'.format(message) log.info('In {1}:{0.content}'.format(message, destination)) else: response = quickcmds(message.content.lower().strip()) if response: await message.delete() self.bot.commands_triggered[response[1]] += 1 await message.channel.send(response[0]) destination = None if isinstance(message.channel, discord.DMChannel): destination = 'Private Message' else: destination = '#{0.channel.name},({0.guild.name})'.format(message) log.info('In {1}:{0.content}'.format(message, destination)) elif (message.guild is not None) and (self.config.get('setlog', []) == 'on'): if message.author.id in self.logging.get('block-user', []): return if message.channel.id in self.logging.get('block-channel', []): return if message.guild.id in self.logging.get('guild', []) or message.channel.id in self.logging.get('channel', []): mention = name = ping = False msg = re.sub('[,.!?]', '', message.content.lower()) if any(map(lambda v: v in msg.split(), self.logging.get('block-key', []))): return if (message.guild.get_member(self.config.get('me', [])).mentioned_in(message)): em = discord.Embed(title='\N{BELL} MENTION', colour=0x9b59b6) ping = True role = False if hasattr(self.bot, 'mention_count'): self.bot.mention_count += 1 for role in message.role_mentions: if utils.get(message.author.roles, id=role.id): role = True em = discord.Embed(title='\N{SPEAKER WITH THREE SOUND WAVES} ROLE MENTION', colour=0x9b59b6) log.info("Role Mention from #%s, %s" % (message.channel, message.guild)) if not role: log.info("Mention from #%s, %s" % (message.channel, message.guild)) else: for word in self.logging.get('key', []): if word in msg.split(): em = discord.Embed(title='\N{HEAVY EXCLAMATION MARK SYMBOL} %s MENTION' % word.upper(), colour=0x9b59b6) mention = name = True log.info("%s Mention in #%s, %s" % (word.title(), message.channel, message.guild)) break if mention or ping: if name: if hasattr(self.bot, 'mention_count_name'): self.bot.mention_count_name += 1 em.set_author(name=message.author, icon_url=message.author.avatar_url) em.add_field(name='In', value="#%s, ``%s``" % (message.channel, message.guild), inline=False) em.add_field(name='At', value="%s" % datetime.now().__format__('%A, %d. 
%B %Y @ %H:%M:%S'), inline=False) em.add_field(name='Message', value="%s" % message.clean_content, inline=False) em.set_thumbnail(url=message.author.avatar_url) await self.bot.get_channel(self.config.get('log_channel', [])).send(embed=em) def setup(bot): bot.add_cog(OnMessage(bot))
Python
0.000004
@@ -178,34 +178,8 @@ time -%0Afrom discord import utils %0A%0Alo @@ -3359,417 +3359,183 @@ -em = discord.Embed(title='%5CN%7BBELL%7D MENTION', colour=0x9b59b6)%0A ping = True%0A role = False%0A if hasattr(self.bot, 'mention_count'):%0A self.bot.mention_count += 1%0A for role in message.role_mentions:%0A if utils.get(message.author.roles, id=role.id):%0A role = True%0A +if hasattr(self.bot, 'mention_count'):%0A self.bot.mention_count += 1%0A ping = True%0A if message.role_mentions != %5B%5D:%0A @@ -3663,36 +3663,32 @@ - log.info(%22Role M @@ -3768,20 +3768,99 @@ -if not role: +else:%0A em = discord.Embed(title='%5CN%7BBELL%7D MENTION', colour=0x9b59b6) %0A
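The patch collapses a per-role membership loop into a single check on message.role_mentions. The resulting branch logic, reduced to a dependency-free sketch (Msg is a stand-in for discord.py's Message):

class Msg:
    def __init__(self, role_mentions):
        self.role_mentions = role_mentions

def classify(message):
    # after the fix: any role mention at all selects the role-mention embed,
    # otherwise the plain bell-icon mention embed is used
    if message.role_mentions != []:
        return "ROLE MENTION"
    return "MENTION"

print(classify(Msg([])))          # MENTION
print(classify(Msg(["@mods"])))   # ROLE MENTION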
0e8214509db0b273e7136270946b8d6688899356
Fix api.resultdb.update_inclusions
recipe_modules/resultdb/api.py
recipe_modules/resultdb/api.py
# Copyright 2019 The LUCI Authors. All rights reserved. # Use of this source code is governed under the Apache License, Version 2.0 # that can be found in the LICENSE file. """API for interacting with the ResultDB service. Requires `rdb` command in `$PATH`: https://godoc.org/go.chromium.org/luci/resultdb/cmd/rdb """ import json from google.protobuf import json_format from recipe_engine import recipe_api from . import common class ResultDBAPI(recipe_api.RecipeApi): """A module for interacting with ResultDB.""" # Expose serialize and deserialize functions. serialize = staticmethod(common.serialize) deserialize = staticmethod(common.deserialize) Invocation = common.Invocation # TODO(nodir): add query method, a wrapper of rdb-ls. def remove_invocations(self, invocations, step_name=None): """Shortcut for resultdb.update_inclusions().""" return self.update_inclusions( remove_invocations=invocations, step_name=step_name) def include_invocations(self, invocations, step_name=None): """Shortcut for resultdb.update_inclusions().""" return self.update_inclusions( add_invocations=invocations, step_name=step_name) def update_inclusions(self, add_invocations=None, remove_invocations=None, step_name=None): """Add and/or remove included invocations to/from the current invocation. Args: add_invocations (list of str): invocation id's to add to the current invocation. remove_invocations (list of str): invocation id's to remove from the current invocation. This updates the inclusions of the current invocation specified in the LUCI_CONTEXT. """ if not (add_invocations or remove_invocations): # Nothing to do. return args = [] if add_invocations: args += ['-add', ','.join(sorted(add_invocations))] if remove_invocations: args += ['-remove', ','.join(sorted(remove_invocations))] return self._run_rdb( subcommand='update-invocations', args=args, step_name=step_name, ) def chromium_derive( self, swarming_host, task_ids, variants_with_unexpected_results=False, limit=None, step_name=None): """Returns results derived from the specified Swarming tasks. TODO(crbug.com/1030191): remove this function in favor of query(). Most users will be interested only in results of test variants that had unexpected results. This can be achieved by passing variants_with_unexpected_results=True. This significantly reduces output size and latency. Blocks on task completion. Example: results = api.resultdb.derive( 'chromium-swarm.appspot.com', ['deadbeef', 'badcoffee'], variants_with_unexpected_results=True, ) failed_tests = {r.test_path for r in results} Args: * `swarming_host` (str): hostname (without scheme) of the swarming server, such as chromium-swarm.appspot.com. * `task_ids` (list of str): ids of the tasks to fetch results from. If more than one, then a union of their test results is returned. Its ok to pass same task ids, or ids of tasks that ran the same tests and had different results. Each task should have * output.json or full_results.json in the isolated output. The file must be in Chromium JSON Test Result format or Chromium's GTest format. If the task does not have it, the request fails. * optional tag "bucket" with the LUCI bucket, e.g. "ci" If the tag is not present, the test variants will not have the corresponding key. * optional tag "buildername" with a builder name, e.g. "linux-rel" If the tag is not present, the test variants will not have the corresponding key. 
* optional tag "test_suite" with a name of a test suite from a JSON file in https://chromium.googlesource.com/chromium/src/+/master/testing/buildbot/ If the tag is not present, the test variants will not have the corresponding key. * optional tag "ninja_target" with a full name of the ninja target used to compile the test binary used in the task, e.g. "ninja_target://chrome/tests:browser_tests". If the tag is not present, the test paths are not prefixed. * `variants_with_unexpected_results` (bool): if True, return only test results from variants that have unexpected results. This significantly reduces output size and latency. * `limit` (int): maximum number of test results to return. Defaults to 1000. Returns: A dict {invocation_id: api.Invocation}. """ assert isinstance(swarming_host, str) and swarming_host, swarming_host assert not swarming_host.startswith('http://'), swarming_host assert not swarming_host.startswith('https://'), swarming_host assert all(isinstance(id, str) for id in task_ids), task_ids assert limit is None or limit >= 0 task_ids = list(task_ids) limit = limit or 1000 args = [ '-json', '-wait', '-n', str(limit), ] if variants_with_unexpected_results: args += ['-u'] args += [swarming_host] + task_ids step_res = self._run_rdb( subcommand='chromium-derive', args=args, step_name=step_name, stdout=self.m.raw_io.output(add_output_log=True), step_test_data=lambda: self.m.raw_io.test_api.stream_output(''), ) return common.deserialize(step_res.stdout) ############################################################################## # Implementation details. def _run_rdb( self, subcommand, step_name=None, args=None, stdout=None, step_test_data=None, timeout=None): """Runs rdb tool.""" cmdline = ['rdb', subcommand] + (args or []) return self.m.step( step_name or ('rdb ' + subcommand), cmdline, infra_step=True, stdout=stdout, step_test_data=step_test_data, timeout=timeout, )
Python
0.000001
@@ -2061,21 +2061,20 @@ pdate-in -vocat +clus ions',%0A
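The bug was a mismatched subcommand name: the method is update_inclusions but it invoked rdb update-invocations. The flag construction around it, restated as a standalone sketch (not the real recipe API):

def build_update_inclusions(add=None, remove=None):
    # mirrors the recipe module's argument assembly for the rdb tool
    args = []
    if add:
        args += ['-add', ','.join(sorted(add))]
    if remove:
        args += ['-remove', ','.join(sorted(remove))]
    return ['rdb', 'update-inclusions'] + args

print(build_update_inclusions(add={'inv/2', 'inv/1'}))
# ['rdb', 'update-inclusions', '-add', 'inv/1,inv/2']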
86c9a2191e412d7701940e5aa64279ca000235b3
Add ListsRanking, which is identical to ProbabilisticRanking, and PairwisePreferenceRanking as well #57
interleaving/ranking.py
interleaving/ranking.py
from collections import defaultdict class BalancedRanking(list): ''' A list of document IDs generated by an interleaving method including two rankers A and B ''' __slots__ = ['a', 'b'] def __hash__(self): return hash((tuple(self), tuple(self.a), tuple(self.b))) def dumpd(self): return { 'a': self.a, 'b': self.b, 'ranking_list': self, } class CreditRanking(list): ''' A list of document IDs generated by an interleaving method including credits Args: num_rankers: number of rankers contents: initial list of document IDs (optional) ''' __slots__ = ['credits'] def __init__(self, num_rankers, contents=[]): ''' Initialize self.credits num_rankers: number of rankers contents: initial list of document IDs (optional) ''' self += contents self.credits = {} for i in range(num_rankers): self.credits[i] = defaultdict(float) def __hash__(self): l = [] for k, v in self.credits.items(): ll = [] for kk, vv in v.items(): ll.append((kk, vv)) l.append((k, frozenset(ll))) return hash((tuple(self), frozenset(l))) def dumpd(self): return { 'credits': self.credits, 'ranking_list': self, } class ProbabilisticRanking(list): ''' A list of document IDs generated by an interleaving method, including original rankings Args: lists: list of original document ID lists contents: initial list of document IDs (optional) ''' __slots__ = ['lists'] def __init__(self, lists, contents=[]): ''' Initialize self.teams lists: list of original document ID lists contents: initial list of document IDs (optional) ''' self += contents self.lists = lists def __hash__(self): l = [] for v in self.lists: l.append(tuple(v)) return hash((tuple(self), tuple(l))) def dumpd(self): return { 'ranking_list': self, 'lists': self.lists, } class TeamRanking(list): ''' A list of document IDs generated by an interleaving method, including teams Args: team_indices: indices for self.teams contents: initial list of document IDs (optional) ''' __slots__ = ['teams'] def __init__(self, team_indices, contents=[]): ''' Initialize self.teams team_indices: indices for self.teams contents: initial list of document IDs (optional) ''' self += contents self.teams = {i: set() for i in team_indices} def __hash__(self): ''' TeamRanking can be a key by which rankings with the same document ID list and the same team assignment are the same ''' l = [] for k, v in self.teams.items(): l.append((k, frozenset(v))) return hash((tuple(self), frozenset(l))) def dumpd(self): team_dict = {} for tid, s in self.teams.items(): team_dict[tid] = sorted(list(s)) return { 'ranking_list': self, 'teams': team_dict, }
Python
0
@@ -1431,21 +1431,13 @@ ass -Probabilistic +Lists Rank @@ -2231,16 +2231,68 @@ %7D%0A%0A +class ProbabilisticRanking(ListsRanking):%0A pass%0A%0A class Te @@ -3390,20 +3390,77 @@ eam_dict,%0A %7D%0A +%0Aclass PairwisePreferenceRanking(ListsRanking):%0A pass%0A
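The diff renames ProbabilisticRanking to ListsRanking and reintroduces ProbabilisticRanking — plus the new PairwisePreferenceRanking — as empty subclasses. A reduced sketch of that pattern: identical behaviour, but distinct types for isinstance dispatch:

class ListsRanking(list):
    def __init__(self, lists, contents=()):
        super().__init__(contents)
        self.lists = lists  # the original per-ranker document lists

class ProbabilisticRanking(ListsRanking):
    pass  # same data and methods; the subclass only names the method used

class PairwisePreferenceRanking(ListsRanking):
    pass

r = PairwisePreferenceRanking([[1, 2], [2, 3]], contents=[1, 2, 3])
print(isinstance(r, ListsRanking), r.lists)  # True [[1, 2], [2, 3]]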
ca295aff7a051c5a8b5272a47f4af32378db3185
Update PID_wrap.py
control/PID/PID_wrap.py
control/PID/PID_wrap.py
import sensors.SensorClass as SensorClass import PID_controller import numpy as np class PID(object): def __init__(self): self.data = SensorClass.Data_file.State() self.controller_x = PID_controller.PID_Controller(self) self.controller_y = PID_controller.PID_Controller(self) self.controller_z = PID_controller.PID_Controller(self) self.controller_phi = PID_controller.PID_Controller(self) self.controller_mu = PID_controller.PID_Controller(self) self.controller_theta = PID_controller.PID_Controller(self) def engage_PID(self): updated_x = self.update_x() updated_y = self.update_y() updated_z = self.update_z() updated_phi = self.update_phi() updated_mu = self.update_mu() updated_theta = self.update_theta() return np.array([(updated_x), (updated_y), (updated_z), (updated_phi), (updated_mu), (updated_theta)]) def update_x(self): self.controller_x.update(self.data.get_state()) return self.controller_x.getOutput() def update_y(self): self.controller_y.update(self.data.get_state()) return self.controller_y.getOutput() def update_z(self): self.controller_z.update(self.data.get_state()) return self.controller_z.getOutput() def update_phi(self): self.controller_phi.update(self.data.get_state('Roll')) return self.controller_phi.getOutput() def update_mu(self): self.controller_mu.update(self.data.get_state('Pitch')) return self.controller_mu.getOutput() def update_theta(self): self.controller_theta.update(self.data.get_state('Heading')) return self.controller_theta.getOutput() def set_destination(self, delta_x, delta_y, delta_z, delta_phi, delta_mu, delta_theta): curr_x = self.data.get_state('') curr_y = self.data.get_state('') curr_z = self.data.get_state('') curr_phi = self.data.get_state('Roll') curr_mu = self.data.get_state('Pitch') curr_theta = self.data.get_state('Heading') self.dest_x = curr_x + delta_x self.dest_y = curr_y + delta_y self.dest_z = curr_z + delta_z self.dest_phi = curr_phi + delta_phi self.dest_mu = curr_mu + delta_mu self.dest_theta = curr_theta + delta_theta self.controller_x.set_setpoint(self.dest_x) self.controller_y.set_setpoint(self.dest_y) self.controller_z.set_setpoint(self.dest_z) self.controller_phi.set_setpoint(self.dest_phi) self.controller_mu.set_setpoint(self.dest_mu) self.controller_theta.set_setpoint(self.dest_theta) if __name__ == '__main__': test = PID() while True: print(test.engage())
Python
0
@@ -76,16 +76,49 @@ y as np%0A +import Accel_to_Pos as posfinder%0A %0A%0Aclass @@ -146,32 +146,67 @@ __init__(self):%0A + self.pos = posfinder(self)%0A self.dat @@ -1892,16 +1892,55 @@ heta):%0A%0A + %22%22%22%0A Depended on Sonar%0A%0A @@ -2046,32 +2046,284 @@ a.get_state('')%0A + %22%22%22%0A curr_x = self.pos.integration(self.data.get_state('Acceleration-X'))%0A curr_y = self.pos.integration(self.data.get_state('Acceleration-Y'))%0A curr_z = self.pos.integration(self.data.get_state('Acceleration-Z'))%0A %0A curr_phi
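The patch feeds raw accelerometer readings through an Accel_to_Pos integrator before the PID sees a position. That module isn't shown in the record, so here is a toy double integration over fixed timesteps to illustrate the idea:

def integrate(samples, dt):
    # cumulative trapezoidal integration; applied twice: accel -> vel -> pos
    total, out, prev = 0.0, [], samples[0]
    for s in samples[1:]:
        total += 0.5 * (prev + s) * dt
        out.append(total)
        prev = s
    return out

accel = [0.0, 1.0, 1.0, 0.0]              # m/s^2 sampled every 0.1 s
velocity = [0.0] + integrate(accel, 0.1)
position = integrate(velocity, 0.1)
print(position[-1])                       # net displacement, ~0.03 m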
c70725998da63acc0837400fcc885f60b98b8ee3
Update for new return format for rel show.
dbclient.py
dbclient.py
# -*- coding: iso-8859-1 -*- import socket, types, base64 class EResponse(Exception): pass class EDuplicate(EResponse): pass class dbclient: def __init__(self, host, port): self.server = (host, port) self.userpass = None self.auth_ok = False self.is_connected = False def _reconnect(self): if self.is_connected: return self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.connect(self.server) self.fh = self.sock.makefile() self.is_connected = True self.auth_ok = False if self.userpass: self._send_auth() def _writeline(self, line, retry=True): self._reconnect() line = line + "\n" try: self.sock.send(line) except: self.is_connected = False if retry: self._reconnect() self.sock.send(line) def _readline(self): return self.fh.readline() def _parse_search(self, line, posts, wanted): if line == "OK\n": return True if line[0] != "R": raise EResponse(line) tags = [] guids = [] f = {} md5 = None for token in line[1:].split(): type = token[0] data = token[1:] if type == "P": md5 = data elif type == "T": tags.append(data) elif type == "G": guids.append(data) elif type == "F": field, value = data.split("=", 1) f[field] = value else: raise EResponse(line) if not md5: raise EResponse(line) if md5 in posts: raise EDuplicate(md5) if not wanted or "tagname" in wanted: f["tagname"] = tags if not wanted or "tagguid" in wanted: f["tagguid"] = guids posts[md5] = f def _search_post(self, search, wanted = None): self._writeline(search) posts = {} while not self._parse_search(self._readline(), posts, wanted): pass return posts def get_post(self, md5): posts = self._search_post("SPM" + md5 + " Ftagname Ftagguid Fext Fcreated Fwidth Fheight") if not md5 in posts: return None post = posts[md5] post["md5"] = md5 return post def _list(self, data): if not data: return [] if type(data) == types.StringType: return [data] return data def search_post(self, tags=None, guids=None, excl_tags=None, excl_guids=None , wanted=None): search = "SP" for want in self._list(wanted): search += "F" + want + " " for tag in self._list(tags): search += "TN" + tag + " " for guid in self._list(guids): search += "TG" + guid + " " for tag in self._list(excl_tags): search += "tN" + tag + " " for guid in self._list(excl_guids): search += "tG" + guid + " " return self._search_post(search, wanted) def _send_auth(self): self._writeline("a" + self.userpass[0] + " " + self.userpass[1], False) if self._readline() == "OK\n": self.auth_ok = True def auth(self, user, password): self.userpass = (user, password) self._send_auth() return self.auth_ok def _enc(str): while len(str) % 3: str += "\x00" return base64.b64encode(str, "_-") def _dec(enc): str = base64.b64decode(enc, "_-") while str[-1] == "\x00": str = str[:-1] def add_post(self, md5, width, height, filetype, rating=None, source=None, title=None): cmd = "AP" + md5 cmd += " width=" + str(width) cmd += " height=" + str(height) cmd += " filetype=" + filetype if rating: cmd += " rating=" + rating if source: cmd += " source=" + self._enc(source) if title: cmd += " title=" + self._enc(title) self._writeline(cmd) res = self._readline() if res != "OK\n": raise EResponse(res) def _rels(self, c, md5, rels): cmd = "R" + c + md5 for rel in self._list(rels): cmd += " " + rel self._writeline(cmd) res = self._readline() if res != "OK\n": raise EResponse(res) def add_rels(self, md5, rels): self._rels("R", md5, rels) def remove_rels(self, md5, rels): self._rels("r", md5, rels) def _parse_rels(self, line, rels): if line == "OK\n": return True if line[0] != "P": raise EResponse(line) a = 
line[1:].split() p = a[0] l = [] if p in rels: l = rels[p] for rel in a[1:]: if rel[0] != "R": raise EResponse(line) l.append(rel[1:]) rels[p] = l def post_rels(self, md5): cmd = "RS" + md5 self._writeline(cmd) rels = {} while not self._parse_rels(self._readline(), rels): pass if not md5 in rels: return None return rels[md5]
Python
0
@@ -3756,17 +3756,17 @@ %5B0%5D != %22 -P +R %22: raise @@ -3877,51 +3877,8 @@ :%5D:%0A -%09%09%09if rel%5B0%5D != %22R%22: raise EResponse(line)%0A %09%09%09l @@ -3888,20 +3888,16 @@ pend(rel -%5B1:%5D )%0A%09%09rels
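After the server-side change, a relationship line arrives as R<post-md5> followed by bare related md5s (previously each related id carried its own R prefix). The updated parsing, lifted into a standalone sketch:

def parse_rels(line, rels):
    # wire format: "R<md5> <relmd5> <relmd5>...\n"; stream ends with "OK\n"
    if line == "OK\n":
        return True
    assert line[0] == "R"
    md5, *related = line[1:].split()
    rels.setdefault(md5, []).extend(related)
    return False

rels = {}
parse_rels("Rabc123 def456 789abc\n", rels)
print(rels)  # {'abc123': ['def456', '789abc']}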
f26c2059ff6e2a595097ef7a03efe149f9e253eb
Add default images for podcasts if necessary
iterator.py
iterator.py
import os, re, requests rootdir = '_posts' for subdir, dirs, files in os.walk(rootdir): for file in files: filename = os.path.join(subdir, file) f = open(filename, "r") contents = f.readlines() f.close() # Find first image for key, line in enumerate(contents): src = re.search('\!\[.*?\]\((.*?)\)', line) if src: wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1)) if wordpress_src: image_src = wordpress_src.group(1) path = 'images/wordpress/'+image_src print 'Retrieving ' + path + '...' if not os.path.isfile(path): print path f = open(path, "w") f.write(requests.get("http://blog.stackoverflow.com/wp-content/uploads/" + wordpress_src.group(1)).content) f.close() continue f = open(filename, "w") contents = "".join(contents) f.write(contents) f.close()
Python
0.000001
@@ -259,591 +259,234 @@ -%09for key, line in enumerate(contents):%0A %09%09src = re.search('%5C!%5C%5B.*?%5C%5D%5C((.*?)%5C)', lin + if re.search('podcast', filenam e) +: %0A -%09%09if src:%0A %09%09%09wordpress_src = re.search('/blog/images/wordpress/(.*)', src.group(1))%0A %09%09%09if wordpress_src + if re.search('%5Ehero: ', contents%5B6%5D) :%0A -%09 -%09%09%09image_src = wordpress_src.group(1)%0A%09 %09%09%09path = 'images/wordpress/'+image_src%0A%09 + -%09%09%09 print -'Retrieving ' + path + '...'%0A%09 %09%09%09if not os.path.isfile(path):%0A%09 %09%09%09%09print path%0A%09%09 %09%09%09f = open(path, %22w%22)%0A%09%09 %09%09%09f.write(requests.get(%22http://blog.stackoverflow.com/wp-content/uploads/%22 + wordpress_src.group(1)).content)%0A%09%09 %09%09%09f.close()%0A%0A %09continue%0A +filename%0A contents.insert(6, 'hero: /blog/images/category/podcasts.jpg%5Cn')%0A -%09 f = +file. open @@ -509,51 +509,28 @@ -%09contents = %22%22.join(contents)%0A + -%09 + f.write( cont @@ -525,16 +525,24 @@ f.write( +%22%22.join( contents @@ -542,22 +542,34 @@ ontents) +) %0A -%09 + f.close( @@ -561,16 +561,17 @@ f.close() +%0A
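The patched script splices a default hero front-matter line into podcast posts instead of fetching wordpress images. A simplified standalone version of that splice, with a not-already-present guard of my own (the record's exact condition is hard to read from the encoded diff):

contents = [
    "---\n", "layout: post\n", "title: Example Episode\n",
    "author: someone\n", "date: 2016-01-01\n", "categories: podcasts\n",
    "---\n", "Body...\n",
]
# splice a default hero image into the front matter, as the patch does
if not any(line.startswith("hero: ") for line in contents):
    contents.insert(6, "hero: /blog/images/category/podcasts.jpg\n")
print("".join(contents), end="")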
6d52a6a1447ae854e22bc6317b694cb3bb317c12
Fix import paths
curiosity/bot.py
curiosity/bot.py
import traceback import logbook from ruamel import yaml from curious.commands.bot import CommandsBot from curious.commands.context import Context from curious.commands.exc import CheckFailureError, MissingArgumentError, ConversionFailedError from curious.dataclasses import Game, Status, Message from curious.event import EventContext from curious.ext.paginator import ReactionsPaginator class Curiosity(CommandsBot): def __init__(self): try: with open("config.yml") as f: self.config = yaml.safe_load(f) except FileNotFoundError as e: print("You need to make a config.yml.") raise SystemExit(1) from e token = self.config["bot_token"] super().__init__(token, command_prefix="c!") self.logger = logbook.Logger("curiosity") async def on_command_error(self, ctx: Context, exc: Exception): if isinstance(exc, (CheckFailureError, MissingArgumentError, ConversionFailedError)): await ctx.channel.send(":x: {}".format(str(exc))) else: fmtted = traceback.format_exception(None, exc.__cause__, exc.__cause__.__traceback__) final = "```{}```".format(''.join(fmtted)) if len(final) < 1900: await ctx.channel.send(final) else: items = ["```{}```".format(i) for i in traceback.format_exception(None, exc.__cause__, exc.__cause__.__traceback__)] p = ReactionsPaginator(channel=ctx.channel, content=items, respond_to=ctx.message.author.user) await p.paginate() async def on_connect(self, ctx): self.logger.info("Connected to Discord on shard {0}, " "logged in as {1.name}#{1.discriminator}.".format(ctx.shard_id, self.user)) self.logger.info("I am owned by {0.name}#{0.discriminator}.".format(self.application_info.owner)) self.logger.info("Invite URL: {}".format(self.invite_url)) await self.change_status(game=Game(name="curiosity loading..."), status=Status.DND, shard_id=ctx.shard_id) async def on_ready(self, ctx): await self.change_status(game=Game( name="[shard {}/{}] curio is the future!".format(ctx.shard_id + 1, self.shard_count) ), status=Status.ONLINE, shard_id=ctx.shard_id) if ctx.shard_id != 0: return plugins = self.config.get("plugins", []) for plugin in plugins: try: await self.load_plugins_from(plugin) except: self.logger.exception("Failed to load {}!".format(plugin)) else: self.logger.info("Loaded plugin {}.".format(plugin)) async def on_message_create(self, ctx: EventContext, message: Message): self.logger.info("Recieved message: {message.content} " "from {message.author.name} ({message.author.user.name}){bot}" .format(message=message, bot=" [BOT]" if message.author.user.bot else "")) self.logger.info(" On channel: #{message.channel.name}".format(message=message)) if message.guild: self.logger.info(" On guild: {message.guild.name} ({message.guild.id})".format(message=message))
Python
0.00008
@@ -261,16 +261,23 @@ aclasses +.status import @@ -288,17 +288,56 @@ , Status -, +%0Afrom curious.dataclasses.message import Message
a72e0a6068614b740ade7586ec316db7b9611b46
Make JBrowse work in DEBUG mode without nginx.
genome_designer/urls.py
genome_designer/urls.py
from django.conf.urls import include from django.conf.urls import patterns from django.conf.urls import url from django.views.generic import RedirectView urlpatterns = patterns('', url(r'^$', 'genome_designer.main.views.home_view'), # Project-specific views url(r'^projects$', 'genome_designer.main.views.project_list_view'), url(r'^projects/create$', 'genome_designer.main.views.project_create_view'), url(r'^projects/([\w-]+)$', 'genome_designer.main.views.project_view'), url(r'^projects/([\w-]+)/delete$', 'genome_designer.main.views.project_delete'), # Tab base views. url(r'^projects/([\w-]+)/data$', 'genome_designer.main.views.project_view'), url(r'^projects/([\w-]+)/align$', 'genome_designer.main.views.tab_root_align'), url(r'^projects/([\w-]+)/analyze$', 'genome_designer.main.views.tab_root_analyze'), # Reference genomes url(r'^projects/([\w-]+)/refgenomes$', 'genome_designer.main.views.reference_genome_list_view'), url(r'^projects/([\w-]+)/refgenomes/([\w-]+)$', 'genome_designer.main.views.reference_genome_view'), # Alignments url(r'^projects/([\w-]+)/alignments$', 'genome_designer.main.views.alignment_list_view'), url(r'^projects/([\w-]+)/alignments/create$', 'genome_designer.main.views.alignment_create_view'), url(r'^projects/([\w-]+)/alignments/([\w-]+)$', 'genome_designer.main.views.alignment_view'), url(r'^projects/([\w-]+)/alignments/([\w-]+)/samplealign/([\w-]+)/error$', 'genome_designer.main.views.sample_alignment_error_view'), # Variant sets url(r'^projects/([\w-]+)/sets$', 'genome_designer.main.views.variant_set_list_view'), url(r'^projects/([\w-]+)/sets/([\w-]+)$', 'genome_designer.main.views.variant_set_view'), # Samples url(r'^projects/([\w-]+)/samples$', 'genome_designer.main.views.sample_list_view'), # Variants url(r'^projects/([\w-]+)/refgenomes/([\w-]+)/variants/([\w-]+)$', 'genome_designer.main.views.single_variant_view'), # Genes url(r'^projects/([\w-]+)/genes$', 'genome_designer.main.views.gene_list_view'), # GO terms url(r'^projects/([\w-]+)/goterms$', 'genome_designer.main.views.goterm_list_view'), ############################################################################ # Templates ############################################################################ url(r'^templates/sample_list_targets_template.tsv$', 'genome_designer.main.views.sample_list_targets_template'), url(r'^templates/variant_set_upload_template.vcf$', 'genome_designer.main.views.variant_set_upload_template'), ############################################################################ # Auth ############################################################################ # django-registration defaults (further delgates to django.contrib.auth.url) (r'^accounts/', include('registration.backends.simple.urls')), # The default behavior of registration is redirect to 'users/<username>'. # For now let's catch this request here and just redirect to '/'. (r'^users/', RedirectView.as_view(url='/')), ############################################################################ # XHR Actions ############################################################################ url(r'^_/sets/exportcsv$', 'genome_designer.main.xhr_handlers.export_variant_set_as_csv'), url(r'^_/variants$', 'genome_designer.main.xhr_handlers.get_variant_list'), url(r'^_/variants/modify_set_membership$', 'genome_designer.main.xhr_handlers.modify_variant_in_set_membership'), )
Python
0.000001
@@ -148,16 +148,33 @@ ctView%0A%0A +import settings%0A%0A urlpatte @@ -3902,8 +3902,149 @@ ip'),%0A)%0A +%0Aif settings.DEBUG:%0A from django.conf.urls.static import static%0A urlpatterns += static('jbrowse', document_root=settings.JBROWSE_ROOT)%0A
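The appended block uses Django's standard development-only static serving helper. As it would sit in a urls.py (requires a configured Django settings module; JBROWSE_ROOT is the project's own setting):

from django.conf import settings
from django.conf.urls.static import static

urlpatterns = [
    # ... the regular routes from above ...
]

if settings.DEBUG:
    # let Django serve the JBrowse assets itself during development;
    # in production nginx is expected to own this URL prefix
    urlpatterns += static('jbrowse', document_root=settings.JBROWSE_ROOT)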
e0f3e68435b406e3bad9b7f7e459b724ea832e9e
Disable summernote editor test from Travis
shuup_tests/browser/admin/test_editor.py
shuup_tests/browser/admin/test_editor.py
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2018, Shuup Inc. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. import os import pytest from django.core.urlresolvers import reverse from django.utils.translation import activate from shuup import configuration from shuup.testing import factories from shuup.testing.browser_utils import ( click_element, move_to_element, wait_until_appeared, wait_until_condition ) from shuup.testing.utils import initialize_admin_browser_test pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.") @pytest.mark.browser @pytest.mark.djangodb def test_summernote_editor_picture(browser, admin_user, live_server, settings): activate("en") factories.get_default_shop() factories.get_default_product_type() factories.get_default_sales_unit() factories.get_default_tax_class() filer_image = factories.get_random_filer_image() configuration.set(None, "shuup_product_tour_complete", True) initialize_admin_browser_test(browser, live_server, settings) browser.driver.set_window_size(1920, 1080) url = reverse("shuup_admin:shop_product.new") browser.visit("%s%s" % (live_server, url)) wait_until_condition(browser, condition=lambda x: x.is_text_present("New shop product")) img_icon_selector = "#id_base-description__en-editor-wrap i[class='note-icon-picture']" move_to_element(browser, img_icon_selector) click_element(browser, img_icon_selector) wait_until_condition(browser, lambda b: len(b.windows) == 2) # change to the media browser window browser.windows.current = browser.windows[1] # click to select the picture wait_until_appeared(browser, "a.file-preview") click_element(browser, "a.file-preview") # back to the main window wait_until_condition(browser, lambda b: len(b.windows) == 1) browser.windows.current = browser.windows[0] # make sure the image was added to the editor wait_until_appeared( browser, "#id_base-description__en-editor-wrap .note-editable img[src='%s']" % filer_image.url, timeout=20)
Python
0
@@ -772,16 +772,126 @@ jangodb%0A [email protected](os.environ.get(%22SHUUP_TESTS_TRAVIS%22, %220%22) == %221%22, reason=%22Disable when run through tox.%22)%0A def test
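The added marker is the stock pytest environment-gated skip. A self-contained version using a placeholder variable name of my own (CI_NO_BROWSER) rather than the repo's SHUUP_TESTS_TRAVIS:

import os

import pytest

@pytest.mark.skipif(os.environ.get("CI_NO_BROWSER", "0") == "1",
                    reason="browser tests disabled in this CI environment")
def test_editor_opens():
    assert True  # skipped entirely when the environment variable is set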
2d228ed998ecdfd95822189d3532dbaafd3827ef
refactor datetime field in TickObject
rqalpha/model/tick.py
rqalpha/model/tick.py
# -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime

import numpy as np

from rqalpha.environment import Environment
from rqalpha.utils.logger import system_log
from rqalpha.utils.datetime_func import convert_int_to_datetime, convert_ms_int_to_datetime


class TickObject(object):
    def __init__(self, instrument, tick_dict):
        """
        Tick 对象
        :param instrument: Instrument
        :param tick_dict: dict
        """

        try:
            dt = tick_dict["datetime"]
        except KeyError:
            pass
        else:
            if not dt:
                tick_dict["datetime"] = datetime.datetime.min
            else:
                if not isinstance(dt, datetime.datetime):
                    if dt > 10000000000000000:  # ms
                        dt = convert_ms_int_to_datetime(dt)
                    else:
                        dt = convert_int_to_datetime(dt)

                tick_dict["datetime"] = dt

        self._instrument = instrument
        self._tick_dict = tick_dict

    @property
    def order_book_id(self):
        """
        [str] 标的代码
        """
        return self._instrument.order_book_id

    @property
    def instrument(self):
        return self._instrument

    @property
    def datetime(self):
        """
        [datetime.datetime] 当前快照数据的时间戳
        """
        try:
            return self._tick_dict['datetime']
        except (KeyError, ValueError):
            return datetime.datetime.min

    @property
    def open(self):
        """
        [float] 当日开盘价
        """
        return self._tick_dict['open']

    @property
    def last(self):
        """
        [float] 当前最新价
        """
        return self._tick_dict['last']

    @property
    def high(self):
        """
        [float] 截止到当前的最高价
        """
        return self._tick_dict['high']

    @property
    def low(self):
        """
        [float] 截止到当前的最低价
        """
        return self._tick_dict['low']

    @property
    def prev_close(self):
        """
        [float] 昨日收盘价
        """
        try:
            return self._tick_dict['prev_close']
        except (KeyError, ValueError):
            return 0

    @property
    def volume(self):
        """
        [float] 截止到当前的成交量
        """
        try:
            return self._tick_dict['volume']
        except (KeyError, ValueError):
            return 0

    @property
    def total_turnover(self):
        """
        [float] 截止到当前的成交额
        """
        try:
            return self._tick_dict['total_turnover']
        except (KeyError, ValueError):
            return 0

    @property
    def open_interest(self):
        """
        [float] 截止到当前的持仓量(期货专用)
        """
        try:
            return self._tick_dict['open_interest']
        except (KeyError, ValueError):
            return 0

    @property
    def prev_settlement(self):
        """
        [float] 昨日结算价(期货专用)
        """
        try:
            return self._tick_dict['prev_settlement']
        except (KeyError, ValueError):
            return 0

    @property
    def asks(self):
        try:
            return self._tick_dict['asks']
        except (KeyError, ValueError):
            return [0] * 5

    @property
    def ask_vols(self):
        try:
            return self._tick_dict['ask_vols']
        except (KeyError, ValueError):
            return [0] * 5

    @property
    def bids(self):
        try:
            return self._tick_dict['bids']
        except (KeyError, ValueError):
            return [0] * 5

    @property
    def bid_vols(self):
        try:
            return self._tick_dict['bid_vols']
        except (KeyError, ValueError):
            return [0] * 5

    @property
    def limit_up(self):
        try:
            return self._tick_dict['limit_up']
        except (KeyError, ValueError):
            return [0] * 5

    @property
    def limit_down(self):
        try:
            return self._tick_dict['limit_down']
        except (KeyError, ValueError):
            return [0] * 5

    @property
    def isnan(self):
        return np.isnan(self.last)

    def __repr__(self):
        items = []
        for name in dir(self):
            if name.startswith("_"):
                continue
            items.append((name, getattr(self, name)))
        return "Tick({0})".format(', '.join('{0}: {1}'.format(k, v) for k, v in items))

    def __getitem__(self, key):
        return getattr(self, key)


class Tick(TickObject):
    def __init__(self, order_book_id, tick):
        system_log.warn("[deprecated] Tick class is no longer used. use TickObject class instead.")
        try:
            tick["asks"] = tick["ask"]
        except KeyError:
            pass
        try:
            tick["bids"] = tick["bid"]
        except KeyError:
            pass
        try:
            tick["ask_vols"] = tick["ask_vol"]
        except KeyError:
            pass
        try:
            tick["bid_vols"] = tick["bid_vol"]
        except KeyError:
            pass
        super(Tick, self).__init__(
            instrument=Environment.get_instance().data_proxy.instruments(order_book_id),
            tick_dict=tick
        )

    @property
    def ask(self):
        return self.asks

    @property
    def bid(self):
        return self.bids

    @property
    def ask_vol(self):
        return self.ask_vols

    @property
    def bid_vol(self):
        return self.bid_vols
Python
0.000001
@@ -1003,519 +1003,8 @@ %22%22%22%0A -%0A try:%0A dt = tick_dict%5B%22datetime%22%5D%0A except KeyError:%0A pass%0A else:%0A if not dt:%0A tick_dict%5B%22datetime%22%5D = datetime.datetime.min%0A else:%0A if not isinstance(dt, datetime.datetime):%0A if dt %3E 10000000000000000: # ms%0A dt = convert_ms_int_to_datetime(dt)%0A else:%0A dt = convert_int_to_datetime(dt)%0A%0A tick_dict%5B%22datetime%22%5D = dt%0A%0A @@ -1398,38 +1398,36 @@ ry:%0A -return +dt = self._tick_dict @@ -1518,16 +1518,290 @@ time.min +%0A else:%0A if not isinstance(dt, datetime.datetime):%0A if dt %3E 10000000000000000: # ms%0A return convert_ms_int_to_datetime(dt)%0A else:%0A return convert_int_to_datetime(dt)%0A return dt %0A%0A @p
6f7c11c13793cbba7904cfd2a27ab3eb59ab9302
Update ispyb/sp/xtalimaging.py: proper multi-line docstring
ispyb/sp/xtalimaging.py
ispyb/sp/xtalimaging.py
from __future__ import absolute_import, division, print_function

from ispyb.interface.dataarea import DataArea


class XtalImaging(DataArea):
    """provides methods for accessing crystal imaging tables."""

    def upsert_sample_image(
        self,
        id=None,
        sample_id=None,
        inspection_id=None,
        microns_per_pixel_x=None,
        microns_per_pixel_y=None,
        image_full_path=None,
        comments=None,
    ):
        """Store new or update existing sample image.

        :param image_full_path: The full path to the sample image
        :return: The sample_image_id.
        """
        return self.get_connection().call_sp_write(
            procname="upsert_sample_image",
            args=[
                id,
                sample_id,
                inspection_id,
                microns_per_pixel_x,
                microns_per_pixel_y,
                image_full_path,
                comments,
            ],
        )

    def upsert_sample_image_auto_score(
        self, image_full_path, schema_name, score_class, probability
    ):
        """Store new or update existing automatic score for a sample image.

        :param image_full_path: The full path to the sample image
        :param schema_name: The name of the scoring schema, e.g. MARCO
        :param score_class: A string that describes the thing we're scoring,
            e.g. crystal, clear, precipitant, other
        :param probability: A float indicating the probability that the image
            contains the score_class
        """
        self.get_connection().call_sp_write(
            procname="upsert_sample_image_auto_score",
            args=[image_full_path, schema_name, score_class, probability],
        )

    def insert_subsample_for_image_full_path(
        self,
        image_full_path,
        source,
        position1x,
        position1y,
        position2x=None,
        position2y=None,
    ):
        """Store new subsample for a given sample image. Either specify a point
        (by providing position1x and position1y) or a ROI box (by additionally
        providing position2x and position2y). Position coordinates are given in
        pixels from the top-left corner of the image.

        :param image_full_path: The full path to the sample image
        :type image_full_path: str
        :param source: manual or auto
        :type source: str
        :param position1x: x component of position1
        :type position1x: int
        :param position1y: y component of position1
        :type position1y: int
        :param position2x: x component of position2 which is the lower right corner of a ROI box
        :type position2x: int
        :param position2y: y component of position2 which is the lower right corner of a ROI box
        :type position2y: int
        :return: The subsample_id.
        """
        id = None
        return self.get_connection().call_sp_write(
            procname="insert_subsample_for_image_full_path",
            args=[
                id,
                image_full_path,
                source,
                position1x,
                position1y,
                position2x,
                position2y,
            ],
        )

    def retrieve_container_for_barcode(self, barcode):
        """Retrieve info about the container indetified by the give barcode."""
        return self.get_connection().call_sp_retrieve(
            procname="retrieve_container_for_barcode", args=[barcode]
        )

    def retrieve_container_for_inspection_id(self, inspection_id):
        """Retrieve info about the container identified by container inspection ID"""
        return self.get_connection().call_sp_retrieve(
            procname="retrieve_container_for_inspection_id", args=[inspection_id]
        )

    def retrieve_sample_for_container_id_and_location(self, container_id, location):
        """Retrieve info about the sample identified by the given container ID and its location."""
        return self.get_connection().call_sp_retrieve(
            procname="retrieve_sample_for_container_id_and_location",
            args=[container_id, location],
        )
Python
0
@@ -1967,16 +1967,33 @@ e image. +%0A %0A Either @@ -2007,24 +2007,16 @@ a point -%0A (by pro @@ -2048,16 +2048,24 @@ ition1y) +%0A or a RO @@ -2086,24 +2086,16 @@ tionally -%0A providi @@ -2124,16 +2124,24 @@ tion2y). +%0A Positio @@ -2166,24 +2166,16 @@ given in -%0A pixels @@ -2194,24 +2194,32 @@ -left corner +%0A of the imag
b1d1df9a5368a50f82a8bab6a4be023a51ef603f
Update PickList.py
Cogs/PickList.py
Cogs/PickList.py
import asyncio
import discord
from discord.ext import commands

def setup(bot):
    # This module isn't actually a cog
    return

class Picker:
    def __init__(self, **kwargs):
        self.list = kwargs.get("list", [])
        self.title = kwargs.get("title", None)
        self.timeout = kwargs.get("timeout", 60)
        self.ctx = kwargs.get("ctx", None)
        self.message = kwargs.get("message", None)  # message to edit
        self.max = 10  # Don't set programmatically - as we don't want this overridden
        self.reactions = [ "🛑" ]

    async def _add_reactions(self, message, react_list):
        for r in react_list:
            await message.add_reaction(r)

    async def pick(self):
        # This actually brings up the pick list and handles the nonsense
        # Returns a tuple of (return_code, message)
        # The return code is -1 for cancel, -2 for timeout, -3 for error, 0+ is index
        # Let's check our prerequisites first
        if self.ctx == None or not len(self.list) or len(self.list) > self.max:
            return (-3, None)
        msg = ""
        if self.title:
            msg += self.title + "\n"
        msg += "```\n"
        # Show our list items
        current = 0
        # current_reactions = [self.reactions[0]]
        current_reactions = []
        for item in self.list:
            current += 1
            current_reactions.append("{}\N{COMBINING ENCLOSING KEYCAP}".format(current))
            msg += "{}. {}\n".format(current, item)
        msg += "```"
        # Add the stop reaction
        current_reactions.append(self.reactions[0])
        if self.message:
            message = self.message
            await message.edit(content=msg)
        else:
            message = await self.ctx.send(msg)
        # Add our reactions
        await self._add_reactions(message, current_reactions)
        # Now we would wait...
        def check(reaction, user):
            return user == self.ctx.author and str(reaction.emoji) in current_reactions
        try:
            reaction, user = await self.ctx.bot.wait_for('reaction_add', timeout=self.timeout, check=check)
        except:
            # Didn't get a reaction
            await message.clear_reactions()
            return (-2, message)
        await message.clear_reactions()
        # Get the adjusted index
        ind = current_reactions.index(str(reaction.emoji))+1
        if ind == len(current_reactions):
            ind = -1
        return (ind, message)
Python
0
@@ -2400,10 +2400,8 @@ ji)) -+1 %0A @@ -2437,16 +2437,18 @@ actions) +-1 :%0A
eda2f6905a3275623525c4179358e55e472b4fd7
Fix bug in urls.py following the sample_list template being renamed.
genome_designer/urls.py
genome_designer/urls.py
from django.conf.urls.defaults import include
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url

urlpatterns = patterns('',
    url(r'^$', 'genome_designer.main.views.home_view'),

    # Project-specific views
    url(r'^projects$', 'genome_designer.main.views.project_list_view'),
    url(r'^projects/([\w-]+)$', 'genome_designer.main.views.project_view'),
    url(r'^projects/([\w-]+)/refgenomes$',
            'genome_designer.main.views.reference_genome_list_view'),
    url(r'^projects/([\w-]+)/alignments$',
            'genome_designer.main.views.alignment_list_view'),
    url(r'^projects/([\w-]+)/sets$',
            'genome_designer.main.views.variant_set_list_view'),
    url(r'^projects/([\w-]+)/samples$',
            'genome_designer.main.views.sample_list_view'),
    url(r'^projects/([\w-]+)/variants$',
            'genome_designer.main.views.variant_list_view'),
    url(r'^projects/([\w-]+)/genes$',
            'genome_designer.main.views.gene_list_view'),
    url(r'^projects/([\w-]+)/goterms$',
            'genome_designer.main.views.goterm_list_view'),

    ############################################################################
    # Templates
    url(r'^templates/sample_list_targets_template.tsv$',
            'genome_designer.main.views.sample_list_upload_template'),
    ############################################################################

    ############################################################################
    # Auth
    ############################################################################

    # django-registration defaults (further delgates to django.contrib.auth.url)
    (r'^accounts/', include('registration.backends.simple.urls')),
)
Python
0
@@ -1232,16 +1232,98 @@ mplates%0A + ############################################################################%0A%0A url( @@ -1427,14 +1427,15 @@ ist_ -upload +targets _tem @@ -1447,94 +1447,8 @@ '),%0A - ############################################################################%0A %0A %0A%0A
63702a236d6c882b747cb19b566122a4a8ddfa3b
Change Indicator.add_menu arguments to allow passing CheckMenuItem status
jackselect/indicator.py
jackselect/indicator.py
"""A convenience class for a GTK 3 system tray indicator.""" from pkg_resources import resource_filename import gi gi.require_version('Gtk', '3.0') # noqa from gi.repository import Gtk from gi.repository.GdkPixbuf import Pixbuf class Indicator: """This class defines a standard GTK3 system tray indicator. Class Indicator can be easily reused in any other project. """ def __init__(self, icon, title=None): """Create indicator icon and add menu. Args: icon (str): path to initial icon that will be shown on system panel """ self._icon_cache = {} self.icon = Gtk.StatusIcon.new_from_pixbuf(self._get_icon(icon)) self.menu = Gtk.Menu() self.icon.connect('activate', self.on_popup_menu_open) self.icon.connect('popup-menu', self.on_popup_menu_open) if title: self.icon.set_title(title) def _get_icon(self, icon): """Return icon from package as GdkPixbuf.Pixbuf. Extracts the image from package to a file, stores it in the icon cache if it's not in there yet and returns it. Otherwise just returns the image stored in the cache. """ if icon not in self._icon_cache: filename = resource_filename(__name__, "images/%s" % icon) self._icon_cache[icon] = Pixbuf.new_from_file(filename) return self._icon_cache[icon] def set_icon(self, icon): """Set new icon in system tray. Args: icon (str): path to file with new icon """ self.icon.set_from_pixbuf(self._get_icon(icon)) def set_tooltip(self, callback): self.icon.set_has_tooltip(True) self.icon.connect("query-tooltip", callback) def clear_menu(self): """Clear all entries from the main menu.""" self.menu = Gtk.Menu() def add_menu_item(self, command=None, title=None, icon=None, active=True, is_check=False, menu=None, data=None): """Add mouse right click menu item. Args: command (callable): function that will be called after left mouse click on title title (str): label that will be shown in menu icon (str): name of icon stored in application package active (bool): whether the menu entry can be activated (default: True) data (obj): arbitrary data to associate with the menu entry """ if icon: m_item = Gtk.ImageMenuItem(title) image = Gtk.Image.new_from_pixbuf(self._get_icon(icon)) m_item.set_image(image) elif is_check: m_item = Gtk.CheckMenuItem(title) else: m_item = Gtk.MenuItem(title) if command: m_item.connect('toggled' if is_check else 'activate', command) m_item.set_sensitive(active) m_item.data = data if menu: menu.append(m_item) else: self.menu.append(m_item) return m_item def add_submenu(self, title): """Add a sub menu popup menu.""" submenu = Gtk.Menu() m_item = Gtk.MenuItem(title) m_item.set_submenu(submenu) self.menu.append(m_item) return submenu def add_separator(self): """Add separator between labels in the popup menu.""" m_item = Gtk.SeparatorMenuItem() self.menu.append(m_item) def on_popup_menu_open(self, widget=None, button=None, *args): """Some action requested opening the popup menu.""" self.menu.popup(None, None, Gtk.StatusIcon.position_menu, widget or self.icon, button or 1, Gtk.get_current_event_time()) def on_popup_menu_close(self, widget=None, button=None, *args): """Some action requested closing the popup menu.""" self.menu.popdown()
Python
0
@@ -1919,22 +1919,23 @@ n=None, -active +enabled =True, i @@ -1966,24 +1966,38 @@ + active=False, menu=None, @@ -2681,32 +2681,70 @@ MenuItem(title)%0A + m_item.set_active(active)%0A else:%0A @@ -2904,30 +2904,31 @@ t_sensitive( -active +enabled )%0A m_
b324650da9dde4bbe2e235b29a3b4c571f41a7b2
Comment out debug output
run-google-java-format.py
run-google-java-format.py
#!/usr/bin/python

# This script reformats each file supplied on the command line according to
# the Google Java style (by calling out to the google-java-format program,
# https://github.com/google/google-java-format), but with improvements to
# the formatting of annotations in comments.

from __future__ import print_function
from distutils import spawn
import filecmp
import os
import stat
import subprocess
import sys
import tempfile
import urllib

debug = False
# debug = True

script_dir = os.path.dirname(os.path.abspath(__file__))
# Rather than calling out to the shell, it would be better to
# call directly in Python.
fixup_py = os.path.join(script_dir, "fixup-google-java-format.py")

# Version 1.1 mangles "@param <P>", so take a risk on 1.2 for now.
# gjf_version = "google-java-format-1.1"
# gjf_snapshot = ""
gjf_version = "google-java-format-1.2"
gjf_snapshot = "-SNAPSHOT"
gjf_jar_name = gjf_version + gjf_snapshot + "-all-deps.jar"
# gjf_url = "https://github.com/google/google-java-format/releases/download/" + gjf_version + "/" + gjf_jar_name
gjf_url = "http://types.cs.washington.edu/" + gjf_jar_name
# gjf_url = "http://homes.cs.washington.edu/~mernst/tmp2/" + gjf_jar_name

# Set gjf_jar_path, or retrieve it if it doesn't appear locally
if os.path.isfile(os.path.join(script_dir, gjf_jar_name)):
    gjf_jar_path = os.path.join(script_dir, gjf_jar_name)
elif os.path.isfile(os.path.join(os.path.dirname(script_dir), "lib", gjf_jar_name)):
    gjf_jar_path = os.path.join(os.path.dirname(script_dir), "lib", gjf_jar_name)
else:
    gjf_jar_path = os.path.join(script_dir, gjf_jar_name)
    print("retrieving " + gjf_url + " to " + gjf_jar_path)
    urllib.urlretrieve(gjf_url, gjf_jar_path)

# For some reason, the "git ls-files" must be run from the root.
# (I can run "git ls-files" from the command line in any directory.)
def under_git(dir, filename):
    """Return true if filename in dir is under git control."""
    if not spawn.find_executable("git"):
        if debug:
            print("no git executable found")
        return False
    FNULL = open(os.devnull, 'w')
    p = subprocess.Popen(["git", "ls-files", filename, "--error-unmatch"],
                         cwd=dir, stdout=FNULL, stderr=subprocess.STDOUT)
    p.wait()
    if debug:
        print("p.returncode", p.returncode)
    return p.returncode == 0

# Don't replace local with remote if local is under version control.
# It would be better to just test whether the remote is newer than local,
# But raw GitHub URLs don't have the necessary last-modified information.
if not under_git(script_dir, "fixup-google-java-format.py"):
    try:
        urllib.urlretrieve("https://raw.githubusercontent.com/plume-lib/run-google-java-format/master/fixup-google-java-format.py", fixup_py)
    except:
        if os.path.exists(fixup_py):
            print("Couldn't retrieve fixup-google-java-format.py; using cached version")
        else:
            print("Couldn't retrieve fixup-google-java-format.py")
            sys.exit(1)
    os.chmod(fixup_py, os.stat(fixup_py).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

if debug:
    print("script_dir:", script_dir)
    print("fixup_py: ", fixup_py)
    print("gjf_jar_path: ", gjf_jar_path)

files = sys.argv[1:]
if len(files) == 0:
    print("run-google-java-format.py expects 1 or more filenames as arguments")
    sys.exit(1)

result = subprocess.call(["java", "-jar", gjf_jar_path, "--replace"] + files)

# Don't stop if there was an error, because google-java-format won't munge
# files and we still want to run fixup-google-java-format.py.
# if result != 0:
#     print("Error when running google-java-format")
#     sys.exit(result)

# Remove command-line arguments
files = [f for f in files if not f.startswith("-")]
# Exit if no files were supplied (maybe "--help" was supplied)
if not files:
    sys.exit(0)

if debug: print("Running fixup-google-java-format.py")
result = subprocess.call([fixup_py] + files)
if result != 0:
    print("Error when running fixup-google-java-format.py")
    sys.exit(result)
Python
0.000001
@@ -1602,24 +1602,26 @@ ar_name)%0A + # print(%22retr
50bd1ce1118ddb52a54f679fc9faee4bc3110458
Allow the --force command line argument to accept one or more stage names'
rubra/cmdline_args.py
rubra/cmdline_args.py
# Process the unix command line of the pipeline.

import argparse
from version import rubra_version

def get_cmdline_args():
    return parser.parse_args()

parser = argparse.ArgumentParser(
    description='A bioinformatics pipeline system.')

parser.add_argument(
    '--pipeline',
    metavar='PIPELINE_FILE',
    type=str,
    help='Your Ruffus pipeline stages (a Python module)')
parser.add_argument(
    '--config',
    metavar='CONFIG_FILE',
    type=str,
    nargs='+',
    required=True,
    help='One or more configuration files (Python modules)')
parser.add_argument(
    '--verbose',
    type=int,
    choices=(0, 1, 2),
    required=False,
    default=1,
    help='Output verbosity level: 0 = quiet; 1 = normal; \
        2 = chatty (default is 1)')
parser.add_argument(
    '--style',
    type=str,
    choices=('print', 'run', 'flowchart', 'touchfiles'),
    required=False,
    default='print',
    help='Pipeline behaviour: print; run; touchfiles; flowchart (default is print)')
parser.add_argument(
    '--force',
    metavar='TASKNAME',
    type=str,
    required=False,
    default=[],
    help='tasks which are forced to be out of date regardless of timestamps')
parser.add_argument(
    '--end',
    metavar='TASKNAME',
    type=str,
    required=False,
    help='end points (tasks) for the pipeline')
parser.add_argument(
    '--rebuild',
    type=str,
    choices=('fromstart', 'fromend'),
    required=False,
    default='fromstart',
    help='rebuild outputs by working back from end tasks or forwards \
        from start tasks (default is fromstart)')
parser.add_argument(
    '--version', action='version', version='%(prog)s ' + rubra_version)
Python
0
@@ -1101,16 +1101,31 @@ ult=%5B%5D,%0A + nargs='+',%0A help
4009e01004ecd9b8f3d759842181b65a3893f73a
fix `TypeError: the JSON object must be str, bytes or bytearray, not NoneType`
simple_settings/dynamic_settings/base.py
simple_settings/dynamic_settings/base.py
# -*- coding: utf-8 -*-
import re
from copy import deepcopy

import jsonpickle


class BaseReader(object):
    """
    Base class for dynamic readers
    """
    _default_conf = {}

    def __init__(self, conf):
        self.conf = deepcopy(self._default_conf)
        self.conf.update(conf)

        self.key_pattern = self.conf.get('pattern')
        self.auto_casting = self.conf.get('auto_casting')
        self.key_prefix = self.conf.get('prefix')

    def get(self, key):
        if not self._is_valid_key(key):
            return
        result = self._get(self._qualified_key(key))
        if self.auto_casting:
            result = jsonpickle.decode(result)

        return result

    def set(self, key, value):
        if not self._is_valid_key(key):
            return
        if self.auto_casting:
            value = jsonpickle.encode(value)

        self._set(self._qualified_key(key), value)

    def _is_valid_key(self, key):
        if not self.key_pattern:
            return True
        return bool(re.match(self.key_pattern, key))

    def _qualified_key(self, key):
        """
        Prepends the configured prefix to the key (if applicable).

        :param key: The unprefixed key.
        :return: The key with any configured prefix prepended.
        """
        pfx = self.key_prefix if self.key_prefix is not None else ''
        return '{}{}'.format(pfx, key)
Python
0.000001
@@ -602,32 +602,57 @@ elf.auto_casting + and (result is not None) :%0A re
a6356d45059c0a323d315884bd1e24b8bf2eb4f5
Correct error in 'now' command with missing duration
commands.py
commands.py
# coding=utf-8
import argparse
import dateutil.parser
import re
import sys

from datetime import datetime, timedelta

import connection

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()


def quickadd(summary):
    from config import config

    service = connection.connect()

    # Double up single-time events to be 0-length
    match = re.match(r'^\d\d:\d\d ', summary)
    if match:
        summary = match.group(0)[:-1] + '-' + summary

    # Make request
    print "Quick add >>", summary
    result = service.events().quickAdd(
        calendarId=config['calendar_id'],
        summary=summary
    ).execute()

    if result['status'] == 'confirmed':
        print "Added! Link: ", result['htmlLink']
        return True
    else:
        sys.stdout.write("Failed :( - status %s\n" % result['status'])
        return False

parser_quickadd = subparsers.add_parser('quickadd')
parser_quickadd.add_argument('summary')
parser_quickadd.set_defaults(func=quickadd)


def now(summary, offset=0, duration=0):
    from config import config

    service = connection.connect()

    start = datetime.now() + timedelta(minutes=offset)
    end = start + timedelta(minutes=duration)

    print "Adding %i-minute event >> %s" % (duration, summary)
    result = service.events().insert(
        calendarId=config['calendar_id'],
        body={
            'summary': summary,
            'start': {
                'dateTime': start.isoformat(),
                'timeZone': config['timezone']
            },
            'end': {
                'dateTime': end.isoformat(),
                'timeZone': config['timezone']
            }
        }
    ).execute()

    if result['status'] == 'confirmed':
        print "Added! Link: ", result['htmlLink']
        return True
    else:
        sys.stdout.write("Failed :( - status %s\n" % result['status'])
        return False

parser_now = subparsers.add_parser('now')
parser_now.add_argument('offset', type=int, default=0, nargs='?')
parser_now.add_argument('-d', '--duration', type=int)
parser_now.add_argument('summary')
parser_now.set_defaults(func=now)


def for_command(duration, summary):
    from config import config

    service = connection.connect()

    times = [
        datetime.now(),
        datetime.now() + timedelta(minutes=duration)
    ]
    times.sort()
    start, end = times

    print "Adding %i-minute event >> %s" % (abs(duration), summary)
    result = service.events().insert(
        calendarId=config['calendar_id'],
        body={
            'summary': summary,
            'start': {
                'dateTime': start.isoformat(),
                'timeZone': config['timezone']
            },
            'end': {
                'dateTime': end.isoformat(),
                'timeZone': config['timezone']
            }
        }
    ).execute()

    if result['status'] == 'confirmed':
        print "Added! Link: ", result['htmlLink']
        return True
    else:
        sys.stdout.write("Failed :( - status %s\n" % result['status'])
        return False

parser_for = subparsers.add_parser('for')
parser_for.add_argument('duration', type=int)
parser_for.add_argument('summary')
parser_for.set_defaults(func=for_command)


def add_command(summary, when=None, duration=0):
    if when is None:
        when = datetime.now()
    else:
        when = dateutil.parser.parse(when)

    from config import config

    service = connection.connect()

    times = [
        when,
        when + timedelta(minutes=duration)
    ]
    times.sort()
    start, end = times

    print "Adding %i-minute event at %s >> %s" % (abs(duration), when, summary)
    result = service.events().insert(
        calendarId=config['calendar_id'],
        body={
            'summary': summary,
            'start': {
                'dateTime': start.isoformat(),
                'timeZone': config['timezone']
            },
            'end': {
                'dateTime': end.isoformat(),
                'timeZone': config['timezone']
            }
        }
    ).execute()

    if result['status'] == 'confirmed':
        print "Added! Link: ", result['htmlLink']
        return True
    else:
        sys.stdout.write("Failed :( - status %s\n" % result['status'])
        return False

parser_add = subparsers.add_parser('add')
parser_add.add_argument('-w', '--when', default=None)
parser_add.add_argument('summary')
parser_add.set_defaults(func=add_command)
Python
0.000011
@@ -1019,25 +1019,28 @@ 0, duration= -0 +None ):%0A from @@ -1053,32 +1053,79 @@ import config%0A%0A + if duration is None:%0A duration = 0%0A%0A service = co
b884341d73cb80e86df9ec756d5b84154a2bb982
Update migration for tweak to available_power
netbox/dcim/migrations/0072_powerfeeds.py
netbox/dcim/migrations/0072_powerfeeds.py
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('extras', '0021_add_color_comments_changelog_to_tag'),
        ('dcim', '0071_device_components_add_description'),
    ]

    operations = [
        migrations.CreateModel(
            name='PowerFeed',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
                ('created', models.DateField(auto_now_add=True, null=True)),
                ('last_updated', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(max_length=50)),
                ('status', models.PositiveSmallIntegerField(default=1)),
                ('type', models.PositiveSmallIntegerField(default=1)),
                ('supply', models.PositiveSmallIntegerField(default=1)),
                ('phase', models.PositiveSmallIntegerField(default=1)),
                ('voltage', models.PositiveSmallIntegerField(default=120, validators=[django.core.validators.MinValueValidator(1)])),
                ('amperage', models.PositiveSmallIntegerField(default=20, validators=[django.core.validators.MinValueValidator(1)])),
                ('max_utilization', models.PositiveSmallIntegerField(default=80, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)])),
                ('available_power', models.PositiveSmallIntegerField(default=0)),
                ('comments', models.TextField(blank=True)),
                ('cable', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.Cable')),
            ],
            options={
                'ordering': ['power_panel', 'name'],
            },
        ),
        migrations.CreateModel(
            name='PowerPanel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
                ('created', models.DateField(auto_now_add=True, null=True)),
                ('last_updated', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(max_length=50)),
                ('rack_group', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='dcim.RackGroup')),
                ('site', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='dcim.Site')),
            ],
            options={
                'ordering': ['site', 'name'],
            },
        ),
        migrations.AddField(
            model_name='powerfeed',
            name='power_panel',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='powerfeeds', to='dcim.PowerPanel'),
        ),
        migrations.AddField(
            model_name='powerfeed',
            name='rack',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='dcim.Rack'),
        ),
        migrations.AddField(
            model_name='powerfeed',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='powerfeed',
            name='connected_endpoint',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.PowerPort'),
        ),
        migrations.AddField(
            model_name='powerfeed',
            name='connection_status',
            field=models.NullBooleanField(),
        ),
        migrations.RenameField(
            model_name='powerport',
            old_name='connected_endpoint',
            new_name='_connected_poweroutlet',
        ),
        migrations.AddField(
            model_name='powerport',
            name='_connected_powerfeed',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.PowerFeed'),
        ),
        migrations.AddField(
            model_name='powerport',
            name='allocated_draw',
            field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)]),
        ),
        migrations.AddField(
            model_name='powerport',
            name='maximum_draw',
            field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)]),
        ),
        migrations.AddField(
            model_name='powerporttemplate',
            name='allocated_draw',
            field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)]),
        ),
        migrations.AddField(
            model_name='powerporttemplate',
            name='maximum_draw',
            field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)]),
        ),
        migrations.AlterUniqueTogether(
            name='powerpanel',
            unique_together={('site', 'name')},
        ),
        migrations.AlterUniqueTogether(
            name='powerfeed',
            unique_together={('power_panel', 'name')},
        ),
        migrations.AddField(
            model_name='poweroutlet',
            name='feed_leg',
            field=models.PositiveSmallIntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='poweroutlet',
            name='power_port',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='poweroutlets', to='dcim.PowerPort'),
        ),
        migrations.AddField(
            model_name='poweroutlettemplate',
            name='feed_leg',
            field=models.PositiveSmallIntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='poweroutlettemplate',
            name='power_port',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='poweroutlet_templates', to='dcim.PowerPortTemplate'),
        ),
    ]
Python
0
@@ -1553,16 +1553,32 @@ efault=0 +, editable=False )),%0A
a6cb3bfeb5f7201a0e702024257df1f874a3bb70
Bump version 15.
terroroftinytown/client/__init__.py
terroroftinytown/client/__init__.py
VERSION = 14  # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
Python
0
@@ -8,9 +8,9 @@ = 1 -4 +5 #
3f70ead379b7f586313d01d5ab617fd5368f8ce3
Print traceback if startup fails
cthulhubot/management/commands/restart_masters.py
cthulhubot/management/commands/restart_masters.py
from django.core.management.base import BaseCommand

from cthulhubot.models import Buildmaster

class Command(BaseCommand):
    help = 'Restart all Buildmaster processes'

    args = ""

    def handle(self, *fixture_labels, **options):
        verbosity = int(options.get('verbosity', 1))
        commit = int(options.get('commit', 1))

        if verbosity > 1:
            print 'Restarting buildmasters...'

        for b in Buildmaster.objects.all():
            if verbosity > 1:
                print 'Handling buildmaster %s for project %s' % (str(b.id), str(b.project.name))
            try:
                b.stop()
            except:
                print 'Failed to stop master'
            try:
                b.start()
            except:
                print 'Failed to start master'
Python
0.000008
@@ -1,12 +1,44 @@ +from traceback import print_exc%0A from django. @@ -732,16 +732,44 @@ master'%0A + print_exc()%0A @@ -875,16 +875,44 @@ o start master'%0A + print_exc()%0A
8095347cc2b4b1d10ae9e37223d98c2dd44b7164
fix reversing after commit
nm_payment/drivers/bbs/payment_session.py
nm_payment/drivers/bbs/payment_session.py
import concurrent.futures
from threading import Lock
import logging

log = logging.getLogger('nm_payment')

from nm_payment.base import PaymentSession
from nm_payment.exceptions import (
    SessionCompletedError, SessionCancelledError, CancelFailedError,
)

from .session import BBSSession


RUNNING = 'RUNNING'
CANCELLING = 'CANCELLING'
REVERSING = 'REVERSING'
FINISHED = 'FINISHED'
BROKEN = 'BROKEN'


class BBSPaymentSession(BBSSession, PaymentSession):
    def __init__(
            self, connection, amount, *,
            before_commit=None, on_print=None, on_display=None):
        super(BBSPaymentSession, self).__init__(connection)
        self._future = concurrent.futures.Future()

        self._lock = Lock()

        self._state = RUNNING

        self._commit_callback = before_commit
        self._print_callback = on_print
        self._display_callback = on_display

        self._connection.request_transfer_amount(amount).result()

    def _start_reversal(self):
        try:
            self._state = REVERSING
            self._connection.request_reversal().result()
        except Exception as e:
            # XXX This is really really bad
            raise CancelFailedError() from e

    def _on_local_mode_running(self, result, **kwargs):
        if result == 'success':
            reverse = self._state == CANCELLING

            if self._commit_callback is not None:
                # TODO can't decide on commit callback api
                try:
                    reverse = not self._commit_callback(result)
                except Exception:
                    reverse = True

            if reverse:
                self._start_reversal()
            else:
                self._state = FINISHED
                self._future.set_result(None)
        else:
            # TODO interpret errors from ITU
            self._state = FINISHED
            self._future.set_exception(SessionCancelledError("itu error"))

    def _on_local_mode_cancelling(self, result, **kwargs):
        if result == 'success':
            self._start_reversal()
        else:
            self._state = FINISHED
            self._future.set_exception(SessionCancelledError())

    def _on_local_mode_reversing(self, result, **kwargs):
        if result == 'success':
            self._state = FINISHED
            self._future.set_exception(SessionCancelledError())
        else:
            # XXX
            self._state = BROKEN

    def on_req_local_mode(self, *args, **kwargs):
        """
        .. note:: Internal use only
        """
        with self._lock:
            if self._state == RUNNING:
                return self._on_local_mode_running(*args, **kwargs)
            elif self._state == CANCELLING:
                return self._on_local_mode_cancelling(*args, **kwargs)
            elif self._state == REVERSING:
                return self._on_local_mode_reversing(*args, **kwargs)
            else:
                raise Exception("invalid state")

    def on_display_text(self, text):
        if self._display_callback is not None:
            self._display_callback(text)

    def on_print_text(self, commands):
        if self._print_callback is not None:
            self._print_callback(commands)

    def on_reset_timer(self, timeout):
        pass

    def cancel(self):
        """
        :raises SessionCompletedError:
            If session has already finished
        """
        with self._lock:
            if self._state == RUNNING:
                self._state = CANCELLING
                # non-blocking, don't wait for result
                self._connection.request_cancel()

        # block until session finishes
        try:
            self.result()
        except SessionCancelledError:
            # this is what we want
            return
        else:
            raise CancelFailedError()

    def result(self, timeout=None):
        try:
            return self._future.result(timeout=timeout)
        except concurrent.futures.CancelledError as e:
            raise SessionCancelledError() from e

    def add_done_callback(self, fn):
        return self._future.add_done_callback(fn)

    def unbind(self):
        try:
            self.cancel()
        except SessionCompletedError:
            pass
Python
0.000002
@@ -1302,25 +1302,41 @@ -reverse = +commit = True%0A self._s @@ -1509,21 +1509,16 @@ -reverse = not +commit = sel @@ -1602,21 +1602,21 @@ -reverse = Tru +commit = Fals e%0A%0A @@ -1633,15 +1633,14 @@ if -reverse +commit :%0A @@ -1666,78 +1666,85 @@ _sta -rt_reversal()%0A else:%0A self._state = FINISHED +te = FINISHED%0A self._future.set_result(None)%0A else: %0A @@ -1762,38 +1762,31 @@ self._ -future.set_result(None +start_reversal( )%0A
4541b5edc808d77f53305eafca418d3be6715e8d
Cut 0.17.3
invocations/_version.py
invocations/_version.py
__version_info__ = (0, 17, 2)
__version__ = '.'.join(map(str, __version_info__))
Python
0.000001
@@ -24,9 +24,9 @@ 17, -2 +3 )%0A__
8d70bad3968cb11c929beafcef44b023822b886f
make interval adjustable in poll_request, and also remove check_response call duplication
stacktester/common/http.py
stacktester/common/http.py
from stacktester import exceptions

import httplib2
import os
import time


class Client(object):

    USER_AGENT = 'python-nova_test_client'

    def __init__(self, host='localhost', port=80, base_url=''):
        #TODO: join these more robustly
        self.base_url = "http://%s:%s/%s" % (host, port, base_url)

    def poll_request(self, method, url, check_response, **kwargs):

        timeout = kwargs.pop('timeout', 180)
        # Start timestamp
        start_ts = int(time.time())

        resp, body = self.request(method, url, **kwargs)
        while (not check_response(resp, body)):
            if (int(time.time()) - start_ts >= (timeout * 1000)):
                raise exceptions.TimeoutException
            time.sleep(2)
            resp, body = self.request(method, url, **kwargs)

    def request(self, method, url, **kwargs):
        self.http_obj = httplib2.Http()

        params = {}
        params['headers'] = {'User-Agent': self.USER_AGENT}
        params['headers'].update(kwargs.get('headers', {}))
        if 'Content-Type' not in kwargs.get('headers',{}):
            params['headers']['Content-Type'] = 'application/json'

        if 'body' in kwargs:
            params['body'] = kwargs.get('body')

        req_url = "%s/%s" % (self.base_url, url)
        resp, body = self.http_obj.request(req_url, method, **params)
        return resp, body
Python
0
@@ -421,16 +421,61 @@ ', 180)%0A + interval = kwargs.pop('interval', 2)%0A @@ -529,16 +529,40 @@ ime())%0A%0A + while True:%0A @@ -622,19 +622,16 @@ -while (not + if ( chec @@ -651,24 +651,46 @@ sp, body)):%0A + break%0A @@ -820,70 +820,16 @@ eep( -2)%0A resp, body = self.request(method, url, **kwargs +interval )%0A%0A @@ -1087,21 +1087,21 @@ not in -kwarg +param s.get('h
a8fe56cd60296607f879dea86432532a5b40824a
Add a main method
dame/__init__.py
dame/__init__.py
Python
0.998985
@@ -0,0 +1,48 @@ +from .dame import *%0Adef main():%0A dame.main()%0A
c005ce217f77aa185ad8916475463f2040a3dc67
clean up yaml generator.
iridium/core/trapper.py
iridium/core/trapper.py
from functools import wraps
from .logger import glob_logger
from iridium.config import config
from .exceptions import FunctionException
import yaml
from inspect import signature


def tracer(func):
    """
    tracer will decorate a given function which allow users to step
    through a function call on error.
    :param func: Function which is to be wrapped.
    :return: decorated function.
    """
    import pdb

    @wraps(func)
    def wrapper(*args, **kwargs):
        glob_logger.information("calling: {0} with these args: {1}".format(func.__name__, str(signature(func))))
        try:
            return func(*args, **kwargs)
        except FunctionException as fne:
            print('We catch a function: {0:s} with a value of: {1:s} doing something bad'.format(fne.func_name, fne.value))
            pdb.set_trace()
    return wrapper


def trap(func):
    """
    trap will return the name of the function and its arguments
    as well as its return values.
    :param func: function for which to decorate.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        collector(func.__name__, str(signature(func)), *args, **kwargs)
        return func(*args, **kwargs)
    return wrapper


def collector(fn_name, fn_args, *fn_args_val, **fn_kwargs_val):
    """
    collector will format the return information from the decorator 'trap'
    and place it into a simple yaml file.
    :param fn_name:
    :param fn_args:
    :param fn_kwargs:
    :return file creation status and new file.
    """
    fh = open(config.iridium_function_calls['function_log'], mode='a')
    fname = fh.name
    if fh.mode != 'a':
        raise "Please make sure %s is writable." % fname
    fn_output = yaml.dump({'Function Attributes': {'Function Name': fn_name,
                                                   'Function Arguments': fn_args,
                                                   'Function Argument Values': str(fn_args_val),
                                                   'Function Keyword values': str(fn_kwargs_val)}}, 
                          indent=4, default_flow_style=False, explicit_start=True)
    status = fh.write(fn_output)
    if status > 0:
        ret_val = "Data written to %s" % fname
        fh.close()
    else:
        ret_val = "Please check %s data was not saved." % fname
        fh.close()
    return ret_val
Python
0
@@ -2138,17 +2138,16 @@ _val)%7D%7D, - %0A
e6d0e3c2b01a28a3235d1de292f99328c77e6584
print usage
dnsquery.py
dnsquery.py
import sys
import csv
import time
import logging

from common import threadpool

import dns.resolver
import dns.message
import dns.rdataclass
import dns.rdatatype
import dns.query
import dns.exception

class DNSQueryTask(threadpool.Task):
    def do(self):
        qname, qtype, qcount, bdnsip = "", "", 0, ""
        for arg in self.kargs:
            if arg == "qtype":
                qtype = self.kargs[arg]
            elif arg == "qname":
                qname = self.kargs[arg]
            elif arg == "qcount":
                qcount = int(self.kargs[arg])
            elif arg == "bdnsip":
                bdnsip = self.kargs[arg]

        if (qname == "") or (qtype == "") or (qcount == 0) or (bdnsip == ""):
            logging.error("Incorrect task!")
            return False

        resolver = dns.resolver.Resolver(configure=False)
        resolver.nameservers = [bdnsip]
        for i in range(qcount):
            try:
                time_start = time.perf_counter()
                answer = resolver.query(qname, qtype)
                time_performance = time.perf_counter() - time_start
                for rr in answer:
                    if time_performance > 0:
                        logging.info("%02d %s %s %15s - performace = %3.3f sec", i, qname, qtype, rr, time_performance)
                        time_performance = 0
                    else:
                        logging.info("   %s %s %15s", qname, qtype, rr)
            except dns.exception.DNSException:
                time_performance = time.perf_counter() - time_start
                logging.warning("Exception - performance = %3.3f sec", time_performance)
            except Exception as ex:
                print(ex)
        return True

QUERY_FILE = "QFile"
DNS_IP = "DNSIP"
ARGUMENT_LIST = [
    [QUERY_FILE, "-f", "<query_list_file>"],
    [DNS_IP, "-s", "<DNS server IP>"]
]

def PrintUsage():
    print("python dnsquery.py -f <query_list.csv> -s <backend_dns_ip>")

def GetArguments(argv):
    arguments = dict()
    idx, argc = 0, len(argv)
    while idx < argc:
        for argItem in ARGUMENT_LIST:
            if (argv[idx] == argItem[1]) and (idx < argc - 1):
                idx = idx + 1
                arguments[argItem[0]] = argv[idx]
        idx = idx + 1

    if (arguments[QUERY_FILE] == ""):
        PrintUsage()
        exit(0)
    else:
        return arguments

if __name__ == '__main__':
    arguments = GetArguments(sys.argv)

    logging.basicConfig(level=logging.DEBUG,
                        format="%(asctime)s-%(thread)06d-%(levelname)s: %(message)s",
                        datefmt="%Y%m%d-%H%M%S")
    logging.info("dnsquery started...")

    csvfile = open(arguments[QUERY_FILE])
    reader = csv.reader(csvfile)

    thdpool = threadpool.ThreadPool(20, 40)
    thdpool.start_pool()

    try:
        for row in reader:
            qtask = DNSQueryTask(qtype = row[0], qname = row[1], qcount = int(row[2]), bdnsip = arguments[DNS_IP])
            thdpool.add_task(qtask)
    except csv.Error as ex:
        print(ex.args)

    thdpool.wait_completion()
    thdpool.stop_pool()

    logging.info("dnsquery complete...")
Python
0.000003
@@ -1760,14 +1760,18 @@ = %22Q +uery File%22%0A - DNS_ @@ -1918,16 +1918,18 @@ print(%22 +%5Cn python d @@ -1942,49 +1942,91 @@ y.py - -f %3Cquery_list.csv%3E -s %3Cbackend_dns_ip%3E%22 +%5Cn%22)%0A for argItem in ARGUMENT_LIST:%0A print(%22 %22, argItem%5B1%5D, argItem%5B2%5D )%0A%0Ad @@ -2340,35 +2340,64 @@ if ( -arguments%5BQUERY_FILE%5D == %22%22 +QUERY_FILE not in arguments) or (DNS_IP not in arguments ):%0A
083499cc0bb2ad443bbebb45d0e75bd0bc2df8b7
allow ssh key of any size
fig_leaf.py
fig_leaf.py
""" Fig Leaf: Encrypt and decrypt data with ssh keys! 2017 maryx Usage: 1. Run `pip install pycrypto` 2. To encrypt, run `python fig_leaf.py <path to file location> <path to output location> <path to public key>` 3. To decrypt, run `python fig_leaf.py <path to encrypted file location> <path to output location> <path to private key> --decrypt` """ import argparse from Crypto.Cipher import AES, PKCS1_OAEP from Crypto import Random from Crypto.PublicKey import RSA def encrypt(data, public_key): """ Returns RSA-encrypted symmetric key concatenated with symmetrically-encrypted data. """ # Symmetrically encrypt data initialization_vector = Random.new().read(AES.block_size) symmetric_key = Random.get_random_bytes(AES.key_size[2]) cipher = AES.new(symmetric_key, AES.MODE_CFB, initialization_vector) encrypted_data = initialization_vector + cipher.encrypt(data) # RSA-encrypt symmetric key public_key = RSA.importKey(public_key) rsa_cipher = PKCS1_OAEP.new(public_key) encrypted_symmetric_key = rsa_cipher.encrypt(symmetric_key) return encrypted_symmetric_key + encrypted_data def decrypt(encrypted_data, private_key): """ Given RSA-encrypted symmetric key and symmetrically-encrypted data, returns original data. """ encrypted_symmetric_key = encrypted_data[0:512] symmetrically_encrypted_data = encrypted_data[512:] # Decrypt RSA-encrypted symmetric key private_key = RSA.importKey(private_key) rsa_cipher = PKCS1_OAEP.new(private_key) symmetric_key = rsa_cipher.decrypt(encrypted_symmetric_key) # Decrypt symmetrically-encrypted data initialization_vector = Random.new().read(AES.block_size) aes_cipher = AES.new(symmetric_key, AES.MODE_CFB, initialization_vector) decrypted_data = aes_cipher.decrypt(symmetrically_encrypted_data) decrypted_data = decrypted_data[16:] # first 16 are extraneous return decrypted_data def command_line_arg_parser(): """ Command line argument parser. Encrypts by default. Decrypts when --decrypt flag is passed in. """ parser = argparse.ArgumentParser(description='Parses input args') parser.add_argument('input_file', type=str, help='Path to input file location') parser.add_argument('output_file', type=str, default='./output_data', help='Path to output file location') parser.add_argument('key_file', type=str, help='Path to public or private key file') parser.add_argument('--decrypt', dest='decrypt', action='store_true', help='Private key file (for decryption)') return parser def main(): parser = command_line_arg_parser() args = parser.parse_args() input_file_location = args.input_file output_file_location = args.output_file with open(args.key_file, 'rb') as f: key = f.read() # decrypting if args.decrypt: with open(input_file_location, 'rb') as f: encrypted_data = f.read() decrypted_data = decrypt(encrypted_data, key) with open(output_file_location, 'wb') as f: f.write(decrypted_data) print('Decrypted data to %s' % output_file_location) # encrypting else: with open(input_file_location, 'rb') as f: data = f.read() encrypted_data = encrypt(data, key) with open(output_file_location, 'wb') as f: f.write(encrypted_data) print('Encrypted data to %s' % output_file_location) if __name__ == '__main__': main()
Python
0.000001
@@ -344,16 +344,30 @@ t%60%0A%22%22%22%0A%0A +import pickle%0A import a @@ -1100,16 +1100,17 @@ return +%5B encrypte @@ -1124,18 +1124,17 @@ tric_key - + +, encrypt @@ -1136,24 +1136,25 @@ crypted_data +%5D %0A%0Adef decryp @@ -1346,12 +1346,8 @@ ta%5B0 -:512 %5D%0A @@ -1398,12 +1398,9 @@ ata%5B -512: +1 %5D%0A @@ -3016,31 +3016,37 @@ pted_data = -f.re +pickle.lo ad( +f )%0A de @@ -3452,23 +3452,27 @@ -f.write +pickle.dump (encrypt @@ -3470,32 +3470,35 @@ p(encrypted_data +, f )%0A print(
1cbe7b335405e6294fcbca792914932f7226ac9b
Fix entities API
openfisca_web_api/controllers/entities.py
openfisca_web_api/controllers/entities.py
# -*- coding: utf-8 -*-


# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.


"""Entities controller"""


import collections

from .. import contexts, model, wsgihelpers


@wsgihelpers.wsgify
def api1_entities(req):
    def build_entity_data(entity_class):
        entity_data = {
            'isPersonsEntity': entity_class.is_persons_entity,
            'label': entity_class.label,
            'nameKey': entity_class.name_key,
            }
        if hasattr(entity_class, 'roles_key'):
            entity_data.update({
                'maxCardinalityByRoleKey': entity_class.max_cardinality_by_role_key,
                'roles': entity_class.roles_key,
                'labelByRoleKey': entity_class.label_by_role_key,
                })
        return entity_data

    ctx = contexts.Ctx(req)
    headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx)

    assert req.method == 'GET', req.method

    entities_class = model.tax_benefit_system.entity_class_by_key_plural.itervalues()
    data = collections.OrderedDict(sorted({
        entity_class.key_plural: build_entity_data(entity_class)
        for entity_class in entities_class
        }.iteritems()))
    return wsgihelpers.respond_json(ctx, data, headers = headers)
Python
0.000035
@@ -969,16 +969,22 @@ ontexts, + conv, model, @@ -995,16 +995,16 @@ helpers%0A - %0A%0A@wsgih @@ -1736,16 +1736,1011 @@ q.method +%0A params = req.GET%0A inputs = dict(%0A context = params.get('context'),%0A )%0A data, errors = conv.pipe(%0A conv.struct(%0A dict(%0A context = conv.noop, # For asynchronous calls%0A ),%0A default = 'drop',%0A ),%0A )(inputs, state = ctx)%0A if errors is not None:%0A return wsgihelpers.respond_json(ctx,%0A collections.OrderedDict(sorted(dict(%0A apiVersion = '1.0',%0A context = inputs.get('context'),%0A error = collections.OrderedDict(sorted(dict(%0A code = 400, # Bad Request%0A errors = %5Bconv.jsonify_value(errors)%5D,%0A message = ctx._(u'Bad parameters in request'),%0A ).iteritems())),%0A method = req.script_name,%0A params = inputs,%0A url = req.url.decode('utf-8'),%0A ).iteritems())),%0A headers = headers,%0A ) %0A%0A en @@ -2823,20 +2823,24 @@ s()%0A -data +entities = colle @@ -2999,16 +2999,17 @@ ems()))%0A +%0A retu @@ -3040,22 +3040,269 @@ son(ctx, - data, +%0A collections.OrderedDict(sorted(dict(%0A apiVersion = '1.0',%0A context = data%5B'context'%5D,%0A entities = entities,%0A method = req.script_name,%0A params = inputs,%0A ).iteritems())),%0A headers @@ -3311,10 +3311,20 @@ headers +,%0A )%0A
f5e65b648d632f2e75dffe7943ed3e7105b21d7f
Remove GCS patch fixed upstream in te upstream library
core/polyaxon/fs/gcs.py
core/polyaxon/fs/gcs.py
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os

import aiofiles

from gcsfs import GCSFileSystem as BaseGCSFileSystem
from gcsfs.checkers import get_consistency_checker
from gcsfs.retry import retry_request, validate_response

from polyaxon.connections.gcp.base import get_gc_credentials, get_project_id


class GCSFileSystem(BaseGCSFileSystem):
    retries = 3

    @retry_request(retries=retries)
    async def _get_file_request(self, rpath, lpath, *args, headers=None, **kwargs):
        consistency = kwargs.pop("consistency", self.consistency)

        async with self.session.get(
            url=rpath,
            params=self._get_params(kwargs),
            headers=self._get_headers(headers),
            timeout=self.requests_timeout,
        ) as r:
            r.raise_for_status()
            checker = get_consistency_checker(consistency)

            os.makedirs(os.path.dirname(lpath), exist_ok=True)
            async with aiofiles.open(lpath, "wb") as f2:
                while True:
                    data = await r.content.read(4096 * 32)
                    if not data:
                        break
                    await f2.write(data)
                    checker.update(data)

            # validate http request
            validate_response(r.status, data, rpath)
            checker.validate_http_response(r)  # validate file consistency
            return r.status, r.headers, r.request_info, data

    async def _get_file(self, rpath, lpath, callback=None, **kwargs):
        # TODO: Remove when https://github.com/dask/gcsfs/issues/433 is fixed
        if await self._isdir(rpath):
            return
        await super()._get_file(rpath, lpath, callback=callback, **kwargs)


def get_fs(
    context_path: str = None,
    asynchronous: bool = False,
    use_listings_cache: bool = False,
    **kwargs
):
    return GCSFileSystem(
        project=get_project_id(context_path=context_path, **kwargs),
        token=get_gc_credentials(context_path=context_path, **kwargs),
        asynchronous=asynchronous,
        use_listings_cache=use_listings_cache,
    )
Python
0
@@ -601,36 +601,8 @@ se.%0A -import os%0A%0Aimport aiofiles%0A%0A from @@ -653,116 +653,8 @@ stem -%0Afrom gcsfs.checkers import get_consistency_checker%0Afrom gcsfs.retry import retry_request, validate_response %0A%0Afr @@ -770,16 +770,16 @@ ystem):%0A + retr @@ -791,1359 +791,8 @@ 3%0A%0A - @retry_request(retries=retries)%0A async def _get_file_request(self, rpath, lpath, *args, headers=None, **kwargs):%0A consistency = kwargs.pop(%22consistency%22, self.consistency)%0A%0A async with self.session.get(%0A url=rpath,%0A params=self._get_params(kwargs),%0A headers=self._get_headers(headers),%0A timeout=self.requests_timeout,%0A ) as r:%0A r.raise_for_status()%0A checker = get_consistency_checker(consistency)%0A%0A os.makedirs(os.path.dirname(lpath), exist_ok=True)%0A async with aiofiles.open(lpath, %22wb%22) as f2:%0A while True:%0A data = await r.content.read(4096 * 32)%0A if not data:%0A break%0A await f2.write(data)%0A checker.update(data)%0A%0A # validate http request%0A validate_response(r.status, data, rpath)%0A checker.validate_http_response(r) # validate file consistency%0A return r.status, r.headers, r.request_info, data%0A%0A async def _get_file(self, rpath, lpath, callback=None, **kwargs):%0A # TODO: Remove when https://github.com/dask/gcsfs/issues/433 is fixed%0A if await self._isdir(rpath):%0A return%0A await super()._get_file(rpath, lpath, callback=callback, **kwargs)%0A%0A %0Adef
bbd2a08d732a2db4c0e9ae1fcfd16b3959876526
fix starting error
package/scripts/mongo_db.py
package/scripts/mongo_db.py
import os
from time import sleep

from resource_management import *
from mongo_base import MongoBase

from resource_management.core.logger import Logger
from resource_management.libraries.script.script import Script

class MongoMaster(MongoBase):
    mongo_packages = ['mongodb-org']

    def install(self, env):
        import params
        env.set_params(params)
        self.installMongo(env)

    def configure(self, env):
        import params
        env.set_params(params)
        self.configureMongo(env)

    def start(self, env):
        import params
        self.configure(env)
        print "start mongodb"
        import socket
        current_host_name=socket.getfqdn(socket.gethostname())
        config = Script.get_config()
        shard_prefix = params.shard_prefix
        db_hosts = config['clusterHostInfo']['mongodb_hosts']

        auth_pattern = ''
        print params.auth
        if params.auth :
            print 'add keyFile'
            # add keyfile
            keyfile_path = '/etc/security/'
            keyfile_name = keyfile_path + 'mongodb-keyfile'
            auth_pattern = ' --keyFile ' + keyfile_name
            if current_host_name == db_hosts[0] :
                Execute(format('openssl rand -base64 741 > {keyfile_name}'),logoutput=True)
                Execute(format('chmod 600 {keyfile_name}'),logoutput=True)
                for index,item in enumerate(db_hosts,start=1):
                    Execute(format('scp {keyfile_name} root@{item}:{keyfile_path}'),logoutput=True)

        len_host=len(db_hosts)
        len_port=len(params.db_ports)
        if len(params.node_group) > 0:
            db_hosts = self.getdbhosts(db_hosts,params.node_group)
        #start shard service
        for index,item in enumerate(db_hosts,start=0):
            if item ==current_host_name:
                #foreach db_ports
                for index_p,p in enumerate(params.db_ports,start=0):
                    #rm mongo_*.sock
                    Execute(format('rm -rf /tmp/mongodb-{p}.sock'),logoutput=True)
                    #get shard_name
                    shard_name = shard_prefix + str((index-index_p)%len_host)
                    #pid_file_name = params.shard_prefix + str((index-index_p)%len_host)
                    #pid_file_name not the same to log,easy to status
                    pid_file_name = params.shard_prefix + str(index_p)
                    #get db_path
                    db_path = params.db_path + '/' + shard_name
                    if os.path.exists(db_path):
                        print "File exists"
                    else:
                        Execute(format('mkdir -p {db_path}'),logoutput=True)
                    log_file = params.log_path + '/' + shard_name + '.log'
                    pid_file = params.pid_db_path + '/' + pid_file_name + '.pid'
                    Execute(format('mongod -f /etc/mongod.conf --shardsvr -replSet {shard_name} -port {p} -dbpath {db_path} -oplogSize 100 -logpath {log_file} -pidfilepath {pid_file} {auth_pattern} ')
                            ,logoutput=True)

        sleep(5)
        print 'sleep waiting for all mongod started'

        if params.node_group =='':
            members =''
            index = db_hosts.index(current_host_name)
            shard_name = shard_prefix + str(index)
            current_index=0
            current_shard=index
            while(current_index<len_port):
                current_host = db_hosts[current_shard]
                current_port = params.db_ports[current_index]
                members = members+ '{_id:'+format('{current_index},host:"{current_host}:{current_port}"')
                if current_index == 0:
                    members = members +',priority:2'
                members = members + '},'
                current_index = current_index + 1
                current_shard = (current_shard + 1)%len(db_hosts)
            replica_param ='rs.initiate( {_id:'+format('"{shard_name}",version: 1,members:') + '[' + members + ']})'
            cmd = format('mongo --host {current_host_name} --port 27017 <<EOF \n{replica_param} \nEOF\n')
            File('/var/run/mongo_config.sh',
                 content=cmd,
                 mode=0755
            )
            Execute('su - mongodb /var/run/mongo_config.sh',logoutput=True)
        else:
            groups = params.node_group.split(';')
            members =''
            index = db_hosts.index(current_host_name)
            shard_name=shard_prefix + str(index)
            current_index=0
            current_shard=index
            while(current_index<len_port):
                current_host = db_hosts[current_shard]
                current_port = params.db_ports[current_index]
                members = members+ '{_id:'+format('{current_index},host:"{current_host}:{current_port}"')
                if current_index == 0:
                    members = members +',priority:2'
                members = members + '},'
                current_index = current_index + 1
                current_shard = (current_shard + 1)%len(db_hosts)
            #if len(groups) > 1 and current_host_name in groups[-1]:
            #    replica_param ='rs.initiate( {_id:'+format('"{shard_name}",version: 1,members:') + '[' + members + ']})'
            #else:
            replica_param ='rs.reconfig( {_id:'+format('"{shard_name}",version: 1,members:') + '[' + members + ']},{force:1})'
            cmd = format('mongo --host {current_host_name} --port 27017 <<EOF \n{replica_param} \nEOF\n')
            File('/var/run/mongo_config.sh',
                 content=cmd,
                 mode=0755
            )
            Execute('/var/run/mongo_config.sh',logoutput=True)

    def stop(self, env):
        print "stop services.."
        import params
        for port in params.db_ports:
            params.shutdown_port = port
            env.set_params(params)
            self.shutDown(env)

    def restart(self, env):
        self.configure(env)
        print "restart mongodb"
        self.stop(env)
        self.start(env)

    def status(self, env):
        db_ports=["27017","27018","27019"]
        for index_p,p in enumerate(db_ports,start=0):
            shard_name = "shard" + str(index_p)
            pid_file = '/var/run/mongodb' + '/' + shard_name + '.pid'
            check_process_status(pid_file)

if __name__ == "__main__":
    MongoMaster().execute()
Python
0.000003
@@ -3126,16 +3126,25 @@ pattern%7D +; sleep 5 ')%0A
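Note on reading the diff column: hunks are URL-encoded (%0A is a newline, %7B and %7D are braces, %22 is a quote) and their offsets count characters in old_contents, so the code cells must stay byte-for-byte as stored. Decoded, this hunk appends '; sleep 5' inside the shell command that launches each mongod shard, pausing between shard starts. A rough reconstruction, with the long run of mongod flags abbreviated as '...' (not verbatim):

    # after the change, each shard launch is followed by a 5-second pause
    Execute(format('mongod ... -pidfilepath {pid_file} {auth_pattern}; sleep 5'),
            logoutput=True)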
cb92a3cf67557fbd4a629601490a74bdb2119935
add print_list method to dijkstra
dijkstra.py
dijkstra.py
# -*- coding: utf-8 -*- class Dijkstra: def __init__(self, adj, start): self.adj = adj self.s = start self.dists = [0 for x in range(len(adj))] # Liefert minimales Element > 0 def minweight(self, verts): return min([x for x in verts if x>0]) # Baut liste der Entfernungen von s ausgehend auf def dist_list(self): i = s for v in adj[i]: if v>0: self.dists[adj[s].index(v)] = v
Python
0.000001
@@ -471,10 +471,286 @@ )%5D = v%0A%0A -%0A + # Ausgabe der k%C3%BCrzesten Wege von Knoten s zu alle anderen Knoten%0A def print_list(self):%0A print(%22Distance from Node %22+ str(adj%5Bself.s%5D) + %22 to:%22 )%0A for node in range(len(self.adj)):%0A print(%22%5Ct%5CtNode %22+str(adj%5Bnode%5D)+ %22: %22 + str(self.dists%5Bnode%5D)) %0A
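Decoded, this hunk appends a print_list method to the Dijkstra class; the added comment is German ("Ausgabe der kürzesten Wege von Knoten s zu alle anderen Knoten"), translated below. Reconstruction:

    # Print the shortest paths from node s to every other node
    def print_list(self):
        print("Distance from Node " + str(adj[self.s]) + " to:")
        for node in range(len(self.adj)):
            print("\t\tNode " + str(adj[node]) + ": " + str(self.dists[node]))

Note that the added method reads the module-level adj rather than self.adj, matching the style of dist_list above it.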
ed42fa81e1029633f6b6f426c437df0c55262922
Fix LabHubApp.
jupyterlab/labhubapp.py
jupyterlab/labhubapp.py
import os from traitlets import default from .labapp import LabApp try: from jupyterhub.singleuser import SingleUserNotebookApp except ImportError: SingleUserLabApp = None raise ImportError('You must have jupyterhub installed for this to work.') else: class SingleUserLabApp(SingleUserNotebookApp, LabApp): @default("default_url") def _default_url(self): """when using jupyter-labhub, jupyterlab is default ui""" return "/lab" def init_webapp(self, *args, **kwargs): warnings.warn( "SingleUserLabApp is deprecated, use SingleUserNotebookApp and set " + "c.Spawner.default_url = '/lab' in jupyterhub_config.py", DeprecationWarning ) super().init_webapp(*args, **kwargs) def main(argv=None): return SingleUserLabApp.launch_instance(argv) if __name__ == "__main__": main()
Python
0
@@ -2,16 +2,32 @@ mport os +%0Aimport warnings %0A%0Afrom t @@ -557,17 +557,16 @@ - warnings @@ -658,16 +658,34 @@ set %22 + + %5C%0A %22c.Spaw
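Decoded, the fix adds the missing 'import warnings' (init_webapp calls warnings.warn, which previously raised NameError) and rewraps the deprecation message with an explicit continuation. Sketch of the repaired pieces:

    import os
    import warnings  # newly added; needed by warnings.warn() below

    # ... class definition unchanged down to init_webapp ...
            warnings.warn(
                "SingleUserLabApp is deprecated, use SingleUserNotebookApp and set " + \
                "c.Spawner.default_url = '/lab' in jupyterhub_config.py",
                DeprecationWarning
            )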
f4c1093616d08bd4abcb5ddc030b59d863dcec05
Change netapi to use processmanager
salt/client/netapi.py
salt/client/netapi.py
# encoding: utf-8 ''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing import signal import os # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts # pid -> {fun: foo, Process: object} self.pid_map = {} self.netapi = salt.loader.netapi(self.opts) def add_process(self, fun): ''' Start a netapi child process of "fun" ''' p = multiprocessing.Process(target=self.netapi[fun]) p.start() logger.info("Started '{0}' api module with pid {1}".format(fun, p.pid)) self.pid_map[p.pid] = {'fun': fun, 'Process': p} def run(self): ''' Load and start all available api modules ''' for fun in self.netapi: if fun.endswith('.start'): self.add_process(fun) # make sure to kill the subprocesses if the parent is killed signal.signal(signal.SIGTERM, self.kill_children) while True: pid, exit_status = os.wait() if pid not in self.pid_map: logger.info(('Process of pid {0} died, not a known netapi' ' process, will not restart').format(pid)) continue logger.info(('Process {0} ({1}) died with exit status {2},' ' restarting...').format(self.pid_map[pid]['fun'], pid, exit_status)) self.pid_map[pid]['Process'].join(1) self.add_process(self.pid_map[pid]['fun']) del self.pid_map[pid] def kill_children(self, *args): ''' Kill all of the children ''' for pid, p_map in self.pid_map.items(): p_map['Process'].terminate() p_map['Process'].join() del self.pid_map[pid]
Python
0
@@ -178,16 +178,42 @@ t.loader +%0Aimport salt.utils.process %0A%0Alogger @@ -411,1409 +411,389 @@ -# pid -%3E %7Bfun: foo, Process: object%7D%0A self.pid_map = %7B%7D%0A self.netapi = salt.loader.netapi(self.opts)%0A%0A def add_process(self, fun):%0A '''%0A Start a netapi child process of %22fun%22%0A '''%0A p = multiprocessing.Process(target=self.netapi%5Bfun%5D)%0A p.start()%0A logger.info(%22Started '%7B0%7D' api module with pid %7B1%7D%22.format(fun, p.pid))%0A self.pid_map%5Bp.pid%5D = %7B'fun': fun,%0A 'Process': p%7D%0A%0A def run(self):%0A '''%0A Load and start all available api modules%0A '''%0A for fun in self.netapi:%0A if fun.endswith('.start'):%0A self.add_process(fun)%0A%0A # make sure to kill the subprocesses if the parent is killed%0A signal.signal(signal.SIGTERM, self.kill_children)%0A%0A while True:%0A pid, exit_status = os.wait()%0A if pid not in self.pid_map:%0A logger.info(('Process of pid %7B0%7D died, not a known netapi'%0A ' process, will not restart').format(pid))%0A continue%0A logger.info(('Process %7B0%7D (%7B1%7D) died with exit status %7B2%7D,'%0A ' restarting...').format(self.pid_map%5Bpid%5D%5B'fun'%5D,%0A pid,%0A exit_status))%0A self.pid_map%5Bpid%5D%5B'Process'%5D.join(1)%0A self +self.process_manager = salt.utils.process.ProcessManager()%0A self.netapi = salt.loader.netapi(self.opts)%0A%0A def run(self):%0A '''%0A Load and start all available api modules%0A '''%0A for fun in self.netapi:%0A if fun.endswith('.start'):%0A logger.info('Starting %7B0%7D netapi module'.format(fun))%0A self.process_manager .add @@ -810,37 +810,30 @@ elf. -pid_map%5Bpid%5D%5B' +netapi%5B fun -' %5D)%0A +%0A @@ -832,283 +832,31 @@ - del self.pid_map%5Bpid%5D%0A%0A def kill_children(self, *args):%0A '''%0A Kill all of the children%0A '''%0A for pid, p_map in self.pid_map.items():%0A p_map%5B'Process'%5D.terminate()%0A p_map%5B'Process'%5D.join()%0A del self.pid_map%5Bpid%5D +self.process_manager.run() %0A
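Decoded, the commit deletes the hand-rolled pid_map bookkeeping, SIGTERM handler, and os.wait() restart loop, delegating child supervision to salt.utils.process.ProcessManager. The resulting class, stitched together from the hunks (module-level logger as in the original):

    import salt.loader
    import salt.utils.process


    class NetapiClient(object):
        def __init__(self, opts):
            self.opts = opts
            self.process_manager = salt.utils.process.ProcessManager()
            self.netapi = salt.loader.netapi(self.opts)

        def run(self):
            '''
            Load and start all available api modules
            '''
            for fun in self.netapi:
                if fun.endswith('.start'):
                    logger.info('Starting {0} netapi module'.format(fun))
                    self.process_manager.add_process(self.netapi[fun])

            self.process_manager.run()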
72b3d998a14388be98c73556df1cd20859a71573
remove invalid data
signal_receive.py
signal_receive.py
#! /usr/bin/env python # -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright © 2013 KuoE0 <[email protected]> # # Distributed under terms of the MIT license. """ """ import tornado.httpserver import tornado.ioloop import tornado.web import serial import signal import sys import json tornado_port = 8888 # create serial object serial_port = sys.argv[1] serial_baudrate = int(sys.argv[2]) ser = serial.Serial(serial_port, serial_baudrate, timeout=1) # global variable number_of_signal = 1000 serial_pending = list() signals = [[0] * 6] * number_of_signal signal_type = ['x-acc', 'y-acc', 'z-acc', 'x-gyro', 'y-gyro', 'z-gyro'] # SIGINT handler to close serial connection def handler_SIGINT(signum, frame): global ser print "Signal {0} happened!".format(signum) print "Serial connection closed..." ser.close() signal.signal(signal.SIGINT, handler_SIGINT) # receive signal with a non-blocking way def recieve_signal(): try: if ser.inWaiting() != 0: data = ser.readline() print data except Exception as e: print "Error reading from {0}".format(serial_port) template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format(type(e).__name__, e.args) print message if len(data): parse_pending(data) # parse out the signal value def parse_pending(signal_string): global signals # split by ',' and get first element values = [int(x) for x in signal_string.split(',')] # push signal into list if len(values) == 6: signals.append(values) # tornado web handler class query_signal_handler(tornado.web.RequestHandler): def get(self, url='/'): print 'get' # get the name of callback parameter callback_func = self.get_argument('callback') self.handle_request(callback_func) # return signals def handle_request(self, callback): global signals global number_of_signal # retrieve signal needed ret_signals = signals[:number_of_signal] # transpose the list ret_signals = zip(*ret_signals) # create list of dict ret = list() for i in xrange(6): ret.append({ 'data': [p for p in enumerate(ret_signals[i])], 'label': signal_type[i] }) # convert to JSON format ret = json.dumps({'data': ret}) # convert to JSONP format ret = '{0}({1})'.format(callback, ret) # set content type self.set_header("Content-Type", "application/json") # write data self.write(ret) # remove first element to realtime plot signals.pop(0) application = tornado.web.Application([(r"/", query_signal_handler),]) if __name__ == "__main__": #tell tornado to run checkSerial every 50 ms serial_loop = tornado.ioloop.PeriodicCallback(recieve_signal, 10) serial_loop.start() application.listen(tornado_port) print "Starting server on port number {0}...".format(tornado_port) print "Open at http://localhost:{0}/".format(tornado_port) try: tornado.ioloop.IOLoop.instance().start() except KeyboardInterrupt: print 'Server closed...'
Python
0.001233
@@ -932,24 +932,38 @@ _signal():%0A%0A + data = %22%22%0A try:%0A @@ -951,32 +951,32 @@ a = %22%22%0A try:%0A - if ser.i @@ -1084,21 +1084,27 @@ -print +error_msg = %22Error @@ -1274,16 +1274,17 @@ e.args)%0A +%0A @@ -1288,16 +1288,27 @@ print + error_msg, message @@ -1434,24 +1434,34 @@ obal signals +%0A%0A try: %0A # split @@ -1490,16 +1490,20 @@ element%0A + valu @@ -1549,16 +1549,50 @@ it(',')%5D +%0A except:%0A values = None %0A%0A # @@ -1613,23 +1613,34 @@ to list%0A - if +values and len(valu
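Decoded, recieve_signal now initialises data before the try block and prints a prefixed error message, while parse_pending wraps the int() conversion so a corrupt serial line yields values = None and is silently dropped (the "invalid data" of the subject line). Reconstructed parse_pending:

    def parse_pending(signal_string):
        global signals

        try:
            # split by ',' and get first element  (original comment kept verbatim)
            values = [int(x) for x in signal_string.split(',')]
        except:
            values = None  # unparseable serial line: discard

        # push signal into list only when all six channels parsed
        if values and len(values) == 6:
            signals.append(values)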
32e1ff21562c451ab790c0af077b3760855f1e6b
use slider component for bmi gui
ExPy/ExPy/module19.py
ExPy/ExPy/module19.py
""" BMI Calculator """ import rx import tkcomponents def calculate_bmi(weight, height): """ Given weight (pounds), height (inches) Return BMI """ return (weight / (height * height)) * 703. def bmi_recommendation(bmi): """Given a BMI, return a recommendation""" if bmi < 18.5: return 'You are underweight. You should see a doctor.' elif bmi < 25: return 'You are within the ideal weight range.' return 'You are overweight. You should see a doctor.' def prompt_float(prompt): """ Given a specified prompt, return a float """ while True: try: return float(input(prompt)) except ValueError: print('Enter a valid number') def ex19(): """ Prompt for weight and height Print BMI and BMI range """ weight = prompt_float('Enter weight in pounds(lbs): ') height = prompt_float('Enter height in inches: ') bmi = calculate_bmi(weight, height) recommendation = bmi_recommendation(bmi) print('Your BMI is {}'.format(bmi)) print(recommendation) def ex19gui(): """ GUI version of BMI """ root = tkcomponents.create('BMI') options = { 'Imperial': ('pounds(lbs)', 'inches'), 'Metric': ('kilograms(kg)', 'centimeters(cm)') } systems = tkcomponents.radio_stream(root, [(x, x) for x in options], 0, default='Imperial') weight_labels = systems.map(lambda x: 'Enter weight in {}'.format(options[x][0])) weights = tkcomponents.input_stream(root, weight_labels, 1) height_labels = systems.map(lambda x: 'Enter height in {}'.format(options[x][1])) heights = tkcomponents.input_stream(root, height_labels, 2) def callback(system, weight, height): """Given a system, a weight, and a height Calculate BMI""" try: weight = float(weight) except ValueError: return (False, 'Enter a valid weight') try: height = float(height) except ValueError: return (False, 'Enter a valid height') if system == 'Imperial': return (True, calculate_bmi(weight, height)) weight_lbs = weight * 2.20462 height_in = height * 0.393701 return (True, calculate_bmi(weight_lbs, height_in)) #pylint: disable=E1101 bmis = rx.Observable.combine_latest(systems, weights, heights, callback) def bmi_output(bmi): """ If the BMI could successfully be converted then display it Otherwise display the error """ okay, value = bmi if okay: return 'Your BMI is {}'.format(value) return value tkcomponents.output_label(root, bmis.map(bmi_output), 3) def bmi_recommendation_output(bmi): """ If the BMI could successfully be converted then display the recommendation Otherwise display the error """ okay, value = bmi if okay: return bmi_recommendation(value) return '' tkcomponents.output_label(root, bmis.map(bmi_recommendation_output), 4) root.mainloop() if __name__ == '__main__': ex19gui()
Python
0
@@ -17,16 +17,47 @@ or %22%22%22%0A%0A +from tkinter import HORIZONTAL%0A import r @@ -1505,37 +1505,37 @@ = tkcomponents. -input +scale _stream(root, we @@ -1548,16 +1548,65 @@ abels, 1 +, from_=1, to=500, orient=HORIZONTAL, default=160 )%0A he @@ -1712,21 +1712,21 @@ ponents. -input +scale _stream( @@ -1747,16 +1747,64 @@ abels, 2 +, from_=1, to=500, orient=HORIZONTAL, default=68 )%0A%0A d @@ -1915,33 +1915,16 @@ BMI%22%22%22%0A - try:%0A @@ -1958,202 +1958,29 @@ -except ValueError:%0A return (False, 'Enter a valid weight')%0A try:%0A height = float(height)%0A except ValueError:%0A return (False, 'Enter a valid height' +height = float(height )%0A @@ -2020,39 +2020,32 @@ return - (True, calculate_bmi(w @@ -2058,17 +2058,16 @@ height) -) %0A @@ -2153,15 +2153,8 @@ turn - (True, cal @@ -2186,17 +2186,16 @@ ight_in) -) %0A%0A #p @@ -2296,617 +2296,82 @@ ck)%0A - def bmi_output(bmi):%0A %22%22%22 If the BMI could successfully be converted then display it%0A Otherwise display the error%0A %22%22%22%0A okay, value = bmi%0A if okay:%0A return 'Your BMI is %7B%7D'.format(value)%0A return value%0A%0A tkcomponents.output_label(root, bmis.map(bmi_output), 3)%0A%0A def bmi_recommendation_output(bmi):%0A %22%22%22 If the BMI could successfully be converted then display%0A the recommendation%0A Otherwise display the error%0A %22%22%22%0A okay, value = bmi%0A if okay:%0A return bmi_recommendation(value)%0A return '' +%0A tkcomponents.output_label(root, bmis.map('Your BMI is %7B%7D'.format), 3) %0A @@ -2434,21 +2434,15 @@ tion -_output ), 4)%0A +%0A
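Decoded, the free-text input_stream widgets become scale_stream sliders (tkinter HORIZONTAL, range 1 to 500, defaults 160 lb and 68 in), which lets the callback drop its float-parsing try/except since a slider always delivers a number; the two output labels are also wired directly to 'Your BMI is {}'.format and bmi_recommendation. Reconstruction of the new wiring:

    from tkinter import HORIZONTAL

    weights = tkcomponents.scale_stream(root, weight_labels, 1,
                                        from_=1, to=500, orient=HORIZONTAL,
                                        default=160)
    heights = tkcomponents.scale_stream(root, height_labels, 2,
                                        from_=1, to=500, orient=HORIZONTAL,
                                        default=68)

    tkcomponents.output_label(root, bmis.map('Your BMI is {}'.format), 3)
    tkcomponents.output_label(root, bmis.map(bmi_recommendation), 4)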
89f8d0ebe01e188b5a043dfbf891cf3a3bca0504
Clarify that event is sent up to the master
salt/modules/event.py
salt/modules/event.py
''' Fire events on the minion, events can be fired up to the master ''' # Import salt libs import salt.crypt import salt.utils.event import salt.payload def fire_master(data, tag): ''' Fire an event off on the master server CLI Example:: salt '*' event.fire_master 'stuff to be in the event' 'tag' ''' load = {'id': __opts__['id'], 'tag': tag, 'data': data, 'cmd': '_minion_event'} auth = salt.crypt.SAuth(__opts__) sreq = salt.payload.SREQ(__opts__['master_uri']) try: sreq.send('aes', auth.crypticle.dumps(load)) except Exception: pass return True def fire(data, tag): ''' Fire an event on the local minion event bus CLI Example:: salt '*' event.fire 'stuff to be in the event' 'tag' ''' return salt.utils.event.MinionEvent(**__opts__).fire_event(data, tag)
Python
0.001957
@@ -207,18 +207,21 @@ ent off +up t o -n the mas
75ce6e126dc8865630d87d3a0dcad71017567082
update copyright
doc/conf.py
doc/conf.py
# -*- coding: utf-8 -*- # # StepScan doc import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.append(os.path.abspath('.')) sys.path.append(os.path.abspath(os.path.join('..', 'lib'))) sys.path.insert(0, 'macros') ## sys.path.append(os.path.abspath(os.path.join('.', 'ext'))) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.pngmath', 'sphinx.ext.linkcode', 'sphinx.ext.napoleon', 'numpydoc'] def linkcode_resolve(domain, info): if domain != 'py': return None mname = info.get('module', '') fname = info.get('fullname', '') return 'file:///T:/xas_user/scan_config/13ide/macros/%s.lar' % mname # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Epics Step Scans' copyright = u'2012, Matthew Newville' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. release = '1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = False # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'default' html_theme = 'sphinxdoc' # html_theme = 'nature' # html_theme = 'agogo' # html_theme_options = {'pagewidth':'85em', 'documentwidth':'60em', 'sidebarwidth': '25em', # # 'headercolor1': '#000080', # # 'headercolor2': '#0000A0', # } # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". 
#html_title = None html_title = 'Step Scanning and Data Acquisition with Python and Epics Channel Access' # A shorter title for the navigation bar. Default is the same as html_title. html_short_title = 'StepScan with PyEpics' # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. html_use_smartypants = False # True # Custom sidebar templates, maps document names to template names. html_sidebars = {'index': ['indexsidebar.html','searchbox.html']} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. html_use_modindex = False # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'stepscan' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'stepscan.tex', u'Step Scanning and Data Acquisition with Python and Epics Channel Access', u'Matthew Newville', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True
Python
0
@@ -711,16 +711,17 @@ m ones.%0A +%0A extensio @@ -1449,14 +1449,8 @@ pics - Step Scan @@ -1469,17 +1469,17 @@ = u'201 -2 +5 , Matthe
d2f223c60c3b1e6ccf9ec088c821c92631b623a8
Comment that says PayPal doesn't work yet. Waiting on feedback from them.
config/idp_proxy_conf.example.py
config/idp_proxy_conf.example.py
# The port the service will listen on from idpproxy.social.facebook import FacebookOAuth2 from idpproxy.social.falltrough import FallTrough from idpproxy.social.google import GoogleOIC from idpproxy.social.twitter import Twitter from idpproxy.social.liveid import LiveIDOAuth2 from idpproxy.social.openidconnect import OpenIDConnect from idpproxy.social.paypal import PayPal from idpproxy.social.linkedin import LinkedIn # The name of the service, is used in the cache and in the cookies returned SERVER_NAME = "idpproxy" CACHE = "memory" # CACHE = "file:..." # CACHE = "memcache:['127.0.0.1:11212']" # Name of the eduPersonTargetedID database EPTID_DB = "eptid.db" # debugging or not DEBUG=True STATIC_DIR = "static/" SECRET = "hemlig_text" SIGN = True # Where consumer keys and secrets can be found CONSUMER_INFO = ["file:config/secrets", "metadata"] # ------- HTTPS ------- # These should point to relevant files SERVER_CERT= "" SERVER_KEY="" # This is of course the certificate chain for the CA that signed # you cert and all the way up to the top CERT_CHAIN="" # ------- HTTPS ------- # SAML endpoint, Social protocol endpoint, protocol handler class SERVICE = { "facebook":{ "saml_endpoint":"facebook_sso", "social_endpoint":"facebook", "class":FacebookOAuth2, "authenticating_authority": 'https://graph.facebook.com/oauth/', "token_endpoint": "https://graph.facebook.com/oauth/access_token", "authorization_endpoint": 'https://graph.facebook.com/oauth/authorize', "userinfo_endpoint": "https://graph.facebook.com/me", "scope": ["email"], "attribute_map": { "givenName": "first_name", "surName": "last_name", "displayName": "name", "uid": "link", }, "name": "Facebook", }, "twitter":{ "saml_endpoint":"twitter_sso", "social_endpoint":"twitter", "authenticating_authority": 'http://api.twitter.com/oauth/', "request_token_url": 'http://api.twitter.com/oauth/request_token', "token_endpoint": 'http://api.twitter.com/oauth/access_token', "authorization_endpoint": 'http://api.twitter.com/oauth/authorize', "class":Twitter, "attribute_map": { "eduPersonPrincipalName": ("%[email protected]", "screen_name"), "displayName": "screen_name", "uid": "user_id", }, "name": "Twitter", }, "google": { "saml_endpoint":"google_sso", "social_endpoint":"google", "authenticating_authority": "https://www.google.com/accounts/o8/id", # No completely true but .. 
"authorization_endpoint": "https://accounts.google.com/o/oauth2/auth", "token_endpoint": "https://accounts.google.com/o/oauth2/token", "verification_endpoint": "https://www.googleapis.com/oauth2/v1/tokeninfo", "userinfo_endpoint": "https://www.googleapis.com/oauth2/v1/userinfo", "scope": ["https://www.googleapis.com/auth/userinfo.profile", "https://www.googleapis.com/auth/userinfo.email"], "attribute_map": { "uid": "id", "email": "email", #"verified_email": true, "displayName": "name", "givenName": "given_name", "surname": "family_name", }, "class":GoogleOIC, "name": "Google", }, "fallthrough":{ # Just for testing "saml_endpoint":"fallthrough_sso", "social_endpoint":"fallthrough", "class":FallTrough, "variable": "session_id", "attribute_map": None, "name": "Fallthrough", }, "roland":{ "saml_endpoint": "oic_sso", "social_endpoint": "oic", "class": OpenIDConnect, "variable": "state", "srv_discovery_url": "https://lingon.ladok.umu.se:8092/", #"authenticating_authority": "https://lingon.ladok.umu.se:8092/", "scope": ["openid", "email", "profile"], "name": "Roland", "attribute_map": { "displayName": "name", "uid": "user_id", "email": "email", "given_name": "given_name", "surname": "family_name" } }, "liveid": { "saml_endpoint": "liveid_sso", "social_endpoint": "liveid", "class": LiveIDOAuth2, "authenticating_authority": "consent.live.com", "token_endpoint": "https://login.live.com/oauth20_token.srf", "authorization_endpoint": 'https://login.live.com/oauth20_authorize.srf', "userinfo_endpoint": "https://apis.live.net/v5.0/me", "name": "LiveID", "scope": ["wl.basic"], "attribute_map": { "uid": "id", #"email": "email", #"verified_email": true, "displayName": "name", "givenName": "first_name", "surname": "last_name", }, }, "linkedin":{ "saml_endpoint":"linkedin_sso", "social_endpoint":"linkedin", "authenticating_authority": 'http://api.linkedin.com/oauth/', "request_token_url": 'https://api.linkedin.com/uas/oauth/requestToken', "token_endpoint": 'https://api.linkedin.com/uas/oauth/accessToken', "userinfo_endpoint": "http://api.linkedin.com/v1/people/~?format=json", "authorization_endpoint": 'https://www.linkedin.com/uas/oauth/authenticate', "class":LinkedIn, "scope": ["r_basicprofile", "r_emailaddress"], "attribute_map": { #"eduPersonPrincipalName": ("%[email protected]", "user_id"), "givenName": "lastName", "surName": "firstName", "uid": "user_id", }, "name": "LinkedIn", }, "paypal": { "saml_endpoint":"paypal_sso", "social_endpoint":"paypal", "authenticating_authority": "https://www.paypal.com/webapps/auth/protocol/openidconnect/v1", "authorization_endpoint": "https://www.paypal.com/webapps/auth/protocol/openidconnect/v1/authorize", "token_endpoint": "https://www.paypal.com/webapps/auth/protocol/openidconnect/v1/tokenservice", "userinfo_endpoint": "https://www.paypal.com/webapps/auth/protocol/openidconnect/v1/userinfo", "scope": ["openid", "profile", "email"], "attribute_map": { "uid": "uid", "email": "email", #"verified_email": true, "displayName": "full_name", }, "class":PayPal, "name": "Paypal", }, }
Python
0
@@ -5757,24 +5757,55 @@ %0A %7D,%0A + # !!! Doesn't work yet !!!%0A %22paypal%22
4bc5883ad792078ade196610ee83adb967c95d8f
Check for available data before submitting it. The code here is self-explanatory, again.
plugins/SliceInfoPlugin/SliceInfo.py
plugins/SliceInfoPlugin/SliceInfo.py
# Copyright (c) 2015 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. from UM.Extension import Extension from UM.Application import Application from UM.Preferences import Preferences from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Scene.SceneNode import SceneNode from UM.Message import Message from UM.i18n import i18nCatalog from UM.Logger import Logger import collections import json import os.path import copy import platform import math import urllib.request import urllib.parse catalog = i18nCatalog("cura") ## This Extension runs in the background and sends several bits of information to the Ultimaker servers. # The data is only sent when the user in question gave permission to do so. All data is anonymous and # no model files are being sent (Just a SHA256 hash of the model). class SliceInfo(Extension): info_url = "https://stats.youmagine.com/curastats/slice" def __init__(self): super().__init__() Application.getInstance().getOutputDeviceManager().writeStarted.connect(self._onWriteStarted) Preferences.getInstance().addPreference("info/send_slice_info", True) Preferences.getInstance().addPreference("info/asked_send_slice_info", False) if not Preferences.getInstance().getValue("info/asked_send_slice_info"): self.send_slice_info_message = Message(catalog.i18nc("@info", "Cura automatically sends slice info. You can disable this in preferences"), lifetime = 0, dismissable = False) self.send_slice_info_message.addAction("Dismiss", catalog.i18nc("@action:button", "Dismiss"), None, "") self.send_slice_info_message.actionTriggered.connect(self.messageActionTriggered) self.send_slice_info_message.show() def messageActionTriggered(self, message_id, action_id): self.send_slice_info_message.hide() Preferences.getInstance().setValue("info/asked_send_slice_info", True) def _onWriteStarted(self, output_device): if not Preferences.getInstance().getValue("info/send_slice_info"): Logger.log("d", "'info/send_slice_info' is turned off.") return # Do nothing, user does not want to send data global_container_stack = Application.getInstance().getGlobalContainerStack() # Get total material used (in mm^3) print_information = Application.getInstance().getPrintInformation() material_radius = 0.5 * global_container_stack.getProperty("material_diameter", "value") material_used = math.pi * material_radius * material_radius * print_information.materialAmount #Volume of material used # Get model information (bounding boxes, hashes and transformation matrix) models_info = [] for node in DepthFirstIterator(Application.getInstance().getController().getScene().getRoot()): if type(node) is SceneNode and node.getMeshData() and node.getMeshData().getVertices() is not None: if not getattr(node, "_outside_buildarea", False): model_info = {} model_info["hash"] = node.getMeshData().getHash() model_info["bounding_box"] = {} model_info["bounding_box"]["minimum"] = {} model_info["bounding_box"]["minimum"]["x"] = node.getBoundingBox().minimum.x model_info["bounding_box"]["minimum"]["y"] = node.getBoundingBox().minimum.y model_info["bounding_box"]["minimum"]["z"] = node.getBoundingBox().minimum.z model_info["bounding_box"]["maximum"] = {} model_info["bounding_box"]["maximum"]["x"] = node.getBoundingBox().maximum.x model_info["bounding_box"]["maximum"]["y"] = node.getBoundingBox().maximum.y model_info["bounding_box"]["maximum"]["z"] = node.getBoundingBox().maximum.z model_info["transformation"] = str(node.getWorldTransformation().getData()) models_info.append(model_info) # Bundle the 
collected data submitted_data = { "processor": platform.processor(), "machine": platform.machine(), "platform": platform.platform(), "settings": global_container_stack.serialize(), # global_container with references on used containers "version": Application.getInstance().getVersion(), "modelhash": "None", "printtime": print_information.currentPrintTime.getDisplayString(), "filament": material_used, "language": Preferences.getInstance().getValue("general/language"), "materials_profiles ": {} } for container in global_container_stack.getContainers(): submitted_data["settings_%s" %(container.getId())] = container.serialize() # This can be anything, eg. INI, JSON, etc. # Convert data to bytes submitted_data = urllib.parse.urlencode(submitted_data) binary_data = submitted_data.encode("utf-8") # Submit data try: f = urllib.request.urlopen(self.info_url, data = binary_data, timeout = 1) Logger.log("i", "Sent anonymous slice info to %s", self.info_url) f.close() except Exception as e: Logger.logException("e", e)
Python
0
@@ -4790,16 +4790,159 @@ ners():%0A + container_id = container.getId()%0A container_serialized = container.serialize()%0A if container_serialized:%0A @@ -4985,24 +4985,19 @@ ontainer -.getId() +_id )%5D = con @@ -4994,33 +4994,33 @@ id)%5D = container -. +_ serialize() # Th @@ -5012,18 +5012,17 @@ erialize -() +d # This @@ -5057,16 +5057,222 @@ ON, etc. +%0A else:%0A Logger.log(%22i%22, %22No data found in %25s to be serialized!%22, container_id)%0A%0A for key in submitted_data.keys():%0A print(%22%25s -%3E %25s%22 %25(key, submitted_data%5Bkey%5D)) %0A%0A
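Decoded, the container loop now serializes each container once, stores only non-empty results, logs containers that yielded nothing, and prints every key/value pair before submission. Reconstructed loop (comment placement adjusted):

    for container in global_container_stack.getContainers():
        container_id = container.getId()
        container_serialized = container.serialize()
        if container_serialized:
            # This can be anything, eg. INI, JSON, etc.
            submitted_data["settings_%s" % (container_id)] = container_serialized
        else:
            Logger.log("i", "No data found in %s to be serialized!", container_id)

    for key in submitted_data.keys():
        print("%s -> %s" % (key, submitted_data[key]))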
e6d327a5249e14765999357a391d97c4fd2cd8b8
Test for sitemap.xml
skeleton/tests.py
skeleton/tests.py
from django.core import management from django.test import TestCase as BaseTestCase from django.test.client import Client as BaseClient, FakePayload, \ RequestFactory from django.core.urlresolvers import reverse from post.models import Post from foundry.models import Member class TestCase(BaseTestCase): @classmethod def setUpClass(cls): cls.request = RequestFactory() cls.client = BaseClient() # Post-syncdb steps management.call_command('load_photosizes', interactive=False) management.call_command('loaddata', 'skeleton/fixtures/sites.json', interactive=False) # Editor cls.editor, dc = Member.objects.get_or_create( username='editor', email='[email protected]' ) cls.editor.set_password("password") cls.editor.save() # Post post, dc = Post.objects.get_or_create( title='Post 1', content='<b>aaa</b>', owner=cls.editor, state='published', ) post.sites = [1] post.save() def test_common_urls(self): """High-level test to confirm common set of URLs render""" urls = ( (reverse('join'), 200), (reverse('login'), 200), (reverse('logout'), 302), (reverse('password_reset'), 200), (reverse('terms-and-conditions'), 200), ('/post/post-1/', 200), ) for url, code in urls: print "Checking path %s" % url response = self.client.get(url) self.assertEqual(response.status_code, code)
Python
0.000001
@@ -322,19 +322,17 @@ ssmethod - %0A + def @@ -839,17 +839,9 @@ e()%0A - %0A + @@ -1377,17 +1377,16 @@ ), 200), - %0A @@ -1406,32 +1406,67 @@ post-1/', 200),%0A + ('/sitemap.xml', 200),%0A )%0A
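Decoded, besides trailing-whitespace cleanup the substantive change adds one entry to the smoke-test tuple so the sitemap is fetched too:

    urls = (
        # ... earlier entries (join, login, logout, ...) unchanged ...
        ('/post/post-1/', 200),
        ('/sitemap.xml', 200),
    )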
5f962415d401b3c37825d6e3a0560de47ce9ec3d
remove unused code
controller/lib/jubamgr/controller/main.py
controller/lib/jubamgr/controller/main.py
# -*- coding: utf-8 -*- import threading import msgpackrpc from jubavisor.client import Jubavisor from jubavisor.types import ServerArgv from .config import JubaManagerConfig from .zk import get_zk, cancel_if_down class JubaManagerController(): @classmethod def main(cls, args): myself = args.pop(0) # TODO externalize config field name with open('config.json') as f: cfg = JubaManagerConfig.from_json(f.read()) # TODO assert length of args subcmd = args[0] if subcmd == 'start': process_type = args[1] target_id = args[2] cls.start(cfg, process_type, target_id) elif subcmd == 'stop': process_type = args[1] target_id = args[2] cls.stop(cfg, process_type, target_id) elif subcmd == 'save': target_id = args[1] cls.local_model(cfg, target_id, 'save') elif subcmd == 'load': target_id = args[1] cls.local_model(cfg, target_id, 'load') elif subcmd == 'status': # TODO implement print "Not implemented yet: {0}".format(subcmd) else: print "Unknown subcmd: {0}".format(subcmd) @classmethod def start(cls, cfg, process_type, target_id): server = cfg.lookup(process_type, target_id) visor = cfg.lookup('visor', server._visor) cluster = cfg.lookup('cluster', server._cluster) client = Jubavisor(visor._host, visor._port, 'juba' + cluster._type + '/' + cluster._id, 10) argv = ServerArgv(server._port, "", "", 10, 10, 10, 2, 'juba' + cluster._type, cluster._type, cfg._global_zookeeper, cluster._id, "", "", "", "", 16, 512, "linear_mixer", False) client.start(1, argv) @classmethod def stop(cls, cfg, process_type, target_id): server = cfg.lookup(process_type, target_id) visor = cfg.lookup('visor', server._visor) cluster = cfg.lookup('cluster', server._cluster) client = Jubavisor(visor._host, visor._port, 'juba' + cluster._type + '/' + cluster._id, 10) client.stop(1) @classmethod def local_model(cls, cfg, target_id, method): cluster = cfg.lookup('cluster', target_id) servers = [] if cluster is None: server = cfg.lookup('server', target_id) if server is None: print "No such cluster or server matching the ID" return servers.append(server) cluster = cfg.lookup('cluster', server._cluster) else: servers = filter(lambda x: x._cluster == cluster._id, cfg.get_all('server')) threads = [] zk = get_zk() for s in servers: host = cfg.lookup('visor', s._visor)._host client = msgpackrpc.Client(msgpackrpc.Address(host, s._port), 0) cancel_if_down(client, zk, host, s._port, cluster._type, cluster._id) client.call(method, cluster._id, 'jubamgr',) #future = client.call_async(method, cluster._id, 'jubamgr',) #future.get() zk.stop()
Python
0.000017
@@ -2761,95 +2761,8 @@ ',)%0A - #future = client.call_async(method, cluster._id, 'jubamgr',)%0A #future.get()%0A
eb624f48a259e948f4bc4c33370fe971b19ea19b
Update alert() fn tests according to new signature
tests/alerts/geomodel/test_alert.py
tests/alerts/geomodel/test_alert.py
from datetime import datetime, timedelta from mozdef_util.utilities.toUTC import toUTC from alerts.geomodel.alert import alert import alerts.geomodel.locality as locality class TestAlert: '''Unit tests for alert generation. ''' def test_do_not_alert_when_travel_possible(self): state = locality.State('locality', 'testuser', [ locality.Locality( sourceipaddress='1.2.3.123', city='Toronto', country='CA', lastaction=toUTC(datetime.now()) - timedelta(minutes=5), latitude=43.6529, longitude=-79.3849, radius=50), locality.Locality( sourceipaddress='123.3.2.1', city='San Francisco', country='US', lastaction=toUTC(datetime.now()) - timedelta(hours=10), latitude=37.773972, longitude=-122.431297, radius=50) ]) alert_produced = alert(state) assert alert_produced is None def test_do_alert_when_travel_impossible(self): state = locality.State('locality', 'testuser', [ locality.Locality( sourceipaddress='1.2.3.123', city='Toronto', country='CA', lastaction=toUTC(datetime.now()) - timedelta(minutes=5), latitude=43.6529, longitude=-79.3849, radius=50), locality.Locality( sourceipaddress='123.3.2.1', city='San Francisco', country='US', lastaction=toUTC(datetime.now()) - timedelta(hours=1), latitude=37.773972, longitude=-122.431297, radius=50) ]) alert_produced = alert(state) assert alert_produced is not None assert alert_produced.username == 'testuser' assert alert_produced.sourceipaddress == '1.2.3.123' assert alert_produced.origin.city == 'Toronto'
Python
0
@@ -1012,32 +1012,59 @@ ed = alert(state +.username, state.localities )%0A%0A asser @@ -1872,16 +1872,43 @@ rt(state +.username, state.localities )%0A%0A
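Decoded, both test methods switch from the old one-argument call to the new two-argument signature, passing the username and locality list separately:

    # old: alert_produced = alert(state)
    alert_produced = alert(state.username, state.localities)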
6312f162eb37ac3e57a18ea80fe201ab08a6ded1
Add Docstring entry for the filter feature of dir_items()
convenience/file_convenience/dir_items.py
convenience/file_convenience/dir_items.py
import os # ============================================================================== # DIR ITEMS # ============================================================================== def dir_items(d, opt="all", rel=True, root="", filter=""): """ Takes a directory path (as a string). And returns the paths/names of the items that are contained in that directory. ALL ITEMS, DIRECTORIES, OR JUST FILES ---------------------------- Depending on the options you use, you can return: - a list of ALL the items (opt="all") - just the subdirectories (opt="dirs") - only files (opt="files") - directories and files as a tuple of two separate lists (opt="grouped") RELATIVE OR ABSOLUTE PATHS ---------------------------- The items can be returned as: - just the filenames of the items (rel=True) - the absolute path to the items (rel=False) - the path to the items relative to any directory whatsoever in the entire system filestructure (rel=True, root="some/dir/") STRING FILTERS ---------------------------- You can also specify to only return items where the filename contains some specified text, eg, only return items containing ".jpg" NOTE ---------------------------- The items are not returned in any particular order. :param d: (str) The full path to the directory you want to search in. :param opt: (str or None) {default = "all"} The option to use: "dirs" : return just the subdirectories "files": return just the files "all" : return all items "grouped" : returns a tuple of two lists. ([directories], [files]) :param rel: (optional)(boolean) if True, then it returns the listed items as directories relative to the d directory. IF False, then it returns the FULL paths. :param root: (Optional)(str) A directory path that we want to use as the root for relative paths. If left blank, then it uses the directory set in d as the root directory. """ # ========================================================================== # TODO: create a filter, so you can filter for certain types of files, or # directories, using something like regex, or file extensions, or # mime types # -------------------------------------------------------------------------- # Setup # -------------------------------------------------------------------------- fList = [] # file List dList = [] # Directory List d = os.path.abspath(d) # desired directory as an absolute path # -------------------------------------------------------------------------- # Set the ralative/absolute path to append to the output list of items # -------------------------------------------------------------------------- if rel: root = root.strip() if root == "": root = d outpath = os.path.relpath(d, root) else: outpath = d # if the root path is d, then remove the "." from path. 
if outpath == ".": outpath = "" # -------------------------------------------------------------------------- # Sort each item in the directory into either a directory or a file # -------------------------------------------------------------------------- for item in os.listdir(d): # If item doesnt satisfy our filter condition then skip to the next item if filter not in item: continue full_item_path = os.path.join(d, item) # Full path to the item out_item_path = os.path.join(outpath, item) # Path used in output list if os.path.isfile(full_item_path): fList.append(out_item_path) elif os.path.isdir(full_item_path): dList.append(out_item_path) else: print "WARNING: directoryItems found an item that is neither a \n"\ " file, nor a directory" # -------------------------------------------------------------------------- # Return the item lists # -------------------------------------------------------------------------- if (opt is None) or (opt.lower() in ["none", "", "all"]): return dList + fList elif opt.lower() in ["file", "files", "f"]: return fList elif opt.lower() in ["dir", "dirs", "d", "folder", "folders"]: return dList elif opt.lower() in ["grouped", "group", "g"]: return (dList, fList) else: msg = "\n dir_items(): the only valid values for the `opt` argument" \ "\n are 'all', 'dirs', 'files', and 'grouped'" raise ValueError(msg)
Python
0
@@ -2132,16 +2132,117 @@ ectory.%0A + :param filter: (string)%0A Used to filter for items that contain this string in their name.%0A %22%22%22%0A @@ -2339,16 +2339,18 @@ DO: -create a +Expand the fil @@ -2376,94 +2376,20 @@ ter -for certain types of files, or%0A # directories, using something like +using regex, - or fil @@ -2401,19 +2401,16 @@ ensions, - or %0A # @@ -2414,16 +2414,19 @@ # +or mime typ
a710b93fead29a49cebebe37fecd33ed9ac4221c
add exception for non-existent table
sacrud/pyramid_ext/views/CRUD.py
sacrud/pyramid_ext/views/CRUD.py
#! /usr/bin/env python # -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright © 2014 uralbash <[email protected]> # # Distributed under terms of the MIT license. """ Views for Pyramid frontend """ import itertools import json from pyramid.httpexceptions import HTTPFound, HTTPNotFound from pyramid.view import view_config from sqlalchemy import inspect from sacrud import action from sacrud.common.paginator import get_paginator from sacrud.common.pyramid_helpers import get_settings_param, sacrud_env from sacrud.common.sa_helpers import pk_to_list from sacrud.pyramid_ext.breadcrumbs import breadcrumbs def get_table(tname, request): """ Return table by table name from sacrud.models in settings. """ # convert values of models dict to flat list setting_params = get_settings_param(request, 'sacrud.models').values() tables_lists = map(lambda x: x['tables'], setting_params) tables = itertools.chain(*tables_lists) tables = filter(lambda table: (table.__tablename__). lower() == tname.lower(), tables) if not tables: return None return tables[0] def get_relationship(tname, request): table = get_table(tname, request) if not table: return None relations = inspect(table).relationships return [rel for rel in relations] def update_difference_object(obj, key, value): if isinstance(obj, dict): obj.update({key: value}) else: setattr(obj, key, value) # return obj def pk_list_to_dict(pk_list): if pk_list and len(pk_list) % 2 == 0: return dict(zip(pk_list[::2], pk_list[1::2])) return None class CRUD(object): def __init__(self, request): self.pk = None self.request = request self.tname = request.matchdict['table'] self.table = get_table(self.tname, self.request) self.relationships = get_relationship(self.tname, self.request) self.params = request.params.dict_of_lists() pk = request.matchdict.get('pk') if pk and len(pk) % 2 == 0: self.pk = pk_list_to_dict(pk) elif pk or pk == (): raise HTTPNotFound def flash_message(self, message, status="success"): if hasattr(self.request, 'session'): self.request.session.flash([message, status]) # XXX: C901 @sacrud_env @view_config(route_name='sa_list', renderer='/sacrud/list.jinja2') def sa_list(self): table = self.table request = self.request order_by = request.params.get('order_by', False) search = request.params.get('search') get_params = {'order_by': order_by, 'search': search} # Make url for table headrow links to order_by for col in getattr(table, 'sacrud_list_col', table.__table__.columns): order_param_list = [] column_name = col['column'].name if isinstance(col, dict) else col.name if order_by: if column_name not in order_by.replace('-', '').split('.'): order_param_list.append(column_name) for value in order_by.split('.'): none, pfx, col_name = value.rpartition('-') if column_name == col_name: new_pfx = {'': '-', '-': ''}[pfx] order_param_list.insert(0, '%s%s' % (new_pfx, col_name)) else: order_param_list.append('%s%s' % (pfx, col_name)) else: order_param_list.append(column_name) full_params = ['%s=%s' % (param, value) for param, value in get_params.items() if param != 'order_by' and value] full_params.append('order_by=%s' % '.'.join(order_param_list)) update_difference_object(col, 'head_url', '&'.join(full_params)) # Some actions with objects in grid selected_action = request.POST.get('selected_action') items_list = request.POST.getall('selected_item') if selected_action == 'delete': for item in items_list: pk_list = json.loads(item) pk = pk_list_to_dict(pk_list) action.CRUD(request.dbsession, table, pk=pk).delete() items_per_page = getattr(table, 'items_per_page', 10) 
resp = action.CRUD(request.dbsession, table)\ .rows_list(paginator=get_paginator(request, items_per_page), order_by=order_by, search=search) return {'sa_crud': resp, 'pk_to_list': pk_to_list, 'breadcrumbs': breadcrumbs(self.tname, 'sa_list'), 'get_params': get_params} @sacrud_env @view_config(route_name='sa_update', renderer='/sacrud/create.jinja2') @view_config(route_name='sa_create', renderer='/sacrud/create.jinja2') def sa_add(self): resp = action.CRUD(self.request.dbsession, self.table, self.pk) if 'form.submitted' in self.request.params: resp.request = self.params resp.add() if self.pk: self.flash_message("You updated object of %s" % self.tname) else: self.flash_message("You created new object of %s" % self.tname) return HTTPFound(location=self.request.route_url('sa_list', table=self.tname)) bc = breadcrumbs(self.tname, 'sa_create') if self.pk: bc = breadcrumbs(self.tname, 'sa_update', id=self.pk) return {'sa_crud': resp.add(), 'pk_to_list': pk_to_list, 'relationships': self.relationships, 'breadcrumbs': bc} @view_config(route_name='sa_delete') def sa_delete(self): action.CRUD(self.request.dbsession, self.table, pk=self.pk).delete() self.flash_message("You have removed object of %s" % self.tname) return HTTPFound(location=self.request.route_url('sa_list', table=self.tname))
Python
0.000003
@@ -1961,24 +1961,83 @@ of_lists()%0A%0A + if not self.table:%0A raise HTTPNotFound%0A%0A pk =
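Decoded, CRUD.__init__ now raises 404 as soon as get_table() fails to resolve the requested table name, before any primary-key handling:

    self.params = request.params.dict_of_lists()

    if not self.table:
        raise HTTPNotFound

    pk = request.matchdict.get('pk')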
2eb3a682706a5ff0255474230ec32fd0ac96c727
update mac build script
sansview/setup_mac.py
sansview/setup_mac.py
""" This is a setup.py script partly generated by py2applet Usage: python setup.py py2app """ from setuptools import setup import periodictable.xsf import DataLoader.readers from distutils.sysconfig import get_python_lib import os DATA_FILES = [] RESOURCES_FILES = [] #Periodictable data file DATA_FILES = periodictable.data_files() #invariant and calculator help doc import sans.perspectives.calculator as calculator DATA_FILES += calculator.data_files() import sans.perspectives.invariant as invariant DATA_FILES += invariant.data_files() #CANSAxml reader data files RESOURCES_FILES.append(os.path.join(DataLoader.readers.get_data_path(),'defaults.xml')) # Locate libxml2 library lib_locs = ['/usr/local/lib', '/usr/lib'] libxml_path = None for item in lib_locs: libxml_path = '%s/libxml2.dylib' % item if os.path.isfile(libxml_path): break if libxml_path == None: raise RuntimeError, "Could not find libxml2 on the system" APP = ['sansview.py'] DATA_FILES += ['images','test','plugins','media'] OPTIONS = {'argv_emulation': True, 'packages': ['lxml','periodictable'], 'iconfile': 'images/ball.icns', 'frameworks':[libxml_path], 'resources': RESOURCES_FILES } setup( app=APP, data_files=DATA_FILES, include_package_data= True, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
Python
0
@@ -784,16 +784,21 @@ xml_path +_test = '%25s/l @@ -851,16 +851,21 @@ xml_path +_test ): %0A @@ -872,13 +872,38 @@ -break +libxml_path = libxml_path_test %0Aif
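Decoded, the probe loop assigns libxml_path only when the candidate file actually exists; previously the loop body always overwrote libxml_path, so when no location matched, the "Could not find libxml2" check compared against the last candidate path instead of None and never fired. The break also goes away, so the last existing location now wins. Reconstructed loop:

    libxml_path = None
    for item in lib_locs:
        libxml_path_test = '%s/libxml2.dylib' % item
        if os.path.isfile(libxml_path_test):
            libxml_path = libxml_path_test
    if libxml_path == None:
        raise RuntimeError, "Could not find libxml2 on the system"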
9777b51ee2f898c176b6e7d2c9412eb7556bb60b
fix typo for load array (#5315)
keras/utils/io_utils.py
keras/utils/io_utils.py
"""Utilities related to disk I/O.""" from __future__ import absolute_import from __future__ import print_function import numpy as np import sys from collections import defaultdict try: import h5py except ImportError: h5py = None try: import tables except ImportError: tables = None class HDF5Matrix(object): """Representation of HDF5 dataset to be used instead of a Numpy array. # Example ```python x_data = HDF5Matrix('input/file.hdf5', 'data') model.predict(x_data) ``` Providing `start` and `end` allows use of a slice of the dataset. Optionally, a normalizer function (or lambda) can be given. This will be called on every slice of data retrieved. # Arguments datapath: string, path to a HDF5 file dataset: string, name of the HDF5 dataset in the file specified in datapath start: int, start of desired slice of the specified dataset end: int, end of desired slice of the specified dataset normalizer: function to be called on data when retrieved # Returns An array-like HDF5 dataset. """ refs = defaultdict(int) def __init__(self, datapath, dataset, start=0, end=None, normalizer=None): if h5py is None: raise ImportError('The use of HDF5Matrix requires ' 'HDF5 and h5py installed.') if datapath not in list(self.refs.keys()): f = h5py.File(datapath) self.refs[datapath] = f else: f = self.refs[datapath] self.data = f[dataset] self.start = start if end is None: self.end = self.data.shape[0] else: self.end = end self.normalizer = normalizer def __len__(self): return self.end - self.start def __getitem__(self, key): if isinstance(key, slice): if key.stop + self.start <= self.end: idx = slice(key.start + self.start, key.stop + self.start) else: raise IndexError elif isinstance(key, int): if key + self.start < self.end: idx = key + self.start else: raise IndexError elif isinstance(key, np.ndarray): if np.max(key) + self.start < self.end: idx = (self.start + key).tolist() else: raise IndexError elif isinstance(key, list): if max(key) + self.start < self.end: idx = [x + self.start for x in key] else: raise IndexError if self.normalizer is not None: return self.normalizer(self.data[idx]) else: return self.data[idx] @property def shape(self): return (self.end - self.start,) + self.data.shape[1:] def save_array(array, name): if tables is None: raise ImportError('The use of `save_array` requires ' 'the tables module.') f = tables.open_file(name, 'w') atom = tables.Atom.from_dtype(array.dtype) ds = f.create_carray(f.root, 'data', atom, array.shape) ds[:] = array f.close() def load_array(name): if tables is None: raise ImportError('The use of `save_array` requires ' 'the tables module.') f = tables.open_file(name) array = f.root.data a = np.empty(shape=array.shape, dtype=array.dtype) a[:] = array[:] f.close() return a def ask_to_proceed_with_overwrite(filepath): """Produces a prompt asking about overwriting a file. # Arguments filepath: the path to the file to be overwritten. # Returns True if we can proceed with overwrite, False otherwise. """ get_input = input if sys.version_info[:2] <= (2, 7): get_input = raw_input overwrite = get_input('[WARNING] %s already exists - overwrite? ' '[y/n]' % (filepath)) while overwrite not in ['y', 'n']: overwrite = get_input('Enter "y" (overwrite) or "n" (cancel).') if overwrite == 'n': return False print('[TIP] Next time specify overwrite=True!') return True
Python
0.000001
@@ -3271,36 +3271,36 @@ or('The use of %60 -save +load _array%60 requires
b6c8e38b96293bacd7739411200cd85f47f1efca
handle division by zero
prophet/analyze.py
prophet/analyze.py
from prophet.utils.formatters import dict_to_table import math import numpy as np class Analyzer(object): def __repr__(self): return self.name class Volatility(Analyzer): name = 'volatility' def run(self, backtest, **kwargs): return backtest.get_daily_returns().std() class Sharpe(Analyzer): name = 'sharpe' def run(self, data, config, **kwargs): avg_daily_returns = data['average_return'] volatility = data['volatility'] risk_free_rate = config.get('RISK_FREE_RATE', 0) trading_days = config.get('YEARLY_TRADING_DAYS', 252) return ((avg_daily_returns - risk_free_rate) / volatility * math.sqrt(trading_days)) class Sortino(Analyzer): name = 'sortino' def run(self, backtest, data, config, **kwargs): avg_daily_returns = data['average_return'] negative_returns = backtest.get_daily_returns()[backtest.get_daily_returns() < 0] volatility_negative_returns = negative_returns.std() risk_free_rate = config.get('RISK_FREE_RATE', 0) trading_days = config.get('YEARLY_TRADING_DAYS', 252) return ((avg_daily_returns - risk_free_rate) / volatility_negative_returns * math.sqrt(trading_days)) class AverageReturn(Analyzer): name = 'average_return' def run(self, backtest, **kwargs): return backtest.get_daily_returns().mean() class CumulativeReturn(Analyzer): name = "cumulative_return" def run(self, backtest, **kwargs): return backtest.normalize0()[-1] class MaximumDrawdown(Analyzer): name = "maximum_drawdown" def run(self, backtest, **kwargs): dd_end = np.argmax(np.maximum.accumulate(backtest) - backtest) dd_start = np.argmax(backtest[:dd_end]) return 1-backtest[dd_end]/backtest[dd_start] class Analysis(dict): def __repr__(self): """ Represents Analysis object as a text table. """ return dict_to_table(self) default_analyzers = [Volatility(), AverageReturn(), Sharpe(), CumulativeReturn(), MaximumDrawdown(), Sortino()]
Python
0.002079
@@ -587,32 +587,81 @@ ING_DAYS', 252)%0A + if volatility == 0:%0A return 0%0A return ( @@ -710,16 +710,16 @@ atility%0A - @@ -1167,32 +1167,98 @@ ING_DAYS', 252)%0A + if volatility_negative_returns == 0:%0A return 0%0A return ( @@ -1842,16 +1842,16 @@ cktest)%0A - @@ -1890,16 +1890,73 @@ d_end%5D)%0A + if backtest%5Bdd_start%5D == 0:%0A return 0%0A
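Decoded, each analyzer gains an early return before its division: Sharpe and Sortino return 0 when the relevant volatility is 0, and MaximumDrawdown returns 0 when the peak value backtest[dd_start] is 0. The Sharpe guard, reconstructed (the other two follow the same pattern):

    trading_days = config.get('YEARLY_TRADING_DAYS', 252)
    if volatility == 0:
        return 0  # a flat return series would otherwise divide by zero
    return ((avg_daily_returns - risk_free_rate)
            / volatility * math.sqrt(trading_days))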
6ddc32c10124d10db53f9017044be65a00a35a33
Add newline to end of __init__.py.
propka/__init__.py
propka/__init__.py
"""PROPKA 3.1 See https://github.com/jensengroup/propka-3.1 for more information. Please cite these PROPKA references in publications: * Sondergaard, Chresten R., Mats HM Olsson, Michal Rostkowski, and Jan H. Jensen. "Improved Treatment of Ligands and Coupling Effects in Empirical Calculation and Rationalization of pKa Values." Journal of Chemical Theory and Computation 7, no. 7 (2011): 2284-2295. * Olsson, Mats HM, Chresten R. Sondergaard, Michal Rostkowski, and Jan H. Jensen. "PROPKA3: consistent treatment of internal and surface residues in empirical pKa predictions." Journal of Chemical Theory and Computation 7, no. 2 (2011): 525-537. """ __all__ = ["atom", "bonds", "calculations", "conformation_container", "coupled_groups", "determinant", "determinants", "group", "hybrid36", "iterative", "lib", "ligand_pka_values", "ligand", "molecular_container", "output", "parameters", "pdb", "protonate", "run", "vector_algebra", "version"]
Python
0.000005
@@ -985,8 +985,9 @@ ersion%22%5D +%0A
468dd25146fb5041f4fd029f72a8caa39c808e63
Fix typo in keystoneclient
keystoneclient/utils.py
keystoneclient/utils.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import getpass import hashlib import sys import prettytable import six from keystoneclient import exceptions # Decorator for cli-args def arg(*args, **kwargs): def _decorator(func): # Because of the sematics of decorator composition if we just append # to the options list positional options will appear to be backwards. func.__dict__.setdefault('arguments', []).insert(0, (args, kwargs)) return func return _decorator def pretty_choice_list(l): return ', '.join("'%s'" % i for i in l) def print_list(objs, fields, formatters={}, order_by=None): pt = prettytable.PrettyTable([f for f in fields], caching=False, print_empty=False) pt.aligns = ['l' for f in fields] for o in objs: row = [] for field in fields: if field in formatters: row.append(formatters[field](o)) else: field_name = field.lower().replace(' ', '_') data = getattr(o, field_name, '') if data is None: data = '' row.append(data) pt.add_row(row) if order_by is None: order_by = fields[0] print(pt.get_string(sortby=order_by)) def _word_wrap(string, max_length=0): """wrap long strings to be no longer then max_length.""" if max_length <= 0: return string return '\n'.join([string[i:i + max_length] for i in range(0, len(string), max_length)]) def print_dict(d, wrap=0): """pretty table prints dictionaries. Wrap values to max_length wrap if wrap>0 """ pt = prettytable.PrettyTable(['Property', 'Value'], caching=False, print_empty=False) pt.aligns = ['l', 'l'] for (prop, value) in six.iteritems(d): if value is None: value = '' value = _word_wrap(value, max_length=wrap) pt.add_row([prop, value]) print(pt.get_string(sortby='Property')) def find_resource(manager, name_or_id): """Helper for the _find_* methods.""" # first try to get entity as integer id try: if isinstance(name_or_id, int) or name_or_id.isdigit(): return manager.get(int(name_or_id)) except exceptions.NotFound: pass # now try the entity as a string try: return manager.get(name_or_id) except (exceptions.NotFound): pass # finally try to find entity by name try: if isinstance(name_or_id, str): name_or_id = name_or_id.decode('utf-8', 'strict') return manager.find(name=name_or_id) except exceptions.NotFound: msg = ("No %s with a name or ID of '%s' exists." % (manager.resource_class.__name__.lower(), name_or_id)) raise exceptions.CommandError(msg) except exceptions.NoUniqueMatch: msg = ("Multiple %s matches found for '%s', use an ID to be more" " specific." % (manager.resource_class.__name__.lower(), name_or_id)) raise exceptions.CommandError(msg) def unauthenticated(f): """Adds 'unauthenticated' attribute to decorated function. Usage:: @unauthenticated def mymethod(f): ... """ f.unauthenticated = True return f def isunauthenticated(f): """Checks to see if the function is marked as not requiring authentication with the @unauthenticated decorator. Returns True if decorator is set to True, False otherwise. 
""" return getattr(f, 'unauthenticated', False) def string_to_bool(arg): if isinstance(arg, bool): return arg return arg.strip().lower() in ('t', 'true', 'yes', '1') def hash_signed_token(signed_text): hash_ = hashlib.md5() hash_.update(signed_text) return hash_.hexdigest() def prompt_for_password(): """Prompt user for password if not provided so the password doesn't show up in the bash history. """ if not (hasattr(sys.stdin, 'isatty') and sys.stdin.isatty()): # nothing to do return while True: try: new_passwd = getpass.getpass('New Password: ') rep_passwd = getpass.getpass('Repeat New Password: ') if new_passwd == rep_passwd: return new_passwd except EOFError: return
Python
0.009128
@@ -830,16 +830,17 @@ the sema +n tics of
0560f65eb6740281c77c9b016bd9a44f486be6ae
Use dense matrices instead of sparse where that makes sense
openprescribing/matrixstore/matrix_ops.py
openprescribing/matrixstore/matrix_ops.py
import numpy import scipy.sparse def sparse_matrix(shape, integer=False): """ Create a new sparse matrix (either integer or floating point) in a form suitable for populating with data """ dtype = numpy.int_ if integer else numpy.float_ return scipy.sparse.lil_matrix(shape, dtype=dtype) def finalise_matrix(matrix): """ Return a copy of a sparse matrix in a form suitable for storage """ matrix = matrix.tocsc() matrix.sort_indices() if is_integer(matrix): matrix = convert_to_smallest_int_type(matrix) return matrix def is_integer(matrix): """ Return whether or not the matrix has integer type """ return numpy.issubdtype(matrix.dtype, numpy.integer) def convert_to_smallest_int_type(matrix): """ Convert a matrix to use the smallest integer type capable of representing all the values currently stored in it """ target_type = smallest_int_type_for_range(matrix.min(), matrix.max()) if target_type != matrix.dtype: matrix = matrix.astype(target_type, copy=False) return matrix def smallest_int_type_for_range(minimum, maximum): """ Return smallest numpy integer type capable of representing all values in the supplied range """ signed = minimum < 0 abs_max = max(maximum, abs(minimum)) if signed: if abs_max < 1 << 7: return numpy.int8 elif abs_max < 1 << 15: return numpy.int16 elif abs_max < 1 << 31: return numpy.int32 else: if abs_max < 1 << 8: return numpy.uint8 elif abs_max < 1 << 16: return numpy.uint16 elif abs_max < 1 << 32: return numpy.uint32 # Return default integer type (other than in the exceptional case that the # value is too big to store in a signed 64-bit int) if not signed and abs_max > 1 << 63: return numpy.uint64 else: return numpy.int64
Python
0.000011
@@ -1,12 +1,45 @@ +from __future__ import division%0A%0A import numpy @@ -61,16 +61,627 @@ parse%0A%0A%0A +# Above a certain level of density it becomes more efficient to use a normal%0A# dense matrix instead of a sparse one. This threshold was determined through a%0A# not particularly scientific process of trial and error. Due to the%0A# compression we apply there's very little difference in storage requirements%0A# between sparse and dense matrices, but the difference comes in the time take%0A# to perform operations (e.g summing) using these matrices and that's harder to%0A# measure. At some point we can profile and optimise the performance of the%0A# MatrixStore, but for now it's fast enough.%0ADENSITY_THRESHOLD = 0.5%0A%0A%0A def spar @@ -1061,24 +1061,76 @@ age%0A %22%22%22%0A + if get_density(matrix) %3C DENSITY_THRESHOLD:%0A matrix = @@ -1145,24 +1145,28 @@ tocsc()%0A + + matrix.sort_ @@ -1175,16 +1175,60 @@ dices()%0A + else:%0A matrix = matrix.toarray()%0A if i @@ -1410,24 +1410,24 @@ ype%0A %22%22%22%0A - return n @@ -1469,24 +1469,174 @@ .integer)%0A%0A%0A +def get_density(matrix):%0A %22%22%22%0A Return the density of a sparse matrix%0A %22%22%22%0A return matrix.getnnz() / (matrix.shape%5B0%5D * matrix.shape%5B1%5D)%0A%0A%0A def convert_
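The diff above introduces a density cutoff for choosing between compressed-sparse and dense storage. A minimal sketch of that decision, assuming only numpy and scipy; the 0.5 threshold and the getnnz-based density formula come from the diff itself, while the demo matrices are illustrative:

```python
import numpy
import scipy.sparse

DENSITY_THRESHOLD = 0.5  # value introduced in the diff above

def get_density(matrix):
    # Fraction of explicitly stored entries over the full matrix size
    return matrix.getnnz() / (matrix.shape[0] * matrix.shape[1])

def finalise_matrix(matrix):
    # Sparse enough: compressed sparse column; otherwise a plain ndarray
    if get_density(matrix) < DENSITY_THRESHOLD:
        matrix = matrix.tocsc()
        matrix.sort_indices()
    else:
        matrix = matrix.toarray()
    return matrix

m = scipy.sparse.lil_matrix((4, 4), dtype=numpy.int_)
m[0, 0] = 7                      # density 1/16 -> stays sparse
print(type(finalise_matrix(m)))  # csc_matrix

dense_ish = scipy.sparse.lil_matrix(numpy.ones((4, 4)))
print(type(finalise_matrix(dense_ish)))  # numpy.ndarray
```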
34f6eef42401590bc1809af68e15992f63736027
Add sensor class for uncalibrated gyroscope
plyer/platforms/android/gyroscope.py
plyer/platforms/android/gyroscope.py
''' Android Gyroscope --------------------- ''' from plyer.facades import Gyroscope from jnius import PythonJavaClass, java_method, autoclass, cast from plyer.platforms.android import activity Context = autoclass('android.content.Context') Sensor = autoclass('android.hardware.Sensor') SensorManager = autoclass('android.hardware.SensorManager') class GyroscopeSensorListener(PythonJavaClass): __javainterfaces__ = ['android/hardware/SensorEventListener'] def __init__(self): super(GyroscopeSensorListener, self).__init__() self.SensorManager = cast('android.hardware.SensorManager', activity.getSystemService(Context.SENSOR_SERVICE)) self.sensor = self.SensorManager.getDefaultSensor( Sensor.TYPE_GYROSCOPE) self.values = [None, None, None] def enable(self): self.SensorManager.registerListener(self, self.sensor, SensorManager.SENSOR_DELAY_NORMAL) def disable(self): self.SensorManager.unregisterListener(self, self.sensor) @java_method('(Landroid/hardware/SensorEvent;)V') def onSensorChanged(self, event): self.values = event.values[:3] @java_method('(Landroid/hardware/Sensor;I)V') def onAccuracyChanged(self, sensor, accuracy): # Maybe, do something in future? pass class AndroidGyroscope(Gyroscope): def __init__(self): super(AndroidGyroscope, self).__init__() self.bState = False def _enable(self): if (not self.bState): self.listener = GyroscopeSensorListener() self.listener.enable() self.bState = True def _disable(self): if (self.bState): self.bState = False self.listener.disable() del self.listener def _get_orientation(self): if (self.bState): return tuple(self.listener.values) else: return (None, None, None) def __del__(self): if(self.bState): self._disable() super(self.__class__, self).__del__() def instance(): return AndroidGyroscope()
Python
0.000001
@@ -1337,16 +1337,1016 @@ pass%0A%0A%0A +class GyroUncalibratedSensorListener(PythonJavaClass):%0A __javainterfaces__ = %5B'android/hardware/SensorEventListener'%5D%0A%0A def __init__(self):%0A super(GyroUncalibratedSensorListener, self).__init__()%0A service = activity.getSystemService(Context.SENSOR_SERVICE)%0A self.SensorManager = cast('android.hardware.SensorManager', service)%0A%0A self.sensor = self.SensorManager.getDefaultSensor(%0A Sensor.TYPE_GYROSCOPE_UNCALIBRATED)%0A self.values = %5BNone, None, None, None, None, None%5D%0A%0A def enable(self):%0A self.SensorManager.registerListener(self, self.sensor,%0A SensorManager.SENSOR_DELAY_NORMAL)%0A%0A def disable(self):%0A self.SensorManager.unregisterListener(self, self.sensor)%0A%0A @java_method('(Landroid/hardware/SensorEvent;)V')%0A def onSensorChanged(self, event):%0A self.values = event.values%5B:6%5D%0A%0A @java_method('(Landroid/hardware/Sensor;I)V')%0A def onAccuracyChanged(self, sensor, accuracy):%0A pass%0A%0A%0A class An @@ -2552,16 +2552,17 @@ listener +g = Gyros @@ -2607,16 +2607,117 @@ listener +gu = GyroUncalibratedSensorListener()%0A self.listenerg.enable()%0A self.listenergu .enable( @@ -2857,16 +2857,55 @@ listener +g.disable()%0A self.listenergu .disable @@ -2940,34 +2940,228 @@ ener -%0A%0A def _get_orientation +g%0A del self.listenergu%0A%0A def _get_rotation(self):%0A if (self.bState):%0A return tuple(self.listenerg.values)%0A else:%0A return (None, None, None)%0A%0A def _get_rotation_uncalib (sel @@ -3228,16 +3228,18 @@ listener +gu .values) @@ -3276,32 +3276,50 @@ urn (None, None, + None, None, None, None)%0A%0A def
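For context, the new TYPE_GYROSCOPE_UNCALIBRATED listener reports six values: the angular rate around x/y/z plus an estimated drift for each axis. A hedged usage sketch through the plyer facade; the `rotation` and `rotation_uncalib` property names are inferred from the `_get_rotation`/`_get_rotation_uncalib` methods in the diff and only do anything on an Android device:

```python
from plyer import gyroscope  # dispatches to the Android implementation above

gyroscope.enable()
x, y, z = gyroscope.rotation                  # calibrated angular rates (rad/s)
rates_and_drift = gyroscope.rotation_uncalib  # six values: 3 rates + 3 drift estimates
gyroscope.disable()
```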
00dce916e18bb0b05601d82d734e1cfdc45af140
Fix update container and call to ocular
polyaxon/monitor_statuses/monitor.py
polyaxon/monitor_statuses/monitor.py
import logging import ocular from django.conf import settings from constants.jobs import JobLifeCycle from db.redis.containers import RedisJobContainers from polyaxon.celery_api import celery_app from polyaxon.settings import K8SEventsCeleryTasks logger = logging.getLogger('polyaxon.monitors.statuses') def update_job_containers(event, status, job_container_name): if JobLifeCycle.is_done(status): # Remove the job monitoring job_uuid = event['metadata']['labels']['job_uuid'] logger.info('Stop monitoring job_uuid: %s', job_uuid) RedisJobContainers.remove_job(job_uuid) if event['status']['container_statuses'] is None: return def get_container_id(container_id): if not container_id: return None if container_id.startswith('docker://'): return container_id[len('docker://'):] return container_id for container_status in event['status']['container_statuses']: if container_status['name'] != job_container_name: continue container_id = get_container_id(container_status['container_id']) if container_id: job_uuid = event['metadata']['labels']['job_uuid'] if container_status['state']['running'] is not None: logger.info('Monitoring (container_id, job_uuid): (%s, %s)', container_id, job_uuid) RedisJobContainers.monitor(container_id=container_id, job_uuid=job_uuid) else: RedisJobContainers.remove_container(container_id=container_id) def get_label_selector(): return 'role in ({},{}),type={}'.format( settings.ROLE_LABELS_WORKER, settings.ROLE_LABELS_DASHBOARD, settings.TYPE_LABELS_RUNNER) def run(k8s_manager): for (event_object, pod_state) in ocular.monitor(k8s_manager.k8s_api, namespace=settings.K8S_NAMESPACE, job_container_names=( settings.CONTAINER_NAME_EXPERIMENT_JOB, settings.CONTAINER_NAME_PLUGIN_JOB, settings.CONTAINER_NAME_JOB, settings.CONTAINER_NAME_DOCKERIZER_JOB), label_selector=get_label_selector(), return_event=True): logger.info('-------------------------------------------\n%s\n', pod_state) if pod_state: continue status = pod_state['status'] labels = None if pod_state['details'] and pod_state['details']['labels']: labels = pod_state['details']['labels'] logger.info("Updating job container %s, %s", status, labels) experiment_job_condition = ( settings.CONTAINER_NAME_EXPERIMENT_JOB in pod_state['details']['container_statuses'] or (status and labels['app'] == settings.APP_LABELS_EXPERIMENT) ) job_condition = ( settings.CONTAINER_NAME_JOB in pod_state['details']['container_statuses'] or (status and labels['app'] == settings.APP_LABELS_JOB) ) plugin_job_condition = ( settings.CONTAINER_NAME_PLUGIN_JOB in pod_state['details']['container_statuses'] or (status and labels['app'] in (settings.APP_LABELS_TENSORBOARD, settings.APP_LABELS_NOTEBOOK)) ) dockerizer_job_condition = ( settings.CONTAINER_NAME_DOCKERIZER_JOB in pod_state['details']['container_statuses'] or (status and labels['app'] == settings.APP_LABELS_DOCKERIZER) ) if experiment_job_condition: update_job_containers(event_object, status, settings.CONTAINER_NAME_EXPERIMENT_JOB) logger.debug("Sending state to handler %s, %s", status, labels) # Handle experiment job statuses celery_app.send_task( K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_EXPERIMENT_JOB_STATUSES, kwargs={'payload': pod_state}) elif job_condition: update_job_containers(event_object, status, settings.CONTAINER_NAME_JOB) logger.debug("Sending state to handler %s, %s", status, labels) # Handle experiment job statuses celery_app.send_task( K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_JOB_STATUSES, kwargs={'payload': pod_state}) elif plugin_job_condition: logger.debug("Sending state to handler %s, 
%s", status, labels) # Handle plugin job statuses celery_app.send_task( K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_PLUGIN_JOB_STATUSES, kwargs={'payload': pod_state}) elif dockerizer_job_condition: logger.debug("Sending state to handler %s, %s", status, labels) # Handle dockerizer job statuses celery_app.send_task( K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_BUILD_JOB_STATUSES, kwargs={'payload': pod_state}) else: logger.info("Lost state %s, %s", status, pod_state)
Python
0
@@ -2013,20 +2013,16 @@ -job_ containe @@ -2577,20 +2577,21 @@ logger. -info +debug ('------ @@ -2650,32 +2650,36 @@ ate)%0A if +not pod_state:%0A
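The run() function above drives everything off ocular's monitor generator. A minimal consumption sketch using only the keyword arguments visible in the record — the `k8s_api` handle, container name, and selector are placeholders, not a verified ocular API:

```python
import ocular

for event_object, pod_state in ocular.monitor(
        k8s_api,                                   # an initialized kubernetes client
        namespace='polyaxon',
        job_container_names=('polyaxon-experiment-job',),
        label_selector='role in (worker,dashboard),type=runner',
        return_event=True):
    if not pod_state:          # the guard the diff corrects from `if pod_state`
        continue
    status = pod_state['status']
    labels = pod_state['details']['labels'] if pod_state['details'] else None
```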
1185595ea55f4b4be2f227a37b33d8142bcf92c1
bump version to 6.0.0.dev0
stellar_sdk/__version__.py
stellar_sdk/__version__.py
""" _____ _______ ______ _ _ _____ _____ _____ _ __ / ____|__ __| ____| | | | /\ | __ \ / ____| __ \| |/ / | (___ | | | |__ | | | | / \ | |__) |____| (___ | | | | ' / \___ \ | | | __| | | | | / /\ \ | _ /______\___ \| | | | < ____) | | | | |____| |____| |____ / ____ \| | \ \ ____) | |__| | . \ |_____/ |_| |______|______|______/_/ \_\_| \_\ |_____/|_____/|_|\_\ """ __title__ = "stellar-sdk" __description__ = "The Python Stellar SDK library provides APIs to build transactions and connect to Horizon." __url__ = "https://github.com/StellarCN/py-stellar-base" __issues__ = f"{__url__}/issues" __version__ = "5.0.0" __author__ = "Eno, overcat" __author_email__ = "[email protected], [email protected]" __license__ = "Apache License 2.0"
Python
0.000004
@@ -718,12 +718,17 @@ = %22 -5.0. +6.0.0.dev 0%22%0A_
e589f4347a730abc64f43b3b427ac556643c361f
Use proper mesh
graphs/hydrogen_pfem.py
graphs/hydrogen_pfem.py
R_x = { 0: [5, 7, 13, 15, 21, 25, 29], } R_y = { (1, 0): [0.1843068472879863, 0.06235603500209852, 0.011111956105256449, 0.00050366115986938409, 7.1562463805907583e-06, 4.0298526238213839e-08, 3.9956294661802616e-08], (2, 0): [0.046105675637867799, 0.013246017078074462, 0.005542485710768652, 0.00067997064168938415, 4.6063134426982399e-05, 1.0777808132356181e-06, 4.2930948795927293e-09], (3, 0): [0.01713673973745012, 0.014641367192083615, 0.0026518937870781203, 0.00028054793547133139, 0.00010971626504485688, 2.7061872750686056e-06, 1.0121065385088057e-07], }
Python
0.000032
@@ -24,25 +24,29 @@ 7, 1 -3, 15 +0, 16 , -2 1 +8 , 2 -5, 29 +2, 28, 34 %5D,%0A @@ -82,162 +82,187 @@ %5B0.1 -8430684 +70766 72 -8 79 -86 +596837 3, 0.0 -6235603500209852, 0.011111956105256449, 0.00050366115986938409, 7.1562463805907583 +43356138894337204, 0.041045532914633254, 0.0058943597765469535, 0.00018759868059159412, 1.7891829137695048 e-06, -4.0298526238213839e-08, 3.9956294661802616e-08 +7.0804719309869313e-09, 6.5346731359383625e-09 %5D,%0A @@ -285,164 +285,187 @@ 0.04 -6105675637867799, 0.013246017078074462, 0.005542485710768652, 0.00067997064168938415, 4.6063134426982399e-05, 1.0777808132356 +4544127593562716, 0.01096517172704424, 0.0089970596630603861, 0.0029545088700716898, 0.00014843931056320037, 1.455401275177 181 +7 e-06, -4.2930948795927 +9.5016940149239559e-10, 1.39585787373164 29 -3 e- +1 0 -9 %5D,%0A @@ -488,160 +488,184 @@ 0.01 -713673973745012, 0.014641367192083615, 0.0026518937870781203, 0.00028054793547133139, 0.00010971626504485688, 2.7061872750686056e-06, 1.0121065385088057 +5033623095632616, 0.009520817576687414, 0.0069807964180385249, 0.0016484608980817175, 0.00023362526206750084, 1.1715220633229384e-05, 3.2697808772796932e-06, 4.7940380232952551 e-07
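The record above is pure convergence data: R_x holds DOF counts and R_y maps (n, l) hydrogen states to eigenvalue errors that fall by several orders of magnitude. Data like this is normally inspected on a semi-log plot; a minimal sketch assuming matplotlib and the R_x/R_y dictionaries from the file:

```python
import matplotlib.pyplot as plt

for (n, l), errors in sorted(R_y.items()):
    # Guard against the two lists differing in length
    k = min(len(R_x[0]), len(errors))
    plt.semilogy(R_x[0][:k], errors[:k], marker='o',
                 label='state (n=%d, l=%d)' % (n, l))
plt.xlabel('degrees of freedom')
plt.ylabel('eigenvalue error')
plt.legend()
plt.show()
```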
9b3fee4f413c02c0b69465fe935b9ea48206191d
Add context to excerpts
conjure/controllers/jujumodel.py
conjure/controllers/jujumodel.py
from conjure.ui.views.jujumodel import (NewModelView, ExistingModelView) from conjure.controllers.deploy import DeployController from conjure.controllers.jujumodels.maas import MaasJujuModelController from conjure.controllers.jujumodels.openstack import OpenStackJujuModelController # noqa from conjure.controllers.jujumodels.local import LocalJujuModelController class JujuModelController: def __init__(self, common, jujumodels=None): self.common = common self.config = self.common['config'] self.jujumodels = jujumodels if self.jujumodels is None: self.excerpt = ( "A Juju environment is required to deploy the solution. " "Since no existing environments were found please " "select the model you wish to use.") self.view = NewModelView(self.common, self.render_model_view) else: self.excerpt = ( "It looks like there are existing Juju Models, please select " "the model you wish to deploy to.") self.view = ExistingModelView(self.common, self.jujumodels, self.deploy) def deploy(self, model): """ An existing Juju model was found load the deploy controller to start installation Arguments: model: Juju model to deploy to """ self.common['juju'].switch(model) model_info = self.common['juju'].client.Client(request="ModelInfo") if model_info['ProviderType'] in self.config['juju-models']: model = self.config['juju-models'][model_info['ProviderType']] DeployController(self.common, model).render() else: raise Exception("Unknown Provider Type found: {}".format( model_info['ProviderType'] )) def render_model_view(self, model): """ No juju model found, render the selected models view for a new installation. Arguments: modmel: name of juju model to use """ model = model.lower() if model == "maas": MaasJujuModelController(self.common).render() elif model == "openstack": OpenStackJujuModelController(self.common).render() elif model == "local": LocalJujuModelController(self.common).render() def render(self): self.common['ui'].set_header( title="Select a Juju Model", excerpt=self.excerpt ) self.common['ui'].set_body(self.view)
Python
0.999997
@@ -803,17 +803,238 @@ to use. -%22 + This would be the %22%0A %22equivalent of running %60juju bootstrap -e %3Cmodel%3E%60.%5Cn%5Cn%22%0A %22For more information type %60%7Bcmd%7D%60 at your %22%0A %22command prompt.%22.format(cmd='juju help bootstrap') )%0A%0A @@ -1310,17 +1310,231 @@ ploy to. -%22 + This would be the %22%0A %22equivalent of running %60juju list-models%60.%5Cn%5Cn%22%0A %22For more information type %60%7Bcmd%7D%60 at your %22%0A %22command prompt.%22.format(cmd='juju help controllers') )%0A @@ -1953,418 +1953,52 @@ -model_info = self.common%5B'juju'%5D.client.Client(request=%22ModelInfo%22)%0A if model_info%5B'ProviderType'%5D in self.config%5B'juju-models'%5D:%0A model = self.config%5B'juju-models'%5D%5Bmodel_info%5B'ProviderType'%5D%5D%0A DeployController(self.common, model).render()%0A else:%0A raise Exception(%22Unknown Provider Type found: %7B%7D%22.format(%0A model_info%5B'ProviderType'%5D%0A ) +DeployController(self.common, model).render( )%0A%0A
5ad7dfbea0b85fca283025e09a0dc33b2fbe97a6
switch to the cdn domain
crate_project/settings/production/base.py
crate_project/settings/production/base.py
from ..base import * LOGGING = { "version": 1, "disable_existing_loggers": True, "filters": { "require_debug_false": { "()": "django.utils.log.RequireDebugFalse", }, }, "formatters": { "simple": { "format": "%(levelname)s %(message)s" }, }, "handlers": { "console": { "level": "DEBUG", "class": "logging.StreamHandler", "formatter": "simple" }, "mail_admins": { "level": "ERROR", "filters": ["require_debug_false"], "class": "django.utils.log.AdminEmailHandler", }, "sentry": { "level": "ERROR", "class": "raven.contrib.django.handlers.SentryHandler", }, }, "loggers": { "": { "handlers": ["console", "sentry"], "propagate": True, "level": "INFO", }, "django.request": { "handlers": ["mail_admins"], "level": "ERROR", "propagate": True, }, "sentry.errors": { "level": "DEBUG", "handlers": ["console"], "propagate": False, }, } } SITE_ID = 3 EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend" SERVER_EMAIL = "[email protected]" DEFAULT_FROM_EMAIL = "[email protected]" CONTACT_EMAIL = "[email protected]" # MIDDLEWARE_CLASSES += ["privatebeta.middleware.PrivateBetaMiddleware"] PACKAGE_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage" PACKAGE_FILE_STORAGE_OPTIONS = { "bucket": "crate-production", "custom_domain": "packages.crate.io", } DEFAULT_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage" # STATICFILES_STORAGE = "storages.backends.s3boto.S3BotoStorage" AWS_STORAGE_BUCKET_NAME = "crate-media-production" AWS_S3_CUSTOM_DOMAIN = "media.crate.io" # PRIVATE_BETA_ALLOWED_URLS = [ # "/account/login/", # "/account/signup/", # "/account/confirm_email/", # ] # PRIVATE_BETA_ALLOWED_HOSTS = [ # "simple.crate.io", # ] INTERCOM_APP_ID = "79qt2qu3" SIMPLE_API_URL = "http://simple.crate.io/"
Python
0.000001
@@ -1646,19 +1646,24 @@ es.crate -.io +-cdn.com %22,%0A%7D%0A%0ADE @@ -1872,27 +1872,32 @@ %22media.crate -.io +-cdn.com %22%0A%0A# PRIVATE
01be6bb650c7077cd5b3b7edacfa1b325c798def
Set email addresses
deployment/templates/local_settings.py
deployment/templates/local_settings.py
from opendebates.settings import * DEBUG = False # logging settings #LOGGING['filters']['static_fields']['fields']['deployment'] = '{{ deployment_tag }}' #LOGGING['filters']['static_fields']['fields']['environment'] = '{{ environment }}' #LOGGING['filters']['static_fields']['fields']['role'] = '{{ current_role }}' # AWS_STORAGE_BUCKET_NAME = '{{ staticfiles_s3_bucket }}' # AWS_ACCESS_KEY_ID = 'AKIAI3XJKABCOBWLX33A' # AWS_SECRET_ACCESS_KEY = "{{ s3_secret }}" SECRET_KEY = "{{ secret_key }}" # Tell django-storages that when coming up with the URL for an item in S3 storage, keep # it simple - just use this domain plus the path. (If this isn't set, things get complicated). # This controls how the `static` template tag from `staticfiles` gets expanded, if you're using it. # We also use it in the next setting. # AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME # This is used by the `static` template tag from `static`, if you're using that. Or if anything else # refers directly to STATIC_URL. So it's safest to always set it. #STATIC_URL = "https://%s/" % AWS_S3_CUSTOM_DOMAIN # (STATIC_URL is set in settings.py to /static/ which is fine without S3) # Tell the staticfiles app to use S3Boto storage when writing the collected static files (when # you run `collectstatic`). #STATICFILES_STORAGE = 'opendebates.storage.S3PipelineCachedStorage' # Auto-create the bucket if it doesn't exist # AWS_AUTO_CREATE_BUCKET = True # AWS_HEADERS = { # see http://developer.yahoo.com/performance/rules.html#expires # 'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT', # 'Cache-Control': 'max-age=94608000', # } # Having AWS_PRELOAD_META turned on breaks django-storages/s3 - # saving a new file doesn't update the metadata and exists() returns False #AWS_PRELOAD_METADATA = True # database settings DATABASES = { {% for server in all_databases %} '{{ server.database_key }}': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '{{ server.database_local_name }}', 'USER': '{{ database_user }}', 'PASSWORD': '{{ database_password }}', 'HOST': 'localhost', 'PORT': '{{ pgbouncer_port }}', },{% endfor %} } # django-balancer settings DATABASE_POOL = { {% for server in slave_databases %} '{{ server.database_key }}': 1,{% endfor %} } MASTER_DATABASE = '{{ master_database.database_key }}' DATABASE_ROUTERS = [ 'opendebates.router.DBRouter', ] # media roots MEDIA_ROOT = "{{ media_root }}" STATIC_ROOT = "{{ static_root }}" # email settings EMAIL_HOST_PASSWORD = '{{ smtp_password }}' EMAIL_SUBJECT_PREFIX = '[{{ deployment_tag }} {{ environment }}] ' # Redis DB map: # 0 = cache # 1 = unused (formerly celery task queue) # 2 = celery results # 3 = session store # 4-16 = (free) # Cache settings CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': '{{ cache_server.internal_ip }}:11211', 'VERSION': '{{ current_changeset }}', }, 'session': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': '{{ cache_server.internal_ip }}:6379', 'OPTIONS': { 'DB': 3, }, }, } # Task queue settings # see https://github.com/ask/celery/issues/436 BROKER_URL = "amqp://{{ deploy_user }}:{{ broker_password }}@{{ cache_server.internal_ip }}:5672/{{ vhost }}" BROKER_CONNECTION_TIMEOUT = 4 BROKER_POOL_LIMIT = 10 CELERY_RESULT_BACKEND = "redis://{{ cache_server.internal_ip }}:6379/2" # Session settings SESSION_ENGINE = 'django.contrib.sessions.backends.cache' SESSION_CACHE_ALIAS = 'session' ALLOWED_HOSTS = [{% for host in allowed_hosts %}'{{ host }}', {% endfor %}] LOGGING = { 'version': 1, 
'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'formatters': { 'basic': { 'format': '%(asctime)s %(name)-20s %(levelname)-8s %(message)s', }, }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' }, 'console': { 'level': 'INFO', 'class': 'logging.StreamHandler', 'formatter': 'basic', }, }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, 'django.security': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, }, 'root': { 'handlers': ['console', ], 'level': 'INFO', }, }
Python
0
@@ -2537,16 +2537,17 @@ ettings%0A +# EMAIL_HO @@ -2648,16 +2648,188 @@ nt %7D%7D%5D ' +%0AADMINS = %5B%0A ('Caktus Opendebates team', '[email protected]'),%0A%5D%0ADEFAULT_FROM_EMAIL = '[email protected]'%0ASERVER_EMAIL = DEFAULT_FROM_EMAIL %0A%0A# Redi
f4f39057cbbfa4daf66f82061e57101b88d88d05
Make csviter work on python 3.7
scrapy/utils/iterators.py
scrapy/utils/iterators.py
import re import csv import logging try: from cStringIO import StringIO as BytesIO except ImportError: from io import BytesIO from io import StringIO import six from scrapy.http import TextResponse, Response from scrapy.selector import Selector from scrapy.utils.python import re_rsearch, to_unicode logger = logging.getLogger(__name__) def xmliter(obj, nodename): """Return a iterator of Selector's over all nodes of a XML document, given the name of the node to iterate. Useful for parsing XML feeds. obj can be: - a Response object - a unicode string - a string encoded as utf-8 """ nodename_patt = re.escape(nodename) HEADER_START_RE = re.compile(r'^(.*?)<\s*%s(?:\s|>)' % nodename_patt, re.S) HEADER_END_RE = re.compile(r'<\s*/%s\s*>' % nodename_patt, re.S) text = _body_or_str(obj) header_start = re.search(HEADER_START_RE, text) header_start = header_start.group(1).strip() if header_start else '' header_end = re_rsearch(HEADER_END_RE, text) header_end = text[header_end[1]:].strip() if header_end else '' r = re.compile(r'<%(np)s[\s>].*?</%(np)s>' % {'np': nodename_patt}, re.DOTALL) for match in r.finditer(text): nodetext = header_start + match.group() + header_end yield Selector(text=nodetext, type='xml').xpath('//' + nodename)[0] def xmliter_lxml(obj, nodename, namespace=None, prefix='x'): from lxml import etree reader = _StreamReader(obj) tag = '{%s}%s' % (namespace, nodename) if namespace else nodename iterable = etree.iterparse(reader, tag=tag, encoding=reader.encoding) selxpath = '//' + ('%s:%s' % (prefix, nodename) if namespace else nodename) for _, node in iterable: nodetext = etree.tostring(node, encoding='unicode') node.clear() xs = Selector(text=nodetext, type='xml') if namespace: xs.register_namespace(prefix, namespace) yield xs.xpath(selxpath)[0] class _StreamReader(object): def __init__(self, obj): self._ptr = 0 if isinstance(obj, Response): self._text, self.encoding = obj.body, obj.encoding else: self._text, self.encoding = obj, 'utf-8' self._is_unicode = isinstance(self._text, six.text_type) def read(self, n=65535): self.read = self._read_unicode if self._is_unicode else self._read_string return self.read(n).lstrip() def _read_string(self, n=65535): s, e = self._ptr, self._ptr + n self._ptr = e return self._text[s:e] def _read_unicode(self, n=65535): s, e = self._ptr, self._ptr + n self._ptr = e return self._text[s:e].encode('utf-8') def csviter(obj, delimiter=None, headers=None, encoding=None, quotechar=None): """ Returns an iterator of dictionaries from the given csv object obj can be: - a Response object - a unicode string - a string encoded as utf-8 delimiter is the character used to separate fields on the given obj. headers is an iterable that when provided offers the keys for the returned dictionaries, if not the first row is used. quotechar is the character used to enclosure fields on the given obj. 
""" encoding = obj.encoding if isinstance(obj, TextResponse) else encoding or 'utf-8' def _getrow(csv_r): return [to_unicode(field, encoding) for field in next(csv_r)] # Python 3 csv reader input object needs to return strings if six.PY3: lines = StringIO(_body_or_str(obj, unicode=True)) else: lines = BytesIO(_body_or_str(obj, unicode=False)) kwargs = {} if delimiter: kwargs["delimiter"] = delimiter if quotechar: kwargs["quotechar"] = quotechar csv_r = csv.reader(lines, **kwargs) if not headers: headers = _getrow(csv_r) while True: row = _getrow(csv_r) if len(row) != len(headers): logger.warning("ignoring row %(csvlnum)d (length: %(csvrow)d, " "should be: %(csvheader)d)", {'csvlnum': csv_r.line_num, 'csvrow': len(row), 'csvheader': len(headers)}) continue else: yield dict(zip(headers, row)) def _body_or_str(obj, unicode=True): expected_types = (Response, six.text_type, six.binary_type) assert isinstance(obj, expected_types), \ "obj must be %s, not %s" % ( " or ".join(t.__name__ for t in expected_types), type(obj).__name__) if isinstance(obj, Response): if not unicode: return obj.body elif isinstance(obj, TextResponse): return obj.text else: return obj.body.decode('utf-8') elif isinstance(obj, six.text_type): return obj if unicode else obj.encode('utf-8') else: return obj.decode('utf-8') if unicode else obj
Python
0
@@ -3320,24 +3320,25 @@ 'utf-8'%0A +%0A def _getrow( @@ -3329,29 +3329,35 @@ def -_getrow(csv_r +row_to_unicode(row_ ):%0A @@ -3412,19 +3412,12 @@ in -next(csv_r) +row_ %5D%0A%0A @@ -3810,77 +3810,185 @@ -headers = _getrow(csv_r)%0A%0A while True:%0A row = _getrow(csv_r +try:%0A row = next(csv_r)%0A except StopIteration:%0A return%0A headers = row_to_unicode(row)%0A%0A for row in csv_r:%0A row = row_to_unicode(row )%0A
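The diff is a PEP 479 fix: from Python 3.7, a StopIteration that escapes inside a generator — here `next(csv_r)` on exhausted input — is re-raised as RuntimeError instead of silently ending iteration. A self-contained sketch of the failure mode and the repair, independent of Scrapy:

```python
import csv
from io import StringIO

def broken(lines):
    csv_r = csv.reader(lines)
    headers = next(csv_r)      # empty input: StopIteration -> RuntimeError on 3.7+
    while True:
        yield dict(zip(headers, next(csv_r)))

def fixed(lines):
    csv_r = csv.reader(lines)
    try:
        headers = next(csv_r)
    except StopIteration:
        return                 # end the generator explicitly (PEP 479)
    for row in csv_r:          # plain iteration instead of next() in a loop
        yield dict(zip(headers, row))

print(list(fixed(StringIO(""))))  # [] -- list(broken(StringIO(""))) raises RuntimeError
```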
61cf2cb400e5ad80923570824e50dd40828ce434
make scope_server status work again
scope/scope_server.py
scope/scope_server.py
# The MIT License (MIT) # # Copyright (c) 2014-2015 WUSTL ZPLAB # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # # Authors: Zach Pincus import zmq import time import threading import json import pathlib from .util import logging from .util import base_daemon from .config import scope_configuration logger = logging.get_logger(__name__) class ScopeServer(base_daemon.Runner): def __init__(self): self.base_dir = pathlib.Path(scope_configuration.CONFIG_DIR) self.log_dir = self.base_dir / 'scope_server_logs' self.arg_file = self.base_dir / 'server_options.json' super().__init__(name='Scope Server', pidfile_path=self.base_dir / 'scope_server.pid') def start(self): with self.arg_file.open('r') as f: args = json.load(f) self.config = scope_configuration.get_config() self.host = self.config.server.PUBLICHOST if args['public'] else self.config.server.LOCALHOST super().start(self.log_dir, args['verbose']) # function is to be run only when NOT running as a daemon def status(self): is_running = self.is_running() if not is_running: print('Microscope server is NOT running.') else: print('Microscope server is running (PID {}).'.format(self.get_pid())) client_tester = ScopeClientTester() connected = lambda: client_tester.connected # wait for connection to be established if _wait_for_it(connected, 'Establishing connection to scope server'): print('Microscope server is responding to new connections.') else: raise RuntimeError('Could not communicate with microscope server') def stop(self, force=False): self.assert_daemon() pid = self.get_pid() if force: self.kill() # send SIGKILL -- immeiate exit else: self.terminate() # send SIGTERM -- allow for cleanup exited = lambda: not base_daemon.is_valid_pid(pid) # wait for pid to become invalid (i.e. process no longer is running) if _wait_for_it(exited, 'Waiting for server to terminate'): print('Microscope server is stopped.') else: raise RuntimeError('Could not terminate microscope server') # overrides from base_daemon.Runner to implement server behavior def initialize_daemon(self): # do scope imports here so any at-import debug logging gets properly recorded from . 
import scope from .simple_rpc import rpc_server from .simple_rpc import property_server from .simple_rpc import heartbeat from .util import transfer_ism_buffer addresses = scope_configuration.get_addresses(self.host) self.context = zmq.Context() self.heartbeat_server = heartbeat.ZMQServer(addresses['heartbeat'], interval_sec=self.config.server.HEARTBEAT_INTERVAL_SEC, context=self.context) property_update_server = property_server.ZMQServer(addresses['property'], context=self.context) scope_controller = scope.Scope(property_update_server) scope_controller._sleep = time.sleep # way to provide a no-op RPC call for testing... image_transfer_namespace = Namespace() # add transfer_ism_buffer as hidden elements of the namespace, which RPC clients can use for seamless buffer sharing image_transfer_namespace._transfer_ism_buffer = transfer_ism_buffer if hasattr(scope_controller, 'camera'): image_transfer_namespace.latest_image=scope_controller.camera.latest_image image_transfer_server = rpc_server.BackgroundBaseZMQServer(image_transfer_namespace, addresses['image_transfer_rpc'], context=self.context) interrupter = rpc_server.ZMQInterrupter(addresses['interrupt'], context=self.context) self.scope_server = rpc_server.ZMQServer(scope_controller, interrupter, addresses['rpc'], context=self.context) logger.info('Scope Server Ready (Listening on {})', self.host) def run_daemon(self): try: self.scope_server.run() finally: self.context.term() class ScopeClientTester(threading.Thread): def __init__(self): self.connected = False super().__init__(daemon=True) self.start() def run(self): from . import scope_client scope_client.client_main() self.connected = True class Namespace: pass def _wait_for_it(wait_condition, message, wait_time=15, output_interval=0.5, sleep_time=0.1): wait_iters = int(wait_time // sleep_time) output_iters = int(output_interval // sleep_time) condition_true = False print('(' + message + '...', end='', flush=True) for i in range(wait_iters): condition_true = wait_condition() if condition_true: break if i % output_iters == 0: print('.', end='', flush=True) time.sleep(sleep_time) print(')') return condition_true
Python
0
@@ -2318,43 +2318,64 @@ -client_tester = ScopeClientTester() +from . import scope_client%0A connected = False %0A @@ -2375,32 +2375,35 @@ lse%0A +is_ connected = lamb @@ -2411,86 +2411,296 @@ a: c -lient_tester.connected # wait for connection to be established%0A if +onnected%0A # slightly ugly hack: print status messages from bg thread while waiting%0A # in main thread for connection to be made or time out.%0A # (This is because we can't construct scope_client in background thread.)%0A t = threading.Thread(target= _wai @@ -2707,17 +2707,27 @@ t_for_it -( +, args=(is_ connecte @@ -2775,104 +2775,347 @@ er') -:%0A print('Microscope server is responding to new connections.')%0A else: +, daemon=True)%0A t.start()%0A try:%0A scope_client.client_main()%0A connected = True%0A t.join()%0A print('Microscope server is responding to new connections.')%0A except:%0A connected = True # required to stop the thread%0A t.join() %0A @@ -5669,286 +5669,8 @@ ()%0A%0A -class ScopeClientTester(threading.Thread):%0A def __init__(self):%0A self.connected = False%0A super().__init__(daemon=True)%0A self.start()%0A%0A def run(self):%0A from . import scope_client%0A scope_client.client_main()%0A self.connected = True%0A%0A clas
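The refactored status() works around the constraint the diff comments on: `scope_client.client_main()` apparently has to run on the main thread, so the progress spinner is pushed into a daemon thread instead — the "slightly ugly hack". The pattern in isolation, as a sketch only; the flag is a plain module-level global, which suffices here because it is only ever flipped once:

```python
import threading
import time

connected = False

def spinner():
    # Stands in for _wait_for_it: print dots until the main thread connects
    print('(Establishing connection', end='', flush=True)
    while not connected:
        print('.', end='', flush=True)
        time.sleep(0.1)
    print(')')

t = threading.Thread(target=spinner, daemon=True)
t.start()
try:
    time.sleep(0.5)    # stands in for scope_client.client_main()
finally:
    connected = True   # set even on failure so the thread can exit (as in the diff)
    t.join()
```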
59833d6a9ef216ae78c4116e6fae8441f1ba5d9c
remove redundant copy and astype
tests/chainer_tests/functions_tests/activation_tests/test_sigmoid.py
tests/chainer_tests/functions_tests/activation_tests/test_sigmoid.py
import unittest import numpy import chainer from chainer.backends import cuda from chainer import functions from chainer import testing from chainer.testing import attr def _sigmoid(x): half = x.dtype.type(0.5) return numpy.tanh(x * half) * half + half @testing.parameterize(*testing.product({ 'shape': [(3, 2), ()], 'dtype': [numpy.float16, numpy.float32, numpy.float64], 'contiguous': [None, 'C'], })) @testing.fix_random() @testing.inject_backend_tests( None, # CPU tests [ {}, {'use_ideep': True}, ] # GPU tests + testing.product({ 'use_cuda': [True], 'use_cudnn': ['never', 'always'], 'cuda_device': [0, 1], }) # ChainerX tests + testing.product({ 'use_chainerx': [True], 'chainerx_device': ['native:0', 'cuda:0', 'cuda:1'], }) ) class TestSigmoid(testing.FunctionTestCase): def setUp(self): if self.dtype == numpy.float16: self.check_forward_options = {'atol': 1e-4, 'rtol': 1e-3} self.check_backward_options = {'atol': 1e-2, 'rtol': 5e-2} self.check_double_backward_options = {'atol': 1e-2, 'rtol': 5e-2} def generate_inputs(self): x = numpy.random.uniform(-.5, .5, self.shape).astype(self.dtype) return x, def forward_expected(self, inputs): x, = inputs y = _sigmoid(x.copy()).astype(self.dtype) if numpy.isscalar(y): y = numpy.asarray(y) return y, def forward(self, inputs, device): x, = inputs return functions.sigmoid(x), @testing.parameterize(*testing.product({ 'use_cudnn': ['always', 'auto', 'never'], 'dtype': [numpy.float16, numpy.float32, numpy.float64], })) @attr.cudnn class TestSigmoidCudnnCall(unittest.TestCase): def setUp(self): self.x = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype) self.gy = cuda.cupy.random.uniform(-1, 1, (2, 3)).astype(self.dtype) with chainer.using_config('use_cudnn', self.use_cudnn): self.expect = chainer.should_use_cudnn('==always') def forward(self): x = chainer.Variable(self.x) return functions.sigmoid(x) def test_call_cudnn_forward(self): default_func = cuda.cupy.cudnn.activation_forward with chainer.using_config('use_cudnn', self.use_cudnn): with testing.patch('cupy.cudnn.activation_forward') as func: func.side_effect = default_func self.forward() self.assertEqual(func.called, self.expect) def test_call_cudnn_backward(self): with chainer.using_config('use_cudnn', self.use_cudnn): y = self.forward() y.grad = self.gy default_func = cuda.cupy.cudnn.activation_backward with testing.patch('cupy.cudnn.activation_backward') as func: func.side_effect = default_func y.backward() self.assertEqual(func.called, self.expect) testing.run_module(__name__, __file__)
Python
0.999999
@@ -1386,34 +1386,8 @@ id(x -.copy()).astype(self.dtype )%0A
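A side note on the `_sigmoid` reference in the record: computing the logistic function through tanh keeps the float16 case stable, since exp(-x) overflows half precision for x below roughly -11 while tanh merely saturates. The identity being used:

```latex
\sigma(x) = \frac{1}{1 + e^{-x}}
          = \frac{e^{x/2}}{e^{x/2} + e^{-x/2}}
          = \tfrac{1}{2}\left(1 + \tanh\tfrac{x}{2}\right)
```

which is exactly `numpy.tanh(x * half) * half + half` with `half = 0.5` in the input dtype.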
bbb9d7e65fdff189562d1e2a5cdf4a302e562f3e
Use parameterized test
tests/chainer_tests/functions_tests/math_tests/test_trigonometric.py
tests/chainer_tests/functions_tests/math_tests/test_trigonometric.py
import unittest import numpy import chainer from chainer import cuda import chainer.functions as F from chainer import gradient_check from chainer import testing from chainer.testing import attr from chainer.testing import condition class UnaryFunctionsTestBase(object): def make_data(self): raise NotImplementedError def setUp(self): self.x, self.gy = self.make_data() def check_forward(self, op, op_np, x_data): x = chainer.Variable(x_data) y = op(x) testing.assert_allclose( op_np(self.x), y.data, atol=1e-7, rtol=1e-7) def check_forward_cpu(self, op, op_np): self.check_forward(op, op_np, self.x) def check_forward_gpu(self, op, op_np): self.check_forward(op, op_np, cuda.to_gpu(self.x)) @condition.retry(3) def test_cos_forward_cpu(self): self.check_forward_cpu(F.cos, numpy.cos) @condition.retry(3) def test_sin_forward_cpu(self): self.check_forward_cpu(F.sin, numpy.sin) @condition.retry(3) def test_tan_forward_cpu(self): self.check_forward_cpu(F.tan, numpy.tan) @attr.gpu @condition.retry(3) def test_cos_forward_gpu(self): self.check_forward_gpu(F.cos, numpy.cos) @attr.gpu @condition.retry(3) def test_sin_forward_gpu(self): self.check_forward_gpu(F.sin, numpy.sin) @attr.gpu @condition.retry(3) def test_tan_forward_gpu(self): self.check_forward_gpu(F.tan, numpy.tan) def check_backward(self, op, x_data, y_grad): gradient_check.check_backward(op, x_data, y_grad) def check_backward_cpu(self, op): self.check_backward(op, self.x, self.gy) def check_backward_gpu(self, op): self.check_backward(op, cuda.to_gpu(self.x), cuda.to_gpu(self.gy)) @condition.retry(3) def test_cos_backward_cpu(self): self.check_backward_cpu(F.cos) @condition.retry(3) def test_sin_backward_cpu(self): self.check_backward_cpu(F.sin) @condition.retry(3) def test_tan_backward_cpu(self): self.check_backward_cpu(F.tan) @attr.gpu @condition.retry(3) def test_cos_backward_gpu(self): self.check_backward_gpu(F.cos) @attr.gpu @condition.retry(3) def test_sin_backward_gpu(self): self.check_backward_gpu(F.sin) @attr.gpu @condition.retry(3) def test_tan_backward_gpu(self): self.check_backward_gpu(F.tan) def test_sin(self): self.assertEqual(F.Sin().label, 'sin') def test_cos(self): self.assertEqual(F.Cos().label, 'cos') def test_tan(self): self.assertEqual(F.Tan().label, 'tan') class TestUnaryFunctionsSimple(UnaryFunctionsTestBase, unittest.TestCase): def make_data(self): x = numpy.random.uniform(.5, 1, (3, 2)).astype(numpy.float32) gy = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32) return x, gy class TestUnaryFunctionsZeroDimension(UnaryFunctionsTestBase, unittest.TestCase): def make_data(self): x = numpy.random.uniform(.5, 1, ()).astype(numpy.float32) gy = numpy.random.uniform(-1, 1, ()).astype(numpy.float32) return x, gy testing.run_module(__name__, __file__)
Python
0.000001
@@ -230,16 +230,88 @@ ition%0A%0A%0A [email protected](*testing.product(%7B%0A 'shape': %5B(3, 2), ()%5D,%0A%7D))%0A class Un @@ -330,19 +330,26 @@ Test -Base(object +(unittest.TestCase ):%0A%0A @@ -447,35 +447,151 @@ lf.x -, self.gy = self.make_data( + = numpy.random.uniform(.5, 1, self.shape).astype(numpy.float32)%0A self.gy = numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32 )%0A%0A @@ -2860,575 +2860,8 @@ )%0A%0A%0A -class TestUnaryFunctionsSimple(UnaryFunctionsTestBase, unittest.TestCase):%0A%0A def make_data(self):%0A x = numpy.random.uniform(.5, 1, (3, 2)).astype(numpy.float32)%0A gy = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)%0A return x, gy%0A%0A%0Aclass TestUnaryFunctionsZeroDimension(UnaryFunctionsTestBase,%0A unittest.TestCase):%0A%0A def make_data(self):%0A x = numpy.random.uniform(.5, 1, ()).astype(numpy.float32)%0A gy = numpy.random.uniform(-1, 1, ()).astype(numpy.float32)%0A return x, gy%0A%0A%0A test
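The refactor collapses two hand-written base-class/subclass pairs into chainer's parameterize decorator, which stamps out one test class per entry of the parameter cross product and assigns each key as an instance attribute. A minimal sketch with a made-up grid:

```python
import unittest

import numpy
from chainer import testing

@testing.parameterize(*testing.product({
    'shape': [(3, 2), ()],       # each combination becomes its own test class
    'dtype': [numpy.float32],
}))
class TestExample(unittest.TestCase):

    def test_shape(self):
        x = numpy.zeros(self.shape, dtype=self.dtype)
        self.assertEqual(x.shape, self.shape)

testing.run_module(__name__, __file__)
```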
f265c31a8500e5f66096bc157266c4400e68cb2e
Version bump
rest_witchcraft/__version__.py
rest_witchcraft/__version__.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals __author__ = 'Serkan Hosca' __author_email__ = '[email protected]' __version__ = '0.2.3' __description__ = 'SQLAlchemy specific things for django-rest-framework'
Python
0.000001
@@ -178,11 +178,11 @@ '0. -2.3 +3.0 '%0A__
a985f0ac4ef31e534e96319da9980745f5169252
Disable some Pylint errors
tests/integration/modules/pillar.py
tests/integration/modules/pillar.py
# -*- coding: utf-8 -*- # Import Python Libs from distutils.version import LooseVersion # Import Salt Testing libs from salttesting import skipIf from salttesting.helpers import ( ensure_in_syspath, requires_network ) ensure_in_syspath('../../') # Import salt libs import integration GIT_PYTHON = '0.3.2' HAS_GIT_PYTHON = False try: import git if LooseVersion(git.__version__) >= LooseVersion(GIT_PYTHON): HAS_GIT_PYTHON = True except ImportError: pass class PillarModuleTest(integration.ModuleCase): ''' Validate the pillar module ''' def test_data(self): ''' pillar.data ''' grains = self.run_function('grains.items') pillar = self.run_function('pillar.data') self.assertEqual(pillar['os'], grains['os']) self.assertEqual(pillar['monty'], 'python') if grains['os'] == 'Fedora': self.assertEqual(pillar['class'], 'redhat') else: self.assertEqual(pillar['class'], 'other') @requires_network() @skipIf(HAS_GIT_PYTHON is False, 'GitPython must be installed and >= version {0}'.format(GIT_PYTHON)) def test_two_ext_pillar_sources_override(self): ''' https://github.com/saltstack/salt/issues/12647 ''' self.assertEqual( self.run_function('pillar.data')['info'], 'bar' ) @requires_network() @skipIf(HAS_GIT_PYTHON is False, 'GitPython must be installed and >= version {0}'.format(GIT_PYTHON)) def test_two_ext_pillar_sources(self): ''' https://github.com/saltstack/salt/issues/12647 ''' self.assertEqual( self.run_function('pillar.data')['abc'], 'def' ) def test_issue_5449_report_actual_file_roots_in_pillar(self): ''' pillar['master']['file_roots'] is overwritten by the master in order to use the fileclient interface to read the pillar files. We should restore the actual file_roots when we send the pillar back to the minion. ''' self.assertIn( integration.TMP_STATE_TREE, self.run_function('pillar.data')['master']['file_roots']['base'] ) def test_ext_cmd_yaml(self): ''' pillar.data for ext_pillar cmd.yaml ''' self.assertEqual( self.run_function('pillar.data')['ext_spam'], 'eggs' ) def test_issue_5951_actual_file_roots_in_opts(self): self.assertIn( integration.TMP_STATE_TREE, self.run_function('pillar.data')['test_ext_pillar_opts']['file_roots']['base'] ) def no_test_issue_10408_ext_pillar_gitfs_url_update(self): import os from salt.pillar import git_pillar import git original_url = 'git+ssh://[email protected]/home/git/test' changed_url = 'git+ssh://[email protected]/home/git/test' rp_location = os.path.join(self.master_opts['cachedir'], 'pillar_gitfs/0/.git') opts = { 'ext_pillar': [{'git': 'master {0}'.format(original_url)}], 'cachedir': self.master_opts['cachedir'], } git_pillar.GitPillar('master', original_url, opts) opts['ext_pillar'] = [{'git': 'master {0}'.format(changed_url)}] grepo = git_pillar.GitPillar('master', changed_url, opts) repo = git.Repo(rp_location) self.assertEqual(grepo.rp_location, repo.remotes.origin.url) if __name__ == '__main__': from integration import run_tests run_tests(PillarModuleTest)
Python
0.000001
@@ -81,16 +81,66 @@ eVersion + # pylint: disable=import-error,no-name-in-module %0A%0A# Impo
abb72e478fd0bdf5929111d3bc782b94e98819ab
Revert b00b3c4 (but keep addition to allow_failure list in test_valid_docs())
tests/integration/modules/sysmod.py
tests/integration/modules/sysmod.py
# -*- coding: utf-8 -*- # Import python libs from __future__ import absolute_import import re # Import Salt Testing libs from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import salt libs import integration # Import 3rd-party libs import salt.ext.six as six class SysModuleTest(integration.ModuleCase): ''' Validate the sys module ''' def test_list_functions(self): ''' sys.list_functions ''' # Get all functions funcs = self.run_function('sys.list_functions') self.assertIn('hosts.list_hosts', funcs) self.assertIn('pkg.install', funcs) # Just pkg funcs = self.run_function('sys.list_functions', ('pkg.',)) self.assertNotIn('sys.doc', funcs) self.assertIn('pkg.install', funcs) # Just sys funcs = self.run_function('sys.list_functions', ('sys.',)) self.assertNotIn('pkg.install', funcs) self.assertIn('sys.doc', funcs) # Starting with sys funcs = self.run_function('sys.list_functions', ('sys',)) self.assertNotIn('sysctl.get', funcs) self.assertIn('sys.doc', funcs) def test_list_modules(self): ''' sys.list_modules ''' mods = self.run_function('sys.list_modules') self.assertTrue('hosts' in mods) self.assertTrue('pkg' in mods) def test_list_modules_with_arg_glob(self): ''' sys.list_modules u* Tests getting the list of modules with 'u*', and looking for the "user" module ''' mods = self.run_function('sys.list_modules', ['u*']) self.assertNotIn('bigip', mods) self.assertIn('user', mods) def test_list_modules_with_arg_exact_match(self): ''' sys.list_modules user Tests getting the list of modules looking for the "user" module with an exact match of 'user' being passed at the CLI instead of something with '*'. ''' mods = self.run_function('sys.list_modules', ['user']) self.assertEqual(mods, ['user']) def test_valid_docs(self): ''' Make sure no functions are exposed that don't have valid docstrings ''' docs = self.run_function('sys.doc') nodoc = set() noexample = set() allow_failure = ( 'cp.recv', 'libcloud_dns.get_driver', 'lxc.run_cmd', 'ipset.long_range', 'pkg.expand_repo_def', 'runtests_decorators.depends', 'runtests_decorators.depends_will_fallback', 'runtests_decorators.missing_depends', 'runtests_decorators.missing_depends_will_fallback', 'swift.head', 'glance.warn_until', 'yumpkg.expand_repo_def', 'yumpkg5.expand_repo_def', 'container_resource.run', 'nspawn.stop', 'nspawn.restart', 'lowpkg.bin_pkg_info', 'state.apply', 'cmd.win_runas', 'status.list2cmdline' ) for fun in docs: if fun.startswith('runtests_helpers'): continue if fun in allow_failure: continue if not isinstance(docs[fun], six.string_types): nodoc.add(fun) elif not re.search(r'([E|e]xample(?:s)?)+(?:.*)::?', docs[fun]): noexample.add(fun) if not nodoc and not noexample: return raise AssertionError( 'There are some functions which do not have a docstring or do not ' 'have an example:\nNo docstring:\n{0}\nNo example:\n{1}\n'.format( '\n'.join([' - {0}'.format(f) for f in sorted(nodoc)]), '\n'.join([' - {0}'.format(f) for f in sorted(noexample)]), ) ) if __name__ == '__main__': from integration import run_tests run_tests(SysModuleTest)
Python
0
@@ -380,1737 +380,8 @@ '''%0A - def test_list_functions(self):%0A '''%0A sys.list_functions%0A '''%0A # Get all functions%0A funcs = self.run_function('sys.list_functions')%0A self.assertIn('hosts.list_hosts', funcs)%0A self.assertIn('pkg.install', funcs)%0A%0A # Just pkg%0A funcs = self.run_function('sys.list_functions', ('pkg.',))%0A self.assertNotIn('sys.doc', funcs)%0A self.assertIn('pkg.install', funcs)%0A%0A # Just sys%0A funcs = self.run_function('sys.list_functions', ('sys.',))%0A self.assertNotIn('pkg.install', funcs)%0A self.assertIn('sys.doc', funcs)%0A%0A # Starting with sys%0A funcs = self.run_function('sys.list_functions', ('sys',))%0A self.assertNotIn('sysctl.get', funcs)%0A self.assertIn('sys.doc', funcs)%0A%0A def test_list_modules(self):%0A '''%0A sys.list_modules%0A '''%0A mods = self.run_function('sys.list_modules')%0A self.assertTrue('hosts' in mods)%0A self.assertTrue('pkg' in mods)%0A%0A def test_list_modules_with_arg_glob(self):%0A '''%0A sys.list_modules u*%0A%0A Tests getting the list of modules with 'u*', and looking for the%0A %22user%22 module%0A '''%0A mods = self.run_function('sys.list_modules', %5B'u*'%5D)%0A self.assertNotIn('bigip', mods)%0A self.assertIn('user', mods)%0A%0A def test_list_modules_with_arg_exact_match(self):%0A '''%0A sys.list_modules user%0A%0A Tests getting the list of modules looking for the %22user%22 module with%0A an exact match of 'user' being passed at the CLI instead of something%0A with '*'.%0A '''%0A mods = self.run_function('sys.list_modules', %5B'user'%5D)%0A self.assertEqual(mods, %5B'user'%5D)%0A%0A
c98ef2e8b64b7daa94c0e883f71813a3e3226a78
set current day to none if there is none
riskgame/context_processors.py
riskgame/context_processors.py
# -*- coding: utf-8 from riskgame.models import Player, TeamPlayer, EpisodeDay, Game def player(request): returnDict = {} if request.user.is_authenticated(): try: currentPlayer = Player.objects.get(user=request.user) except Player.DoesNotExist: currentPlayer = Player.objects.create(user=request.user) returnDict['current_player'] = currentPlayer try: returnDict['current_teamplayer'] = TeamPlayer.objects.get(player=currentPlayer) except TeamPlayer.DoesNotExist: pass try: returnDict['current_day'] = EpisodeDay.objects.get(current=True) except EpisodeDay.DoesNotExist: pass try: returnDict['current_game'] = Game.objects.get_latest_game() except Game.DoesNotExist: pass # try: # game = Game.objects.get_latest_game() # returnDict['game'] = game # except: # pass return returnDict
Python
0.020175
@@ -701,36 +701,64 @@ st:%0A -pass +returnDict%5B'current_day'%5D = None %0A%0A try:%0A
734c7c9ad2d1290a202e48ebb5535a0bc371bd75
fix iarc constant test
mkt/constants/tests/test_ratingsbodies.py
mkt/constants/tests/test_ratingsbodies.py
from contextlib import contextmanager from nose.tools import eq_ from tower import activate import amo.tests import mkt.constants.ratingsbodies as ratingsbodies class TestRatingsBodies(amo.tests.TestCase): def test_all_ratings_waffle_off(self): ratings = ratingsbodies.ALL_RATINGS() # Assert only CLASSIND and GENERIC ratings are present. assert ratingsbodies.CLASSIND_L in ratings assert ratingsbodies.GENERIC_3 in ratings assert ratingsbodies.ESRB_E not in ratings assert ratingsbodies.PEGI_3 not in ratings assert ratingsbodies.USK_0 not in ratings def test_all_ratings_waffle_on(self): self.create_switch('iarc') ratings = ratingsbodies.ALL_RATINGS() # Assert all ratings bodies are present. assert ratingsbodies.CLASSIND_L in ratings assert ratingsbodies.GENERIC_3 in ratings assert ratingsbodies.ESRB_E in ratings assert ratingsbodies.PEGI_3 in ratings assert ratingsbodies.USK_0 in ratings def test_ratings_by_name_waffle(self): without_waffle = ratingsbodies.RATINGS_BY_NAME() self.create_switch('iarc', db=True) with_waffle = ratingsbodies.RATINGS_BY_NAME() # Test waffle off excludes ratings. assert len(without_waffle) < len(with_waffle) def test_ratings_by_name_lazy_translation(self): generic_3_choice = ratingsbodies.RATINGS_BY_NAME()[6] eq_(generic_3_choice[1], 'Generic - For ages 3+') def test_ratings_has_ratingsbody(self): eq_(ratingsbodies.GENERIC_3.ratingsbody, ratingsbodies.GENERIC) eq_(ratingsbodies.CLASSIND_L.ratingsbody, ratingsbodies.CLASSIND) eq_(ratingsbodies.ESRB_E.ratingsbody, ratingsbodies.ESRB) eq_(ratingsbodies.USK_0.ratingsbody, ratingsbodies.USK) eq_(ratingsbodies.PEGI_3.ratingsbody, ratingsbodies.PEGI) def test_dehydrate_rating(self): self.create_switch('iarc') for rating in ratingsbodies.ALL_RATINGS(): rating = ratingsbodies.dehydrate_rating(rating) assert isinstance(rating.name, unicode), rating assert rating.label and rating.label != str(None), rating assert isinstance(rating.description, unicode), rating def test_dehydrate_ratings_body(self): self.create_switch('iarc') for k, body in ratingsbodies.RATINGS_BODIES.iteritems(): body = ratingsbodies.dehydrate_ratings_body(body) assert isinstance(body.name, unicode) assert body.label and body.label != str(None) assert isinstance(body.description, unicode) @contextmanager def tower_activate(self, region): try: activate(region) yield finally: activate('en-US') def test_dehydrate_rating_language(self): self.create_switch('iarc') with self.tower_activate('es'): rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T) eq_(rating.name, 'Adolescente') with self.tower_activate('fr'): rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T) eq_(rating.name, 'Adolescents') rating = ratingsbodies.dehydrate_rating(ratingsbodies.ESRB_T) eq_(rating.name, 'Teen')
Python
0.000002
@@ -2205,75 +2205,8 @@ ting -%0A assert isinstance(rating.description, unicode), rating %0A%0A
7bb6ac8e1d6e742c2c02c3cc489ea350175dc8cd
Refactor source grab's try/catch
vizydrop/rest/base.py
vizydrop/rest/base.py
from http.client import INTERNAL_SERVER_ERROR import json from vizydrop.rest import VizydropAppRequestHandler from vizydrop.sdk.source import StreamingDataSource from tornado.gen import coroutine from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError from . import TpaHandlerMixin class BaseHandler(VizydropAppRequestHandler, TpaHandlerMixin): def options(self): self.set_status(204) self.finish() def get(self): meta = self.tpa.Meta self.finish({ "version": meta.version, "tags": meta.tags, "name": meta.name, "color": meta.color or None, "description": meta.description, "site": meta.website, "sources": [source.get_schema() for source in meta.sources], "authentication": [account.get_schema() for account in meta.authentication] }) @coroutine def post(self): post_body = json.loads(self.request.body.decode('utf-8')) source = post_body.get('source') account_fields = post_body.get('account') account_identifier = account_fields.pop('auth') filter_fields = post_body.get('filter') limit = post_body.get('limit', 100) skip = post_body.get('skip', 0) account_type = self.tpa.get_auth(account_identifier) account = account_type(account_fields) source_type = self.tpa.get_source(source) filter = source_type.Meta.filter(filter_fields) if issubclass(source_type, StreamingDataSource): source_type.stream_callback = self.on_stream_data try: yield source_type.get_data(account, filter, limit=limit, skip=skip) self.flush() self.finish('') except HTTPError as e: raise e except Exception as e: self.set_status(INTERNAL_SERVER_ERROR) self._handle_request_exception(e) else: try: data = yield source_type.get_data(account, filter, limit=limit, skip=skip) self.finish(data, encode=False) except HTTPError as e: raise e except Exception as e: self.set_status(INTERNAL_SERVER_ERROR) self._handle_request_exception(e) def on_stream_data(self, data): self.write(data) self.flush()
Python
0
@@ -1489,16 +1489,33 @@
 ields)%0A%0A
+        try:%0A    
@@ -1567,32 +1567,36 @@
 e):%0A
+    
             source_type.stre
@@ -1629,33 +1629,16 @@
 am_data%0A
-            try:%0A
@@ -1791,223 +1791,11 @@
 e
-xcept HTTPError as e:%0A                raise e%0A            except Exception as e:%0A                self.set_status(INTERNAL_SERVER_ERROR)%0A                self._handle_request_exception(e)%0A        else:%0A            try
+lse
 :%0A
@@ -1935,28 +1935,24 @@
 se)%0A
-    
         except HTTPE
@@ -1978,24 +1978,78 @@
-            raise e%0A
+self.set_status(e.code)%0A            self.finish(str(e), encode=False)%0A
@@ -2071,28 +2071,24 @@
 ption as e:%0A
-    
@@ -2126,20 +2126,16 @@
 _ERROR)%0A
-    
546d4ac0bbf14da2bc5610aa09b5a75627b297a6
Add 131
100_to_199/euler_131.py
100_to_199/euler_131.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Problem 131

There are some prime values, p, for which there exists a positive integer, n,
such that the expression n3 + n2p is a perfect cube.

For example, when p = 19, 83 + 82×19 = 123.

What is perhaps most surprising is that for each prime with this property the
value of n is unique, and there are only four such primes below one-hundred.

How many primes below one million have this remarkable property?
'''

from util import prime_sieve
from itertools import count


def is_perfect_cube(x):
    # x = abs(x)
    return int(round(x ** (1. / 3))) ** 3 == x


def p131():  # Answer: 173, 68.54s Mac pro 2016
    cnt = 0
    primes = prime_sieve(1000000)
    for p in primes:
        for i in count(1):
            n = i ** 3
            if is_perfect_cube(n + p):
                if is_perfect_cube(n ** 2):
                    # print('[great] ', [p, n, i], n**2, n+p)
                    cnt += 1
                break
            if i > 600:
                break
    print(cnt)


p131()
Python
0.999937
@@ -486,16 +486,26 @@
 me_sieve
+, is_prime
 %0Afrom it
@@ -625,16 +625,21 @@
 def p131
+_slow
 ():  # A
@@ -1051,15 +1051,254 @@
 (cnt)%0A%0A%0A
+def p131():%0A    # n**3 + p = (n+1)**3%0A    # p = 3n**2 + 3n + 1%0A    cnt = 0%0A    for n in count(1):%0A        p = 3 * (n ** 2) + 3 * n + 1%0A        if p %3E= 1000000:%0A            break%0A        if is_prime(p):%0A            cnt += 1%0A    print(cnt)%0A%0A
 p131()%0A
6e04d3ab8a6d967c14afaf45869152c93d94d1ec
Refactor remote URI formatting functions
gutenberg/textsource.py
gutenberg/textsource.py
"""Module providing implementations of the api.TextSource interface.""" from __future__ import absolute_import from . import beautify from . import api from .common import wget import itertools import logging import os import rdflib import tarfile class GutenbergEbooks(api.TextSource): """Implementation of api.TextSource that fetches books from Project Gutenberg. """ RDF_URL = r'http://www.gutenberg.org/cache/epub/feeds/rdf-files.tar.bz2' def cleanup_text(self, lines): return beautify.strip_headers(lines) def _raw_source(self, start, stop, step): logging.info('fetching meta-data archive (this might take a while)') filename, _ = wget.grab(GutenbergEbooks.RDF_URL) with tarfile.open(filename) as archive: for tarinfo in itertools.islice(archive, start, stop, step): graph = rdflib.Graph() graph.parse(archive.extractfile(tarinfo)) yield graph def _format_remote_uris(self, text_info): if 0 < text_info.uid < 10: basic_url = '{root}/{path}/{file}.txt'.format( root=r'http://www.gutenberg.lib.md.us', path='etext90', file=["when11", "bill11", "jfk11", "getty11", "const11", "liber11", "mayfl11", "linc211", "linc111"][text_info.uid-1]) else: uid = str(text_info.uid) basic_url = '{root}/{path}/{uid}/{uid}.txt'.format( root=r'http://www.gutenberg.lib.md.us', path='/'.join(uid[:len(uid)-1]), uid=text_info.uid) yield basic_url def textinfo_converter(self, rdf_graph): ebook = next(iter(rdf_graph.query(''' SELECT ?ebook ?author ?title WHERE { ?ebook a pgterms:ebook. OPTIONAL { ?ebook dcterms:creator [ pgterms:name ?author ]. } OPTIONAL { ?ebook dcterms:title ?title. } } LIMIT 1 '''))) return api.TextInfo( uid=int(os.path.basename(ebook.ebook.toPython())), author=ebook.author.toPython() if ebook.author else None, title=ebook.title.toPython() if ebook.title else None)
Python
0
@@ -245,16 +245,982 @@
 rfile%0A%0A%0A
+def _is_legacy_uid(uid):%0A    return 0 %3C uid %3C 10%0A%0A%0Adef _format_uri(uid):%0A    if _is_legacy_uid(uid):%0A        raise ValueError('should use legacy URI format for UIDs in (0..10)')%0A%0A    uid = str(uid)%0A    return '%7Broot%7D/%7Bpath%7D/%7Buid%7D/%7Buid%7D.txt'.format(%0A        root=r'http://www.gutenberg.lib.md.us',%0A        path='/'.join(uid%5B:len(uid) - 1%5D),%0A        uid=uid)%0A%0A%0Adef _format_legacy_uri(uid):%0A    if not _is_legacy_uid(uid):%0A        raise ValueError('should use non-legacy URI format for UIDs %3E= 10')%0A%0A    legacy_files = (%0A        'when11',%0A        'bill11',%0A        'jfk11',%0A        'getty11',%0A        'const11',%0A        'liber11',%0A        'mayfl11',%0A        'linc211',%0A        'linc111',%0A    )%0A    return '%7Broot%7D/%7Bpath%7D/%7Bname%7D.txt'.format(%0A        root=r'http://www.gutenberg.lib.md.us',%0A        path='etext90',%0A        name=legacy_files%5Buid - 1%5D)%0A%0A%0Adef remote_uri_formatter(uid):%0A    if _is_legacy_uid(uid):%0A        return _format_legacy_uri%0A%0A    return _format_uri%0A%0A%0A
 class Gu
@@ -1990,639 +1990,102 @@
-if 0 %3C text_info.uid %3C 10:%0A            basic_url = '%7Broot%7D/%7Bpath%7D/%7Bfile%7D.txt'.format(%0A                root=r'http://www.gutenberg.lib.md.us',%0A                path='etext90',%0A                file=%5B%22when11%22, %22bill11%22, %22jfk11%22, %22getty11%22,%0A                      %22const11%22, %22liber11%22, %22mayfl11%22,%0A                      %22linc211%22, %22linc111%22%5D%5Btext_info.uid-1%5D)%0A        else:%0A            uid = str(text_info.uid)%0A            basic_url = '%7Broot%7D/%7Bpath%7D/%7Buid%7D/%7Buid%7D.txt'.format(%0A                root=r'http://www.gutenberg.lib.md.us',%0A                path='/'.join(uid%5B:len(uid)-1%5D),%0A                uid=text_info.uid)%0A        yield basic_url
+uri_formatter = remote_uri_formatter(text_info.uid)%0A        yield uri_formatter(text_info.uid)
 %0A%0A
10318a11dded5e69c3d9c98325613700c9b3db63
Fix for dependent package detection.
lib/spack/spack/cmd/dependents.py
lib/spack/spack/cmd/dependents.py
Python
0
@@ -0,0 +1,1814 @@
+##############################################################################%0A# Copyright (c) 2013, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Written by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://scalability-llnl.github.io/spack%0A# Please also see the LICENSE file for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU General Public License (as published by%0A# the Free Software Foundation) version 2.1 dated February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public License%0A# along with this program; if not, write to the Free Software Foundation,%0A# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Aimport argparse%0A%0Aimport llnl.util.tty as tty%0A%0Aimport spack%0Aimport spack.cmd%0A%0Adescription = %22Show dependent packages.%22%0A%0Adef setup_parser(subparser):%0A    subparser.add_argument(%0A        'spec', nargs=argparse.REMAINDER, help=%22specs to list dependencies of.%22)%0A%0A%0Adef dependents(parser, args):%0A    specs = spack.cmd.parse_specs(args.spec, concretize=True)%0A    if len(specs) != 1:%0A        tty.die(%22spack dependents takes only one spec.%22)%0A%0A    fmt = '$_$@$%25@$+$=$#'%0A    deps = %5Bd.format(fmt) for d in specs%5B0%5D.package.installed_dependents%5D%0A    tty.msg(%22Dependents of %25s%22 %25 specs%5B0%5D.format(fmt), *deps)%0A
a941b99cd198b1be7707c1110a2d0e24f7539b2e
support for the new output mode
lib/svtplay_dl/service/tv4play.py
lib/svtplay_dl/service/tv4play.py
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import sys
import re
import xml.etree.ElementTree as ET
import json
import copy

from svtplay_dl.utils.urllib import urlparse, parse_qs, quote_plus
from svtplay_dl.service import Service, OpenGraphThumbMixin
from svtplay_dl.utils import get_http_data, is_py2_old
from svtplay_dl.log import log
from svtplay_dl.fetcher.hls import hlsparse, HLS
from svtplay_dl.fetcher.rtmp import RTMP
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.subtitle import subtitle_smi


class Tv4play(Service, OpenGraphThumbMixin):
    supported_domains = ['tv4play.se', 'tv4.se']

    def __init__(self, url):
        Service.__init__(self, url)
        self.subtitle = None

    def get(self, options):
        parse = urlparse(self.url)
        if "tv4play.se" in self.url:
            try:
                vid = parse_qs(parse[4])["video_id"][0]
            except KeyError:
                log.error("Can't find video file")
                sys.exit(2)
        else:
            match = re.search(r"-(\d+)$", self.url)
            if match:
                vid = match.group(1)
            else:
                match = re.search(r"\"vid\":\"(\d+)\",", self.get_urldata())
                if match:
                    vid = match.group(1)
                else:
                    log.error("Can't find video file")
                    sys.exit(2)

        url = "http://premium.tv4play.se/api/web/asset/%s/play" % vid
        data = get_http_data(url)
        xml = ET.XML(data)
        ss = xml.find("items")
        if is_py2_old:
            sa = list(ss.getiterator("item"))
        else:
            sa = list(ss.iter("item"))

        if xml.find("live").text:
            if xml.find("live").text != "false":
                options.live = True
        if xml.find("drmProtected").text == "true":
            log.error("DRM protected content.")
            sys.exit(2)
        for i in sa:
            if i.find("mediaFormat").text == "mp4":
                base = urlparse(i.find("base").text)
                parse = urlparse(i.find("url").text)
                if base.scheme == "rtmp":
                    swf = "http://www.tv4play.se/flash/tv4playflashlets.swf"
                    options.other = "-W %s -y %s" % (swf, i.find("url").text)
                    yield RTMP(copy.copy(options), i.find("base").text, i.find("bitrate").text)
                elif parse.path[len(parse.path)-3:len(parse.path)] == "f4m":
                    query = ""
                    if i.find("url").text[-1] != "?":
                        query = "?"
                    manifest = "%s%shdcore=2.8.0&g=hejsan" % (i.find("url").text, query)
                    streams = hdsparse(copy.copy(options), manifest)
                    for n in list(streams.keys()):
                        yield streams[n]
            elif i.find("mediaFormat").text == "smi":
                yield subtitle_smi(i.find("url").text)

        url = "http://premium.tv4play.se/api/web/asset/%s/play?protocol=hls" % vid
        data = get_http_data(url)
        xml = ET.XML(data)
        ss = xml.find("items")
        if is_py2_old:
            sa = list(ss.getiterator("item"))
        else:
            sa = list(ss.iter("item"))

        for i in sa:
            if i.find("mediaFormat").text == "mp4":
                parse = urlparse(i.find("url").text)
                if parse.path.endswith("m3u8"):
                    streams = hlsparse(i.find("url").text)
                    for n in list(streams.keys()):
                        yield HLS(copy.copy(options), streams[n], n)

    def find_all_episodes(self, options):
        parse = urlparse(self.url)
        show = quote_plus(parse.path[parse.path.find("/", 1)+1:])
        data = get_http_data("http://webapi.tv4play.se/play/video_assets?type=episode&is_live=false&platform=web&node_nids=%s&per_page=99999" % show)
        jsondata = json.loads(data)
        episodes = []
        for i in jsondata["results"]:
            try:
                days = int(i["availability"]["availability_group_free"])
            except ValueError:
                days = 999
            if days > 0:
                video_id = i["id"]
                url = "http://www.tv4play.se/program/%s?video_id=%s" % (
                    show, video_id)
                episodes.append(url)

        return sorted(episodes)
Python
0.000001
@@ -142,16 +142,26 @@
 port re%0A
+import os%0A
 import x
@@ -393,16 +393,28 @@
 _py2_old
+, filenamify
 %0Afrom sv
@@ -2005,32 +2005,422 @@
 sys.exit(2)%0A
+%0A        if options.output_auto:%0A            directory = os.path.dirname(options.output)%0A            options.service = %22tv4play%22%0A            title = %22%25s-%25s-%25s%22 %25 (options.output, vid, options.service)%0A            title = filenamify(title)%0A            if len(directory):%0A                options.output = %22%25s/%25s%22 %25 (directory, title)%0A            else:%0A                options.output = title%0A%0A
         for i in
4f3c3755d9fcbfd9ce0551c19bb893e7ba73db91
Add missing setup.py
numpy/numarray/setup.py
numpy/numarray/setup.py
Python
0.000003
@@ -0,0 +1,524 @@
+%0Afrom os.path import join%0A%0Adef configuration(parent_package='',top_path=None):%0A    from numpy.distutils.misc_util import Configuration%0A    config = Configuration('numarray',parent_package,top_path)%0A%0A    config.add_data_files('include/numarray/*.h')%0A%0A    # Configure fftpack_lite%0A    config.add_extension('_capi',%0A                         sources=%5B'_capi.c'%5D%0A                         )%0A%0A    return config%0A%0Aif __name__ == '__main__':%0A    from numpy.distutils.core import setup%0A    setup(**configuration(top_path='').todict())%0A
735ce7f11b70bf8e916bad5610093b5886c57db6
Add quickseg.py
cyvlfeat/quickshift/quickseg.py
cyvlfeat/quickshift/quickseg.py
Python
0
@@ -0,0 +1,3196 @@
+import numpy as np%0Afrom cyvlfeat.quickshift import quickshift%0Afrom cyvlfeat.quickshift import flatmap%0Afrom cyvlfeat.quickshift import imseg%0A%0A%0Adef quickseg(image, ratio, kernel_size, max_dist):%0A    %22%22%22%0A    Produce a quickshift segmentation of a greyscale image.%0A%0A    Parameters%0A    ----------%0A    image : %5BH, W%5D or %5BH, W, 1%5D %60float64%60 %60ndarray%60%0A        Input image, Greyscale. A single channel, greyscale,%0A        %60float64%60 numpy array (ndarray).%0A    ratio : %60double%60%0A        Trade-off between spatial consistency and color consistency.%0A        Small ratio gives more importance to the spatial component.%0A        Note that distance calculations happen in unnormalized image%0A        coordinates, so RATIO should be adjusted to compensate for%0A        larger images.%0A    kernel_size : %60double%60%0A        The standard deviation of the parzen window density estimator.%0A    max_dist : %60double%60%0A        The maximum distance between nodes in the quickshift tree. Used%0A        to cut links in the tree to form the segmentation.%0A%0A    Returns%0A    -------%0A    i_seg :%0A        A color image where each pixel is labeled by the mean color in its%0A        region.%0A    labels : %5BH, W%5D %60float64%60 %60ndarray%60.%0A        Array of the same size of image.%0A        A labeled image where the number corresponds to the cluster identity.%0A    maps : %5BH, W%5D %60float64%60 %60ndarray%60.%0A        Array of the same size of image.%0A        %60maps%60 as returned by %60quickshift%60: For each pixel, the pointer to the%0A        nearest pixel which increases the estimate of the density.%0A    gaps : %5BH, W%5D %60float64%60 %60ndarray%60.%0A        Array of the same size of image.%0A        %60gaps%60 as returned by %60quickshift%60: For each pixel, the distance to%0A        the nearest pixel which increases the estimate of the density.%0A    estimate : %5BH, W%5D %60float64%60 %60ndarray%60.%0A        Array of the same size of image.%0A        %60estimate%60 as returned by %60quickshift%60: The estimate of the density.%0A    %22%22%22%0A%0A    # validate image%0A    if image.dtype != np.float64:%0A        raise ValueError('Image array must be of Double precision')%0A    # image = np.asarray(image, dtype=np.float64)%0A%0A    # Add less than one pixel noise to break ties caused by%0A    # constant regions in an arbitrary fashions%0A    noise = np.random.random(image.shape) / 2250%0A    image += noise%0A%0A    # For now we're dealing with Greyscale images only.%0A    if image.shape%5B2%5D == 1:%0A        imagex = ratio * image%0A%0A    # Perform quickshift to obtain the segmentation tree, which is already cut by%0A    # maxdist. If a pixel has no nearest neighbor which increases the density, its%0A    # parent in the tree is itself, and gaps is inf.%0A%0A    (maps, gaps, estimate) = quickshift(image, kernel_size, max_dist)%0A%0A    # Follow the parents of the tree until we have reached the root nodes%0A    # mapped: a labeled segmentation where the labels are the indices of the modes%0A    # in the original image.%0A    # labels: mapped after having been renumbered 1: nclusters and reshaped into a%0A    # vector%0A%0A    (mapped, labels) = flatmap(maps)%0A    labels = np.resize(labels, maps.shape)%0A%0A    # imseg builds an average description of the region by color%0A%0A    i_seg = imseg(image, labels)%0A%0A    return i_seg, labels, maps, gaps, estimate%0A