Dataset schema (one record per commit; type and value range per column):

    column         type            range / values
    commit         stringlengths   40 to 40
    subject        stringlengths   1 to 3.25k
    old_file       stringlengths   4 to 311
    new_file       stringlengths   4 to 311
    old_contents   stringlengths   0 to 26.3k
    lang           stringclasses   3 values
    proba          float64         0 to 1
    diff           stringlengths   0 to 7.82k
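Each record below lists these fields in order. As a minimal sketch of how such rows might be consumed, assuming the dump is published as a Hugging Face dataset (the dataset path here is a hypothetical placeholder, not a real identifier):

```python
# Minimal sketch, assuming the records below are available as a Hugging Face
# dataset. "org/python-commit-diffs" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("org/python-commit-diffs", split="train")
row = ds[0]
print(row["commit"], row["subject"])   # commit hash and subject line
print(row["lang"], row["proba"])       # language label and classifier score
```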
commit: c1b4216e610a46260f52d5ed71267a2ed5fcdd25
subject: update debug url to account for downloads
old_file: hs_core/debug_urls.py
new_file: hs_core/debug_urls.py
old_contents:

"""Extra URLs that add debugging capabilities to resources."""

from django.conf.urls import url
from hs_core import views

urlpatterns = [
    # Resource Debugging: print consistency problems in a resource
    url(r'^resource/(?P<shortkey>[0-9a-f-]+)/debug/$',
        views.debug_resource_view.debug_resource,
        name='debug_resource'),

    url(r'^resource/(?P<shortkey>[0-9a-f-]+)/debug/irods-issues/$',
        views.debug_resource_view.irods_issues,
        name='debug_resource'),

    url(r'^taskstatus/(?P<task_id>[A-z0-9\-]+)/$',
        views.debug_resource_view.check_task_status,
        name='get_debug_task_status'),
]

lang: Python
proba: 0
diff:

@@ -203,32 +203,38 @@ urce%0A url(r'%5E +debug/ resource/(?P%3Csho
@@ -251,22 +251,16 @@ a-f-%5D+)/ -debug/ $',%0A
@@ -348,16 +348,22 @@ url(r'%5E +debug/ resource
@@ -388,22 +388,16 @@ a-f-%5D+)/ -debug/ irods-is
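The diff field in this record (and in those that follow) looks like Google's diff-match-patch patch text: character-offset hunk headers of the form `@@ -start,len +start,len @@` with URI-encoded bodies (`%0A` is a newline, `%25` a percent sign), not a line-based unified diff. Treating the column that way is an assumption, but if it holds, the post-commit contents can be rebuilt from `old_contents`, using a `row` as in the loading sketch above:

```python
# Hedged sketch: rebuild the new file from old_contents + diff, assuming the
# diff column is diff-match-patch patch text (an assumption, not documented).
import diff_match_patch  # pip install diff-match-patch

dmp = diff_match_patch.diff_match_patch()
patches = dmp.patch_fromText(row["diff"])       # parse the @@-delimited hunks
new_contents, ok = dmp.patch_apply(patches, row["old_contents"])
assert all(ok), "some hunks failed to apply"
```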
commit: 2c06c20b8d96274f0dd1da1bcbd33b8b73e5d786
subject: clean up logging handlers.
old_file: reservoir/settings.py
new_file: reservoir/settings.py
old_contents:

# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Django settings for reservoir project.

Generated by 'django-admin startproject' using Django 2.0.5.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""

import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '5a-%%8)gkf#hq5&!#6(w+6xyra(!fx3lm4y3+jhrb=9)p@qh!o'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('RESERVOIR_DEBUG', True)

ALLOWED_HOSTS = ['*']

RESERVOIR_SITE_PREFIX = os.environ.get('RESERVOIR_SITE_PREFIX', '')

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
    'mod_wsgi.server',
    'reservoir.third_party.3dmr',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'reservoir.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'reservoir.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': os.environ.get('RESERVOIR_DB_NAME', '3dmr'),
        'USER': os.environ.get('RESERVOIR_DB_USER', '3dmr'),
        'PASSWORD': os.environ.get('RESERVOIR_DB_PASSWORD','3dmr'),
        'HOST': os.environ.get('RESERVOIR_DB_HOST','127.0.0.1'),
        'PORT': os.environ.get('RESERVOIR_DB_PORT','5432'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/

LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/

STATIC_URL = os.environ.get('RESERVOIR_STATIC_URL', '/static/')
STATIC_ROOT = os.environ.get('RESERVOIR_STATIC_ROOT', '/var/www/reservoir')
STATICFILES_DIRS = [
    './reservoir/third_party/3dmr/mainapp/static',
]

# Logging
DEFAULT_LOG_LEVEL = os.environ.get('RESERVOIR_LOG_LEVEL', 'DEBUG')

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'simple': {
            'format': '[H3DMR {levelname} {asctime} {filename}:{lineno}] {message}',
            'style': '{',
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
        },
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '/var/log/h3dmr.log',
            'maxBytes': 1024*1024*15,  # 15MB
            'backupCount': 10,
            'formatter': 'simple',
        }
    },
    'root': {
        'handlers': ['console', 'file'],
        'level': os.environ.get('H3DMR_LOG_LEVEL', DEFAULT_LOG_LEVEL),
    }
}

lang: Python
proba: 0
diff:

@@ -4355,76 +4355,8 @@ ing%0A -DEFAULT_LOG_LEVEL = os.environ.get('RESERVOIR_LOG_LEVEL', 'DEBUG')%0A%0A LOGG
@@ -4484,20 +4484,24 @@ mat': '%5B -H3DM +RESERVOI R %7Blevel
@@ -5069,12 +5069,16 @@ et(' -H3DM +RESERVOI R_LO
@@ -5091,25 +5091,15 @@ L', +' DE -FAULT_LOG_LEVEL +BUG' ),%0A
commit: e68307e10e1aebe8a6c527a15bfc34b1158bf0eb
subject: Use labels in API for #
old_file: judge/views/api.py
new_file: judge/views/api.py
old_contents:

from django.core.exceptions import ObjectDoesNotExist
from django.http import JsonResponse, Http404

from judge.models import Contest, Problem, Profile, Submission


def sane_time_repr(delta):
    days = delta.days
    hours = delta.seconds / 3600
    minutes = (delta.seconds % 3600) / 60
    return '%02d:%02d:%02d' % (days, hours, minutes)


def api_contest_list(request):
    contests = {}
    for c in Contest.objects.filter(is_public=True, is_private=False):
        contests[c.key] = {
            'name': c.name,
            'start_time': c.start_time.isoformat(),
            'end_time': c.end_time.isoformat(),
            'time_limit': c.time_limit and sane_time_repr(c.time_limit),
            'external': c.is_external,
        }
    return JsonResponse(contests)


def api_problem_list(request):
    problems = {}
    for p in Problem.objects.filter(is_public=True):
        problems[p.code] = {
            'points': p.points,
            'partial': p.partial,
            'name': p.name,
            'group': p.group.full_name
        }
    return JsonResponse(problems)


def api_problem_info(request, problem):
    try:
        p = Problem.objects.get(code=problem)
    except ObjectDoesNotExist:
        raise Http404()

    return JsonResponse({
        'name': p.name,
        'authors': list(p.authors.values_list('user__username', flat=True)),
        'types': list(p.types.values_list('full_name', flat=True)),
        'group': p.group.full_name,
        'time_limit': p.time_limit,
        'memory_limit': p.memory_limit,
        'points': p.points,
        'partial': p.partial,
        'languages': list(p.allowed_languages.values_list('key', flat=True)),
    })


def api_user_list(request):
    users = {}
    for p in Profile.objects.select_related('user').only('user__username', 'name', 'points', 'display_rank'):
        users[p.user.username] = {
            'display_name': p.name,
            'points': p.points,
            'rank': p.display_rank
        }
    return JsonResponse(users)


def api_user_info(request, user):
    try:
        p = Profile.objects.get(user__username=user)
    except ObjectDoesNotExist:
        raise Http404()

    return JsonResponse({
        'display_name': p.name,
        'points': p.points,
        'rank': p.display_rank,
        'solved_problems': [],  # TODO
    })


def api_user_submissions(request, user):
    try:
        p = Profile.objects.get(user__username=user)
    except ObjectDoesNotExist:
        raise Http404()

    subs = Submission.objects.filter(user=p, problem__is_public=True).select_related('problem', 'language') \
        .only('id', 'problem__code', 'time', 'memory', 'points', 'language__key', 'status', 'result')
    data = {}
    for s in subs:
        data[s.id] = {
            'problem': s.problem.code,
            'time': s.time,
            'memory': s.memory,
            'points': s.points,
            'language': s.language.key,
            'status': s.status,
            'result': s.result
        }
    return JsonResponse(data)

lang: Python
proba: 0
diff:

@@ -700,16 +700,27 @@ +'labels': %5B 'externa
@@ -721,17 +721,20 @@ xternal' -: +%5D if c.is_ex
@@ -739,16 +739,24 @@ external + else %5B%5D ,%0A
commit: 9a92f1e214a9bf9bfa33a47eae31c3a626c4377c
subject: improve backend id for vcloud
old_file: src/mist/io/model.py
new_file: src/mist/io/model.py
old_contents:

"""Mist Io Model

Here we define the schema of our data structure in an object oriented way.

Simple, low level, helper functions can also be added to the following
classes. (eg user.get_num_mon_machines(), user.keys.unused()). It is
recommended that only pure functions (no side-effects) are used as class
methods.

How this works:
The basic class is the OODict. This defines a dict to object mapper.
When we need a new data structure, we define a new subclass of OODict.
Class properties that are instances of Field subclasses are considered to be
OODict fields. These are the keys in the underlying dict that will be used.
There is a large variety of standard type fields.
One can create an OODict that has a field which is also parsed by some OODict.
To do so, you define a field on the outer OODict that is created by make_field.
Finally, list or dict like collections can be created by subclassing FieldsList
and FieldsDict. The items of these collections will be parsed according to
the field type defined in the class. This collection can be used as a field
in some OODict by use of make_field. If it sounds too complicated, just look
the code below, it should be pretty self-explanatory.

"""

import os
import logging

from Crypto.PublicKey import RSA
from hashlib import sha1

from mist.io.dal import StrField, HtmlSafeStrField
from mist.io.dal import IntField, FloatField, BoolField
from mist.io.dal import ListField, DictField
from mist.io.dal import OODict, FieldsDict, FieldsList, make_field
try:
    from mist.core.dal import User as DalUser
    from mist.core.dal import FieldsDict  # escapes dots in keys (for mongo)
except ImportError:
    from mist.io.dal import User as DalUser
from mist.io import exceptions

try:
    from mist.core import config
except ImportError:
    from mist.io import config

import logging
logging.basicConfig(level=config.PY_LOG_LEVEL,
                    format=config.PY_LOG_FORMAT,
                    datefmt=config.PY_LOG_FORMAT_DATE)
log = logging.getLogger(__name__)


class Machine(OODict):
    """A saved machine in the machines list of some backend.

    For the time being, only bare metal machines are saved, for API backends
    we get the machine list from the provider.

    """
    ## hasMonitoring = BoolField()
    uuid = StrField()
    ## monitor_server = make_field(MonitorServer)()
    dns_name = HtmlSafeStrField()
    public_ips = ListField()
    ## collectd_password = StrField()
    name = HtmlSafeStrField()
    ssh_port = IntField(22)


class Machines(FieldsDict):
    """Collection of machines of a certain backend.

    For the time being, only bare metal machines are saved, for API backends
    we get the machine list from the provider.

    """
    _item_type = make_field(Machine)
    _key_error = exceptions.MachineNotFoundError


class Backend(OODict):
    """A cloud vm provider backend"""
    enabled = BoolField()
    machine_count = IntField()
    apiurl = StrField()
    apikey = HtmlSafeStrField()
    apisecret = StrField()
    title = HtmlSafeStrField()
    tenant_name = HtmlSafeStrField()
    auth_version = HtmlSafeStrField()
    region = HtmlSafeStrField()
    poll_interval = IntField(10000)
    provider = HtmlSafeStrField()
    ## datacenter = StrField()
    compute_endpoint = StrField()
    docker_port = IntField(4243)
    machines = make_field(Machines)()
    starred = ListField()
    unstarred = ListField()

    def __repr__(self):
        print_fields = ['title', 'provider', 'region']
        return super(Backend, self).__repr__(print_fields)

    def get_id(self):
        from mist.io.helpers import b58_encode
        if self.provider == 'docker':
            concat = '%s%s%s' % (self.provider, self.title, self.apiurl)
        elif self.provider == 'bare_metal':
            name = self.machines.values()[0].name
            concat = '%s%s%s' % (self.provider, '', name)
        elif self.provider == 'openstack' or 'hpcloud' in self.provider:
            concat = "%s%s%s%s%s" % (self.provider, self.region, self.apikey, self.apiurl, self.tenant_name)
        elif self.provider == 'libvirt':
            concat = "%s%s" % (self.provider, self.apiurl)
        else:
            concat = '%s%s%s' % (self.provider, self.region, self.apikey)
        return b58_encode(int(sha1(concat).hexdigest(), 16))


class Backends(FieldsDict):

    _item_type = make_field(Backend)
    _key_error = exceptions.BackendNotFoundError


class Keypair(OODict):
    """An ssh keypair."""
    public = StrField()
    private = StrField()
    default = BoolField()
    machines = ListField()

    def generate(self):
        """Generates a new RSA keypair and assignes to self."""
        from Crypto import Random
        Random.atfork()
        key = RSA.generate(2048)
        self.private = key.exportKey()
        self.public = key.exportKey('OpenSSH')

    def isvalid(self):
        """Checks if self is a valid RSA keypair."""
        from Crypto import Random
        Random.atfork()
        message = 'Message 1234567890'
        if 'ssh-rsa' in self.public:
            public_key_container = RSA.importKey(self.public)
            private_key_container = RSA.importKey(self.private)
            encr_message = public_key_container.encrypt(message, 0)
            decr_message = private_key_container.decrypt(encr_message)
            if message == decr_message:
                return True
        return False

    def construct_public_from_private(self):
        """Constructs pub key from self.private and assignes to self.public.
        Only works for RSA.

        """
        from Crypto import Random
        Random.atfork()
        if 'RSA' in self.private:
            try:
                key = RSA.importKey(self.private)
                public = key.publickey().exportKey('OpenSSH')
                self.public = public
                return True
            except:
                pass
        return False

    def __repr__(self):
        return super(Keypair, self).__repr__(['default', 'machines'])


class Keypairs(FieldsDict):

    _item_type = make_field(Keypair)
    _key_error = exceptions.KeypairNotFoundError


class User(DalUser):
    """The basic model class is User. It contains all the methods
    necessary to find and save users in memcache and in mongo.
    It transforms the user dict into an object with consistent
    attributes instead of inconsistent dict with missing keys.

    """
    email = StrField()
    mist_api_token = StrField()
    backends = make_field(Backends)()
    keypairs = make_field(Keypairs)()

    def __repr__(self):
        return super(User, self).__repr__(['email'])

lang: Python
proba: 0.000001
diff:

@@ -4156,32 +4156,187 @@ r, self.apiurl)%0A + elif self.provider in %5B'vcloud', 'indonesian_vcloud'%5D:%0A concat = %22%25s%25s%25s%25s%22 %25 (self.provider, self.apikey, self.apisecret, self.apiurl)%0A else:%0A
commit: 600a19b8a3f6d320b00d1d2b25e5c0f341f821d1
subject: bump version
old_file: kaggle_cli/main.py
new_file: kaggle_cli/main.py
old_contents:

import sys

from cliff.app import App
from cliff.commandmanager import CommandManager

VERSION = '0.6.0'


class KaggleCLI(App):
    def __init__(self):
        super(KaggleCLI, self).__init__(
            description='An unofficial Kaggle command line tool.',
            version=VERSION,
            command_manager=CommandManager('kaggle_cli'),
        )


def main(argv=sys.argv[1:]):
    app = KaggleCLI()
    return app.run(argv)


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))

lang: Python
proba: 0
diff:

@@ -95,17 +95,17 @@ = '0.6. -0 +1 '%0A%0A%0Aclas
commit: 5a87dbbb0ea4faa44d743a21a2f7d7aca46242f9
subject: Document internet gateway properties:
old_file: heat/engine/resources/internet_gateway.py
new_file: heat/engine/resources/internet_gateway.py
old_contents:

# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from heat.engine import clients
from heat.common import exception
from heat.openstack.common import log as logging
from heat.engine import resource

logger = logging.getLogger(__name__)


class InternetGateway(resource.Resource):

    tags_schema = {'Key': {'Type': 'String',
                           'Required': True},
                   'Value': {'Type': 'String',
                             'Required': True}}

    properties_schema = {
        'Tags': {'Type': 'List', 'Schema': {
            'Type': 'Map',
            'Implemented': False,
            'Schema': tags_schema}}
    }

    def handle_create(self):
        self.resource_id_set(self.physical_resource_name())

    def handle_delete(self):
        pass

    @staticmethod
    def get_external_network_id(client):
        ext_filter = {'router:external': True}
        ext_nets = client.list_networks(**ext_filter)['networks']
        if len(ext_nets) != 1:
            # TODO(sbaker) if there is more than one external network
            # add a heat configuration variable to set the ID of
            # the default one
            raise exception.Error(
                'Expected 1 external network, found %d' % len(ext_nets))
        external_network_id = ext_nets[0]['id']
        return external_network_id


class VPCGatewayAttachment(resource.Resource):

    properties_schema = {
        'VpcId': {
            'Type': 'String',
            'Required': True},
        'InternetGatewayId': {'Type': 'String'},
        'VpnGatewayId': {
            'Type': 'String',
            'Implemented': False}
    }

    def _vpc_route_tables(self):
        for resource in self.stack.resources.itervalues():
            if (resource.has_interface('AWS::EC2::RouteTable') and
                    resource.properties.get('VpcId') ==
                    self.properties.get('VpcId')):
                yield resource

    def add_dependencies(self, deps):
        super(VPCGatewayAttachment, self).add_dependencies(deps)
        # Depend on any route table in this template with the same
        # VpcId as this VpcId.
        # All route tables must exist before gateway attachment
        # as attachment happens to routers (not VPCs)
        for route_table in self._vpc_route_tables():
            deps += (self, route_table)

    def handle_create(self):
        client = self.neutron()
        external_network_id = InternetGateway.get_external_network_id(client)
        for router in self._vpc_route_tables():
            client.add_gateway_router(router.resource_id, {
                'network_id': external_network_id})

    def handle_delete(self):
        from neutronclient.common.exceptions import NeutronClientException
        client = self.neutron()
        for router in self._vpc_route_tables():
            try:
                client.remove_gateway_router(router.resource_id)
            except NeutronClientException as ex:
                if ex.status_code != 404:
                    raise ex


def resource_mapping():
    if clients.neutronclient is None:
        return {}

    return {
        'AWS::EC2::InternetGateway': InternetGateway,
        'AWS::EC2::VPCGatewayAttachment': VPCGatewayAttachment,
    }

lang: Python
proba: 0
diff:

@@ -2042,32 +2042,102 @@ 'Required': True +,%0A 'Description': _('VPC ID for this gateway association.') %7D,%0A 'Inte
@@ -2149,24 +2149,37 @@ atewayId': %7B +%0A 'Type': 'Str
@@ -2182,16 +2182,76 @@ 'String' +,%0A 'Description': _('ID of the InternetGateway.') %7D,%0A
@@ -2333,16 +2333,92 @@ ': False +,%0A 'Description': _('ID of the VPNGateway to attach to the VPC.') %7D%0A %7D%0A
commit: 77503c0e09c0a520ffdc3b4f936c579148acd915
subject: Clean up the code a bit
old_file: piwik_tracking/piwiktracker.py
new_file: piwik_tracking/piwiktracker.py
old_contents:

import datetime
import httplib
import random
import urllib
import urlparse


class PiwikTracker:
    VERSION = 1

    def __init__(self, id_site, api_url, request, token_auth):
        random.seed()
        self.id_site = id_site
        self.api_url = api_url
        self.request = request
        self.token_auth = token_auth
        self.page_url = self.get_current_url()
        self.set_request_parameters()
        self.set_local_time(self.get_timestamp())

    def set_request_parameters(self):
        # django-specific
        self.user_agent = self.request.META.get('HTTP_USER_AGENT', '')
        self.referer = self.request.META.get('HTTP_REFERER', '')
        self.ip = self.request.META.get('REMOTE_ADDR')
        self.accept_language = self.request.META.get('HTTP_ACCEPT_LANGUAGE', '')

    def set_local_time(self, datetime):
        self.local_hour = datetime.hour
        self.local_minute = datetime.minute
        self.local_second = datetime.second

    def get_current_scheme(self):
        # django-specific
        if self.request.is_secure():
            scheme = 'https'
        else:
            scheme = 'http'
        return scheme

    def get_current_host(self):
        # django-specific
        return self.request.get_host()

    def get_current_script_name(self):
        # django-specific
        return self.request.path_info

    def get_current_query_string(self):
        # django-specific
        return self.request.META.get('QUERY_STRING', '')

    def get_current_url(self):
        url = self.get_current_scheme() + '://'
        url += self.get_current_host()
        url += self.get_current_script_name()
        url += self.get_current_query_string()
        return url

    def get_timestamp(self):
        return datetime.datetime.now()

    def get_query_vars(self, document_title=False):
        url = "?idsite=%d&rec=1&apiv=%s&r=%s&url=%s&urlref=%s&cip=%s&token_auth=%s" % (
            self.id_site,
            self.VERSION,
            random.randint(0, 99999),
            urllib.quote_plus(self.page_url),
            urllib.quote_plus(self.referer),
            # Forcing IP requires the auth token
            self.ip,
            self.token_auth,
        )
        if document_title:
            url += '&action_name=%s' % urllib.quote_plus(document_title)
        return url

    def send_request(self, query_vars):
        "Send the request to piwik"
        headers = {
            'Accept-Language': self.accept_language,
            'User-Agent': self.user_agent,
        }
        parsed = urlparse.urlparse(self.api_url)
        connection = httplib.HTTPConnection(parsed.hostname)
        url = parsed.path + query_vars
        connection.request('GET', url, '', headers)
        response = connection.getresponse()
        return response.read()

    def do_track_page_view(self, document_title):
        query_vars = self.get_query_vars(document_title)
        return self.send_request(query_vars);


def piwik_get_url_track_page_view(id_site, api_url, request, token_auth, document_title=False):
    tracker = PiwikTracker(id_site, api_url, request, token_auth)
    return tracker.do_track_page_view(document_title)

lang: Python
proba: 0
diff:

@@ -790,16 +790,28 @@ NGUAGE', +%0A '')%0A%0A
@@ -1537,17 +1537,16 @@ url - = self.g
@@ -1858,87 +1858,22 @@ -url = %22?idsite=%25d&rec=1&apiv=%25s&r=%25s&url=%25s&urlref=%25s&cip=%25s&token_auth=%25s%22 %25 ( +parameters = %7B %0A
@@ -1876,24 +1876,34 @@ %0A + 'idsite': self.id_sit
@@ -1912,24 +1912,54 @@ %0A + +'rec': 1,%0A 'apiv': self.VERSIO
@@ -1968,24 +1968,29 @@ %0A + 'r': random.rand
@@ -2016,34 +2016,23 @@ +' url -lib.quote_plus( +': self.pag
@@ -2036,17 +2036,16 @@ page_url -) ,%0A
@@ -2050,34 +2050,26 @@ +' url -lib.quote_plus( +ref': self.ref
@@ -2072,17 +2072,16 @@ .referer -) ,%0A
@@ -2130,24 +2130,31 @@ %0A + 'cip': self.ip,%0A
@@ -2158,24 +2158,38 @@ %0A + 'token_auth': self.token_
@@ -2202,17 +2202,17 @@ -) +%7D %0A
@@ -2247,17 +2247,20 @@ -url += '& +parameters%5B' acti
@@ -2270,14 +2270,12 @@ name -=%25s' %25 +'%5D = url
@@ -2315,32 +2315,57 @@ return url +lib.urlencode(parameters) %0A%0A def send_r
@@ -2676,16 +2676,27 @@ url = +%22%25s?%25s%22 %25 ( parsed.p
@@ -2698,18 +2698,17 @@ sed.path - + +, query_v
@@ -2710,16 +2710,17 @@ ery_vars +) %0A
@@ -2995,9 +2995,9 @@ ars) -; +%0A %0A%0Ade
@@ -3066,16 +3066,24 @@ en_auth, +%0A documen
commit: b1e2275b47e70949c018ab276279c9e6b8f6d3cf
subject: Add debug (#10828)
old_file: homeassistant/components/sensor/serial.py
new_file: homeassistant/components/sensor/serial.py
old_contents:

"""
Support for reading data from a serial port.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.serial/
"""
import asyncio
import logging
import json

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
    CONF_NAME, CONF_VALUE_TEMPLATE, EVENT_HOMEASSISTANT_STOP)
from homeassistant.helpers.entity import Entity

REQUIREMENTS = ['pyserial-asyncio==0.4']

_LOGGER = logging.getLogger(__name__)

CONF_SERIAL_PORT = 'serial_port'
CONF_BAUDRATE = 'baudrate'

DEFAULT_NAME = "Serial Sensor"
DEFAULT_BAUDRATE = 9600

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_SERIAL_PORT): cv.string,
    vol.Optional(CONF_BAUDRATE, default=DEFAULT_BAUDRATE): cv.positive_int,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
})


@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up the Serial sensor platform."""
    name = config.get(CONF_NAME)
    port = config.get(CONF_SERIAL_PORT)
    baudrate = config.get(CONF_BAUDRATE)

    value_template = config.get(CONF_VALUE_TEMPLATE)
    if value_template is not None:
        value_template.hass = hass

    sensor = SerialSensor(name, port, baudrate, value_template)

    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_STOP, sensor.stop_serial_read())
    async_add_devices([sensor], True)


class SerialSensor(Entity):
    """Representation of a Serial sensor."""

    def __init__(self, name, port, baudrate, value_template):
        """Initialize the Serial sensor."""
        self._name = name
        self._state = None
        self._port = port
        self._baudrate = baudrate
        self._serial_loop_task = None
        self._template = value_template
        self._attributes = []

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Handle when an entity is about to be added to Home Assistant."""
        self._serial_loop_task = self.hass.loop.create_task(
            self.serial_read(self._port, self._baudrate))

    @asyncio.coroutine
    def serial_read(self, device, rate, **kwargs):
        """Read the data from the port."""
        import serial_asyncio
        reader, _ = yield from serial_asyncio.open_serial_connection(
            url=device, baudrate=rate, **kwargs)
        while True:
            line = yield from reader.readline()
            line = line.decode('utf-8').strip()

            try:
                data = json.loads(line)
                if isinstance(data, dict):
                    self._attributes = data
            except ValueError:
                pass

            if self._template is not None:
                line = self._template.async_render_with_possible_json_value(
                    line)

            self._state = line
            self.async_schedule_update_ha_state()

    @asyncio.coroutine
    def stop_serial_read(self):
        """Close resources."""
        if self._serial_loop_task:
            self._serial_loop_task.cancel()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def device_state_attributes(self):
        """Return the attributes of the entity (if any JSON present)."""
        return self._attributes

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

lang: Python
proba: 0.000001
diff:

@@ -2952,24 +2952,72 @@ line)%0A%0A + _LOGGER.debug(%22Received: %25s%22, line)%0A
commit: ad6b055b53d621addc3565209c7af095b6d6d0e7
subject: Add .delete() and the start of Room
old_file: hypchat/jsonobject.py
new_file: hypchat/jsonobject.py
old_contents:

from __future__ import absolute_import, division
import json
from . import requests


class Linker(object):
    """
    Responsible for on-demand loading of JSON objects.
    """
    def __init__(self, url, parent=None, _requests=None):
        self.url = url
        self.__parent = parent
        self._requests = _requests or __import__('requests')

    def __call__(self):
        def _object_hook(obj):
            if 'links' in obj:
                rv = JsonObject(obj)
                rv._requests = self._requests
                return rv
            else:
                return obj
        rv = json.JSONDecoder(object_hook=_object_hook).decode(self._requests.get(self.url).text)
        rv._requests = self._requests
        if self.__parent is not None:
            rv.parent = self.__parent
        return rv

    def __repr__(self):
        return "<%s url=%r>" % (type(self).__name__, self.url)


class JsonObject(dict):
    """
    Nice wrapper around the JSON objects and their links.
    """
    def __getattr__(self, name):
        if name in self.get('links', {}):
            return Linker(self['links'][name], parent=self, _requests=self._requests)
        elif name in self:
            return self[name]
        else:
            raise AttributeError("%r object has no attribute %r" % (type(self).__name__, name))

    def save(self):
        return requests.put(self['links']['self']).json()

lang: Python
proba: 0.000001
diff:

@@ -54,16 +54,26 @@ rt json%0A +import re%0A from . i
@@ -88,16 +88,39 @@ quests%0A%0A +_urls_to_objects = %7B%7D%0A%0A class Li
@@ -1230,8 +1230,590 @@ .json()%0A +%0A%09def delete(self):%0A%09%09return requests.delete(self%5B'links'%5D%5B'self'%5D).json()%0A%0Aclass Room(JsonObject):%0A%09def message(self, *p, **kw):%0A%09%09%22%22%22%0A%09%09Redirects to notification (for now)%0A%09%09%22%22%22%0A%09%09return self.notification(*p, **kw)%0A%0A%09def notification(self, message, color='yellow', notify=False, format='html'):%0A%09%09raise NotImplementedError%0A%0A%09def topic(self, text):%0A%09%09raise NotImplementedError%0A%0A%09def history(self, date='recent'):%0A%09%09raise NotImplementedError%0A%0A%09def invite(self, user, reason):%0A%09%09raise NotImplementedError%0A%0A_urls_to_objects%5Bre.compile(r'https://api.hipchat.com/v2/room/%5B%5E/%5D+')%5D = Room
commit: 52dfb3b81b0d7fa27db9acf8033f28f31f8290ad
subject: scale large icons to avoid out of memory
old_file: hypergan/tk_viewer.py
new_file: hypergan/tk_viewer.py
old_contents:

"""
Opens a window that displays an image.
Usage:

    from viewer import GlobalViewer
    GlobalViewer.update(image)
"""
import numpy as np
import os
import contextlib


class TkViewer:
    def __init__(self, title="HyperGAN", viewer_size=1, enabled=True):
        self.screen = None
        self.title = title
        self.viewer_size = viewer_size
        self.enabled = enabled
        self.enable_menu = True

    def update(self, gan, image):
        if not self.enabled:
            return

        original_image = image
        if len(np.shape(image)) == 2:
            s = np.shape(image)
            image = np.reshape(image, [s[0], s[1], 1])
            image = np.tile(image, [1,1,3])
        image = np.transpose(image, [1, 0,2])

        if not self.screen:
            with contextlib.redirect_stdout(None):
                import pygame

            import tkinter as tk
            import tkinter.ttk

            class ResizableFrame(tk.Frame):
                def __init__(self,parent,tkviewer=None,**kwargs):
                    tk.Frame.__init__(self,parent,**kwargs)
                    self.bind("<Configure>", self.on_resize)
                    self.height = kwargs['height']
                    self.width = kwargs['width']
                    self.tkviewer = tkviewer
                    self.aspect_ratio = float(self.width)/float(self.height)

                def on_resize(self,event):
                    wscale = float(event.width)/self.width
                    hscale = float(event.height)/self.height
                    self.width = event.width
                    self.height = event.height
                    self.config(width=self.width, height=self.height)
                    self.tkviewer.size = [self.width, self.height]
                    self.tkviewer.screen = self.tkviewer.pg.display.set_mode(self.tkviewer.size,self.tkviewer.pg.RESIZABLE)
                    self.enforce_aspect_ratio(event)

                def enforce_aspect_ratio(self, event):
                    desired_width = event.width
                    desired_height = int(event.width / self.aspect_ratio)

                    if desired_height > event.height:
                        desired_height = event.height
                        desired_width = int(event.height * self.aspect_ratio)

                    self.config(width=desired_width, height=desired_height)
                    self.tkviewer.size = [desired_width, desired_height]
                    self.tkviewer.screen = self.tkviewer.pg.display.set_mode(self.tkviewer.size,self.tkviewer.pg.RESIZABLE)

            self.size = [int(image.shape[0] * self.viewer_size), int(image.shape[1] * self.viewer_size)]

            self.pg = pygame
            self.tk = tk
            root = tk.Tk(className=self.title)
            embed = ResizableFrame(root, width=self.size[0], height=self.size[1], tkviewer=self)
            root.rowconfigure(0,weight=1)
            root.rowconfigure(1,weight=1)
            root.columnconfigure(0,weight=1)
            root.columnconfigure(1,weight=1)
            embed.pack(expand=tk.YES, fill=tk.BOTH)

            def _save_model(*args):
                gan.save(gan.save_file)

            def _exit(*args):
                gan.exit()

            def _create_status_bar(root):
                statusbar = tk.Frame(root, height=24)
                statusbar.pack(side=tk.BOTTOM, fill=tk.X)

                label_training = tk.Label(statusbar, text="Training", font=12)
                label_training.grid(row=0,column=0)
                sep = tkinter.ttk.Separator(statusbar, orient=tk.VERTICAL).grid(column=1, row=0, sticky='ns')
                label = tk.Label(statusbar, text="Starting", font=12)
                label.grid(row=0, column=2)

                def __update_step():
                    if hasattr(gan, 'step_count'):
                        label['text']=("Step " + str(gan.step_count))
                    root.after(1000, __update_step)

                __update_step()
                return statusbar

            menubar = tk.Menu(root)
            filemenu = tk.Menu(menubar, tearoff=0)
            filemenu.add_command(label="Save", command=_save_model, underline=0, accelerator="Ctrl+S")
            filemenu.add_separator()
            filemenu.add_command(label="Save and Exit", command=_exit, underline=10, accelerator="Ctrl+Q")
            menubar.add_cascade(label="File", menu=filemenu, underline=0)
            root.bind_all("<Control-q>", _exit)
            root.bind_all("<Control-s>", _save_model)

            if self.enable_menu:
                root.config(menu=menubar)

            _create_status_bar(root)

            # Tell pygame's SDL window which window ID to use
            os.environ['SDL_WINDOWID'] = str(embed.winfo_id())

            # Show the window so it's assigned an ID.
            root.update()
            self.root = root

            # Usual pygame initialization
            if self.viewer_size <= 0:
                self.viewer_size = 0.1
            self.aspect_w = image.shape[1] / image.shape[0]
            self.aspect_h = image.shape[0] / image.shape[1]
            self.temp_size = self.size
            self.screen = self.pg.display.set_mode(self.size,self.pg.RESIZABLE)
            self.pg.display.set_caption(self.title)

            root.title(self.title)
            root.wm_title(self.title)
            embed.winfo_toplevel().title(self.title)

        padw = 0
        padh = 0
        if original_image.shape[0] > original_image.shape[1]:
            padh = (original_image.shape[0] - original_image.shape[1])//2
        if original_image.shape[1] > original_image.shape[0]:
            padw = (original_image.shape[1] - original_image.shape[0])//2
        pad_image = np.pad(original_image, [(padw, padw), (padh,padh), (0,0)], 'constant')

        w = pad_image.shape[0]
        h = pad_image.shape[1]

        xdata = b'P6 ' + str(w).encode() + b' ' + str(h).encode() + b' 255 ' + pad_image.tobytes()
        tk_image = self.tk.PhotoImage(data=xdata, format="PPM", width=w, height=h)
        self.root.tk.call('wm', 'iconphoto', self.root._w, tk_image.subsample(max(1, w//256), max(1, h//256)))

        surface = self.pg.Surface([image.shape[0],image.shape[1]])
        self.pg.surfarray.blit_array(surface, image)
        self.screen.blit(self.pg.transform.scale(surface,self.size),(0,0))
        self.pg.display.flip()

    def tick(self):
        """
        Called repeatedly regardless of gan state.
        """
        if hasattr(self, 'root'):
            self.root.update()

lang: Python
proba: 0
diff:

@@ -1928,16 +1928,345 @@ (event)%0A + surface = self.tkviewer.pg.Surface(%5Bimage.shape%5B0%5D,image.shape%5B1%5D%5D)%0A self.tkviewer.pg.surfarray.blit_array(surface, image)%0A self.tkviewer.screen.blit(self.tkviewer.pg.transform.scale(surface,self.tkviewer.size),(0,0))%0A self.tkviewer.pg.display.flip()%0A%0A %0A
commit: 1ae34b1a9035ec8813c40477a6f83bfdf10413f3
subject: Add chkdown in the list of server metrics
old_file: haproxystats/metrics.py
new_file: haproxystats/metrics.py
old_contents:

"""Provide constants for grouping metric names.

There are seperated groups for frontend, backend, servers and haproxy daemon.
Metric names are the field names contained in the HAProxy statistics.
"""
from collections import namedtuple

DAEMON_METRICS = [
    'CompressBpsIn',
    'CompressBpsOut',
    'CompressBpsRateLim',
    'ConnRate',
    'ConnRateLimit',
    'CumConns',
    'CumReq',
    'CumSslConns',
    'CurrConns',
    'CurrSslConns',
    'Hard_maxconn',
    'MaxConnRate',
    'MaxSessRate',
    'MaxSslConns',
    'MaxSslRate',
    'MaxZlibMemUsage',
    'Maxconn',
    'Maxpipes',
    'Maxsock',
    'Memmax_MB',
    'PipesFree',
    'PipesUsed',
    'Run_queue',
    'SessRate',
    'SessRateLimit',
    'SslBackendKeyRate',
    'SslBackendMaxKeyRate',
    'SslCacheLookups',
    'SslCacheMisses',
    'SslFrontendKeyRate',
    'SslFrontendMaxKeyRate',
    'SslFrontendSessionReuse_pct',
    'SslRate',
    'SslRateLimit',
    'Tasks',
    'Ulimit-n',
    'ZlibMemUsage',
]

DAEMON_AVG_METRICS = ['Idle_pct', 'Uptime_sec']

COMMON = [
    'bin',
    'bout',
    'dresp',
    'hrsp_1xx',
    'hrsp_2xx',
    'hrsp_3xx',
    'hrsp_4xx',
    'hrsp_5xx',
    'hrsp_other',
    'rate',
    'rate_max',
    'scur',
    'slim',
    'smax',
    'stot'
]

SERVER_METRICS = [
    'chkfail',
    'cli_abrt',
    'econ',
    'eresp',
    'lbtot',
    'qcur',
    'qmax',
    'srv_abrt',
    'wredis',
    'wretr'
] + COMMON

SERVER_AVG_METRICS = ['qtime', 'rtime', 'throttle', 'ttime', 'weight']

BACKEND_METRICS = [
    'chkdown',
    'cli_abrt',
    'comp_byp',
    'comp_in',
    'comp_out',
    'comp_rsp',
    'downtime',
    'dreq',
    'econ',
    'eresp',
    'lbtot',
    'qcur',
    'qmax',
    'srv_abrt',
    'wredis',
    'wretr',
] + COMMON

BACKEND_AVG_METRICS = [
    'act',
    'bck',
    'rtime',
    'ctime',
    'qtime',
    'ttime',
    'weight'
]

FRONTEND_METRICS = [
    'comp_byp',
    'comp_in',
    'comp_out',
    'comp_rsp',
    'dreq',
    'ereq',
    'rate_lim',
    'req_rate',
    'req_rate_max',
    'req_tot'
] + COMMON

MetricNamesPercentage = namedtuple('MetricsNamesPercentage',
                                   ['name', 'limit', 'title'])

lang: Python
proba: 0
diff:

@@ -1291,16 +1291,31 @@ kfail',%0A + 'chkdown',%0A 'cli
commit: fe9226898772c4ff909f9c3f0cb05c271333b73a
subject: Make auth_url lookup dynamic
old_file: heat/common/auth_url.py
new_file: heat/common/auth_url.py
old_contents:

#
#    Copyright 2013 OpenStack Foundation
#
#    Licensed under the Apache License, Version 2.0 (the "License");
#    you may not use this file except in compliance with the License.
#    You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS,
#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
#    implied.
#    See the License for the specific language governing permissions and
#    limitations under the License.

from oslo_config import cfg
from webob import exc

from heat.common import endpoint_utils
from heat.common.i18n import _
from heat.common import wsgi


class AuthUrlFilter(wsgi.Middleware):

    def __init__(self, app, conf):
        super(AuthUrlFilter, self).__init__(app)
        self.conf = conf
        self.auth_url = self._get_auth_url()

    def _get_auth_url(self):
        if 'auth_uri' in self.conf:
            return self.conf['auth_uri']
        else:
            return endpoint_utils.get_auth_uri(v3=False)

    def _validate_auth_url(self, auth_url):
        """Validate auth_url to ensure it can be used."""
        if not auth_url:
            raise exc.HTTPBadRequest(_('Request missing required header '
                                       'X-Auth-Url'))
        allowed = cfg.CONF.auth_password.allowed_auth_uris
        if auth_url not in allowed:
            raise exc.HTTPUnauthorized(_('Header X-Auth-Url "%s" not '
                                         'an allowed endpoint') % auth_url)
        return True

    def process_request(self, req):
        auth_url = self.auth_url
        if cfg.CONF.auth_password.multi_cloud:
            auth_url = req.headers.get('X-Auth-Url')
            self._validate_auth_url(auth_url)

        req.headers['X-Auth-Url'] = auth_url
        return None


def filter_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)

    def auth_url_filter(app):
        return AuthUrlFilter(app, conf)
    return auth_url_filter

lang: Python
proba: 0.000009
diff:

@@ -890,24 +890,129 @@ self. +_auth_url = None%0A%0A @property%0A def auth_url(self):%0A if not self._auth_url:%0A self._ auth_url = s
@@ -1030,16 +1030,46 @@ th_url() +%0A return self._auth_url %0A%0A de
commit: a79db7cf85dac6d74d7929137f640a0ac10ddf7d
subject: return from sys.exit for easier testing
old_file: p7doi/__init__.py
new_file: p7doi/__init__.py
old_contents:

# -*- coding: UTF-8 -*-

from __future__ import print_function

import webbrowser
import sys

__version__ = '0.0.1'

DOI_URL = 'http://rproxy.sc.univ-paris-diderot.fr/login' + \
          '?url=http://dx.doi.org/%s'


def make_doi_url(doi):
    """
    Return an URL for the given DOI
    """
    return DOI_URL % doi


def open_url(url):
    """
    Open an URL in the default browser, in a new tab if possible
    """
    webbrowser.open_new_tab(url)


def open_doi(doi):
    """
    Open the URL for the given DOI in the default browser
    """
    open_url(make_doi_url(doi))


def cli():
    """
    CLI endpoint
    """
    if len(sys.argv) < 2:
        print('Usage: %s <doi>' % sys.argv[0])
        sys.exit(1)

    doi = sys.argv[1]

    if doi.startswith('-'):
        if doi in ['-v', '-version', '--version']:
            print('p7doi v%s' % __version__)
        else:
            print("Unrecognized option: '%s'" % doi)
            sys.exit(1)
        sys.exit(0)

    open_doi(doi)

lang: Python
proba: 0.000001
diff:

@@ -685,32 +685,39 @@ rgv%5B0%5D)%0A +return sys.exit(1)%0A%0A
@@ -931,32 +931,39 @@ oi)%0A +return sys.exit(1)%0A
@@ -966,16 +966,23 @@ +return sys.exit
commit: 7ed627991632cf761dfccb553f830a6e9e3c37e9
subject: fix bitonality test for solid color images
old_file: kraken/lib/util.py
new_file: kraken/lib/util.py
old_contents:

"""
Ocropus's magic PIL-numpy array conversion routines. They express slightly
different behavior from PIL.Image.toarray().
"""
import unicodedata
import numpy as np

from PIL import Image

__all__ = ['pil2array', 'array2pil']


def pil2array(im: Image, alpha: int = 0) -> np.array:
    if im.mode == '1':
        return np.array(im.convert('L'))
    return np.array(im)


def array2pil(a: np.array) -> Image:
    if a.dtype == np.dtype("B"):
        if a.ndim == 2:
            return Image.frombytes("L", (a.shape[1], a.shape[0]), a.tostring())
        elif a.ndim == 3:
            return Image.frombytes("RGB", (a.shape[1], a.shape[0]), a.tostring())
        else:
            raise Exception("bad image rank")
    elif a.dtype == np.dtype('float32'):
        return Image.frombytes("F", (a.shape[1], a.shape[0]), a.tostring())
    else:
        raise Exception("unknown image type")


def is_bitonal(im: Image) -> bool:
    """
    Tests a PIL.Image for bitonality.

    Args:
        im (PIL.Image): Image to test

    Returns:
        True if the image contains only two different color values. False
        otherwise.
    """
    return im.getcolors(2) is not None


def get_im_str(im: Image) -> str:
    return im.filename if hasattr(im, 'filename') else str(im)


def is_printable(char: str) -> bool:
    """
    Determines if a chode point is printable/visible when printed.

    Args:
        char (str): Input code point.

    Returns:
        True if printable, False otherwise.
    """
    letters = ('LC', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu')
    numbers = ('Nd', 'Nl', 'No')
    punctuation = ('Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps')
    symbol = ('Sc', 'Sk', 'Sm', 'So')
    printable = letters + numbers + punctuation + symbol

    return unicodedata.category(char) in printable


def make_printable(char: str) -> str:
    """
    Takes a Unicode code point and return a printable representation of it.

    Args:
        char (str): Input code point

    Returns:
        Either the original code point, the name of the code point if it is a
        combining mark, whitespace etc., or the hex code if it is a control
        symbol.
    """
    if not char or is_printable(char):
        return char
    elif unicodedata.category(char) in ('Cc', 'Cs', 'Co'):
        return '0x{:x}'.format(ord(char))
    else:
        return unicodedata.name(char)

lang: Python
proba: 0.000001
diff:

@@ -1236,16 +1236,46 @@ not None + and len(im.getcolors(2)) == 2 %0A%0A%0Adef g
commit: 4d5889e87399e940ebb08c7513f24466c0a93eaf
subject: Remove useless space.
old_file: plugins/ChannelStats/config.py
new_file: plugins/ChannelStats/config.py
old_contents:

###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#   * Redistributions of source code must retain the above copyright notice,
#     this list of conditions, and the following disclaimer.
#   * Redistributions in binary form must reproduce the above copyright notice,
#     this list of conditions, and the following disclaimer in the
#     documentation and/or other materials provided with the distribution.
#   * Neither the name of the author of this software nor the name of
#     contributors to this software may be used to endorse or promote products
#     derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###

import re

import supybot.conf as conf
import supybot.registry as registry
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('ChannelStats')


def configure(advanced):
    # This will be called by supybot to configure this module. advanced is
    # a bool that specifies whether the user identified himself as an advanced
    # user or not. You should effect your configuration by manipulating the
    # registry as appropriate.
    from supybot.questions import expect, anything, something, yn
    conf.registerPlugin('ChannelStats', True)


class Smileys(registry.Value):
    def set(self, s):
        L = s.split()
        self.setValue(L)

    def setValue(self, v):
        self.s = ' '.join(v)
        self.value = re.compile('|'.join(map(re.escape, v)))

    def __str__(self):
        return self.s


ChannelStats = conf.registerPlugin('ChannelStats')
conf.registerChannelValue(ChannelStats, 'selfStats',
    registry.Boolean(True, _("""Determines whether the bot will keep channel
    statistics on itself, possibly skewing the channel stats (especially in
    cases where the bot is relaying between channels on a network).""")))
conf.registerChannelValue(ChannelStats, 'smileys',
    Smileys(':) ;) ;] :-) :-D :D :P :p (= =)'.split(), _("""Determines what
    words (i.e., pieces of text with no spaces in them) are considered
    'smileys' for the purposes of stats-keeping.""")))
conf.registerChannelValue(ChannelStats, 'frowns',
    Smileys(':| :-/ :-\\ :\\ :/ :( :-( :\'('.split(), _("""Determines what
    words (i.e., pieces of text with no spaces in them ) are considered
    'frowns' for the purposes of stats-keeping.""")))

# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:

lang: Python
proba: 0
diff:

@@ -3209,17 +3209,16 @@ in them - ) are co
commit: 8d7bde02f376acfdb4a06335c541133a31f76abe
subject: Fix unslicable ("striped") model rendering and add overhang color theming
old_file: plugins/SolidView/SolidView.py
new_file: plugins/SolidView/SolidView.py
old_contents:

# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.

from UM.View.View import View
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Scene.Selection import Selection
from UM.Resources import Resources
from UM.Application import Application
from UM.Preferences import Preferences
from UM.View.Renderer import Renderer
from UM.Settings.Validator import ValidatorState

from UM.View.GL.OpenGL import OpenGL

import cura.Settings

from cura.Settings.ExtruderManager import ExtruderManager

import math


## Standard view for mesh models.
class SolidView(View):
    def __init__(self):
        super().__init__()

        Preferences.getInstance().addPreference("view/show_overhang", True)

        self._enabled_shader = None
        self._disabled_shader = None

        self._extruders_model = cura.Settings.ExtrudersModel()

    def beginRendering(self):
        scene = self.getController().getScene()
        renderer = self.getRenderer()

        if not self._enabled_shader:
            self._enabled_shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "overhang.shader"))

        if not self._disabled_shader:
            self._disabled_shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "striped.shader"))
            theme = Application.getInstance().getTheme()
            self._disabled_shader.setUniformValue("u_diffuseColor1", theme.getColor("model_unslicable").getRgbF())
            self._disabled_shader.setUniformValue("u_diffuseColor2", theme.getColor("model_unslicable_alt").getRgbF())
            self._disabled_shader.setUniformValue("u_width", 50.0)

        multi_extrusion = False

        global_container_stack = Application.getInstance().getGlobalContainerStack()
        if global_container_stack:
            multi_extrusion = global_container_stack.getProperty("machine_extruder_count", "value") > 1

            if multi_extrusion:
                support_extruder_nr = global_container_stack.getProperty("support_extruder_nr", "value")
                support_angle_stack = ExtruderManager.getInstance().getExtruderStack(support_extruder_nr)
                if not support_angle_stack:
                    support_angle_stack = global_container_stack
            else:
                support_angle_stack = global_container_stack

            if Preferences.getInstance().getValue("view/show_overhang"):
                angle = support_angle_stack.getProperty("support_angle", "value")
                # Make sure the overhang angle is valid before passing it to the shader
                # Note: if the overhang angle is set to its default value, it does not need to get validated (validationState = None)
                if angle is not None and global_container_stack.getProperty("support_angle", "validationState") in [None, ValidatorState.Valid]:
                    self._enabled_shader.setUniformValue("u_overhangAngle", math.cos(math.radians(90 - angle)))
                else:
                    self._enabled_shader.setUniformValue("u_overhangAngle", math.cos(math.radians(0)))  #Overhang angle of 0 causes no area at all to be marked as overhang.
            else:
                self._enabled_shader.setUniformValue("u_overhangAngle", math.cos(math.radians(0)))

        for node in DepthFirstIterator(scene.getRoot()):
            if not node.render(renderer):
                if node.getMeshData() and node.isVisible():
                    uniforms = {}
                    shade_factor = 1.0
                    if not multi_extrusion:
                        if global_container_stack:
                            material = global_container_stack.findContainer({ "type": "material" })
                            material_color = material.getMetaDataEntry("color_code", default = self._extruders_model.defaultColors[0]) if material else self._extruders_model.defaultColors[0]
                        else:
                            material_color = self._extruders_model.defaultColors[0]
                    else:
                        # Get color to render this mesh in from ExtrudersModel
                        extruder_index = 0
                        extruder_id = node.callDecoration("getActiveExtruder")
                        if extruder_id:
                            extruder_index = max(0, self._extruders_model.find("id", extruder_id))

                        try:
                            material_color = self._extruders_model.getItem(extruder_index)["color"]
                        except KeyError:
                            material_color = self._extruders_model.defaultColors[0]

                        if extruder_index != ExtruderManager.getInstance().activeExtruderIndex:
                            # Shade objects that are printed with the non-active extruder 25% darker
                            shade_factor = 0.6

                    try:
                        # Colors are passed as rgb hex strings (eg "#ffffff"), and the shader needs
                        # an rgba list of floats (eg [1.0, 1.0, 1.0, 1.0])
                        uniforms["diffuse_color"] = [
                            shade_factor * int(material_color[1:3], 16) / 255,
                            shade_factor * int(material_color[3:5], 16) / 255,
                            shade_factor * int(material_color[5:7], 16) / 255,
                            1.0
                        ]
                    except ValueError:
                        pass

                    if hasattr(node, "_outside_buildarea"):
                        if node._outside_buildarea:
                            renderer.queueNode(node, shader = self._disabled_shader)
                        else:
                            renderer.queueNode(node, shader = self._enabled_shader, uniforms = uniforms)
                    else:
                        renderer.queueNode(node, material = self._enabled_shader, uniforms = uniforms)
                if node.callDecoration("isGroup") and Selection.isSelected(node):
                    renderer.queueNode(scene.getRoot(), mesh = node.getBoundingBoxMesh(), mode = Renderer.RenderLines)

    def endRendering(self):
        pass

    #def _onPreferenceChanged(self, preference):
        #if preference == "view/show_overhang": ## Todo: This a printer only setting. Should be removed from Uranium.
            #self._enabled_material = None

lang: Python
proba: 0
diff:

@@ -429,16 +429,47 @@ orState%0A +from UM.Math.Color import Color %0Afrom UM
@@ -1202,16 +1202,192 @@ hader%22)) +%0A theme = Application.getInstance().getTheme()%0A self._enabled_shader.setUniformValue(%22u_overhangColor%22, Color(*theme.getColor(%22model_overhang%22).getRgb())) %0A%0A
@@ -1677,16 +1677,23 @@ olor1%22, +Color(* theme.ge
@@ -1717,34 +1717,34 @@ licable%22).getRgb -F ( +) ))%0A s
@@ -1799,16 +1799,23 @@ olor2%22, +Color(* theme.ge
@@ -1855,10 +1855,10 @@ tRgb -F ( +) ))%0A
@@ -4829,17 +4829,16 @@ eyError: - %0A
commit: eaac4e45928b7008e6c561e28e9b5ed5dc427587
subject: fix redis storage
old_file: labDNS/storages.py
new_file: labDNS/storages.py
old_contents:

try:
    import redis
except ImportError:
    redis = None


class BaseStorage:
    DEFAULT_CONFIG = dict()

    def __init__(self, config):
        self.config = self.DEFAULT_CONFIG
        self._configure(config)

    def get(self, key):
        raise NotImplementedError

    def _configure(self, config):
        self.config.update(config)


class DictStorage(BaseStorage):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dictionary = self.config

    def get(self, key, default=None):
        return self.dictionary.get(key, default)


class RedisStorage(BaseStorage):
    DEFAULT_SETTINGS = dict(host='localhost', port=6379, db=0)

    def __init__(self, config):
        self.redis = redis.StrictRedis(**self.config)

    def get(self, key, default=None):
        return self.redis.get(key, default)

lang: Python
proba: 0
diff:

@@ -698,32 +698,83 @@ nit__(self, -config): +*args, **kwargs):%0A super().__init__(*args, **kwargs) %0A sel
@@ -880,27 +880,46 @@ lf.redis.get(key -, +).decode(%22utf-8%22) or default )%0A
@@ -908,18 +908,17 @@ f-8%22) or default -) %0A
commit: 36cd155bb317872a43a0e455a012a06cd5b6ffff
subject: store callbacks in OrderedSets
old_file: hotdoc/utils/signals.py
new_file: hotdoc/utils/signals.py
old_contents:

# -*- coding: utf-8 -*-
# A signal/slot implementation
#
# Author: Thiago Marcos P. Santos
# Author: Christopher S. Case
# Author: David H. Bronke
# Author: Mathieu Duponchelle
# Author: Thibault Saunier
# Created: August 28, 2008
# Updated: January 29, 2016
# License: MIT
# http://code.activestate.com/recipes/577980-improved-signalsslots-implementation-in-python/

"""
Simple signalling system
"""

import unittest
import inspect


class Slot:
    """Banana banana"""
    # pylint: disable=too-few-public-methods
    def __init__(self, func, *extra_args):
        self.extra_args = extra_args
        if inspect.ismethod(func):
            self.obj = func.__self__
            self.func = func.__func__
        else:
            self.obj = None
            self.func = func

    def __hash__(self):
        return hash((self.func, self.extra_args))

    def __eq__(self, other):
        return (self.func, self.extra_args, self.obj) == (
            other.func, other.extra_args, other.obj)

    def __ne__(self, other):
        return not self == other

    def __call__(self, *args, **kwargs):
        _args = []
        if self.obj:
            _args.append(self.obj)
        _args += list(args) + list(self.extra_args)
        return self.func(*_args, **kwargs)


class Signal(object):
    """
    The Signalling class
    """
    def __init__(self, optimized=False):
        self._functions = set()
        self._after_functions = set()
        self._optimized = optimized

    def __call__(self, *args, **kargs):
        res_list = []
        # Call handler functions
        for func in self._functions:
            res = func(*args, **kargs)
            if res and self._optimized:
                return res
            res_list.append(res)
        for func in self._after_functions:
            res = func(*args, **kargs)
            if res and self._optimized:
                return res
            res_list.append(res)
        if self._optimized:
            return None
        return res_list

    def connect(self, slot, *extra_args):
        """
        @slot: The method to be called on signal emission

        Connects to @slot
        """
        slot = Slot(slot, *extra_args)
        self._functions.add(slot)

    def connect_after(self, slot, *extra_args):
        """
        @slot: The method to be called at last stage of signal emission

        Connects to the signal after the signals has been handled by other
        connect callbacks.
        """
        slot = Slot(slot, *extra_args)
        self._after_functions.add(slot)

    def disconnect(self, slot, *extra_args):
        """
        Disconnect @slot from the signal
        """
        slot = Slot(slot, *extra_args)
        if slot in self._functions:
            self._functions.remove(slot)
        elif slot in self._after_functions:
            self._after_functions.remove(slot)

    def clear(self):
        """
        Cleanup the signal
        """
        self._functions.clear()
        self._after_functions.clear()


class TestSignals(unittest.TestCase):
    """Banana Banana"""

    def test_connect_func(self):
        """Banana Banana"""
        called = []

        def func(arg, extra_arg):
            """Banana Banana"""
            self.assertEqual(arg, 1)
            self.assertEqual(extra_arg, "extra")
            called.append(True)

        signal = Signal()
        signal.connect(func, "extra")
        signal(1)
        self.assertEqual(called, [True])

    def test_connect_method(self):
        """Banana Banana"""
        called = []

        # pylint: disable=too-few-public-methods
        class _Test(unittest.TestCase):
            """Banana Banana"""
            def method(self, arg, extra_arg):
                """Banana Banana"""
                self.assertEqual(arg, 1)
                self.assertEqual(extra_arg, "extra")
                called.append(True)

        signal = Signal()
        test = _Test()
        signal.connect(test.method, "extra")
        signal(1)
        self.assertEqual(called, [True])

lang: Python
proba: 0
diff:

@@ -433,16 +433,58 @@ nspect%0A%0A +from hotdoc.utils.utils import OrderedSet%0A %0Aclass S
@@ -1437,33 +1437,40 @@ lf._functions = -s +OrderedS et()%0A sel
@@ -1490,17 +1490,24 @@ tions = -s +OrderedS et()%0A
commit: e4ac571eb44954254b15857d88bde145e66d297b
subject: Remove typo
old_file: labellines/core.py
new_file: labellines/core.py
old_contents:

from math import atan2, degrees
import warnings

import numpy as np

import matplotlib.pyplot as plt
from matplotlib.dates import date2num, DateConverter, num2date
from matplotlib.container import ErrorbarContainer
from datetime import datetime


# Label line with line2D label data
def labelLine(line, x, label=None, align=True, drop_label=False, **kwargs):
    '''Label a single matplotlib line at position x

    Parameters
    ----------
    line : matplotlib.lines.Line
       The line holding the label
    x : number
       The location in data unit of the label
    label : string, optional
       The label to set. This is inferred from the line by default
    drop_label : bool, optional
       If True, the label is consumed by the function so that subsequent
       calls to e.g. legend do not use it anymore.
    kwargs : dict, optional
       Optional arguments passed to ax.text
    '''
    ax = line.axes
    xdata = line.get_xdata()
    ydata = line.get_ydata()

    mask = np.isfinite(ydata)
    if mask.sum() == 0:
        raise Exception('The line %s only contains nan!' % line)

    # Find first segment of xdata containing x
    if len(xdata) == 2:
        i = 0
        xa = min(xdata)
        xb = max(xdata)
    else:
        for i, (xa, xb) in enumerate(zip(xdata[:-1], xdata[1:])):
            if min(xa, xb) <= x <= max(xa, xb):
                break
        else:
            raise Exception('x label location is outside data range!')

    def x_to_float(x):
        """Make sure datetime values are properly converted to floats."""
v
        return date2num(x) if isinstance(x, datetime) else x

    xfa = x_to_float(xa)
    xfb = x_to_float(xb)
    ya = ydata[i]
    yb = ydata[i + 1]
    y = ya + (yb - ya) * (x_to_float(x) - xfa) / (xfb - xfa)

    if not (np.isfinite(ya) and np.isfinite(yb)):
        warnings.warn(("%s could not be annotated due to `nans` values. "
                       "Consider using another location via the `x` argument.") % line,
                      UserWarning)
        return

    if not label:
        label = line.get_label()

    if drop_label:
        line.set_label(None)

    if align:
        # Compute the slope and label rotation
        screen_dx, screen_dy = ax.transData.transform((xfa, ya)) - ax.transData.transform((xfb, yb))
        rotation = (degrees(atan2(screen_dy, screen_dx)) + 90) % 180 - 90
    else:
        rotation = 0

    # Set a bunch of keyword arguments
    if 'color' not in kwargs:
        kwargs['color'] = line.get_color()

    if ('horizontalalignment' not in kwargs) and ('ha' not in kwargs):
        kwargs['ha'] = 'center'

    if ('verticalalignment' not in kwargs) and ('va' not in kwargs):
        kwargs['va'] = 'center'

    if 'backgroundcolor' not in kwargs:
        kwargs['backgroundcolor'] = ax.get_facecolor()

    if 'clip_on' not in kwargs:
        kwargs['clip_on'] = True

    if 'zorder' not in kwargs:
        kwargs['zorder'] = 2.5

    ax.text(x, y, label, rotation=rotation, **kwargs)


def labelLines(lines, align=True, xvals=None, drop_label=False, shrink_factor=0.05, **kwargs):
    '''Label all lines with their respective legends.

    Parameters
    ----------
    lines : list of matplotlib lines
       The lines to label
    align : boolean, optional
       If True, the label will be aligned with the slope of the line
       at the location of the label. If False, they will be horizontal.
    xvals : (xfirst, xlast) or array of float, optional
       The location of the labels. If a tuple, the labels will be
       evenly spaced between xfirst and xlast (in the axis units).
    drop_label : bool, optional
       If True, the label is consumed by the function so that subsequent
       calls to e.g. legend do not use it anymore.
    shrink_factor : double, optional
       Relative distance from the edges to place closest labels. Defaults to 0.05.
    kwargs : dict, optional
       Optional arguments passed to ax.text
    '''
    ax = lines[0].axes
    labLines, labels = [], []
    handles, allLabels = ax.get_legend_handles_labels()

    all_lines = []
    for h in handles:
        if isinstance(h, ErrorbarContainer):
            all_lines.append(h.lines[0])
        else:
            all_lines.append(h)

    # Take only the lines which have labels other than the default ones
    for line in lines:
        if line in all_lines:
            label = allLabels[all_lines.index(line)]
            labLines.append(line)
            labels.append(label)

    if xvals is None:
        xvals = ax.get_xlim()  # set axis limits as annotation limits, xvals now a tuple
        xvals_rng = xvals[1] - xvals[0]
        shrinkage = xvals_rng * shrink_factor
        xvals = (xvals[0] + shrinkage, xvals[1] - shrinkage)

    if type(xvals) == tuple:
        xmin, xmax = xvals
        xscale = ax.get_xscale()
        if xscale == "log":
            xvals = np.logspace(np.log10(xmin), np.log10(xmax), len(labLines)+2)[1:-1]
        else:
            xvals = np.linspace(xmin, xmax, len(labLines)+2)[1:-1]

        if isinstance(ax.xaxis.converter, DateConverter):
            # Convert float values back to datetime in case of datetime axis
            xvals = [num2date(x).replace(tzinfo=ax.xaxis.get_units())
                     for x in xvals]

    for line, x, label in zip(labLines, xvals, labels):
        labelLine(line, x, label, align, drop_label, **kwargs)

lang: Python
proba: 0.999292
diff:

@@ -1554,17 +1554,16 @@ ats.%22%22%22%0A -v
13ffa4113341c13e635896f94a29df5cff5c0348
Build objects in JSON generator tool
test/generate-json.py
test/generate-json.py
#!/usr/bin/env python

import argparse
import random


def random_array_element():
    return random.choice(['123', 'true', 'false', 'null', '3.1415', '"foo"'])


def main():
    parser = argparse.ArgumentParser(description="Generate a large JSON document.")
    parser.add_argument('--array-size', nargs=1, type=int, default=[100000])
    parser.add_argument('--array-type', choices=['int', 'array', 'object'], default='object')
    parser.add_argument('--array-elements', nargs=1, type=int, default=[3])
    args = parser.parse_args()

    n = args.array_size[0]
    type = args.array_type
    print('{"x": [')
    if type == 'int':
        elem_format = "%d%s"
        need_i = True
    elif type == 'object':
        elem_format = '{"a": %d}%s'
        need_i = True
    elif type == 'array':
        nelems = args.array_elements[0]
        arr = []
        if nelems > 0:
            arr.append('%s')
        if nelems > 1:
            arr.extend([random_array_element() for _ in range(nelems-1)])
        elem_format = '[%s]%%s' % ", ".join(arr)
        need_i = nelems > 0
    else:
        raise Exception("Unknown array type %s" % type)
    for i in range(n):
        semicolon = "," if i < n-1 else ""
        if need_i:
            print(elem_format % (i, semicolon))
        else:
            print(elem_format % semicolon)
    print(']}')


if __name__ == "__main__":
    main()
Python
0.000002
@@ -505,36 +505,333 @@ -args = parser.parse_args()%0A%0A +parser.add_argument('--object-size', nargs=1, type=int, default=None)%0A args = parser.parse_args()%0A%0A if args.object_size:%0A print('%7B')%0A for i in range(args.object_size%5B0%5D - 1):%0A print(' %22x%25d%22: %25s,' %25 (i, random_array_element()))%0A print(' %22no%22: %22comma%22')%0A print('%7D')%0A else:%0A @@ -849,24 +849,28 @@ ray_size%5B0%5D%0A + type = a @@ -884,16 +884,20 @@ ay_type%0A + prin @@ -913,16 +913,20 @@ %5B')%0A + + if type @@ -931,24 +931,28 @@ e == 'int':%0A + elem @@ -968,32 +968,36 @@ %22%25d%25s%22%0A + + need_i = True%0A @@ -990,32 +990,36 @@ ed_i = True%0A + + elif type == 'ob @@ -1021,24 +1021,28 @@ = 'object':%0A + elem @@ -1065,32 +1065,36 @@ %25d%7D%25s'%0A + + need_i = True%0A @@ -1087,24 +1087,28 @@ ed_i = True%0A + elif typ @@ -1129,16 +1129,20 @@ + nelems = @@ -1173,16 +1173,20 @@ + + arr = %5B%5D @@ -1186,16 +1186,20 @@ rr = %5B%5D%0A + @@ -1221,24 +1221,28 @@ + arr.append(' @@ -1246,32 +1246,36 @@ d('%25s')%0A + + if nelems %3E 1:%0A @@ -1273,16 +1273,20 @@ ms %3E 1:%0A + @@ -1351,32 +1351,36 @@ ms-1)%5D)%0A + elem_format = '%5B @@ -1404,32 +1404,36 @@ in(arr)%0A + + need_i = nelems @@ -1436,16 +1436,20 @@ ems %3E 0%0A + else @@ -1442,32 +1442,36 @@ 0%0A else:%0A + raise Ex @@ -1510,24 +1510,28 @@ %25 type)%0A + + for i in ran @@ -1537,16 +1537,20 @@ nge(n):%0A + @@ -1588,24 +1588,28 @@ %22%22%0A + if need_i:%0A @@ -1607,16 +1607,20 @@ need_i:%0A + @@ -1659,38 +1659,46 @@ colon))%0A + else:%0A + prin @@ -1724,16 +1724,20 @@ icolon)%0A + prin
8ec6662a903aeea9613a53016d4f684a833605e5
clean up imports
porkchop/commandline.py
porkchop/commandline.py
import logging
from optparse import OptionParser
from server import GetHandler
import sys

from porkchop.plugin import PorkchopPluginHandler
from porkchop.server import GetHandler, ThreadedHTTPServer

def coerce_number(s):
  try:
    return int(s)
  except:
    return float(s)

def get_logger(level = logging.INFO):
  logger = logging.getLogger('porkchop')
  logger.setLevel(logging.DEBUG)
  ch = logging.StreamHandler()
  ch.setLevel(level)
  formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
  ch.setFormatter(formatter)
  logger.addHandler(ch)
  return logger

def main():
  plugin_dir = '/usr/share/porkchop/plugins'
  listen_address = ''
  listen_port = 5000

  parser = OptionParser()
  parser.add_option('-d', dest='plugindir', default=plugin_dir,
                    help='Load plugins from DIR (default: %s)' % plugin_dir,
                    metavar='DIR')
  parser.add_option('-s', dest='listen_address', default=listen_address,
                    help='Bind to ADDRESS', metavar='ADDRESS')
  parser.add_option('-p', type="int", dest='listen_port', default=listen_port,
                    help='Bind to PORT (default: %d)' % listen_port,
                    metavar='PORT')
  parser.add_option('-v', dest='verbose', default=False,
                    help='Verbose logging', action='store_true')

  (options, args) = parser.parse_args()

  if options.verbose:
    logger = get_logger(logging.DEBUG)
  else:
    logger = get_logger()

  PorkchopPluginHandler(options.plugindir)
  server = ThreadedHTTPServer((options.listen_address, options.listen_port),
                              GetHandler)
  server.serve_forever()

def collector():
  import requests
  import socket
  import sys
  import time

  carbon_host = 'localhost'
  carbon_port = 2003
  porkchop_url = 'http://localhost:5000/'
  interval = 10
  prefix = 'porkchop.%s' % socket.gethostname().split('.')[0].replace('.','_')

  parser = OptionParser()
  parser.add_option('--carbon-host', dest='carbon_host', default=carbon_host,
                    help='Connect to carbon on HOST (default: %s)' % carbon_host,
                    metavar='HOST')
  parser.add_option('--carbon-port', type='int', dest='carbon_port',
                    default=carbon_port,
                    help='Connect to carbon on PORT (default: %d)' % carbon_port,
                    metavar='PORT')
  parser.add_option('--porkchop-url', dest='porkchop_url', default=porkchop_url,
                    help='Connect to porkchop on URL (default: %s)' % porkchop_url,
                    metavar='URL')
  parser.add_option('-i', type='int', dest='interval', default=interval,
                    help='Fetch data at INTERVAL (default: %d)' % interval,
                    metavar='INTERVAL')
  parser.add_option('-n', dest='noop', default=False,
                    help='Don\'t actually send to graphite', action='store_true')
  parser.add_option('-P', dest='prefix', default=prefix,
                    help='Graphite prefix (default: %s)' % prefix)
  parser.add_option('-v', dest='verbose', default=False,
                    help='Verbose logging', action='store_true')

  (options, args) = parser.parse_args()

  if options.verbose:
    logger = get_logger(logging.DEBUG)
  else:
    logger = get_logger()

  if not options.noop:
    carbon = Carbon(options.carbon_host, options.carbon_port, logger)

  while True:
    try:
      logger.debug('Fetching porkchop data from %s', options.porkchop_url)
      r = requests.get(options.porkchop_url)
      r.raise_for_status()

      lines = []
      now = int(time.time())

      for line in r.content.strip('\n').splitlines():
        (key, val) = line.lstrip('/').split(' ', 1)
        key = '.'.join([options.prefix, key.replace('/', '.')])
        try:
          if not options.noop:
            carbon.data.append((key, coerce_number(val), now))
        except:
          pass

      if not options.noop:
        carbon.send()
    except:
      logger.error('Got bad response code from porkchop: %s', sys.exc_info()[1])

    logger.debug('Sleeping for %d', options.interval)
    time.sleep(options.interval)

class Carbon(object):
  def __init__(self, host, port, logger):
    self.data = []
    self.host = host
    self.port = port
    self.logger = logger

    try:
      self.sock = self._connect()
    except socket.error:
      self.logger.fatal('Unable to connect to carbon.')

  def _connect(self):
    import socket

    self.logger.info('Connecting to carbon on %s:%d', self.host, self.port)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((self.host, self.port))

    return sock

  def send(self):
    self.logger.info('Sending to carbon.')
    try:
      for met in self.data:
        self.logger.debug(met)
      self.sock.sendall('\n'.join(['%s %s %s' % met for met in self.data]))
    except socket.socket.error:
      self.logger.error('Error send to carbon, trying to reconnect.')
      self.sock = self._connect()
Python
0.000001
@@ -45,49 +45,8 @@ rser -%0Afrom server import GetHandler%0Aimport sys %0A%0Afr @@ -96,67 +96,8 @@ dler -%0Afrom porkchop.server import GetHandler, ThreadedHTTPServer %0A%0Ade @@ -507,16 +507,109 @@ main():%0A + from server import GetHandler%0A from porkchop.server import GetHandler, ThreadedHTTPServer%0A plugin
02e4d3664b4436724ad11f045bbfb631014eeadb
Handle unload errors
hubbot/modulehandler.py
hubbot/modulehandler.py
import operator
import sys

from twisted.internet import threads

from hubbot.response import IRCResponse, ResponseType
from hubbot.moduleinterface import ModuleAccessLevel


class ModuleHandler(object):
    def __init__(self, bot):
        """
        @type bot: hubbot.bot.Hubbot
        """
        self.bot = bot
        self.modules = {}
        self.moduleCaseMap = {}
        self.mappedTriggers = {}
        self.modulesToLoad = bot.bothandler.config.serverItemWithDefault(self.bot.server, "modulesToLoad", ["all"])

    def sendResponse(self, response):
        """
        @type response: hubbot.response.IRCResponse || list
        """
        responses = []

        if hasattr(response, "__iter__"):
            for r in response:
                if r is None or r.Response is None or r.Response == "":
                    continue
                responses.append(r)
        elif response is not None and response.Response is not None and response.Response != "":
            responses.append(response)

        for response in responses:
            try:
                if response.Type == ResponseType.Say:
                    self.bot.msg(response.Target.encode("utf-8"), response.Response.encode("utf-8"))
                    self.bot.logger.info(u'{} | <{}> {}'.format(response.Target, self.bot.nickname, response.Response))
                elif response.Type == ResponseType.Do:
                    self.bot.describe(response.Target.encode("utf-8"), response.Response.encode("utf-8"))
                    self.bot.logger.info(u'{} | *{} {}*'.format(response.Target, self.bot.nickname, response.Response))
                elif response.Type == ResponseType.Notice:
                    self.bot.notice(response.Target.encode("utf-8"), response.Response.encode("utf-8"))
                    self.bot.logger.info(u'{} | [{}] {}'.format(response.Target, self.bot.nickname, response.Response))
                elif response.Type == ResponseType.Raw:
                    self.bot.logger.info(u"Sent raw \"{}\"".format(response.Response))
                    self.bot.sendLine(response.Response.encode("utf-8"))
            except Exception:
                self.bot.logger.exception("Python Execution Error sending responses \"{}\"".format(responses))

    def handleMessage(self, message):
        """
        @type message: hubbot.message.IRCMessage
        """
        for module in sorted(self.modules.values(), key=operator.attrgetter("priority")):
            try:
                if module.shouldTrigger(message):
                    if module.accessLevel == ModuleAccessLevel.ADMINS and len(self.bot.admins) != 0 and message.User.Name not in self.bot.admins:
                        self.bot.logger.info("User {} tried to use {} but was denied access.".format(message.User.Name, message.Command))
                        self.sendResponse(IRCResponse(ResponseType.Say, "Only my admins can use {}!".format(message.Command), message.ReplyTo))
                    elif len(self.bot.ignores) == 0 or message.User.Name not in self.bot.ignores:
                        if not module.runInThread:
                            response = module.onTrigger(message)
                            self.sendResponse(response)
                        else:
                            d = threads.deferToThread(module.onTrigger, message)
                            d.addCallback(self.sendResponse)
            except Exception:
                self.bot.logger.exception("Python Execution Error in \"{}\"".format(module.__class__.__name__))

    def enableModule(self, moduleName):
        moduleName = moduleName.lower()
        moduleList = self.getModuleDirList()
        moduleListCaseMap = {key.lower(): key for key in moduleList}

        if moduleName not in moduleListCaseMap:
            self.bot.logger.warning("Module \"{}\" was requested to enable but it is not imported!".format(moduleName))
            return False

        if moduleName in self.moduleCaseMap:
            self.bot.logger.warning("Module \"{}\" was requested to enable but it is already enabled!".format(moduleName))
            return False

        module = sys.modules["{}.{}".format("hubbot.Modules", moduleListCaseMap[moduleName])]
        class_ = getattr(module, moduleListCaseMap[moduleName])
        constructedModule = class_(self.bot)
        self.modules.update({moduleListCaseMap[moduleName]: constructedModule})
        self.moduleCaseMap.update({moduleName: moduleListCaseMap[moduleName]})
        constructedModule.onEnable()

        # map module triggers
        for trigger in constructedModule.triggers:
            self.mappedTriggers[trigger] = constructedModule

        self.bot.logger.info('-- {} enabled.'.format(self.moduleCaseMap[moduleName]))
        return True

    def disableModule(self, moduleName, check=True):
        if moduleName.lower() in self.moduleCaseMap.keys():
            properName = self.moduleCaseMap[moduleName.lower()]

            # unmap module triggers
            for trigger in self.modules[properName].triggers:
                del self.mappedTriggers[trigger]

            self.modules[properName].onDisable()
            del self.modules[self.moduleCaseMap[moduleName.lower()]]
            del self.moduleCaseMap[moduleName.lower()]
            self.bot.logger.info("-- {} disabled.".format(properName))
            if check:
                self.bot.bothandler.checkModuleUsage(properName)
        else:
            self.bot.logger.warning("Module \"{}\" was requested to disable but it is not enabled!".format(moduleName))
            return False
        return True

    def enableAllModules(self):
        modulesToLoad = []
        for moduleName in self.modulesToLoad:
            if moduleName.lower() == "all":
                for module in self.getModuleDirList():
                    modulesToLoad.append(module)
            elif moduleName[0] != "-":
                modulesToLoad.append(moduleName)
            else:
                modulesToLoad.remove(moduleName[1:])

        for module in modulesToLoad:
            try:
                self.enableModule(module)
            except Exception:
                self.bot.logger.exception("Exception when enabling \"{}\"".format(str(module)))

    def getModuleDirList(self):
        return self.bot.bothandler.modules.keys()
Python
0
@@ -4955,17 +4955,37 @@ ower()%5D%0A -%0A + try:%0A @@ -5004,32 +5004,36 @@ module triggers%0A + for @@ -5086,32 +5086,36 @@ + del self.mappedT @@ -5131,17 +5131,20 @@ rigger%5D%0A -%0A + @@ -5180,25 +5180,168 @@ onDisable()%0A -%0A + except:%0A self.bot.logger.exception(%22Exception when disabling module %7B%7D%22.format(moduleName))%0A finally:%0A @@ -5397,16 +5397,20 @@ wer()%5D%5D%0A + @@ -5460,32 +5460,36 @@ ()%5D%0A + self.bot.logger. @@ -5543,16 +5543,20 @@ + if check @@ -5557,16 +5557,20 @@ check:%0A +
c48b0ae4331d1d039cb6bc29ef25fc7c4a5df8da
Bump version to 0.2.7
approvaltests/version.py
approvaltests/version.py
version_number = "0.2.6"
Python
0.000001
@@ -19,7 +19,7 @@ 0.2. -6 +7 %22%0A
903d9b000c4d7b333b5d3000aeb38b7e4d818c27
add "Partly Cloudy" to color_icons
i3pystatus/weather.py
i3pystatus/weather.py
from i3pystatus import IntervalModule
import pywapi
from i3pystatus.core.util import internet, require


class Weather(IntervalModule):
    """
    This module gets the weather from weather.com using pywapi module
    First, you need to get the code for the location from the www.weather.com

    Available formatters:

    * {current_temp}
    * {humidity}

    Requires pywapi from PyPI.
    """

    interval = 20

    settings = (
        "location_code",
        ("colorize", "Enable color with temperature and UTF-8 icons."),
        ("units", "Celsius (metric) or Fahrenheit (imperial)"),
        "format",
    )
    required = ("location_code",)

    units = "metric"
    format = "{current_temp}"
    colorize = None
    color_icons = {'Fair': (u'\u2600', '#FFCC00'),
                   'Cloudy': (u'\u2601', '#F8F8FF'),
                   'Rainy': (u'\u2614', '#CBD2C0'),
                   'Sunny': (u'\u263C', '#FFFF00'),
                   'Snow': (u'\u2603', '#FFFFFF'),
                   'default': ('', None),
                   }

    @require(internet)
    def run(self):
        result = pywapi.get_weather_from_weather_com(self.location_code, self.units)
        conditions = result['current_conditions']
        temperature = conditions['temperature']
        humidity = conditions['humidity']
        units = result['units']
        color = None
        current_temp = '{t}°{d} '.format(t=temperature, d=units['temperature'])

        if self.colorize:
            icon, color = self.color_icons.get(conditions['text'],
                                               self.color_icons['default'])
            current_temp = '{t}°{d} {i}'.format(t=temperature,
                                                d=units['temperature'],
                                                i=icon)
            color = color

        self.output = {
            "full_text": self.format.format(current_temp=current_temp, humidity=humidity),
            "color": color
        }
Python
0.999821
@@ -819,24 +819,114 @@ '#F8F8FF'),%0A + 'Partly Cloudy': (u'%5Cu2601', '#F8F8FF'), # %5Cu26c5 is not in many fonts%0A
f4a39adc6513f41dc33c4ecf597f4a80dd846dd9
rename LdapConnection to DatabaseWrapper and accept configuration as a dict
ldapdb/__init__.py
ldapdb/__init__.py
# -*- coding: utf-8 -*-
#
# django-ldapdb
# Copyright (c) 2009-2010, Bolloré telecom
# All rights reserved.
#
# See AUTHORS file for a full list of contributors.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Bolloré telecom nor the names of its contributors
#    may be used to endorse or promote products derived from this software
#    without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import ldap

from django.conf import settings
from django.db.backends import BaseDatabaseFeatures, BaseDatabaseOperations


def escape_ldap_filter(value):
    value = unicode(value)
    return value.replace('\\', '\\5c') \
                .replace('*', '\\2a') \
                .replace('(', '\\28') \
                .replace(')', '\\29') \
                .replace('\0', '\\00')


class DatabaseCursor(object):
    def __init__(self, ldap_connection):
        self.connection = ldap_connection


class DatabaseFeatures(BaseDatabaseFeatures):
    def __init__(self, connection):
        self.connection = connection


class DatabaseOperations(BaseDatabaseOperations):
    def quote_name(self, name):
        return name


class LdapConnection(object):
    def __init__(self):
        self.connection = None
        self.charset = "utf-8"
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations()

    def _cursor(self):
        if self.connection is None:
            self.connection = ldap.initialize(settings.LDAPDB_SERVER_URI)
            self.connection.simple_bind_s(
                settings.LDAPDB_BIND_DN,
                settings.LDAPDB_BIND_PASSWORD)
        return DatabaseCursor(self.connection)

    def add_s(self, dn, modlist):
        cursor = self._cursor()
        return cursor.connection.add_s(dn.encode(self.charset), modlist)

    def delete_s(self, dn):
        cursor = self._cursor()
        return cursor.connection.delete_s(dn.encode(self.charset))

    def modify_s(self, dn, modlist):
        cursor = self._cursor()
        return cursor.connection.modify_s(dn.encode(self.charset), modlist)

    def rename_s(self, dn, newrdn):
        cursor = self._cursor()
        return cursor.connection.rename_s(dn.encode(self.charset), newrdn.encode(self.charset))

    def search_s(self, base, scope, filterstr, attrlist):
        cursor = self._cursor()
        results = cursor.connection.search_s(base, scope, filterstr.encode(self.charset), attrlist)
        output = []
        for dn, attrs in results:
            output.append((dn.decode(self.charset), attrs))
        return output

# FIXME: is this the right place to initialize the LDAP connection?
connection = LdapConnection()
Python
0.000002
@@ -2411,30 +2411,31 @@ %0A%0Aclass -LdapConnection +DatabaseWrapper (object) @@ -2453,26 +2453,101 @@ _init__(self -): +, settings_dict=%7B%7D, alias='ldap'):%0A self.settings_dict = settings_dict %0A sel @@ -2685,16 +2685,51 @@ ions()%0A%0A + def close(self):%0A pass%0A%0A def @@ -2825,24 +2825,29 @@ tialize( +self. settings .LDAPDB_ @@ -2842,26 +2842,21 @@ ings -.LDAPDB_SERVER_URI +_dict%5B'NAME'%5D )%0A @@ -2908,32 +2908,37 @@ +self. settings .LDAPDB_BIND @@ -2929,23 +2929,21 @@ ings -.LDAPDB_BIND_DN +_dict%5B'USER'%5D ,%0A @@ -2956,24 +2956,29 @@ +self. settings .LDAPDB_ @@ -2973,29 +2973,23 @@ ings -.LDAPDB_BIND_ +_dict%5B' PASSWORD )%0A @@ -2984,16 +2984,18 @@ PASSWORD +'%5D )%0A @@ -4027,22 +4027,148 @@ n = -LdapConnection( +DatabaseWrapper(%7B%0A 'NAME': settings.LDAPDB_SERVER_URI,%0A 'USER': settings.LDAPDB_BIND_DN,%0A 'PASSWORD': settings.LDAPDB_BIND_PASSWORD%7D )%0A%0A
9729d98b470c62bb0e9f63e086d576b773b68047
Disable some web content related tasks depending on the config file.
ichnaea/data/tasks.py
ichnaea/data/tasks.py
""" Contains all celery tasks. The task function names and this module's import path is used in generating automatic statsd timer metrics to track the runtime of each task. """ from datetime import timedelta from celery.schedules import crontab from ichnaea.async.app import celery_app from ichnaea.async.task import BaseTask from ichnaea.data import area from ichnaea.data.datamap import DataMapUpdater from ichnaea.data import export from ichnaea.data import monitor from ichnaea.data import ocid from ichnaea.data import station from ichnaea.data import stats from ichnaea import models def _cell_export_enabled(app_config): return ('assets' in app_config.sections() and bool(app_config.get('assets', 'bucket', False))) def _ocid_import_enabled(app_config): return 'import:ocid' in app_config.sections() @celery_app.task(base=BaseTask, bind=True, queue='celery_ocid', expires=2700, _schedule=crontab(minute=3), _enabled=_cell_export_enabled) def cell_export_diff(self, _bucket=None): ocid.CellExport(self)(hourly=True, _bucket=_bucket) @celery_app.task(base=BaseTask, bind=True, queue='celery_ocid', expires=39600, _schedule=crontab(hour=0, minute=13), _enabled=_cell_export_enabled) def cell_export_full(self, _bucket=None): ocid.CellExport(self)(hourly=False, _bucket=_bucket) @celery_app.task(base=BaseTask, bind=True, queue='celery_ocid', expires=2700, _schedule=crontab(minute=52), _enabled=_ocid_import_enabled) def cell_import_external(self): ocid.ImportExternal(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_monitor', expires=570, _schedule=timedelta(seconds=600)) def monitor_api_key_limits(self): monitor.ApiKeyLimits(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_monitor', expires=570, _schedule=timedelta(seconds=600)) def monitor_api_users(self): monitor.ApiUsers(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_monitor', expires=570, _schedule=timedelta(seconds=600), _enabled=_ocid_import_enabled) def monitor_ocid_import(self): monitor.OcidImport(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_monitor', expires=57, _schedule=timedelta(seconds=60)) def monitor_queue_size(self): monitor.QueueSize(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_reports', _countdown=2, expires=20, _schedule=timedelta(seconds=32)) def update_incoming(self): export.IncomingQueue(self)(export_reports) @celery_app.task(base=BaseTask, bind=True, queue='celery_export', _countdown=1, expires=300) def export_reports(self, name, queue_key): export.ReportExporter.export(self, name, queue_key) @celery_app.task(base=BaseTask, bind=True, queue='celery_blue', _countdown=5, expires=30, _schedule=timedelta(seconds=48), _shard_model=models.BlueShard) def update_blue(self, shard_id=None): station.BlueUpdater(self, shard_id=shard_id)() @celery_app.task(base=BaseTask, bind=True, queue='celery_cell', _countdown=5, expires=30, _schedule=timedelta(seconds=41), _shard_model=models.CellShard) def update_cell(self, shard_id=None): station.CellUpdater(self, shard_id=shard_id)() @celery_app.task(base=BaseTask, bind=True, queue='celery_wifi', _countdown=5, expires=30, _schedule=timedelta(seconds=40), _shard_model=models.WifiShard) def update_wifi(self, shard_id=None): station.WifiUpdater(self, shard_id=shard_id)() @celery_app.task(base=BaseTask, bind=True, queue='celery_cell', _countdown=5, expires=30, _schedule=timedelta(seconds=44)) def update_cellarea(self): area.CellAreaUpdater(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_ocid', _countdown=5, 
expires=30, _schedule=timedelta(seconds=45)) def update_cellarea_ocid(self): area.CellAreaOCIDUpdater(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_content', _countdown=2, expires=30, _schedule=timedelta(seconds=47), _shard_model=models.DataMap) def update_datamap(self, shard_id=None): DataMapUpdater(self, shard_id=shard_id)() @celery_app.task(base=BaseTask, bind=True, queue='celery_content', expires=18000, _schedule=timedelta(seconds=21600)) def update_statregion(self): stats.StatRegion(self)() @celery_app.task(base=BaseTask, bind=True, queue='celery_content', expires=2700, _schedule=crontab(minute=2)) def update_statcounter(self, ago=None): # BBB: ago argument stats.StatCounterUpdater(self)()
Python
0
@@ -826,24 +826,235 @@ ections()%0A%0A%0A +def _web_content_enabled(app_config):%0A if ('web' in app_config.sections() and%0A app_config.get('web', 'enabled', True) in ('false', '0')):%0A return False # pragma: no cover%0A return True%0A%0A%0A @celery_app. @@ -4267,18 +4267,66 @@ onds=45) +,%0A _enabled=_ocid_import_enabled )%0A - def upda @@ -4575,16 +4575,64 @@ .DataMap +,%0A _enabled=_web_content_enabled )%0Adef up @@ -4843,24 +4843,72 @@ conds=21600) +,%0A _enabled=_web_content_enabled )%0Adef update @@ -5016,32 +5016,32 @@ elery_content',%0A - @@ -5082,16 +5082,64 @@ inute=2) +,%0A _enabled=_web_content_enabled )%0Adef up
faebe4928b4bef33efd6183f97f1ff1396a701ee
fix missing urls.
blackgate/cli.py
blackgate/cli.py
# -*- coding: utf-8 -*-

import click

from blackgate.core import component
from blackgate.config import parse_yaml_config
from blackgate.config import read_yaml_config
from blackgate.config import read_default_config
from blackgate.server import run


@click.group()
@click.option('-c', '--config', default='')
@click.pass_context
def main(ctx, config):
    if not config:
        config = read_default_config()
    else:
        config = read_yaml_config(config)

    if not config:
        ctx.fail('config not found.')

    try:
        config = parse_yaml_config(config)
    except ValueError:
        ctx.fail('config is not valid yaml.')

    ctx.obj['config'] = config


@main.command()
@click.pass_context
def start(ctx):
    config = ctx.obj['config']
    component.configurations = config
    component.install()
    run(config.get('port', 9654))


if __name__ == '__main__':
    main()
Python
0.00013
@@ -635,24 +635,41 @@ id yaml.')%0A%0A + ctx.obj = %7B%7D%0A ctx.obj%5B
66b95e2e0b89470993c52998eeb179d9e4926713
test public list
project/test_project.py
project/test_project.py
#from django.contrib.auth.models import User
from .models import *

from django.test import TestCase
from django.db import transaction

import reversion
#from reversion.models import Version

TEST_USER_NAME_CREATOR = 'test project creator'
TEST_USER_NAME_NOT_MEMBER = 'user is not a member'

TEST_PROJECT_PUBLIC_NAME = 'test project name public'
TEST_PROJECT_PRIVATE_NAME = 'test project name private'

def get_public_project():
    return Project.objects.get(fullname=TEST_PROJECT_PUBLIC_NAME)

def get_private_project():
    return Project.objects.get(fullname=TEST_PROJECT_PRIVATE_NAME)

def get_creator_user():
    return User.objects.get( username = TEST_USER_NAME_CREATOR )

def get_user_not_member():
    return User.objects.get( username = TEST_USER_NAME_NOT_MEMBER )

class Project_Test(TestCase):

    def setUp(self):
        user_creator = User.objects.create_user( username = TEST_USER_NAME_CREATOR, password = '-' )
        user_creator.save()

        user_not_member = User.objects.create_user( username = TEST_USER_NAME_NOT_MEMBER, password = '-' )
        user_not_member.save()

        test_project_public = Project(fullname=TEST_PROJECT_PUBLIC_NAME)
        test_project_public.set_change_user(user_creator)
        test_project_public.save()

        test_project_private = Project(fullname=TEST_PROJECT_PRIVATE_NAME)
        test_project_private.set_change_user(user_creator)
        test_project_private.private_flag = True
        test_project_private.save()

    def test_have_repo_false(self):
        test_project = get_public_project()
        self.assertEqual( test_project.have_repo(), False )

    def test_creator_is_member(self):
        test_project = get_public_project()
        user_creator = get_creator_user()
        self.assertEqual( test_project.is_member(user_creator), True )

    def test_creator_is_member_False(self):
        test_project = get_public_project()
        user_not_member = get_user_not_member()
        self.assertEqual( test_project.is_member(user_not_member), False )

    def test_creator_is_member_None(self):
        test_project = get_public_project()
        self.assertEqual( test_project.is_member(None), False )

    def test_creator_is_admin(self):
        test_project = get_public_project()
        user_creator = get_creator_user()
        self.assertEqual( test_project.is_admin(user_creator), True )

    def test_creator_can_admin(self):
        test_project = get_public_project()
        user_creator = get_creator_user()
        self.assertEqual( test_project.can_admin(user_creator), True )

    def test_creator_acl_admin(self):
        test_project = get_public_project()
        user_creator = get_creator_user()
        self.assertEqual( test_project.user_access_level(user_creator), PROJECT_ACCESS_ADMIN )

    def test_none_user_acl_admin_public(self):
        test_project = get_public_project()
        self.assertEqual( test_project.user_access_level( None ), PROJECT_ACCESS_VIEW )

    def test_none_user_acl_admin_private(self):
        test_project = get_private_project()
        self.assertEqual( test_project.user_access_level( None ), PROJECT_ACCESS_NONE )
Python
0.000001
@@ -3148,8 +3148,198 @@ _NONE )%0A + %0A def test_public_project_list(self):%0A pl = GetAllPublicProjectList()%0A self.assertEqual( get_public_project() in pl, True )%0A self.assertEqual( pl.count(), 1 )
1408f2a7e782c0ca059b04cab2526cef558312b6
add comment to explain not extending BaseGoAccountCommand
go/base/management/commands/go_generate_export_conversations_urls.py
go/base/management/commands/go_generate_export_conversations_urls.py
""" Dump URLs that can be used by cURL for downloading conversation data """ from optparse import make_option from django.core.management.base import CommandError from django.utils.text import slugify from go.base.command_utils import BaseGoCommand class Command(BaseGoCommand): help = "Dump URLs for use with cURL for downloading message data." DEFAULT_TEMPLATE = ( 'curl -o {file_name}-{created_at}-{status}-{direction}.json ' '{base_url}{key}/{direction}.json?concurrency=100\n') option_list = BaseGoCommand.option_list + ( make_option( '--email', dest='email', default=None, help='The user to generate export URLs for.'), make_option( '--base-url', dest='base_url', default=None, help='http://export-host:export-port/message_store_exporter/'), make_option( '--template', dest='template', default=DEFAULT_TEMPLATE, help='The template for generating the cURL.') ) def handle(self, *args, **kwargs): self.email = kwargs['email'] if self.email is None: raise CommandError('--email is mandatory.') self.base_url = kwargs['base_url'] if self.base_url is None: raise CommandError('--base-url is mandatory.') self.template = kwargs['template'] self.user, self.user_api = self.mk_user_api(self.email) conversation_store = self.user_api.conversation_store conversation_keys = conversation_store.list_conversations() for conversation_key in conversation_keys: conversation = self.user_api.get_wrapped_conversation( conversation_key) for direction in ['inbound', 'outbound']: self.stdout.write( self.template.format( file_name=slugify(conversation.name), created_at=conversation.created_at.isoformat(), base_url=self.base_url, key=conversation.batch.key, direction=direction, status=(conversation.archive_status if conversation.archived() else conversation.get_status()), ) )
Python
0
@@ -247,16 +247,121 @@ mmand%0A%0A%0A +# We don't extend BaseGoAccountCommand since the '--email' option is used%0A# instead of '--email-address'%0A class Co
bdef12745f5d91bf196139b444b34810b529c38d
Fix #37: Make subclassing of btuple work for __add__ and __radd__.
blist/_btuple.py
blist/_btuple.py
from blist._blist import blist
from ctypes import c_int
import collections

class btuple(collections.Sequence):
    def __init__(self, seq=None):
        if isinstance(seq, btuple):
            self._blist = seq._blist
        elif seq is not None:
            self._blist = blist(seq)
        else:
            self._blist = blist()
        self._hash = -1

    def _btuple_or_tuple(self, other, f):
        if isinstance(other, btuple):
            rv = f(self._blist, other._blist)
        elif isinstance(other, tuple):
            rv = f(self._blist, blist(other))
        else:
            return NotImplemented
        if isinstance(rv, blist):
            rv = btuple(rv)
        return rv

    def __hash__(self):
        # Based on tuplehash from tupleobject.c
        if self._hash != -1:
            return self._hash

        n = len(self)
        mult = c_int(1000003)
        x = c_int(0x345678)
        for ob in self:
            n -= 1
            y = c_int(hash(ob))
            x = (x ^ y) * mult
            mult += c_int(82520) + n + n
        x += c_int(97531)
        if x == -1:
            x = -2;
        self._hash = x.value
        return self._hash

    def __add__(self, other):
        rv = self._btuple_or_tuple(other, blist.__add__)
        if rv is NotImplemented:
            raise TypeError
        return rv
    def __radd__(self, other):
        rv = self._btuple_or_tuple(other, blist.__radd__)
        if rv is NotImplemented:
            raise TypeError
        return rv
    def __contains__(self, item):
        return item in self._blist
    def __eq__(self, other):
        return self._btuple_or_tuple(other, blist.__eq__)
    def __ge__(self, other):
        return self._btuple_or_tuple(other, blist.__ge__)
    def __gt__(self, other):
        return self._btuple_or_tuple(other, blist.__gt__)
    def __le__(self, other):
        return self._btuple_or_tuple(other, blist.__le__)
    def __lt__(self, other):
        return self._btuple_or_tuple(other, blist.__lt__)
    def __ne__(self, other):
        return self._btuple_or_tuple(other, blist.__ne__)
    def __iter__(self):
        return iter(self._blist)
    def __len__(self):
        return len(self._blist)
    def __getitem__(self, key):
        if isinstance(key, slice):
            return btuple(self._blist[key])
        return self._blist[key]
    def __getslice__(self, i, j):
        return btuple(self._blist[i:j])
    def __repr__(self):
        return 'btuple((' + repr(self._blist)[7:-2] + '))'
    def __str__(self):
        return repr(self)
    def __mul__(self, i):
        return btuple(self._blist * i)
    def __rmul__(self, i):
        return btuple(i * self._blist)
    def count(self, item):
        return self._blist.count(item)
    def index(self, item):
        return self._blist.index(item)

del c_int
del collections
Python
0
@@ -1227,35 +1227,37 @@ ther):%0A r -v = +eturn self._btuple_or @@ -1289,87 +1289,8 @@ __)%0A - if rv is NotImplemented:%0A raise TypeError%0A return rv%0A @@ -1325,19 +1325,21 @@ r -v = +eturn self._b @@ -1380,87 +1380,8 @@ __)%0A - if rv is NotImplemented:%0A raise TypeError%0A return rv%0A
8abcd25ccd36d614ad650e0983385fbeb5a1777c
Add more search engines ping
blog/__init__.py
blog/__init__.py
# -*- coding: UTF-8 -*-
# YaBlog
# (c) Regis FLORET
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the <organization> nor the
#       names of its contributors may be used to endorse or promote products
#       derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Regis FLORET BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import unicodedata
import re

# Code from internet
def ping_all(sitemap_url='http://www.regisblog.fr/sitemap.xml'):
    """
    Pings the popular search engines, Google, Yahoo, ASK, and Bing,
    to let them know that you have updated your site's sitemap.
    Returns successfully pinged servers.
    """
    from django.contrib.sitemaps import ping_google

    SEARCH_ENGINE_PING_URLS = (
        ('google', 'http://www.google.com/webmasters/tools/ping'),
        ('yahoo', 'http://search.yahooapis.com/SiteExplorerService/V1/ping'),
        ('ask', 'http://submissions.ask.com/ping'),
        ('bing', 'http://www.bing.com/webmaster/ping.aspx'),
    )

    successfully_pinged = []

    for (site, url) in SEARCH_ENGINE_PING_URLS:
        try:
            ping_google(sitemap_url=sitemap_url, ping_url=url)
            pinged = True
        except:
            pinged = False

        if pinged:
            successfully_pinged.append(site)

    return successfully_pinged

def sanitize_name(name):
    """ Ensure to remove all non-alphanum characters """
    name = unicodedata.normalize('NFKD', name).encode('ascii','ignore')
    for c in "&\"'()'ç=²¹~#{}[]+°$£^*µ%!§:/;.,?":
        name = name.replace(c,"")
    name = name.lower().strip()
    name = re.sub("\s","-",re.sub("\s+$","",name))
    return name
Python
0
@@ -1969,32 +1969,212 @@ s/tools/ping'),%0A + ('technorati', 'http://rpc.technorati.com/rpc/ping'),%0A ('pigomatic', 'http://rpc.pingomatic.com'),%0A ('googleblog', 'http://blogsearch.google.com/ping/RPC2'),%0A ('yahoo'
3154f0098f9696cd48536599413659e47747491f
Add api [2]
blue/__init__.py
blue/__init__.py
from flask import Flask

app = Flask(__name__)

from blue.site.routes import mod
from blue.api.routes import mod

app.register_blueprint(site.routes.mod)
app.register_blueprint(api.routes.mod)
Python
0
@@ -176,17 +176,36 @@ t(api.routes.mod +, url_prefix='/api' )
20d47877bf426bc2ec9ac1b8a99ec887faec31c5
Fix minor problems with mux function
boolexpr/misc.py
boolexpr/misc.py
# Copyright 2016 Chris Drake
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Miscellaneous features not implemented in C++ API
"""

import functools
import itertools
import operator

from .util import clog2
from .util import iter_space
from .wrap import array
from .wrap import not_
from .wrap import or_
from .wrap import and_
from .wrap import _expect_array


def nhot(n, *args):
    """
    Return a CNF expression that means
    "exactly N input functions are true".
    """
    if not 0 <= n <= len(args):
        fstr = "expected 0 <= n <= {}, got {}"
        raise ValueError(fstr.format(len(args), n))

    clauses = list()
    for xs in itertools.combinations(args, n+1):
        clauses.append(or_(*[not_(x) for x in xs]))
    for xs in itertools.combinations(args, (len(args)+1)-n):
        clauses.append(or_(*xs))
    return and_(*clauses)


def majority(*args, conj=False):
    """
    Return an expression that means
    "the majority of input functions are true".

    If *conj* is ``True``, return a CNF.
    Otherwise, return a DNF.
    """
    clauses = list()
    if conj:
        for xs in itertools.combinations(args, (len(args) + 1) // 2):
            clauses.append(or_(*xs))
        return and_(*clauses)
    else:
        for xs in itertools.combinations(args, len(args) // 2 + 1):
            clauses.append(and_(*xs))
        return or_(*clauses)


def achilles_heel(*args):
    r"""
    Return the Achille's Heel function, defined as:
    :math:`\prod_{i=0}^{n/2-1}{X_{2i} + X_{2i+1}}`.
    """
    num = len(args)
    if num & 1:
        fstr = "expected an even number of arguments, got {}"
        raise ValueError(fstr.format(num))
    return and_(*[or_(args[2*i], args[2*i+1]) for i in range(num // 2)])


def mux(xs, sel):
    """
    Return an expression that multiplexes a sequence of input functions over a
    sequence of select functions.
    """
    xs = _expect_array(xs)
    sel = _expect_array(sel)

    if sel.size < clog2(xs.size):
        fstr = "expected at least {} select bits, got {}"
        raise ValueError(fstr.format(clog2(xs.size), sel.size))

    terms = (tuple(sel[i] if vertex[i] else ~sel[i] for i in range(sel.size))
             for vertex in iter_space(sel.size))
    return or_(*[and_(x, *term) for (x, term) in zip(xs, terms)])


def exists(xs, f):
    """
    Return an expression that means
    "there exists a variable in *xs* such that *f* true."

    This is identical to ``f.smoothing(xs)``.
    """
    return f.smoothing(xs)


def forall(xs, f):
    """
    Return an expression that means
    "for all variables in *xs*, *f* is true."

    This is identical to ``f.consensus(xs)``.
    """
    return f.consensus(xs)


def cat(*xs):
    """Concatenate a sequence of expressions."""
    return functools.reduce(operator.add, xs, array([]))
Python
0.000018
@@ -2245,18 +2245,17 @@ def mux( -xs +a , sel):%0A @@ -2309,76 +2309,41 @@ es a - sequence of input functions over a%0A sequence of select functions +n input array over a select array .%0A @@ -2352,18 +2352,17 @@ %22%22%22%0A -xs +a = _expe @@ -2370,18 +2370,17 @@ t_array( -xs +a )%0A se @@ -2422,26 +2422,25 @@ ize %3C clog2( -xs +a .size):%0A @@ -2536,18 +2536,17 @@ t(clog2( -xs +a .size), @@ -2736,18 +2736,22 @@ in zip( -xs +a.flat , terms)
8da750eddfecb2c7162e1a33c7c830fd083944bd
Change allowed exception raising
intelmq/bots/parsers/abusech/parser_ip.py
intelmq/bots/parsers/abusech/parser_ip.py
# -*- coding: utf-8 -*-
"""
Parses simple newline separated list of IPs.

Docs:
 - https://feodotracker.abuse.ch/blocklist/
 - https://zeustracker.abuse.ch/blocklist.php
"""
import re

import dateutil

from intelmq.lib.bot import ParserBot
from intelmq.lib import utils
from intelmq.lib.exceptions import PipelineError

FEEDS = {
    'https://feodotracker.abuse.ch/downloads/ipblocklist.csv': {
        'format': [
            'time.source',
            'source.ip',
            'source.port',
            'malware.name'
        ],
        'malware': 'Cridex'
    },
    'https://zeustracker.abuse.ch/blocklist.php?download=ipblocklist': {
        'format': [
            'source.ip'
        ],
        'malware': 'Zeus'
    },
    'https://zeustracker.abuse.ch/blocklist.php?download=badips': {
        'format': [
            'source.ip'
        ],
        'malware': 'Zeus'
    }
}


class AbusechIPParserBot(ParserBot):
    __last_generated_date = None
    __is_comment_line_regex = re.compile(r'^#+.*')
    __date_regex = re.compile(r'[0-9]{4}.[0-9]{2}.[0-9]{2}.[0-9]{2}.[0-9]{2}.[0-9]{2}( UTC)?')

    def parse(self, report: dict):
        feed = report['feed.url']
        raw_lines = utils.base64_decode(report.get("raw")).splitlines()

        comments = list(r for r in raw_lines if self.__is_comment_line_regex.search(r))
        fields = comments[-1].split(',')

        if len(fields) is not len(FEEDS[feed]['format']):
            self.logger.warning("Feed '{}' has not the expected fields: {} != {}".format(feed, len(fields), len(FEEDS[feed]['format'])))
            raise PipelineError("Abusech ip parser is not up to date with the format online")

        for line in comments:
            if 'Last updated' in line:
                self.__last_generated_date = dateutil.parser.parse(self.__date_regex.search(line).group(0)).isoformat()

        lines = (l for l in raw_lines if not self.__is_comment_line_regex.search(l))

        for line in lines:
            line = line.strip()
            if not any([line.startswith(prefix) for prefix in self.ignore_lines_starting]):
                yield line

    def parse_line(self, line, report):
        event = self.new_event(report)

        self.__process_defaults(event, line, report['feed.url'])
        self.__process_fields(event, line, report['feed.url'])

        yield event

    def __process_defaults(self, event, line, feed_url):
        defaults = {
            ('malware.name', FEEDS[feed_url]['malware']),
            ('raw', line),
            ('classification.type', 'c&c'),
            ('classification.taxonomy', 'malicious code'),
            ('time.observation', self.__last_generated_date)
        }

        for i in defaults:
            if i[0] not in FEEDS[feed_url]['format']:
                if i[1] is None:
                    continue
                else:
                    event.add(i[0], i[1], overwrite=True)

    @staticmethod
    def __process_fields(event, line, feed_url):
        for field, value in zip(FEEDS[feed_url]['format'], line.split(',')):
            if field == 'time.source':
                ts = dateutil.parser.parse(value + ' UTC').isoformat() if not value.endswith(' UTC') else value
                event.add(field, ts)
            else:
                event.add(field, value)

    def recover_line(self, line):
        return '\n'.join(self.tempdata + [line])


BOT = AbusechIPParserBot
Python
0
@@ -1760,23 +1760,20 @@ raise -Pipelin +Valu eError(%22
7c3faea0dde53163628462db4fe688fa47cc63ab
comment fixed
sanic/sanic.py
sanic/sanic.py
from asyncio import get_event_loop
from inspect import isawaitable
from multiprocessing import Process, Event
from signal import signal, SIGTERM, SIGINT
from time import sleep
from traceback import format_exc

from .config import Config
from .exceptions import Handler
from .log import log, logging
from .response import HTTPResponse
from .router import Router
from .server import serve
from .exceptions import ServerError


class Sanic:
    def __init__(self, name, router=None, error_handler=None):
        self.name = name
        self.router = router or Router()
        self.error_handler = error_handler or Handler(self)
        self.config = Config()
        self.request_middleware = []
        self.response_middleware = []
        self.blueprints = {}
        self._blueprint_order = []

    # -------------------------------------------------------------------- #
    # Registration
    # -------------------------------------------------------------------- #

    # Decorator
    def route(self, uri, methods=None):
        """
        Decorates a function to be registered as a route
        :param uri: path of the URL
        :param methods: list or tuple of methods allowed
        :return: decorated function
        """

        def response(handler):
            self.router.add(uri=uri, methods=methods, handler=handler)
            return handler

        return response

    # Decorator
    def exception(self, *exceptions):
        """
        Decorates a function to be registered as a route
        :param uri: path of the URL
        :param methods: list or tuple of methods allowed
        :return: decorated function
        """

        def response(handler):
            for exception in exceptions:
                self.error_handler.add(exception, handler)
            return handler

        return response

    # Decorator
    def middleware(self, *args, **kwargs):
        """
        Decorates and registers middleware to be called before a request
        can either be called as @app.middleware or @app.middleware('request')
        """
        attach_to = 'request'

        def register_middleware(middleware):
            if attach_to == 'request':
                self.request_middleware.append(middleware)
            if attach_to == 'response':
                self.response_middleware.append(middleware)
            return middleware

        # Detect which way this was called, @middleware or @middleware('AT')
        if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
            return register_middleware(args[0])
        else:
            attach_to = args[0]
            return register_middleware

    def register_blueprint(self, blueprint, **options):
        """
        Registers a blueprint on the application.
        :param blueprint: Blueprint object
        :param options: option dictionary with blueprint defaults
        :return: Nothing
        """
        if blueprint.name in self.blueprints:
            assert self.blueprints[blueprint.name] is blueprint, \
                'A blueprint with the name "%s" is already registered. ' \
                'Blueprint names must be unique.' % \
                (blueprint.name,)
        else:
            self.blueprints[blueprint.name] = blueprint
            self._blueprint_order.append(blueprint)
        blueprint.register(self, options)

    # -------------------------------------------------------------------- #
    # Request Handling
    # -------------------------------------------------------------------- #

    async def handle_request(self, request, response_callback):
        """
        Takes a request from the HTTP Server and returns a response object to
        be sent back The HTTP Server only expects a response object, so
        exception handling must be done here
        :param request: HTTP Request object
        :param response_callback: Response function to be called with the
        response as the only argument
        :return: Nothing
        """
        try:
            # Middleware process_request
            response = False
            # The if improves speed. I don't know why
            if self.request_middleware:
                for middleware in self.request_middleware:
                    response = middleware(request)
                    if isawaitable(response):
                        response = await response
                    if response:
                        break

            # No middleware results
            if not response:
                # Fetch handler from router
                handler, args, kwargs = self.router.get(request)
                if handler is None:
                    raise ServerError(
                        ("'None' was returned while requesting a "
                         "handler from the router"))

                # Run response handler
                response = handler(request, *args, **kwargs)
                if isawaitable(response):
                    response = await response

            # Middleware process_response
            if self.response_middleware:
                for middleware in self.response_middleware:
                    _response = middleware(request, response)
                    if isawaitable(_response):
                        _response = await _response
                    if _response:
                        response = _response
                        break

        except Exception as e:
            try:
                response = self.error_handler.response(request, e)
                if isawaitable(response):
                    response = await response
            except Exception as e:
                if self.debug:
                    response = HTTPResponse(
                        "Error while handling error: {}\nStack: {}".format(
                            e, format_exc()))
                else:
                    response = HTTPResponse(
                        "An error occured while handling an error")

        response_callback(response)

    # -------------------------------------------------------------------- #
    # Execution
    # -------------------------------------------------------------------- #

    def run(self, host="127.0.0.1", port=8000, debug=False, after_start=None,
            before_stop=None, sock=None, workers=1):
        """
        Runs the HTTP Server and listens until keyboard interrupt or term
        signal. On termination, drains connections before closing.
        :param host: Address to host on
        :param port: Port to host on
        :param debug: Enables debug output (slows server)
        :param after_start: Function to be executed after the server starts
        listening
        :param before_stop: Function to be executed when a stop signal is
        received before it is respected
        :param sock: Socket for the server to accept connections from
        :param workers: Number of processes
        received before it is respected
        :return: Nothing
        """
        self.error_handler.debug = True
        self.debug = debug

        server_settings = {
            'host': host,
            'port': port,
            'sock': sock,
            'debug': debug,
            'request_handler': self.handle_request,
            'request_timeout': self.config.REQUEST_TIMEOUT,
            'request_max_size': self.config.REQUEST_MAX_SIZE,
        }

        if debug:
            log.setLevel(logging.DEBUG)
        log.debug(self.config.LOGO)

        # Serve
        log.info('Goin\' Fast @ http://{}:{}'.format(host, port))

        try:
            if workers == 1:
                server_settings['after_start'] = after_start
                server_settings['before_stop'] = before_stop
                serve(**server_settings)
            else:
                log.info('Spinning up {} workers...'.format(workers))
                self.serve_multiple(server_settings, workers)
        except Exception as e:
            log.exception(
                'Experienced exception while trying to serve: {}'.format(e))
            pass

        log.info("Server Stopped")

    def stop(self):
        """
        This kills the Sanic
        """
        get_event_loop().stop()

    @staticmethod
    def serve_multiple(server_settings, workers, stop_event=None):
        """
        Starts multiple server processes simultaneously. Stops on interrupt
        and terminate signals, and drains connections when complete.
        :param server_settings: kw arguments to be passed to the serve function
        :param workers: number of workers to launch
        :param stop_event: if provided, is used as a stop signal
        :return:
        """
        server_settings['reuse_port'] = True

        # Create a stop event to be triggered by a signal
        if not stop_event:
            stop_event = Event()
        signal(SIGINT, lambda s, f: stop_event.set())
        signal(SIGTERM, lambda s, f: stop_event.set())

        processes = []
        for w in range(workers):
            process = Process(target=serve, kwargs=server_settings)
            process.start()
            processes.append(process)

        # Infinitely wait for the stop event
        try:
            while not stop_event.is_set():
                sleep(0.3)
        except:
            pass

        log.info('Spinning down workers...')
        for process in processes:
            process.terminate()
        for process in processes:
            process.join()
Python
0
@@ -1508,106 +1508,69 @@ s a -route%0A :param uri: path of the URL%0A :param methods: list or tuple of methods allowed +handler for exceptions%0A :param *exceptions: exceptions %0A
ff9e3e99e7a5bda1eefdd925960b6b6153a9e10d
Update messenger.py
bot/messenger.py
bot/messenger.py
import logging
import random

logger = logging.getLogger(__name__)


class Messenger(object):
    def __init__(self, slack_clients):
        self.clients = slack_clients

    def send_message(self, channel_id, msg):
        # in the case of Group and Private channels, RTM channel payload is a complex dictionary
        if isinstance(channel_id, dict):
            channel_id = channel_id['id']
        logger.debug('Sending msg: {} to channel: {}'.format(msg, channel_id))
        channel = self.clients.rtm.server.channels.find(channel_id)
        channel.send_message("{}".format(msg.encode('ascii', 'ignore')))

    def write_help_message(self, channel_id):
        bot_uid = self.clients.bot_user_id()
        txt = '{}\n{}\n{}\n{}'.format(
            "I'm your friendly Slack bot written in Python. I'll *_respond_* to the following commands:",
            "> `hi <@" + bot_uid + ">` - I'll respond with a randomized greeting mentioning your user. :wave:",
            "> `<@" + bot_uid + "> joke` - I'll tell you one of my finest jokes, with a typing pause for effect. :laughing:",
            "> `<@" + bot_uid + "> attachment` - I'll demo a post with an attachment using the Web API. :paperclip:")
        self.send_message(channel_id, txt)

    def write_greeting(self, channel_id, user_id):
        greetings = ['Hi', 'Hello', 'Nice to meet you', 'Howdy', 'Salutations']
        txt = '{}, <@{}>!'.format(random.choice(greetings), user_id)
        self.send_message(channel_id, txt)

    def write_prompt(self, channel_id):
        bot_uid = self.clients.bot_user_id()
        txt = "I'm sorry, I didn't quite understand... Can I help you? (e.g. `<@" + bot_uid + "> help`)"
        self.send_message(channel_id, txt)

    def write_joke(self, channel_id):
        question = "Why did the python cross the road?"
        self.send_message(channel_id, question)
        self.clients.send_user_typing_pause(channel_id)
        answer = "To eat the chicken on the other side! :laughing:"
        self.send_message(channel_id, answer)

    def write_error(self, channel_id, err_msg):
        txt = ":face_with_head_bandage: my maker didn't handle this error very well:\n>```{}```".format(err_msg)
        self.send_message(channel_id, txt)

    def demo_attachment(self, channel_id):
        txt = "Beep Beep Boop is a ridiculously simple hosting platform for your Slackbots."
        attachment = {
            "pretext": "We bring bots to life. :sunglasses: :thumbsup:",
            "title": "Host, deploy and share your bot in seconds.",
            "title_link": "https://beepboophq.com/",
            "text": txt,
            "fallback": txt,
            "image_url": "https://storage.googleapis.com/beepboophq/_assets/bot-1.22f6fb.png",
            "color": "#7CD197",
        }
        self.clients.web.chat.post_message(channel_id, txt, attachments=[attachment], as_user='true')

    def write_task_link(self, channel_id, task):
        txt = "task # " + task
        self.send_message(channel_id, txt)
Python
0.000001
@@ -2952,26 +2952,68 @@ -txt = %22task # %22 + +fs_url = os.getenv(%22FLYSPRAY_URL%22, %22%22)%0A txt = fs_url+ task
1e930adbfb1714670ad04717401b36b59bf12558
Bump version to 0.0.2
bqdm/__init__.py
bqdm/__init__.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import

__version__ = '0.0.1'

CONTEXT_SETTINGS = dict(
    help_option_names=['-h', '--help'],
    max_content_width=120,
)
Python
0.000001
@@ -77,17 +77,17 @@ = '0.0. -1 +2 '%0A%0A%0ACONT
24f1c94817bbeeca18d9dd307c1086ad76f9f167
add missing curly braces for string formatting (#1148)
appengine/standard/storage/appengine-client/main.py
appengine/standard/storage/appengine-client/main.py
#!/usr/bin/env python

# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# [START sample]
"""A sample app that uses GCS client to operate on bucket and file."""

# [START imports]
import os

import cloudstorage
from google.appengine.api import app_identity

import webapp2
# [END imports]

# [START retries]
cloudstorage.set_default_retry_params(
    cloudstorage.RetryParams(
        initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15
    ))
# [END retries]


class MainPage(webapp2.RequestHandler):
    """Main page for GCS demo application."""

    # [START get_default_bucket]
    def get(self):
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(
            'Demo GCS Application running from Version: {}\n'.format(
                os.environ['CURRENT_VERSION_ID']))
        self.response.write('Using bucket name: \n\n'.format(bucket_name))
        # [END get_default_bucket]

        bucket = '/' + bucket_name
        filename = bucket + '/demo-testfile'
        self.tmp_filenames_to_clean_up = []

        self.create_file(filename)
        self.response.write('\n\n')

        self.read_file(filename)
        self.response.write('\n\n')

        self.stat_file(filename)
        self.response.write('\n\n')

        self.create_files_for_list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket_directory_mode(bucket)
        self.response.write('\n\n')

        self.delete_files()
        self.response.write('\n\nThe demo ran successfully!\n')

    # [START write]
    def create_file(self, filename):
        """Create a file."""
        self.response.write('Creating file {}\n'.format(filename))

        # The retry_params specified in the open call will override the default
        # retry params for this particular file handle.
        write_retry_params = cloudstorage.RetryParams(backoff_factor=1.1)
        with cloudstorage.open(
                filename, 'w', content_type='text/plain',
                options={'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar'},
                retry_params=write_retry_params) as cloudstorage_file:
            cloudstorage_file.write('abcde\n')
            cloudstorage_file.write('f'*1024*4 + '\n')
        self.tmp_filenames_to_clean_up.append(filename)
    # [END write]

    # [START read]
    def read_file(self, filename):
        self.response.write(
            'Abbreviated file content (first line and last 1K):\n')

        with cloudstorage.open(filename) as cloudstorage_file:
            self.response.write(cloudstorage_file.readline())
            cloudstorage_file.seek(-1024, os.SEEK_END)
            self.response.write(cloudstorage_file.read())
    # [END read]

    def stat_file(self, filename):
        self.response.write('File stat:\n')

        stat = cloudstorage.stat(filename)
        self.response.write(repr(stat))

    def create_files_for_list_bucket(self, bucket):
        self.response.write('Creating more files for listbucket...\n')
        filenames = [bucket + n for n in [
            '/foo1', '/foo2', '/bar', '/bar/1', '/bar/2', '/boo/']]
        for f in filenames:
            self.create_file(f)

    # [START list_bucket]
    def list_bucket(self, bucket):
        """Create several files and paginate through them."""
        self.response.write('Listbucket result:\n')

        # Production apps should set page_size to a practical value.
        page_size = 1
        stats = cloudstorage.listbucket(bucket + '/foo', max_keys=page_size)
        while True:
            count = 0
            for stat in stats:
                count += 1
                self.response.write(repr(stat))
                self.response.write('\n')

            if count != page_size or count == 0:
                break
            stats = cloudstorage.listbucket(
                bucket + '/foo', max_keys=page_size, marker=stat.filename)
    # [END list_bucket]

    def list_bucket_directory_mode(self, bucket):
        self.response.write('Listbucket directory mode result:\n')
        for stat in cloudstorage.listbucket(bucket + '/b', delimiter='/'):
            self.response.write(stat)
            self.response.write('\n')
            if stat.is_dir:
                for subdir_file in cloudstorage.listbucket(
                        stat.filename, delimiter='/'):
                    self.response.write('  {}'.format(subdir_file))
                    self.response.write('\n')

    # [START delete_files]
    def delete_files(self):
        self.response.write('Deleting files...\n')
        for filename in self.tmp_filenames_to_clean_up:
            self.response.write('Deleting file {}\n'.format(filename))
            try:
                cloudstorage.delete(filename)
            except cloudstorage.NotFoundError:
                pass
    # [END delete_files]


app = webapp2.WSGIApplication(
    [('/', MainPage)], debug=True)
# [END sample]
Python
0
@@ -1510,16 +1510,18 @@ t name: +%7B%7D %5Cn%5Cn'.fo
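The diff above inserts the missing {} placeholder into the write call; a minimal sketch of the pitfall it fixes, with a hypothetical bucket name, since str.format silently ignores surplus positional arguments instead of raising:

# str.format drops extra positional args without error, so the bug is silent.
bucket_name = 'my-demo-bucket'  # illustrative value, not from the sample app
broken = 'Using bucket name: \n\n'.format(bucket_name)
fixed = 'Using bucket name: {}\n\n'.format(bucket_name)
assert bucket_name not in broken
assert bucket_name in fixed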
649e34402477d1d93efc9be11394d69cf676af1d
fix indent
script/iris.py
script/iris.py
# coding: UTF-8 # # --- Iris.py --- # Try the Tsallis-entropy-regularized FCM method on the Iris data # import numpy as np import matplotlib.pyplot as plt from sklearn import datasets as ds from sklearn.metrics import accuracy_score import random import math import sys import copy def main(): # Read the Iris data iris = ds.load_iris() # Load the Iris dataset data = iris.data # Iris 4-dimensional data (150 samples) target = iris.target # ground truth [0,0,0,1.... # Print the standard deviation # print "std="+str(np.std(x)) # Set the various parameters C = 3 # v length N = 150 # x length P = 4 # Dimension Thigh = 20 q = 1.1 # Compute with fcm result = fcm(data,P,N,C,Thigh,q) predict = result[0] loop = result[1] # Print the ground truth and predict print "target=" np.savetxt(sys.stdout,target[None],fmt='%.0f',delimiter=' ') print "predict=" np.savetxt(sys.stdout,predict[None],fmt='%.0f',delimiter=' ') # Print the accuracy score = accuracy_score(target,predict) print str(score*100) + "% (" + str(score*N) + "/" + str(N) + ")" # Print the loop count print "loop=" + str(loop) # # Function computing the displacement used for the convergence check # @param v1 # @param v2 # def distance(v1,v2): max = 0.0 for i in range(len(v1)): score = np.linalg.norm(v1[i]-v2[i]) print score if max > score: max = score return max # # Objective function Jfcm # The goal is to minimize this function # @param u # @param x # @param v # @param q def jfcm(u,x,v,q): score = 0.0 for k in range(len(x)): for i in range(len(v)): dik = np.linalg.norm(v[i]-x[k]) score += (u[i][k] ** q) * dik return score # # fuzzy_clustering_method # Run fuzzy clustering # # 1. Initialize the cluster centers v at random # 2. Zero-clear the membership function u # 3. Repeat the following steps # 3-1. Update the loop count # 3-2. Update the temperature # 3-3. Update u # 3-4. Update v # 3-5. Check the termination condition # 4. Store the clustering result in predict # 5. Relabel so the first label is 0 and the last is 2 # 6. Return [predict,loop] # # @param x data set # @param P number of data dimensions # @param N number of data points # @param C number of cluster centers # @param Thigh initial temperature # @param q q-value # @return [predict,loop] # loop: loop count # predict: clustering result # def fcm(x,P,N,C,Thigh,q): e1 = 0.01 e2 = 0.01 # Initialize the cluster centers v = np.array( [ np.random.rand(P) for i in range(C) ]) # Initialize the membership function u = np.zeros([C,N]) # Initialize the best solution at the current temperature V = copy.deepcopy(v) score = float("inf") # Initialize the best solution at the previous temperature Vdash = copy.deepcopy(v) vdash = copy.deepcopy(v) # Start the loop loop = 0 update_temperature = 0 while True: # Update the loop count loop+=1 # Update the temperature T = Thigh * math.exp (-2.0*update_temperature**(1.0/P)) beta = 1.0 / T # --- Cal u[i][k] --- # This part is already debugged. Do not touch. for k in range(N): denominator = 0.0 for j in range(C): djk = np.linalg.norm(v[j]-x[k]) denominator += (1.0-beta*(1.0-q)*djk)**(1.0/(1.0-q)) for i in range(C): dik = np.linalg.norm(v[i]-x[k]) u[i][k] = (1.0-beta*(1.0-q)*dik)**(1.0/(1.0-q)) / denominator # --- Cal v[i] --- # This part is already debugged. Do not touch. for i in range(C): # cal denominator denominator = 0.0 for k in range(N): denominator += u[i][k]**q # cal numerator numerator = np.zeros(P) for k in range(N): numerator += (u[i][k] ** q)*x[k] # cal v num = numerator / denominator v[i] = num print "vdash=" print vdash print "v=" print v print distance(v,vdash) # --- Convergence check --- if distance(v,vdash) < e1: # Converged at the same temperature if distance(V,Vdash) < e2: # Best solutions converged across temperatures # Finish clustering break # Update the temperature update_temperature +=1 # Update Vdash Vdash = copy.deepcopy(V) # Update the best solution tmp = jfcm(u,x,v,q) if tmp < score: score = tmp V = copy.deepcopy(v) # Update vdash and loop vdash = copy.deepcopy(v) # loop end # Get the clustering result predict = np.array( [ np.argmax(u[:,k]) for k in range(N) ] ) # Reassign the labels first = predict[0] last = predict[N-1] for k in range(N): if predict[k] == first: predict[k] = 0 elif predict[k] == last: predict[k] = 2 else: predict[k] = 1 return [predict,loop] # --- main()
Python
0.00006
@@ -1174,9 +1174,9 @@ max -%3E +%3C sco
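The one-character diff above flips the comparison so distance() actually tracks the maximum displacement; the same convergence measure written with the max builtin, shadowing nothing (an illustration, not the repository's code):

import numpy as np

def max_displacement(v1, v2):
    # Largest per-center movement between two sets of cluster centers.
    return max(np.linalg.norm(a - b) for a, b in zip(v1, v2))

old = np.zeros((3, 4))
new = np.zeros((3, 4))
new[1] += 0.5  # move one center by 0.5 in every coordinate
assert abs(max_displacement(old, new) - 1.0) < 1e-9  # sqrt(4 * 0.25)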
e01a06c735d0ac59f7b0827839a5e1bcfd7200bc
Allow non-integer parts of GNOME version number
gsetting.py
gsetting.py
#!/usr/bin/python from os import environ import re import subprocess from ansible.module_utils.basic import * def _check_output_strip(command): return subprocess.check_output(command).decode('utf-8').strip() def _escape_single_quotes(string): return re.sub("'", r"'\''", string) def _split_key(full_key): key_array = full_key.split('.') schema = '.'.join(key_array[0:-1]) single_key = key_array[-1] return (schema, single_key) def _get_gnome_version(): try: return tuple(map(int, (_check_output_strip( ['gnome-shell', '--version']).split(' ')[2].split('.')))) except FileNotFoundError: return None def _get_gnome_session_pid(user): gnome_ver = _get_gnome_version() if (gnome_ver and gnome_ver >= (3, 33, 90)): # From GNOME 3.33.90 session process has changed # https://github.com/GNOME/gnome-session/releases/tag/3.33.90 pgrep_cmd = ['pgrep', '-u', user, '-f', 'session=gnome'] else: pgrep_cmd = ['pgrep', '-u', user, 'gnome-session'] try: return _check_output_strip(pgrep_cmd) except subprocess.CalledProcessError: return None def _get_phoc_session_pid(user): pgrep_cmd = ['pgrep', '-u', user, 'phoc'] try: return _check_output_strip(pgrep_cmd) except subprocess.CalledProcessError: return None def _get_dbus_bus_address(user): if user is None: if environ.get('DBUS_SESSION_BUS_ADDRESS') is None: return None return "DBUS_SESSION_BUS_ADDRESS={}".format(environ['DBUS_SESSION_BUS_ADDRESS']) pid = _get_gnome_session_pid(user) or _get_phoc_session_pid(user) if pid: return _check_output_strip( ['grep', '-z', '^DBUS_SESSION_BUS_ADDRESS', '/proc/{}/environ'.format(pid)]).strip('\0') def _run_cmd_with_dbus(user, cmd, dbus_addr): if not dbus_addr: command = ['dbus-run-session', '--'] else: command = ['export', dbus_addr, ';'] command.extend(cmd) if user is None: return _check_output_strip(['/bin/sh', '-c', " ".join(command)]) return _check_output_strip(['su', '-', user , '-c', " ".join(command)]) def _set_value(schemadir, user, full_key, value, dbus_addr): schema, single_key = _split_key(full_key) command = ['/usr/bin/gsettings'] if schemadir: command.extend(['--schemadir', schemadir]) command.extend(['set', schema, single_key, "'%s'" % _escape_single_quotes(value)]) return _run_cmd_with_dbus(user, command, dbus_addr) def _get_value(schemadir, user, full_key, dbus_addr): schema, single_key = _split_key(full_key) command = ['/usr/bin/gsettings'] if schemadir: command.extend(['--schemadir', schemadir]) command.extend(['get', schema, single_key]) return _run_cmd_with_dbus(user, command, dbus_addr) def main(): module = AnsibleModule( argument_spec = { 'state': { 'choices': ['present'], 'default': 'present' }, 'user': { 'default': None }, 'schemadir': { 'required': False }, 'key': { 'required': False }, 'value': { 'required': False }, 'settings': { 'type': 'dict', "required": False, 'default': dict() }, }, supports_check_mode = True, ) params = module.params state = module.params['state'] user = module.params['user'] schemadir = module.params['schemadir'] key = module.params['key'] value = module.params['value'] settings = module.params['settings'] any_changed = False unchanged_settings = list() changed_settings = list() if key is None and len(settings) == 0: module.fail_json(msg="Either a key or a settings dict is required, " "neither was provided.") if key is not None: settings[key] = value dbus_addr = _get_dbus_bus_address(user) for key, value in settings.items(): old_value = _get_value(schemadir, user, key, dbus_addr) result = { 'key': key, 'value': old_value } changed = old_value != value any_changed = any_changed or 
changed if changed and not module.check_mode: _set_value(schemadir, user, key, value, dbus_addr) result['new_value'] = value changed_settings.append(result) else: unchanged_settings.append(result) module.exit_json(**{ 'changed': any_changed, 'unchanged_settings': unchanged_settings, 'changed_settings': changed_settings, }) main()
Python
0.000007
@@ -450,16 +450,111 @@ e_key)%0A%0A +def _maybe_int(val):%0A try:%0A return int(val)%0A except ValueError:%0A return 0%0A%0A def _get @@ -605,16 +605,23 @@ ple(map( +_maybe_ int, (_c
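The gsetting.py diff above wraps each version component in _maybe_int; a self-contained sketch of that tolerant parsing, where the sample version strings are assumptions:

def _maybe_int(val):
    # Non-numeric parts such as 'beta' fall back to 0 so tuple comparison still works.
    try:
        return int(val)
    except ValueError:
        return 0

def parse_gnome_version(version_string):
    return tuple(_maybe_int(part) for part in version_string.split('.'))

assert parse_gnome_version('3.33.90') == (3, 33, 90)
assert parse_gnome_version('40.beta') == (40, 0)  # plain int() would raise here
assert parse_gnome_version('40.beta') >= (3, 33, 90)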
6c556f6c5e4aa70173a84f6e6854390241231021
Update the Jinja2Templates() constructor to allow PathLike (#1292)
starlette/templating.py
starlette/templating.py
import typing from starlette.background import BackgroundTask from starlette.responses import Response from starlette.types import Receive, Scope, Send try: import jinja2 # @contextfunction renamed to @pass_context in Jinja 3.0, to be removed in 3.1 if hasattr(jinja2, "pass_context"): pass_context = jinja2.pass_context else: # pragma: nocover pass_context = jinja2.contextfunction except ImportError: # pragma: nocover jinja2 = None # type: ignore class _TemplateResponse(Response): media_type = "text/html" def __init__( self, template: typing.Any, context: dict, status_code: int = 200, headers: dict = None, media_type: str = None, background: BackgroundTask = None, ): self.template = template self.context = context content = template.render(context) super().__init__(content, status_code, headers, media_type, background) async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: request = self.context.get("request", {}) extensions = request.get("extensions", {}) if "http.response.template" in extensions: await send( { "type": "http.response.template", "template": self.template, "context": self.context, } ) await super().__call__(scope, receive, send) class Jinja2Templates: """ templates = Jinja2Templates("templates") return templates.TemplateResponse("index.html", {"request": request}) """ def __init__(self, directory: str) -> None: assert jinja2 is not None, "jinja2 must be installed to use Jinja2Templates" self.env = self._create_env(directory) def _create_env(self, directory: str) -> "jinja2.Environment": @pass_context def url_for(context: dict, name: str, **path_params: typing.Any) -> str: request = context["request"] return request.url_for(name, **path_params) loader = jinja2.FileSystemLoader(directory) env = jinja2.Environment(loader=loader, autoescape=True) env.globals["url_for"] = url_for return env def get_template(self, name: str) -> "jinja2.Template": return self.env.get_template(name) def TemplateResponse( self, name: str, context: dict, status_code: int = 200, headers: dict = None, media_type: str = None, background: BackgroundTask = None, ) -> _TemplateResponse: if "request" not in context: raise ValueError('context must include a "request" key') template = self.get_template(name) return _TemplateResponse( template, context, status_code=status_code, headers=headers, media_type=media_type, background=background, )
Python
0
@@ -6,16 +6,40 @@ t typing +%0Afrom os import PathLike %0A%0Afrom s @@ -1692,27 +1692,51 @@ directory: -str +typing.Union%5Bstr, PathLike%5D ) -%3E None:%0A @@ -1887,16 +1887,25 @@ ate_env( +%0A self, di @@ -1913,19 +1913,48 @@ ectory: -str +typing.Union%5Bstr, PathLike%5D%0A ) -%3E %22ji
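The Starlette diff above widens the directory parameter to typing.Union[str, PathLike] so callers can pass pathlib.Path objects; a small sketch of why that matters, using a stand-in function rather than Jinja2Templates:

import os
import typing
from pathlib import Path

def open_templates_dir(directory: typing.Union[str, os.PathLike]) -> str:
    # os.fspath accepts both plain strings and PathLike objects.
    return os.fspath(directory)

assert open_templates_dir('templates') == 'templates'
assert open_templates_dir(Path('templates')) == 'templates'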
a0dfb1ce1a72880da34ad817c8021e54e2ce0e5d
add fields.
lib/acli/output.py
lib/acli/output.py
# from tabulate import tabulate from terminaltables import AsciiTable def output_ec2(output_type=None, instances=None): if output_type == 'console': heading = ['id', 'state'] table_data = [heading] for instance in instances: instance_id = instance[0].id instance_state = instance[0].state table_data.append([instance_id, instance_state]) table = AsciiTable(table_data) print(table.table) def output_elb(output_type=None, elbs=None): if output_type == 'console': heading = ['id', 'name'] table_data = [heading] for elb in elbs: elb_id = elb.name elb_name = elb.name table_data.append([elb_id, elb_name]) table = AsciiTable(table_data) print(table.table) # def console_table(collection_type, collection_data): # pass
Python
0
@@ -180,18 +180,62 @@ 'state' +, 'type', 'image', 'public ip', 'private ip' %5D%0A - @@ -397,57 +397,508 @@ -table_data.append(%5Binstance_id, instance_state +instance_type = instance%5B0%5D.instance_type%0A image_id = instance%5B0%5D.image_id%0A public_ip = instance%5B0%5D.ip_address%0A private_ip = instance%5B0%5D.private_ip_address%0A%0A table_data.append(%5Binstance_id,%0A instance_state,%0A instance_type,%0A image_id,%0A public_ip if public_ip else '-',%0A private_ip if private_ip else '-' %5D)%0A +%0A @@ -1272,32 +1272,32 @@ ble(table_data)%0A + print(ta @@ -1311,71 +1311,4 @@ le)%0A -%0A# def console_table(collection_type, collection_data):%0A# pass%0A%0A
2d7b3afaca97a3e6a115c077586d0a9fb9daf8b2
Fix imap connection lost (#380)
i3pystatus/mail/imap.py
i3pystatus/mail/imap.py
import sys import imaplib from i3pystatus.mail import Backend from i3pystatus.core.util import internet class IMAP(Backend): """ Checks for mail on a IMAP server """ settings = ( "host", "port", "username", "password", ('keyring_backend', 'alternative keyring backend for retrieving credentials'), "ssl", "mailbox", ) required = ("host", "username", "password") keyring_backend = None port = 993 ssl = True mailbox = "INBOX" imap_class = imaplib.IMAP4 connection = None last = 0 def init(self): if self.ssl: self.imap_class = imaplib.IMAP4_SSL def get_connection(self): if not self.connection: self.connection = self.imap_class(self.host, self.port) self.connection.login(self.username, self.password) self.connection.select(self.mailbox) self.connection.select(self.mailbox) return self.connection @property def unread(self): if internet(): conn = self.get_connection() self.last = len(conn.search(None, "UnSeen")[1][0].split()) return self.last Backend = IMAP
Python
0.000001
@@ -4,19 +4,23 @@ ort -sys +imaplib %0Aimport imap @@ -15,23 +15,22 @@ %0Aimport -imaplib +socket %0A%0Afrom i @@ -63,50 +63,8 @@ end%0A -from i3pystatus.core.util import internet%0A %0A%0Acl @@ -644,32 +644,535 @@ nnection(self):%0A + if self.connection:%0A try:%0A self.connection.select(self.mailbox)%0A except socket.error:%0A # NOTE(sileht): retry just once if the connection have been%0A # broken to ensure this is not a sporadic connection lost.%0A # Like wifi reconnect, sleep wake up%0A try:%0A self.connection.logout()%0A except socket.error:%0A pass%0A self.connection = None%0A%0A if not s @@ -1373,54 +1373,8 @@ x)%0A%0A - self.connection.select(self.mailbox)%0A%0A @@ -1441,35 +1441,8 @@ f):%0A - if internet():%0A @@ -1474,20 +1474,16 @@ ction()%0A -
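The imap.py diff above probes the cached connection and rebuilds it once after a socket error; a hedged sketch of that retry-once pattern, with a generic connection factory and probe standing in for imaplib:

import socket

def get_connection(cache, connect, probe):
    # cache is a one-element list holding the current connection or None.
    if cache[0] is not None:
        try:
            probe(cache[0])  # e.g. re-select the mailbox
        except socket.error:
            # Stale socket (wifi reconnect, suspend/resume): drop it and
            # rebuild exactly once instead of retrying in a loop.
            cache[0] = None
    if cache[0] is None:
        cache[0] = connect()
    return cache[0]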
c0f959446731b8ce2677c56afd5456c2e047cabb
Change the webapp tests to not interfere with instance level connections.
ichnaea/webapp/tests.py
ichnaea/webapp/tests.py
from ichnaea.config import DummyConfig from ichnaea.tests.base import ( _make_app, _make_db, DBTestCase, RedisIsolation, REDIS_URI, SQLURI, ) class TestApp(RedisIsolation, DBTestCase): def test_db_hooks(self): app_config = DummyConfig({'ichnaea': { 'db_master': SQLURI, 'db_slave': SQLURI, }}) app = _make_app(app_config=app_config, _raven_client=self.raven_client, _redis_client=self.redis_client, _stats_client=self.stats_client, ) self.db_rw = app.app.registry.db_rw self.db_ro = app.app.registry.db_ro app.get('/stats_wifi.json', status=200) def test_db_config(self): self.db_rw = _make_db() self.db_ro = _make_db() app = _make_app(_db_rw=self.db_rw, _db_ro=self.db_ro, _raven_client=self.raven_client, _redis_client=self.redis_client, _stats_client=self.stats_client, ) app.get('/stats_wifi.json', status=200) def test_redis_config(self): app_config = DummyConfig({'ichnaea': { 'db_master': SQLURI, 'db_slave': SQLURI, 'redis_url': REDIS_URI, }}) app = _make_app(app_config=app_config, _raven_client=self.raven_client, _stats_client=self.stats_client) self.assertTrue(app.app.registry.redis_client is not None)
Python
0
@@ -225,13 +225,14 @@ _db_ -hooks +config (sel @@ -603,37 +603,32 @@ )%0A -self. db_rw = app.app. @@ -642,37 +642,32 @@ y.db_rw%0A -self. db_ro = app.app. @@ -693,46 +693,300 @@ -app.get('/stats_wifi.json', status=200 +# the configured databases are working%0A try:%0A self.assertTrue(db_rw.ping())%0A self.assertTrue(db_ro.ping())%0A finally:%0A # clean up the new db engine's _make_app created%0A db_rw.engine.pool.dispose()%0A db_ro.engine.pool.dispose( )%0A%0A @@ -992,38 +992,37 @@ def test_db_ -config +hooks (self):%0A @@ -1017,29 +1017,24 @@ f):%0A -self. db_rw = _mak @@ -1048,21 +1048,16 @@ -self. db_ro = @@ -1098,21 +1098,16 @@ (_db_rw= -self. db_rw,%0A @@ -1136,21 +1136,16 @@ _db_ro= -self. db_ro,%0A @@ -1352,46 +1352,166 @@ -app.get('/stats_wifi.json', status=200 +# check that our _db hooks are passed through%0A self.assertTrue(app.app.registry.db_rw is db_rw)%0A self.assertTrue(app.app.registry.db_ro is db_ro )%0A%0A @@ -1593,73 +1593,8 @@ : %7B%0A - 'db_master': SQLURI,%0A 'db_slave': SQLURI,%0A @@ -1676,32 +1676,118 @@ fig=app_config,%0A + _db_rw=self.db_rw,%0A _db_ro=self.db_ro,%0A @@ -1892,32 +1892,31 @@ -self.assertTrue( +redis_client = app.app. @@ -1940,18 +1940,150 @@ ient - is not None +%0A self.assertTrue(redis_client is not None)%0A self.assertEqual(%0A redis_client.connection_pool.connection_kwargs%5B'db'%5D, 1 )%0A
aa096411780cb2c02e123e1f4e456d6fb4b3d551
make it guess etc directory for ease of use
src/server/rz_cli.py
src/server/rz_cli.py
#!/usr/bin/python import argparse # TODO - use the newer / shorter argument parser. y? from collections import namedtuple import json import uuid from neo4j_util import generate_random_id__uuid from rz_server import init_config from rz_kernel import RZ_Kernel from model.graph import Topo_Diff import db_controller as dbc def clone(rzdoc_name): return kernel.rzdoc__clone(kernel.rzdoc__lookup_by_name(rzdoc_name)) def names(): return [v['name'] for v in kernel.rzdoc__list()] def create_id(): return uuid.uuid4().get_hex()[:8] def merge_topos(topos): """ Merge a number of Topo_Diff's result node set is union of nodes conflicting nodes: same name nodes take the first (id wise) attributes: take the first result link set is merged (well defined: replace dropped id by chosen id in all links) """ result = Topo_Diff() node_names = dict() node_ids = set() dropped_nodes = set() links_src_dst = set() def rename(links, from_id, to_id): for link in links: if link['__src_id'] == from_id: link['__src_id'] = to_id if link['__dst_id'] == from_id: link['__dst_id'] = to_id def merge(topo): links = topo.link_set_add nodes = topo.node_set_add new_nodes = [] for node in nodes: name = node['name'] if name in node_names: dropped_nodes.add(node['name']) rename(links, node['id'], node_names[name]['id']) else: node_names[name] = node new_nodes.append(node) node_ids.add(node['id']) new_links = [] for link in links: k = (link['__src_id'], link['__dst_id']) if k not in links_src_dst: links_src_dst.add(k) new_links.append(link) if len(dropped_nodes) > 0: print("dropped duplicate nodes count: %s" % len(dropped_nodes)) if len(new_links) != len(links): print("dropped duplicate links count: %s" % (len(links) - len(new_links))) print("adding %s nodes, %s links" % (len(new_nodes), len(new_links))) result.node_set_add.extend(new_nodes) result.link_set_add.extend(new_links) for topo in topos: merge(topo) # now realloc all ids since otherwise they are duplicates of originals renames = [(node['id'], create_id()) for node in result.node_set_add] def show(word): print("=" * 80) print(word) print(result.node_set_add) print(result.link_set_add) for src, dst in renames: rename(result.link_set_add, src, dst) for node, (_, new_id) in zip(result.node_set_add, renames): node['id'] = new_id return result def merge(destination_name, sources=None): if sources == None: sources = names() docs = [kernel.rzdoc__lookup_by_name(name) for name in sources] topos = [kernel.rzdoc__clone(doc) for doc in docs] destination = kernel.rzdoc__create(destination_name) ctx = namedtuple('Context', ['user_name', 'rzdoc'])(None, destination) kernel.diff_commit__topo(merge_topos(topos), ctx) def remove(rzdoc_name): kernel.rzdoc__delete(kernel.rzdoc__lookup_by_name(rzdoc_name)) if __name__ == '__main__': p = argparse.ArgumentParser(description="rhizi command line interface") p.add_argument('--config-dir', help='path to Rhizi config dir', default='res/etc') p.add_argument('--list-names', default=False, action='store_true') p.add_argument('--list-table', default=False, action='store_true') p.add_argument('--delete', help='doc name to delete') p.add_argument('--merge-target', help='name of resulting rzdoc') p.add_argument('--merge', help='comma separated names of docs to merge') p.add_argument('--merge-file', help='filename with line per doc name') p.add_argument('--clone', help='show contents of doc') args = p.parse_args() cfg = init_config(args.config_dir) kernel = RZ_Kernel() db_ctl = dbc.DB_Controller(cfg) kernel.db_ctl = db_ctl if args.list_table: 
print('\n'.join('%30s %30s' % (d['name'].ljust(30), d['id'].ljust(30)) for d in kernel.rzdoc__list())) raise SystemExit if args.list_names: print('\n'.join(d['name'] for d in kernel.rzdoc__list())) raise SystemExit if args.delete: remove(args.delete) if args.clone: print(json.dumps(clone(args.clone).to_json_dict())) if args.merge_target: merge_sources = None if args.merge: merge_sources = args.merge.split(',') if args.merge_file: with open(args.merge_file) as fd: merge_sources = [line.strip() for line in fd.readlines()] if merge_sources: merge(args.merge_target, merge_sources)
Python
0
@@ -12,16 +12,26 @@ python%0A%0A +import os%0A import a @@ -3496,17 +3496,12 @@ ult= -'res/etc' +None )%0A @@ -4009,16 +4009,184 @@ ()%0A %0A + if args.config_dir is None:%0A for d in %5B'res/etc', '/etc/rhizi'%5D:%0A if os.path.exists(d):%0A args.config_dir = d%0A break%0A cfg
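The rz_cli.py diff above guesses the config directory by probing candidates in order; the same first-existing-path idiom as a tiny standalone helper (candidate list taken from the diff, everything else illustrative):

import os

def guess_config_dir(candidates=('res/etc', '/etc/rhizi')):
    for d in candidates:
        if os.path.exists(d):
            return d
    return None  # caller may still fail loudly if nothing matched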
17f1c210c9c8b410cb6888a51ea1d863b74c14be
Use has_module check in _can_read
imageio/plugins/gdal.py
imageio/plugins/gdal.py
# -*- coding: utf-8 -*- # Copyright (c) 2015, imageio contributors # imageio is distributed under the terms of the (new) BSD License. """ Plugin for reading gdal files. """ from __future__ import absolute_import, print_function, division from .. import formats from ..core import Format _gdal = None # lazily loaded in load_lib() def load_lib(): global _gdal try: import osgeo.gdal as _gdal except ImportError: raise ImportError("The GDAL format relies on the GDAL package." "Please refer to http://www.gdal.org/" "for further instructions.") return _gdal GDAL_FORMATS = ('.tiff', ' .tif', '.img', '.ecw', '.jpg', '.jpeg') class GdalFormat(Format): """ Parameters for reading ---------------------- None """ def _can_read(self, request): return request.filename.lower().endswith(GDAL_FORMATS) def _can_write(self, request): return False # -- class Reader(Format.Reader): def _open(self): if not _gdal: load_lib() self._ds = _gdal.Open(self.request.get_local_filename()) def _close(self): del self._ds def _get_length(self): return 1 def _get_data(self, index): if index != 0: raise IndexError('Gdal file contains only one dataset') return self._ds.ReadAsArray(), self._get_meta_data(index) def _get_meta_data(self, index): return self._ds.GetMetadata() # Add this format formats.add_format(GdalFormat( 'gdal', 'Geospatial Data Abstraction Library', ' '.join(GDAL_FORMATS), 'iIvV'))
Python
0
@@ -281,16 +281,28 @@ t Format +, has_module %0A%0A_gdal @@ -857,32 +857,151 @@ self, request):%0A + if request.filename.lower().endswith('.ecw'):%0A return True%0A if has_module('osgeo.gdal'):%0A return r @@ -1033,28 +1033,31 @@ ndswith( -GDAL_FORMATS +self.extensions )%0A%0A d
70f1838951460c16b7eb4b8220621c198d4634a5
remove pdb
inferelator_ng/tfa.py
inferelator_ng/tfa.py
import numpy as np import pandas as pd from scipy import linalg class TFA: """ TFA calculates transcription factor activity using matrix pseudoinverse Parameters -------- prior: pd.dataframe binary or numeric g by t matrix stating existence of gene-TF interactions. g--gene, t--TF. exp.mat: pd.dataframe normalized expression g by c matrix. g--gene, c--conditions exp.mat.halftau: pd.dataframe normalized expression matrix for time series. dup_self=True: boolean If dup_slef (duplicate self) is True, TFs that other TFs with the exact same set of interactions in the prior are kept and will have the same activities """ def __init__(self, prior, exp_mat, exp_mat_halftau): self.prior = prior self.exp_mat = exp_mat self.exp_mat_halftau = exp_mat_halftau def tfa(self, allow_self_interactions_for_duplicate_prior_columns = True): import pdb; pdb.set_trace() # Create activity dataframe with default values set to the expression activity = pd.DataFrame(self.exp_mat.loc[self.prior.columns,:].values, index = self.prior.columns, columns = self.exp_mat.columns) # Finds tfs that have non-zero regulation # TODO: Remove as some form of pre-processing??? non_zero_tfs = self.prior.loc[:, (self.prior != 0).any(axis=0)].columns.values.tolist() # dup_tfs: duplicated TFs dup_tfs = [] if allow_self_interactions_for_duplicate_prior_columns: # Everything up til now is useless if the prior is well-made. # could replace with checks: check the TF list is duplicates = self.prior[non_zero_tfs].transpose().duplicated(keep=False) # mark duplicates as true dup_tfs = duplicates[duplicates].index.tolist() # find non-duplicated TFs that are also present in target gene list ndup_tfs = list(set(non_zero_tfs).difference(dup_tfs)) self_tfs = list(set(ndup_tfs).intersection(self.prior.index.values.tolist())) # Set the diagonal of the self-interaction tfs to zero subset = self.prior.loc[self_tfs, self_tfs].values np.fill_diagonal(subset, 0) self.prior.set_value(self_tfs, self_tfs, subset) if non_zero_tfs: activity.loc[non_zero_tfs,:] = np.matrix(linalg.pinv2(self.prior[non_zero_tfs])) * np.matrix(self.exp_mat_halftau) return activity
Python
0.000024
@@ -956,44 +956,8 @@ e):%0A - import pdb; pdb.set_trace()%0A
ebfc7969fc2559d7f67eae628f00e0465b85e0c5
Add blur filter to supported URLs
imboclient/url/image.py
imboclient/url/image.py
from imboclient.url import accesstoken from imboclient.url import url class UrlImage (url.Url): def __init__(self, base_url, public_key, private_key, image_identifier): url.Url.__init__(self, base_url, public_key, private_key) self._image_identifier = image_identifier def resource_url(self): return self._base_url + '/users/' + self._public_key + '/images/' + self._image_identifier def border(self, color = '000000', width = 1, height = 1): self.add_query_param('t[]', "border:color={},width={},height={}".format(color, width, height)) return self def compress(self, quality = 75): self.add_query_param('t[]', "compress:quality={}".format(quality)) return self def convert(self, ctype): self._image_identifier = self._image_identifier[:32] + '.' + ctype return self def gif(self): self.convert('gif') return self def jpg(self): self.convert('jpg') return self def png(self): self.convert('png') return self def crop(self, x, y, width, height): self.add_query_param('t[]', "crop:x={},y={},width={},height={}".format(x, y, width, height)) return self def flip_horizontally(self): self.add_query_param('t[]', 'flipHorizontally') return self def flip_vertically(self): self.add_query_param('t[]', 'flipVertically') return self def resize(self, width = None, height = None): params = [] if (width): params.append('width='+str(width)) if (height): params.append('height='+str(height)) self.add_query_param('t[]', 'resize:' + ",".join(params)) return self def max_size(self, max_width = None, max_height = None): params = [] if (max_width): params.append('width='+str(max_width)) if (max_height): params.append('height='+str(max_height)) self.add_query_param('t[]', 'maxSize:' + ",".join(params)) return self def rotate(self, angle, bg = '000000'): self.add_query_param('t[]', "rotate:angle={},bg={}".format(angle, bg)) return self def thumbnail(self, width = 50, height = 50, fit = 'outbound'): self.add_query_param('t[]', "thumbnail:width={},height={},fit={}".format(width, height, fit)) return self def canvas(self, width, height, mode = None, x = None, y = None, bg = None): self.add_query_param('t[]', "canvas:width={},height={},mode={},x={},y={},bg={}".format(width, height, mode, x, y, bg)) return self def transpose(self): self.add_query_param('t[]', "transpose") return self def transverse(self): self.add_query_param('t[]', "transverse") return self def desaturate(self): self.add_query_param('t[]', "desaturate") return self def sepia(self, threshold = 80): self.add_query_param('t[]', "sepia:threshold={}".format(threshold)) return self def reset(self): url.Url.reset() self._image_identifier = self._image_identifier[:32] return self
Python
0
@@ -3045,32 +3045,208 @@ return self%0A%0A + def blur(self, type='gaussian', radius=5, sigma=2):%0A self.add_query_param('t%5B%5D', %22blur:type=%7B%7D,radius=%7B%7D,sigma=%7B%7D%22.format(type, radius, sigma))%0A return self%0A%0A def reset(se
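The imboclient diff above adds blur as another chainable transformation; a compact sketch of the return-self builder style it follows, with a simplified stand-in class:

class UrlSketch:
    def __init__(self):
        self.params = []

    def add_query_param(self, key, value):
        self.params.append((key, value))

    def blur(self, type='gaussian', radius=5, sigma=2):
        self.add_query_param('t[]', 'blur:type={},radius={},sigma={}'.format(type, radius, sigma))
        return self  # returning self is what makes url.blur().sepia() chains work

url = UrlSketch().blur(radius=3)
assert url.params == [('t[]', 'blur:type=gaussian,radius=3,sigma=2')]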
f1d76611b6b7c2f1b1a15c72976e5c1029f3b4a8
Use the executable bit on AWS callback.
scripts/aws.py
scripts/aws.py
import logging import requests from requests.exceptions import RequestException import sys import boto.ec2 logger = logging.getLogger(__name__) class AWSConnection: def __init__(self, config): self.available = False self.config = config if 'cluster_name' in config: self.cluster_name = config.get('cluster_name') elif 'etcd' in config and isinstance(config['etcd'], dict): self.cluster_name = config['etcd'].get('scope', 'unknown') else: self.cluster_name = 'unknown' try: # get the instance id r = requests.get('http://169.254.169.254/latest/dynamic/instance-identity/document', timeout=0.1) except RequestException: logger.info("cannot query AWS meta-data") return if r.ok: try: content = r.json() self.instance_id = content['instanceId'] self.region = content['region'] except Exception as e: logger.info('unable to fetch instance id and region from AWS meta-data: {}'.format(e)) return self.available = True def aws_available(self): return self.available def _tag_ebs(self, role): """ set tags, carrying the cluster name, instance role and instance id for the EBS storage """ if not self.available: return False tags = {'Name': 'spilo_'+self.cluster_name, 'Role': role, 'Instance': self.instance_id} try: conn = boto.ec2.connect_to_region(self.region) volumes = conn.get_all_volumes(filters={'attachment.instance-id': self.instance_id}) conn.create_tags([v.id for v in volumes], tags) except Exception as e: logger.info('could not set tags for EBS storage devices attached: {}'.format(e)) return False return True def _tag_ec2(self, role): """ tag the current EC2 instance with a cluster role """ if not self.available: return False tags = {'Role': role} try: conn = boto.ec2.connect_to_region(self.region) conn.create_tags([self.instance_id], tags) except Exception as e: logger.info("could not set tags for EC2 instance {}: {}".format(self.instance_id, e)) return False return True def on_role_change(self, new_role): ret = self._tag_ec2(new_role) return self._tag_ebs(new_role) and ret if __name__ == '__main__': if len(sys.argv) != 3: print ("Usage: {0} action role name".format(sys.argv[0])) return 1 action, role, name = sys.argv[1:] if action in ('on_start', 'on_stop', 'on_role_change'): aws = AWSConnection({'cluster_name': name}) aws.on_role_change(role) return 0 return 2
Python
0
195925232b47e2c607b95dfaabb46f884f1a1a5e
remove extra newlines in usage string.
lib/bup/options.py
lib/bup/options.py
"""Command-line options parser. With the help of an options spec string, easily parse command-line options. """ import sys import textwrap import getopt import re class OptDict: def __init__(self): self._opts = {} def __setitem__(self, k, v): if k.startswith('no-') or k.startswith('no_'): k = k[3:] v = not v self._opts[k] = v def __getitem__(self, k): if k.startswith('no-') or k.startswith('no_'): return not self._opts[k[3:]] return self._opts[k] def __getattr__(self, k): return self[k] def _default_onabort(msg): sys.exit(97) def _intify(v): try: vv = int(v or '') if str(vv) == v: return vv except ValueError: pass return v def _remove_negative_kv(k, v): if k.startswith('no-') or k.startswith('no_'): return k[3:], not v return k,v def _remove_negative_k(k): return _remove_negative_kv(k, None)[0] class Options: """Option parser. When constructed, two strings are mandatory. The first one is the command name showed before error messages. The second one is a string called an optspec that specifies the synopsis and option flags and their description. For more information about optspecs, consult the bup-options(1) man page. Two optional arguments specify an alternative parsing function and an alternative behaviour on abort (after having output the usage string). By default, the parser function is getopt.gnu_getopt, and the abort behaviour is to exit the program. """ def __init__(self, exe, optspec, optfunc=getopt.gnu_getopt, onabort=_default_onabort): self.exe = exe self.optspec = optspec self._onabort = onabort self.optfunc = optfunc self._aliases = {} self._shortopts = 'h?' self._longopts = ['help'] self._hasparms = {} self._defaults = {} self._usagestr = self._gen_usage() def _gen_usage(self): out = [] lines = self.optspec.strip().split('\n') lines.reverse() first_syn = True while lines: l = lines.pop() if l == '--': break out.append('%s: %s\n' % (first_syn and 'usage' or ' or', l)) first_syn = False out.append('\n') while lines: l = lines.pop() if l.startswith(' '): out.append('\n%s\n' % l.lstrip()) elif l: (flags, extra) = l.split(' ', 1) extra = extra.strip() if flags.endswith('='): flags = flags[:-1] has_parm = 1 else: has_parm = 0 g = re.search(r'\[([^\]]*)\]$', extra) if g: defval = g.group(1) else: defval = None flagl = flags.split(',') flagl_nice = [] for f in flagl: f,dvi = _remove_negative_kv(f, _intify(defval)) self._aliases[f] = _remove_negative_k(flagl[0]) self._hasparms[f] = has_parm self._defaults[f] = dvi if len(f) == 1: self._shortopts += f + (has_parm and ':' or '') flagl_nice.append('-' + f) else: f_nice = re.sub(r'\W', '_', f) self._aliases[f_nice] = _remove_negative_k(flagl[0]) self._longopts.append(f + (has_parm and '=' or '')) self._longopts.append('no-' + f) flagl_nice.append('--' + f) flags_nice = ', '.join(flagl_nice) if has_parm: flags_nice += ' ...' prefix = ' %-20s ' % flags_nice argtext = '\n'.join(textwrap.wrap(extra, width=70, initial_indent=prefix, subsequent_indent=' '*28)) out.append(argtext + '\n') else: out.append('\n') return ''.join(out).rstrip() + '\n' def usage(self, msg=""): """Print usage string to stderr and abort.""" sys.stderr.write(self._usagestr) e = self._onabort and self._onabort(msg) or None if e: raise e def fatal(self, s): """Print an error message to stderr and abort with usage string.""" msg = 'error: %s\n' % s sys.stderr.write(msg) return self.usage(msg) def parse(self, args): """Parse a list of arguments and return (options, flags, extra). 
In the returned tuple, "options" is an OptDict with known options, "flags" is a list of option flags that were used on the command-line, and "extra" is a list of positional arguments. """ try: (flags,extra) = self.optfunc(args, self._shortopts, self._longopts) except getopt.GetoptError, e: self.fatal(e) opt = OptDict() for k,v in self._defaults.iteritems(): k = self._aliases[k] opt[k] = v for (k,v) in flags: k = k.lstrip('-') if k in ('h', '?', 'help'): self.usage() if k.startswith('no-'): k = self._aliases[k[3:]] v = 0 else: k = self._aliases[k] if not self._hasparms[k]: assert(v == '') v = (opt._opts.get(k) or 0) + 1 else: v = _intify(v) opt[k] = v for (f1,f2) in self._aliases.iteritems(): opt[f1] = opt._opts.get(f2) return (opt,flags,extra)
Python
0.000013
@@ -2363,32 +2363,64 @@ ut.append('%5Cn')%0A + last_was_option = False%0A while li @@ -2416,32 +2416,32 @@ while lines:%0A - l = @@ -2518,29 +2518,142 @@ nd(' -%5Cn%25s%5Cn' %25 l.lstrip()) +%25s%25s%5Cn' %25 (last_was_option and '%5Cn' or '',%0A l.lstrip()))%0A last_was_option = False %0A @@ -4377,46 +4377,125 @@ -else:%0A out.append('%5Cn') + last_was_option = True%0A else:%0A out.append('%5Cn')%0A last_was_option = False %0A
00cd014118c4af028579143db73f089a169a0e2d
Fix error when you provide the wrong arguments
lib/circonusapi.py
lib/circonusapi.py
import json import urllib import urllib2 class CirconusAPI(object): def __init__(self, token): self.hostname = 'circonus.com' self.token = token # List valid api methods and their parameters here # The two lists are required and optional parameters self.methods = { ### Check management 'list_agents': { 'form_method': 'GET' }, 'list_checks': { 'form_method': 'GET', 'optional': ['active'] }, 'list_metrics': { 'form_method': 'GET', 'required': ['check_id'] }, 'add_check_bundle': { 'form_method': 'POST', 'required': ['agent_id', 'target', 'metric_name'], 'optional': ['module', 'period', 'timeout','*'] }, 'edit_check_bundle': { 'form_method': 'POST', 'required': ['bundle_id', 'metric_name', 'period', 'timeout'], 'optional': ['*'] }, 'enable_check_bundle': { 'form_method': 'POST', 'required': ['bundle_id'] }, 'disable_check_bundle': { 'form_method': 'POST', 'required': ['bundle_id'] }, 'enable_check': { 'form_method': 'POST', 'required': ['check_id'] }, 'disable_check': { 'form_method': 'POST', 'required': ['check_id'] }, ### Account management 'list_accounts': { 'form_method': 'POST' }, 'list_users': { 'form_method': 'GET', 'required': ['check_id', 'metric_name'] }, 'list_contact_groups': { 'form_method': 'GET' }, 'add_contact_group': { 'form_method': 'POST', 'required': ['name'], 'optional': ['agg_window'] }, 'edit_contact_group': { 'form_method': 'POST', 'required': ['contact_group_id'], 'optional': ['name', 'agg_window'] }, 'remove_contact_group': { 'form_method': 'POST', 'required': ['contact_group_id'] }, 'add_contact': { 'form_method': 'POST', 'required' : ['contact_group_id', 'contact_method'], 'optional' : ['user_id', 'contact_info'] }, 'remove_contact': { 'form_method': 'POST', 'required': ['contact_group_id'], 'optional': ['user_id', 'id', 'contact_method'] }, ### Rule management 'list_alerts': { 'form_method': 'GET', 'required': ['start', 'end'] }, 'list_rules': { 'form_method': 'GET', 'required': ['check_id', 'metric_name'] }, 'add_metric_rule': { 'form_method': 'POST', 'required': ['check_id', 'metric_name', 'order', 'severity', 'value'] }, 'remove_metric_rule': { 'form_method': 'POST', 'required' : ['check_id', 'metric_name', 'order'] }, 'add_metric_parent': { 'form_method': 'POST', 'required' : ['check_id', 'parent_check_id', 'metric_name', 'parent_metric_name'] }, 'remove_metric_parent': { 'form_method': 'POST', 'required': ['check_id', 'metric_name'] }, 'add_rule_contact_group': { 'form_method': 'POST', 'required': ['contact_group_id', 'check_id', 'metric_name', 'severity'] }, 'remove_rule_contact_group': { 'form_method': 'POST', 'required': ['contact_group_id', 'check_id', 'metric_name', 'severity'] }, ### Graph management 'get_graph': { 'api_method': 'graph', 'form_method': 'GET', 'required': ['graph_id'] }, 'add_graph': { 'api_method': 'graph', 'form_method': 'POST', 'required': ['graph_data'] }, 'edit_graph': { 'api_method': 'graph', 'form_method': 'POST', 'required': ['graph_id', 'graph_data'] }, 'remove_graph': { 'form_method': 'POST', 'required': ['graph_id'] }, 'list_graphs': { 'form_method': 'GET' }, 'get_worksheet': { 'api_method': 'worksheet', 'form_method': 'GET', 'required': ['worksheet_id'] }, 'add_worksheet': { 'api_method': 'worksheet', 'form_method': 'POST', 'required': ['worksheet_data'] }, 'edit_worksheet': { 'api_method': 'worksheet', 'form_method': 'POST', 'required': ['worksheet_id', 'worksheet_data'] }, 'remove_worksheet': { 'form_method': 'POST', 'required': ['worksheet_id'] }, 'list_worksheets': { 'form_method': 'GET' } } def __getattr__(self, name): if 
name in self.methods: def f(**parameters): # Verify that we passed the right parameters required = set(self.methods[name].get('required', [])) optional = set(self.methods[name].get('optional', [])) params = set(parameters.keys()) if not params >= required: raise TypeError("%s requires the following arguments: %s" % (name, ' '.join(self.methods[name][0]))) if '*' not in optional and not params <= (required | optional): raise TypeError("Invalid parameters given to %s" % name) # Make the api call return self.api_call( self.methods[name].get('form_method', 'GET'), self.methods[name].get('api_method', name), **parameters) return f else: raise AttributeError("%s instance has no attribute '%s'" % ( self.__class__.__name__, name)) def api_call(self, form_method, method, **parameters): """Performs a circonus api call. Post is always used, even for read only requests. The api says that post is always valid, so there is no need to decide if something is read/write and setting get/post appropriately """ # Convert list to multiple values # i.e. "a" : [1,2,3] to (eventually) a=1&a=2&a=3 plist = [] for k in parameters: if type(parameters[k]) == list: for v in parameters[k]: plist.append((k,v)) else: plist.append((k, parameters[k])) url = "https://%s/api/json/%s" % (self.hostname, method) if form_method == 'POST': data = urllib.urlencode(plist) elif form_method == 'GET': data = None url = "%s?%s" % (url, urllib.urlencode(plist)) req = urllib2.Request( url = url, data = data, headers = { "X-Circonus-Auth-Token" : self.token, "X-Circonus-App-Name" : "Circus" } ) try: fh = urllib2.urlopen(req) except urllib2.HTTPError, e: if e.code == 401: raise TokenNotValidated if e.code == 403: raise AccessDenied raise response = json.load(fh) fh.close() return response class CirconusAPIException(Exception): pass class TokenNotValidated(CirconusAPIException): pass class AccessDenied(CirconusAPIException): pass
Python
0.000006
@@ -6334,16 +6334,57 @@ '.join( +%0A self.met @@ -6394,17 +6394,26 @@ s%5Bname%5D%5B -0 +'required' %5D)))%0A
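The circonusapi diff above fixes the error message to join spec['required'] instead of indexing the spec with 0; a runnable sketch of the set-based validation it corrects, with a made-up method spec:

def check_params(name, spec, parameters):
    required = set(spec.get('required', []))
    optional = set(spec.get('optional', []))
    params = set(parameters)  # keys of the kwargs dict
    if not params >= required:
        raise TypeError('%s requires the following arguments: %s'
                        % (name, ' '.join(spec['required'])))
    if '*' not in optional and not params <= (required | optional):
        raise TypeError('Invalid parameters given to %s' % name)

spec = {'required': ['check_id'], 'optional': ['active']}
check_params('list_metrics', spec, {'check_id': 1})  # passes
try:
    check_params('list_metrics', spec, {'active': True})  # missing check_id
except TypeError as e:
    assert 'check_id' in str(e)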
c8bdf3b95b2ff8e4049a109f65728619a55a927c
Add parallelism to generate_departures (that was easy)
busstops/management/commands/generate_departures.py
busstops/management/commands/generate_departures.py
from datetime import date, timedelta from django.core.management.base import BaseCommand from django.db import transaction from txc import txc from ...models import Region, Service, Journey, StopUsageUsage, StopPoint from ...utils import get_files_from_zipfile ONE_DAY = timedelta(days=1) def handle_timetable(service, timetable, day): if hasattr(timetable, 'operating_profile') and day.weekday() not in timetable.operating_profile.regular_days: return if not timetable.operating_period.contains(day): return # if not hasattr(timetable, 'groupings'): # return for grouping in timetable.groupings: stops = {row.part.stop.atco_code for row in grouping.rows} existent_stops = StopPoint.objects.filter(atco_code__in=stops).values_list('atco_code', flat=True) for vj in grouping.journeys: if not vj.should_show(day): continue date = day previous_time = None stopusageusages = [] journey = Journey(service=service, datetime='{} {}'.format(date, vj.departure_time)) for i, (su, time) in enumerate(vj.get_times()): if previous_time and previous_time > time: date += ONE_DAY if su.stop.atco_code in existent_stops: if not su.activity or su.activity.startswith('pickUp'): stopusageusages.append( StopUsageUsage(datetime='{} {}'.format(date, time), order=i, stop_id=su.stop.atco_code) ) journey.destination_id = su.stop.atco_code previous_time = time if journey.destination_id: journey.save() for suu in stopusageusages: suu.journey = journey StopUsageUsage.objects.bulk_create(stopusageusages) @transaction.atomic def handle_region(region): today = date.today() NEXT_WEEK = today + ONE_DAY * 7 # delete journeys before today print('deleting journeys before', today) print(Journey.objects.filter(service__region=region, datetime__date__lt=today).delete()) # get the date of the last generated journey last_journey = Journey.objects.filter(service__region=region).order_by('datetime').last() if last_journey: today = last_journey.datetime.date() + ONE_DAY if today > NEXT_WEEK: return for service in Service.objects.filter(region=region, current=True): # print(service) for i, xml_file in enumerate(get_files_from_zipfile(service)): timetable = txc.Timetable(xml_file, None) day = today while day <= NEXT_WEEK: # print('generating departures for', day) handle_timetable(service, timetable, day) day += ONE_DAY class Command(BaseCommand): def handle(self, *args, **options): for region in Region.objects.all().exclude(id__in=('L', 'Y', 'NI')): print(region) handle_region(region)
Python
0.000002
@@ -1,12 +1,45 @@ +from multiprocessing import Pool%0A from datetim @@ -2022,16 +2022,34 @@ egion):%0A + print(region)%0A toda @@ -3054,21 +3054,64 @@ -for +pool = Pool(processes=4)%0A pool.map(handle_ region - in +, Reg @@ -3164,66 +3164,6 @@ I')) -:%0A print(region)%0A handle_region(region )%0A
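The generate_departures diff above swaps the sequential region loop for multiprocessing.Pool.map; a minimal sketch of that fan-out, with a trivial placeholder for the per-region work:

from multiprocessing import Pool

def handle_region(region):
    return region.upper()  # placeholder for the real per-region work

if __name__ == '__main__':
    pool = Pool(processes=4)
    results = pool.map(handle_region, ['east', 'west', 'north'])
    pool.close()
    pool.join()
    assert results == ['EAST', 'WEST', 'NORTH']  # map preserves input order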
eaa062840c0b56bbd7c47986b77f08528bb39eb7
Fix typo leading to misinterpretation of the doc in the implementation.
ppp_datamodel/communication.py
ppp_datamodel/communication.py
"""Contains the classes representing a request to and a response of a module.""" import json from .abstractnode import register, AbstractNode class Request: """Represents a request. https://github.com/ProjetPP/Documentation/blob/master/module-communication.md#request """ __slots__ = ('language', 'pertinence', 'tree') def __init__(self, language, tree): if isinstance(tree, dict) or isinstance(tree, str): tree = AbstractNode.from_json(tree) self.language = language self.tree = tree def __repr__(self): return '<PPP request language=%r, tree=%r>' % \ (self.language, self.tree) def __eq__(self, other): if isinstance(other, dict) or isinstance(other, str): other = Request.from_json(other) return self.language == other.language and \ self.tree == other.tree @staticmethod def from_json(data): if isinstance(data, str): data = json.loads(data) return Request(data['language'], data['tree']) def as_dict(self): return {'language': self.language, 'tree': self.tree.as_dict()} def as_json(self): return json.dumps(self.as_dict()) class Response: """Represents a response. https://github.com/ProjetPP/Documentation/blob/master/module-communication.md#response """ __slots__ = ('language', 'pertinence', 'tree') def __init__(self, language, pertinence, tree): if isinstance(tree, dict) or isinstance(tree, str): tree = AbstractNode.from_json(tree) self.language = language self.pertinence = pertinence self.tree = tree def __repr__(self): return '<PPP response language=%r, pertinence=%r, tree=%r>' % \ (self.language, self.pertinence, self.tree) def __eq__(self, other): if isinstance(other, dict) or isinstance(other, str): other = Response.from_json(other) return self.language == other.language and \ self.pertinence == other.pertinence and \ self.tree == other.tree @staticmethod def from_json(data): if isinstance(data, str): data = json.loads(data) return Response(data['language'], data['pertinence'], data['tree']) def as_dict(self): return {'language': self.language, 'pertinence': self.pertinence, 'tree': self.tree.as_dict()} def as_json(self): return json.dumps(self.as_dict())
Python
0
@@ -302,38 +302,36 @@ = ('language', ' -pertin +sent ence', 'tree')%0A%0A @@ -363,27 +363,181 @@ nguage, tree -):%0A +_or_sentence, is_sentence=False):%0A if is_sentence:%0A self.sentence = tree_or_sentence%0A else:%0A tree = tree_or_sentence%0A if i @@ -588,32 +588,36 @@ r):%0A + + tree = AbstractN @@ -648,56 +648,60 @@ + self. -language = language%0A self.tree = tre +tree = tree%0A self.language = languag e%0A%0A
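The ppp_datamodel diff above lets Request take either a parse tree or a raw sentence, switched by is_sentence; a small sketch of that either-or constructor pattern, with plain attributes in place of AbstractNode.from_json and the unused-field handling an assumption of this sketch:

class RequestSketch:
    def __init__(self, language, tree_or_sentence, is_sentence=False):
        if is_sentence:
            self.sentence = tree_or_sentence
            self.tree = None
        else:
            # stand-in for AbstractNode.from_json on dict/str input
            self.tree = tree_or_sentence
            self.sentence = None
        self.language = language

r1 = RequestSketch('en', {'type': 'triple'})
r2 = RequestSketch('en', 'Who wrote Hamlet?', is_sentence=True)
assert r1.tree == {'type': 'triple'}
assert r2.sentence == 'Who wrote Hamlet?'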
749471d32e18eb14a0d4094ef265dede80cb58c1
modify name
automation_calculator.py
automation_calculator.py
#!python3 # -*- coding: utf-8 -*- # test only on windows XP, 7, 10 import time import subprocess import uiautomation as automation def Calc(window, btns, expression): expression = ''.join(expression.split()) if not expression.endswith('='): expression += '=' for char in expression: automation.Logger.Write(char, writeToFile = False) btns[char].Click(waitTime = 0.05) window.SendKeys('{Ctrl}c', waitTime = 0) result = automation.Win32API.GetClipboardText() automation.Logger.WriteLine(result, automation.ConsoleColor.Cyan, writeToFile = False) time.sleep(1) def CaclOnXP(): chars = '0123456789.+-*/=()' calcWindow = automation.WindowControl(searchDepth = 1, ClassName = 'SciCalc') if not calcWindow.Exists(0, 0): subprocess.Popen('calc') calcWindow.SetTopmost() calcWindow.SendKeys('{Alt}vs', 0.5) clearBtn = calcWindow.ButtonControl(Name = 'CE') clearBtn.Click() char2Button = {} for key in chars: char2Button[key] = calcWindow.ButtonControl(Name = key) Calc(calcWindow, char2Button, '1234 * (4 + 5 + 6) - 78 / 90') Calc(calcWindow, char2Button, '2*3.14159*10') def CalcOnWindows7(): char2Id = { '0' : '130', '1' : '131', '2' : '132', '3' : '133', '4' : '134', '5' : '135', '6' : '136', '7' : '137', '8' : '138', '9' : '139', '.' : '84', '+' : '93', '-' : '94', '*' : '92', '/' : '91', '=' : '121', '(' : '128', ')' : '129', } calcWindow = automation.WindowControl(searchDepth = 1, ClassName = 'CalcFrame') if not calcWindow.Exists(0, 0): subprocess.Popen('calc') calcWindow.SetTopmost() calcWindow.SendKeys('{Alt}2') clearBtn = calcWindow.ButtonControl(foundIndex= 8, Depth = 3) #test foundIndex and Depth, the 8th button is clear if clearBtn.AutomationId == '82': clearBtn.Click() char2Button = {} for key in char2Id: char2Button[key] = calcWindow.ButtonControl(AutomationId = char2Id[key]) Calc(calcWindow, char2Button, '1234 * (4 + 5 + 6) - 78 / 90') Calc(calcWindow, char2Button, '2*3.14159*10') def CalcOnWindows10(): char2Id = { '0' : 'num0Button', '1' : 'num1Button', '2' : 'num2Button', '3' : 'num3Button', '4' : 'num4Button', '5' : 'num5Button', '6' : 'num6Button', '7' : 'num7Button', '8' : 'num8Button', '9' : 'num9Button', '.' : 'decimalSeparatorButton', '+' : 'plusButton', '-' : 'minusButton', '*' : 'multiplyButton', '/' : 'divideButton', '=' : 'equalButton', '(' : 'openParanthesisButton', ')' : 'closeParanthesisButton', } calcWindow = automation.WindowControl(searchDepth = 1, ClassName = 'ApplicationFrameWindow', Name = 'Calculator') if not calcWindow.Exists(0, 0): subprocess.Popen('calc') calcWindow.SetTopmost() calcWindow.ButtonControl(AutomationId = 'NavButton').Click() calcWindow.ListItemControl(Name = 'Scientific Calculator').Click() calcWindow.ButtonControl(AutomationId = 'clearButton').Click() char2Button = {} for key in char2Id: char2Button[key] = calcWindow.ButtonControl(AutomationId = char2Id[key]) Calc(calcWindow, char2Button, '1234 * (4 + 5 + 6) - 78 / 90') Calc(calcWindow, char2Button, '2*3.14159*10') if __name__ == '__main__': import platform osVersion = int(platform.version().split('.')[0]) if osVersion < 6: CaclOnXP() elif osVersion == 6: CalcOnWindows7() elif osVersion >= 10: CalcOnWindows10()
Python
0.000894
@@ -33,17 +33,13 @@ -%0A# -test only +works on @@ -52,16 +52,22 @@ s XP, 7, + 8 and 10%0Aimpo @@ -1191,16 +1191,20 @@ Windows7 +And8 ():%0A @@ -3676,16 +3676,20 @@ Windows7 +And8 ()%0A e
c2d543a3de566443a2c61761f9a190e915426fec
Return stream_client instead of binding it inside method (tests now passing)
stream_django/client.py
stream_django/client.py
from stream_django import conf import os import stream from stream_django.conf import DJANGO_MAJOR_VERSION from django.core.exceptions import ImproperlyConfigured def init_client(mayRaise=False): if conf.API_KEY and conf.API_SECRET: stream_client = stream.connect( conf.API_KEY, conf.API_SECRET, location=conf.LOCATION, timeout=conf.TIMEOUT) elif os.environ.get('STREAM_URL') is not None: stream_client = stream.connect() else: stream_client = None if mayRaise: raise ImproperlyConfigured('Stream credentials are not set in your settings') stream_client = init_client(mayRaise=DJANGO_MAJOR_VERSION<1.7)
Python
0
@@ -173,24 +173,34 @@ _client( -mayRaise +raise_config_error =False): @@ -241,39 +241,30 @@ CRET:%0A -stream_client = +return stream.conn @@ -271,19 +271,8 @@ ect( -%0A conf @@ -399,31 +399,22 @@ :%0A -stream_client = +return stream. @@ -431,60 +431,31 @@ el -se:%0A stream_client = None%0A if mayRaise:%0A +if raise_config_error:%0A @@ -567,16 +567,26 @@ ent( -mayRaise +raise_config_error =DJA
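The stream_django diff above makes init_client return the client instead of assigning a local that was discarded on return; a sketch of the before/after behavior with a dummy factory in place of stream.connect:

def init_client_broken(connect, creds):
    if creds:
        client = connect(creds)  # local binding, lost when the function returns
    # implicitly returns None

def init_client_fixed(connect, creds, raise_config_error=False):
    if creds:
        return connect(creds)
    if raise_config_error:
        raise RuntimeError('credentials are not set')

factory = lambda c: ('client', c)
assert init_client_broken(factory, 'key:secret') is None
assert init_client_fixed(factory, 'key:secret') == ('client', 'key:secret')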
c895a8b62754f5df32aba06cd2231ba43acc9576
Update algo.py
server/algo.py
server/algo.py
from __future__ import division import math import itertools SPACING = 15 def iter_to_runs(visibles, pixels): cur_val = 6666666 start_idx = None out = [] for i, val in enumerate(itertools.chain(visibles, [None])): if cur_val != val: if cur_val is True: # we just ended a run of "True" values out.append((pixels[start_idx], pixels[i - 1])) cur_val = val start_idx = i return out def generate_line_segments(radius, center): """Generate radii of a circle that are a fixed width apart on the circle. Args: radius: radius of the circle, in pixels center: center of the circle (x, y) as tuple Returns: iterator of points (center, point on circle) """ ang_step = SPACING / radius # angle step in radians ang = 0 while ang < 2 * math.pi: ang += ang_step yield (center, (center[0] + radius * math.cos(ang), center[1] + radius * math.sin(ang))) def generate_visible(tower_height, heightmap): """Trace a ray and determine if a region is viewable. Args: tower_height: the elevation in meters above sea level of your antenna heightmap: an enumerable of heights in a given direction Returns: an enumerable of True/False for visibility """ min_angle = -10000 for i, height in enumerate(heightmap): if tower_height - height == 0: angle_to_point = 0 elif tower_height > height: angle_to_point = math.atan(i / (tower_height - height)) else: angle_to_point = math.atan((height - tower_height) / i) + math.pi / 2 if angle_to_point >= min_angle: min_angle = angle_to_point yield True else: yield False if __name__ == '__main__': assert iter_to_runs([False, False, True, True, False, True, False, True, True]) == [(2, 3), (5, 5), (7, 8)] assert iter_to_runs([True]) == [(0, 0)] assert iter_to_runs([True, True, True, True, False, True, True]) == [(0, 3), (5, 6)] import matplotlib.pyplot as plt heightmap = [math.sin(x/15.0) * x for x in xrange(360)] tower_height = 100.0 # foots above MSL filt = ray(tower_height, heightmap) fhm = [h if fl else 0 for (h, fl) in zip(heightmap, filt)] plt.scatter(range(len(heightmap)), fhm) plt.scatter([0], [tower_height], color='red') plt.plot(heightmap) plt.show()
Python
0.000007
@@ -66,17 +66,16 @@ ACING = -1 5%0A%0Adef i
315a8ea99240d0eebe2335f25269154475dda679
Fix broken svn_export test
py/desimodel/install.py
py/desimodel/install.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
desimodel.install
=================

Install data files not handled by pip install.
"""


def default_install_dir():
    """Return the default install directory.  Assumes this file lives in
    a 'site-packages' directory.

    Returns
    -------
    :class:`str`
        The path to the install directory.
    """
    from os.path import dirname
    return dirname(dirname(dirname(dirname(dirname(__file__)))))


def svn_export(desimodel_version=None):
    """Create a :command:`svn export` command suitable for downloading a
    particular desimodel version.

    Parameters
    ----------
    desimodel_version : :class:`str`, optional
        The version X.Y.Z to download, trunk, or something of the form
        branches/...  Defaults to trunk.

    Returns
    -------
    :class:`list`
        A :command:`svn` command in list form, suitable for passing to
        :class:`subprocess.Popen`.
    """
    from . import __version__ as this_version
    if desimodel_version is None:
        export_version = 'trunk'
    elif 'branches/' in desimodel_version:
        export_version = desimodel_version
    else:
        export_version = 'tags/' + desimodel_version
    return ["svn", "export",
            ("https://desi.lbl.gov/svn/code/desimodel/" +
             "{0}/data").format(export_version)]


def install(desimodel=None, version=None):
    """Primary workhorse function.

    Parameters
    ----------
    desimodel : :class:`str`, optional
        Allows the install directory to be explicitly set.
    version : :class:`str`, optional
        Allows the desimodel version to be explicitly set.

    Raises
    ------
    :class:`RuntimeError`
        Standard error output from svn export command when status is non-zero.
    """
    from os import chdir, environ
    from os.path import exists, join
    from subprocess import Popen, PIPE
    try:
        install_dir = environ['DESIMODEL']
    except KeyError:
        if desimodel is not None:
            install_dir = desimodel
        else:
            install_dir = default_install_dir()
    if exists(join(install_dir, 'data')):
        raise ValueError("{0} already exists!".format(join(install_dir, 'data')))
    chdir(install_dir)
    command = svn_export(version)
    # print(' '.join(command))
    proc = Popen(command, stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    status = proc.returncode
    if status != 0:
        raise RuntimeError(err.rstrip())


def main():
    """Entry point for the :command:`install_desimodel_data` script.

    Returns
    -------
    :class:`int`
        An integer suitable for passing to :func:`sys.exit`.
    """
    from sys import argv
    from argparse import ArgumentParser
    desc = """Install desimodel data.

This script will attempt to download and install the desimodel data/
directory.  The script will attempt to install the data in the
following locations, in order of preference:

1. :envvar:`DESIMODEL`, that is, the directory specified by the
   environment variable.
2. The value set with the -d option on the command line.
3. A directory relative to the file containing this script.  This
   directory is currently {0}.

If the data directory already exists, this script will not do anything.
""".format(default_install_dir())
    parser = ArgumentParser(description=desc, prog=argv[0])
    parser.add_argument('-d', '--desimodel', action='store', dest='desimodel',
                        metavar='DESIMODEL',
                        help=('Place the data/ directory in this directory. ' +
                              'In other words, the environment variable ' +
                              'DESIMODEL should be set to this directory.'))
    parser.add_argument('-D', '--desimodel-version', action='store',
                        dest='desimodel_version', metavar='VERSION',
                        help='Explicitly set the version to download.')
    options = parser.parse_args()
    try:
        install(options.desimodel, options.desimodel_version)
    except (ValueError, RuntimeError) as e:
        print(e.message)
        return 1
    return 0
Python
0.000007
@@ -1115,16 +1115,48 @@ elif +desimodel_version is 'truck' or 'branche
a22174dcd9fa8540329f0e69a17f021ccb59b678
load only first 1000 data
server/init.py
server/init.py
# -*- coding: utf-8 -*-
from werkzeug.contrib.profiler import ProfilerMiddleware
from flask import Flask, request, g, render_template
from flask.ext.triangle import Triangle
from scipy import sparse, io
from sklearn.metrics.pairwise import pairwise_distances
import numpy as np
from matlab import engine
import os, json
from flask.ext.cors import CORS
from jinja2 import Environment

# Configuration
app = Flask(__name__, static_path='/static')
Triangle(app)
CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
app.config['DEBUG'] = True
app.config.from_object(__name__)
app.config.from_envvar('FLASKR_SETTINGS', silent=True)
app.config['PROFILE'] = True
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[10])

# @app.after_request
# def after_request(response):
#	response.headers.add('Access-Control-Allow-Origin', '*')
#	response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization')
#	response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE')
#	return response

# Routing
# @app.before_first_request
def before__first_request():
	global eng
	global mappedX
	global cl_idx
	global Wtopk
	global voca
	global distanceMatrix

	print 'Starting matlab'
	eng = engine.start_matlab()
	eng.cd(os.path.dirname(os.getcwd()))
	[mappedX, cl_idx, Wtopk_idx,voca] = eng.main_topic(nargout=4)
	distanceMatrix = io.loadmat('./../tdm2.mat')['DD']#.tolist()

	Wtopk = []
	for idxArray in Wtopk_idx:
		tempArray = []
		for idx in idxArray:
			tempArray.append(voca[int(idx)-1])
		Wtopk.append(tempArray)

	cl_idx = cl_idx[0]
	print 'Server Ready'
	distanceMatrix = np.round(distanceMatrix,decimals=4).tolist()
	cl_idx = cl_idx

@app.teardown_request
def teardown_request(exception):
	print('Teardown arose!'.format(exception))

@app.route('/get_subTopic')
def get_subTopic():
	global eng
	global voca
	idx = json.loads(request.args.get('idx'))
	[mappedX_sub, cl_idx_sub, Wtopk_idx_sub] = eng.sub_topic(idx,nargout=3)
	print mappedX_sub
	Wtopk_sub = []
	for idxArray in Wtopk_idx_sub:
		tempArray = []
		for idx in idxArray:
			tempArray.append(voca[int(idx)-1])
		Wtopk_sub.append(tempArray)
	cl_idx_sub = cl_idx_sub[0]
	# mappedX_sub = np.array(mappedX_sub).tolist()
	cl_idx_sub = np.array(cl_idx_sub).tolist()
	return json.dumps({'mappedX_sub':mappedX_sub, 'cl_idx_sub':cl_idx_sub, 'Wtopk_sub':Wtopk_sub})

@app.route('/get_subTopic_tsne')
def get_subTopic_tsne():
	global eng
	global voca
	idx = json.loads(request.args.get('idx'))
	[mappedX_sub, cl_idx_sub, Wtopk_idx_sub] = eng.sub_topic_tsne(idx,nargout=3)
	print mappedX_sub
	Wtopk_sub = []
	for idxArray in Wtopk_idx_sub:
		tempArray = []
		for idx in idxArray:
			tempArray.append(voca[int(idx)-1])
		Wtopk_sub.append(tempArray)
	cl_idx_sub = cl_idx_sub[0]
	mappedX_sub = np.array(mappedX_sub).tolist()
	cl_idx_sub = np.array(cl_idx_sub).tolist()
	return json.dumps({'mappedX_sub':mappedX_sub, 'cl_idx_sub':cl_idx_sub, 'Wtopk_sub':Wtopk_sub})

# receives the keyword as input
@app.route('/')
def form():
	global mappedX
	global cl_idx
	global Wtopk
	global distanceMatrix
	return render_template('tsne.html', cl_idx=cl_idx, Wtopk= Wtopk, distanceMatrix=distanceMatrix)

before__first_request()

# Execute the main program
if __name__ == '__main__':
	app.run(host='0.0.0.0',port=5004)
Python
0
@@ -342,19 +342,27 @@ import C -ORS +onfigParser %0Afrom ji @@ -447,16 +447,17 @@ static') +%09 %0ATriangl @@ -1406,18 +1406,8 @@ DD'%5D -#.tolist() %0A%0A%09W @@ -1645,25 +1645,17 @@ imals=4) -.tolist() +%0A %0A%09cl_idx @@ -1663,17 +1663,115 @@ = cl_idx -%0A +%5B0:1000%5D%0A%09distanceMatrix = distanceMatrix%5B0:1000,0:1000%5D%0A%0A%09distanceMatrix = distanceMatrix.tolist() %0A%[email protected]
7af40645756fbe2f1b7d44f0ea357a710d10fd3c
Use list2cmdline to rebuild the shell cmdline
substance/subenv/api.py
substance/subenv/api.py
import logging
import os

from substance.monads import *
from substance.constants import *
from substance.logs import dinfo
from substance import Shell
from substance.subenv import SubenvSpec
from substance.exceptions import (InvalidEnvError, InvalidOptionError)
from substance.utils import makeSymlink, readSymlink

logger = logging.getLogger(__name__)

class SubenvAPI(object):
  '''
    Subenv API
  '''
  def __init__(self, basePath="/substance"):
    self.basePath = basePath
    self.envsPath = os.path.join(self.basePath, "envs")
    self.assumeYes = False
    self.struct = {'dirs':[], 'files':[]}

  def setAssumeYes(self, b):
    if b:
      self.assumeYes = True
    else:
      self.assumeYes = False

  def initialize(self):
    return self.assertPaths()

  def assertPaths(self):
    return OK([self.basePath, self.envsPath]).mapM(Shell.makeDirectory)

  def init(self, path, env={}):
    logger.info("Initializing subenv from: %s" % path)
    return SubenvSpec.fromSpecPath(path, env) \
      .bind(self._applyEnv)

  def exists(self, name):
    if os.path.isdir(os.path.join(self.envsPath, name)):
      return OK(True)
    return OK(False)

  def vars(self, envName=None, vars=None):
    if not envName:
      envSpec = self._getCurrentEnv()
      if not envSpec:
        return Fail(InvalidEnvError("No env is currently active."))
      envName = envSpec.name

    def varfilter(o):
      if vars is not None:
        return {k:v for k,v in o.iteritems() if k in vars}
      else:
        return o

    return OK(envName) \
      .bind(self._loadEnvSpec) \
      .map(lambda e: e.getEnvVars()) \
      .map(varfilter)

  def delete(self, name):
    envPath = os.path.normpath(os.path.join(self.envsPath, name))
    if not os.path.isdir(envPath):
      return Fail(InvalidOptionError("Environment '%s' does not exist."))
    return Shell.nukeDirectory(envPath)

  def use(self, name):
    envPath = os.path.normpath(os.path.join(self.envsPath, name))
    if not os.path.isdir(envPath):
      return Fail(InvalidOptionError("Environment '%s' does not exist."))
    envSpec = SubenvSpec.fromEnvPath(envPath)
    current = os.path.join(self.basePath, "current")
    return Try.attempt(makeSymlink, envSpec.envPath, current, True) \
      .then(dinfo("Current substance environment now: '%s'" % envSpec.name))

  def current(self):
    return OK(self._getCurrentEnv())

  def run(self, args, envName=None):
    if not envName:
      envSpec = self._getCurrentEnv()
      if not envSpec:
        return Fail(InvalidEnvError("No env is currently active."))
      envName = envSpec.name

    return OK(envName) \
      .bind(self._loadEnvSpec) \
      .map(lambda x: x.envPath) \
      .bind(lambda p: Shell.call(args, cwd=p, shell=False))

  def ls(self):
    envs = []
    current = self._getCurrentEnv()
    for f in os.listdir(self.envsPath):
      path = os.path.join(self.envsPath, f)
      if os.path.isdir(path):
        env = SubenvSpec.fromEnvPath(path)
        if current and env.envPath == current.envPath:
          env.current = True
        envs.append(env)
    return OK(envs)

  def _loadEnvSpec(self, name):
    envPath = os.path.normpath(os.path.join(self.envsPath, name))
    if not os.path.isdir(envPath):
      return Fail(InvalidOptionError("Environment '%s' does not exist." % name))
    return OK(SubenvSpec.fromEnvPath(envPath))

  def _getCurrentEnv(self):
    try:
      current = readSymlink(os.path.join(self.basePath, "current"))
      return SubenvSpec.fromEnvPath(current)
    except Exception as err:
      return None

  def _applyEnv(self, envSpec):
    envPath = os.path.join(self.envsPath, envSpec.name)
    logger.info("Applying environment to: %s" % envPath)
    return envSpec.applyTo(envPath)
Python
0
@@ -3,16 +3,34 @@ port os%0A +import subprocess%0A from sub @@ -2600,24 +2600,69 @@ vSpec.name%0A%0A + cmd = subprocess.list2cmdline(args)%0A %0A return O @@ -2774,20 +2774,19 @@ ll.call( -args +cmd , cwd=p, @@ -2792,20 +2792,19 @@ , shell= -Fals +Tru e))%0A
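The switch to subprocess.list2cmdline in this diff matters because shell=True expects a single command string rather than an argument list; a quick stdlib-only look at the quoting it applies:

import subprocess

args = ['echo', 'hello world', 'plain']
print(subprocess.list2cmdline(args))  # echo "hello world" plain
# Arguments containing whitespace get double-quoted; bare ones pass through.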
066e60897aa931b22ce92776b896912dbec3ccf6
bump dev version
py/desispec/_version.py
py/desispec/_version.py
__version__ = '0.47.1.dev6104'
Python
0
@@ -24,8 +24,8 @@ ev61 -04 +82 '%0A
339fdf6a588fdcd9613015814d3f242d9a0f4118
Fix minor bug
lib/models/vgg.py
lib/models/vgg.py
import cupy
import chainer
import chainer.links as L
import chainer.functions as F
from chainer import Variable


class VGG(chainer.Chain):

    """Input dimensions are (224, 224)."""

    def __init__(self):
        super().__init__(
            conv1_1=L.Convolution2D(3, 64, 3, stride=1, pad=1),
            conv1_2=L.Convolution2D(64, 64, 3, stride=1, pad=1),
            conv2_1=L.Convolution2D(64, 128, 3, stride=1, pad=1),
            conv2_2=L.Convolution2D(128, 128, 3, stride=1, pad=1),
            conv3_1=L.Convolution2D(128, 256, 3, stride=1, pad=1),
            conv3_2=L.Convolution2D(256, 256, 3, stride=1, pad=1),
            conv3_3=L.Convolution2D(256, 256, 3, stride=1, pad=1),
            conv4_1=L.Convolution2D(256, 512, 3, stride=1, pad=1),
            conv4_2=L.Convolution2D(512, 512, 3, stride=1, pad=1),
            conv4_3=L.Convolution2D(512, 512, 3, stride=1, pad=1),
            conv5_1=L.Convolution2D(512, 512, 3, stride=1, pad=1),
            conv5_2=L.Convolution2D(512, 512, 3, stride=1, pad=1),
            conv5_3=L.Convolution2D(512, 512, 3, stride=1, pad=1),
            fc6=L.Linear(25088, 4096),
            fc7=L.Linear(4096, 4096),
            fc8=L.Linear(4096, 1000)
        )
        self.conv_blocks = [
            [self.conv1_1, self.conv1_2],
            [self.conv2_1, self.conv2_2],
            [self.conv3_1, self.conv3_2, self.conv3_3],
            [self.conv4_1, self.conv4_2, self.conv4_3],
            [self.conv5_1, self.conv5_2, self.conv5_3]
        ]
        self.deconv_blocks = []

        # Keep track of the pooling indices inside each function instance
        self.mps = [F.MaxPooling2D(2, 2, use_cudnn=False)
                    for _ in self.conv_blocks]

    def __call__(self, x, train=False):
        """Return a softmax probability distribution over predicted classes."""

        # Convolutional layers
        hs, _ = self.feature_map_activations(x)
        h = hs[-1]

        # Fully connected layers
        h = F.dropout(F.relu(self.fc6(h)), train=train)
        h = F.dropout(F.relu(self.fc7(h)), train=train)
        h = self.fc8(h)

        return F.softmax(h)

    def feature_map_activations(self, x):
        """Forward pass through the convolutional layers of the VGG returning
        all of its intermediate feature map activations."""

        hs = []
        pre_pooling_sizes = []
        h = x
        for conv_block, mp in zip(self.conv_blocks, self.mps):
            for conv in conv_block:
                h = F.relu(conv(h))
            pre_pooling_sizes.append(h.data.shape[2:])
            h = mp(h)
            hs.append(h.data)
        return hs, pre_pooling_sizes

    def activations(self, x, layer_idx):
        """Return filter activations projected back to the input space,
        e.g. RGB images with shape (n_layers, n_feature_maps, 3, 224, 224)
        for a particular layer. The given layer index is expected to be
        1-based.
        """
        if x.shape[0] != 1:
            raise TypeError('Visualization is only supported for a single image at a time')

        self.check_add_deconv_layers()
        hs, unpooling_sizes = self.feature_map_activations(x)

        activation_maps = []
        n_activation_maps = hs[layer_idx].shape[1]
        xp = self.xp
        for i in range(n_activation_maps):  # For each channel
            h = hs[layer_idx].copy()
            condition = xp.zeros_like(h)
            condition[0][i] = 1  # Keep one feature map and zero all other
            h = Variable(xp.where(condition, h, xp.zeros_like(h)))
            for i in reversed(range(layer_idx+1)):
                p = self.mps[i]
                h = F.upsampling_2d(h, p.indexes, p.kh, p.sy, p.ph, unpooling_sizes[i])
                for deconv in reversed(self.deconv_blocks[i]):
                    h = deconv(F.relu(h))
            activation_maps.append(h.data)
        return xp.concatenate(activation_maps)

    def check_add_deconv_layers(self, nobias=True):
        """Add a deconvolutional layer for each convolutional layer already
        defined in the network."""
        if len(self.deconv_blocks) == len(self.conv_blocks):
            return
        for conv_block in self.conv_blocks:
            deconv_block = []
            for conv in conv_block:
                out_channels, in_channels, kh, kw = conv.W.data.shape
                if isinstance(conv.W.data, cupy.ndarray):
                    initialW = cupy.asnumpy(conv.W.data)
                else:
                    initialW = conv.W.data
                deconv = L.Deconvolution2D(out_channels, in_channels,
                                           (kh, kw), stride=conv.stride,
                                           pad=conv.pad, initialW=initialW,
                                           nobias=nobias)
                if isinstance(conv.W.data, cupy.ndarray):
                    deconv.to_gpu()
                self.add_link('de{}'.format(conv.name), deconv)
                deconv_block.append(deconv)
            self.deconv_blocks.append(deconv_block)
Python
0.000001
@@ -2593,37 +2593,32 @@ hs.append(h -.data )%0A%0A retur @@ -3153,16 +3153,50 @@ tions(x) +%0A hs = %5Bh.data for h in hs%5D %0A%0A
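The per-channel masking step in activations() above (keep one feature map, zero the rest) can be shown without Chainer using plain NumPy; the shapes here are illustrative:

import numpy as np

h = np.random.rand(1, 3, 4, 4)            # (batch, channels, height, width)
keep = 1                                   # channel to keep
condition = np.zeros_like(h, dtype=bool)
condition[0][keep] = True
masked = np.where(condition, h, np.zeros_like(h))
assert masked[0, 0].sum() == 0 and np.allclose(masked[0, keep], h[0, keep])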
8b6e7fab7b81ee1488687ab4a2b00ea6f4914e64
fix iam access json schema (#244)
c7n/iamaccess.py
c7n/iamaccess.py
# Copyright 2016 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
IAM Resource Policy Checker
---------------------------

When securing resources with iam policies, we want to parse and evaluate
the resource's policy for any cross account or public access grants that
are not intended.

In general, iam policies can be complex, and where possible using iam
simulate is preferrable, but requires passing the caller's arn, which
is not feasible when we're evaluating who the valid set of callers are.

References

- IAM Policy Evaluation - http://goo.gl/sH5Dt5
- IAM Policy Reference - http://goo.gl/U0a06y

"""
import json

from c7n.filters import Filter
from c7n.utils import get_account_id, local_session, type_schema


class CrossAccountAccessFilter(Filter):
    """Matches any resource which"""

    schema = type_schema(
        'cross-account',
        whitelist={'type': 'array', 'items': 'string'})

    policy_attribute = 'Policy'

    def process(self, resources, event=None):
        self.accounts = self.get_accounts()
        return super(CrossAccountAccessFilter, self).process(resources, event)

    def get_accounts(self):
        owner_id = get_account_id(local_session(self.manager.session_factory))
        accounts = set(self.data.get('whitelist', ()))
        accounts.add(owner_id)
        return accounts

    def get_resource_policy(self, r):
        return r.get(self.policy_attribute, None)

    def __call__(self, r):
        p = self.get_resource_policy(r)
        if p is None:
            return False
        violations = check_cross_account(p, self.accounts)
        if violations:
            r['CrossAccountViolations'] = violations
            return True


def _account(arn):
    # we could try except but some minor runtime cost, basically flag
    # invalids values
    if ':' not in arn:
        return arn
    return arn.split(':', 5)[4]


def check_cross_account(policy_text, allowed_accounts):
    """Find cross account access policy grant not explicitly allowed
    """
    if isinstance(policy_text, basestring):
        policy = json.loads(policy_text)
    else:
        policy = policy_text

    violations = []
    for s in policy['Statement']:

        principal_ok = True

        if s['Effect'] != 'Allow':
            continue

        # Highly suspect in an allow
        if 'NotPrincipal' in s:
            violations.append(s)
            continue

        assert len(s['Principal']) == 1, "Too many principals %s" % s

        # Skip relays for events to sns
        if 'Service' in s['Principal']:
            continue

        # At this point principal is required?
        p = (
            isinstance(s['Principal'], basestring) and s['Principal']
            or s['Principal']['AWS'])

        p = isinstance(p, basestring) and (p,) or p
        for pid in p:
            if pid == '*':
                principal_ok = False
            else:
                account_id = _account(pid)
                if account_id not in allowed_accounts:
                    principal_ok = False

        if principal_ok:
            continue

        if 'Condition' not in s:
            violations.append(s)
            continue

        if 'StringEquals' in s['Condition']:
            # Default SNS Policy does this
            if 'AWS:SourceOwner' in s['Condition']['StringEquals']:
                so = s['Condition']['StringEquals']['AWS:SourceOwner']
                if so in allowed_accounts:
                    principal_ok = True

            # Default keys in kms do this
            if 'kms:CallerAccount' in s['Condition']['StringEquals']:
                so = s['Condition']['StringEquals']['kms:CallerAccount']
                if so in allowed_accounts:
                    principal_ok = True

        if 'ArnEquals' in s['Condition']:
            # Other valid arn equals? / are invalids allowed?
            # duplicate block from below, inline closure func
            # would remove, but slower, else move to class eval
            principal_ok = True
            v = s['Condition']['ArnEquals']['aws:SourceArn']
            v = isinstance(v, basestring) and (v,) or v
            for arn in v:
                aid = _account(arn)
                if aid not in allowed_accounts:
                    violations.append(s)

        if 'ArnLike' in s['Condition']:
            # Other valid arn equals? / are invalids allowed?
            v = s['Condition']['ArnLike']['aws:SourceArn']
            v = isinstance(v, basestring) and (v,) or v
            principal_ok = True
            for arn in v:
                aid = _account(arn)
                if aid not in allowed_accounts:
                    violations.append(s)

        if not principal_ok:
            violations.append(s)
    return violations
Python
0
@@ -1409,16 +1409,25 @@ 'items': + %7B'type': 'string @@ -1428,16 +1428,17 @@ string'%7D +%7D )%0A%0A p
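A hedged usage sketch of check_cross_account above (Python 2, since the function relies on basestring): a policy allowing a made-up foreign account with no Condition attached should come back as a violation.

policy = {
    "Statement": [{
        "Effect": "Allow",
        "Principal": {"AWS": "arn:aws:iam::222222222222:root"},
        "Action": "s3:GetObject",
        "Resource": "*",
    }]
}
violations = check_cross_account(policy, allowed_accounts={"111111111111"})
assert len(violations) == 1  # the foreign 2222... principal is flagged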
54c48073dfb8ffd418efe234c0c107f7a5c303a9
Fix failing imports in Python 2
svg/templatetags/svg.py
svg/templatetags/svg.py
import logging
import os

from django import template
from django.conf import settings
from django.contrib.staticfiles import finders
from django.utils.safestring import mark_safe

from svg.exceptions import SVGNotFound

logger = logging.getLogger(__name__)

register = template.Library()


@register.simple_tag
def svg(filename):
    path = finders.find(os.path.join('svg', '%s.svg' % filename), all=True)
    if not path:
        message = "SVG 'svg/%s.svg' not found" % filename

        if settings.DEBUG:
            raise SVGNotFound(message)
        else:
            logger.warning(message)
            return ''

    if isinstance(path, (list, tuple)):
        path = path[0]

    with open(path) as svg_file:
        svg = mark_safe(svg_file.read())

    return svg
Python
0.000196
@@ -1,12 +1,51 @@ +from __future__ import absolute_import%0A import loggi
b71ef8c05a9afa9eb3614c863650c12df0967fae
document methods
svtools/vcf/genotype.py
svtools/vcf/genotype.py
import sys


class Genotype(object):
    def __init__(self, variant, gt):
        self.format = dict()
        self.variant = variant
        self.set_format('GT', gt)

    def set_formats(self, fields, values):
        format_set = self.variant.format_set
        add_to_active = self.variant.active_formats.add
        active_formats = self.variant.active_formats
        format_dict = self.format

        for field, value in zip(fields, values):
            if field in format_set:
                format_dict[field] = value
                if field not in active_formats:
                    add_to_active(field)
            else:
                sys.stderr.write('\nError: invalid FORMAT field, \"' + field + '\"\n')
                sys.exit(1)

    def set_format(self, field, value, update_active=True):
        if field in self.variant.format_set:
            self.format[field] = value
            if field not in self.variant.active_formats:
                self.variant.active_formats.add(field)
                self.variant.update_active_format_list()
        else:
            sys.stderr.write('\nError: invalid FORMAT field, \"' + field + '\"\n')
            sys.exit(1)

    def get_format(self, field):
        return self.format[field]

    def get_gt_string(self):
        g_list = list()
        for f in self.variant.active_format_list:
            if f in self.format:
                if type(self.format[f]) == float:
                    g_list.append('%0.2f' % self.format[f])
                else:
                    g_list.append(str(self.format[f]))
            else:
                g_list.append('.')
        return ':'.join(g_list)
Python
0.000002
@@ -37,40 +37,194 @@ -def __init__(self, variant, gt): +'''%0A This class stores information about each sample.%0A '''%0A def __init__(self, variant, gt):%0A '''%0A Initialize the class. All instances have a GT field.%0A ''' %0A @@ -354,32 +354,170 @@ ields, values):%0A + '''%0A Set many format fields for this instance.%0A Updates format information in the owning Variant class.%0A '''%0A format_s @@ -1100,16 +1100,96 @@ =True):%0A + '''%0A Set information for an individual format field.%0A '''%0A @@ -1600,63 +1600,387 @@ -return self.format%5Bfield%5D%0A%0A def get_gt_string(self): +'''%0A Get value of particular field key%0A '''%0A return self.format%5Bfield%5D%0A%0A def get_gt_string(self):%0A '''%0A Convert object back to string. %0A %0A If some values are missing (at the end for example) they are printed out as %0A all format fields present in any Genotype instance in the Variant line%0A are tracked.%0A ''' %0A
e2a0fb602c9de9f988d733a30b466dc400cd9503
update issue 84
test/test_issue084.py
test/test_issue084.py
from rdflib.term import URIRef
from rdflib.graph import Graph

rdf = u"""@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix : <http://www.test.org/#> .

:world rdf:type skos:Concept;
    skos:prefLabel "World"@en.
:africa rdf:type skos:Concept;
    skos:prefLabel "Africa"@en;
    skos:broaderTransitive :world.
:CI rdf:type skos:Concept;
    skos:prefLabel "C\u00f4te d'Ivoire"@en;
    skos:broaderTransitive :africa.
""".encode('utf-8')


def test_issue():
    g = Graph()
    g.parse(data=rdf, format='n3')
    v = g.value(subject=URIRef("http://www.test.org/#CI"),
                predicate=URIRef("http://www.w3.org/2004/02/skos/core#prefLabel"))
    assert v==u"C\u00f4te d'Ivoire"
Python
0
@@ -1,20 +1,80 @@ +from codecs import getreader%0Afrom StringIO import StringIO%0A%0A from rdflib.term imp @@ -116,17 +116,16 @@ Graph%0A%0A -%0A rdf = u%22 @@ -552,16 +552,32 @@ %0A%22%22%22 +%0A%0Ardf_utf8 = rdf .encode( @@ -584,16 +584,82 @@ 'utf-8') +%0A%0Ardf_reader = getreader('utf-8')(StringIO(rdf.encode('utf-8')))%0A%0A %0A @@ -673,13 +673,9 @@ est_ -issue +a ():%0A @@ -888,8 +888,479 @@ voire%22%0A%0A +def test_b():%0A g = Graph()%0A g.parse(data=rdf_utf8, format='n3')%0A v = g.value(subject=URIRef(%22http://www.test.org/#CI%22), predicate=URIRef(%22http://www.w3.org/2004/02/skos/core#prefLabel%22))%0A assert v==u%22C%5Cu00f4te d'Ivoire%22%0A%0Adef test_c():%0A g = Graph()%0A g.parse(source=rdf_reader, format='n3')%0A v = g.value(subject=URIRef(%22http://www.test.org/#CI%22), predicate=URIRef(%22http://www.w3.org/2004/02/skos/core#prefLabel%22))%0A assert v==u%22C%5Cu00f4te d'Ivoire%22%0A%0A%0A
f486280a264c195c989d59f0b3fa631d9e165a18
Fix comment
servo_write.py
servo_write.py
from tamproxy import Sketch, SyncedSketch, Timer
from tamproxy.devices import Servo


# Cycles a motor back and forth between -255 and 255 PWM every ~5 seconds
class ServoWrite(Sketch):

    def setup(self):
        self.servo = Servo(self.tamp, 9)
        self.servo.write(1050)
        self.timer = Timer()
        self.end = False

    def loop(self):
        if (self.timer.millis() > 2000):
            self.timer.reset()
            if self.end:
                self.servo.write(1050)
            else:
                self.servo.write(1950)
            self.end = not self.end


if __name__ == "__main__":
    sketch = ServoWrite()
    sketch.run()
Python
0
@@ -82,10 +82,42 @@ vo%0A%0A -# +%0Aclass ServoWrite(Sketch):%0A %22%22%22 Cycl @@ -125,13 +125,13 @@ s a -motor +servo bac @@ -154,68 +154,69 @@ een --255 and 255 PWM every ~5 seconds%0A%0Aclass ServoWrite(Sketch): +1050us and 1950us pulse widths (most servos are 1000-2000)%22%22%22 %0A%0A @@ -656,8 +656,9 @@ ch.run() +%0A
a0a2810e52ba27bb2b6eba5d13d8a3bc88bca266
Complete overhaul because I hated the ConfigParser module.
camoco/Config.py
camoco/Config.py
#!/usr/env/python3

import os
import configparser

global cf

cf = configparser.ConfigParser()
cf._interpolation = configparser.ExtendedInterpolation()

cf_file = os.path.expanduser('~/.camoco.conf')

default_config = '''
[options]
basedir = ~/.camoco/
testdir = ~/.camoco/tests/

[logging]
log_level = verbose

[test]
force = True
refgen = Zm5bFGS
cob = NewRoot
ontology = ZmIonome
term = Fe57
gene = GRMZM2G000014
'''

# Check to see if
if not os.path.isfile(cf_file):
    with open(cf_file, 'w') as CF:
        print(default_config, file=CF)

cf.read(os.path.expanduser('~/.camoco.conf'))
Python
0
@@ -47,201 +47,125 @@ ser%0A -%0A%0Aglobal cf%0A%0A%0Acf = configparser.ConfigParser()%0Acf._interpolation = configparser.ExtendedInterpolation()%0A%0Acf_file = os.path.expanduser('~/.camoco.conf')%0A%0Adefault_config = '''%0A%5B +import yaml%0Aimport pprint%0A%0A%0Aglobal cf%0A%0Adefault_config = '''--- # YAML Camoco Configuration File%0A options -%5D%0A +:%0A basedir - = +: ~/. @@ -176,17 +176,20 @@ co/%0A + testdir - = +: ~/. @@ -207,18 +207,21 @@ s/%0A%0A -%5B logging -%5D%0A +:%0A log_ @@ -225,18 +225,17 @@ og_level - = +: verbose @@ -240,41 +240,107 @@ se%0A%0A -%5B test -%5D%0A +:%0A force +:%0A -= True%0A + RefGen: True%0A COB: True%0A Ontology: True%0A refgen - +: -= Zm5 @@ -348,18 +348,21 @@ FGS%0A + cob +: - = New @@ -370,16 +370,20 @@ oot%0A + ontology = Z @@ -378,18 +378,17 @@ ontology - = +: ZmIonom @@ -393,34 +393,40 @@ ome%0A + term +: - = Fe57%0A + gene - +: -= GRM @@ -445,25 +445,971 @@ ''%0A%0A -# Check to see if +class Level(dict):%0A '''%0A Ha! Take that config parser! I am accessing%0A everything like an object.%0A '''%0A def __init__(self,*args,**kwargs):%0A super().__init__(*args,**kwargs)%0A%0A def __getattr__(self,item):%0A if isinstance(self%5Bitem%5D,dict):%0A return Level(self%5Bitem%5D)%0A else:%0A if 'dir' in item and '~' in self%5Bitem%5D:%0A return os.path.expanduser(self%5Bitem%5D)%0A return self%5Bitem%5D%0A%0Aclass Config(object):%0A def __init__(self,filename):%0A filename = os.path.expanduser(filename)%0A self.data = Level(yaml.load(open(filename,'r')))%0A def __getattr__(self,item):%0A return Level(self.data%5Bitem%5D)%0A%0A def __repr__(self):%0A return pprint.pformat(self.data)%0A%0A''' -------------------------------------------------------------------------%0A Program Logic%0A'''%0A%0Acf_file = os.path.expanduser('~/.camoco.conf')%0A%0A# Check to see if there is a config file available %0Aif @@ -1515,51 +1515,35 @@ CF)%0A -%0Acf.read(os.path.expanduser('~/.camoco.conf') +else:%0A cf = Config(cf_file )%0A
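The Level wrapper this diff introduces is attribute-style access over nested dicts; a standalone sketch of the same idea (an independent reimplementation, not camoco's exact class):

class Level(dict):
    # Re-wrap nested dicts so chained attribute access keeps working.
    def __getattr__(self, item):
        value = self[item]
        return Level(value) if isinstance(value, dict) else value

cf = Level({'options': {'basedir': '~/.camoco/'}})
print(cf.options.basedir)  # ~/.camoco/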
f32c834ea5c3ac937ba608985dbe0a4f72b6a21a
move dangling return
pybossa/view/twitter.py
pybossa/view/twitter.py
# This file is part of PyBOSSA.
#
# PyBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBOSSA. If not, see <http://www.gnu.org/licenses/>.
from flask import Blueprint, request, url_for, flash, redirect
from flaskext.login import login_user, current_user

import pybossa.model as model
from pybossa.core import db
from pybossa.util import Twitter, get_user_signup_method
# Required to access the config parameters outside a
# context as we are using Flask 0.8
# See http://goo.gl/tbhgF for more info
from pybossa.core import app

# This blueprint will be activated in web.py
# if the TWITTER CONSUMER KEY and SECRET
# are available
blueprint = Blueprint('twitter', __name__)
twitter = Twitter(app.config['TWITTER_CONSUMER_KEY'],
                  app.config['TWITTER_CONSUMER_SECRET'])


@blueprint.route('/', methods=['GET', 'POST'])
def login():
    return twitter.oauth.authorize(callback=url_for('.oauth_authorized',
                                                    next=request.args.get("next")))


@twitter.oauth.tokengetter
def get_twitter_token():
    if current_user.is_anonymous():
        return None
    else:
        return((current_user.info['twitter_token']['oauth_token'],
                current_user.info['twitter_token']['oauth_token_secret']))


def manage_user(access_token, user_data, next_url):
    """Manage the user after signin"""
    # Twitter API does not provide a way
    # to get the e-mail so we will ask for it
    # only the first time
    user = db.session.query(model.User)\
             .filter_by(twitter_user_id=user_data['user_id'])\
             .first()

    if user is None:
        twitter_token = dict(oauth_token=access_token['oauth_token'],
                             oauth_token_secret=access_token['oauth_token_secret'])
        info = dict(twitter_token=twitter_token)
        user = db.session.query(model.User)\
                 .filter_by(name=user_data['screen_name'])\
                 .first()
        if user is None:
            user = model.User(fullname=user_data['screen_name'],
                              name=user_data['screen_name'],
                              email_addr=user_data['screen_name'],
                              twitter_user_id=user_data['user_id'],
                              info=info)
            db.session.add(user)
            db.session.commit()
            return user
        else:
            return None
    else:
        return user


@blueprint.route('/oauth-authorized')
@twitter.oauth.authorized_handler
def oauth_authorized(resp):
    """Called after authorization. After this function finished handling,
    the OAuth information is removed from the session again. When this
    happened, the tokengetter from above is used to retrieve the oauth
    token and secret.

    Because the remote application could have re-authorized the application
    it is necessary to update the values in the database.

    If the application redirected back after denying, the response passed
    to the function will be `None`. Otherwise a dictionary with the values
    the application submitted. Note that Twitter itself does not really
    redirect back unless the user clicks on the application name.
    """
    next_url = request.args.get('next') or url_for('home')
    if resp is None:
        flash(u'You denied the request to sign in.', 'error')
        return redirect(next_url)

    access_token = dict(oauth_token=resp['oauth_token'],
                        oauth_token_secret=resp['oauth_token_secret'])

    user_data = dict(screen_name=resp['screen_name'],
                     user_id=resp['user_id'])

    user = manage_user(access_token, user_data, next_url)
    if user is None:
        user = db.session.query(model.User)\
                 .filter_by(name=user_data['screen_name'])\
                 .first()
        msg, method = get_user_signup_method(user)
        flash(msg, 'info')
        if method == 'local':
            return redirect(url_for('account.forgot_password'))
        else:
            return redirect(url_for('account.signin'))

    first_login = False
    request_email = False
    login_user(user, remember=True)
    flash("Welcome back %s" % user.fullname, 'success')
    if (user.email_addr == user.name):
        request_email = True

    if not request_email:
        return redirect(next_url)

    if first_login:
        flash("This is your first login, please add a valid e-mail")
    else:
        flash("Please update your e-mail address in your profile page")

    return redirect(url_for('account.update_profile'))
Python
0.000002
@@ -2134,35 +2134,69 @@ %0A if user is -Non +not None:%0A return user%0A els e:%0A twitt @@ -2975,38 +2975,8 @@ one%0A - else:%0A return user%0A %0A%0A@b
648de375f5e9ae1620bc836e5d647688b541690c
Add atom package
test/test_packages.py
test/test_packages.py
import pytest


@pytest.mark.parametrize("name", [
    ("apt-file"),
    ("apt-transport-https"),
    ("blktrace"),
    ("ca-certificates"),
    ("chromium-browser"),
    ("cron"),
    ("curl"),
    ("diod"),
    ("docker-ce"),
    ("fonts-font-awesome"),
    ("git"),
    ("gnupg"),
    ("handbrake"),
    ("handbrake-cli"),
    ("haveged"),
    ("htop"),
    ("i3"),
    ("iotop"),
    ("language-pack-en-base"),
    ("laptop-mode-tools"),
    ("nfs-common"),
    ("ntop"),
    ("ntp"),
    ("openssh-client"),
    ("openssh-server"),
    ("openssh-sftp-server"),
    ("openssl"),
    ("python"),
    ("python-pip"),
    ("software-properties-common"),
    ("suckless-tools"),
    ("sysstat"),
    ("tree"),
    ("vagrant"),
    ("vim"),
    ("virtualbox"),
    ("vlc"),
    ("wget"),
    ("whois"),
    ("x264"),
    ("xfce4-terminal"),
    ("xfonts-terminus"),
    ("xinit"),
])
def test_packages(Package, name):
    assert Package(name).is_installed
Python
0.000002
@@ -82,24 +82,36 @@ rt-https%22),%0A + (%22atom%22),%0A (%22blktrace
89664ec37036553534c07d65f2df2b9fa07bfe80
Check total weights remain correct.
test/test_priority.py
test/test_priority.py
# -*- coding: utf-8 -*-
"""
test_priority
~~~~~~~~~~~~~

Tests for the Priority trees
"""
from hypothesis import given
from hypothesis.strategies import integers, lists, tuples

import priority


STREAMS_AND_WEIGHTS = lists(
    elements=tuples(
        integers(min_value=1),
        integers(min_value=1, max_value=255)
    ),
    unique_by=lambda x: x[0],
)


class TestPriorityTree(object):
    def test_priority_tree_one_stream(self):
        """
        When only one stream is in the PriorityTree, priorities are easy.
        """
        p = priority.PriorityTree()
        p.insert_stream(stream_id=1)

        priorities = p.priorities()
        assert len(priorities) == 1
        assert priorities.total_weight == 16

    @given(lists(elements=integers(min_value=0)))
    def test_priority_tree_single_level(self, weights):
        """
        If lots of elements are added to the tree all at the top level, their
        weights are summed properly and the priorities object has the correct
        length.
        """
        p = priority.PriorityTree()
        stream_id = 1

        for weight in weights:
            p.insert_stream(stream_id=stream_id, weight=weight)
            stream_id += 1

        priorities = p.priorities()
        assert len(priorities) == len(weights)
        assert priorities.total_weight == sum(weights)

    @given(STREAMS_AND_WEIGHTS)
    def test_priorities_stream_weights(self, stream_data):
        """
        For a given set of priorities, we can index by ID and find the weight
        of the stream.
        """
        p = priority.PriorityTree()

        for stream_id, weight in stream_data:
            p.insert_stream(stream_id=stream_id, weight=weight)

        priorities = p.priorities()

        for stream_id, weight in stream_data:
            assert weight == priorities.stream_weight(stream_id)

    def test_drilling_down(self, readme_tree):
        """
        We can drill down each layer of the tree by stream ID.
        """
        top_level = readme_tree.priorities()
        assert 7 in top_level

        dependents = top_level[7]
        assert len(dependents) == 1
        assert 11 in dependents

        second_level_dependents = dependents[11]
        assert len(second_level_dependents) == 1
        assert 9 in second_level_dependents
Python
0
@@ -2155,16 +2155,61 @@ pendents +%0A assert dependents.total_weight == 16 %0A%0A @@ -2336,16 +2336,73 @@ evel_dependents%0A + assert second_level_dependents.total_weight == 8%0A
4530eea92e37c087b6f25fe3a0e48e54b949b68b
allow setup.py to work without django
cart/__init__.py
cart/__init__.py
from django.utils.importlib import import_module
from django.core.exceptions import ImproperlyConfigured

__version__ = '1.1'
VERSION = tuple(map(int, __version__.split('.'))) + ('dev',)


def get_helper_module():
    '''Get the helper module as defined in the settings.'''
    import settings as cart_settings

    if cart_settings.HELPER_MODULE:
        try:
            package = import_module(cart_settings.HELPER_MODULE)
        except ImportError, e:
            raise ImproperlyConfigured(u'The CART_HELPER_MODULE setting refers to a ' \
                                       'non-existent package, or the import failed ' \
                                       'due to an error. Error details: %s' % e)
        return package
    else:
        return None
Python
0.000001
@@ -1,110 +1,4 @@ -from django.utils.importlib import import_module%0Afrom django.core.exceptions import ImproperlyConfigured%0A%0A __ve @@ -160,16 +160,232 @@ ngs.'''%0A + %0A # need to be able to import file without importing django, so these can't go%0A # at the top%0A from django.utils.importlib import import_module%0A from django.core.exceptions import ImproperlyConfigured%0A impo @@ -413,16 +413,21 @@ ettings%0A + %0A if c
a3b6306b2288b6dc4a9ec6e04a5962c7fb94699e
Update addition.py: s/stop/N
pyeda/logic/addition.py
pyeda/logic/addition.py
""" Logic functions for addition Interface Functions: ripple_carry_add kogge_stone_add brent_kung_add """ # Disable "invalid variable name" # pylint: disable=C0103 from math import floor, log from pyeda.boolalg.expr import Xor, Majority from pyeda.boolalg.vexpr import BitVector from pyeda.util import clog2 def ripple_carry_add(A, B, cin=0): """Return symbolic logic for an N-bit ripple carry adder.""" assert len(A) == len(B) s, c = list(), list() for i, ai in enumerate(A, A.start): carry = (cin if i == 0 else c[i-1]) s.append(Xor(ai, B[i], carry)) c.append(Majority(ai, B[i], carry)) return BitVector(s), BitVector(c) def kogge_stone_add(A, B, cin=0): """Return symbolic logic for an N-bit Kogge-Stone adder.""" assert len(A) == len(B) stop = len(A) # generate/propagate logic g = [A[i] * B[i] for i in range(stop)] p = [Xor(A[i], B[i]) for i in range(stop)] for i in range(clog2(stop)): start = 1 << i for j in range(start, stop): g[j] = g[j] + p[j] * g[j-start] p[j] = p[j] * p[j-start] # sum logic s = [Xor(A[i], B[i], (cin if i == 0 else g[i-1])) for i in range(stop)] return BitVector(s), BitVector(g) def brent_kung_add(A, B, cin=0): """Return symbolic logic for an N-bit Brent-Kung adder.""" assert len(A) == len(B) N = len(A) # generate/propagate logic g = [A[i] * B[i] for i in range(N)] p = [Xor(A[i], B[i]) for i in range(N)] # carry tree for i in range(floor(log(N, 2))): step = 2**i for start in range(2**(i+1)-1, N, 2**(i+1)): g[start] = g[start] + p[start] * g[start-step] p[start] = p[start] * p[start-step] # inverse carry tree for i in range(floor(log(N, 2))-2, -1, -1): start = 2**(i+1)-1 step = 2**i while start + step < N: g[start+step] = g[start+step] + p[start+step] * g[start] p[start+step] = p[start+step] * p[start] start += step # sum logic s = [Xor(A[i], B[i], (cin if i == 0 else g[i-1])) for i in range(N)] return BitVector(s), BitVector(g)
Python
0.000001
@@ -808,20 +808,17 @@ (B)%0A -stop +N = len(A @@ -882,28 +882,25 @@ i in range( -stop +N )%5D%0A p = %5B @@ -926,28 +926,25 @@ i in range( -stop +N )%5D%0A for i @@ -959,20 +959,17 @@ e(clog2( -stop +N )):%0A @@ -1017,20 +1017,17 @@ (start, -stop +N ):%0A @@ -1191,12 +1191,9 @@ nge( -stop +N )%5D%0A
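The generate/propagate recurrence in kogge_stone_add can be sanity-checked with plain Python integers standing in for symbolic bits; this sketch is not pyeda code, just the same prefix computation on concrete values:

from math import ceil, log2

def ks_add(a, b, n=8):
    g = [(a >> i) & (b >> i) & 1 for i in range(n)]        # generate bits
    p = [((a >> i) ^ (b >> i)) & 1 for i in range(n)]      # propagate bits
    for step in (1 << k for k in range(ceil(log2(n)))):
        # Build both lists from the previous round's values.
        g, p = (
            [g[j] | (p[j] & g[j - step]) if j >= step else g[j] for j in range(n)],
            [p[j] & p[j - step] if j >= step else p[j] for j in range(n)],
        )
    carry_in = [0] + g[:-1]
    s = [(((a >> i) ^ (b >> i)) & 1) ^ carry_in[i] for i in range(n)]
    return sum(bit << i for i, bit in enumerate(s))

assert ks_add(23, 42) == 65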
99d76458256da781fc4b25a75d68a7a6d8c9379d
Correcting a typo in entries URLConf
urls/entries.py
urls/entries.py
""" URLs for entries in a weblog. """ from django.conf.urls.defaults import * from django.views.generic import date_based from coltrane.models import Entry entry_info_dict = { 'queryset': Entry.live.all(), 'date_field': 'pub_date', } urlpatterns = patterns('', url(r'^$', date_based.archive_index, entry_info_dict, name='coltrane_entry_archive_index'), url(r'^(?P<year>\d{4})/$', date_based.archive_year, entry_info_dict, name='coltrane_entry_archive_year'), url(r'^(?P<year>\d{4})/(?P<month>\w{3})/$', date_based.archive_month, entry_info_dict, name='coltrane_entry_archive_month'), url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{2})/$', date_based.archive_day, entry_info_dict, name='coltrane.entry_archive_day'), url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$', date_based.object_detail, dict(entry_info_dict, slug_field='slug'), name='coltrane_entry_detail'), )
Python
0.998607
@@ -1126,17 +1126,17 @@ coltrane -. +_ entry_ar
5295cf039960e239d509cdc9e5b5f23c88cb1fb1
use the category type for performance wins http://pandas.pydata.org/pandas-docs/stable/categorical.html
synthpop/categorizer.py
synthpop/categorizer.py
import itertools

import numpy as np
import pandas as pd


# TODO DOCSTRINGS!!
def categorize(df, eval_d, index_cols=None):
    cat_df = pd.DataFrame(index=df.index)

    for index, expr in eval_d.iteritems():
        cat_df[index] = df.eval(expr)

    if index_cols is not None:
        cat_df[index_cols] = df[index_cols]
        cat_df = cat_df.set_index(index_cols)

    cat_df.columns = pd.MultiIndex.from_tuples(cat_df.columns,
                                               names=['cat_name', 'cat_value'])
    cat_df = cat_df.sort_index(axis=1)

    return cat_df


def sum_accross_category(df, subtract_mean=True):
    """
    This is a convenience function to sum the categorical values for each
    category - the mean across each category is then subtracted so all the
    cells in the table should be close to zero.  The reason why it's not
    exactly zero is because of rounding errors in the scaling of any tract
    variables down to block group variables
    """
    df = df.stack(level=1).fillna(0).groupby(level=0).sum()
    if subtract_mean:
        df = df.sub(df.mean(axis=1), axis="rows")
    return df


def category_combinations(index):
    """
    This method converts a hierarchical MultiIndex of category names and
    category values and converts to the cross-product of all possible
    category combinations.
    """
    d = {}
    for cat_name, cat_value in index:
        d.setdefault(cat_name, [])
        d[cat_name].append(cat_value)
    for cat_name in d.keys():
        if len(d[cat_name]) == 1:
            del d[cat_name]
    df = pd.DataFrame(list(itertools.product(*d.values())))
    df.columns = cols = d.keys()
    df.index.name = "cat_id"
    df = df.reset_index().set_index(cols)
    return df


def joint_distribution(sample_df, category_df, mapping_functions):

    # set counts to zero
    category_df["frequency"] = 0

    category_names = category_df.index.names
    for name in category_names:
        assert name in mapping_functions, "Every category needs to have a " \
                                          "mapping function with the same " \
                                          "name to define that category for " \
                                          "the pums sample records"
        sample_df[name] = sample_df.apply(mapping_functions[name], axis=1)

    category_df["frequency"] = sample_df.groupby(category_names).size()
    category_df["frequency"] = category_df["frequency"].fillna(0)

    # do the merge to add the category id
    sample_df = pd.merge(sample_df, category_df[["cat_id"]],
                         left_on=category_names, right_index=True)

    return sample_df, category_df


def _frequency_table(sample_df, category_ids):
    """
    Take the result that comes out of the method above and turn it in to the
    frequencytable format used by the ipu
    """
    df = sample_df.groupby(['hh_id', 'cat_id']).size().unstack().fillna(0)

    # need to manually add in case we missed a whole cat_id in the sample
    missing_ids = list(set(category_ids) - set(df.columns))
    if missing_ids:
        missing_df = pd.DataFrame(
            data=np.zeros((len(df), len(missing_ids))),
            index=df.index,
            columns=missing_ids)
        df = df.merge(missing_df, left_index=True, right_index=True)

    assert len(df.columns) == len(category_ids)
    assert df.sum().sum() == len(sample_df)

    return df


def frequency_tables(persons_sample_df, households_sample_df,
                     person_cat_ids, household_cat_ids):

    households_sample_df.index.name = "hh_id"
    households_sample_df = households_sample_df.reset_index().\
        set_index("serialno")

    h_freq_table = _frequency_table(households_sample_df,
                                    household_cat_ids)

    persons_sample_df = pd.merge(persons_sample_df,
                                 households_sample_df[["hh_id"]],
                                 left_on=["serialno"], right_index=True)

    p_freq_table = _frequency_table(persons_sample_df,
                                    person_cat_ids)
    p_freq_table = p_freq_table.reindex(h_freq_table.index).fillna(0)

    assert len(h_freq_table) == len(p_freq_table)

    h_freq_table = h_freq_table.sort_index(axis=1)
    p_freq_table = p_freq_table.sort_index(axis=1)

    return h_freq_table, p_freq_table
Python
0
@@ -2314,24 +2314,43 @@ me%5D, axis=1) +.astype('category') %0A%0A catego
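The one-line .astype('category') in this diff is the whole performance trick: pandas then stores integer codes plus one copy of each label instead of a Python string per cell. A quick standalone illustration (exact byte counts vary by pandas version):

import pandas as pd

s = pd.Series(['cat_a', 'cat_b', 'cat_a'] * 100000)
c = s.astype('category')
print(s.memory_usage(deep=True), c.memory_usage(deep=True))
# object dtype stores every string; category stores small codes + 2 labels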
f89bc55aebeba0cbf3c8423c97599aa0d334d9c9
Fix lint error (#113)
synthtool/gcp/common.py
synthtool/gcp/common.py
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pathlib import Path

from synthtool.languages import node
from synthtool.sources import templates
from synthtool import _tracked_paths

_TEMPLATES_DIR = Path(__file__).parent / "templates"


class CommonTemplates:
    def __init__(self):
        self._templates = templates.Templates(_TEMPLATES_DIR)

    def py_library(self) -> Path:
        raise NotImplemented()

    def node_library(self, **kwargs) -> Path:
        kwargs["metadata"] = node.read_metadata()
        t = templates.TemplateGroup(_TEMPLATES_DIR / "node_library")
        result = t.render(**kwargs)
        _tracked_paths.add(result)
        return result

    def php_library(self, **kwargs) -> Path:
        t = templates.TemplateGroup(_TEMPLATES_DIR / "php_library")
        result = t.render(**kwargs)
        _tracked_paths.add(result)
        return result

    def render(self, template_name: str, **kwargs) -> Path:
        return self._templates.render(template_name, **kwargs)
Python
0.000001
@@ -937,16 +937,21 @@ lemented +Error ()%0A%0A
541822e07634a6dce374fed5b47b34212afd657f
Fix python 2.4 compatibility
lib/serializer.py
lib/serializer.py
#
#

# Copyright (C) 2007, 2008 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.


"""Serializer abstraction module

This module introduces a simple abstraction over the serialization
backend (currently json).

"""

import simplejson
import re
import hmac
import hashlib

from ganeti import errors


# Check whether the simplejson module supports indentation
_JSON_INDENT = 2
try:
  simplejson.dumps(1, indent=_JSON_INDENT)
except TypeError:
  _JSON_INDENT = None

_RE_EOLSP = re.compile('[ \t]+$', re.MULTILINE)


def DumpJson(data, indent=True):
  """Serialize a given object.

  @param data: the data to serialize
  @param indent: whether to indent output (depends on simplejson version)

  @return: the string representation of data

  """
  if not indent or _JSON_INDENT is None:
    txt = simplejson.dumps(data)
  else:
    txt = simplejson.dumps(data, indent=_JSON_INDENT)

  txt = _RE_EOLSP.sub("", txt)
  if not txt.endswith('\n'):
    txt += '\n'
  return txt


def LoadJson(txt):
  """Unserialize data from a string.

  @param txt: the json-encoded form

  @return: the original data

  """
  return simplejson.loads(txt)


def DumpSignedJson(data, key, salt=None):
  """Serialize a given object and authenticate it.

  @param data: the data to serialize
  @param key: shared hmac key
  @return: the string representation of data signed by the hmac key

  """
  txt = DumpJson(data, indent=False)
  if salt is None:
    salt = ''
  signed_dict = {
    'msg': txt,
    'salt': salt,
    'hmac': hmac.new(key, salt + txt, hashlib.sha256).hexdigest(),
  }
  return DumpJson(signed_dict)


def LoadSignedJson(txt, key, salt_verifier=None):
  """Verify that a given message was signed with the given key, and load it.

  @param txt: json-encoded hmac-signed message
  @param key: shared hmac key
  @param salt_verifier: function taking a salt as input and returning boolean
  @rtype: tuple of original data, string
  @return: (original data, salt)
  @raises errors.SignatureError: if the message signature doesn't verify

  """
  signed_dict = LoadJson(txt)
  if not isinstance(signed_dict, dict):
    raise errors.SignatureError('Invalid external message')
  try:
    msg = signed_dict['msg']
    salt = signed_dict['salt']
    hmac_sign = signed_dict['hmac']
  except KeyError:
    raise errors.SignatureError('Invalid external message')

  if salt and not salt_verifier:
    raise errors.SignatureError('Salted message is not verified')
  elif salt_verifier is not None:
    if not salt_verifier(salt):
      raise errors.SignatureError('Invalid salt')

  if hmac.new(key, salt + msg, hashlib.sha256).hexdigest() != hmac_sign:
    raise errors.SignatureError('Invalid Signature')
  return LoadJson(msg)


def SaltEqualTo(expected):
  """Helper salt verifier function that checks for equality.

  @type expected: string
  @param expected: expected salt
  @rtype: function
  @return: salt verifier that returns True if the target salt equals
      the expected value

  """
  return lambda salt: salt == expected


def SaltIn(expected):
  """Helper salt verifier function that checks for membership.

  @type expected: collection
  @param expected: collection of possible valid salts
  @rtype: function
  @return: salt verifier that returns True if the salt is in the collection

  """
  return lambda salt: salt in expected


def SaltInRange(min, max):
  """Helper salt verifier function that checks that the salt is in a range.

  @type min: integer
  @param min: minimum salt value
  @type max: integer
  @param max: maximum salt value
  @rtype: function
  @return: salt verifier that returns True if the salt is in the min,max range

  """
  def _CheckSaltInRange(salt):
    try:
      i_salt = int(salt)
    except (TypeError, ValueError), err:
      return False
    return i_salt > min and i_salt < max

  return _CheckSaltInRange


Dump = DumpJson
Load = LoadJson
DumpSigned = DumpSignedJson
LoadSigned = LoadSignedJson
Python
0.000002
@@ -924,23 +924,8 @@ mac%0A -import hashlib%0A %0Afro @@ -947,16 +947,89 @@ errors%0A +%0Atry:%0A from hashlib import sha1%0Aexcept ImportError:%0A import sha as sha1 %0A%0A# Chec @@ -2253,30 +2253,20 @@ + txt, -hashlib.sha256 +sha1 ).hexdig @@ -3310,22 +3310,12 @@ sg, -hashlib.sha256 +sha1 ).he
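A stdlib-only sketch of the sign/verify round trip DumpSignedJson and LoadSignedJson implement above, written for Python 3 and using sha1 to match this compatibility fix (a modern deployment would pick a stronger digest):

import hmac
import json
from hashlib import sha1

key, salt = b'shared-key', '42'
msg = json.dumps({'op': 'ping'})
sig = hmac.new(key, (salt + msg).encode(), sha1).hexdigest()
envelope = json.dumps({'msg': msg, 'salt': salt, 'hmac': sig})

loaded = json.loads(envelope)
check = hmac.new(key, (loaded['salt'] + loaded['msg']).encode(), sha1).hexdigest()
assert hmac.compare_digest(check, loaded['hmac'])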
7b4531ec867982ba2f660a2a08e85dbae457083e
Fix new line stripping in admin site
users/models.py
users/models.py
import hashlib
import urllib.parse as urllib

from django.contrib.auth.models import User
from django.db import models


# extension to django's User class which has authentication details
# as well as some basic info such as name
class Member(models.Model):
    def gravatar(self, size=128):
        default = "https://pbs.twimg.com/media/Civ9AUkVAAAwihS.jpg"
        h = hashlib.md5(
            self.equiv_user.email.encode('utf8').lower()
        ).hexdigest()
        q = urllib.urlencode({
            # 'd':default,
            'd': 'identicon',
            's': str(size),
        })
        return 'https://www.gravatar.com/avatar/{}?{}'.format(h, q)

    equiv_user = models.OneToOneField(User, on_delete=models.CASCADE)

    def __str__(self):
        return self.equiv_user.username

    bio = models.CharField(max_length=4096, blank=True)
    signature = models.CharField(max_length=1024, blank=True)

    def notification_count(self):
        return len(self.notifications_owned.filter(is_unread=True))

    official_photo_url = models.CharField(max_length=512, null=True, blank=True)

    def is_exec(self):
        return len(self.execrole_set.all()) > 0
Python
0
@@ -799,36 +799,36 @@ bio = models. -Char +Text Field(max_length @@ -861,36 +861,36 @@ nature = models. -Char +Text Field(max_length
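The gravatar() method above boils down to an md5 of the lowercased e-mail plus URL-encoded query parameters; the same construction with only the stdlib (the address is made up):

import hashlib
import urllib.parse

email = '[email protected]'
h = hashlib.md5(email.strip().lower().encode('utf8')).hexdigest()
q = urllib.parse.urlencode({'d': 'identicon', 's': '128'})
print('https://www.gravatar.com/avatar/{}?{}'.format(h, q))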
1116d028071f542c3452a61b3f64a2b9f81f94ff
Fix facts broken by default change.
pyinfra/facts/server.py
pyinfra/facts/server.py
# pyinfra
# File: pyinfra/facts/server.py
# Desc: server/os related facts

from __future__ import unicode_literals

import re
from datetime import datetime

from dateutil.parser import parse as parse_date

from pyinfra.api import FactBase


class Home(FactBase):
    command = 'echo $HOME'


class Hostname(FactBase):
    command = 'hostname'


class Os(FactBase):
    command = 'uname -s'


class OsVersion(FactBase):
    command = 'uname -r'


class Arch(FactBase):
    command = 'uname -p'


class Command(FactBase):
    def command(self, command):
        return command


class Which(FactBase):
    def command(self, name):
        return 'which {0}'.format(name)


class Date(FactBase):
    '''Returns the current datetime on the server.'''

    default = datetime.now()
    command = 'LANG=C date'

    def process(self, output):
        return parse_date(output[0])


class LsbRelease(FactBase):
    command = 'lsb_release -ca'

    def process(self, output):
        items = {}

        for line in output:
            if ':' not in line:
                continue

            key, value = line.split(':')

            key = key.strip().lower()
            # Turn "distributor id" into "id"
            if ' ' in key:
                key = key.split(' ')[-1]

            value = value.strip()

            items[key] = value

        return items


class Groups(FactBase):
    '''
    Returns a list of groups on the system.
    '''

    command = 'cat /etc/group'

    def process(self, output):
        groups = []

        for line in output:
            if ':' in line:
                groups.append(line.split(':')[0])

        return groups


class Users(FactBase):
    '''
    Returns a dict of users -> details:

    .. code:: python

        'user_name': {
            'home': '/home/user_name',
            'shell': '/bin/bash',
            'group': 'main_user_group',
            'groups': [
                'other',
                'groups'
            ]
        },
        ...
    '''

    command = '''
        for i in `cat /etc/passwd | cut -d: -f1`; do
            ID=`id $i`
            META=`cat /etc/passwd | grep ^$i: | cut -d: -f6-7`
            echo "$ID $META"
        done
    '''

    default = dict

    regex = r'^uid=[0-9]+\(([a-zA-Z0-9_\.\-]+)\) gid=[0-9]+\(([a-zA-Z0-9_\.\-]+)\) groups=([a-zA-Z0-9_\.\-,\(\)\s]+) (.*)$'  # noqa
    group_regex = r'^[0-9]+\(([a-zA-Z0-9_\.\-]+)\)$'

    def process(self, output):
        users = {}
        for line in output:
            matches = re.match(self.regex, line)

            if matches:
                # Parse out the home/shell
                home_shell = matches.group(4)
                home = shell = None

                # /blah: is just a home
                if home_shell.endswith(':'):
                    home = home_shell[:-1]

                # :/blah is just a shell
                elif home_shell.startswith(':'):
                    shell = home_shell[1:]

                # Both home & shell
                elif ':' in home_shell:
                    home, shell = home_shell.split(':')

                # Main user group
                group = matches.group(2)

                # Parse the groups
                groups = []
                for group_matches in matches.group(3).split(','):
                    name = re.match(self.group_regex, group_matches.strip())
                    if name:
                        name = name.group(1)
                    else:
                        continue

                    # We only want secondary groups here
                    if name != group:
                        groups.append(name)

                users[matches.group(1)] = {
                    'group': group,
                    'groups': groups,
                    'home': home,
                    'shell': shell,
                }

        return users


class LinuxDistribution(FactBase):
    '''
    Returns a dict of the Linux distribution version. Ubuntu, Debian, CentOS,
    Fedora & Gentoo currently. Also contains any key/value items located in
    release files.

    .. code:: python

        {
            'name': 'CentOS',
            'major': 6,
            'minor': 5,
            'release_meta': {
                'DISTRIB_CODENAME': 'trusty',
                ...
            }
        }
    '''

    command = 'cat /etc/*-release'

    default = {
        'name': None,
        'major': None,
        'minor': None,
    }

    # Currently supported distros
    regexes = [
        r'(Ubuntu) ([0-9]{2})\.([0-9]{2})',
        r'(CentOS) release ([0-9]).([0-9])',
        r'(Red Hat Enterprise Linux) Server release ([0-9]).([0-9])',
        r'(CentOS) Linux release ([0-9])\.([0-9])',
        r'(Debian) GNU/Linux ([0-9])()',
        r'(Gentoo) Base System release ([0-9])\.([0-9])',
        r'(Fedora) release ([0-9]+)()',
    ]

    def process(self, output):
        release_info = {
            'release_meta': {},
        }

        # Start with a copy of the default (None) data
        release_info.update(self.default)

        for line in output:
            # Check if we match a known version/major/minor string
            for regex in self.regexes:
                matches = re.search(regex, line)
                if matches:
                    release_info.update({
                        'name': matches.group(1),
                        'major': matches.group(2) and int(matches.group(2)) or None,
                        'minor': matches.group(3) and int(matches.group(3)) or None,
                    })

            if '=' in line:
                key, value = line.split('=')
                release_info['release_meta'][key] = value.strip('"')

        return release_info
Python
0
@@ -1468,16 +1468,35 @@ c/group' +%0A default = list %0A%0A de @@ -4391,99 +4391,8 @@ e'%0A%0A - default = %7B%0A 'name': None,%0A 'major': None,%0A 'minor': None,%0A %7D%0A%0A @@ -4794,16 +4794,161 @@ %0A %5D%0A%0A + @staticmethod%0A def default():%0A return %7B%0A 'name': None,%0A 'major': None,%0A 'minor': None,%0A %7D%0A%0A def @@ -5133,16 +5133,18 @@ .default +() )%0A%0A
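The bug behind this diff is the classic shared class attribute: a dict assigned at class creation is one object reused by every caller, which is why the fix turns LinuxDistribution's default into a staticmethod. A minimal standalone illustration of the pitfall:

class Shared:
    default = {'name': None}        # one dict, created once, shared forever

class Fresh:
    @staticmethod
    def default():
        return {'name': None}       # a new dict on every call

leaked = Shared.default
leaked['name'] = 'CentOS'
assert Shared.default['name'] == 'CentOS'   # mutation leaked into the class
assert Fresh.default()['name'] is None      # isolated per call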
3fe1eab593d2f2ef5125570c297e14b96ad7d709
Reword get_installation_order docstring
src/pip/_internal/resolution/resolvelib/resolver.py
src/pip/_internal/resolution/resolvelib/resolver.py
import functools import logging from pip._vendor import six from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible from pip._vendor.resolvelib import Resolver as RLResolver from pip._internal.exceptions import InstallationError from pip._internal.req.req_set import RequirementSet from pip._internal.resolution.base import BaseResolver from pip._internal.resolution.resolvelib.provider import PipProvider from pip._internal.utils.typing import MYPY_CHECK_RUNNING from .factory import Factory if MYPY_CHECK_RUNNING: from typing import Dict, List, Optional, Tuple from pip._vendor.resolvelib.resolvers import Result from pip._internal.cache import WheelCache from pip._internal.index.package_finder import PackageFinder from pip._internal.operations.prepare import RequirementPreparer from pip._internal.req.req_install import InstallRequirement from pip._internal.resolution.base import InstallRequirementProvider logger = logging.getLogger(__name__) class Resolver(BaseResolver): def __init__( self, preparer, # type: RequirementPreparer finder, # type: PackageFinder wheel_cache, # type: Optional[WheelCache] make_install_req, # type: InstallRequirementProvider use_user_site, # type: bool ignore_dependencies, # type: bool ignore_installed, # type: bool ignore_requires_python, # type: bool force_reinstall, # type: bool upgrade_strategy, # type: str py_version_info=None, # type: Optional[Tuple[int, ...]] ): super(Resolver, self).__init__() self.factory = Factory( finder=finder, preparer=preparer, make_install_req=make_install_req, force_reinstall=force_reinstall, ignore_installed=ignore_installed, ignore_requires_python=ignore_requires_python, py_version_info=py_version_info, ) self.ignore_dependencies = ignore_dependencies self._result = None # type: Optional[Result] def resolve(self, root_reqs, check_supported_wheels): # type: (List[InstallRequirement], bool) -> RequirementSet # FIXME: Implement constraints. if any(r.constraint for r in root_reqs): raise InstallationError("Constraints are not yet supported.") provider = PipProvider( factory=self.factory, ignore_dependencies=self.ignore_dependencies, ) reporter = BaseReporter() resolver = RLResolver(provider, reporter) requirements = [ self.factory.make_requirement_from_install_req(r) for r in root_reqs ] try: self._result = resolver.resolve(requirements) except ResolutionImpossible as e: error = self.factory.get_installation_error(e) if not error: # TODO: This needs fixing, we need to look at the # factory.get_installation_error infrastructure, as that # doesn't really allow for the logger.critical calls I'm # using here. for req, parent in e.causes: logger.critical( "Could not find a version that satisfies " + "the requirement " + str(req) + ("" if parent is None else " (from {})".format( parent.name )) ) raise InstallationError( "No matching distribution found for " + ", ".join([r.name for r, _ in e.causes]) ) raise six.raise_from(error, e) req_set = RequirementSet(check_supported_wheels=check_supported_wheels) for candidate in self._result.mapping.values(): ireq = provider.get_install_requirement(candidate) if ireq is None: continue ireq.should_reinstall = self.factory.should_reinstall(candidate) req_set.add_named_requirement(ireq) return req_set def get_installation_order(self, req_set): # type: (RequirementSet) -> List[InstallRequirement] """Create a list that orders given requirements for installation. 

        The returned list should contain all requirements in ``req_set``,
        so the caller can loop through it and have a requirement installed
        before the requiring thing.

        The current implementation walks the resolved dependency graph, and
        makes sure every node has a greater "weight" than all its parents.
        """
        assert self._result is not None, "must call resolve() first"
        weights = {}  # type: Dict[Optional[str], int]

        graph = self._result.graph
        key_count = len(self._result.mapping) + 1  # Packages plus sentinel.
        while len(weights) < key_count:
            progressed = False
            for key in graph:
                if key in weights:
                    continue
                parents = list(graph.iter_parents(key))
                if not all(p in weights for p in parents):
                    continue
                if parents:
                    weight = max(weights[p] for p in parents) + 1
                else:
                    weight = 0
                weights[key] = weight
                progressed = True

            # FIXME: This check will fail if there are unbreakable cycles.
            # Implement something to forcefully break them up to continue.
            if not progressed:
                raise InstallationError(
                    "Could not determine installation order due to circular "
                    "dependency."
                )

        sorted_items = sorted(
            req_set.requirements.items(),
            key=functools.partial(_req_set_item_sorter, weights=weights),
            reverse=True,
        )
        return [ireq for _, ireq in sorted_items]


def _req_set_item_sorter(
    item,     # type: Tuple[str, InstallRequirement]
    weights,  # type: Dict[Optional[str], int]
):
    # type: (...) -> Tuple[int, str]
    """Key function used to sort install requirements for installation.

    Based on the "weight" mapping calculated in ``get_installation_order()``.
    The canonical package name is returned as the second member as a tie-
    breaker to ensure the result is predictable, which is useful in tests.
    """
    name = canonicalize_name(item[0])
    return weights[name], name
Python
0
@@ -4362,41 +4362,55 @@ %22%22%22 -Create a list that orders given r +Get order for installation of requirements in R equi @@ -4415,34 +4415,19 @@ uirement -s for installation +Set .%0A%0A @@ -4451,26 +4451,18 @@ ist -should contain +s a -ll req @@ -4473,135 +4473,150 @@ ment -s in %60%60req_set%60%60,%0A so the caller can loop through it and have a requirement installed%0A before the requiring thing + before another that depends on%0A it. This helps ensure that the environment is kept consistent as they%0A get installed one-by-one .%0A%0A
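The weighting scheme in get_installation_order above is easy to misread in flattened form; here is a minimal standalone sketch of the idea (toy graph, not pip's code): every node takes a weight one greater than its heaviest parent, so sorting by descending weight installs dependencies before their dependents.

# Toy graph: node -> the nodes that depend on it ("parents"); None is the root.
graph = {None: [], "a": [None], "b": [None], "c": ["a", "b"]}

weights = {}
while len(weights) < len(graph):
    for key, parents in graph.items():
        if key in weights or not all(p in weights for p in parents):
            continue  # wait until all parents have a weight
        weights[key] = max((weights[p] for p in parents), default=-1) + 1

order = sorted((k for k in graph if k is not None),
               key=lambda k: weights[k], reverse=True)
print(order)  # ['c', 'a', 'b']: the deepest dependency is installed first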
0b7ab77ef7d290ef4c9708d0564c474fa51dcc2d
rename flipoutput method to flip_output
psv/core/objects/selections.py
psv/core/objects/selections.py
from ..utils import cleanup_name, multiple_index, limit_text
from ..utils import _index_function_gen, generate_func, asciireplace
from ..exceptions.messages import ApiObjectMsg as msg

from types import FunctionType
from tabulate import tabulate


class Selection(object):
    __slots__ = ["__rows__", "__apimother__"]

    def __init__(self, selection, api_mother):
        self.__rows__ = (selection)
        self.__apimother__ = api_mother
        if not self.rows:
            Exception("Selection Error")

    @property
    def rows(self):
        if not isinstance(self.__rows__, tuple):
            self.__rows__ = tuple(self.__rows__)
            return self.__rows__
        else:
            return self.__rows__

    @property
    def columns(self):
        return self.__apimother__.__columns__

    @columns.setter
    def columns(self, v):
        self.__apimother__.rebuildcolumnsmap(v)

    def single_find(self, selectionfirstarg_data=None, **kwargs):
        """Find a single row based off search criteria given.
           Will raise an error if more than one result is returned."""
        try:
            result = None
            func = generate_func(selectionfirstarg_data, kwargs)
            g = self._find_all(func)
            result = next(g)
            next(g)
            raise Exception(msg.singlefindmsg)
        except StopIteration:
            return result

    def find(self, selectionfirstarg_data=None, **kwargs):
        try:
            func = generate_func(selectionfirstarg_data, kwargs)
            g = self._find_all(func)
            return next(g)
        except StopIteration:
            return None

    def _find_all(self, func):
        for x in self.rows:
            if func(x):
                yield x

    def find_all(self, selectionfirstarg_data=None, **kwargs):
        func = generate_func(selectionfirstarg_data, kwargs)
        return tuple(self._find_all(func))

    def flipoutput(self):
        for x in self.rows:
            ~x
        return self

    def no_output(self):
        """Sets all rows to not output"""
        for x in self.rows:
            -x
        return self

    def all_output(self):
        """Sets all rows to output"""
        for x in self.rows:
            +x
        return self

    def lenoutput(self):
        return len(tuple(filter(lambda x: x.outputrow, self.rows)))

    def enable(self, selectionfirstarg_data=None, **kwargs):
        v = generate_func(selectionfirstarg_data, kwargs)
        for x in self.rows:
            if bool(v(x)):
                +x
        return self

    def disable(self, selectionfirstarg_data=None, **kwargs):
        v = generate_func(selectionfirstarg_data, kwargs)
        for x in self.rows:
            if bool(v(x)):
                -x
        return self

    def flip(self, selectionfirstarg_data=None, **kwargs):
        v = generate_func(selectionfirstarg_data, kwargs)
        for x in self.rows:
            if bool(v(x)):
                ~x
        return self

    def select(self, selectionfirstarg_data=None, **kwargs):
        """Method for selecting part of the csv document.
           Generates a function based off the parameters given.
        """
        if not selectionfirstarg_data and not kwargs:
            return Selection(self.__rows__, self.__apimother__)
        func = generate_func(selectionfirstarg_data, kwargs)
        return self[func]

    def grab(self, *args):
        """Grabs specified columns from every row

        :returns: :class:`tuple` of the result.
""" arg = tuple(args) if len(arg) > 1: return tuple(self[arg]) elif len(arg) == 1: return tuple(self[arg[0]]) else: raise Exception("Empty Grab") def unique(self, *args): arg = tuple(args) if len(arg) > 1: return set(self[arg]) elif len(arg) == 1: return set(self[arg[0]]) else: raise Exception("Empty Grab") def __len__(self): return len(self.rows) def __getitem__(self, v): if isinstance(v, slice): return Selection(self.rows[v], self.__apimother__) if isinstance(v, int): return (self.rows[v]) elif isinstance(v, str): return (x.getcolumn(v) for x in self.rows) elif isinstance(v, tuple): return (multiple_index(x,v) for x in self.rows) elif isinstance(v, FunctionType): return Selection(_index_function_gen(self, v), self.__apimother__) else: raise TypeError(msg.getitemmsg.format(type(v))) def addcolumn(self, columnname, columndata="", add_to_columns=True): """Adds a column :param columnname: Name of the column to add. :param columndata: The default value of the new column. :param add_to_columns: Determines whether this column should be added to the internal tracker. :type columnname: :class:`str` :type add_to_columns: :class:`bool` """ for row in self.rows: row.addcolumn(columnname, columndata) if add_to_columns: self.columns += (columnname,) return self @property def outputtedrows(self): return Selection(filter(lambda x:x.outputrow, self.rows), self.__apimother__) @property def nonoutputtedrows(self): return Selection(filter(lambda x: not x.outputrow, self.rows), self.__apimother__) def tabulate(self, limit=100, format="grid", only_ascii=True, columns=None, text_limit=None, remove_newline=True): data = [x.longcolumn() for x in self.rows[:limit]] sortedcolumns = self.columns if not columns else columns if remove_newline: for i, longcolumn in enumerate(data): for key in longcolumn: if isinstance(longcolumn[key], str): longcolumn[key] = longcolumn[key].replace("\n", "") result = tabulate( [sortedcolumns] + [[limit_text(x[c], text_limit) for c in sortedcolumns] for x in data], headers="firstrow", tablefmt=format) if only_ascii: return asciireplace(result) return result
Python
0.000036
@@ -1930,16 +1930,17 @@ def flip +_ output(s
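The ~x, -x and +x statements in Selection only make sense because the row objects overload Python's unary operators. A toy sketch of that protocol (a hypothetical Row class, not psv's actual row implementation):

class Row:
    def __init__(self):
        self.outputrow = True

    def __invert__(self):  # ~row flips the output flag
        self.outputrow = not self.outputrow

    def __neg__(self):     # -row disables output
        self.outputrow = False

    def __pos__(self):     # +row enables output
        self.outputrow = True

row = Row()
~row                      # a bare expression statement triggers __invert__
print(row.outputrow)      # False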
2af91316f3ee5174d6183fd376d527e072bebdd2
Make deleting from form skip validation, because we really don't actually care.
sleep/forms.py
sleep/forms.py
from django import forms
from sleep.models import *
from django.core.exceptions import *

import pytz
import datetime


class GroupForm(forms.ModelForm):
    class Meta:
        model = SleeperGroup
        fields = ['name']


class SleeperProfileForm(forms.ModelForm):
    idealWakeupWeekend = forms.CharField(max_length=30)
    idealWakeupWeekday = forms.CharField(max_length=30)
    idealSleepTimeWeekend = forms.CharField(max_length=30)
    idealSleepTimeWeekday = forms.CharField(max_length=30)

    class Meta:
        model = SleeperProfile
        fields = ['privacy', 'privacyLoggedIn', 'privacyFriends',
                  'use12HourTime', 'idealSleep', 'timezone', 'useGravatar',
                  'idealWakeupWeekend', 'idealWakeupWeekday',
                  'idealSleepTimeWeekday', 'idealSleepTimeWeekend']

    def __init__(self, fmt, *args, **kwargs):
        self.fmt = fmt
        super(SleeperProfileForm, self).__init__(*args, **kwargs)

    def clean(self):
        cleaned_data = super(SleeperProfileForm, self).clean()
        for k in ['idealWakeupWeekend', 'idealWakeupWeekday',
                  'idealSleepTimeWeekday', 'idealSleepTimeWeekend']:
            try:
                cleaned_data[k] = datetime.datetime.strptime(cleaned_data[k], self.fmt).time()
            except ValueError:
                self._errors[k] = self.error_class(["The time must be in the format %s" % datetime.time(23, 59, 59).strftime(self.fmt)])
                del cleaned_data[k]
        return cleaned_data


class SleeperSearchForm(forms.Form):
    username = forms.CharField(max_length=30)


class AllNighterForm(forms.ModelForm):
    delete = forms.BooleanField(required=False)

    class Meta:
        model = Allnighter
        fields = ["date", "comments"]

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super(AllNighterForm, self).__init__(*args, **kwargs)

    def clean(self):
        cleaned_data = super(AllNighterForm, self).clean()
        s = self.user.sleep_set.filter(date=cleaned_data["date"])
        if len(s) > 0:
            raise ValidationError({NON_FIELD_ERRORS: ["You have sleeps entered for " + str(cleaned_data["date"]) + "!"]})
        return cleaned_data


class SleepForm(forms.ModelForm):
    start_time = forms.CharField(max_length=30)
    end_time = forms.CharField(max_length=30)
    delete = forms.BooleanField(required=False)

    class Meta:
        model = Sleep
        fields = ['start_time', 'end_time', 'date', 'comments', 'timezone']

    def __init__(self, user, fmt, *args, **kwargs):
        self.fmt = fmt
        self.user = user
        super(SleepForm, self).__init__(*args, **kwargs)

    def clean(self):
        cleaned_data = super(SleepForm, self).clean()
        a = self.user.allnighter_set.filter(date=cleaned_data["date"])
        if 'timezone' in cleaned_data and 'start_time' in cleaned_data and 'end_time' in cleaned_data:
            tz = pytz.timezone(cleaned_data['timezone'])
            for k in ['start_time', 'end_time']:
                # manually convert the strf-ed time to a datetime.datetime
                # so we can make sure to do it in the right timezone
                try:
                    dt = datetime.datetime.strptime(cleaned_data[k], self.fmt)
                    cleaned_data[k] = tz.localize(dt)
                except ValueError:
                    self._errors[k] = self.error_class(["The time must be in the format %s" % datetime.datetime(1999, 12, 31, 23, 59, 59).strftime(self.fmt)])
                    del cleaned_data[k]
        if len(a) > 0:
            raise ValidationError({NON_FIELD_ERRORS: ["You have an allnighter entered for " + str(cleaned_data["date"]) + "!"]})
        if "start_time" in cleaned_data and "end_time" in cleaned_data and cleaned_data["start_time"] >= cleaned_data["end_time"]:
            raise ValidationError({NON_FIELD_ERRORS: ["End time must be later than start time!"]})
        return cleaned_data
Python
0
@@ -2605,32 +2605,178 @@ ef clean(self):%0A + if 'delete' in self.data and self.data%5B'delete'%5D =='on': return %7B'delete': 'on' %7D #Skip validation, I don't actually care if I'm deleting%0A cleaned_
57cec2b03eaa6857bcb1b3780c4de00c3165b281
Return early if owner
utils/checks.py
utils/checks.py
from discord.ext import commands


def is_owner_or(**perms):
    async def predicate(ctx):
        owner = await ctx.bot.is_owner(ctx.author)
        permissions = ctx.channel.permissions_for(ctx.author)
        return all(getattr(permissions, perm, None) == value for perm, value in perms.items()) or owner
    return commands.check(predicate)
Python
0.000006
@@ -101,15 +101,10 @@ -owner = +if awa @@ -126,32 +126,58 @@ wner(ctx.author) +:%0D%0A return True %0D%0A permis @@ -343,17 +343,8 @@ s()) - or owner %0D%0A
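Reassembled from the character diff above, the post-fix predicate reads roughly as follows (line layout approximate; requires discord.py to import). Returning early for the bot owner also skips the channel permission lookup entirely in that case:

from discord.ext import commands

def is_owner_or(**perms):
    async def predicate(ctx):
        if await ctx.bot.is_owner(ctx.author):
            return True  # owner short-circuits; no permission check needed
        permissions = ctx.channel.permissions_for(ctx.author)
        return all(getattr(permissions, perm, None) == value
                   for perm, value in perms.items())
    return commands.check(predicate)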
42cec96f1e3465d5c2ce0e47d66c4ffa17f6548b
Fix log method if iotdb_log cannot be imported.
smartthings.py
smartthings.py
#
#   smartthings.py
#
#   David Janes
#   IOTDB.org
#   2014-01-31
#
#   Demonstrate how to use the SmartThings API from Python.
#
#   See also:
#   Example App explanation:
#   http://build.smartthings.com/blog/tutorial-creating-a-custom-rest-smartapp-endpoint/
#
#   Example PHP code:
#   https://www.dropbox.com/s/7m7gmlr9q3u7rmk/exampleOauth.php
#
#   Example "Groovy"/SMART code (this is the app we tap into)
#   https://www.dropbox.com/s/lohzziy2wjlrppb/endpointExample.groovy
#

import sys
import requests
import pprint
import json

## import httplib
## httplib.HTTPConnection.debuglevel = 1

from optparse import OptionParser

try:
    import iotdb_log
except:
    class iotdb_log(object):
        def log(self, **ad):
            pprint.pprint(ad)


class SmartThings(object):
    def __init__(self, verbose=True):
        self.verbose = verbose
        self.std = {}
        self.endpointd = {}
        self.deviceds = {}

    def load_settings(self, filename="smartthings.json"):
        """Load the JSON Settings file.

        See the documentation, but briefly you can get it from here:
        https://iotdb.org/playground/oauthorize
        """
        with open(filename) as fin:
            self.std = json.load(fin)

    def request_endpoints(self):
        """Get the endpoints exposed by the SmartThings App

        The first command you need to call
        """
        endpoints_url = self.std["api"]
        endpoints_paramd = {
            "access_token": self.std["access_token"]
        }

        endpoints_response = requests.get(url=endpoints_url, params=endpoints_paramd)
        self.endpointd = endpoints_response.json()[0]

        if self.verbose:
            iotdb_log.log(
                "endpoints",
                endpoints_url=endpoints_url,
                endpoints_paramd=endpoints_paramd,
                resultds=self.endpointd,
            )

    def request_devices(self, device_type):
        """List the devices"""
        devices_url = "https://graph.api.smartthings.com%s/%s" % (
            self.endpointd["url"], device_type,
        )
        devices_paramd = {
        }
        devices_headerd = {
            "Authorization": "Bearer %s" % self.std["access_token"],
        }

        devices_response = requests.get(url=devices_url, params=devices_paramd, headers=devices_headerd)
        self.deviceds = devices_response.json()
        for switchd in self.deviceds:
            switchd['url'] = "%s/%s" % (
                devices_url, switchd['id'],
            )

        if self.verbose:
            iotdb_log.log(
                "devices",
                url=devices_url,
                paramd=devices_paramd,
                deviceds=self.deviceds,
            )

        return self.deviceds

    def device_request(self, deviced, requestd):
        """Send a request to the named device"""
        command_url = deviced['url']
        command_paramd = {
            "access_token": self.std["access_token"]
        }
        command_headerd = {}

        command_response = requests.put(
            url=command_url,
            params=command_paramd,
            headers=command_headerd,
            data=json.dumps(requestd)
        )


if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option(
        "", "--debug",
        default = False,
        action = "store_true",
        dest = "debug",
        help = "",
    )
    parser.add_option(
        "", "--verbose",
        default = False,
        action = "store_true",
        dest = "verbose",
        help = "",
    )
    parser.add_option(
        "", "--type",
        dest = "device_type",
        help = "The device type (required), one of switch, motion, presence, acceleration, contact"
    )
    parser.add_option(
        "", "--id",
        dest = "device_id",
        help = "The ID or Name of the device to manipulate"
    )
    parser.add_option(
        "", "--request",
        dest = "request",
        help = "Something to do, e.g. 'switch=1', 'switch=0'"
    )

    (options, args) = parser.parse_args()

    if not options.device_type:
        print >> sys.stderr, "%s: --type <switch|motion|presence|acceleration|contact>" % (sys.argv[0],)
        parser.print_help(sys.stderr)
        sys.exit(1)

    st = SmartThings(verbose=options.verbose)
    st.load_settings()
    st.request_endpoints()

    ds = st.request_devices(options.device_type)
    if options.device_id:
        ds = filter(lambda d: options.device_id in [d.get("id"), d.get("label"), ], ds)

    if options.request:
        key, value = options.request.split('=', 2)
        try:
            value = int(value)
        except ValueError:
            pass

        requestd = {
            key: value
        }

        for d in ds:
            iotdb_log.log(device=d, request=requestd)
            st.device_request(d, requestd)
    else:
        print json.dumps(ds, indent=2, sort_keys=True)
Python
0
@@ -689,24 +689,46 @@ og(object):%0A + @staticmethod%0A def @@ -731,22 +731,16 @@ def log( -self, **ad):%0A
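The bug here is that the fallback class defines log() as an instance method while every call site invokes it on the class itself, which fails without an instance. A minimal sketch of the repaired fallback pattern (narrowing the original bare except to ImportError, and keyword arguments only, both assumptions on my part):

import pprint

try:
    import iotdb_log
except ImportError:
    class iotdb_log(object):
        @staticmethod
        def log(**ad):
            pprint.pprint(ad)

# Either way, class-level call sites now work (fallback path shown):
iotdb_log.log(event="demo", value=42)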
28c314e98ec88586b8c423b0941d8f029e4946e9
fix function which has obviously never been tested
lib/xdg_secret.py
lib/xdg_secret.py
import subprocess

def xdg_secret_store(label, secret, attrs):
    with subprocess.Popen(["secret-tool", "store", "--label", label] + attrs,
                          stdin=subprocess.PIPE) as proc:
        proc.communicate(secret.encode("utf-8"))
        return proc.wait() == 0

def xdg_secret_lookup_secret(attrs):
    with subprocess.Popen(["secret-tool", "lookup"] + attrs,
                          stdout=subprocess.PIPE) as proc:
        return proc.stdout.read().rstrip("\n")

def xdg_secret_search_stdout(attrs):
    return subprocess.call(["secret-tool", "search"] + attrs) == 0

def xdg_secret_clear(attrs):
    return subprocess.call(["secret-tool", "clear"] + attrs) == 0
Python
0.000001
@@ -474,16 +474,17 @@ .rstrip( +b %22%5Cn%22)%0A%0Ad
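The one-byte fix matters because in Python 3 a subprocess pipe opened without text mode yields bytes, and bytes.rstrip() rejects str arguments:

data = b"secret\n"
print(data.rstrip(b"\n"))   # b'secret'
try:
    data.rstrip("\n")        # the pre-fix call
except TypeError as exc:
    print(exc)               # a bytes-like object is required, not 'str'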
2d3016ce69e9a40dc5e428a0aa6ea75775c7d84a
Fix subscribe merge_vars.
pybossa/newsletter/__init__.py
pybossa/newsletter/__init__.py
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa.  If not, see <http://www.gnu.org/licenses/>.
"""PyBossa module for subscribing users to Mailchimp lists."""
import mailchimp


class Newsletter(object):

    """Newsletter class to handle mailchimp subscriptions."""

    def __init__(self, app=None):
        """Init method for flask extensions."""
        self.app = app
        if app is not None:  # pragma: no cover
            self.init_app(app)

    def init_app(self, app):
        """Configure newsletter Mailchimp client."""
        self.app = app
        self.client = mailchimp.Mailchimp(app.config.get('MAILCHIMP_API_KEY'))
        self.list_id = app.config.get('MAILCHIMP_LIST_ID')

    def is_user_subscribed(self, email, list_id=None):
        """Check if user is subscribed or not."""
        try:
            if list_id is None:
                list_id = self.list_id
            res = self.client.lists.member_info(list_id, [{'email': email}])
            if (res.get('success_count') == 1 and
                    res['data'][0]['email'] == email):
                return True
            else:
                return False
        except mailchimp.Error, e:
            msg = 'MAILCHIMP: An error occurred: %s - %s' % (e.__class__, e)
            self.app.logger.error(msg)

    def subscribe_user(self, user, list_id=None, old_email=None):
        """Subscribe, update a user of a mailchimp list."""
        try:
            update_existing = False
            if list_id is None:
                list_id = self.list_id
            merge_vars = {'FNAME': user.fullname}
            if old_email:
                email = {'email': old_email}
                merge_vars['new-email'] = user.email_addr
                update_existing = self.is_user_subscribed(old_email)
            else:
                email = {'email': user.email_addr}
                merge_vars['email'] = user.email_addr

            self.client.lists.subscribe(list_id, email, merge_vars,
                                        update_existing=update_existing)
        except mailchimp.Error, e:
            msg = 'MAILCHIMP: An error occurred: %s - %s' % (e.__class__, e)
            self.app.logger.error(msg)
Python
0
@@ -2502,62 +2502,8 @@ ddr%7D -%0A merge_vars%5B'email'%5D = user.email_addr %0A%0A
e1a4b0d7f7d9e860dce794e07aadedea193d470e
Set version to v2.0.18.dev1
spacy/about.py
spacy/about.py
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py

__title__ = 'spacy'
__version__ = '2.0.18'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = '[email protected]'
__license__ = 'MIT'
__release__ = True

__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
Python
0.000011
@@ -210,16 +210,21 @@ '2.0.18 +.dev1 '%0A__summ @@ -440,11 +440,12 @@ _ = -Tru +Fals e%0A%0A_
e08ab1b77f72336f868fafc4d87b0ef36d703ac4
Support alternative lib file name on Mac OS
pymediainfo/__init__.py
pymediainfo/__init__.py
import json
import os
import sys
from pkg_resources import get_distribution
import xml.etree.ElementTree as ET
from ctypes import *

__version__ = get_distribution("pymediainfo").version


class Track(object):
    def __getattribute__(self, name):
        try:
            return object.__getattribute__(self, name)
        except:
            pass
        return None

    def __init__(self, xml_dom_fragment):
        self.xml_dom_fragment = xml_dom_fragment
        self.track_type = xml_dom_fragment.attrib['type']
        for el in self.xml_dom_fragment:
            node_name = el.tag.lower().strip().strip('_')
            if node_name == 'id':
                node_name = 'track_id'
            node_value = el.text
            other_node_name = "other_%s" % node_name
            if getattr(self, node_name) is None:
                setattr(self, node_name, node_value)
            else:
                if getattr(self, other_node_name) is None:
                    setattr(self, other_node_name, [node_value, ])
                else:
                    getattr(self, other_node_name).append(node_value)

        for o in [d for d in self.__dict__.keys() if d.startswith('other_')]:
            try:
                primary = o.replace('other_', '')
                setattr(self, primary, int(getattr(self, primary)))
            except:
                for v in getattr(self, o):
                    try:
                        current = getattr(self, primary)
                        setattr(self, primary, int(v))
                        getattr(self, o).append(current)
                        break
                    except:
                        pass

    def __repr__(self):
        return("<Track track_id='{0}', track_type='{1}'>".format(self.track_id, self.track_type))

    def to_data(self):
        data = {}
        for k, v in self.__dict__.items():
            if k != 'xml_dom_fragment':
                data[k] = v
        return data


class MediaInfo(object):
    def __init__(self, xml):
        self.xml_dom = MediaInfo.parse_xml_data_into_dom(xml)

    @staticmethod
    def parse_xml_data_into_dom(xml_data):
        try:
            return ET.fromstring(xml_data.encode("utf-8"))
        except:
            return None

    @staticmethod
    def parse(filename):
        if os.name in ("nt", "dos", "os2", "ce"):
            lib = windll.MediaInfo
        elif sys.platform == "darwin":
            lib = CDLL("libmediainfo.0.dylib")
        else:
            lib = CDLL("libmediainfo.so.0")

        # Define arguments and return types
        lib.MediaInfo_Inform.restype = c_wchar_p
        lib.MediaInfo_New.argtypes = []
        lib.MediaInfo_New.restype = c_void_p
        lib.MediaInfo_Option.argtypes = [c_void_p, c_wchar_p, c_wchar_p]
        lib.MediaInfo_Option.restype = c_wchar_p
        lib.MediaInfoA_Option.argtypes = [c_void_p, c_char_p, c_char_p]
        lib.MediaInfoA_Option.restype = c_char_p
        lib.MediaInfo_Inform.argtypes = [c_void_p, c_size_t]
        lib.MediaInfo_Inform.restype = c_wchar_p
        lib.MediaInfoA_Open.argtypes = [c_void_p, c_char_p]
        lib.MediaInfoA_Open.restype = c_size_t
        lib.MediaInfo_Delete.argtypes = [c_void_p]
        lib.MediaInfo_Delete.restype = None
        lib.MediaInfo_Close.argtypes = [c_void_p]
        lib.MediaInfo_Close.restype = None

        # Create a MediaInfo handle
        handle = lib.MediaInfo_New()
        lib.MediaInfo_Option(handle, "CharSet", "UTF-8")
        lib.MediaInfoA_Option(None, b"Inform", b"XML")
        lib.MediaInfoA_Option(None, b"Complete", b"1")
        lib.MediaInfoA_Open(handle, filename.encode("utf8"))
        xml = lib.MediaInfo_Inform(handle, 0)
        # Delete the handle
        lib.MediaInfo_Close(handle)
        lib.MediaInfo_Delete(handle)
        return MediaInfo(xml)

    def _populate_tracks(self):
        if self.xml_dom is None:
            return
        for xml_track in self.xml_dom.iter("track"):
            self._tracks.append(Track(xml_track))

    @property
    def tracks(self):
        if not hasattr(self, "_tracks"):
            self._tracks = []
        if len(self._tracks) == 0:
            self._populate_tracks()
        return self._tracks

    def to_data(self):
        data = {'tracks': []}
        for track in self.tracks:
            data['tracks'].append(track.to_data())
        return data

    def to_json(self):
        return json.dumps(self.to_data())
Python
0
@@ -2418,16 +2418,37 @@ arwin%22:%0A + try:%0A @@ -2486,16 +2486,93 @@ dylib%22)%0A + except OSError:%0A lib = CDLL(%22libmediainfo.dylib%22)%0A
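A sketch of the fallback pattern the diff introduces, generalized to any list of candidate library names (the helper name and library names below are illustrative, not part of pymediainfo):

from ctypes import CDLL

def load_first_available(*names):
    last_error = None
    for name in names:
        try:
            return CDLL(name)
        except OSError as exc:  # raised when the library cannot be loaded
            last_error = exc
    raise last_error

# On macOS: try the versioned name first, then the plain one.
# lib = load_first_available("libmediainfo.0.dylib", "libmediainfo.dylib")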
d5c8d2f5fd4177b6f4980689ae972352563c28e5
Update about.py and increment version
spacy/about.py
spacy/about.py
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py

__title__ = 'spacy'
__version__ = '1.8.2'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Matthew Honnibal'
__email__ = '[email protected]'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/v2/templates/model/'
Python
0
@@ -208,13 +208,13 @@ = ' -1.8.2 +2.0.0 '%0A__ @@ -351,24 +351,20 @@ = ' -Matthew Honnibal +Explosion AI '%0A__ @@ -378,11 +378,14 @@ = ' -mat +contac t@ex
845be624ed0dfb8d942b240034af8b58f7a32e13
Fix the benchmark warm-up code to make sure that the graph is not re-optimized during the timed run.
tensorflow/python/data/benchmarks/benchmark_base.py
tensorflow/python/data/benchmarks/benchmark_base.py
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test utilities for tf.data benchmarking functionality."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import time

import numpy as np

from tensorflow.python.client import session
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.platform import test


# TODO(b/119837791): Add eager benchmarks.
class DatasetBenchmarkBase(test.Benchmark):
  """Base class for dataset benchmarks."""

  def run_benchmark(self,
                    dataset,
                    num_elements,
                    iters=1,
                    warmup=True,
                    apply_default_optimizations=False):
    """Benchmarks the dataset.

    Runs the dataset `iters` times. In each iteration, the benchmark measures
    the time it takes to go through `num_elements` elements of the dataset.

    Args:
      dataset: Dataset to benchmark.
      num_elements: Number of dataset elements to iterate through each
        benchmark iteration.
      iters: Number of times to repeat the timing.
      warmup: If true, warms up the session caches by running an untimed run.
      apply_default_optimizations: Determines whether default optimizations
        should be applied.

    Returns:
      A float, representing the per-element wall time of the dataset in
      seconds. This is the median time (with respect to `iters`) it takes for
      the dataset to go through `num_elements` elements, divided by
      `num_elements.`
    """
    options = dataset_ops.Options()
    options.experimental_optimization.apply_default_optimizations = (
        apply_default_optimizations)
    dataset = dataset.with_options(options)
    # NOTE: We use `dataset.skip()` to perform the iterations in C++, avoiding
    # the overhead of multiple `session.run()` calls. Note that this relies on
    # the underlying implementation of `skip`: if it is optimized in the
    # future, we will have to change this code.
    dataset = dataset.skip(num_elements - 1)
    iterator = dataset_ops.make_initializable_iterator(dataset)
    next_element = iterator.get_next()
    next_element = nest.flatten(next_element)[0]

    deltas = []
    for _ in range(iters):
      with session.Session() as sess:
        if warmup:
          # Run once to warm up the session caches.
          sess.run(iterator.initializer)
          sess.run(next_element)

        sess.run(iterator.initializer)
        start = time.time()
        sess.run(next_element.op)
        end = time.time()
      deltas.append(end - start)
    return np.median(deltas) / float(num_elements)

  def run_and_report_benchmark(self,
                               dataset,
                               num_elements,
                               name,
                               iters=5,
                               extras=None,
                               warmup=True,
                               apply_default_optimizations=False):
    # Measure the per-element wall time.
    wall_time = self.run_benchmark(dataset, num_elements, iters, warmup,
                                   apply_default_optimizations)

    if extras is None:
      extras = {}
    extras["num_elements"] = num_elements
    self.report_benchmark(
        wall_time=wall_time, iters=iters, name=name, extras=extras)
Python
0.000087
@@ -3121,16 +3121,19 @@ _element +.op )%0A%0A
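In TF1-style graph mode, fetching a Tensor returns its value while fetching tensor.op merely executes the producing op; using the same .op fetch in the warm-up keeps the pruned graph identical to the one used in the timed run. A small demonstration (assumes a TensorFlow build that still exposes the v1 compatibility API):

import tensorflow.compat.v1 as tf

tf.disable_eager_execution()

x = tf.constant([1.0, 2.0]) * 2.0
with tf.Session() as sess:
    print(sess.run(x))     # fetches the value: [2. 4.]
    print(sess.run(x.op))  # only executes the op, returns None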
0aa167d5959eefdb3509b1b74796741793df227a
add test cases
pyfluka/reader/UsrbinReader.py
pyfluka/reader/UsrbinReader.py
import operator
from functools import partial
from itertools import chain

import numpy as np

from BaseReader import BaseReader
from pyfluka.base import InvalidInputError


class UsrbinReader(BaseReader):

    def __init__(self, quantity="Activity", dim=None, weights=None):
        super(self.__class__, self).__init__(quantity, dim, weights)

    def _load(self, filename, weight):
        """ Incorporate search for "this is" to detect line above the first line
        and "error" to detect end of value block """
        def pack_data(dataraw, axesdata):
            try:
                bin_shape = (axesdata[0][2], axesdata[1][2], axesdata[2][2])  # x,y,z
            except:
                bin_shape = (axesdata[0][2], axesdata[1][2])  # x,y,z
            reverse_bin_shape = list(bin_shape)
            reverse_bin_shape.reverse()
            dataraw = [self.pq(v, unit=self.dim) if hasattr(self, 'dim') else self.pq(v) for v in dataraw]
            try:
                return np.reshape(np.array(dataraw), reverse_bin_shape).transpose()
            except:
                return np.reshape(np.array(dataraw[:-1]), reverse_bin_shape).transpose()

        usrbin_data_dict = {}
        current_detector_name = None
        data = []
        axesdata = []
        primaries_weight_info = None
        data_reached = False
        error_mode = False
        for (i, line) in enumerate(file(filename)):
            if line.find("error") > 0:
                error_mode = True
            if line.find("Total number of particles followed") > 0:
                linesplit = line.split()
                primaries_weight_info = (float(linesplit[5][:-1]), float(linesplit[11]))
            if line.find("binning n.") > 0:
                error_mode = False
                if current_detector_name is not None:
                    usrbin_data = pack_data(data, axesdata)
                    usrbin_data_dict[current_detector_name] = (usrbin_data, axesdata, primaries_weight_info)
                    current_detector_name = None
                    data = []
                    axesdata = []
                    data_reached = False
                    error_mode = False
                    primaries_weight_info = None
                current_detector_name = line.split("\"")[1].strip()
            if data_reached and not error_mode:
                data_line = [x for x in map(float, line.split())]
                if data_line:
                    data.append(data_line)
            else:
                if line.find("coordinate:") > 0:
                    splitted = line.split()
                    axis_data = (splitted[3], splitted[5], splitted[7])
                    axesdata.append((float(splitted[3]), float(splitted[5]), int(splitted[7])))
            if line.find("this is") > 0 or line.find("accurate deposition") > 0:
                data_reached = True
        data = list(chain.from_iterable(data))
        usrbin_data = pack_data(data, axesdata)
        usrbin_data *= weight
        primaries_weight_info = tuple(map(partial(operator.mul, weight), primaries_weight_info))
        usrbin_data_dict[current_detector_name] = {self.pq.__name__: usrbin_data,
                                                   "Binning": axesdata,
                                                   "Weight": primaries_weight_info}
        return usrbin_data_dict

    @staticmethod
    def get_axis_index(axisdata, value):
        start, end, nbins = axisdata
        step = (end - start) / nbins
        if value < start:
            return -1
        if value > end:
            return nbins
        return int((value - start) / step)

    @staticmethod
    def _merge(merged_data, data):
        def _validate_merge(merged_data, data):
            if not merged_data["Binning"] == data["Binning"]:
                raise InvalidInputError("Requested merging with inconsistent binning: " +
                                        str(merged_data["Binning"]) + " and " + str(data["Binning"]))


        for det in data.keys():
            keys = data[det].keys()
            keys.remove("Binning")
            try:
                _validate_merge(merged_data[det], data[det])
            except Exception as e:
                raise e
            for key in keys:
                if isinstance(data[det][key], tuple):
                    merged_data[det][key] = tuple(map(operator.add, merged_data[det][key], data[det][key]))
                elif isinstance(data[det][key], np.ndarray):
                    merged_data[det][key] += data[det][key]
                else:
                    raise InvalidInputError("Request merge for unsupported type " + type(data[det][key]))
Python
0.000011
@@ -4010,25 +4010,24 @@ Binning%22%5D))%0A -%0A for @@ -4672,16 +4672,20 @@ ype %22 + +str( type(dat @@ -4681,25 +4681,26 @@ tr(type(data%5Bdet%5D%5Bkey%5D)) +) %0A
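One of the fixes in this commit wraps type(...) in str() before concatenation; without it the error path itself raises a second exception. A minimal reproduction:

value = 3.14
try:
    msg = "Request merge for unsupported type " + type(value)  # the bug
except TypeError:
    msg = "Request merge for unsupported type " + str(type(value))  # the fix
print(msg)  # Request merge for unsupported type <class 'float'>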
4e2f5c79b67a86fce622c486a0ea28fca0130015
clean up default arguments in strip_training_tags()
taggertester/testing.py
taggertester/testing.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from nltk.tag.stanford import StanfordPOSTagger

from .config import DATA_DIR_NAME, PATH_TO_DATA_DIR
from .files import TrainingFile, write_to_directory
from .tag import FilePair


class TaggerTester(object):
    """Collection of files for training/testing part-of-speech taggers."""

    def __init__(self):
        """Initialize the test suite."""
        pass


class SentencePair(object):
    """Pair of sentences: one tagged by hand, one by a POS tagger."""

    def __init__(self, hand_tagged_sentence, auto_tagged_sentence,
                 separator='_'):
        """Initialize the object.

        Parameters
        ----------
          hand_tagged_sentence (unicode / str) : a sentence which has been
            tagged by hand (i.e., it belongs to part of the original
            training file which was set aside to serve as a test set)
          auto_tagged_sentence (list) : a sentence which has been tagged
            automatically by a part-of-speech tagger
          separator (str) : the character which serves to separate words
            from their part-of-speech tags (likely '_' or '/')
        """
        # split the hand-tagged sentence on whitespace, since the auto-tagged
        # sentence will already be split and we want them to match
        self.hand_tagged = hand_tagged_sentence.split()
        self.auto_tagged = auto_tagged_sentence
        self.sep = separator

    def strip_training_tags(self, hand_tagged_sentence):
        """Remove the part-of-speech tags from a test sentence."""
        return [w.split(self.sep, 1)[0] for w in self.hand_tagged]
Python
0
@@ -1505,35 +1505,38 @@ s(self, -hand_tagged_sentenc +sentence=None, sep=Non e):%0A @@ -1594,24 +1594,144 @@ entence.%22%22%22%0A + if sentence == None:%0A sentence = self.hand_tagged%0A if sep == None:%0A sep = self.sep%0A retu @@ -1742,21 +1742,16 @@ w.split( -self. sep, 1)%5B @@ -1764,24 +1764,16 @@ w in se -lf.hand_tagged +ntence %5D%0A
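The cleanup applies the standard None-sentinel idiom: instance attributes cannot appear as default parameter values, so None stands in and the real default is resolved inside the method (the patch spells it == None; is None is the conventional form). A self-contained sketch with a hypothetical Tagger class:

class Tagger:
    def __init__(self, sep="_"):
        self.sep = sep

    def strip_tags(self, sentence=None, sep=None):
        if sep is None:
            sep = self.sep            # default comes from the instance
        if sentence is None:
            sentence = ["dog_NN", "runs_VB"]  # stand-in for self.hand_tagged
        return [w.split(sep, 1)[0] for w in sentence]

print(Tagger().strip_tags())  # ['dog', 'runs']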
b82996ce03989c38e076679df35b378acdce753e
fix bug: random shuffle
spider/task.py
spider/task.py
# -*- coding: utf-8 -*-

import logging
import re
import json
import uuid
import random
from urllib.parse import urlparse

from requests.exceptions import ConnectionError
import requests
import redis
import pika
from lxml import etree
from twisted.internet import reactor, defer
from scrapy.utils.project import get_project_settings
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging

from mydm.model import save_spider_settings, save_feed, is_exists_spider
from mydm.spiderfactory import SpiderFactory, SpiderFactoryException
from mydm.util import parse_redis_url

logger = logging.getLogger(__name__)
SETTINGS = get_project_settings()


def _send(key, data):
    body = json.dumps(data)
    connection = pika.BlockingConnection(
        pika.connection.URLParameters(SETTINGS['BROKER_URL']))
    channel = connection.channel()
    channel.exchange_declare(exchange='direct_logs', type='direct')
    channel.basic_publish(exchange='direct_logs', routing_key=key, body=body)
    connection.close()


def get_feed_name(url):
    parser = urlparse(url)
    fields = parser.hostname.split('.')
    if len(fields) == 1:
        return re.sub(r'[^a-zA-Z]', '', fields[0]).lower().capitalize()
    else:
        return ''.join([re.sub(r'[^a-zA-Z]', '', _).lower().capitalize()
                        for _ in fields[:-1] if _.lower() != 'www'])


def test_spider(setting):
    setting = setting.copy()
    spid = str(uuid.uuid4())
    setting['_id'] = spid
    try:
        cls = SpiderFactory.mkspider(setting)
    except SpiderFactoryException as e:
        logger.error('{}'.format(e))
        return False
    url = SETTINGS['TEMP_SPIDER_STATS_URL']
    TEST_SETTINGS = {'ITEM_PIPELINES': {'mydm.pipelines.StatsPipeline': 255},
                     'SPIDER_STATS_URL': url,
                     'BOT_NAME': 'TestSpider',
                     'WEBSERVICE_ENABLED': False,
                     'TELNETCONSOLE_ENABLED': False,
                     'LOG_LEVEL': 'INFO',
                     'LOG_FORMAT': '%(asctime)s-%(levelname)s: %(message)s',
                     'LOG_DATEFORMAT': '%Y-%m-%d %H:%M:%S'}
    configure_logging(TEST_SETTINGS, install_root_handler=False)
    logging.getLogger('scrapy').setLevel(logging.WARNING)
    runner = CrawlerRunner(TEST_SETTINGS)
    d = runner.crawl(cls)
    d.addBoth(lambda _: reactor.stop())
    logger.info('test_spider reactor starting ...')
    reactor.run()
    logger.info('test_spider reactor stopped')

    def get_stats(url, spid):
        conf = parse_redis_url(url)
        r = redis.Redis(host=conf.host, port=conf.port, db=conf.database)
        n = r.get(spid)
        r.delete(spid)
        return 0 if n is None else int(n)
    n = get_stats(url, spid)
    return True if n > 0 else False


def gen_lxmlspider(setting):
    url = setting['url']
    del setting['url']
    save_feed(url)
    try:
        r = requests.get(url, headers=SETTINGS['DEFAULT_REQUEST_HEADERS'])
    except ConnectionError:
        logger.error('Error in gen_lxmlspider connection[%s]', url)
        return False
    if r.status_code != 200:
        logger.error('Error in gen_lxmlspider requests[%s, status=%d]', url, r.status_code)
        return False
    parser = etree.XMLParser(ns_clean=True)
    root = etree.XML(r.content, parser)
    while len(root) == 1:
        root = root[0]
    for e in root:
        try:
            en = etree.QName(e.tag).localname.lower()
        except ValueError:
            continue
        else:
            if en == 'title':
                setting['title'] = re.sub(r'^(\r|\n|\s)+|(\r|\n|\s)+$', '', e.text)
    setting['name'] = get_feed_name(url)
    if 'title' not in setting:
        setting['title'] = setting['name']
    setting['type'] = 'xml'
    setting['start_urls'] = [url]
    if is_exists_spider(url):
        return True
    if test_spider(setting):
        save_spider_settings(setting)
        return True
    else:
        logger.error('Error in gen_lxmlspider[%s]', url)
        return False


def gen_blogspider(setting):
    url = setting['url']
    del setting['url']
    save_feed(url)
    setting['name'] = get_feed_name(url)
    setting['title'] = setting['name']
    setting['type'] = 'blog'
    setting['start_urls'] = [url]
    if is_exists_spider(url):
        return True
    if test_spider(setting):
        save_spider_settings(setting)
        return True
    else:
        logger.error('Error in gen_blogspider[%s]', url)
        return False


def _get_failed_spiders(spids):
    conf = parse_redis_url(SETTINGS['SPIDER_STATS_URL'])
    r = redis.Redis(host=conf.host, port=conf.port, db=conf.database)

    def get_stats(spid):
        n = r.get(spid)
        return 0 if n is None else int(n)
    return [_ for _ in spids if 0 == get_stats(_)]


def _flush_db():
    conf = parse_redis_url(SETTINGS['SPIDER_STATS_URL'])
    r = redis.Redis(host=conf.host, port=conf.port, db=conf.database)
    r.flushdb()


def crawl(args):
    spids = args.get('spiders')
    configure_logging(SETTINGS, install_root_handler=False)
    logging.getLogger('scrapy').setLevel(logging.WARNING)
    runner = CrawlerRunner(SETTINGS)
    loader = runner.spider_loader
    if 'all' in spids:
        spiders = [loader.load(_) for _ in loader.list()]
    else:
        spiders = [loader.load(_)
                   for _ in filter(lambda __: __ in loader.list(), spids)]
    if not spiders:
        return False
    for __ in random.shuffle(spiders):
        runner.crawl(__)
    d = runner.join()
    d.addBoth(lambda _: reactor.stop())
    _flush_db()
    logger.info('crawl reactor starting ...')
    reactor.run()
    logging.info('crawl reactor stopped')
    if len(spiders) > 4:
        failed_spiders = _get_failed_spiders(spids)
        if failed_spiders:
            _send(SETTINGS['CRAWL2_KEY'], {'spiders': failed_spiders})


def crawl2(args):
    spids = args.get('spiders')
    configure_logging(SETTINGS, install_root_handler=False)
    logging.getLogger('scrapy').setLevel(logging.WARNING)
    runner = CrawlerRunner(SETTINGS)
    loader = runner.spider_loader
    spiders = [loader.load(_) for _ in spids]
    if not spiders:
        return False

    @defer.inlineCallbacks
    def seqcrawl():
        for __ in random.shuffle(spiders):
            yield runner.crawl(__)

    seqcrawl()
    reactor.run()
Python
0.000002
@@ -6103,34 +6103,24 @@ False%0A%0A -for __ in random.shuff @@ -6122,32 +6122,55 @@ shuffle(spiders) + %0A for __ in spiders :%0A runner @@ -6904,24 +6904,52 @@ turn False%0A%0A + random.shuffle(spiders)%0A @defer.i @@ -7001,39 +7001,23 @@ r __ in -random.shuffle( spiders -) :%0A
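The bug this commit fixes: random.shuffle() shuffles its argument in place and returns None, so iterating over its return value raises immediately. A minimal reproduction and the corrected form:

import random

spiders = ["a", "b", "c"]
try:
    for s in random.shuffle(spiders):  # shuffle() returns None!
        pass
except TypeError as exc:
    print(exc)  # 'NoneType' object is not iterable

random.shuffle(spiders)  # shuffle in place first...
for s in spiders:        # ...then iterate the shuffled list
    print(s)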
e0ff861e26c0f3c73c8f8c55ee5be0ad5a9cd68e
Fix issues on linux (due to previous commits on windows)
pyqode/core/__init__.py
pyqode/core/__init__.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# pyQode - Python/Qt Code Editor widget
# Copyright 2013, Colin Duquesnoy <[email protected]>
#
# This software is released under the LGPLv3 license.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
This package contains the core classes of pyqode and an example of a generic
code editor widget
"""
#
# exposes public core api
#
import os
from pyqode.core import constants
from pyqode.core import logger
from pyqode.core.constants import PanelPosition
from pyqode.core.decoration import TextDecoration
from pyqode.core.editor import QCodeEdit
from pyqode.core.syntax_highlighter import SyntaxHighlighter
from pyqode.core.syntax_highlighter import FoldDetector
from pyqode.core.syntax_highlighter import IndentBasedFoldDetector
from pyqode.core.syntax_highlighter import CharBasedFoldDetector
from pyqode.core.mode import Mode
from pyqode.core.modes import AutoIndentMode
from pyqode.core.modes import CaretLineHighlighterMode
from pyqode.core.modes import CheckerMode, CheckerMessage
from pyqode.core.modes import MSG_STATUS_ERROR
from pyqode.core.modes import MSG_STATUS_INFO
from pyqode.core.modes import MSG_STATUS_WARNING
from pyqode.core.modes import CHECK_TRIGGER_TXT_CHANGED
from pyqode.core.modes import CHECK_TRIGGER_TXT_SAVED
from pyqode.core.modes import CodeCompletionMode
from pyqode.core.modes import CompletionProvider
from pyqode.core.modes import Completion
from pyqode.core.modes import DocumentWordCompletionProvider
from pyqode.core.modes import FileWatcherMode
from pyqode.core.panel import Panel
from pyqode.core.modes import PygmentsSyntaxHighlighter, PYGMENTS_STYLES
from pyqode.core.modes import RightMarginMode
from pyqode.core.modes import SymbolMatcherMode
from pyqode.core.modes import ZoomMode
from pyqode.core.panels import FoldingPanel
from pyqode.core.panels import LineNumberPanel
from pyqode.core.panels import MarkerPanel, Marker
from pyqode.core.panels import SearchAndReplacePanel
from pyqode.core.properties import PropertyRegistry
from pyqode.core.system import indexByName
from pyqode.core.system import indexMatching
from pyqode.core.system import TextStyle
from pyqode.core.system import JobRunner
from pyqode.core.system import DelayJobRunner
from pyqode.core.system import SubprocessServer
from pyqode.core.system import memoized
from pyqode.qt.ui import importRc

#: pyqode-core version
__version__ = "1.0b"


def getUiDirectory():
    """
    Gets the pyqode-core ui directory
    """
    return os.path.join(os.path.abspath(os.path.join(__file__, "..")), "ui")


def getRcDirectory():
    """
    Gets the pyqode-core rc directory
    """
    return os.path.join(os.path.abspath(os.path.join(__file__, "..")), "ui",
                        "rc")

# import the core rc modules
if os.environ["QT_API"] == "PyQt4":
    from pyqode.core.ui import pyqode_icons_pyqt_rc
else:
    from pyqode.core.ui import pyqode_icons_pyside_rc


#
# Example of a generic code editor widget
#
class QGenericCodeEdit(QCodeEdit):
    """
    Extends QCodeEdit with a hardcoded set of modes and panels.

    **Panels:**
        * line number panel
        * search and replace panel

    **Modes:**
        * document word completion
        * generic syntax highlighter (pygments)
    """
    def __init__(self, parent=None):
        QCodeEdit.__init__(self, parent)
        self.setLineWrapMode(self.NoWrap)
        self.setWindowTitle("pyQode - Generic Editor")
        self.installPanel(FoldingPanel(), PanelPosition.LEFT)
        self.installPanel(LineNumberPanel(), PanelPosition.LEFT)
        self.installPanel(SearchAndReplacePanel(), PanelPosition.BOTTOM)
        self.installMode(FileWatcherMode())
        self.installMode(CaretLineHighlighterMode())
        self.installMode(RightMarginMode())
        self.installMode(PygmentsSyntaxHighlighter(self.document()))
        self.installMode(ZoomMode())
        self.installMode(AutoIndentMode())
        self.installMode(CodeCompletionMode())
        self.codeCompletionMode.addCompletionProvider(
            DocumentWordCompletionProvider())
        self.installMode(SymbolMatcherMode())


__all__ = ["__version__", "constants", "logger", "Mode", "Panel", "QCodeEdit",
           "SyntaxHighlighter", "LineNumberPanel", "MarkerPanel", "Marker",
           "FoldingPanel", "SearchAndReplacePanel", "CaretLineHighlighterMode",
           "CheckerMode", "CheckerMessage", "MSG_STATUS_INFO",
           "MSG_STATUS_ERROR", "MSG_STATUS_WARNING", "FoldDetector",
           "IndentBasedFoldDetector", "CharBasedFoldDetector",
           "CHECK_TRIGGER_TXT_CHANGED", "CHECK_TRIGGER_TXT_SAVED",
           "CodeCompletionMode", "CompletionProvider", "Completion",
           "DocumentWordCompletionProvider", "FileWatcherMode",
           "RightMarginMode", "ZoomMode", "PygmentsSyntaxHighlighter",
           "AutoIndentMode", "PanelPosition", "TextDecoration",
           "PropertyRegistry", "TextStyle", "QGenericCodeEdit", "JobRunner",
           "DelayJobRunner", "getUiDirectory", "getRcDirectory",
           "PYGMENTS_STYLES", "indexByName", "indexMatching", "memoized",
           "SubprocessServer", "SymbolMatcherMode"]
Python
0
@@ -2913,9 +2913,8 @@ PyQt -4 %22:%0A
e8a5a97ea18120915dba74b9a73fdca4eb381568
Fix indentation level
tail/tests/test_tail.py
tail/tests/test_tail.py
""" Tests for the tail implementation """ from tail import FileBasedTail def test_tail_from_file(): """Tests that tail works as advertised from a file""" from unittest.mock import mock_open, patch, Mock # The mock_data we are using for our test mock_data = """A B C D E F """ mocked_open = mock_open(read_data=mock_data) # mock_open does not support iteration by lines by default so # we must define the following: mocked_open.return_value.__iter__.return_value = mock_data.splitlines() # The file check in the class returns no value upon a valid file # the error states just raise exceptions. mocked_file_validity_check = Mock() # We need to patch the open found in the namespace of the module # where the function is defined with patch('builtins.open', mocked_open, create=True) as mocked_file_open: # We also need to patch the file checking because we are not dealing # with an actual file in the filesystem in this unit test with patch('tail.tail.check_file_validity', mocked_file_validity_check): res = FileBasedTail('Test_filename.txt').tail(3) mocked_file_validity_check.assert_called_once_with('Test_filename.txt') mocked_file_open.assert_called_once_with('Test_filename.txt', 'r') assert len(res) == 3 assert res == ["D", "E", "F"] def test_head_from_file(): """Tests that tail works as advertised from a file""" from unittest.mock import mock_open, patch, Mock # The mock_data we are using for our test mock_data = """A B C D E F """ mocked_open = mock_open(read_data=mock_data) # mock_open does not support iteration by lines by default so # we must define the following: mocked_open.return_value.__iter__.return_value = mock_data.splitlines() # The file check in the class returns no value upon a valid file # the error states just raise exceptions. mocked_file_validity_check = Mock() # We need to patch the open found in the namespace of the module # where the function is defined with patch('builtins.open', mocked_open, create=True) as mocked_file_open: # We also need to patch the file checking because we are not dealing # with an actual file in the filesystem in this unit test with patch('tail.tail.check_file_validity', mocked_file_validity_check): res = FileBasedTail('Test_filename.txt').head(3) mocked_file_validity_check.assert_called_once_with('Test_filename.txt') mocked_file_open.assert_called_once_with('Test_filename.txt', 'r') assert len(res) == 3 assert res == ["A", "B", "C"]
Python
0.035546
@@ -792,32 +792,28 @@ with patch(' -builtins +tail .open', mock @@ -1350,20 +1350,16 @@ %22F%22%5D%0A%0A%0A - def test @@ -1377,20 +1377,16 @@ file():%0A - %22%22%22T @@ -1436,20 +1436,16 @@ ile%22%22%22%0A%0A - from @@ -1490,20 +1490,16 @@ , Mock%0A%0A - # Th @@ -1532,28 +1532,24 @@ or our test%0A - mock_dat @@ -1561,50 +1561,26 @@ %22%22A%0A - B%0A C%0A D%0A E%0A F%0A +B%0AC%0AD%0AE%0AF%0A %22%22%22%0A @@ -1571,28 +1571,24 @@ E%0AF%0A%22%22%22%0A - - mocked_open @@ -1621,20 +1621,16 @@ _data)%0A%0A - # mo @@ -1691,20 +1691,16 @@ so%0A - - # we mus @@ -1723,20 +1723,16 @@ lowing:%0A - mock @@ -1805,20 +1805,16 @@ )%0A%0A%0A - - # The fi @@ -1870,20 +1870,16 @@ id file%0A - # th @@ -1916,28 +1916,24 @@ ptions.%0A - - mocked_file_ @@ -1957,20 +1957,16 @@ Mock()%0A%0A - # We @@ -2030,20 +2030,16 @@ ule%0A - - # where @@ -2062,20 +2062,16 @@ defined%0A - with @@ -2082,16 +2082,12 @@ ch(' -builtins +tail .ope @@ -2142,28 +2142,24 @@ n:%0A%0A - - # We also ne @@ -2219,28 +2219,24 @@ ing%0A - # with an ac @@ -2285,28 +2285,24 @@ est%0A - - with patch(' @@ -2366,36 +2366,32 @@ k):%0A - res = FileBasedT @@ -2424,28 +2424,24 @@ ead(3)%0A%0A - - mocked_file_ @@ -2500,20 +2500,16 @@ e.txt')%0A - mock @@ -2571,28 +2571,24 @@ ', 'r')%0A - - assert len(r @@ -2596,20 +2596,16 @@ s) == 3%0A - asse
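Besides the dedent (the head test was accidentally nested one level deep, so it was never collected), the diff retargets the patch from builtins.open to tail.open, following mock's rule of patching a name where it is looked up, not where it is defined. A short sketch (assumes the tail package from this record is importable):

from unittest import mock

with mock.patch("tail.open", mock.mock_open(read_data="A\nB\n"),
                create=True):
    pass  # exercise code here that calls open() inside the tail module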
17951915f22d12223373bec5e8003b4de666b843
__main__ compatible with python 3.5
pyqualtrics/__main__.py
pyqualtrics/__main__.py
# -*- coding: utf-8 -*-
#
# This file is part of the pyqualtrics package.
# For copyright and licensing information about this package, see the
# NOTICE.txt and LICENSE.txt files in its top-level directory; they are
# available at https://github.com/Baguage/pyqualtrics
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import os
from pyqualtrics import Qualtrics


def main(argv):
    kwargs = {}
    iterator = iter(argv)
    executable = next(iterator)  # argv[0]
    try:
        command = next(iterator)  # argv[1]
    except StopIteration:
        print("The name of the API call to be made is required")
        return None

    user = None
    if "QUALTRICS_USER" not in os.environ:
        user = raw_input("Enter Qualtrics username: ")

    token = None
    if "QUALTRICS_TOKEN" not in os.environ:
        token = raw_input("Enter Qualtrics token: ")

    qualtrics = Qualtrics(user, token)
    method = getattr(qualtrics, command)
    if not method:
        print("%s API call is not implemented" % method)
        return None
    for option in argv:
        try:
            arg, value = option.split("=")
            kwargs[arg] = value
        except ValueError:
            # Ignore parameter in wrong format
            pass
    return method(**kwargs)


if __name__ == "__main__":
    # main(["", "createPanel", "library_id=1", "name=b"])
    result = main(sys.argv)
    if result is None:
        print("Error executing API Call")
    else:
        print("Success: %s" % result)
Python
0.999665
@@ -867,16 +867,106 @@ trics%0A%0A%0A +try:%0A # Python 2.7%0A input = raw_input%0Aexcept NameError:%0A # Python 3.5%0A pass%0A%0A%0A def main @@ -1300,20 +1300,16 @@ user = -raw_ input(%22E @@ -1414,20 +1414,16 @@ token = -raw_ input(%22E
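The shim the diff adds works because on Python 2 the name raw_input exists and rebinds the module-global input, while on Python 3 the lookup raises NameError and the built-in input() is kept unchanged:

try:
    # Python 2.7
    input = raw_input
except NameError:
    # Python 3.5: input() already returns a str
    pass

# name = input("Enter Qualtrics username: ")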
f76c6376d8fe2b7fb8c1b9a9ad2566cc22b109fb
Change dict to kwarg format in test_monomerpattern
pysb/tests/test_core.py
pysb/tests/test_core.py
from pysb.testing import *
from pysb.core import *
from functools import partial

def test_component_names_valid():
    for name in 'a', 'B', 'AbC', 'dEf', '_', '_7', '__a01b__999x_x___':
        c = Component(name, _export=False)
        eq_(c.name, name)

def test_component_names_invalid():
    for name in 'a!', '!B', 'A!bC~`\\', '_!', '_!7', '__a01b 999x_x___!':
        assert_raises(InvalidComponentNameError, Component, name,
                      _export=False)

def test_monomer():
    sites = ['x', 'y', 'z']
    states = {'y': ['foo', 'bar', 'baz'], 'x': ['e']}
    m = Monomer('A', sites, states, _export=False)
    assert_equal(m.sites, sites)
    assert_equal(m.site_states, states)
    assert_equal(type(m()), MonomerPattern)

    assert_raises(ValueError, Monomer, 'A', 'x', _export=False)
    assert_raises(Exception, Monomer, 'A', 'x', 'x', _export=False)
    assert_raises(Exception, Monomer, 'A', ['x'], {'y': ['a']}, _export=False)
    assert_raises(Exception, Monomer, 'A', ['x'], {'x': [1]}, _export=False)

@with_model
def test_monomer_model():
    Monomer('A')
    ok_(A in model.monomers)
    ok_(A in model.all_components())
    ok_(A not in model.all_components() - model.monomers)

@with_model
def test_initial():
    Monomer('A', ['s'])
    Parameter('A_0')
    Initial(A(s=None), A_0)
    assert_raises_iice = partial(assert_raises, InvalidInitialConditionError,
                                 Initial)
    assert_raises_iice('not a complexpattern', A_0)
    assert_raises_iice(A(), A_0)
    assert_raises_iice(A(s=None), A_0)
    assert_raises_iice(MatchOnce(A(s=None)), A_0)

@with_model
def test_model_pickle():
    import pickle
    A = Monomer('A', _export=False)
    B = Monomer('B', ['x', 'y'], {'x': ['e', 'f']}, _export=False)
    k = Parameter('k', 1.0, _export=False)
    r = Rule('r', A() + B(x='e', y=WILD) >> A() % B(x='f', y=None), k,
             _export=False)
    o = Observable('o', A() % B(), _export=False)
    e = Expression('e', k * o, _export=False)
    c = Compartment('c', None, 3, k, _export=False)
    for comp in [A, B, k, r, o, e, c]:
        model.add_component(comp)
    model.add_component(c)
    Initial(A() ** c, k)
    assert_equal(len(model.all_components()), 7)
    model2 = pickle.loads(pickle.dumps(model))
    check_model_against_component_list(model, model2.all_components())

@with_model
def test_monomer_as_reaction_pattern():
    A = Monomer('A', _export=False)
    as_reaction_pattern(A)

@with_model
def test_monomer_as_complex_pattern():
    A = Monomer('A', _export=False)
    as_complex_pattern(A)

@with_model
def test_monomerpattern():
    A = Monomer('A', sites=['a'], site_states={'a': ['u', 'p']},
                _export=False)
    Aw = A({'a': ('u', ANY)})

@with_model
def test_observable_constructor_with_monomer():
    A = Monomer('A', _export=False)
    o = Observable('o', A, _export=False)

@with_model
def test_compartment_initial_error():
    Monomer('A', ['s'])
    Parameter('A_0', 2.0)
    c1 = Compartment("C1")
    c2 = Compartment("C2")
    Initial(A(s=None)**c1, A_0)
    Initial(A(s=None)**c2, A_0)

@with_model
def test_monomer_pattern_add_to_none():
    """Ensure that MonomerPattern + None returns a ReactionPattern."""
    Monomer('A', ['s'])
    ok_(isinstance(A() + None, ReactionPattern),
        'A() + None did not return a ReactionPattern.')

@with_model
def test_complex_pattern_add_to_none():
    """Ensure that ComplexPattern + None returns a ReactionPattern."""
    Monomer('A', ['s'])
    ok_(isinstance(A(s=1) % A(s=1) + None, ReactionPattern),
        'A(s=1) % A(s=1) + None did not return a ReactionPattern.')

@with_model
def test_reaction_pattern_add_to_none():
    """Ensure that ReactionPattern + None returns a ReactionPattern."""
    Monomer('A', ['s'])
    cp = A(s=1) % A(s=1)
    rp = cp + cp
    ok_(isinstance(rp + None, ReactionPattern),
        'ReactionPattern + None did not return a ReactionPattern.')

@with_model
def test_complex_pattern_call():
    """Ensure ComplexPattern calling (refinement) works as expected."""
    Monomer('A', ['w', 'x'], {'x': ('e', 'f')})
    Monomer('B', ['y', 'z'], {'z': ('g', 'h')})
    cp = A(w=1, x='e') % B(y=1, z='g')
    r = {'x': 'f', 'z': 'h'}  # refinement for complexpattern
    ok_(cp(**r))
    ok_(cp(**r).monomer_patterns[0].site_conditions['x'] == r['x'])
    ok_(cp(r))
    ok_(cp(r).monomer_patterns[0].site_conditions['x'] == r['x'])
    assert_raises(RedundantSiteConditionsError, cp, {'x': 'f'}, z='h')
Python
0.00001
@@ -2685,23 +2685,20 @@ = A( -%7B'a': +a= ('u', + ANY) -%7D )%0A%0A@
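The two call styles exercised in the test above differ only in unpacking: cp(**r) spreads the dict into keyword arguments, while cp(r) passes the dict as a single positional argument. A generic illustration:

def call(*args, **kwargs):
    return args, kwargs

r = {'x': 'f', 'z': 'h'}
print(call(**r))  # ((), {'x': 'f', 'z': 'h'})
print(call(r))    # (({'x': 'f', 'z': 'h'},), {})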
33efe92104ad139f9313d91ae7b2eea8a76da9d7
fix flake8
pyscalambda/__init__.py
pyscalambda/__init__.py
from pyscalambda.operands import Underscore
from pyscalambda.operators import UnaryOperator
from pyscalambda.quote import quote
from pyscalambda.scalambdable import scalambdable_const, scalambdable_func, scalambdable_iterator
from pyscalambda.utility import convert_operand

_ = Underscore(0)
_1 = Underscore(1)
_2 = Underscore(2)
_3 = Underscore(3)
_4 = Underscore(4)
_5 = Underscore(5)
_6 = Underscore(6)
_7 = Underscore(7)
_8 = Underscore(8)
_9 = Underscore(9)

SF = scalambdable_func
SC = scalambdable_const
SI = scalambdable_iterator
Q = quote


def not_(value):
    return UnaryOperator("not ", convert_operand(value))

__all__ = ("_", "_1", "_2", "_3", "_4", "_5", "_6", "_7", "_8", "_9", "SF", "SC", "Q", "not_")
Python
0
@@ -622,16 +622,17 @@ alue))%0A%0A +%0A __all__
4b6117fd4835cbde52e8d3fba79e46c2ec63a637
Add explanatory comments about the parent-child relationships
mapit/management/commands/find_parents.py
mapit/management/commands/find_parents.py
# This script is used after Boundary-Line has been imported to
# associate shapes with their parents. With the new coding
# system coming in, this could be done from a BIG lookup table; however,
# I reckon P-in-P tests might be quick enough...

from django.core.management.base import NoArgsCommand
from mapit.models import Area, Generation

class Command(NoArgsCommand):
    help = 'Find parents for shapes'

    def handle_noargs(self, **options):
        new_generation = Generation.objects.new()
        if not new_generation:
            raise Exception, "No new generation to be used for import!"

        parentmap = {
            'DIW': 'DIS',
            'CED': 'CTY',
            'LBW': 'LBO',
            'LAC': 'GLA',
            'MTW': 'MTD',
            'UTE': 'UTA',
            'UTW': 'UTA',
            'SPC': 'SPE',
            'WAC': 'WAE',
            'CPC': ('DIS', 'UTA', 'MTD', 'LBO', 'COI'),
        }
        for area in Area.objects.filter(
            type__code__in=parentmap.keys(),
            generation_low__lte=new_generation,
            generation_high__gte=new_generation,
        ):
            polygon = area.polygons.all()[0]
            try:
                args = {
                    'polygons__polygon__contains': polygon.polygon.point_on_surface,
                    'generation_low__lte': new_generation,
                    'generation_high__gte': new_generation,
                }
                if isinstance(parentmap[area.type.code], str):
                    args['type__code'] = parentmap[area.type.code]
                else:
                    args['type__code__in'] = parentmap[area.type.code]
                parent = Area.objects.get(**args)
            except Area.DoesNotExist:
                raise Exception, "Area %s does not have a parent?" % (self.pp_area(area))
            if area.parent_area != parent:
                print "Parent for %s was %s, is now %s" % (self.pp_area(area), self.pp_area(area.parent_area), self.pp_area(parent))
                area.parent_area = parent
                area.save()

    def pp_area(self, area):
        if not area:
            return "None"
        return "%s [%d] (%s)" % (area.name, area.id, area.type.code)
Python
0
@@ -635,229 +635,1140 @@ -'DIW': 'DIS',%0A 'CED': 'CTY',%0A 'LBW': 'LBO',%0A 'LAC': 'GLA',%0A 'MTW': 'MTD',%0A 'UTE': 'UTA',%0A 'UTW': 'UTA',%0A 'SPC': 'SPE',%0A 'WAC': 'WAE', +# A District council ward's parent is a District council:%0A 'DIW': 'DIS',%0A # A County council ward's parent is a County council:%0A 'CED': 'CTY',%0A # A London borough ward's parent is a London borough:%0A 'LBW': 'LBO',%0A # A London Assembly constituency's parent is the Greater London Authority:%0A 'LAC': 'GLA',%0A # A Metropolitan district ward's parent is a Metropolitan district:%0A 'MTW': 'MTD',%0A # A Unitary Authority ward (UTE)'s parent is a Unitary Authority:%0A 'UTE': 'UTA',%0A # A Unitary Authority ward (UTW)'s parent is a Unitary Authority:%0A 'UTW': 'UTA',%0A # A Scottish Parliament constituency's parent is a Scottish Parliament region:%0A 'SPC': 'SPE',%0A # A Welsh Assembly constituency's parent is a Welsh Assembly region:%0A 'WAC': 'WAE',%0A # A Civil Parish's parent is one of:%0A # District council%0A # Unitary Authority%0A # Metropolitan district%0A # London borough%0A # Scilly Isles %0A
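The isinstance dispatch above is the one subtle part of the lookup: a plain string in parentmap becomes an exact type__code filter, while a tuple (as for Civil Parishes, 'CPC') becomes a type__code__in filter. A dependency-free sketch of that dispatch; the helper name is hypothetical, and the codes are taken from the record:

def parent_filter_args(parentmap, child_code):
    """Build Django-style filter kwargs for a child area's candidate parents."""
    parent_types = parentmap[child_code]
    if isinstance(parent_types, str):
        return {'type__code': parent_types}
    return {'type__code__in': parent_types}


parentmap = {'DIW': 'DIS', 'CPC': ('DIS', 'UTA', 'MTD', 'LBO', 'COI')}
print(parent_filter_args(parentmap, 'DIW'))
# {'type__code': 'DIS'}
print(parent_filter_args(parentmap, 'CPC'))
# {'type__code__in': ('DIS', 'UTA', 'MTD', 'LBO', 'COI')}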
5d94f90126260f147822ba8d3afe9c1c0a85e943
Discard FASTA headers by default.
pypeline/common/formats/msa.py
pypeline/common/formats/msa.py
#!/usr/bin/python
#
# Copyright (c) 2012 Mikkel Schubert <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import sys
import gzip
import types

from collections import defaultdict

from pypeline.common.sequences import split
from pypeline.common.formats.fasta import parse_fasta, print_fasta


class MSAError(RuntimeError):
    pass


def split_msa(msa, split_by = "123"):
    validate_msa(msa)
    if not split_by:
        raise TypeError("No partitions to split by specified")

    results = {}
    for key in split_by:
        results[key] = dict((name, {}) for name in msa)

    for (name, sequence) in msa.iteritems():
        for (key, partition) in split(sequence, split_by).iteritems():
            results[key][name] = partition

    return results


def join_msa(*msas):
    validate_msa(*msas)
    results = defaultdict(list)
    for msa in msas:
        for (name, sequence) in msa.iteritems():
            results[name].append(sequence)

    return dict((key, "".join(value)) for (key, value) in results.iteritems())


def parse_msa(lines):
    msa = {}
    for (name, sequence) in parse_fasta(lines):
        if name in msa:
            raise MSAError("Duplicate names found, cannot be represented as MSA")
        msa[name] = sequence

    validate_msa(msa)
    return msa


def read_msa(filename):
    func = gzip.open if filename.endswith(".gz") else open
    fasta_file = func(filename, "r")
    try:
        return parse_msa(iter(fasta_file))
    finally:
        fasta_file.close()


def print_msa(msa, file = sys.stdout):
    validate_msa(msa)
    for group in sorted(msa):
        print_fasta(group, msa[group], file)


def write_msa(msa, filename):
    validate_msa(msa)
    with open(filename, "w") as fileobj:
        print_msa(msa, fileobj)


def validate_msa(*msas):
    if not msas:
        raise TypeError("No MSAs given as arguments")

    keywords = set(msas[0])
    for msa in msas:
        if not msa:
            raise MSAError("MSA with no sequences found")
        elif not all((name and isinstance(name, types.StringTypes)) for name in msa):
            raise MSAError("Names in MSA must be non-empty strings")
        elif len(set(len(seq) for seq in msa.itervalues())) != 1:
            raise MSAError("MSA contains sequences of differing lengths")
        elif set(msa) != keywords:
            raise MSAError("MSAs contain mismatching sequences")
Python
0
@@ -2088,24 +2088,45 @@ se_msa(lines +, read_header = False ):%0A msa = @@ -2123,18 +2123,31 @@ %0A msa +, headers = + %7B%7D, %7B%7D%0A @@ -2143,36 +2143,38 @@ %7B%7D, %7B%7D%0A for ( -name +header , sequence) in p @@ -2188,24 +2188,64 @@ sta(lines):%0A + name = header.split(None, 1)%5B0%5D%0A if n @@ -2367,38 +2367,116 @@ equence%0A -%0A validate_msa(msa) + headers%5Bname%5D = header%0A%0A validate_msa(msa)%0A if read_header:%0A return msa, header %0A ret @@ -2502,24 +2502,45 @@ msa(filename +, read_header = False ):%0A func @@ -2676,16 +2676,43 @@ ta_file) +, read_header = read_header )%0A fi
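A usage sketch for the helpers above, assuming pypeline is importable and that pypeline.common.sequences.split partitions a sequence cyclically under the labels in split_by (the codon-position reading its "123" default suggests). The FASTA records are made up, and the iteritems() calls mark this as Python 2 code:

from pypeline.common.formats.msa import parse_msa, split_msa, join_msa

fasta_lines = iter([">seq1", "ATGAAA", ">seq2", "ATGCCC"])  # made-up records
msa = parse_msa(fasta_lines)   # {'seq1': 'ATGAAA', 'seq2': 'ATGCCC'}

partitions = split_msa(msa)    # one sub-MSA per character of "123"
rejoined = join_msa(partitions["1"], partitions["2"], partitions["3"])
# join_msa concatenates its arguments per name, so 'seq1' here becomes the
# position-1 characters, then position-2, then position-3 of the original.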
c0824d3cb9cba811ba36c2f8937e91716f5a50df
Fix lint
ci/run_script.py
ci/run_script.py
""" Run tests and linters on Travis CI. """ import os import subprocess import sys from pathlib import Path import pytest def run_test(test_filename: str) -> None: """ Run pytest with a given filename. """ path = Path('tests') / 'mock_vws' / test_filename result = pytest.main([ '-vvv', '--exitfirst', str(path), '--cov=src', '--cov=tests', ]) sys.exit(result) if __name__ == '__main__': TEST_FILENAME = os.environ.get('TEST_FILENAME') if TEST_FILENAME: run_test(test_filename=TEST_FILENAME) else: subprocess.check_call(['make', 'lint'])
Python
0.000032
@@ -294,16 +294,25 @@ st.main( +%0A %5B%0A @@ -317,16 +317,20 @@ + + '-vvv',%0A @@ -325,16 +325,20 @@ '-vvv',%0A + @@ -360,16 +360,20 @@ + + str(path @@ -375,16 +375,20 @@ (path),%0A + @@ -400,16 +400,20 @@ v=src',%0A + @@ -431,17 +431,26 @@ s',%0A -%5D + %5D%0A )%0A sy
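A sketch of how a CI job would exercise the dispatch above, assuming the script lives at ci/run_script.py as the record's paths state; the test filename is hypothetical:

import os
import subprocess

# With TEST_FILENAME set, the script runs that one test module under coverage...
env = dict(os.environ, TEST_FILENAME='test_add_target.py')  # hypothetical name
subprocess.check_call(['python', 'ci/run_script.py'], env=env)

# ...and with TEST_FILENAME unset, the script falls through to `make lint`.
env = {k: v for k, v in os.environ.items() if k != 'TEST_FILENAME'}
subprocess.check_call(['python', 'ci/run_script.py'], env=env)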