Dataset schema (one value per field; string fields list min/max length):

    commit        stringlengths  40 - 40
    subject       stringlengths  1 - 3.25k
    old_file      stringlengths  4 - 311
    new_file      stringlengths  4 - 311
    old_contents  stringlengths  0 - 26.3k
    lang          stringclasses  3 values
    proba         float64        0 - 1
    diff          stringlengths  0 - 7.82k
a9cfdf8fdb6853f175cdc31abc2dec91ec6dcf3a
fix import
InvenTree/part/tasks.py
InvenTree/part/tasks.py
# -*- coding: utf-8 -*-

from __future__ import unicode_literals

import logging

from django.utils.translation import gettext_lazy as _

import InvenTree.helpers
import InvenTree.tasks

import common.notifications

import part.models
from part import tasks as part_tasks

logger = logging.getLogger("inventree")


def notify_low_stock(part: part.models.Part):

    name = _("Low stock notification")
    message = _(f'The available stock for {part.name} has fallen below the configured minimum level')

    context = {
        'part': part,
        'name': name,
        'message': message,
        'link': InvenTree.helpers.construct_absolute_url(part.get_absolute_url()),
        'template': {
            'html': 'email/low_stock_notification.html',
            'subject': "[InvenTree] " + name,
        },
    }

    common.notifications.trigger_notifaction(
        part,
        'part.notify_low_stock',
        target_fnc=part.get_subscribers,
        context=context,
    )


def notify_low_stock_if_required(part: part.models.Part):
    """
    Check if the stock quantity has fallen below the minimum threshold of part.

    If true, notify the users who have subscribed to the part
    """

    # Run "up" the tree, to allow notification for "parent" parts
    parts = part.get_ancestors(include_self=True, ascending=True)

    for p in parts:
        if p.is_part_low_on_stock():
            InvenTree.tasks.offload_task(
                part_tasks.notify_low_stock,
                p
            )
Python
0.000001
@@ -229,45 +229,8 @@
 dels
-%0Afrom part import tasks as part_tasks
 %0A%0Alo
@@ -1407,19 +1407,8 @@
-part_tasks.
 noti
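Decoded, the patch drops the self-referential `from part import tasks as part_tasks` import and the `part_tasks.` prefix, since `notify_low_stock` is defined in this same module. A reconstruction of the patched call site (whitespace approximate):

for p in parts:
    if p.is_part_low_on_stock():
        InvenTree.tasks.offload_task(
            notify_low_stock,
            p
        )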
26a21a9f5da718852c193420a0132ad822139ec0
Remove PHPBB crap
apps/devmo/context_processors.py
apps/devmo/context_processors.py
from django.conf import settings
from django.utils import translation


def i18n(request):
    return {'LANGUAGES': settings.LANGUAGES,
            'LANG': settings.LANGUAGE_URL_MAP.get(translation.get_language())
                    or translation.get_language(),
            'DIR': 'rtl' if translation.get_language_bidi() else 'ltr',
            }


def next_url(request):
    if 'login' not in request.path and 'register' not in request.path:
        return {'next_url': request.path}
    return {}


def phpbb_logged_in(request):
    """Detect PHPBB login cookie."""
    return {
        'PHPBB_LOGGED_IN': (request.COOKIES.get(
            '%s_u' % settings.PHPBB_COOKIE_PREFIX, '1') != '1'),
        'PHPBB_SID': request.COOKIES.get(
            '%s_sid' % settings.PHPBB_COOKIE_PREFIX),
    }
Python
0
@@ -500,303 +500,4 @@
 n %7B%7D
-%0A%0A%0Adef phpbb_logged_in(request):%0A    %22%22%22Detect PHPBB login cookie.%22%22%22%0A    return %7B%0A        'PHPBB_LOGGED_IN': (request.COOKIES.get(%0A            '%25s_u' %25 settings.PHPBB_COOKIE_PREFIX, '1') != '1'),%0A        'PHPBB_SID': request.COOKIES.get(%0A            '%25s_sid' %25 settings.PHPBB_COOKIE_PREFIX),%0A    %7D%0A
5c9452a125bd3d2bbeb15224db0a7effa94e5330
Correct showVisible value.
apps/python/PartyLaps/ACTable.py
apps/python/PartyLaps/ACTable.py
""" A table drawing utility for Assetto Corsa. """ class ACTable(object): def __init__(self, ac, window): self.ac = ac self.window = window self.setTablePadding(0, 0) self.setCellSpacing(0, 0) self.data = {} self.cells = {} def draw(self): """ Initialise the data storage array and label array. We are required to store cell data so that the cell information can be retrieved when redrawing due to a font size change. """ self.data = {} # if self.ac is unavailable then we must be in a test and cannot # proceed. if self.ac is None: return # Delete all existing labels for label in self.cells.items(): self.ac.setVisible(label, False) self.cells = {} for i in range(self.nColumns): for j in range(self.nRows): label = self.ac.addLabel(self.window, "") self.ac.setSize(label, self.columnWidths[i] * self.fontSize, self.fontSize) self.ac.setPosition(label, *self._cellPosition(i, j)) self.ac.setFontSize(label, self.fontSize) self.ac.setFontAlignment(label, self.columnAlignments[i]) self.cells[(i, j)] = label def setSize(self, nColumns, nRows): """ Set the size of the table in columns and rows. """ self.nColumns = nColumns self.nRows = nRows def setFontSize(self, fontSize): self.fontSize = fontSize def setTablePadding(self, paddingX, paddingY): """ Set the pixel amount of padding at the top and left of the table. """ self.paddingX = paddingX self.paddingY = paddingY def setCellSpacing(self, spacingX, spacingY): """ Set the pixel amount of spacing between each cell. """ self.spacingX = spacingX self.spacingY = spacingY def setColumnWidths(self, *columnWidths): """ Set the width of each column. The width is given in multiples of the font size. """ if len(columnWidths) != self.nColumns: raise ValueError("The number of provided column width entries does " "not match the expected number of columns.") self.columnWidths = columnWidths def setColumnAlignments(self, *columnAlignments): """ Set the alignments of each column, possible values are 'left', 'right' and 'center'. """ if len(columnAlignments) != self.nColumns: raise ValueError("The number of provided column alignment entries " "does not match the expected number of columns.") self.columnAlignments = columnAlignments def _cellPosition(self, iX, iY): """ Return the (x,y) co-ordinates for a cell at position iX,iY. """ #self.ac.log(" ".join(map(str, [type(iX), type(iY), type(self.fontSize), type(self.spacing)]))) #self.ac.log(repr(self.columnWidths)) x = self.paddingX + (sum(self.columnWidths[:iX]) * self.fontSize) + (iX * self.spacingX) y = self.paddingY + iY * (self.fontSize + self.spacingY) return (x, y) def setCellValue(self, text, iX, iY): """ Set the cell text at position iX,iY. """ self.ac.setText(self.getCellLabel(iX, iY), text) self.data[(iX, iY)] = text def setFontColor(self, r, g, b, s, iX, iY): """ Set the font color of the cell at iX,iY. """ self.ac.setFontColor(self.getCellLabel(iX, iY), r, g, b, s) def getCellLabel(self, iX, iY): try: return self.cells[(iX, iY)] except KeyError: raise ValueError("Cell not found: (%s,%s)" % (iX, iY)) def addOnClickedListener(self, iX, iY, callback): self.ac.addOnClickedListener(self.getCellLabel(iX, iY), callback)
Python
0
@@ -801,13 +801,9 @@
 el, 
-False
+0
 )%0A%0A
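Applied, the one-character change passes the integer 0 instead of False to setVisible, which is the value this game API call expects. The patched cleanup loop in draw() reads:

# Delete all existing labels
for label in self.cells.items():
    self.ac.setVisible(label, 0)
self.cells = {}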
243f7eb02ad1f44560df1a24d357f97277f39531
Refactor get_content_types_pks to use ContentType.objects.get_for_models (which implements its own TransactionTestCase-compatible caching)
wagtail/contrib/postgres_search/utils.py
wagtail/contrib/postgres_search/utils.py
from __future__ import absolute_import, division, unicode_literals

import operator
import re
from functools import partial, reduce

from django.apps import apps
from django.db import connections
from django.db.models import Q
from django.utils.lru_cache import lru_cache
from django.utils.six.moves import zip_longest

from wagtail.wagtailsearch.index import Indexed, RelatedFields, SearchField

try:
    # Only use the GPLv2 licensed unidecode if it's installed.
    from unidecode import unidecode
except ImportError:
    def unidecode(value):
        return value


def get_postgresql_connections():
    return [connection for connection in connections.all()
            if connection.vendor == 'postgresql']


# Reduce any iterable to a single value using a logical OR e.g. (a | b | ...)
OR = partial(reduce, operator.or_)
# Reduce any iterable to a single value using a logical AND e.g. (a & b & ...)
AND = partial(reduce, operator.and_)
# Reduce any iterable to a single value using an addition
ADD = partial(reduce, operator.add)


def keyword_split(keywords):
    """
    Return all the keywords in a keyword string.

    Keeps keywords surrounded by quotes together, removing the surrounding quotes:

    >>> keyword_split('Hello I\\'m looking for "something special"')
    ['Hello', "I'm", 'looking', 'for', 'something special']

    Nested quoted strings are returned as is:

    >>> keyword_split("He said \\"I'm looking for 'something special'\\" so I've given him the 'special item'")
    ['He', 'said', "I'm looking for 'something special'", 'so', "I've", 'given', 'him', 'the', 'special item']

    """
    matches = re.findall(r'"([^"]+)"|\'([^\']+)\'|(\S+)', keywords)
    return [match[0] or match[1] or match[2] for match in matches]


def get_descendant_models(model):
    """
    Returns all descendants of a model, including the model itself.
    """
    descendant_models = {other_model for other_model in apps.get_models()
                         if issubclass(other_model, model)}
    descendant_models.add(model)
    return descendant_models


def get_descendants_content_types_pks(models, db_alias):
    return get_content_types_pks(
        tuple(descendant_model for model in models
              for descendant_model in get_descendant_models(model)), db_alias)


@lru_cache()
def get_content_types_pks(models, db_alias):
    # We import it locally because this file is loaded before apps are ready.
    from django.contrib.contenttypes.models import ContentType
    return list(ContentType._default_manager.using(db_alias)
                .filter(OR([Q(app_label=model._meta.app_label,
                              model=model._meta.model_name)
                            for model in models]))
                .values_list('pk', flat=True))


def get_search_fields(search_fields):
    for search_field in search_fields:
        if isinstance(search_field, SearchField):
            yield search_field
        elif isinstance(search_field, RelatedFields):
            for sub_field in get_search_fields(search_field.fields):
                yield sub_field


WEIGHTS = 'ABCD'
WEIGHTS_COUNT = len(WEIGHTS)
# These are filled when apps are ready.
BOOSTS_WEIGHTS = []
WEIGHTS_VALUES = []


def get_boosts():
    boosts = set()
    for model in apps.get_models():
        if issubclass(model, Indexed):
            for search_field in get_search_fields(model.get_search_fields()):
                boost = search_field.boost
                if boost is not None:
                    boosts.add(boost)
    return boosts


def determine_boosts_weights(boosts=()):
    if not boosts:
        boosts = get_boosts()
    boosts = list(sorted(boosts, reverse=True))
    min_boost = boosts[-1]
    if len(boosts) <= WEIGHTS_COUNT:
        return list(zip_longest(boosts, WEIGHTS, fillvalue=min(min_boost, 0)))
    max_boost = boosts[0]
    boost_step = (max_boost - min_boost) / (WEIGHTS_COUNT - 1)
    return [(max_boost - (i * boost_step), weight)
            for i, weight in enumerate(WEIGHTS)]


def get_weight(boost):
    if boost is None:
        return WEIGHTS[-1]
    for max_boost, weight in BOOSTS_WEIGHTS:
        if boost >= max_boost:
            return weight
    return weight
Python
0
@@ -193,84 +193,8 @@
 ons%0A
-from django.db.models import Q%0Afrom django.utils.lru_cache import lru_cache%0A
 from
@@ -2217,21 +2217,8 @@
 )%0A%0A%0A
-@lru_cache()%0A
 def
@@ -2403,28 +2403,37 @@
 ype%0A
-return list(
+content_types_dict = 
 ContentT
@@ -2440,24 +2440,26 @@
 ype.
-_default
+objects.db
 _manager
 .usi
@@ -2458,14 +2458,8 @@
 ager
-.using
 (db_
@@ -2468,229 +2468,89 @@
 ias)
-%0A .filter(OR(%5BQ(app_label=model._meta.app_label,%0A model=model._meta.model_name)%0A for model in models%5D))%0A .values_list('pk', flat=True))
+.get_for_models(*models)%0A    return %5Bct.pk for ct in content_types_dict.values()%5D
 %0A%0A%0Ad
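After this patch the function delegates caching to ContentType's own manager (which, as the subject notes, is TransactionTestCase-compatible), so the lru_cache decorator and the Q-based filter go away. A reconstruction of the patched helper:

def get_content_types_pks(models, db_alias):
    # We import it locally because this file is loaded before apps are ready.
    from django.contrib.contenttypes.models import ContentType
    content_types_dict = ContentType.objects.db_manager(db_alias).get_for_models(*models)
    return [ct.pk for ct in content_types_dict.values()]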
2bf43e3ba86cc248e752175ffb82f4eab1803119
delete question module had bug previously
survey/models/question_module.py
survey/models/question_module.py
from survey.models import BaseModel
from django.db import models


class QuestionModule(BaseModel):
    name = models.CharField(max_length=255)
    description = models.TextField(null=True, blank=True)

    def remove_related_questions(self):
        self.question_templates.delete()

    def __unicode__(self):
        return self.name
Python
0
@@ -268,16 +268,22 @@
 mplates.
+all().
 delete()
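The bug: a reverse-relation manager has no delete() method, but the queryset returned by .all() does. The patched method:

def remove_related_questions(self):
    self.question_templates.all().delete()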
e21e2ff9b8258be5533261f7834438c80b0082cc
Use iter(...) instead of .iter()
framework/tasks/handlers.py
framework/tasks/handlers.py
# -*- coding: utf-8 -*-

import logging
import functools

from flask import g

from celery import group

from website import settings

logger = logging.getLogger(__name__)


def celery_before_request():
    g._celery_tasks = []


def celery_teardown_request(error=None):
    if error is not None:
        return
    try:
        tasks = g._celery_tasks
        if tasks:
            group(tasks.iter()).apply_async()
    except AttributeError:
        if not settings.DEBUG_MODE:
            logger.error('Task queue not initialized')


def enqueue_task(signature):
    """If working in a request context, push task signature to ``g`` to run
    after request is complete; else run signature immediately.

    :param signature: Celery task signature
    """
    try:
        if signature not in g._celery_tasks:
            g._celery_tasks.append(signature)
    except RuntimeError:
        signature()


def queued_task(task):
    """Decorator that adds the wrapped task to the queue on ``g`` if Celery
    is enabled, else runs the task synchronously. Can only be applied to
    Celery tasks; should be used for all tasks fired within a request context
    that may write to the database to avoid race conditions.
    """
    @functools.wraps(task)
    def wrapped(*args, **kwargs):
        if settings.USE_CELERY:
            signature = task.si(*args, **kwargs)
            enqueue_task(signature)
        else:
            task(*args, **kwargs)
    return wrapped


handlers = {
    'before_request': celery_before_request,
    'teardown_request': celery_teardown_request,
}
Python
0.000004
@@ -386,19 +386,18 @@
 oup(
-tasks.
 iter(
+tasks
 )).a
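Python lists have no .iter() method; the built-in iter(...) must wrap the list instead. The corrected call:

tasks = g._celery_tasks
if tasks:
    group(iter(tasks)).apply_async()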
9e2f9b040d0dde3237daca1c483c8b2bf0170663
Update Arch package to 2.7
archlinux/archpack_settings.py
archlinux/archpack_settings.py
#
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#

def settings():
    return {"version": "2.6.1",
            "release_number": "1",
            "arch_deps": ["cmake>=3.0.2", "zlib", "glibc", "sqlite", "wget",
                          "python2-pmw"],
            "debian_deps": ["zlib1g", "libc-bin", "libsqlite3-0", "wget",
                            "lib32z1", "python-tk"]
            }

if __name__ == '__main__':
    print(settings())
Python
0
@@ -157,11 +157,9 @@
 %222.
-6.1
+7
 %22,%0A%09
98e4452e07256aa3285906bba60e16ce4dfd1dc3
Replace do_add_subscription() in add_users_to_streams.
zerver/management/commands/add_users_to_streams.py
zerver/management/commands/add_users_to_streams.py
from __future__ import absolute_import
from __future__ import print_function

from optparse import make_option
from typing import Any

from django.core.management.base import BaseCommand
from zerver.lib.actions import create_stream_if_needed, do_add_subscription
from zerver.models import UserProfile, get_realm, get_user_profile_by_email

class Command(BaseCommand):
    help = """Add some or all users in a realm to a set of streams."""

    option_list = BaseCommand.option_list + (
        make_option('-d', '--domain',
                    dest='domain',
                    type='str',
                    help='The name of the realm in which you are adding people to streams.'),
        make_option('-s', '--streams',
                    dest='streams',
                    type='str',
                    help='A comma-separated list of stream names.'),
        make_option('-u', '--users',
                    dest='users',
                    type='str',
                    help='A comma-separated list of email addresses.'),
        make_option('-a', '--all-users',
                    dest='all_users',
                    action="store_true",
                    default=False,
                    help='Add all users in this realm to these streams.'),
    )

    def handle(self, **options):
        # type: (**Any) -> None
        if options["domain"] is None or options["streams"] is None or \
                (options["users"] is None and options["all_users"] is None):
            self.print_help("python manage.py", "add_users_to_streams")
            exit(1)

        stream_names = set([stream.strip() for stream in options["streams"].split(",")])
        realm = get_realm(options["domain"])

        if options["all_users"]:
            user_profiles = UserProfile.objects.filter(realm=realm)
        else:
            emails = set([email.strip() for email in options["users"].split(",")])
            user_profiles = []
            for email in emails:
                user_profiles.append(get_user_profile_by_email(email))

        for stream_name in set(stream_names):
            for user_profile in user_profiles:
                stream, _ = create_stream_if_needed(user_profile.realm, stream_name)
                did_subscribe = do_add_subscription(user_profile, stream)
                print("%s %s to %s" % (
                    "Subscribed" if did_subscribe else "Already subscribed",
                    user_profile.email, stream_name))
Python
0
@@ -238,18 +238,20 @@
 needed, 
-do
+bulk
 _add_sub
@@ -259,16 +259,17 @@
 cription
+s
 %0Afrom ze
@@ -2245,19 +2245,32 @@
-did
+_ignore, already
 _subscri
@@ -2275,13 +2275,16 @@
 ribe
+d
  = 
-do
+bulk
 _add
@@ -2300,30 +2300,132 @@
 tion
-(user_profile, stream)
+s(%5Bstream%5D, %5Buser_profile%5D)%0A                was_there_already = user_profile.id in %7Btup%5B0%5D.id for tup in already_subscribed%7D
 %0A
@@ -2486,9 +2486,17 @@
 %22
-S
+Already s
 ubsc
@@ -2509,37 +2509,33 @@
 if 
-did_subscribe else %22Already s
+was_there_already else %22S
 ubsc
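The patch replaces the removed do_add_subscription with bulk_add_subscriptions, which returns (subscribed, already_subscribed) lists of tuples. A reconstruction of the patched loop (import line shortened to the changed names; whitespace approximate):

from zerver.lib.actions import create_stream_if_needed, bulk_add_subscriptions

for stream_name in set(stream_names):
    for user_profile in user_profiles:
        stream, _ = create_stream_if_needed(user_profile.realm, stream_name)
        _ignore, already_subscribed = bulk_add_subscriptions([stream], [user_profile])
        was_there_already = user_profile.id in {tup[0].id for tup in already_subscribed}
        print("%s %s to %s" % (
            "Already subscribed" if was_there_already else "Subscribed",
            user_profile.email, stream_name))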
ecd33e00eb5eb8ff58358e01a6d618262e8381a6
Update upstream version of vo
astropy/io/vo/setup_package.py
astropy/io/vo/setup_package.py
from distutils.core import Extension
from os.path import join

from astropy import setup_helpers


def get_extensions(build_type='release'):
    VO_DIR = 'astropy/io/vo/src'

    return [Extension(
        "astropy.io.vo.tablewriter",
        [join(VO_DIR, "tablewriter.c")],
        include_dirs=[VO_DIR])]


def get_package_data():
    return {
        'astropy.io.vo': [
            'data/ucd1p-words.txt', 'data/*.xsd', 'data/*.dtd'],
        'astropy.io.vo.tests': [
            'data/*.xml', 'data/*.gz', 'data/*.json', 'data/*.fits',
            'data/*.txt'],
        'astropy.io.vo.validator': [
            'urls/*.dat.gz']}


def get_legacy_alias():
    return setup_helpers.add_legacy_alias(
        'vo', 'astropy.io.vo', '0.7.2')
Python
0
@@ -735,10 +735,8 @@
 '0.
-7.2
+8
 ')%0A
dc7ac28109609e2a90856dbaf01ae8bbb2fd6985
Repair the test (adding a docstring to the module type changed the docstring for an uninitialized module object).
Lib/test/test_module.py
Lib/test/test_module.py
# Test the module type
from test_support import verify, vereq, verbose, TestFailed
import sys
module = type(sys)

# An uninitialized module has no __dict__ or __name__, and __doc__ is None
foo = module.__new__(module)
verify(foo.__dict__ is None)
try:
    s = foo.__name__
except AttributeError:
    pass
else:
    raise TestFailed, "__name__ = %s" % repr(s)
vereq(foo.__doc__, None)

# Regularly initialized module, no docstring
foo = module("foo")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, None)
vereq(foo.__dict__, {"__name__": "foo", "__doc__": None})

# ASCII docstring
foo = module("foo", "foodoc")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, "foodoc")
vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc"})

# Unicode docstring
foo = module("foo", u"foodoc\u1234")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, u"foodoc\u1234")
vereq(foo.__dict__, {"__name__": "foo", "__doc__": u"foodoc\u1234"})

# Reinitialization should not replace the __dict__
foo.bar = 42
d = foo.__dict__
foo.__init__("foo", "foodoc")
vereq(foo.__name__, "foo")
vereq(foo.__doc__, "foodoc")
vereq(foo.bar, 42)
vereq(foo.__dict__, {"__name__": "foo", "__doc__": "foodoc", "bar": 42})
verify(foo.__dict__ is d)

if verbose:
    print "All OK"
Python
0
@@ -373,20 +373,30 @@
 _doc__, 
-None
+module.__doc__
 )%0A%0A# Reg
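Since the module type gained a docstring (per the subject), an uninitialized module's __doc__ now falls back to module.__doc__ rather than None. The repaired assertion:

# an uninitialized module inherits the module type's docstring
vereq(foo.__doc__, module.__doc__)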
5abac5e7cdc1d67ec6ed0996a5b132fae20af530
Use the URLs input in the UI boxes
compare_text_of_urls.py
compare_text_of_urls.py
#!/usr/bin/env python
from __future__ import print_function

import json
import os
from os.path import join, dirname, abspath
import subprocess
import sys

from get_text_from_url import process_page


def main(argv=None):
    if argv is None:
        argv = sys.argv
    arg = argv[1:]

    # Enter two URLs with a space between them
    if len(arg) > 0:
        # Developers can supply URL as an argument...
        urls = arg[0]
    else:
        # ... but normally the URL comes from the allSettings.json file
        with open(os.path.expanduser("~/allSettings.json")) as settings:
            urls = json.load(settings)['source-url']

    parsed_urls = urls.strip().split(' ')
    assert len(parsed_urls) == 2, 'Two URLs not entered.'

    diff_urls(parsed_urls[0], parsed_urls[1])


def diff_urls(url1, url2):
    text1 = process_page('text_from_url1', url1)
    text2 = process_page('text_from_url2', url2)
    subprocess.check_output("./diff_text.sh", cwd=dirname(abspath(__file__)))


if __name__ == '__main__':
    main()
Python
0
@@ -578,16 +578,21 @@
 settings
+_json
 :%0A
@@ -593,27 +593,31 @@
-url
+setting
 s = json.loa
@@ -626,17 +626,50 @@
 settings
-)
+_json)%0A            url1 = settings
 %5B'source
@@ -683,82 +683,70 @@
-%0A    parsed_urls = urls.strip().split(' ')%0A    assert len(parsed_urls) ==
+            url2 = settings%5B'source-url2'%5D%0A    assert url1 and url
 2, '
@@ -786,38 +786,18 @@
 rls(
-parsed_urls%5B0%5D, parsed_urls%5B1%5D
+url1, url2
 )%0A%0A%0A
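After the patch the script reads two dedicated settings keys rather than splitting one space-separated string. A reconstruction of the patched settings branch (the 'source-url2' key name comes straight from the diff; whitespace approximate):

with open(os.path.expanduser("~/allSettings.json")) as settings_json:
    settings = json.load(settings_json)
    url1 = settings['source-url']
    url2 = settings['source-url2']

assert url1 and url2, 'Two URLs not entered.'

diff_urls(url1, url2)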
4b8dbc9516a667c944107a9ef4039d0d88ca1e84
Fix location of TTF font for watermarks
astrobin/thumbnail_processors.py
astrobin/thumbnail_processors.py
import os

from django.conf import settings

from PIL import Image, ImageOps, ImageDraw, ImageEnhance, ImageFont, ImageFilter


def rounded_corners(image, rounded = False, **kwargs):
    if rounded:
        mask = Image.open('astrobin/thumbnail-mask.png').convert('L')
        mask = mask.resize(image.size, Image.ANTIALIAS)
        image = ImageOps.fit(image, mask.size, centering = (0.5, 0.5))
        image.putalpha(mask)

    return image


def invert(image, invert = False, **kwargs):
    if invert:
        image = ImageOps.grayscale(image)
        image = ImageOps.invert(image)
        enhancer = ImageEnhance.Contrast(image)
        image = enhancer.enhance(2.5)

    return image


def watermark(image, watermark = False, **kwargs):
    if watermark:
        try:
            text = kwargs['watermark_text']
            position = kwargs['watermark_position']
            size = kwargs['watermark_size']
            opacity = kwargs['watermark_opacity']
        except KeyError:
            return image

        if text:
            watermark_image = Image.new('RGBA', image.size)
            watermark_image_shadow = Image.new('RGBA', image.size)
            draw = ImageDraw.Draw(watermark_image, 'RGBA')
            draw_shadow = ImageDraw.Draw(watermark_image_shadow, 'RGBA')

            fontsize = 1
            ttf = os.path.join(settings.STATIC_ROOT, 'fonts/arial.ttf')
            img_fraction = 0.33
            if size == 'S':
                img_fraction = 0.25
            elif size == 'L':
                img_fraction = 0.5

            font = ImageFont.truetype(ttf, fontsize)
            while font.getsize(text)[0] < img_fraction*image.size[0]:
                # iterate until the text size is just larger than the criteria
                fontsize += 1
                font = ImageFont.truetype(ttf, fontsize)

            # de-increment to be sure it is less than criteria
            fontsize -= 1
            font = ImageFont.truetype(ttf, fontsize)

            if position == 0:
                pos = (image.size[0] * .5 - font.getsize(text)[0] * .5,
                       image.size[1] * .5 - font.getsize(text)[1] * .5)
            elif position == 1:
                pos = (image.size[0] * .02, image.size[1] * .02)
            elif position == 2:
                pos = (image.size[0] * .5 - font.getsize(text)[0] * .5,
                       image.size[1] * .02)
            elif position == 3:
                pos = (image.size[0] * .98 - font.getsize(text)[0],
                       image.size[1] * .02)
            elif position == 4:
                pos = (image.size[0] * .02,
                       image.size[1] * .98 - font.getsize(text)[1])
            elif position == 5:
                pos = (image.size[0] * .5 - font.getsize(text)[0] * .5,
                       image.size[1] * .98 - font.getsize(text)[1])
            elif position == 6:
                pos = (image.size[0] * .98 - font.getsize(text)[0],
                       image.size[1] * .98 - font.getsize(text)[1])

            # Draw shadow text
            shadowcolor = 0x000000
            x = pos[0] + 1
            y = pos[1]
            draw_shadow.text((x, y), text, font=font, fill=(255, 0, 0, 255))
            watermark_image_shadow = watermark_image_shadow.filter(ImageFilter.BLUR)

            # Draw text
            draw.text(pos, text, font=font)

            # Opacity
            mask = watermark_image.convert('L').point(lambda x: min(x, opacity))
            watermark_image.putalpha(mask)
            mask_shadow = watermark_image_shadow.convert('L').point(lambda x: min(x, opacity))
            watermark_image_shadow.putalpha(mask_shadow)

            image.paste(watermark_image_shadow, None, watermark_image_shadow)
            image.paste(watermark_image, None, watermark_image)

    return image


# RGB Hitogram
# This script will create a histogram image based on the RGB content of
# an image. It uses PIL to do most of the donkey work but then we just
# draw a pretty graph out of it.
#
# May 2009, Scott McDonough, www.scottmcdonough.co.uk
#
def histogram(image, histogram = False, **kwargs):
    if not histogram:
        return image

    histWidth = kwargs['size'][0]   # Width of the histogram
    histHeight = kwargs['size'][1]  # Height of the histogram
    multiplerValue = 1.0  # The multiplier value basically increases
                          # the histogram height so that love values
                          # are easier to see, this in effect chops off
                          # the top of the histogram.
    showFstopLines = True  # True/False to hide outline
    fStopLines = 5

    # Colours to be used
    backgroundColor = (0, 0, 0, 0)  # Background color
    lineColor = (102, 102, 102)     # Line color of fStop Markers
    red = (255, 60, 60)             # Color for the red lines
    green = (51, 204, 51)           # Color for the green lines
    blue = (0, 102, 255)            # Color for the blue lines

    ##################################################################################

    hist = image.histogram()
    histMax = max(hist)  # comon color
    xScale = float(histWidth) / len(hist)  # xScaling
    yScale = float((histHeight) * multiplerValue) / histMax  # yScaling

    im = Image.new("RGBA", (histWidth, histHeight), backgroundColor)
    red_layer = Image.new("RGBA", (histWidth, histHeight), red)
    green_layer = Image.new("RGBA", (histWidth, histHeight), green)
    blue_layer = Image.new("RGBA", (histWidth, histHeight), blue)
    draw = ImageDraw.Draw(im)

    # Draw Outline is required
    if showFstopLines:
        xmarker = histWidth / fStopLines
        x = 0
        for i in range(1, fStopLines + 1):
            draw.line((x, 0, x, histHeight), fill=lineColor)
            x += xmarker
        draw.line((histWidth - 1, 0, histWidth - 1, 200), fill=lineColor)
        draw.line((0, 0, 0, histHeight), fill=lineColor)

    # Draw the RGB histogram lines
    x = 0; c = 0;
    for i in hist:
        if int(i) == 0:
            pass
        else:
            color = red
            if c > 255:
                color = green
            if c > 511:
                color = blue

            # Wow, we could _not_ be any slower here. :-/
            alpha_mask = Image.new("L", (histWidth, histHeight), 0)
            alpha_mask_draw = ImageDraw.Draw(alpha_mask)
            alpha_mask_draw.line((x, histHeight, x, histHeight - (i * yScale)), fill = 128)
            if color == red:
                im = Image.composite(red_layer, im, alpha_mask)
            elif color == green:
                im = Image.composite(green_layer, im, alpha_mask)
            elif color == blue:
                im = Image.composite(blue_layer, im, alpha_mask)

        if x > 255:
            x = 0
        else:
            x += 1
        c += 1

    return im
Python
0.000001
@@ -1344,31 +1344,38 @@
 oin(
-settings.STATIC_ROOT, '
+os.getcwd(), 'astrobin/static/
 font
f81c36d4fe31815ed6692b573ad660067151d215
Drop use of 'oslo' namespace package
zaqarclient/_i18n.py
zaqarclient/_i18n.py
# Copyright 2014 Red Hat, Inc
# All Rights .Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo.i18n import *  # noqa

_translators = TranslatorFactory(domain='zaqarclient')

# The primary translation function using the well-known name "_"
_ = _translators.primary

# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
Python
0.998108
@@ -631,17 +631,17 @@
 rom oslo
-.
+_
 i18n imp
2959d12b0131311faeb427f9022a0e1bbe8e7120
allow string keys in size legend
ggplot/components/legend.py
ggplot/components/legend.py
from matplotlib.patches import Rectangle
import matplotlib.pyplot as plt
from matplotlib.offsetbox import AnchoredOffsetbox, TextArea, DrawingArea, HPacker, VPacker
import matplotlib.lines as mlines
import operator
import numpy as np


def make_title(title):
    title = title.title()
    return TextArea(" %s " % title, textprops=dict(color="k", fontweight="bold"))


def make_marker_key(label, marker):
    idx = len(label)
    pad = 20 - idx
    lab = label[:max(idx, 20)]
    pad = " " * pad
    label = TextArea(": %s" % lab, textprops=dict(color="k"))
    viz = DrawingArea(15, 20, 0, 0)
    fontsize = 10
    key = mlines.Line2D([0.5 * fontsize], [0.75 * fontsize], marker=marker,
                        markersize=(0.5 * fontsize), c="k")
    viz.add_artist(key)
    return HPacker(children=[viz, label], align="center", pad=5, sep=0)


def make_size_key(label, size):
    label = round(label, 2)
    label = str(label)
    idx = len(label)
    pad = 20 - idx
    lab = label[:max(idx, 20)]
    pad = " " * pad
    label = TextArea(": %s" % lab, textprops=dict(color="k"))
    viz = DrawingArea(15, 20, 0, 0)
    fontsize = 10
    key = mlines.Line2D([0.5 * fontsize], [0.75 * fontsize], marker="o",
                        markersize=size / 20., c="k")
    viz.add_artist(key)
    return HPacker(children=[viz, label], align="center", pad=5, sep=0)


def make_line_key(label, color):
    label = str(label)
    idx = len(label)
    pad = 20 - idx
    lab = label[:max(idx, 20)]
    pad = " " * pad
    label = TextArea(": %s" % lab, textprops=dict(color="k"))
    viz = DrawingArea(20, 20, 0, 0)
    viz.add_artist(Rectangle((0, 5), width=16, height=5, fc=color))
    return HPacker(children=[viz, label], height=25, align="center", pad=5, sep=0)


def make_linestyle_key(label, style):
    idx = len(label)
    pad = 20 - idx
    lab = label[:max(idx, 20)]
    pad = " " * pad
    label = TextArea(": %s" % lab, textprops=dict(color="k"))
    viz = DrawingArea(30, 20, 0, 0)
    fontsize = 10
    x = np.arange(0.5, 2.25, 0.25) * fontsize
    y = np.repeat(0.75, 7) * fontsize
    key = mlines.Line2D(x, y, linestyle=style, c="k")
    viz.add_artist(key)
    return HPacker(children=[viz, label], align="center", pad=5, sep=0)


legend_viz = {
    "color": make_line_key,
    "linestyle": make_linestyle_key,
    "marker": make_marker_key,
    "size": make_size_key,
}


def draw_legend(ax, legend, legend_type, ith_legend):
    children = []
    children.append(make_title(legend_type))
    viz_handler = legend_viz[legend_type]
    legend_items = sorted(legend.items(), key=operator.itemgetter(1))
    children += [viz_handler(lab, col) for col, lab in legend_items]
    box = VPacker(children=children, align="left", pad=0, sep=5)

    # TODO: The vertical spacing between the legends isn't consistent. Should be
    # padded consistently
    anchored_box = AnchoredOffsetbox(loc=6,
                                     child=box, pad=0.,
                                     frameon=True,
                                     #bbox_to_anchor=(0., 1.02),
                                     # Spacing goes here
                                     bbox_to_anchor=(1, 0.8 - 0.35 * ith_legend),
                                     bbox_transform=ax.transAxes,
                                     borderpad=1.,
                                     )
    return anchored_box


if __name__ == "__main__":
    fig = plt.figure()
    ax = fig.add_axes([0.1, 0.1, 0.4, 0.7])
    ax.add_artist(draw_legend(ax, {1: "blah", 2: "blah2", 15: "blah4"}, "size", 1))
    plt.show(block=True)
Python
0.000012
@@ -864,24 +864,81 @@
 bel, size):%0A
+    if not isinstance(label, (type(%22%22), type(u%22%22))):%0A    
     label = 
@@ -953,16 +953,20 @@
 bel, 2)%0A
+    
     labe
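With the guard in place, string labels skip the numeric rounding that only makes sense for numbers. The patched head of make_size_key:

def make_size_key(label, size):
    if not isinstance(label, (type(""), type(u""))):
        label = round(label, 2)
        label = str(label)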
de44f08772dfeca31635167019f051ca16392dd5
Fix typo and send proper dbtime to statsd
zephyr/middleware.py
zephyr/middleware.py
from __future__ import absolute_import

from django.conf import settings
from zephyr.decorator import RequestVariableMissingError, RequestVariableConversionError
from zephyr.lib.response import json_error
from django.db import connection
from zephyr.lib.utils import statsd
from zephyr.lib.cache import get_memcached_time, get_memcached_requests

import logging
import time

logger = logging.getLogger('humbug.requests')

def async_request_stop(request):
    request._time_stopped = time.time()
    request._memcached_time_stopped = get_memcached_time()
    request._memcached_requests_stopped = get_memcached_requests()

def async_request_restart(request):
    request._time_restarted = time.time()
    request._memcached_time_restarted = get_memcached_time()
    request._memcached_requests_restarted = get_memcached_requests()

class LogRequests(object):
    def process_request(self, request):
        request._time_started = time.time()
        request._memcached_time_start = get_memcached_time()
        request._memcached_requests_start = get_memcached_requests()

    def process_response(self, request, response):
        def timedelta_ms(timedelta):
            return timedelta * 1000

        def format_timedelta(timedelta):
            if (timedelta >= 1):
                return "%.1fs" % (timedelta)
            return "%.0fms" % (timedelta_ms(timedelta),)

        # For statsd timer name
        if request.get_full_path() == '/':
            statsd_path = 'webreq'
        else:
            statsd_path = "webreq.%s" % (request.get_full_path()[1:].replace('/', '.'),)

        # The reverse proxy might have sent us the real external IP
        remote_ip = request.META.get('HTTP_X_REAL_IP')
        if remote_ip is None:
            remote_ip = request.META['REMOTE_ADDR']

        time_delta = -1
        # A time duration of -1 means the StartLogRequests middleware
        # didn't run for some reason
        optional_orig_delta = ""
        if hasattr(request, '_time_started'):
            time_delta = time.time() - request._time_started
        if hasattr(request, "_time_stopped"):
            orig_time_delta = time_delta
            time_delta = ((request._time_stopped - request._time_started) +
                          (time.time() - request._time_restarted))
            optional_orig_delta = " (lp: %s)" % (format_timedelta(orig_time_delta),)

        memcached_output = ""
        if hasattr(request, '_memcached_time_start'):
            memcached_time_delta = get_memcached_time() - request._memcached_time_start
            memcached_count_delta = get_memcached_requests() - request._memcached_requests_start
            if hasattr(request, "_memcached_requests_stopped"):
                # (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted)
                memcached_time_delta += (request._memcached_time_stopped -
                                         request._memcached_time_restarted)
                memcached_count_delta += (request._memcached_requests_stopped -
                                          request._memcached_requests_restarted)

            if (memcached_time_delta > 0.005):
                memcached_output = " (mem: %s/%s)" % (format_timedelta(memcached_time_delta),
                                                      memcached_count_delta)

        # Get the amount of time spent doing database queries
        db_time_output = ""
        if len(connection.queries) > 0:
            query_time = sum(float(query.get('time', 0)) for query in connection.queries)
            db_time_output = " (db: %s/%sq)" % (format_timedelta(query_time),
                                                len(connection.queries))

        # Log ms, db ms, and num queries to statsd
        statsd.timing("%s.dbtime" % (statsd_path,), timedelta_ms(time_delta))
        statsd.incr("%s.dbq" % (statsd_path, ), len(connection.queries))
        statsd.timing("%s.total" % (statsd_path,), timedelta_ms(time_delta))

        # Get the requestor's email address and client, if available.
        try:
            email = request._email
        except Exception:
            email = "unauth"
        try:
            client = request.client.name
        except Exception:
            client = "?"

        logger.info('%-15s %-7s %3d %5s%s%s%s %s (%s via %s)'
                    % (remote_ip, request.method, response.status_code,
                       format_timedelta(time_delta), optional_orig_delta,
                       memcached_output, db_time_output,
                       request.get_full_path(), email, client))

        # Log some additional data whenever we return certain 40x errors
        if 400 <= response.status_code < 500 and response.status_code not in [401, 404, 405]:
            content = response.content
            if len(content) > 100:
                content = "[content more than 100 characters]"
            logger.info('status=%3d, data=%s, uid=%s' % (response.status_code, content, email))
        return response

class JsonErrorHandler(object):
    def process_exception(self, request, exception):
        if hasattr(exception, 'to_json_error_msg') and callable(exception.to_json_error_msg):
            return json_error(exception.to_json_error_msg())
        return None

# Monkeypatch in time tracking to the Django non-debug cursor
# Code comes from CursorDebugWrapper
def wrapper_execute(self, action, sql, params=()):
    self.set_dirty()
    start = time.time()
    try:
        return action(sql, params)
    finally:
        stop = time.time()
        duration = stop - start
        self.db.queries.append({
            'time': "%.3f" % duration,
        })

from django.db.backends.util import CursorWrapper
def cursor_execute(self, sql, params=()):
    return wrapper_execute(self, self.cursor.execute, sql, params)
CursorWrapper.execute = cursor_execute

def cursor_executemany(self, sql, params=()):
    return wrapper_execute(self, self.cursor.executemany, sql, params)
CursorWrapper.executemany = cursor_executemany
Python
0
@@ -3832,26 +3832,26 @@
 elta_ms(
-time_delta
+query_time
 ))%0A
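The typo: the dbtime metric was being fed time_delta (total request time) rather than the accumulated query_time. The patched statsd block:

# Log ms, db ms, and num queries to statsd
statsd.timing("%s.dbtime" % (statsd_path,), timedelta_ms(query_time))
statsd.incr("%s.dbq" % (statsd_path, ), len(connection.queries))
statsd.timing("%s.total" % (statsd_path,), timedelta_ms(time_delta))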
962114f65db5de4a0e58ebec93ec8f06147ae790
add RAMONVolume#data
ndio/ramon/RAMONVolume.py
ndio/ramon/RAMONVolume.py
from __future__ import absolute_import

from .enums import *
from .errors import *

import numpy

from .RAMONBase import RAMONBase


class RAMONVolume(RAMONBase):
    """
    RAMONVolume Object for storing neuroscience data with a voxel volume
    """
    def __init__(self,
                 xyz_offset=(0, 0, 0),
                 resolution=0,
                 cutout=None,
                 voxels=None,
                 id=DEFAULT_ID,
                 confidence=DEFAULT_CONFIDENCE,
                 dynamic_metadata=DEFAULT_DYNAMIC_METADATA,
                 status=DEFAULT_STATUS,
                 author=DEFAULT_AUTHOR):
        """
        Initialize a new RAMONVolume object. Inherits attributes from
        RAMONBase as well as:

        Arguments:
            xyz_offset (int[3] : (0, 0, 0)): x,y,z coordinates of the minimum
                corner of the cube (if data is a cutout), otherwise empty
            resolution (int : 0): level in the database resolution hierarchy
            cutout (numpy.ndarray): dense matrix of data
            voxels: Unused for now
        """
        self.xyz_offset = xyz_offset
        self.resolution = resolution
        self.cutout = cutout
        self.voxels = voxels

        RAMONBase.__init__(self, id=id, confidence=confidence,
                           dynamic_metadata=dynamic_metadata,
                           status=status, author=author)
Python
0
@@ -1399,8 +1399,560 @@
 author)%0A
+%0A    def data(self):%0A        %22%22%22%0A        Gets the data from the volume and pumps it into a numpy.ndarray format,%0A        regardless of whether it's stored in %60cutout%60 or %60voxels%60. Returns it%0A        as though it were stored in %60cutout%60.%0A%0A        This is useful for cases where you need to operate on a 3D matrix.%0A%0A        Arguments:%0A            None%0A%0A        Returns:%0A            numpy.ndarray%0A        %22%22%22%0A        if self.cutout:%0A            return self.cutout%0A        else:%0A            raise NotImplementedError(%22Cannot convert from voxel list yet.%22)%0A
1cba70e91b6592253a74d2c030e9c57faf0a1485
add header to backend.py
zmq/sugar/backend.py
zmq/sugar/backend.py
# this will be try/except when other
try:
    from zmq.core import (
        Context,
        Socket, IPC_PATH_MAX_LEN,
        Frame, Message, Stopwatch,
        device, proxy,
        strerror, zmq_errno,
        zmq_poll,
        zmq_version_info,
        constants,
    )
except ImportError:
    # here will be the cffi backend import, when it exists
    raise

__all__ = [
    'Context',
    'Socket',
    'Frame',
    'Message',
    'Stopwatch',
    'device',
    'proxy',
    'zmq_poll',
    'strerror',
    'zmq_errno',
    'constants',
    'zmq_version_info',
    'IPC_PATH_MAX_LEN',
]
Python
0.000001
@@ -1,8 +1,623 @@
+%22%22%22Import basic exposure of libzmq C API as a backend%22%22%22%0A%0A#-----------------------------------------------------------------------------%0A#  Copyright (C) 2013 Brian Granger, Min Ragan-Kelley%0A#%0A#  This file is part of pyzmq%0A#%0A#  Distributed under the terms of the New BSD License.  The full license is in%0A#  the file COPYING.BSD, distributed as part of this software.%0A#-----------------------------------------------------------------------------%0A%0A#-----------------------------------------------------------------------------%0A# Imports%0A#-----------------------------------------------------------------------------%0A%0A
 # this w
398c190bd96c2e398cdb6cafff526efcc27f9f77
Improve SpatialContext comparison
niche_vlaanderen/niche.py
niche_vlaanderen/niche.py
import rasterio
import numpy as np

from .vegetation import Vegetation
from .acidity import Acidity
from .nutrient_level import NutrientLevel

import logging
import os.path

_allowed_input = ["soil_code", "mlw", "msw", "mhw", "seepage",
                  "nutrient_level", "inundation_acidity", "inundation_nutrient",
                  "nitrogen_atmospheric", "nitrogen_animal", "nitrogen_fertilizer",
                  "management", "conductivity", "rainwater",
                  "inundation_vegetation"]

_minimal_input = ["soil_code", "mlw", "msw", "mhw", "seepage",
                  "inundation_acidity", "nitrogen_atmospheric",
                  "nitrogen_animal", "nitrogen_fertilizer", "management",
                  "conductivity", "rainwater", "inundation_vegetation",
                  "inundation_nutrient"]

logging.basicConfig()


class SpatialContext(object):
    """Stores the spatial context of the grids in niche
    """
    def __init__(self, dst):
        self.affine = dst.affine
        self.width = dst.width
        self.heigth = dst.height
        self.crs = dst.crs

    def compare(self, other):
        """Compare two SpatialContexts

        Small differences (<1cm are allowed)
        """
        if self.affine.almost_equals(other.affine, precision=0.01):
            return True
        else:
            print(self.affine)
            print(other.affine)
            return False


class Niche(object):
    '''
    '''

    def __init__(self):
        self._inputfiles = dict()
        self._inputarray = dict()
        self._abiotic = dict()
        self._result = dict()
        self.log = logging.getLogger()
        self._context = None

    def set_input(self, type, path, set_spatial_context=False):
        if not set_spatial_context and self._context is None:
            self.log.error("Spatial context not yet set")
            return False

        if set_spatial_context and self._context is True:
            self.log.error("Spatial context can only be set once")
            return False

        # check type is valid value from list
        if (type not in _allowed_input):
            self.log.error("Unrecognized type %s" % type)
            return False

        # check file exists
        if not os.path.exists(path):
            self.log.error("File %s does not exist" % path)
            return False

        with rasterio.open(path) as dst:
            sc_new = SpatialContext(dst)
            if set_spatial_context:
                self._context = sc_new
            else:
                if not self._context.compare(sc_new):
                    self.log.error("Spatial context differs")
                    self._context.affine
                    sc_new.affine
                    return False

        self._inputfiles[type] = path
        return True

    def _check_input_files(self):
        """ basic input checks (valid files etc)

        """
        # check all necessary files are set
        if not ["mhw", "mlw"] in self._inputfiles.keys():
            self.log.error("MHW and MLW must be defined")

        # check files exist
        for f in self._inputfiles:
            if not os.path.exists(f):
                self.log.error("File %s does not exist" % f)
                return False

        # check boundaries overlap with study area + same grid
        # we should also check for files top/bottom and bottom/top
        for f in self._inputfiles:
            try:
                dst = rasterio.open(f)
            except:
                self.log.error("Error while opening file %s" % f)

        # Load every input_file in the input_array
        inputarray = dict()
        for f in self._inputfiles:
            dst = rasterio.open(self._inputfiles[f])
            nodata = dst.nodatavals[0]

            band = dst.read(1)
            # create a mask for no-data values, taking into account the data-types
            if band.dtype == 'float32':
                band[band == nodata] = np.nan
            else:
                band[band == nodata] = -99

            inputarray[f] = band

        # check if valid values are used in inputarrays
        # check for valid datatypes - values will be checked in the low-level api
        # (eg soilcode present in codetable)

        if np.any(inputarray.mhw <= inputarray.mlw):
            self.log.error("Error: not all MHW values are higher than MLW")
            return False

        # if all is succesfull:
        self._inputarray = inputarray

        return(True)

    def run(self):
        """
        Runs niche Vlaanderen and saves the predicted vegetation to 17 grids.
        """

        missing_keys = set(_minimal_input) - set(self._inputfiles.keys())
        if len(missing_keys) > 0:
            print("error, different obliged keys are missing")
            print(missing_keys)
            return False

        self._check_input_files

        # Load every input_file in the input_array
        for f in self._inputfiles:
            dst = rasterio.open(self._inputfiles[f])
            self._inputarray[f] = dst.read(1)

        nl = NutrientLevel()

        # TODO: error handling
        self._abiotic["nutrient_level"] = \
            nl.calculate(self._inputarray["soil_code"],
                         self._inputarray["msw"],
                         self._inputarray["nitrogen_atmospheric"],
                         self._inputarray["nitrogen_animal"],
                         self._inputarray["nitrogen_fertilizer"],
                         self._inputarray["management"],
                         self._inputarray["inundation_nutrient"])

        acidity = Acidity()
        self._abiotic["acidity"] = acidity.calculate(
            self._inputarray["soil_code"],
            self._inputarray["mlw"],
            self._inputarray["inundation_acidity"],
            self._inputarray["seepage"],
            self._inputarray["conductivity"],
            self._inputarray["rainwater"])

        vegetation = Vegetation()
        self._vegetation = vegetation.calculate(
            soil_code=self._inputarray["soil_code"],
            nutrient_level=self._abiotic["nutrient_level"],
            acidity=self._abiotic["acidity"],
            inundation=self._inputarray["inundation_vegetation"],
            mhw=self._inputarray["mhw"],
            mlw=self._inputarray["mlw"])

    def write(self, folder):
        # TODO: check calculate has been done

        for vi in self._vegetation:
            with rasterio.open(folder + '/V%s.tif' % vi, 'w', driver='GTiff',
                               height=self._context.heigth,
                               width=self._context.width,
                               crs=self._context.crs,
                               affine=self._context.affine,
                               count=1,
                               dtype="int16") as dst:
                dst.write(self._vegetation[vi], 1)

        with rasterio.open(folder + '/nutrient_level.tif', 'w', driver='GTiff',
                           height=self._context.heigth,
                           width=self._context.width,
                           crs=self._context.crs,
                           affine=self._context.affine,
                           count=1,
                           dtype="int16") as dst:
            nutrient_level = self._abiotic["nutrient_level"].astype("int16")
            dst.write(nutrient_level, 1)
Python
0.000001
@@ -1023,23 +1023,22 @@
 def 
-compare
+__eq__
 (self, o
@@ -1208,17 +1208,17 @@
 on=0.01)
-:
+%5C
 %0A
@@ -1226,96 +1226,112 @@
-return True%0A        else:%0A            print(self.affine)%0A            print(other.affine)
+ and self.width == other.width and self.heigth == other.heigth:%0A            return True%0A        else: 
 %0A
@@ -2512,24 +2512,16 @@
 text
-.compare(
+!= 
 sc_new
-)
 :%0A
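The patch renames compare to the __eq__ dunder, adds width/height checks, and drops the debug prints; per the last hunk, the call site in set_input becomes a plain != comparison against sc_new. A reconstruction of the patched method (indentation approximate):

def __eq__(self, other):
    """Compare two SpatialContexts

    Small differences (<1cm are allowed)
    """
    if self.affine.almost_equals(other.affine, precision=0.01) \
            and self.width == other.width and self.heigth == other.heigth:
        return True
    else:
        return False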
9d3d06e760cb4210405a3b720eb67c5da0478f72
remove succes_message variable
sync_settings/thread_progress.py
sync_settings/thread_progress.py
# -*- coding: utf-8 -*-

#Credits to @wbond package_control

import sublime, threading

class ThreadProgress():
    """
    Animates an indicator, [= ], in the status area while a thread runs

    :param thread: The thread to track for activity
    :param message: The message to display next to the activity indicator
    :param success_message: The message to display once the thread is complete
    """

    def __init__(self, thread_target, message, success_message = ''):
        self.message = message
        self.success_message = success_message
        self.addend = 1
        self.size = 8
        self.thread = threading.Thread(target=thread_target)
        self.thread.start()
        sublime.set_timeout(lambda: self.run(0), 100)

    def run(self, i):
        if not self.thread.is_alive():
            if self.success_message != "":
                self.success_message = 'Sync Settings: %s' %(self.success_message)
                sublime.status_message(self.success_message)
            return

        before = i % self.size
        after =(self.size - 1) - before

        sublime.status_message('Sync Settings: %s [%s=%s]' %(self.message, ' ' * before, ' ' * after))

        if not after:
            self.addend = -1
        if not before:
            self.addend = 1
        i += self.addend

        sublime.set_timeout(lambda: self.run(i), 100)
Python
0.000774
@@ -465,33 +465,11 @@
 sage
-, success_message = ''
 ):%0A
+    
@@ -495,51 +495,8 @@
 age%0A
-        self.success_message = success_message%0A
@@ -682,16 +682,16 @@
 lf, i):%0A
+    
 if n
@@ -721,171 +721,8 @@
 ():%0A
-            if self.success_message != %22%22:%0A                self.success_message = 'Sync Settings: %25s' %25(self.success_message)%0A                sublime.status_message(self.success_message)%0A
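With success_message removed, the constructor signature narrows and run() now simply returns once the thread has finished. A reconstruction of the two patched methods (the animation code below the early return is unchanged):

def __init__(self, thread_target, message):
    self.message = message
    self.addend = 1
    self.size = 8
    self.thread = threading.Thread(target=thread_target)
    self.thread.start()
    sublime.set_timeout(lambda: self.run(0), 100)

def run(self, i):
    if not self.thread.is_alive():
        return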
c691c256682bec5f9a242ab71ab42d296bbf88a9
Add `Post`, `Tag` models to Admin
nightreads/posts/admin.py
nightreads/posts/admin.py
from django.contrib import admin

# Register your models here.
Python
0.000001
@@ -31,33 +31,86 @@
 in%0A%0A
-# Register your models here.
+from .models import Post, Tag%0A%0Aadmin.site.register(Post)%0Aadmin.site.register(Tag)
 %0A
0915a9d0ec055ebb0115f631baa78d8d15563918
Use logger
nipype/interfaces/bids.py
nipype/interfaces/bids.py
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
""" Set of interfaces that allow interaction with BIDS data. Currently
available interfaces are:

BIDSDataGrabber: Query data from BIDS dataset using pybids grabbids.

Change directory to provide relative paths for doctests
>>> import os
>>> import bids
>>> filepath = os.path.realpath(os.path.dirname(bids.__file__))
>>> datadir = os.path.realpath(os.path.join(filepath, 'grabbids/tests/data/'))
>>> os.chdir(datadir)

"""

from .base import (traits,
                   DynamicTraitedSpec,
                   Directory,
                   BaseInterface,
                   isdefined,
                   Str,
                   Undefined)

try:
    from bids.grabbids import BIDSLayout
except ImportError:
    have_pybids = False
else:
    have_pybids = True

from warnings import warn


class BIDSDataGrabberInputSpec(DynamicTraitedSpec):
    base_dir = Directory(exists=True,
                         desc='Path to BIDS Directory.',
                         mandatory=True)
    output_query = traits.Dict(key_trait=Str,
                               value_trait=traits.Dict,
                               desc='Queries for outfield outputs')
    raise_on_empty = traits.Bool(True, usedefault=True,
                                 desc='Generate exception if list is empty '
                                      'for a given field')
    return_type = traits.Enum('filename', 'namedtuple', usedefault=True)


class BIDSDataGrabber(BaseInterface):
    """ BIDS datagrabber module that wraps around pybids to allow arbitrary
    querying of BIDS datasets.

    Examples
    --------

    >>> from nipype.interfaces.bids import BIDSDataGrabber
    >>> from os.path import basename
    >>> import pprint

    Select all files from a BIDS project

    >>> bg = BIDSDataGrabber()
    >>> bg.inputs.base_dir = 'ds005/'
    >>> results = bg.run()
    >>> len(results.outputs.outfield) # doctest: +ALLOW_UNICODE
    135

    Using dynamically created, user-defined input fields, filter files based
    on BIDS entities.

    >>> bg = BIDSDataGrabber(infields = ['subject', 'run'])
    >>> bg.inputs.base_dir = 'ds005/'
    >>> bg.inputs.subject = '01'
    >>> bg.inputs.run = '01'
    >>> results = bg.run()
    >>> basename(results.outputs.outfield[0]) # doctest: +ALLOW_UNICODE
    'sub-01_task-mixedgamblestask_run-01_bold.nii.gz'

    Using user-defined output fields, return different types of outputs,
    filtered on common entities
    filter files based on BIDS entities.

    >>> bg = BIDSDataGrabber(infields = ['subject'], outfields = ['func', 'anat'])
    >>> bg.inputs.base_dir = 'ds005/'
    >>> bg.inputs.subject = '01'
    >>> bg.inputs.output_query['func'] = dict(modality='func')
    >>> bg.inputs.output_query['anat'] = dict(modality='anat')
    >>> results = bg.run()
    >>> basename(results.outputs.func[0]) # doctest: +ALLOW_UNICODE
    'sub-01_task-mixedgamblestask_run-01_bold.nii.gz'

    >>> basename(results.outputs.anat[0]) # doctest: +ALLOW_UNICODE
    'sub-01_T1w.nii.gz'

    """
    input_spec = BIDSDataGrabberInputSpec
    output_spec = DynamicTraitedSpec
    _always_run = True

    def __init__(self, infields=None, outfields=None, **kwargs):
        """
        Parameters
        ----------
        infields : list of str
            Indicates the input fields to be dynamically created

        outfields: list of str
            Indicates output fields to be dynamically created.
            If no matching items, returns Undefined.
        """
        if not outfields:
            outfields = []
        if not infields:
            infields = []

        super(BIDSDataGrabber, self).__init__(**kwargs)
        undefined_traits = {}
        # used for mandatory inputs check
        self._infields = infields
        self._outfields = outfields
        for key in infields:
            self.inputs.add_trait(key, traits.Any)
            undefined_traits[key] = Undefined

        if not isdefined(self.inputs.output_query):
            self.inputs.output_query = {}

        self.inputs.trait_set(trait_change_notify=False, **undefined_traits)

    def _run_interface(self, runtime):
        if not have_pybids:
            raise ImportError("The BIDSEventsGrabber interface requires pybids."
                              " Please make sure it is installed.")
        return runtime

    def _list_outputs(self):
        if not self._outfields:
            self._outfields = ['outfield']
            self.inputs.output_query = {'outfield' : {}}
        else:
            for key in self._outfields:
                if key not in self.inputs.output_query:
                    raise ValueError("Define query for all outputs")

        for key in self._infields:
            value = getattr(self.inputs, key)
            if not isdefined(value):
                msg = "%s requires a value for input '%s' because" \
                      " it was listed in 'infields'" % \
                      (self.__class__.__name__, key)
                raise ValueError(msg)

        layout = BIDSLayout(self.inputs.base_dir)

        filters = {i: getattr(self.inputs, i) for i in self._infields}

        outputs = {}
        for key, query in self.inputs.output_query.items():
            args = query.copy()
            args.update(filters)
            filelist = layout.get(return_type='file', **args)
            if len(filelist) == 0:
                msg = 'Output key: %s returned no files' % (key)
                if self.inputs.raise_on_empty:
                    raise IOError(msg)
                else:
                    warn(msg)
                    filelist = Undefined
            else:
                outputs[key] = filelist

        return outputs
Python
0.000004
@@ -555,16 +555,38 @@
 r)%0A%0A%22%22%22%0A
+from .. import logging
 %0Afrom .b
@@ -917,33 +917,47 @@
 ue%0A%0A
-from warnings import warn
+LOGGER = logging.getLogger('workflows')
 %0A%0Acl
@@ -5783,12 +5783,22 @@
+LOGGER.
 warn
+ing
 (msg
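Decoded, the patch imports nipype's own logging package after the module docstring, creates a module-level logger in place of the warnings import, and routes the empty-result message through it. The changed lines, reconstructed:

from .. import logging

LOGGER = logging.getLogger('workflows')

# ...and in _list_outputs(), the empty-result branch becomes:
LOGGER.warning(msg)
filelist = Undefined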
e3226faeabc7e424442484355e27ed2861adcc59
Fix setmeta test because tab width changed.
gslib/tests/test_setmeta.py
gslib/tests/test_setmeta.py
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import gslib.tests.testcase as testcase
from gslib.util import Retry
from gslib.tests.util import ObjectToURI as suri


class TestSetMeta(testcase.GsUtilIntegrationTestCase):
  """Integration tests for setmeta command."""

  def test_initial_metadata(self):
    objuri = suri(self.CreateObject(contents='foo'))
    inpath = self.CreateTempFile()
    ct = 'image/gif'
    self.RunGsUtil(['-h', 'x-goog-meta-xyz:abc', '-h', 'Content-Type:%s' % ct,
                    'cp', inpath, objuri])
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check1():
      stdout = self.RunGsUtil(['ls', '-L', objuri], return_stdout=True)
      self.assertRegexpMatches(stdout, 'Content-Type:\s+%s' % ct)
      self.assertRegexpMatches(stdout, 'x-goog-meta-xyz:\s+abc')
    _Check1()

  def test_overwrite_existing(self):
    objuri = suri(self.CreateObject(contents='foo'))
    inpath = self.CreateTempFile()
    self.RunGsUtil(['-h', 'x-goog-meta-xyz:abc', '-h',
                    'Content-Type:image/gif', 'cp', inpath, objuri])
    self.RunGsUtil(['setmeta', '-n', '-h', 'Content-Type:text/html',
                    '-h', 'x-goog-meta-xyz', objuri])
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check1():
      stdout = self.RunGsUtil(['ls', '-L', objuri], return_stdout=True)
      self.assertRegexpMatches(stdout, 'Content-Type:\s+text/html')
      self.assertNotIn('xyz', stdout)
    _Check1()

  def test_duplicate_header_removal(self):
    stderr = self.RunGsUtil(
        ['setmeta', '-h', 'Content-Type:text/html', '-h', 'Content-Type',
         'gs://foo/bar'], expected_status=1, return_stderr=True)
    self.assertIn('Each header must appear at most once', stderr)

  def test_duplicate_header(self):
    stderr = self.RunGsUtil(
        ['setmeta', '-h', 'Content-Type:text/html', '-h', 'Content-Type:foobar',
         'gs://foo/bar'], expected_status=1, return_stderr=True)
    self.assertIn('Each header must appear at most once', stderr)

  def test_invalid_non_ascii_custom_header(self):
    unicode_header = u'x-goog-meta-soufflé:5'
    unicode_header_bytes = unicode_header.encode('utf-8')
    stderr = self.RunGsUtil(
        ['setmeta', '-h', unicode_header_bytes, 'gs://foo/bar'],
        expected_status=1, return_stderr=True)
    self.assertIn('Invalid non-ASCII header', stderr)

  def test_valid_non_ascii_custom_header(self):
    objuri = self.CreateObject(contents='foo')
    unicode_header = u'x-goog-meta-dessert:soufflé'
    unicode_header_bytes = unicode_header.encode('utf-8')
    self.RunGsUtil(['setmeta', '-h', unicode_header_bytes, suri(objuri)])
    # Use @Retry as hedge against bucket listing eventual consistency.
    @Retry(AssertionError, tries=3, timeout_secs=1)
    def _Check1():
      stdout = self.RunGsUtil(['ls', '-L', suri(objuri)], return_stdout=True)
      stdout = stdout.decode('utf-8')
      self.assertIn(u'x-goog-meta-dessert:\t\tsoufflé', stdout)
    _Check1()

  def test_disallowed_header(self):
    stderr = self.RunGsUtil(
        ['setmeta', '-h', 'Content-Length:5', 'gs://foo/bar'],
        expected_status=1, return_stderr=True)
    self.assertIn('Invalid or disallowed header', stderr)

  def test_setmeta_bucket(self):
    bucket_uri = self.CreateBucket()
    stderr = self.RunGsUtil(
        ['setmeta', '-h', 'x-goog-meta-foo:5', suri(bucket_uri)],
        expected_status=1, return_stderr=True)
    self.assertIn('must name an object', stderr)

  def test_setmeta_invalid_arg(self):
    stderr = self.RunGsUtil(
        ['setmeta', '-h', 'foo:bar:baz', 'gs://foo/bar'], expected_status=1,
        return_stderr=True)
    self.assertIn('must be either header or header:value', stderr)

  def test_invalid_non_ascii_header_value(self):
    unicode_header = u'Content-Type:dessert/soufflé'
    unicode_header_bytes = unicode_header.encode('utf-8')
    stderr = self.RunGsUtil(
        ['setmeta', '-h', unicode_header_bytes, 'gs://foo/bar'],
        expected_status=1, return_stderr=True)
    self.assertIn('Invalid non-ASCII header', stderr)
Python
0
@@ -3659,10 +3659,8 @@
 rt:%5C
-t%5C
 tsou
95e36e9db9e28b808601df9c82ede19ea3486d7c
Add checks for lingering listeners
numba/tests/test_event.py
numba/tests/test_event.py
import unittest
import string

import numpy as np

from numba import njit, jit, literal_unroll
from numba.core import event as ev
from numba.tests.support import TestCase


class TestEvent(TestCase):
    def test_recording_listener(self):
        @njit
        def foo(x):
            return x + x

        with ev.install_recorder("numba:compile") as rec:
            foo(1)

        self.assertIsInstance(rec, ev.RecordingListener)
        # Check there must be at least two events.
        # Because there must be a START and END for the compilation of foo()
        self.assertGreaterEqual(len(rec.buffer), 2)

    def test_compiler_lock_event(self):
        @njit
        def foo(x):
            return x + x

        foo(1)
        md = foo.get_metadata(foo.signatures[0])
        lock_duration = md['timers']['compiler_lock']
        self.assertIsInstance(lock_duration, float)
        self.assertGreater(lock_duration, 0)

    def test_llvm_lock_event(self):
        @njit
        def foo(x):
            return x + x

        foo(1)
        md = foo.get_metadata(foo.signatures[0])
        lock_duration = md['timers']['llvm_lock']
        self.assertIsInstance(lock_duration, float)
        self.assertGreater(lock_duration, 0)

    def test_install_listener(self):
        ut = self

        class MyListener(ev.Listener):
            def on_start(self, event):
                ut.assertEqual(event.status, ev.EventStatus.START)
                ut.assertEqual(event.kind, "numba:compile")
                ut.assertIs(event.data["dispatcher"], foo)
                dispatcher = event.data["dispatcher"]
                ut.assertIs(dispatcher, foo)
                # Check that the compiling signature is NOT in the overloads
                ut.assertNotIn(event.data["args"], dispatcher.overloads)

            def on_end(self, event):
                ut.assertEqual(event.status, ev.EventStatus.END)
                ut.assertEqual(event.kind, "numba:compile")
                dispatcher = event.data["dispatcher"]
                ut.assertIs(dispatcher, foo)
                # Check that the compiling signature is in the overloads
                ut.assertIn(event.data["args"], dispatcher.overloads)

        @njit
        def foo(x):
            return x

        listener = MyListener()
        with ev.install_listener("numba:compile", listener) as yielded:
            foo(1)

        # Check that the yielded value is the same listener
        self.assertIs(listener, yielded)

    def test_global_register(self):
        ut = self

        class MyListener(ev.Listener):
            def on_start(self, event):
                ut.assertEqual(event.status, ev.EventStatus.START)
                ut.assertEqual(event.kind, "numba:compile")
                # Check it is the same dispatcher
                dispatcher = event.data["dispatcher"]
                ut.assertIs(dispatcher, foo)
                # Check that the compiling signature is NOT in the overloads
                ut.assertNotIn(event.data["args"], dispatcher.overloads)

            def on_end(self, event):
                ut.assertEqual(event.status, ev.EventStatus.END)
                ut.assertEqual(event.kind, "numba:compile")
                # Check it is the same dispatcher
                dispatcher = event.data["dispatcher"]
                ut.assertIs(dispatcher, foo)
                # Check that the compiling signature is in the overloads
                ut.assertIn(event.data["args"], dispatcher.overloads)

        @njit
        def foo(x):
            return x

        listener = MyListener()
        ev.register("numba:compile", listener)
        foo(1)
        ev.unregister("numba:compile", listener)

    def test_lifted_dispatcher(self):
        @jit
        def foo():
            object()  # to trigger loop-lifting
            c = 0
            for i in range(10):
                c += i
            return c

        with ev.install_recorder("numba:compile") as rec:
            foo()

        # Check that there are 4 events.
        # Two for `foo()` and two for the lifted loop.
        self.assertGreaterEqual(len(rec.buffer), 4)

        cres = foo.overloads[foo.signatures[0]]
        [ldisp] = cres.lifted
        lifted_cres = ldisp.overloads[ldisp.signatures[0]]
        self.assertIsInstance(
            lifted_cres.metadata["timers"]["compiler_lock"],
            float,
        )
        self.assertIsInstance(
            lifted_cres.metadata["timers"]["llvm_lock"],
            float,
        )

    def test_timing_property(self):
        a = tuple(string.ascii_lowercase)

        @njit
        def bar(x):
            acc = 0
            for i in literal_unroll(a):
                if i in {'1': x}:
                    acc += 1
                else:
                    acc += np.sqrt(x[0, 0])
            return np.linalg.svd(x), acc

        @njit
        def foo(x):
            return bar(np.zeros((x, x)))

        foo(1)

        def get_timers(fn):
            md = fn.get_metadata(fn.signatures[0])
            return md['timers']

        foo_timers = get_timers(foo)
        bar_timers = get_timers(bar)

        # Check: time spent in bar() must be longer than in foo()
        self.assertLess(bar_timers['llvm_lock'], foo_timers['llvm_lock'])
        self.assertLess(bar_timers['compiler_lock'],
                        foo_timers['compiler_lock'])

        # Check: time spent in LLVM lock must be less than in compiler
        self.assertLess(foo_timers['llvm_lock'], foo_timers['compiler_lock'])
        self.assertLess(bar_timers['llvm_lock'], bar_timers['compiler_lock'])


if __name__ == "__main__":
    unittest.main()
Python
0
@@ -193,16 +193,344 @@ tCase):%0A +%0A def setUp(self):%0A # Trigger compilation to ensure all listeners are initialized%0A njit(lambda: None)()%0A self.__registered_listeners = len(ev._registered)%0A%0A def tearDown(self):%0A # Check there is no lingering listeners%0A self.assertEqual(len(ev._registered), self.__registered_listeners)%0A%0A def
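Decoded, the hunk inserts setUp/tearDown hooks right after `class TestEvent(TestCase):`; an approximate reconstruction, with indentation inferred:

```python
class TestEvent(TestCase):

    def setUp(self):
        # Trigger compilation to ensure all listeners are initialized
        njit(lambda: None)()
        self.__registered_listeners = len(ev._registered)

    def tearDown(self):
        # Check there is no lingering listeners
        self.assertEqual(len(ev._registered), self.__registered_listeners)
```

Recording the listener count in setUp and re-checking it in tearDown makes every test in the class fail if it leaks a registered listener.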
afea4f0732e68f5cbb38f5a8ac194698aec8e520
Allow any of the previous tasks to satisfy requirements.
taskflow/patterns/linear_flow.py
taskflow/patterns/linear_flow.py
# -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (C) 2012 Yahoo! Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from taskflow import exceptions as exc from taskflow.patterns import ordered_flow def _convert_to_set(items): if not items: return set() if isinstance(items, set): return items if isinstance(items, dict): return items.keys() return set(iter(items)) class Flow(ordered_flow.Flow): """A linear chain of tasks that can be applied as one unit or rolled back as one unit. Each task in the chain may have requirements which are satisfied by the previous task in the chain.""" def __init__(self, name, tolerant=False, parents=None): super(Flow, self).__init__(name, tolerant, parents) self._tasks = [] def _fetch_task_inputs(self, task): inputs = {} if self.results: (_last_task, last_results) = self.results[-1] for k in task.requires(): if last_results and k in last_results: inputs[k] = last_results[k] return inputs def _validate_provides(self, task): requires = _convert_to_set(task.requires()) last_provides = set() last_provider = None if self._tasks: last_provider = self._tasks[-1] last_provides = _convert_to_set(last_provider.provides()) # Ensure that the last task provides all the needed input for this # task to run correctly. req_diff = requires.difference(last_provides) if req_diff: if last_provider is None: msg = ("There is no previous task providing the outputs %s" " for %s to correctly execute.") % (req_diff, task) else: msg = ("%s does not provide the needed outputs %s for %s to" " correctly execute.") msg = msg % (last_provider, req_diff, task) raise exc.InvalidStateException(msg) def add(self, task): self._validate_provides(task) self._tasks.append(task) def order(self): return list(self._tasks)
Python
0.001182
@@ -1205,24 +1205,26 @@ revious task +/s in the chai @@ -1448,24 +1448,103 @@ -if self.results: +for r in _convert_to_set(task.requires()):%0A # Find the last task that provided this. %0A @@ -1556,10 +1556,13 @@ +for ( -_ last @@ -1582,18 +1582,28 @@ esults) -= +in reversed( self.res @@ -1606,20 +1606,18 @@ .results -%5B-1%5D +): %0A @@ -1625,34 +1625,92 @@ -for k in task.requir + if r not in _convert_to_set(last_task.provid es() +) :%0A + continue%0A @@ -1741,17 +1741,17 @@ lts and -k +r in last @@ -1787,17 +1787,17 @@ inputs%5B -k +r %5D = last @@ -1805,17 +1805,17 @@ results%5B -k +r %5D%0A @@ -1820,81 +1820,324 @@ -return inputs%0A%0A def _validate_provides(self, task):%0A requires = + else:%0A inputs%5Br%5D = None%0A # Some task said they had it, get the next requirement.%0A break%0A return inputs%0A%0A def _validate_provides(self, task):%0A # Ensure that some previous task provides this input.%0A missing_requires = %5B%5D%0A for r in _co @@ -2165,25 +2165,27 @@ uires()) -%0A +:%0A last_pro @@ -2172,36 +2172,40 @@ ):%0A -last + found _provide s = set()%0A @@ -2196,183 +2196,277 @@ vide -s = set()%0A last_provider = None%0A if self._tasks:%0A last_provider = self._tasks%5B-1%5D%0A last_provides = _convert_to_set(last_provider.provides() +r = False%0A for prev_task in reversed(self._tasks):%0A if r in _convert_to_set(prev_task.provides()):%0A found_provider = True%0A break%0A if not found_provider:%0A missing_requires.append(r )%0A @@ -2583,117 +2583,34 @@ -req_diff = requires.difference(last_provides)%0A if req_diff:%0A if last_provider is None:%0A +if len(missing_requires):%0A @@ -2696,20 +2696,16 @@ - %22 for %25s @@ -2736,217 +2736,24 @@ %25 ( -req_diff, task)%0A else:%0A msg = (%22%25s does not provide the needed outputs %25s for %25s to%22%0A %22 correctly execute.%22)%0A msg = msg %25 (last_provider, req_diff +missing_requires , ta
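Reconstructed from the hunks above (whitespace approximated), `_fetch_task_inputs` now scans all earlier results instead of only the immediately preceding task:

```python
    def _fetch_task_inputs(self, task):
        inputs = {}
        for r in _convert_to_set(task.requires()):
            # Find the last task that provided this.
            for (last_task, last_results) in reversed(self.results):
                if r not in _convert_to_set(last_task.provides()):
                    continue
                if last_results and r in last_results:
                    inputs[r] = last_results[r]
                else:
                    inputs[r] = None
                # Some task said they had it, get the next requirement.
                break
        return inputs
```

`_validate_provides` gets the matching treatment: it walks `reversed(self._tasks)` per requirement, collects `missing_requires`, and raises `exc.InvalidStateException` only when some requirement has no provider at all.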
e77380401d04feb1ff283add4dca9f6bad57f330
Rewrite order/tests/MemberViewTests.
haveaniceday/order/tests.py
haveaniceday/order/tests.py
from django.test import TestCase from .models import Member # Create your tests here. class OrderViewTests(TestCase): def test_order_view(self): response = self.client.get('/order/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'order/home.html') class MemberViewTests(TestCase): def setUp(self): member = Member( kmID='AB123456', edocID='edoc', emailID='abc', name='捕夢網', location='A局-B分局-C所', title='代理執行秘書') member.save() another_member = Member( name='test') another_member.save() def tearDown(self): Member.objects.all().delete() def test_member_view(self): r = self.client.get('/order/member/?id=1') self.assertContains(r, 'AB123456') def test_member_all(self): r = self.client.get('/order/member/all/') self.assertContains(r, 'test') self.assertContains(r, 'abc')
Python
0
@@ -52,16 +52,84 @@ t Member +%0Afrom website_component.models import CustomWebPage, CustomComponent %0A%0A# Crea @@ -680,16 +680,226 @@ r.save() +%0A page = CustomWebPage(name='%E4%BA%BA%E5%93%A1%E6%B8%85%E5%96%AE')%0A page.save()%0A customcomponent = CustomComponent(name='CustomComponent', value='value')%0A customcomponent.page = page%0A customcomponent.save() %0A%0A de
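(No reconstruction needed here: the hunks are short and readable once %0A is read as a newline — an import of `CustomWebPage`/`CustomComponent` plus a few `save()` calls appended to `setUp`.)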
9d61639c0f2783c37bb1037d55d7670bc43e7d01
Should be break, not continue
oonib/bouncer/handlers.py
oonib/bouncer/handlers.py
import json import random import yaml from oonib import errors as e from oonib.handlers import OONIBHandler from oonib import config class Bouncer(object): def __init__(self): self.knownHelpers = {} self.updateKnownHelpers() self.updateKnownCollectors() def updateKnownCollectors(self): """ Returns the list of all known collectors """ self.knownCollectors = [] with open(config.main.bouncer_file) as f: bouncerFile = yaml.safe_load(f) for collectorName, helpers in bouncerFile['collector'].items(): if collectorName not in self.knownCollectors: self.knownCollectors.append(collectorName) def updateKnownHelpers(self): with open(config.main.bouncer_file) as f: bouncerFile = yaml.safe_load(f) for collectorName, helpers in bouncerFile['collector'].items(): for helperName, helperAddress in helpers['test-helper'].items(): if helperName not in self.knownHelpers.keys(): self.knownHelpers[helperName] = [] self.knownHelpers[helperName].append({ 'collector-name': collectorName, 'helper-address': helperAddress }) def getHelperAddresses(self, helper_name): """ Returns a dict keyed on the collector address of known test helpers. example: { 'httpo://thirteenchars1.onion': '127.0.0.1', 'httpo://thirteenchars2.onion': '127.0.0.2', 'httpo://thirteenchars3.onion': '127.0.0.3' } """ try: helpers = self.knownHelpers[helper_name] except KeyError: raise e.TestHelperNotFound helpers_dict = {} for helper in helpers: helpers_dict[helper['collector-name']] = helper['helper-address'] return helpers_dict def filterHelperAddresses(self, requested_helpers): """ Returns a dict of collectors that support all the requested_helpers. Example: requested_helpers = ['a', 'b', 'c'] will return: { 'a': { 'address': '127.0.0.1', 'collector': 'httpo://thirteenchars1.onion' }, 'b': { 'address': '127.0.0.1:8081', 'collector': 'httpo://thirteenchars1.onion' }, 'c': { 'address': 'http://127.0.0.1', 'collector': 'httpo://thirteenchars2.onion' }, 'default': { 'collector': 'httpo://thirteenchars1.onion' } } or {'error': 'test-helper-not-found'} if no valid helper was found """ response = {} for helper_name in requested_helpers: try: # If we can, try to pick the same collector. choices = self.getHelperAddresses(helper_name) for item in response.values(): if item['collector'] in choices.keys(): choice = item continue # Or default to a random selection else: c,h = random.choice(choices.items()) choice = {'collector': c, 'address': h} response[helper_name] = choice except e.TestHelperNotFound: response = {'error': 'test-helper-not-found'} return response response['default'] = {'collector': random.choice(self.knownCollectors)} return response class BouncerQueryHandler(OONIBHandler): def initialize(self): self.bouncer = Bouncer() def updateKnownHelpers(self): with open(config.main.bouncer_file) as f: bouncerFile = yaml.safe_load(f) for collectorName, helpers in bouncerFile['collector'].items(): for helperName, helperAddress in helpers['test-helper'].items(): if helperName not in self.knownHelpers.keys(): self.knownHelpers[helperName] = [] self.knownHelpers[helperName].append({ 'collector-name': collectorName, 'helper-address': helperAddress }) def post(self): try: query = json.loads(self.request.body) except ValueError: raise e.InvalidRequest try: requested_helpers = query['test-helpers'] except KeyError: raise e.TestHelpersKeyMissing response = self.bouncer.filterHelperAddresses(requested_helpers) self.write(response)
Python
0.998255
@@ -3273,16 +3273,13 @@ -continue +break %0A
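The one-word hunk above matters because the loop is a `for`/`else`: with `continue`, the `else` branch always ran, so the random selection overwrote any matching collector that had just been found. After the fix the loop reads roughly:

```python
                for item in response.values():
                    if item['collector'] in choices.keys():
                        choice = item
                        break
                # Or default to a random selection
                else:
                    c,h = random.choice(choices.items())
                    choice = {'collector': c, 'address': h}
```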
718049a991470b6fa95d8db65a6482735219fc57
Fix get_acl_on
openquake/server/utils.py
openquake/server/utils.py
# -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright (C) 2015-2017 GEM Foundation # # OpenQuake is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import getpass import requests import logging import django from time import sleep from django.conf import settings from openquake.engine import __version__ as oqversion if settings.LOCKDOWN: django.setup() from django.contrib.auth.models import User def get_user(request): """ Returns the users from `request` if authentication is enabled, otherwise returns the default user (from settings, or as reported by the OS). """ if settings.LOCKDOWN and hasattr(request, 'user'): user = request.user.username else: user = (settings.DEFAULT_USER if hasattr(settings, 'DEFAULT_USER') else getpass.getuser()) return user def get_valid_users(request): """" Returns a list of `users` based on groups membership. Returns a list made of a single user when it is not member of any group. """ users = [] users.append(get_user(request)) if settings.LOCKDOWN and hasattr(request, 'user'): if request.user.is_authenticated(): groups = request.user.groups.values_list('name', flat=True) if groups: users = list(User.objects.filter(groups__name=groups) .values_list('username', flat=True)) return users def get_acl_on(request): """ Returns `true` if ACL should be honorated, returns otherwise `false`. """ if settings.LOCKDOWN and hasattr(request, 'user'): if not request.user.is_superuser and settings.ACL_ON: acl_on = True else: acl_on = False return acl_on def user_has_permission(request, owner): """ Returns `true` if user coming from the request has the permission to view a resource, returns `false` otherwise. """ return (True if owner in get_valid_users(request) or not get_acl_on(request) else False) def oq_server_context_processor(request): """ A custom context processor which allows injection of additional context variables. """ context = {} context['oq_engine_server_url'] = ('//' + request.META.get('HTTP_HOST', 'localhost:8800')) # this context var is also evaluated by the STANDALONE_APPS to identify # the running environment. Keep it as it is context['oq_engine_version'] = oqversion return context def check_webserver_running(url="http://localhost:8800", max_retries=30): """ Returns True if a given URL is responding within a given timeout. """ retry = 0 response = '' success = False while response != requests.codes.ok and retry < max_retries: try: response = requests.head(url, allow_redirects=True).status_code success = True except: sleep(1) retry += 1 if not success: logging.warn('Unable to connect to %s within %s retries' % (url, max_retries)) return success
Python
0.000001
@@ -2128,32 +2128,61 @@ false%60.%0A %22%22%22%0A + acl_on = settings.ACL_ON%0A if settings. @@ -2232,98 +2232,90 @@ -if not request.user.is_superuser and settings.ACL_ON:%0A acl_on = True%0A else:%0A +# ACL is always disabled for superusers%0A if request.user.is_superuser:%0A
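Decoded, the two hunks rewrite `get_acl_on` so that `acl_on` is always bound (the old version could raise an UnboundLocalError whenever LOCKDOWN was off) and so that ACL is bypassed only for superusers; an approximate reconstruction:

```python
def get_acl_on(request):
    """
    Returns `true` if ACL should be honorated, returns otherwise `false`.
    """
    acl_on = settings.ACL_ON
    if settings.LOCKDOWN and hasattr(request, 'user'):
        # ACL is always disabled for superusers
        if request.user.is_superuser:
            acl_on = False
    return acl_on
```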
0c1196723202655fcce5ae93db9f93997abf1b94
Remove unused .shaes attribute from CdnDecrypter
telethon/crypto/cdn_decrypter.py
telethon/crypto/cdn_decrypter.py
from hashlib import sha256 from ..tl import Session from ..tl.functions.upload import GetCdnFileRequest, ReuploadCdnFileRequest from ..tl.types.upload import CdnFileReuploadNeeded, CdnFile from ..crypto import AESModeCTR from ..errors import CdnFileTamperedError class CdnDecrypter: """Used when downloading a file results in a 'FileCdnRedirect' to both prepare the redirect, decrypt the file as it downloads, and ensure the file hasn't been tampered. """ def __init__(self, cdn_client, file_token, cdn_aes, cdn_file_hashes): self.client = cdn_client self.file_token = file_token self.cdn_aes = cdn_aes self.cdn_file_hashes = cdn_file_hashes self.shaes = [sha256() for _ in range(len(cdn_file_hashes))] @staticmethod def prepare_decrypter(client, client_cls, cdn_redirect): """Prepares a CDN decrypter, returning (decrypter, file data). 'client' should be the original TelegramBareClient that tried to download the file. 'client_cls' should be the class of the TelegramBareClient. """ # TODO Avoid the need for 'client_cls=TelegramBareClient' # https://core.telegram.org/cdn cdn_aes = AESModeCTR( key=cdn_redirect.encryption_key, # 12 first bytes of the IV..4 bytes of the offset (0, big endian) iv=cdn_redirect.encryption_iv[:12] + bytes(4) ) # Create a new client on said CDN dc = client._get_dc(cdn_redirect.dc_id, cdn=True) session = Session(client.session) session.server_address = dc.ip_address session.port = dc.port cdn_client = client_cls( # Avoid importing TelegramBareClient session, client.api_id, client.api_hash, timeout=client._sender.connection.get_timeout() ) # This will make use of the new RSA keys for this specific CDN. # # We assume that cdn_redirect.cdn_file_hashes are ordered by offset, # and that there will be enough of these to retrieve the whole file. # # This relies on the fact that TelegramBareClient._dc_options is # static and it won't be called from this DC (it would fail). cdn_client.connect() # CDN client is ready, create the resulting CdnDecrypter decrypter = CdnDecrypter( cdn_client, cdn_redirect.file_token, cdn_aes, cdn_redirect.cdn_file_hashes ) cdn_file = client(GetCdnFileRequest( file_token=cdn_redirect.file_token, offset=cdn_redirect.cdn_file_hashes[0].offset, limit=cdn_redirect.cdn_file_hashes[0].limit )) if isinstance(cdn_file, CdnFileReuploadNeeded): # We need to use the original client here client(ReuploadCdnFileRequest( file_token=cdn_redirect.file_token, request_token=cdn_file.request_token )) # We want to always return a valid upload.CdnFile cdn_file = decrypter.get_file() else: cdn_file.bytes = decrypter.cdn_aes.encrypt(cdn_file.bytes) cdn_hash = decrypter.cdn_file_hashes.pop(0) decrypter.check(cdn_file.bytes, cdn_hash) return decrypter, cdn_file def get_file(self): """Calls GetCdnFileRequest and decrypts its bytes. Also ensures that the file hasn't been tampered. """ if self.cdn_file_hashes: cdn_hash = self.cdn_file_hashes.pop(0) cdn_file = self.client(GetCdnFileRequest( self.file_token, cdn_hash.offset, cdn_hash.limit )) cdn_file.bytes = self.cdn_aes.encrypt(cdn_file.bytes) self.check(cdn_file.bytes, cdn_hash) else: cdn_file = CdnFile(bytes(0)) return cdn_file @staticmethod def check(data, cdn_hash): """Checks the integrity of the given data""" if sha256(data).digest() != cdn_hash.hash: raise CdnFileTamperedError()
Python
0
@@ -699,77 +699,8 @@ shes -%0A self.shaes = %5Bsha256() for _ in range(len(cdn_file_hashes))%5D %0A%0A
4f2b4b07131c462873b87b869e8df1de41af5848
Add some test code
RNACompete/SeqStruct.py
RNACompete/SeqStruct.py
# Copyright 2000-2002 Brad Chapman. # Copyright 2004-2005 by M de Hoon. # Copyright 2007-2015 by Peter Cock. # All rights reserved. # This code is part of the Biopython distribution and governed by its # license. Please see the LICENSE file that should have been included # as part of this package. # Modified Copyright 2016 by Kevin Ha """This class inherits Bio.Seq that adds functionality for handling RNAContextualSequenceSecondaryStructure alphabets. Specifically, will take a RNA sequence and contextual secondary structure sequence and convert it to a unified RNAContextualSequenceSecondaryStructure alphabet. """ from secondarystructure import RNAContextualSequenceSecondaryStructure as RNASS from Bio.Seq import Seq class SeqStruct(Seq): """A read-only Sequence object that extends Bio.Seq Adds extra function for converting RNA sequence and contextual secondary structure sequence into a RNAContextualSequenceSecondaryStructure sequence """ def __init__(self, seq, struct): # Convert sequence and struct sequences newseq = SeqStruct.convert(seq, struct) super(SeqStruct, self).__init__(newseq, RNASS) @staticmethod def convert(seq, struct): """Convert a seq and struct SeqRecord to a new SeqRecord with alphabet RNAContextualSequenceSecondaryStructure """ if len(seq) != len(struct): raise ValueError(('Sequence and structure records have' ' different lengths')) seqstruct_sequence = '' for i,j in zip(seq, struct): seqstruct_sequence += RNASS.convert(i, j) return seqstruct_sequence
Python
0.000037
@@ -1636,16 +1636,88 @@ struct_sequence%0A +%0Aif __name__ == %22__main__%22:%0A s = SeqStruct('AGC', 'BBB')%0A print s%0A
762f30d1bf11f46f8541735cd522bf0fae07ba42
add new sc to current window
sm.py
sm.py
# 1.create projects skeleton based on defined scaffolds # 2.create eslint.sublime-build # # Project: https://github.com/molee1905/ShenMa # License: MIT # import sublime, sublime_plugin import os import tempfile import shutil import re import subprocess import datetime, time import json SHORTCUTS_PATH_RE = re.compile(r'sc[/|\\]shortcuts', re.I) INPUT_SC_NAME = 'please input sc name: ' INPUT_SC_PATH = 'please input shortcuts path(e.g. xxx/sc/shortcuts): ' INVALID_SC_PATH = '''please input correct shortcuts path\r(e.g. $HOME/sc/shortcuts) ''' ALREADY_EXISTED_ERROR = 'The shortcut “{}” already exists.' COPY_ERROR = 'An error occurred while copying the template: {}' SETTINGS_FILE = 'ShenMa.sublime-settings' def open_directory(path): cmd = (get_subl_executable_path(), path) subprocess.Popen(cmd, cwd=path) def get_subl_executable_path(): executable_path = sublime.executable_path() if sublime.platform() == 'osx': suffix = '.app/' app_path = executable_path[:executable_path.rfind(suffix) + len(suffix)] executable_path = app_path + 'Contents/SharedSupport/bin/subl' return executable_path class CreateScCommand(sublime_plugin.WindowCommand): """A command that creates a new sc """ def run(self): self.settings = sublime.load_settings(SETTINGS_FILE) path = self.settings.get('shortcuts') if not path: self.window.show_input_panel( INPUT_SC_PATH, '', on_done=self.checkPath, on_change=None, on_cancel=None) else: self.checkPath(path) def checkPath(self, path): if self.isScPath(path): self.window.show_input_panel( INPUT_SC_NAME, '', on_done=self.render, on_change=None, on_cancel=None) else: if not sublime.ok_cancel_dialog(INVALID_SC_PATH): return self.window.show_input_panel( INPUT_SC_PATH, '', on_done=self.checkPath, on_change=None, on_cancel=None) def isScPath(self, path): match = SHORTCUTS_PATH_RE.search(path); if match: index = path.index('shortcuts') scpath = path[0:index] if os.path.exists(scpath): self.settings.set('shortcuts', path) sublime.save_settings(SETTINGS_FILE) return True else: self.settings.erase('shortcuts') return False def render(self, name): self.name = name self.author = os.getlogin() self.clzName = 'sc_{}'.format(name) self.cssPath = 'sc_advanced_{}.css'.format(name) self.jsPath = 'sc_{}.js'.format(name) self.dest = os.path.join(self.settings.get('shortcuts'), self.name) if os.path.exists(self.dest): sublime.error_message(ALREADY_EXISTED_ERROR.format(self.name)) return src = os.path.join(sublime.packages_path(), os.path.dirname(__file__), 'template') self.temp_dir = None try: self.temp_dir = tempfile.mkdtemp() self.temp_dest = os.path.join(self.temp_dir, self.name) shutil.copytree(src, self.temp_dest) os.mkdir(os.path.join(self.temp_dest, 'data')) os.mkdir(os.path.join(self.temp_dest, 'img')) os.mkdir(os.path.join(self.temp_dest, 'res')) os.mkdir(os.path.join(self.temp_dest, 'tmpl')) if not self.fill_template(self.temp_dir, self.name): return shutil.move(self.temp_dest, self.dest) open_directory(self.dest) except Exception as ex: if self.temp_dir and os.path.exists(self.temp_dir): shutil.rmtree(self.temp_dir) sublime.error_message(COPY_ERROR.format(str(ex))) def fill_template(self, template_dir, name): placeholders = { '__author__': self.author, '__name__': self.name, '__clz__': self.clzName, '__csspath__': self.cssPath, '__jspath__': self.jsPath, '__version__': '0.0.1' } for dirpath, dirnames, filenames in os.walk(template_dir): for filename in filenames: extension = os.path.splitext(filename)[1] if extension in ('.scss', '.js', '.html', '.md'): path = os.path.join(dirpath, filename) 
with open(path, encoding='utf-8') as f: text = f.read() for placeholder, value in placeholders.items(): text = text.replace(placeholder, value) with open(path, mode='w', encoding='utf-8') as f: f.write(text) if extension in ('.scss', '.js'): os.rename(path, os.path.join(dirpath, filename.format(name))) return True def build_eslint_system(): """A command that creates a eslint.sublimt-build file.""" settings = sublime.load_settings(SETTINGS_FILE) build_path = os.path.join(sublime.packages_path(), 'User', 'eslint.sublime-build') if not os.path.exists(build_path): print('no exists eslint.sublime-build') exec_path = settings.get('eslint') if os.path.exists(exec_path): build = {} build['path'] = exec_path build['cmd'] = ["eslint", "--fix", "$file"] build_text = json.dumps(build) with open(build_path, mode='w', encoding='utf-8') as f: f.write(build_text) print('elint already build, press cmd+b') else: print('eslint build path: ', build_path) sublime.set_timeout_async(build_eslint_system, 20)
Python
0
@@ -780,17 +780,21 @@ _path(), - +'-a', path)%0A @@ -819,18 +819,8 @@ (cmd -, cwd=path )%0A%0A%0A
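Reconstructed, `open_directory` now passes Sublime Text's `-a` flag (add the folder to the current window, matching the subject line) and no longer sets `cwd`:

```python
def open_directory(path):
    cmd = (get_subl_executable_path(), '-a', path)
    subprocess.Popen(cmd)
```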
951c0dbcfeb016dbde6e1a7a3f0eacc506c9211e
Rename sockjs router prefix to /ws/api/
ws.py
ws.py
import json from tornado import web, ioloop from sockjs.tornado import SockJSRouter, SockJSConnection from blimp.utils.websockets import WebSocketsRequest class EchoConnection(SockJSConnection): def on_open(self, info): self.send_json({'connected': True}) def on_message(self, data): response = WebSocketsRequest(data).get_response() self.send_json(response) def send_json(self, obj): self.send(json.dumps(obj)) if __name__ == '__main__': import logging import argparse parser = argparse.ArgumentParser() parser.add_argument('--port', help='Optional port number. Defaults to 8080', default=8080, ) parser.add_argument('--debug', help='Verbosity level set to DEBUG. Defaults to WARNING.', action='store_const', dest='loglevel', const=logging.DEBUG, default=logging.WARNING ) parser.add_argument('--verbose', help='Verbosity level set to INFO.', action='store_const', dest='loglevel', const=logging.INFO ) args = parser.parse_args() port = args.port logging.getLogger().setLevel(args.loglevel) EchoRouter = SockJSRouter(EchoConnection, '/echo') app = web.Application(EchoRouter.urls) app.listen(port) logging.info(" [*] Listening on 0.0.0.0:{}".format(port)) ioloop.IOLoop.instance().start()
Python
0.000002
@@ -158,20 +158,23 @@ %0A%0Aclass -Echo +RESTAPI Connecti @@ -590,16 +590,25 @@ rgument( +%0A '--port' @@ -716,16 +716,25 @@ rgument( +%0A '--debug @@ -949,16 +949,25 @@ rgument( +%0A '--verbo @@ -1237,20 +1237,23 @@ SRouter( -Echo +RESTAPI Connecti @@ -1262,12 +1262,15 @@ , '/ -echo +ws/api/ ')%0A%0A
a98107225a2a1925913b3e330dfb46d2f31d2801
Use argparse choices option to validate --tabular
mattersend.py
mattersend.py
#!/usr/bin/env python # -*- coding:utf-8 -*- name = 'mattersend' version = '1.2' url = 'https://github.com/mtorromeo/mattersend' description = "Sends messages to mattermost's incoming webhooks via CLI" def build(options, args, message): payload = {} # build request if message: payload['text'] = message for opt in ('channel', 'username', 'icon_url', 'icon_emoji'): if opt in options: payload[opt] = options[opt] return payload def main(): import sys import os import argparse import configparser import json import csv import setproctitle import requests from io import StringIO setproctitle.setproctitle(name) dialects = csv.list_dialects() dialects.sort() dialects.insert(0, 'sniff') # CLI arguments parser = argparse.ArgumentParser(prog=name, description=description) parser.add_argument('-V', '--version', action='version', version="%(prog)s " + version) parser.add_argument('-C', '--config', help='Use a different configuration file') parser.add_argument('-s', '--section', help='Configuration file section', default='DEFAULT') parser.add_argument('-c', '--channel', help='Send to this channel or @username') parser.add_argument('-U', '--url', help='Mattermost webhook URL') parser.add_argument('-u', '--username', help='Username') parser.add_argument('-i', '--icon', help='Icon') parser.add_argument('-t', '--tabular', metavar='DIALECT', const='sniff', nargs='?', help='Parse input as CSV and format it as a table (DIALECT can be one of {})' .format(", ".join(dialects))) parser.add_argument('-n', '--dry-run', '--just-print', action='store_true', help="Don't send, just print the payload") parser.add_argument('-f', '--file', default='-', help="Read content from FILE. If - reads from standard input (DEFAULT: %(default)s)") args = parser.parse_args() # CONFIG file config = configparser.SafeConfigParser() try: if args.config: config.read(args.config) else: config.read(["/etc/{}.conf".format(name), os.path.expanduser("~/.{}.conf".format(name))]) except configparser.Error as e: sys.exit(e.message) # merge config file with cli arguments options = {} for opt in ('channel', 'url', 'username', 'icon'): arg = getattr(args, opt) if arg: options[opt] = arg elif opt in config[args.section]: options[opt] = config[args.section][opt] elif args.section != 'DEFAULT' and opt in config['DEFAULT']: options[opt] = config['DEFAULT'][opt] if 'url' not in options: sys.exit('Missing mattermost webhook URL') if 'icon' in options: ioptname = 'icon_url' if '://' in options['icon'] else 'icon_emoji' # workaround mattermost missing icon_emoji until implemented if ioptname == 'icon_emoji' and options['icon'][0] == ':' and options['icon'][-1] == ':': baseurl = options['url'].split('/hooks/', 1) if len(baseurl) == 2: ioptname = 'icon_url' options['icon'] = "{}/static/images/emoji/{}.png".format(baseurl[0], options['icon'][1:-1]) options[ioptname] = options['icon'] del options['icon'] if args.tabular and args.tabular not in dialects: sys.exit("Invalid dialect {}".format(args.tabular)) # read message from CLI or stdin if args.file == '-': message = sys.stdin.read() else: with open(args.file, 'rU') as f: message = f.read() if args.tabular: csvfile = StringIO(message.strip()) if args.tabular == 'sniff': sniffer = csv.Sniffer() dialect = sniffer.sniff(message) has_header = sniffer.has_header(message) else: dialect = args.tabular has_header = True message = [] for i, row in enumerate(csv.reader(csvfile, dialect)): if i == 1 and has_header: message.append("| --- " * len(row) + "|") message.append("| {} |".format(" | ".join( [cell.replace("|", 
"❘").replace("\n", " ").replace("\r", " ") for cell in row] ))) message = "\n".join(message) payload = build(options, args, message) if args.dry_run: print("POST {}".format(options['url'])) print(json.dumps(payload, sort_keys=True, indent=4)) sys.exit(0) r = requests.post(options['url'], data={'payload': json.dumps(payload, sort_keys=True)}) if r.status_code != 200: try: r = json.loads(r.text) except json.decoder.JSONDecodeError: r = {'message': r.text, 'status_code': r.status_code} sys.exit("{} ({})".format(r['message'], r['status_code'])) if __name__ == '__main__': main()
Python
0.000001
@@ -1523,18 +1523,60 @@ ff', - nargs='?' +%0A nargs='?', choices=dialects ,%0A @@ -1674,65 +1674,21 @@ of -%7B%7D)'%0A .format(%22, %22.join(dialects)) +%25(choices)s)' )%0A @@ -3418,123 +3418,8 @@ '%5D%0A%0A - if args.tabular and args.tabular not in dialects:%0A sys.exit(%22Invalid dialect %7B%7D%22.format(args.tabular))%0A%0A
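After this change the option declaration reads roughly as below (string wrapping is approximated); the later hand-rolled `if args.tabular and args.tabular not in dialects: sys.exit(...)` block is deleted because argparse now rejects invalid values itself:

```python
    parser.add_argument('-t', '--tabular', metavar='DIALECT', const='sniff',
                        nargs='?', choices=dialects,
                        help='Parse input as CSV and format it as a table '
                             '(DIALECT can be one of %(choices)s)')
```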
4fe8a1c1b294f0d75a901d4e8e80f47f5583e44e
Fix for test failure introduced by basic auth changes
pages/lms/info.py
pages/lms/info.py
from e2e_framework.page_object import PageObject from ..lms import BASE_URL class InfoPage(PageObject): """ Info pages for the main site. These are basically static pages, so we use one page object to represent them all. """ # Dictionary mapping section names to URL paths SECTION_PATH = { 'about': '/about', 'faq': '/faq', 'press': '/press', 'contact': '/contact', 'terms': '/tos', 'privacy': '/privacy', 'honor': '/honor', } # Dictionary mapping URLs to expected css selector EXPECTED_CSS = { '/about': 'section.vision', '/faq': 'section.faq', '/press': 'section.press', '/contact': 'section.contact', '/tos': 'section.tos', '/privacy': 'section.privacy-policy', '/honor': 'section.honor-code', } @property def name(self): return "lms.info" @property def requirejs(self): return [] @property def js_globals(self): return [] def url(self, section=None): return BASE_URL + self.SECTION_PATH[section] def is_browser_on_page(self): stripped_url = self.browser.url.replace(BASE_URL, "") css_sel = self.EXPECTED_CSS[stripped_url] return self.is_css_present(css_sel) @classmethod def sections(cls): return cls.SECTION_PATH.keys()
Python
0
@@ -1152,24 +1152,25 @@ (self):%0A +%0A stripped @@ -1165,69 +1165,73 @@ -stripped_url = self.browser.url.replace(BASE_URL, %22%22)%0A +# Find the appropriate css based on the URL%0A for url_path, css @@ -1235,17 +1235,18 @@ css_sel -= +in self.EX @@ -1259,66 +1259,197 @@ _CSS -%5Bstripped_url%5D%0A return self.is_css_present(css_sel) +.iteritems():%0A if self.browser.url.endswith(url_path):%0A return self.is_css_present(css_sel)%0A%0A # Could not find the CSS based on the URL%0A return False %0A%0A
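Reconstructed (whitespace approximated), `is_browser_on_page` stops stripping BASE_URL out of the browser URL — presumably the part that broke once basic-auth credentials could appear in it — and instead matches on the URL suffix:

```python
    def is_browser_on_page(self):

        # Find the appropriate css based on the URL
        for url_path, css_sel in self.EXPECTED_CSS.iteritems():
            if self.browser.url.endswith(url_path):
                return self.is_css_present(css_sel)

        # Could not find the CSS based on the URL
        return False
```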
70271985105fb1abec7180d0c60fcf2a4247fe99
Copy QtGui to QtPrintSupport in membership.py
membership.py
membership.py
import os import pkgutil import json from optparse import OptionParser from functools import reduce from pprint import pprint # This is where all files are read from and saved to PREFIX = '/Qt.py' SKIP_MODULES = [ 'PyQt4.uic.pyuic', # Problematic as it is executed on import 'PyQt5.uic.pyuic' # Problematic as it is executed on import ] SKIP_MEMBERS = [ 'qApp' # See main README.md on qApp ] # Will contain all modules for the current binding MODULES = [] # Flags from environment variables QT_VERBOSE = bool(os.getenv("QT_VERBOSE")) def read_json(filename): """Read JSON, return dict""" with open(filename, 'r') as data_file: return json.load(data_file) def write_json(dictionary, filename): """Write dictionary to JSON""" with open(filename, 'w') as data_file: json.dump(dictionary, data_file, indent=4, sort_keys=True) print('--> Wrote ' + os.path.basename(filename)) def compare(dicts): """Compare by iteration""" common_members = {} common_keys = reduce(lambda x, y: x & y, map(dict.keys, dicts)) for k in common_keys: common_members[k] = list( reduce(lambda x, y: x & y, [set(d[k]) for d in dicts])) return common_members def copy_qtgui_to_qtwidgets(): """Copies the QtGui list of PySide/PyQt4 into QtWidgets""" pyside_filepath = PREFIX + '/PySide.json' pyqt4_filepath = PREFIX + '/PyQt4.json' pyside = read_json(pyside_filepath) pyqt4 = read_json(pyqt4_filepath) pyside['QtWidgets'] = pyside['QtGui'] pyqt4['QtWidgets'] = pyqt4['QtGui'] write_json(pyside, pyside_filepath) print('--> Copied QtGui to QtWidgets for ' + os.path.basename( pyside_filepath)) write_json(pyqt4, pyqt4_filepath) print('--> Copied QtGui to QtWidgets for ' + os.path.basename( pyqt4_filepath)) def sort_common_members(): """Sorts the keys and members""" filename = PREFIX + '/common_members.json' sorted_json_data = {} json_data = read_json(filename) all_keys = [] for key, value in json_data.items(): all_keys.append(key) sorted_keys = sorted(all_keys) for key in sorted_keys: if len(json_data[key]) > 0: # Only add modules which have common members sorted_json_data[key] = sorted(json_data[key]) print('--> Sorted/cleaned ' + os.path.basename(filename)) write_json(sorted_json_data, filename) def generate_common_members(): """Generate JSON with commonly shared members""" pyside = read_json(PREFIX + '/PySide.json') pyside2 = read_json(PREFIX + '/PySide2.json') pyqt4 = read_json(PREFIX + '/PyQt4.json') pyqt5 = read_json(PREFIX + '/PyQt5.json') dicts = [pyside, pyside2, pyqt4, pyqt5] common_members = compare(dicts) write_json(common_members, PREFIX + '/common_members.json') if __name__ == '__main__': # Parse commandline arguments parser = OptionParser() parser.add_option('--binding', dest='binding', metavar='BINDING') parser.add_option( '--copy-qtgui', action='store_true', dest='copy', default=False) parser.add_option( '--generate-common-members', action='store_true', dest='generate', default=False) parser.add_option( '--sort-common-members', action='store_true', dest='sort', default=False) (options, args) = parser.parse_args() if options.copy: copy_qtgui_to_qtwidgets() elif options.generate: generate_common_members() elif options.sort: sort_common_members() else: # Import <binding> binding = __import__(options.binding) for importer, modname, ispkg in pkgutil.walk_packages( path=binding.__path__, prefix=binding.__name__ + '.', onerror=lambda x: None): if modname not in SKIP_MODULES: MODULES.append(modname) basemodule = modname[:modname.rfind('.')] submodule = modname[modname.rfind('.')+1:] try: import_statement = ( 'from ' + basemodule + ' import 
' + submodule) exec(import_statement) # print(import_statement) except (ImportError, AttributeError, SyntaxError) as error: # SyntaxError catched here because e.g. _port3 # running on Python 2... print('WARNING: Skipped import', modname, error) try: raw_members = [] # avoid Hound errors exec('raw_members = dir(' + submodule + ')') members = [] for member in raw_members: if member not in SKIP_MEMBERS and \ not member.startswith('_'): try: import_statement = ( 'from ' + basemodule + '.' + submodule + ' import ' + member) exec(import_statement) # print(import_statement) MODULES.append(modname + '.' + member) except (AttributeError, SyntaxError) as error: # SyntaxError catched here because e.g. _port3 # running on Python 2... print('WARNING: Skipped import', modname, error) except (NameError) as error: print('WARNING: Skipped dir() command', modname, error) # Remove duplicates and sort MODULES = sorted(list(set(MODULES))) if QT_VERBOSE: # Print all modules (for debugging) for module in MODULES: print(module) # Create dictionary members = {} for module in MODULES: key = module.split('.')[1] if key not in members: members[key] = [] try: value = module.split('.')[2] members[key].append(value) except IndexError: pass # Sort and remove duplicates sorted_members = {} for key, value in members.copy().items(): sorted_members[key] = sorted(list(set(value))) if QT_VERBOSE: # Debug pprint(sorted_members) # Write to disk filepath = PREFIX + '/' + binding.__name__ + '.json' write_json(sorted_members, filepath)
Python
0
@@ -1247,24 +1247,22 @@ tgui_to_ -qtwidget +module s():%0A @@ -1499,66 +1499,257 @@ -pyside%5B'QtWidgets'%5D = pyside%5B'QtGui'%5D%0A pyqt4%5B'QtWidgets +# When Qt4 was moved to Qt5, they split QtGui into QtGui, QtWidgets, and%0A # QtPrintSupport.%0A pyside%5B'QtWidgets'%5D = pyside%5B'QtGui'%5D%0A pyqt4%5B'QtWidgets'%5D = pyqt4%5B'QtGui'%5D%0A pyside%5B'QtPrintSupport'%5D = pyside%5B'QtGui'%5D%0A pyqt4%5B'QtPrintSupport '%5D = @@ -3675,16 +3675,14 @@ _to_ -qtwidget +module s()%0A
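Decoded, the function is renamed to `copy_qtgui_to_modules` (the call near the bottom of the file is updated to match) and now also mirrors QtGui into QtPrintSupport; the tail of the function becomes roughly:

```python
    # When Qt4 was moved to Qt5, they split QtGui into QtGui, QtWidgets, and
    # QtPrintSupport.
    pyside['QtWidgets'] = pyside['QtGui']
    pyqt4['QtWidgets'] = pyqt4['QtGui']
    pyside['QtPrintSupport'] = pyside['QtGui']
    pyqt4['QtPrintSupport'] = pyqt4['QtGui']
```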
4d942291734641bbdd6a71e16167fefca37a68e7
Fix default config file path on auto creation
passpie/config.py
passpie/config.py
import copy import logging import os import yaml DEFAULT_CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.passpierc') DB_DEFAULT_PATH = os.path.join(os.path.expanduser('~'), '.passpie') DEFAULT_CONFIG = { 'path': DB_DEFAULT_PATH, 'short_commands': False, 'key_length': 4096, 'genpass_length': 32, 'genpass_symbols': "_-#|+=", 'table_format': 'fancy_grid', 'headers': ['name', 'login', 'password', 'comment'], 'colors': {'name': 'yellow', 'login': 'green'}, 'repo': True, 'status_repeated_passwords_limit': 5, 'copy_timeout': 0, 'extension': '.pass', 'recipient': None } def read_config(path): try: with open(path) as config_file: content = config_file.read() config = yaml.load(content) except IOError: logging.debug('config file "%s" not found' % path) return {} except yaml.scanner.ScannerError as e: logging.error('Malformed user configuration file {}'.format(e)) return {} return config def create(path, default=True, **kwargs): config_path = os.path.join(os.path.expanduser(path), '.passpierc') with open(config_path, 'w') as config_file: if default: config_file.write(yaml.dump(DEFAULT_CONFIG, default_flow_style=False)) else: config_file.write(yaml.dump(kwargs, default_flow_style=False)) def load(): if not os.path.isfile(DEFAULT_CONFIG_PATH): create(DEFAULT_CONFIG['path'], default=True) global_config = read_config(DEFAULT_CONFIG_PATH) config = copy.deepcopy(DEFAULT_CONFIG) config.update(global_config) local_config = read_config(os.path.join(config['path'], '.passpierc')) config.update(local_config) return config
Python
0.000001
@@ -1469,24 +1469,21 @@ T_CONFIG -%5B'path'%5D +_PATH , defaul
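The decoded change is a single call-site fix inside `load()`: the auto-created config was being written under the database path (`DEFAULT_CONFIG['path']`, i.e. ~/.passpie) rather than the config path that the very same existence check looks at:

```python
    if not os.path.isfile(DEFAULT_CONFIG_PATH):
        create(DEFAULT_CONFIG_PATH, default=True)
```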
2559fa50a80ac90f36c3aed251bf397f1af83dd2
bump version to 0.1b2
paste/__init__.py
paste/__init__.py
name = 'paste' version = '0.1b1'
Python
0.000001
@@ -23,11 +23,11 @@ = '0.1b -1 +2 '%0A
cf716e0d35df2a76c57c0b08a027c092ff60fd47
Refactor `.open(...)` for clarity
pdfplumber/pdf.py
pdfplumber/pdf.py
import itertools import logging import pathlib from pdfminer.layout import LAParams from pdfminer.pdfdocument import PDFDocument from pdfminer.pdfinterp import PDFResourceManager from pdfminer.pdfpage import PDFPage from pdfminer.pdfparser import PDFParser from pdfminer.psparser import PSException from .container import Container from .page import Page from .utils import resolve_and_decode logger = logging.getLogger(__name__) class PDF(Container): cached_properties = Container.cached_properties + ["_pages"] def __init__( self, stream, pages=None, laparams=None, password="", strict_metadata=False, ): self.laparams = None if laparams is None else LAParams(**laparams) self.stream = stream self.pages_to_parse = pages self.doc = PDFDocument(PDFParser(stream), password=password) self.rsrcmgr = PDFResourceManager() self.metadata = {} for info in self.doc.info: self.metadata.update(info) for k, v in self.metadata.items(): try: self.metadata[k] = resolve_and_decode(v) except Exception as e: if strict_metadata: # Raise an exception since unable to resolve the metadata value. raise # This metadata value could not be parsed. Instead of failing the PDF # read, treat it as a warning only if `strict_metadata=False`. logger.warning( f'[WARNING] Metadata key "{k}" could not be parsed due to ' f"exception: {str(e)}" ) @classmethod def open(cls, path_or_fp, **kwargs): if isinstance(path_or_fp, (str, pathlib.Path)): fp = open(path_or_fp, "rb") try: inst = cls(fp, **kwargs) except PSException: fp.close() raise inst.close_file = fp.close return inst else: return cls(path_or_fp, **kwargs) @property def pages(self): if hasattr(self, "_pages"): return self._pages doctop = 0 pp = self.pages_to_parse self._pages = [] for i, page in enumerate(PDFPage.create_pages(self.doc)): page_number = i + 1 if pp is not None and page_number not in pp: continue p = Page(self, page, page_number=page_number, initial_doctop=doctop) self._pages.append(p) doctop += p.height return self._pages @property def objects(self): if hasattr(self, "_objects"): return self._objects all_objects = {} for p in self.pages: for kind in p.objects.keys(): all_objects[kind] = all_objects.get(kind, []) + p.objects[kind] self._objects = all_objects return self._objects @property def annots(self): gen = (p.annots for p in self.pages) return list(itertools.chain(*gen)) @property def hyperlinks(self): gen = (p.hyperlinks for p in self.pages) return list(itertools.chain(*gen))
Python
0
@@ -1725,17 +1725,24 @@ i -f +s_path = isinsta @@ -1777,17 +1777,16 @@ b.Path)) -: %0A @@ -1786,20 +1786,16 @@ - fp = ope @@ -1813,21 +1813,45 @@ p, %22rb%22) -%0A + if is_path else path_or_fp%0A%0A @@ -1863,28 +1863,24 @@ - inst = cls(f @@ -1892,20 +1892,16 @@ kwargs)%0A - @@ -1920,16 +1920,40 @@ eption:%0A + if is_path:%0A @@ -1979,33 +1979,50 @@ - raise +%0A%0A if is_path: %0A @@ -2049,20 +2049,17 @@ p.close%0A - +%0A @@ -2073,67 +2073,8 @@ inst -%0A else:%0A return cls(path_or_fp, **kwargs) %0A%0A
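Reconstructed from the hunks (indentation approximated), the refactored `open` replaces the if/else split with an `is_path` flag and a single exit path:

```python
    @classmethod
    def open(cls, path_or_fp, **kwargs):
        is_path = isinstance(path_or_fp, (str, pathlib.Path))
        fp = open(path_or_fp, "rb") if is_path else path_or_fp

        try:
            inst = cls(fp, **kwargs)
        except PSException:
            if is_path:
                fp.close()
            raise

        if is_path:
            inst.close_file = fp.close

        return inst
```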
a9808c822e598fd17148b8fc4063ea11f0a270e9
Add bawler-specific exception
pg_bawler/core.py
pg_bawler/core.py
''' ============== pg_bawler.core ============== Base classes for LISTEN / NOTIFY. Postgresql documentation for `LISTEN <https://www.postgresql.org/docs/current/static/sql-listen.html>`_ / `NOTIFY <https://www.postgresql.org/docs/current/static/sql-notify.html>`_. ''' import asyncio import logging import aiopg LOGGER = logging.getLogger(name='pg_bawler.core') def cache_async_def(func): cache_attr_name = '_cache_async_def_{func.__name__}'.format(func=func) async def _cache_method(self, *args, **kwargs): if not hasattr(self, cache_attr_name): setattr(self, cache_attr_name, await func(self, *args, **kwargs)) return getattr(self, cache_attr_name) # simulate functools.update_wrapper _cache_method.__name__ = func.__name__ _cache_method.__doc__ = func.__doc__ _cache_method.__module__ = func.__module__ # save cache_attr_name on function # so delattr(self, func.cache_attr_name) will clear the cache _cache_method.cache_attr_name = cache_attr_name return _cache_method class BawlerBase: ''' Base ``pg_bawler`` class with convenience methods around ``aiopg``. ''' def __init__(self, connection_params, *, loop=None): self.connection_params = connection_params self._connection = None self.loop = asyncio.get_event_loop() if loop is None else loop @cache_async_def async def pg_pool(self): return await aiopg.create_pool( loop=self.loop, **self.connection_params) @cache_async_def async def pg_connection(self): return await (await self.pg_pool()).acquire() async def drop_connection(self): ''' Drops current connection Next call to the ``self.pg_connection`` will acquire new connection from pool. Use this method to drop dead connections on server restart. ''' if hasattr(self, self.pg_connection.cache_attr_name): pg_conn = (await self.pg_connection()) pg_conn.close() await (await self.pg_pool()).release(pg_conn) # clear cached connection property (cache_async_def) delattr(self, self.pg_connection.cache_attr_name) async def __aenter__(self): return self async def __aexit__(self, exc_type, exc, tb): await self.drop_connection()
Python
0.000001
@@ -362,16 +362,124 @@ ore')%0A%0A%0A +class PgBawlerException(Exception):%0A '''%0A Base class for all %60%60pg_bawler%60%60 related failures%0A '''%0A%0A%0A def cach
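Decoded, the hunk adds the new base class right after the module logger, so callers can catch one exception type for anything pg_bawler-specific:

```python
class PgBawlerException(Exception):
    '''
    Base class for all ``pg_bawler`` related failures
    '''
```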
d350c180606060e9539c12d7d49eed4d6802ac8b
Bump to 1.1.5
pilkit/pkgmeta.py
pilkit/pkgmeta.py
__title__ = 'pilkit' __author__ = 'Matthew Tretter' __version__ = '1.1.4' __license__ = 'BSD' __all__ = ['__title__', '__author__', '__version__', '__license__']
Python
0.000205
@@ -68,9 +68,9 @@ 1.1. -4 +5 '%0A__
b1bd2acbe756922a4cfa2b3a307d60b7e89734c2
Update command.py
pipwin/command.py
pipwin/command.py
# -*- coding: utf-8 -*- """pipwin installs compiled python binaries on windows provided by Christoph Gohlke Usage: pipwin install (<package> | [-r=<file> | --file=<file>]) pipwin uninstall <package> pipwin download (<package> | [-r=<file> | --file=<file>]) [-d=<dest> | --dest=<dest>] pipwin search <package> pipwin list pipwin refresh [--log=<log>] pipwin (-h | --help) pipwin (-v | --version) Options: -h --help Show this screen. -v --version Show version. -r=<file> --file=<file> File with list of package names. -d=<dest> --dest=<dest> Download packages into <dest>. """ from docopt import docopt import sys import platform import logging from warnings import warn from . import pipwin, __version__ from packaging.requirements import Requirement def _package_names(args): if args["--file"]: with open(args["--file"], 'r') as fid: for package in fid.readlines(): if package and not package.startswith('#'): yield Requirement(package.strip()) elif not args["<package>"]: print("Provide a package name") sys.exit(0) else: yield Requirement(args["<package>"]) return def _print_unresolved_match_msg(package, matches): if len(matches) > 0: print("Did you mean any of these ?\n") print(" * " + "\n * ".join(matches)) print("") else: print("Package `{}` not found".format(package.name)) print("Try `pipwin refresh`") def main(): """ Command line entry point """ args = docopt(__doc__, version="pipwin v{}".format(__version__)) # Warn if not on windows if platform.system() != "Windows": warn("Found a non Windows system. Package installation might not work.") # Handle refresh if args["refresh"]: log_level = args.get("--log", None) if log_level: log_level = log_level.upper() try: logging.basicConfig(level=log_level) except ValueError: print("Log level should be DEBUG, INFO, WARNING or ERROR") pipwin.refresh() sys.exit(0) cache = pipwin.PipwinCache() # Handle list if args["list"]: cache.print_list() sys.exit(0) for package in _package_names(args): exact_match, matches = cache.search(package) if not exact_match: _print_unresolved_match_msg(package, matches) if args["--file"]: # We just skip this specific package and work on the others continue else: sys.exit(1) print("Package `{}` found in cache".format(package)) # Handle install/uninstall/download if args["install"]: cache.install(package) elif args["uninstall"]: cache.uninstall(package) elif args["download"]: cache.download(package, dest=args["--dest"])
Python
0.000002
@@ -1201,16 +1201,24 @@ ckage%3E%22%5D +.lower() )%0A re
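The vague subject hides a one-token change: the bare package argument is lowercased before building the Requirement (file-based input is left untouched), presumably so lookups into the pipwin cache are case-insensitive:

```python
        yield Requirement(args["<package>"].lower())
```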
cfd06b763ad9329dccc5ef7f1f8f86a310997c6e
Break the loop immediately after lives are lost
ale.py
ale.py
import collections import os import sys import numpy as np import scipy.misc as spm from ale_python_interface import ALEInterface from PIL import Image import cv2 import environment class ALE(environment.EpisodicEnvironment): """Arcade Learning Environment. """ def __init__(self, rom_filename, seed=0, use_sdl=False, n_last_screens=4, frame_skip=4, treat_life_lost_as_terminal=True, crop_or_scale='scale'): self.n_last_screens = n_last_screens self.treat_life_lost_as_terminal = treat_life_lost_as_terminal self.crop_or_scale = crop_or_scale ale = ALEInterface() ale.setInt(b'random_seed', seed) self.frame_skip = frame_skip if use_sdl: if 'DISPLAY' not in os.environ: raise RuntimeError( 'Please set DISPLAY environment variable for use_sdl=True') # SDL settings below are from the ALE python example if sys.platform == 'darwin': import pygame pygame.init() ale.setBool('sound', False) # Sound doesn't work on OSX elif sys.platform.startswith('linux'): ale.setBool('sound', True) ale.setBool('display_screen', True) ale.loadROM(str.encode(rom_filename)) assert ale.getFrameNumber() == 0 self.ale = ale self.legal_actions = ale.getMinimalActionSet() self.initialize() def current_screen(self): # Max of two consecutive frames rgb_img = np.maximum(self.ale.getScreenRGB(), self.last_raw_screen) assert rgb_img.shape == (210, 160, 3) # RGB -> Luminance img = rgb_img[:, :, 0] * 0.2126 + rgb_img[:, :, 1] * \ 0.0722 + rgb_img[:, :, 2] * 0.7152 if img.shape == (250, 160): raise RuntimeError("This ROM is for PAL. Please use ROMs for NTSC") assert img.shape == (210, 160) if self.crop_or_scale == 'crop': # Shrink (210, 160) -> (110, 84) img = cv2.resize(img, (84, 110), interpolation=cv2.INTER_LINEAR) img = img.astype(np.float32) assert img.shape == (110, 84) # Crop (110, 84) -> (84, 84) unused_height = 110 - 84 bottom_crop = 8 top_crop = unused_height - bottom_crop img = img[top_crop: 110 - bottom_crop, :] elif self.crop_or_scale == 'scale': img = cv2.resize(img, (84, 84), interpolation=cv2.INTER_LINEAR) img = img.astype(np.float32) else: raise RuntimeError('crop_or_scale must be either crop or scale') assert img.shape == (84, 84) # [0,255] -> [-128, 127] img -= 128 # [-128, 127] -> [-1, 1) img /= 128.0 return img @property def state(self): ret = np.asarray(self.last_screens) assert ret.shape == (4, 84, 84) return ret @property def is_terminal(self): if self.treat_life_lost_as_terminal: return self.lives_lost or self.ale.game_over() else: return self.ale.game_over() @property def reward(self): return self._reward @property def number_of_actions(self): return len(self.legal_actions) def receive_action(self, action): assert not self.is_terminal raw_reward = 0 for i in xrange(4): if self.ale.game_over(): break if i == 3: self.last_raw_screen = self.ale.getScreenRGB() raw_reward += self.ale.act(self.legal_actions[action]) self.last_screens.append(self.current_screen()) # Check lives are lost if self.lives > self.ale.lives(): self.lives_lost = True else: self.lives_lost = False self.lives = self.ale.lives() if raw_reward > 0: self._reward = 1 elif raw_reward < 0: self._reward = -1 else: self._reward = 0 return self._reward def initialize(self): if self.ale.game_over(): self.ale.reset_game() self._reward = 0 self.last_raw_screen = self.ale.getScreenRGB() self.last_screens = collections.deque( [self.current_screen()] * self.n_last_screens, maxlen=self.n_last_screens) self.lives_lost = False self.lives = self.ale.lives()
Python
0.000001
@@ -3488,16 +3488,17 @@ nge(4):%0A +%0A @@ -3505,54 +3505,69 @@ -if self.ale.game_over():%0A break +# Last screeen must be stored before executing the 4th action %0A @@ -3649,16 +3649,17 @@ enRGB()%0A +%0A @@ -3721,65 +3721,13 @@ n%5D)%0A +%0A - self.last_screens.append(self.current_screen())%0A%0A @@ -3738,16 +3738,19 @@ # Check +if lives ar @@ -3756,16 +3756,20 @@ re lost%0A + @@ -3806,32 +3806,36 @@ ():%0A + + self.lives_lost @@ -3841,32 +3841,36 @@ = True%0A + else:%0A @@ -3863,32 +3863,36 @@ se:%0A + + self.lives_lost @@ -3891,32 +3891,36 @@ es_lost = False%0A + self.liv @@ -3938,24 +3938,198 @@ le.lives()%0A%0A + if self.is_terminal:%0A break%0A%0A # We must have last screen here unless it's terminal%0A self.last_screens.append(self.current_screen())%0A%0A if r
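Reconstructed (whitespace approximated), the frame-skip loop in `receive_action` now updates the life counters inside the loop and bails out as soon as the state is terminal, instead of checking `game_over()` at the top of each iteration:

```python
        raw_reward = 0
        for i in xrange(4):

            # Last screen must be stored before executing the 4th action
            if i == 3:
                self.last_raw_screen = self.ale.getScreenRGB()

            raw_reward += self.ale.act(self.legal_actions[action])

            # Check if lives are lost
            if self.lives > self.ale.lives():
                self.lives_lost = True
            else:
                self.lives_lost = False
            self.lives = self.ale.lives()

            if self.is_terminal:
                break

        # We must have last screen here unless it's terminal
        self.last_screens.append(self.current_screen())
```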
5975f9e86951a751f2661a2571ee73a8fd94d142
make all file output using codecs(removes Ascii hack)
platsr2odyssey.py
platsr2odyssey.py
import json, urllib.request, re, html, codecs, sys def removeNonAscii(string): #Platsr.se does not return utf-8 encoded stuff so remove non supported Ascii(150) return ''.join(i for i in string if 150 != ord(i)) class Parse: platsrEndpoint = 'http://www.platsr.se/platsr/api/v1/' result = {} def __init__(self, id): collectionUrl = self.platsrEndpoint + 'collection/' + id Parse.result = self.parseCollection(self.call(collectionUrl)) def call(self, url): print('Hämtar: ' + url) return json.loads(urllib.request.urlopen(url).read().decode('utf-8')) def parseCollection(self, data): collection = {} collection['title'] = data['Name'] collection['description'] = data['Description'] if 'Image' in data.keys(): collection['image'] = self.parseImage(self.call(data['Image']['Href'])) else: collection['image'] = False collection['author'] = self.parseAuthor(self.call(data['CreatedBy']['Href'])) collection['places'] = [] for place in data['Places']: collection['places'].append(self.parsePlace(self.call(place['Href']))) return collection def parseAuthor(self, data): author = {} author['user'] = data['Username'] #TODO construct platsr profile link return author def parsePlace(self, data): place = {} place['title'] = data['Name'] place['description'] = data['Description'] coordinate = re.findall(r'([-]?[0-9]+\.[0-9]+)', data['GmlWGS84']) place['coordinate'] = {} place['coordinate']['lng'] = coordinate[1] place['coordinate']['lat'] = coordinate[0] place['author'] = self.parseAuthor(self.call(data['CreatedBy']['Href'])) if 'Stories' in data.keys(): place['stories'] = [] for story in data['Stories']: place['stories'].append(self.parseStory(self.call(story['Href']))) else: place['stories'] = False return place def parseImage(self, data): image = {} image['title'] = data['Name'] image['description'] = data['Description'] image['author'] = data['Upphovsman'] image['file'] = data['Url'] image['copyrigth'] = self.parseCopyrigth(self.call(data['Copyright']['Href'])) return image def parseStory(self, data): story = {} story['title'] = data['Name'] story['content'] = data['Description'] story['author'] = data['Upphovsman'] story['copyrigth'] = self.parseCopyrigth(self.call(data['Copyright']['Href'])) if 'Image' in data.keys(): story['image'] = self.parseImage(self.call(data['Image']['Href'])) else: story['image'] = False return story def parseCopyrigth(self, data): copyrigth = data['Name'] return copyrigth class OdysseyMarkdown: markdown = '' def __init__(self, data): self.config(data) for place in data['places']: self.place(place) def config(self, data): self.markdown = '```\n' + '-title: "' + data['title'] + '"\n-author: "' + data['author']['user'] + '"\n' + '```\n' def place(self, data): self.markdown += '#' + data['title'] + '\n```\n' + '- center: [' + data['coordinate']['lng'] + ', ' + data['coordinate']['lat'] + ']\n' + '- zoom: 15\n' + 'L.marker([' + data['coordinate']['lng'] + ', ' + data['coordinate']['lat'] + ']).actions.addRemove(S.map)\n```\n' self.markdown += '**' + data['description'] + '**\n' if data['stories'] != False: for story in data['stories']: self.story(story) def story(self, data): self.markdown += '##' + data['title'] + '\n' self.markdown += '*Av ' + data['author'] + ' \nCopyright: ' + data['copyrigth'] + '*\n\n' if data['image'] != False: self.image(data['image']) # There is probably more HTML tags that needs to be converted storyContent = data['content'].replace('<p>', '').replace('</p>', '\n\n') storyContent = storyContent.replace('<em>', '*').replace('</em>', '*') storyContent = 
storyContent.replace('<strong>', '**').replace('</strong>', '**') self.markdown += storyContent + '\n' def image(self, data): self.markdown += '![' + data['description'] + '](' + data['file'] + ')\n' self.markdown += '**' + data['title'] + '**\n' self.markdown += '*Upphovsman: ' + data['author'] + ' Copyright: ' + data['copyrigth'] + '*\n' Parse(sys.argv[1]) output = OdysseyMarkdown(Parse.result) outputFile = open('output/markdown.txt', 'w') outputFile.write(removeNonAscii(html.unescape(output.markdown))) odysseyHtml = open('template.html', 'r').read() odysseyHtml = odysseyHtml.replace('content=""', 'content="' + Parse.result['description'] + '"').replace('<script id="md_template" type="text/template"></script>', '<script id="md_template" type="text/template">' + removeNonAscii(html.unescape(output.markdown)) + '</script>') outputOdysseyFile = codecs.open('output/odyssey/index.html', 'w', 'utf-8') outputOdysseyFile.write(odysseyHtml) print('\nKlar')
Python
0
@@ -49,174 +49,8 @@ ys%0A%0A -def removeNonAscii(string):%0A #Platsr.se does not return utf-8 encoded stuff so remove non supported Ascii(150)%0A return ''.join(i for i in string if 150 != ord(i))%0A%0A clas @@ -4185,16 +4185,23 @@ tFile = +codecs. open('ou @@ -4223,16 +4223,25 @@ xt', 'w' +, 'utf-8' )%0Aoutput @@ -4251,31 +4251,16 @@ e.write( -removeNonAscii( html.une @@ -4282,17 +4282,16 @@ rkdown)) -) %0A%0Aodysse @@ -4551,23 +4551,8 @@ ' + -removeNonAscii( html @@ -4577,17 +4577,16 @@ arkdown) -) + '%3C/sc
91167b7595b8a6c62b6aba3f7089e0ca87c0c6f5
Allow adding new reports dynamically.
pledger/report.py
pledger/report.py
from pledger.value import ZERO
from pledger.util import struct, linearized, PrefixTree
from pledger.template import BalanceTemplate, RegisterTemplate
from pledger.ledger_processor import LedgerProcessor

class BalanceEntryProcessor(object):
    Entry = struct("level", "account", "amount")

    def __init__(self):
        self.sheet = { }
        self.total = ZERO

    def process_entry(self, transaction, entry):
        self.add_entry(entry.account, entry.amount)

    def add_entry(self, account, amount):
        self.sheet.setdefault(account, ZERO)
        self.sheet[account] += amount
        if account.parent:
            self.add_entry(account.parent, amount)
        else:
            self.total += amount

    def accounts(self):
        grouped = self.grouped_accounts(None, 0, sorted(self.sheet.keys()))
        root, items = grouped[0]
        return linearized(items)

    def grouped_accounts(self, root, level, accounts, prefix = ""):
        children = [account for account in accounts if account.parent == root]
        if len(children) == 1 and root and root.base_name and self.sheet[root] == self.sheet[children[0]]:
            return self.grouped_accounts(children[0], level, accounts, prefix + root.base_name + ":")
        else:
            result = [self.grouped_accounts(child, level + 1, accounts) for child in children]
            if root:
                return ((root, prefix + str(root.base_name), level), result)
            else:
                return result

    def post_process(self):
        pass

    @property
    def result(self):
        yield self.__class__.Entry(level=None, account=None, amount=self.total)
        for account, name, level in self.accounts():
            yield self.__class__.Entry(level=level, account=name, amount=self.sheet[account])

class RegisterEntryProcessor(object):
    class Entry(struct("transaction", "entry", "total")):
        @property
        def date(self):
            return self.entry.date(self.transaction)

    def __init__(self, sorting):
        self.unsorted_result = []
        self.total = ZERO
        self.sorting = sorting

    def process_entry(self, transaction, entry):
        e = RegisterEntryProcessor.Entry(
            transaction=transaction,
            entry=entry,
            total=ZERO)
        self.unsorted_result.append(e)

    def post_process(self):
        self.result = self.sorting(self.unsorted_result)
        total = ZERO
        for entry in self.result:
            total += entry.entry.amount
            entry.total = total

class Report(object):
    def __init__(self, ledger, rules, transaction_rules, filter, entry_processor, template):
        self.ledger_processor = LedgerProcessor(ledger, rules, transaction_rules)
        self.ledger_processor.add_listener(self)
        self.filter = filter
        self.entry_processor = entry_processor
        self.template = template

    def generate(self):
        self.ledger_processor.run()
        self.entry_processor.post_process()
        return self.result()

    def on_transaction(self, transaction, entries):
        for entry in entries:
            if self.filter(transaction, entry):
                self.entry_processor.process_entry(transaction, entry)

    def result(self):
        return self.entry_processor.result

    @classmethod
    def balance(cls, ledger, rules, transaction_rules, filter, sorting):
        return cls(ledger, rules, transaction_rules, filter, BalanceEntryProcessor(), template=BalanceTemplate())

    @classmethod
    def register(cls, ledger, rules, transaction_rules, filter, sorting):
        return cls(ledger, rules, transaction_rules, filter, RegisterEntryProcessor(sorting), template=RegisterTemplate())

class ReportRegistry(object):
    def __init__(self, reps):
        self.reps = reps
        self.prefix_tree = PrefixTree(reps.keys())

    def get(self, prefix):
        candidates = self.prefix_tree.from_prefix(prefix)
        if len(candidates) == 1:
            return self.reps[candidates[0]]

reports = ReportRegistry({
    "balance" : Report.balance,
    "register" : Report.register
})
Python
0
@@ -4215,16 +4215,285 @@ es%5B0%5D%5D%0A%0A + def __getitem__(self, name):%0A return self.reps%5Bname%5D%0A%0A def add(self, name, factory):%0A if name in self.reps:%0A raise Exception.new(%22Report %25s already exists%22 %25 name)%0A self.reps%5Bname%5D = factory%0A self.prefix_tree.insert(name)%0A%0A reports
3ce78ad88d8c963dfb819b323b40b2415d8624b2
Split create user feature into two functions
api.py
api.py
#!/usr/bin/env python
"""
Copyright 2016 Brian Quach
Licensed under MIT (https://github.com/brianquach/udacity-nano-fullstack-conference/blob/master/LICENSE)  # noqa
"""
import endpoints
from protorpc import remote

from resource import StringMessage
from resource import USER_REQUEST


@endpoints.api(name='poker', version='v1')
class FiveCardPokerAPI(remote.Service):
    """An API for a two-player five card poker game."""

    # Players are delt a five card hand and each player has one opportunity,
    # starting with player one, to replace up to 5 cards in their hand with new
    # cards from the top of the deck.

    # Once each player has finished replacing their cards, each hand is then
    # revealed. The player with the highest poker hand wins.

    # Username must be unique.

    # Code Citation:
    # https://github.com/udacity/FSND-P4-Design-A-Game/blob/master/Skeleton%20Project%20Guess-a-Number/api.py  # noqa
    @endpoints.method(
        request_message=USER_REQUEST,
        response_message=StringMessage,
        path='user/create',
        name='createUser',
        http_method='POST'
    )
    def create_user(self, request):
        """Create a User."""
        if User.query(User.name == request.user_name).get():
            raise endpoints.ConflictException(
                'A User with that name already exists!'
            )
        user = User(name=request.user_name, email=request.email)
        user.put()
        return StringMessage(message='User {} created!'.format(
            request.user_name))


api = endpoints.api_server([FiveCardPokerAPI])
Python
0
@@ -428,344 +428,370 @@ -# Players are delt a five card hand and each player has one opportunity,%0A # starting with player one, to replace up to 5 cards in their hand with new%0A # cards from the top of the deck.%0A%0A # Once each player has finished replacing their cards, each hand is then%0A # revealed. The player with the highest poker hand wins [email protected](%0A request_message=USER_REQUEST,%0A response_message=StringMessage,%0A path='user/create',%0A name='createUser',%0A http_method='POST'%0A )%0A def create_user(self, request):%0A %22%22%22Create a player.%22%22%22%0A return self._create_user(request)%0A%0A def _create_user(request):%0A %22%22%22Create a player .%0A%0A -# + Use @@ -817,17 +817,19 @@ e.%0A%0A -# + Code Ci @@ -840,17 +840,19 @@ on:%0A -# + htt @@ -968,254 +968,12 @@ [email protected](%0A request_message=USER_REQUEST,%0A response_message=StringMessage,%0A path='user/create',%0A name='createUser',%0A http_method='POST'%0A )%0A def create_user(self, request):%0A %22%22%22Create a User. + %22%22%22%0A
ce8ba26877505481795edd024c3859b14c548ffd
Refactor comics.py
plugins/comics.py
plugins/comics.py
# Copyright 2017 Starbot Discord Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json

from api import command, caching, message, plugin

def onInit(plugin_in):
    xkcd_command = command.command(plugin_in, 'xkcd', shortdesc='Posts the latest XKCD, or by specific ID')
    return plugin.plugin(plugin_in, 'comics', [xkcd_command])

async def onCommand(message_in):
    if message_in.command == 'xkcd':
        if message_in.body != '':
            try:
                if int(message_in.body) < 0:
                    return message.message(body="ID `{}` is not a valid ID".format(message_in.body))
            except:
                return message.message(body='Input of `{}` is not a valid number'.format(message_in.body))
            data = json.loads(caching.getJson("https://xkcd.com/{}/info.0.json".format(message_in.body.strip()), caller='xkcd', customName='{}.json'.format(message_in.body.strip())))
        else:
            data = json.loads(caching.getJson("https://xkcd.com/info.0.json", caller='xkcd', save=False))
        caching.downloadToCache(data['img'], '{}.png'.format(data['num']), caller='xkcd')
        return message.message(body='**{}/{}/{} - {}**\n_{}_'.format(data['month'], data['day'], data['year'], data['safe_title'], data['alt']), file='cache/xkcd_{}.png'.format(data['num']))
Python
0.000001
@@ -38,18 +38,17 @@ roject%0A# - %0A + # Lic @@ -224,17 +224,16 @@ nse at%0A# - %0A# @@ -278,17 +278,16 @@ SE-2.0%0A# - %0A# Un @@ -609,16 +609,44 @@ License. +%0A'''Get comics from xkcd.''' %0A%0Aimport @@ -651,17 +651,16 @@ rt json%0A -%0A from api @@ -722,24 +722,60 @@ plugin_in):%0A + '''List commands for plugin.'''%0A xkcd_com @@ -962,24 +962,55 @@ essage_in):%0A + '''Run plugin commands.'''%0A if messa @@ -1064,14 +1064,8 @@ body - != '' :%0A @@ -1243,16 +1243,27 @@ except + ValueError :%0A @@ -1961,16 +1961,85 @@ 'year'%5D, +%0A data%5B's @@ -2141,8 +2141,9 @@ 'num'%5D)) +%0A
05531d19b1f891774bff8946b4acf2718712a677
Add resizing and better highlight method
plugins/notify.py
plugins/notify.py
# coding: UTF-8
"""
Copyright (c) 2009 Michael Kuhn
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

1. Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""

import sushi

# tekka-specific
import config
import lib.gui_control as gui

import gobject
import gtk
import pynotify
import string

# FIXME configurable highlight words

plugin_info = (
	"Notifies on highlight.",
	"1.0",
	"Michael Kuhn"
)

class notify (sushi.Plugin):

	def __init__ (self):
		sushi.Plugin.__init__(self, "notify")

		pynotify.init("tekka")

		self.caps = pynotify.get_server_caps()

		try:
			self.pixbuf = gtk.gdk.pixbuf_new_from_file_at_scale(
				config.get("tekka", "status_icon"), 128, 128)
		except:
			self.pixbuf = None

		# FIXME
		self.connect_signal("message", self.message_cb)
		self.connect_signal("action", self.action_cb)

	def unload (self):
		self.disconnect_signal("message", self.message_cb)
		self.disconnect_signal("action", self.action_cb)

	def notify (self, subject, body):
		if gui.has_focus():
			return

		notification = pynotify.Notification(subject, body)

		if self.pixbuf:
			notification.set_icon_from_pixbuf(self.pixbuf)

		if "append" in self.caps:
			notification.set_hint_string("append", "allowed")
		if "x-canonical-append" in self.caps:
			notification.set_hint_string("x-canonical-append", "allowed")

		notification.show()

	def escape (self, message):
		# Bold
		message = message.replace(chr(2), "")
		# Underline
		message = message.replace(chr(31), "")

		message = gobject.markup_escape_text(message)

		return message

	def _has_highlight(text, needle):
		punctuation = string.punctuation + " \n\t"
		ln = len(needle)
		for line in text.split("\n"):
			i = line.find(needle)
			if i >= 0:
				if (line[i-1:i] in punctuation and line[ln+i:ln+i+1] in punctuation):
					return True
		return False

	def message_cb (self, timestamp, server, from_str, target, message):
		nick = from_str.split("!")[0]

		own_nick = self.get_nick(server)

		if own_nick:
			own_nick = own_nick.lower()

		if not own_nick:
			return
		elif own_nick == nick.lower():
			return

		if own_nick == target.lower():
			self.notify(nick, self.escape(message))
		elif _has_highlight(message, own_nick):
			self.notify(target, "&lt;%s&gt; %s" % (nick, self.escape(message)))

	def action_cb (self, time, server, from_str, target, action):
		nick = from_str.split("!")[0]

		own_nick = self.get_nick(server)

		if own_nick:
			own_nick = own_nick.lower()

		if not own_nick:
			return
		elif own_nick == nick.lower():
			return

		if own_nick == target.lower():
			self.notify(nick, self.escape(action))
		elif _has_highlight(action, own_nick):
			self.notify(target, "%s %s" % (nick, self.escape(action)))
Python
0.000001
@@ -1808,16 +1808,20 @@ %22), -128, 128 +64, 64, True )%0A%09%09 @@ -2709,16 +2709,22 @@ ghlight( +self, text, ne @@ -3309,32 +3309,37 @@ essage))%0A%09%09elif +self. _has_highlight(m @@ -3755,23 +3755,28 @@ ction))%0A - %09%09elif +self. _has_hig
318e6b5fd2382766c065574f6b202fd09e68cf6e
increment version #
pmagpy/version.py
pmagpy/version.py
""" Module contains current pmagpy version number. Version number is displayed by GUIs and used by setuptools to assign number to pmagpy/pmagpy-cli. """ "pmagpy-3.11.0" version = 'pmagpy-3.11.0'
Python
0.000001
@@ -160,17 +160,17 @@ py-3.11. -0 +1 %22%0Aversio @@ -190,7 +190,7 @@ .11. -0 +1 '%0A
dfd326611c99c3c517746f0a6f6eee5680afaefa
Add subversion status as number of changed files in cwd
powerline-bash.py
powerline-bash.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

import os
import subprocess
import sys

class Powerline:
    separator = '⮀'
    separator_thin="⮁"
    ESC = '\e'
    LSQ = '\['
    RSQ = '\]'
    clear_fg = LSQ + ESC + '[38;0m' + RSQ
    clear_bg = LSQ + ESC + '[48;0m' + RSQ
    reset = LSQ + ESC + '[0m' + RSQ

    def __init__(self):
        self.segments = []

    def append(self, content, fg, bg, separator=None, separator_fg=None):
        if separator == None:
            separator = self.separator
        if separator_fg == None:
            separator_fg = bg
        segment = {
            'content': str(content),
            'fg': str(fg),
            'bg': str(bg),
            'separator': str(separator),
            'separator_fg': str(separator_fg)
        }
        self.segments.append(segment)

    def color(self, prefix, code):
        return self.LSQ + self.ESC + '[' + prefix + ';5;' + code + 'm' + self.RSQ

    def fgcolor(self, code):
        return self.color('38', code)

    def bgcolor(self, code):
        return self.color('48', code)

    def draw(self):
        i=0
        line=''
        while i < len(self.segments)-1:
            s = self.segments[i]
            ns = self.segments[i+1]
            line += self.fgcolor(s['fg']) + self.bgcolor(s['bg']) + s['content']
            line += self.fgcolor(s['separator_fg']) + self.bgcolor(ns['bg']) + s['separator']
            i += 1
        s = self.segments[i]
        line += self.fgcolor(s['fg']) + self.bgcolor(s['bg']) + s['content']
        line += self.reset + self.fgcolor(s['separator_fg']) + s['separator'] + self.reset
        return line

def add_git_segment(powerline):
    try:
        cmd = "git branch 2> /dev/null | grep -e '\*'"
        p1 = subprocess.Popen(['git', 'branch'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        p2 = subprocess.Popen(['grep', '-e', '\*'], stdin=p1.stdout,
                              stdout=subprocess.PIPE)
        output = p2.communicate()[0].strip()
        if len(output) > 0:
            branch = output.rstrip()[2:]
            p.append(' ' + branch + ' ', 22, 148)
    except subprocess.CalledProcessError:
        pass

# Show working directory with fancy separators
def add_cwd_segment(powerline):
    #p.append(' \w ', 15, 237)
    home = os.getenv('HOME')
    cwd = os.getenv('PWD')
    if cwd.find(home) == 0:
        cwd = cwd.replace(home, '~', 1)
    if cwd[0] == '/':
        cwd = cwd[1:]
    names = cwd.split('/')
    for n in names[:-1]:
        powerline.append(' ' + n + ' ', 250, 237, Powerline.separator_thin, 244)
    powerline.append(' ' + names[-1] + ' ', 254, 237)

def add_root_indicator(powerline, error):
    bg = 236
    fg = 15
    if int(error) != 0:
        fg = 15
        bg = 161
    p.append(' \$ ', fg, bg)

if __name__ == '__main__':
    p = Powerline()
    p.append(' \u ', 250, 240)
    p.append(' \h ', 250, 238)
    add_cwd_segment(p)
    add_git_segment(p)
    add_root_indicator(p, sys.argv[1] if len(sys.argv) > 1 else 0)
    print p.draw(),
Python
0
@@ -1655,16 +1655,17 @@ +# cmd = %22g @@ -1703,16 +1703,16 @@ e '%5C*'%22%0A - @@ -1868,28 +1868,16 @@ .stdout, -%0A stdout= @@ -2107,16 +2107,1113 @@ pass%0A%0A +def add_svn_segment(powerline):%0A '''svn info:%0A First column: Says if item was added, deleted, or otherwise changed%0A ' ' no modifications%0A 'A' Added%0A 'C' Conflicted%0A 'D' Deleted%0A 'I' Ignored%0A 'M' Modified%0A 'R' Replaced%0A 'X' an unversioned directory created by an externals definition%0A '?' item is not under version control%0A '!' item is missing (removed by non-svn command) or incomplete%0A '~' versioned item obstructed by some item of a different kind%0A '''%0A #TODO: Color segment based on above status codes%0A try:%0A #cmd = '%22svn status %7C grep -c %22%5E%5BACDIMRX%5C!%5C~%5D%22'%0A p1 = subprocess.Popen(%5B'svn', 'status'%5D, stdout=subprocess.PIPE, stderr=subprocess.PIPE)%0A p2 = subprocess.Popen(%5B'grep', '-c', '%5E%5BACDIMRX%5C!%5C~%5D'%5D, stdin=p1.stdout, stdout=subprocess.PIPE)%0A output = p2.communicate()%5B0%5D.strip()%0A if len(output) %3E 0 and int(output) %3E 0:%0A changes = output.strip()%0A p.append(' ' + changes + ' ', 22, 148)%0A except subprocess.CalledProcessError:%0A pass%0A%0A # Show w @@ -3945,32 +3945,32 @@ _cwd_segment(p)%0A - add_git_segm @@ -3968,32 +3968,55 @@ _git_segment(p)%0A + add_svn_segment(p)%0A add_root_ind
7150413cccbf8f812b3fd4a1fc2b82a4020aed9f
fix heroku postgresql path to allow sqlalchemy upgrade
app.py
app.py
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_compress import Compress
from flask_debugtoolbar import DebugToolbarExtension
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
from sqlalchemy.pool import NullPool
import logging
import sys
import os
import requests

HEROKU_APP_NAME = "paperbuzz-api"

# set up logging
# see http://wiki.pylonshq.com/display/pylonscookbook/Alternative+logging+configuration
logging.basicConfig(
    stream=sys.stdout, level=logging.DEBUG, format="%(name)s - %(message)s"
)
logger = logging.getLogger("paperbuzz")

libraries_to_mum = [
    "requests.packages.urllib3",
    "requests_oauthlib",
    "stripe",
    "oauthlib",
    "boto",
    "newrelic",
    "RateLimiter",
]

for a_library in libraries_to_mum:
    the_logger = logging.getLogger(a_library)
    the_logger.setLevel(logging.WARNING)
    the_logger.propagate = True

requests.packages.urllib3.disable_warnings()

# error reporting with sentry
sentry_sdk.init(
    dsn=os.environ.get('SENTRY_DSN'),
    integrations=[FlaskIntegration()]
)

app = Flask(__name__)

# database stuff
app.config[
    "SQLALCHEMY_TRACK_MODIFICATIONS"
] = True  # as instructed, to suppress warning
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SQLALCHEMY_ECHO"] = os.getenv("SQLALCHEMY_ECHO", False) == "True"


# from http://stackoverflow.com/a/12417346/596939
class NullPoolSQLAlchemy(SQLAlchemy):
    def apply_driver_hacks(self, app, info, options):
        options["poolclass"] = NullPool
        return super(NullPoolSQLAlchemy, self).apply_driver_hacks(app, info, options)


db = NullPoolSQLAlchemy(app)

# do compression. has to be above flask debug toolbar so it can override this.
compress_json = os.getenv("COMPRESS_DEBUG", "False") == "True"

# set up Flask-DebugToolbar
if os.getenv("FLASK_DEBUG", False) == "True":
    logger.info("Setting app.debug=True; Flask-DebugToolbar will display")
    compress_json = False
    app.debug = True
    app.config["DEBUG"] = True
    app.config["DEBUG_TB_INTERCEPT_REDIRECTS"] = False
    app.config["SQLALCHEMY_RECORD_QUERIES"] = True
    app.config["SECRET_KEY"] = os.getenv("SECRET_KEY")
    toolbar = DebugToolbarExtension(app)

# gzip responses
Compress(app)
app.config["COMPRESS_DEBUG"] = compress_json
Python
0
@@ -1218,16 +1218,181 @@ warning%0A +db_uri = os.getenv(%22DATABASE_URL%22)%0Aif db_uri.startswith(%22postgres://%22):%0A db_uri = db_uri.replace(%22postgres://%22, %22postgresql://%22, 1) # temp heroku sqlalchemy fix%0A app.conf @@ -1427,33 +1427,14 @@ %5D = -os.getenv(%22DATABASE_URL%22) +db_uri %0Aapp
9c8f9bec47ebccca95b226c300709be219e4edf2
Bump version.
app.py
app.py
#!/usr/bin/python
# coding: utf-8

"""This app contains endpoints to calculate shared Steam games."""

import os

import requests
from flask import Flask
from flask import g
from flask import jsonify
from flask import render_template
from flask import request as flask_request

__version__ = "1.3.0"


def get_steam_api_key():
    """Get the API key either from OS keyring or from env variable.

    The latter overwrites the first.
    """
    key = None
    try:
        import keyring
        key = keyring.get_password("steamwhat", "api_key")
    except ImportError:
        pass
    key = os.environ.get("STEAM_API_KEY", key)
    if key is not None:
        return key
    raise RuntimeError("Must configure a Steam API Key")


STEAM_API_KEY = get_steam_api_key()

# See: http://steamwebapi.azurewebsites.net/
endpoints = {
    "get_owned_games": {
        "url": "https://api.steampowered.com/IPlayerService/GetOwnedGames/v1",
        "params": [
            "key",
            "steamid",
        ],
    },
    "get_app_list": {
        "url": "http://api.steampowered.com/ISteamApps/GetAppList/v2",
    },
    "get_player_summaries": {
        "url": "https://api.steampowered.com/ISteamUser/GetPlayerSummaries/v2",
        "params": [
            "key",
            "steamids",
        ],
    },
}


def get_player_summaries(steamids):
    """Return details for multiple players."""
    params = {
        "key": STEAM_API_KEY,
        "steamids": ",".join([str(steamid) for steamid in steamids]),
    }
    response = requests.get(endpoints["get_player_summaries"]["url"], params=params)
    return response.json()["response"]["players"]


def get_appid_to_name_map():
    """Return a map from app id to app name for all apps on Steam."""
    response = requests.get(endpoints["get_app_list"]["url"])
    apps = response.json()["applist"]["apps"]
    appid_to_name = {}
    for app in apps:
        appid_to_name[app["appid"]] = app["name"]
    return appid_to_name


def get_games(steamid):
    """Return owned games for player."""
    params = {
        "key": STEAM_API_KEY,
        "steamid": steamid,
    }
    response = requests.get(endpoints["get_owned_games"]["url"], params=params)
    games = response.json()["response"]["games"]
    return games


def get_player_by_steamid(steamid):
    """Return details on the player assocated with the steam id."""
    for player in g.player_summaries:
        if player["steamid"] == str(steamid):
            return player
    return None


def get_player_reports(steamids):
    """Return a list of player reports.

    Each report contains the player name and a list of owned app ids.
    If a steam id does not result in a player report, ignore it.
    """
    player_reports = []
    for steamid in steamids:
        try:
            raw_games = get_games(steamid)
            appids = set([raw_game["appid"] for raw_game in raw_games])
            player = get_player_by_steamid(steamid)
            player_report = {
                "steamid": steamid,
                "appids": appids,
                "name": player["personaname"],
            }
            player_reports.append(player_report)
        except ValueError:
            pass
    return player_reports


def get_shared_games_report(steamids):
    """Return a JSON containing identified players and a list shared games."""
    # Prepare some lookups shared over the request lifetime.
    g.player_summaries = get_player_summaries(steamids)
    g.appid_to_name = get_appid_to_name_map()

    player_reports = get_player_reports(steamids)
    player_appids = [set(player["appids"]) for player in player_reports]
    shared_appids = set.intersection(*player_appids)

    players = []
    for report in player_reports:
        report.pop("appids")
        players.append(report)
    players.sort(key=lambda player: player["name"].lower())

    shared_games = []
    for appid in shared_appids:
        shared_games.append({
            "name": g.appid_to_name[appid],
            "appid": appid
        })
    shared_games.sort(key=lambda game: game["name"].lower())

    return jsonify(players=players, shared_games=shared_games)


def parse_steamids_from_query():
    raw_steamids = flask_request.args.get("steamids")
    if raw_steamids is None:
        raise ValueError("ERROR: No steam ids specified")
    try:
        steamids = [str(raw).strip() for raw in raw_steamids.split(",")]
        steamids = list(set(steamids))
    except ValueError:
        raise ValueError("ERROR: Steam ids are malformed")
    return steamids


app = Flask(__name__)


@app.route('/players')
def players():
    """Get a list of Steam IDs and return a list of players.

    Each player has a steam ID and a name. If a steam ID fails being
    looked up, ignore it and continue fetching other players.

    Query Params:
        steamids (str): Comma-separated list of steam ids, each identifying
            a player.

    Returns:
        json

    Example use:
        /?steamids=12345,6789

    Example result:
        [
            {"steamid": 12345, "name": "spamlord84"},
            ...
        ]
    """
    steamids = parse_steamids_from_query()
    try:
        g.player_summaries = get_player_summaries(steamids)
        players = []
        for steamid in steamids:
            try:
                player = get_player_by_steamid(steamid)
                if player is not None:
                    players.append({
                        "name": player["personaname"],
                        "steamid": player["steamid"],
                    })
            except Exception:
                pass
    except Exception as err:
        return "ERROR: " + str(err), 500
    return jsonify(players)


@app.route('/sharedgames')
def shared_games_report():
    """Return which games are shared between the players.

    Query Params:
        steamids (str): Comma-separated list of steam ids, each identifying
            a player.

    Returns:
        json

    Example use:
        /?steamids=12345,6789

    Example result:
        {
            "players": [
                {"name": "Gabe", "steamid": 123},
                {"name": "John", "steamid": 456},
                ...
            ],
            "shared_games": [
                {"name": "Half Life", "appid": 10},
                {"name": "Age of Empires II", "appid": 642},
                ...
            ],
        }
    """
    steamids = parse_steamids_from_query()
    try:
        result = get_shared_games_report(steamids)
    except Exception as err:
        return "ERROR: " + str(err), 500
    return result


@app.route('/')
def client():
    return render_template("client.html")


if __name__ == "__main__":
    app.run(debug=True)
Python
0
@@ -289,17 +289,17 @@ __ = %221. -3 +4 .0%22%0A%0A%0Ade
b8bf769d55a61f9d29b15c4b657d9df293045304
convert to int
app.py
app.py
#!/usr/bin/env python
from flask import Flask, render_template, Response, request
from i2c import I2C
from camera_pi import Camera

RobotArduino = I2C(); #The robot's arduino controller

app = Flask(__name__, template_folder='site')

@app.route('/')
def index():
    """Main page: controller + video stream"""
    return render_template('index.html')

@app.route('/action', methods=['POST'])
def action():
    """Handle button presses - Send commands to the robot"""
    val = request.form.get('command')
    print("Sending ["+str(val)+"] To Arduino")
    RobotArduino.writeNumber(val)

    return ('',204) #no response

def gen(camera):
    """Video streaming generator function."""
    while True:
        frame = camera.get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

@app.route('/video_feed')
def video_feed():
    """Video streaming route. Put this in the src attribute of an img tag."""
    return Response(gen(Camera()),
                    mimetype='multipart/x-mixed-replace; boundary=frame')

if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True, threaded=True, port=8000)
Python
1
@@ -498,16 +498,17 @@ mmand')%0A +%0A prin @@ -575,20 +575,25 @@ eNumber( +int( val) +) %0A%0A re
0a3fd9d1a14e009215e2f1cfb76fe3989f00c30a
add route for events/<event-id>
app.py
app.py
#!/usr/bin/python
from flask import Flask, url_for, request, session, redirect
from flask.ext.restful import Resource, Api

from api.artists import Artists
from api.art import Art
from api.artList import ArtList
from api.artImage import get_image
from api.venues import Venues,VenueList
from api.events import Event,EventList
from api.manage import ManageEvent, ManageVenue, ManagePerson
from api.staff import Staff,StaffList
from api.profile import Profile
from api.register import Register
from api.artistlist import ArtistList
from flask import render_template

import logging
from logging.handlers import RotatingFileHandler

import os, sys
sys.path.append(os.getcwd())

from flask import Flask
from db import mongo
from mail import mail
from conf import BaseConfig


def create_app(name,config=BaseConfig):
    app = Flask(name,static_folder="./upload")
    app.name = "artFlask"
    app.config.from_object(config)
    configure_logger(app)
    configure_routes(app)
    configure_extensions(app)
    return app


def configure_extensions(app):
    mongo.init_app(app)
    mail.init_app(app)


def configure_logger(app):
    MAX_CONTENT_LENGTH = 4 * 1024 * 1024  # 4MB max upload size
    app.config['MAX_CONTENT_LENGTH'] = MAX_CONTENT_LENGTH
    app.secret_key = 'z\xcbu\xe5#\xf2U\xe5\xc4,\x0cz\xf9\xcboA\xd2Z\xf7Y\x15"|\xe4'
    log = logging.getLogger('werkzeug')
    if app.config.has_key('LOGGING_FILE'):
        handler = RotatingFileHandler(app.config['LOGGING_FILE'], maxBytes=10000000, backupCount=5)
    else:
        handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s %(message)s')
    handler.setFormatter(formatter)
    app.logger.setLevel(logging.INFO)
    log.addHandler(handler)


def configure_routes(app):
    @app.after_request
    def after_request(response):
        response.headers.add('Access-Control-Allow-Origin', '*')
        return response

    @app.route('/')
    def index():
        ## TESTING
        print "**********\n", mongo.db, "\n**********\n"
        return render_template('index.html')

    # todo: Should route for OpenID Connect Authn
    @app.route('/login', methods=['GET', 'POST'])
    def login():
        if request.method == 'POST':
            session['username'] = request.form['username']
            return redirect(url_for('index'))
        else:
            return render_template('login.html')

    @app.route('/api/v1/art/<string:art_id>/<string:action_type>',methods=['GET'])
    def render_image(art_id,action_type):
        return get_image(art_id,action_type)

    # todo: Should route for OpenID Connect Logout
    @app.route('/logout')
    def logout():
        # remove the username from the session if it's there
        session.pop('username', None)
        return redirect(url_for('index'))

    api = Api(app)
    api.add_resource(Art,'/api/v1/art/<string:art_id>/')
    # api.add_resource(ArtImage,'/api/v1/art/<string:art_id>/<string:action_type>')
    api.add_resource(ArtList,'/api/v1/art/')
    api.add_resource(Artists, '/api/v1/artists/<string:artist_id>')
    api.add_resource(ArtistList, '/api/v1/artists/')
    api.add_resource(Register, '/api/v1/register/<string:token>','/api/v1/register/')
    api.add_resource(EventList, '/api/v1/events/')
    api.add_resource(Event, '/api/v1/event/<string:event_id>')
    api.add_resource(VenueList, '/api/v1/venues')
    api.add_resource(Venues, '/api/v1/venues/<string:venue_id>')
    api.add_resource(Profile, '/api/v1/profile')
    api.add_resource(StaffList, '/api/v1/staff/')
    api.add_resource(Staff, '/api/v1/staff/<string:person_id>')
    api.add_resource(ManageEvent, '/api/v1/manage/event','/api/v1/manage/event/<string:event_id>')
    api.add_resource(ManageVenue, '/api/v1/manage/venue','/api/v1/manage/venue/<string:venue_id>')
    api.add_resource(ManagePerson,
                     '/api/v1/manage/person','/api/v1/manage/person/<string:person_id>')


if __name__ == '__main__':
    app = create_app(__name__)
    app.run(host='0.0.0.0',debug=True)
Python
0
@@ -3406,24 +3406,88 @@ 1/events/')%0A + api.add_resource(Event, '/api/v1/events/%3Cstring:event_id%3E')%0A api.add_
dd1948d9d9d057093fafc633b72fea73c92b3adc
Remove srcset filter for updating image links
app.py
app.py
from __future__ import print_function
from flask import Flask, request, render_template, send_from_directory, url_for, redirect
from mercury_parser import ParserAPI
from urlparse import urljoin
import validators
import urllib
import os
import sys
from bs4 import BeautifulSoup
from config import MERCURY_API_KEY, DO_NOT_REDIRECT, FALLBACK_REDIRECT_URL

# initialization
app = Flask(__name__)
app.config.update(DEBUG=True)


# functions
def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, **kwargs)


def get_remote_data(url):
    ''' fetch website data '''
    mercury = ParserAPI(api_key=MERCURY_API_KEY)
    return mercury.parse(url)


def strip_redirects(page_url):
    ''' strip out any redirects (adverts/analytics/etc) and get final link url '''
    t = page_url.lower().replace('%3a', ':').replace('%2f', '/')
    i = t.rfind('http')
    if (i > 0):
        t = t[i:]
    j = t.find('&')
    if (j > 0):
        t = t[:j]
    return t


def build_link_url(page_url, page_theme, page_links):
    u = strip_redirects(page_url)

    if any(x in page_url for x in DO_NOT_REDIRECT) or \
            page_links == 'original':
        link = u
    else:
        params = {'url': u}

        # build optional parameter list
        if page_theme:
            params['theme'] = page_theme
        if page_links:
            params['links'] = page_links

        link = urljoin(request.url_root, url_for('main', **params))
    return link


def build_img_url(img_url):
    ''' take first image if srcset specified (Mercury screws it up)
    e.g. <img src="http://... .jpg%201024w,%20http://...
    '''
    t = img_url
    i = img_url.find(',')
    if (i > 0):
        t = t[:i]
    j = t.rfind('%')
    if (j > 0):
        t = t[:j]
    return t


def update_links(content, page_theme, page_links):
    ''' update image and outgoing links to pass through this site '''
    soup = BeautifulSoup(content, 'lxml')

    for h in soup.findAll('a', href=True):
        h['href'] = build_link_url(h['href'], page_theme, page_links)

    for i in soup.findAll('img', srcset=True):
        i['src'] = build_img_url(i['src'])

    return soup.prettify(formatter="html").strip()


# controllers
@app.route('/favicon.ico')
def favicon():
    return send_from_directory(os.path.join(app.root_path, 'static'),
                               'ico/favicon.ico')


@app.route("/")
def main():
    # variables
    page_url = ""

    # parse query string parameters
    paramTheme = request.args.get('theme')
    page_theme = 'dark' if paramTheme and paramTheme.lower() == 'dark' else ''

    paramLinks = request.args.get('links')
    page_links = 'original' if paramLinks and paramLinks.lower() == 'original' else ''

    paramUrl = request.args.get('url')
    if paramUrl:
        url = urllib.unquote(paramUrl).strip().replace(' ', '%20')
        if validators.url(url):
            # get page content
            try:
                data = get_remote_data(url)
                if data.url:
                    page_title = data.title
                    page_content = update_links(data.content, page_theme, page_links)
                    page_url = data.url
                else:
                    # parser is unavailable
                    eprint("Unexpected Error: ", url, data)
                    return redirect(FALLBACK_REDIRECT_URL + url)
            except:
                eprint("Unexpected Error: ", sys.exc_info()[0])
                return redirect(FALLBACK_REDIRECT_URL + url)
                # raise
        else:
            page_title = 'Invalid URL'
            page_content = 'Check URL and try again.'
    else:
        page_title = 'Enter URL to get started'
        page_content = '<a href="https://github.com/irfancharania/readability-wrapper">source</a>'

    return render_template('index.html',
                           title=page_title,
                           content=page_content,
                           url=page_url,
                           theme=page_theme,
                           links=page_links)


# launch
if __name__ == "__main__":
    port = int(os.environ.get("PORT", 5000))
    app.run(host='0.0.0.0', port=port)
Python
0
@@ -2078,16 +2078,79 @@ links)%0A%0A + ''' removing srcset=True filter to catch lxml screwups '''%0A for @@ -2176,21 +2176,8 @@ img' -, srcset=True ):%0A
c432ae2dc25b2af77b3f57d610b111a24919d987
Add collision detection on paste creation
app.py
app.py
__author__ = 'zifnab'

from flask import Flask, redirect, request, render_template, flash, current_app as app, abort
from mongoengine import connect
from flask_admin import Admin
from flask_debugtoolbar import DebugToolbarExtension
from flask_login import LoginManager, login_user, login_required, logout_user, current_user
from flask_wtf import Form
from wtforms.fields import *
from wtforms.validators import *
from passlib.hash import sha512_crypt
from datetime import datetime, timedelta, date
import database
import arrow
from util import random_string

app = Flask(__name__)

with app.app_context():
    import auth
    from config import local_config
    app.config.from_object(local_config)
    db = connect('zifbin')
    admin = Admin(app)
    import admin
    toolbar = DebugToolbarExtension(app)


class PasteForm(Form):
    text = TextAreaField('Paste Here', validators=[Required()])
    expiration = SelectField('Expiration', choices=[('0', 'Expires Never'),
                                                    ('1', 'Expires In Fifteen Minutes'),
                                                    ('2', 'Expires In Thirty Minutes'),
                                                    ('3', 'Expires In One Hour'),
                                                    ('4', 'Expires In Six Hours'),
                                                    ('5', 'Expires In One Day')], default='3')


@app.route('/', methods=('POST', 'GET'))
@app.route('/new', methods=('POST', 'GET'))
def main():
    form = PasteForm(request.form)
    if form.validate_on_submit():
        times = {
            '0':None,
            '1':{'minutes':+15},
            '2':{'minutes':+30},
            '3':{'hours':+1},
            '4':{'hours':+6},
            '5':{'days':+1}
        }
        paste = database.Paste(paste=form.text.data)
        if (current_user.is_authenticated()):
            paste.user = current_user.to_dbref()
        paste.name = random_string()
        if times.get(form.expiration.data) is not None:
            paste.expire = arrow.utcnow().replace(**times.get(form.expiration.data)).datetime
        paste.save()
        return redirect('/{id}'.format(id=paste.name))
    return render_template('new_paste.html', form=form)


@app.route('/<string:id>')
def get(id):
    paste = database.Paste.objects(name__exact=id).first()
    if paste is None:
        abort(404)
    elif paste.expire is not None and arrow.get(paste.expire) < arrow.utcnow():
        abort(404)
    else:
        return render_template("paste.html", paste=paste, title=paste.id)

app.debug = app.config['DEBUG']


def run():
    app.run(
        host=app.config.get('HOST', None),
        port=app.config.get('PORT', None)
    )
Python
0
@@ -2062,16 +2062,284 @@ tring()%0A +%0A collision_check = database.Paste.objects(name__exact=paste.name).first()%0A while collision_check is not None:%0A paste.name = random_string()%0A collision_check = database.Paste.objects(name__exact=paste.name).first()%0A%0A
3786d74b86dcde49243dfe9f85559fbcf49cbb85
Fix bug in game init method
app.py
app.py
import json
import os

import webapp2
from webapp2_extras import jinja2

import tokens
from models import user_repo, game_repo
from game import Game, GAME_STATUS


class BaseHandler(webapp2.RequestHandler):
    @webapp2.cached_property
    def jinja2(self):
        return jinja2.get_jinja2(app=self.app)

    def render_template(self, filename, **template_args):
        self.response.write(self.jinja2.render_template(filename, **template_args))

    def validate_json_fields(self, fields, json_object):
        """ Return 422 is not all fields are present in a JSON object. """
        messages = []
        for field in fields:
            if field not in json_object:
                messages.append('Field [{}] is required<br />'.format(field))
        if messages:
            webapp2.abort(422, detail='\n'.join(messages))

    def authenticate(self):
        """ Return 401 if authorization fails. """
        username = self.request.headers.get('Username', None)
        access_token = self.request.headers.get('Access-Token', None)
        print(username, access_token)
        db_user = user_repo.find_by_username(username)
        if username is None or access_token is None or db_user is None or db_user.access_token != access_token:
            webapp2.abort(401, 'Authentication failed, please verify Username and Access-Token headers')
        return username


class IndexHandler(BaseHandler):
    def get(self):
        self.render_template('index.html', name=self.request.get('name'))


class RegistrationHandler(BaseHandler):
    def post(self):
        """ Generates and returns an access token for a POSTed username. """
        json_object = json.loads(self.request.body)
        self.validate_json_fields(['username'], json_object)
        posted_username = json_object['username']
        existing_user = user_repo.find_by_username(posted_username)
        if existing_user is None:
            user = user_repo.create(username=posted_username)
            content = {
                'message': 'Registration succeeded',
                'username': user.username,
                'access_token': user.access_token,
            }
        else:
            content = {
                'username': posted_username,
                'message': 'Registration failed: username already exists',
                'access_token': None,
            }
        self.response.content_type = 'application/json'
        self.response.write(json.dumps(content))


class CreateGameHandler(BaseHandler):
    def post(self):
        username = self.authenticate()  # TODO: @authenticate
        json_object = json.loads(self.request.body)
        # TODO: Players optional, allows for open game lobby or bots
        self.validate_json_fields(['players'], json_object)
        players = json_object['players']
        map = json_object.get('map', 'default')
        game = Game(players=players, map=map)
        game_model = game_repo.persist(game)
        content = {
            'id': game_model.key.id(),
            'name': game_model.name,
            'players': game_model.players,
            'map_name': game_model.map,
            'created': game_model.created.isoformat(),
            'message': 'Game creation succeeded',
        }
        self.response.content_type = 'application/json'
        self.response.write(json.dumps(content))


class PlayGameHandler(BaseHandler):
    def post(self):
        username = self.authenticate()  # TODO: @authenticate
        json_object = json.loads(self.request.body)
        self.validate_json_fields(['game_id', 'message'], json_object)

        game_id = json_object['game_id']
        game_model = game_repo.find_by_id(game_id)
        if not game_model:
            webapp2.abort(404, 'Could not find game for game_id [{}]'.format(game_id))
        else:
            print('Game id [{}] found'.format(game_model.key.id()))

        message = json_object['message']
        if message not in ['join', 'move']:
            webapp2.abort(422, 'Invalid message type [{}]. Must be "join" or "move".'.format(message))

        game = Game().load_from_model(game_model)

        content = {  # Start building response content
            'game_id': game_model.key.id()
        }

        if game.status == GAME_STATUS.lobby:
            if message == 'join':  # Ignore all other messages
                game.queue.set_status(username, 'joined')
                if game.queue.is_complete():
                    game.status == GAME_STATUS.playing
                    content['message'] = 'Game started'
                else:
                    content['message'] = 'Waiting for players {}'.format(', '.join(game.queue.not_joined))
        elif game.status == GAME_STATUS.playing:
            if game.queue.is_turn(username):
                content['message'] = 'Move successful'
                # And the player sent a move:
                # update the game
            else:
                content['message'] = 'Not your turn.'
                pass

        self.response.content_type = 'application/json'
        self.response.write(json.dumps(content))


app = webapp2.WSGIApplication([
    webapp2.Route('/', handler=IndexHandler, name='home', methods=['GET']),
    webapp2.Route('/users/register', handler=RegistrationHandler, name='registration', methods=['POST']),
    webapp2.Route('/games/create', handler=CreateGameHandler, name='games_create', methods=['POST']),
    webapp2.Route('/games/play', handler=PlayGameHandler, name='games_play', methods=['POST']),
], debug=True)
Python
0.000001
@@ -2901,16 +2901,21 @@ ers, map +_name =map)%0A
038196b5bc478ff561b6f8031ecbcb37a765ba3e
Change to be more pep8 compliant.
bot.py
bot.py
import praw
import urllib
import cv2, numpy as np
from PIL import Image
import time
import getpass
import re

# Eye Classifier
eyeData = "xml/eyes.xml"
eyeClass = cv2.CascadeClassifier(eyeData)

# Glasses Asset
glasses = cv2.imread('assets/glasses.png', cv2.IMREAD_UNCHANGED)
ratio = glasses.shape[1] / glasses.shape[0]

# How much we are going to downscale image while processing it.
DOWNSCALE = 4

foundImage = False

# List of posts already processed.
already_done = []

# Super secret Reddit password.
password = getpass.getpass("Reddit password: ")

def process_image(url, frame, eyeList):
    for eye in eyeList:
        x, y, w, h = [v * DOWNSCALE for v in eye]
        h = w / ratio
        y += h / 2
        # resize glasses to a new var called small glasses
        smallglasses = cv2.resize(glasses, (w, h))
        # the area you want to change
        bg = frame[y:y+h, x:x+w]
        bg *= np.atleast_3d(255 - smallglasses[:, :, 3])/255.0
        bg += smallglasses[:, :, 0:3] * np.atleast_3d(smallglasses[:, :, 3])
        # put the changed image back into the scene
        frame[y:y+h, x:x+w] = bg
    print("Found image. Writing image.")
    cv2.imwrite(url, frame)

while True:
    foundImage = False
    r = praw.Reddit('/u/powderblock Glasses Bot')
    r.login('DealWithItbot', password)

    for post in r.get_subreddit('all').get_new(limit=20):
        if post not in already_done:
            already_done.append(post)
            if "imgur.com" in post.url and (".jpg" in post.url or ".png" in post.url):
                print(post.url)
                response = urllib.urlopen(str(post.url))

                # load the image we want to detect features on
                # Convert rawImage to Mat
                filearray = np.asarray(bytearray(response.read()), dtype=np.uint8)
                frame = cv2.imdecode(filearray, cv2.CV_LOAD_IMAGE_UNCHANGED)

                if frame is None or frame.size is None:
                    print("Error, couldn't load image, skipping.")
                    # Skip to next image
                    continue

                if frame.shape[0] > 5000 or frame.shape[1] > 5000:
                    print("Image is too large, skipping.")
                    continue

                if frame.shape[0] == 0 or frame.shape[1] == 0:
                    print("Image has a width or height of 0, skipping.")
                    continue

                minisize = (frame.shape[1]/DOWNSCALE,frame.shape[0]/DOWNSCALE)
                miniframe = cv2.resize(frame, minisize)
                eyes = eyeClass.detectMultiScale(miniframe)

                if len(eyes) > 0:
                    print(str(post.url))
                    foundImage = True
                    process_image(str(post.url), frame, eyes)

    if not foundImage:
        print("No image with eyes found.")
    time.sleep(30)
Python
0
@@ -29,17 +29,23 @@ port cv2 -, +%0Aimport numpy a @@ -551,16 +551,17 @@ rd: %22)%0A%0A +%0A def proc
f67807b0f2064e1c6374fe4c10ed87c7a9222426
mark all events after processing it
bot.py
bot.py
#! /usr/bin/env python
from time import gmtime, strftime
from foaas import foaas
from diaspy_client import Client

import re
import urllib2

def log_write(text):
  f = open('bot.log', 'a')
  f.write(strftime("%a, %d %b %Y %H:%M:%S ", gmtime()))
  f.write(text)
  f.write('\n')
  f.close()

client=Client()

notify = client.notifications()
for n in notify:
  if not n.unread: continue
  m = re.search('\shas\smentioned.+post\s([^\/]+)\s(.+)\.+$', str(n))
  try:
    if hasattr(m, 'group'):
      command = m.group(2).replace(' ', '__')
      client.post(foaas(command))

    # finally mark as read
    n.mark()
  except urllib2.URLError:
    log_write("ERROR: "+str(n))
Python
0
@@ -565,49 +565,8 @@ nd)) -%0A%0A # finally mark as read%0A n.mark() %0A e @@ -613,16 +613,53 @@ RROR: %22+str(n))%0A +%0A # finally mark as read%0A n.mark()%0A
d3409629c120e366c9c7500bc111f61b13e74dc8
Change port.
bot.py
bot.py
import os
import json

import requests
from flask import Flask
from flask_restful import Resource, Api, reqparse
from slackclient import SlackClient

app = Flask(__name__)
api = Api(app)

token = os.environ.get('SLACK_KEY')
sc = SlackClient(token)
print sc.api_call('api.test')


class RealName(Resource):
    def user_ids(self):
        r = requests.get(
            'https://slack.com/api/groups.list?token={}'.format(token))
        content = r.json()
        return content.get('groups')[0].get('members')

    def get_username(self, ids):
        r = requests.get(
            'https://slack.com/api/users.list?token={}'.format(token))
        content = r.json().get('members')
        names = []
        for id in ids:
            for user in content:
                if id == user.get('id') and not user.get('deleted') and not user.get('is_bot'):  # noqa
                    names.append(
                        {
                            'id': id,
                            'name': user.get('real_name'),
                            'images': user.get('profile').get('image_48')
                        }
                    )
        return names

    def get(self):
        # return real_name from user id info from slack
        ids = self.user_ids()
        output = self.get_username(ids)
        return output

api.add_resource(RealName, '/names')


class PostDM(Resource):
    def post(self):
        # expect user_id and message data from the client
        parser = reqparse.RequestParser()
        parser.add_argument('user_id')
        parser.add_argument('visitor_name')

        # assign data from request to variables
        args = parser.parse_args()
        user_id = args.get('user_id')
        visitor_name = args.get('visitor_name')

        if visitor_name:
            message = 'You have a visitor called {} at the gate.'.format(
                visitor_name)
        else:
            message = 'Hi! You have a visitor waiting for you.'

        # returns a string - to be converted to dict later. Then retrieve
        # channel ID
        string_resp = sc.api_call('im.open', user=user_id)
        dict_resp = json.loads(string_resp)
        channelID = dict_resp.get('channel').get('id')

        sc.api_call(
            'chat.postMessage',
            as_user='true:',
            channel=channelID,
            text=message
        )
        return {'message': 'Notification sent'}, 200

api.add_resource(PostDM, '/send')

if __name__ == '__main__':
    app.run(debug=True)
Python
0
@@ -2495,24 +2495,84 @@ -app.run(debug=True +port = int(os.environ.get('PORT', 5000))%0A app.run(host='0.0.0.0', port=port )%0A
aa9d068095365dbc65fb3b8439df4fe59a8824c5
Remove extraneous code
bot.py
bot.py
import socket
import ssl
import sys

import commands
import config
import log
import utils
import zirc


class Bot(zirc.Client):

    def __init__(self):
        self.userdb = {}

        # zIRC
        self.connection = zirc.Socket(family=socket.AF_INET6,
                                      wrapper=ssl.wrap_socket)
        self.config = zirc.IRCConfig(host="chat.freenode.net",
                                     port=6697,
                                     nickname="zIRCBot2",
                                     ident="zirc",
                                     realname="A zIRC bot",
                                     channels=config.channels,
                                     caps=config.caps)
        self.ctcp = {
            'VERSION': utils.version,
            'TIME': __import__('time').localtime(),
            'FINGER': "Don't finger me"
            'USERINFO': 'An IRC bot built using zIRC on Python',
            'SOURCE': 'https://github.com/wolfy1339/Python-IRC-Bot'
        }

        self.connect(self.config)
        self.start()

    # Non-numeric events

    @staticmethod
    def on_all(event, irc):
        if event.raw.startswith("ERROR"):
            log.error(" ".join(event.arguments))
        else:
            if event.raw.find("%") == -1:
                log.debug(event.raw)

    @staticmethod
    def on_ctcp(irc, event, raw):
        log.info("Received CTCP reply " + raw)
        if event.arguments == 'VERSION':
            sysver = "".join(__import__("sys").version.split("\n"))
            gitver = __import__("subprocess").check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().split()[0]
            messsage = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1", gitver, sysver)
            irc.notice(event.source.nick, "\x01" + message + "\x01")

    def on_privmsg(self, event, irc, arguments):
        if " ".join(arguments).startswith(config.commandChar):
            utils.call_command(self, event, irc, arguments)

    @staticmethod
    def on_send(data):
        if data.find("%") == -1:
            log.debug(data)

    def on_quit(self, event, irc):
        nick = event.source.nick
        if nick == self.config['nickname']:
            sys.exit(1)
        else:
            try:
                self.userdb[event.target].pop(nick)
            except KeyError:
                for c in self.userdb:
                    for i in c:
                        if i['host'] == event.source.host:
                            self.userdb[c].pop(i['hostmask'].split("!")[0])

    def on_kick(self, event, irc):
        nick = event.raw.split(" ")[3]
        if nick == self.config['nickname']:
            log.warning("Kicked from %s, trying to re-join", event.target)
            irc.join(event.target)
        else:
            try:
                self.userdb[event.target].pop(nick)
            except KeyError:
                for i in self.userdb[event.target]:
                    if i['host'] == event.source.host:
                        self.userdb[event.target].pop(i['hostmask'].split("!")[0])

    def on_part(self, event, irc):
        requested = "".join(event.arguments).startswith("requested")
        if event.source.nick == self.config['nickname'] and requested:
            log.warning("Removed from %s, trying to re-join", event.target)
            irc.join(event.target)
        else:
            try:
                self.userdb[event.target].pop(event.source.nick)
            except KeyError:
                for i in self.userdb[event.target]:
                    if i['host'] == event.source.host:
                        self.userdb[event.target].pop(i['hostmask'].split("!")[0])

    def on_join(self, event, irc):
        if event.source.nick == self.config['nickname']:
            log.info("Joining %s", event.target)
            irc.send("WHO {0} nuhs%nhuac".format(event.target))
        else:
            irc.send("WHO {0} nuhs%nhuac".format(event.source.nick))

    @staticmethod
    def on_invite(event, irc):
        if utils.checkPerms(event.source.host, trusted=True):
            hostmask = event.source.host
            log.info("Invited to %s by %s", event.arguments[1], hostmask)
            irc.join(event.arguments[1])

    # Numeric events

    def on_nicknameinuse(self, event, irc):
        log.error("Nick already in use, trying alternative")
        irc.nick(self.config['nickname'] + "_")

    @staticmethod
    def on_bannedfromchan(event, irc):
        s = event.raw.split(" ")
        channel = s[3]
        irc.notice("wolfy1339", "Banned from {0}".format(channel))
        log.warning("Banned from %s", channel)

    @staticmethod
    def on_endofmotd(event, irc):
        log.info("Received MOTD from network")

    @staticmethod
    def on_welcome(event, irc):
        log.info("Connected to network")

    def on_whoreply(self, event, irc):
        (ident, host, nick) = event.arguments[1:3] + event.arguments[4:5]
        channel = event.arguments[0]
        hostmask = "{0}!{1}@{2}".format(nick, ident, host)
        if nick != "ChanServ":
            try:
                self.userdb[channel][nick] = {
                    'hostmask': hostmask,
                    'host': host,
                    'account': ''.join(host.split("/")[-1:])
                }
            except KeyError:
                self.userdb[channel] = {}
                self.userdb[channel][nick] = {
                    'hostmask': hostmask,
                    'host': host,
                    'account': ''.join(host.split("/")[-1:])
                }

    def on_whospcrpl(self, event, irc):
        (ident, host, nick) = event.arguments[1:4]
        hostmask = "{0}!{1}@{2}".format(nick, ident, host)
        channel = event.arguments[0]
        account = event.arguments[4]
        if nick != "ChanServ":
            try:
                self.userdb[channel][nick] = {
                    'hostmask': hostmask,
                    'host': host,
                    'account': account
                }
            except KeyError:
                self.userdb[channel] = {}
                self.userdb[channel][nick] = {
                    'hostmask': hostmask,
                    'host': host,
                    'account': account
                }

    @staticmethod
    def on_315(event, irc):
        log.info("Received end of WHO reply from network")


Bot()
Python
0.999999
@@ -1418,400 +1418,8 @@ raw) -%0A if event.arguments == 'VERSION':%0A sysver = %22%22.join(__import__(%22sys%22).version.split(%22%5Cn%22))%0A gitver = __import__(%22subprocess%22).check_output(%5B'git', 'rev-parse', '--short', 'HEAD'%5D).decode().split()%5B0%5D%0A messsage = %22A zIRC bot v%7B0%7D@%7B1%7D, running on Python %7B2%7D%22.format(%220.1%22, gitver, sysver)%0A irc.notice(event.source.nick, %22%5Cx01%22 + message + %22%5Cx01%22) %0A%0A
92dedb59e89c8e590bee831ea79fb682baa736a2
Clean up
bot.py
bot.py
import os import json import asyncio import websockets import slacker ################################ # IMPORTANT: just for testing! # ################################ slack = slacker.Slacker('xoxb-24649221783-q40uS6HJkH7D6TMhykeyaH7h') # Use this for production: # # slack = slacker.Slacker(os.environ["SLACKAPIKEY"]) # channels = json.loads(slack.channels.list()) for channel in channels.get('channels', {}): slack.channels.join(channel.get['id']) def open_im_channel(user): try: response = slack.im.open(user) except slacker.Error as e: print(e) return None # https://api.slack.com/methods/im.open return response.body.get('channel', {}).get('id') async def read_loop(uri): ws = await websockets.connect(uri) while True: #wait for the data from slack to come in. json_data = await ws.recv() data = json.loads(json_data) print(data) #if a user joins the devolio team if data.get('type') == 'team_join': user_id = data.get('user').get('id') user_name = data.get('user').get('name') im_channel_id = open_im_channel(user_id) if im_channel_id is not None: send_introduction_message(user_id, user_name) #We sadly cant force the #slack.channels.join("intro") #if a user changes his preferences if data.get('type') == "user_change": user_id = data.get('user').get('id') user_name = data.get('user').get('name') im_channel_id = open_im_channel(user_id) title = data.get('user').get('profile').get('title') print(title) if im_channel_id is not None: slack.chat.post_message(user_id, "I see you changed your preferences, that's great!") slack.chat.post_message(user_id, "I will now put you in some channels that I think might be relevant to you.") slack.chat.post_message(user_id, "Feel free to join other channels as well!") scan_relevant_channels(user_id) def get_rtm_uri(): rtm = slack.rtm.start() print(rtm) try: body = rtm.body except slacker.Error as e: print(e) return None return body.get('url') def scan_relevant_channels(user_id): print("Hi") def send_introduction_message(user_id, user_name): slack.chat.post_message(user_id, "Test message, sent when you message") slack.chat.post_message(user_id, "Hey " + user_name + ", welcome to the Devolio Slack group!") slack.chat.post_message(user_id, "We'd love to hear a little about you - feel free to drop" \ "in on <#intro> and let everyone know what you're about.") slack.chat.post_message(user_id, "You can add your interests to your profile by clicking on your name, " \ "and then you can join different channels for your various interests " \ "by clicking on that \"Channels\" link up near the top left.") #check if this is the main application running (not imported) if __name__ == '__main__': ws_url = get_rtm_uri() if ws_url is not None: asyncio.get_event_loop().run_until_complete( read_loop(ws_url) ) # Send a message to #general channel # slack.chat.post_message('#general', 'Hello fellow slackers!') # while True: # Get users list # response = slack.users.list() # users = response.body['members'] # print(users) # for x in users: # print(user_id) # slack.chat.post_message(user_id, 'Hey ' + x['name'] + ', welcome to the Devolio Slack group!') # slack.chat.post_message(user_id, 'We\'d love to hear a little about you - feel free to drop in on #intro and let everyone know what you\'re about.') # slack.chat.post_message(user_id, 'You can add your interests to your profile by [fill this out - I don\'t know what the easiest way to describe this is], and then you can join different channels for your various interests by clicking on that "Channels" link up near the top left [image of 
Channels link].') # time.sleep(500)
Python
0.000002
@@ -794,17 +794,18 @@ # -w + W ait for @@ -834,17 +834,16 @@ come in -. %0A @@ -933,27 +933,23 @@ a)%0A%0A -%0A - #i +# I f a user @@ -1016,16 +1016,57 @@ _join':%0A + # Get their user id and name%0A @@ -1151,32 +1151,72 @@ r').get('name')%0A + # Open im channel with user%0A im_c @@ -1244,32 +1244,65 @@ hannel(user_id)%0A + # Send intro message%0A if i @@ -1393,105 +1393,20 @@ me)%0A - #We sadly cant force the%0A #slack.channels.join(%22intro%22)%0A #i +%0A # I f a @@ -1418,19 +1418,21 @@ changes -his +their prefere @@ -1482,16 +1482,57 @@ hange%22:%0A + # Get their user id and name%0A @@ -1617,32 +1617,75 @@ r').get('name')%0A + # Make sure im channel is open%0A im_c @@ -1713,32 +1713,66 @@ hannel(user_id)%0A + # Get info about user%0A titl @@ -2559,84 +2559,8 @@ e):%0A - slack.chat.post_message(user_id, %22Test message, sent when you message%22)%0A @@ -2650,24 +2650,42 @@ ck group!%22)%0A + time.sleep(1)%0A slack.ch @@ -2869,24 +2869,42 @@ re about.%22)%0A + time.sleep(1)%0A slack.ch @@ -3056,38 +3056,20 @@ nd then -you can join different +join channel @@ -3212,10 +3212,12 @@ .%22)%0A -#c +%0A# C heck @@ -3437,16 +3437,16 @@ ws_url)%0A + @@ -3451,894 +3451,4 @@ )%0A -%0A# Send a message to #general channel%0A# slack.chat.post_message('#general', 'Hello fellow slackers!')%0A%0A# while True:%0A %0A # Get users list%0A # response = slack.users.list()%0A # users = response.body%5B'members'%5D%0A%0A # print(users)%0A%0A # for x in users:%0A # print(user_id)%0A # slack.chat.post_message(user_id, 'Hey ' + x%5B'name'%5D + ', welcome to the Devolio Slack group!')%0A # slack.chat.post_message(user_id, 'We%5C'd love to hear a little about you - feel free to drop in on #intro and let everyone know what you%5C're about.')%0A # slack.chat.post_message(user_id, 'You can add your interests to your profile by %5Bfill this out - I don%5C't know what the easiest way to describe this is%5D, and then you can join different channels for your various interests by clicking on that %22Channels%22 link up near the top left %5Bimage of Channels link%5D.')%0A%0A # time.sleep(500)%0A
9ab748de8ca86b2f62bda30c5f2f3f0b2bde7047
Handle TimeoutException and improve code structure
bot.py
bot.py
import os
import re
import time
import json
import schedule
import config

from slackclient import SlackClient
from handlers import HandlerManager
from storage import Storage

BOT_ID = ''

sc = SlackClient(os.environ['SLACK_BOT_TOKEN'])
storage = Storage()


def post(channel, text, as_user=None):
    if as_user is None:
        as_user = True
    sc.api_call("chat.postMessage", channel=channel, as_user=as_user, text=text)


def post_report(user, title, attachments):
    sc.api_call("chat.postMessage", channel=config.DAILY_MEETING_CHANNEL,
                as_user=False, username=user['name'],
                icon_url=user['profile']['image_48'],
                text=title, attachments=json.dumps(attachments))


handler = HandlerManager(post, post_report)


# http://stackoverflow.com/a/42013042/3109776
def is_direct_message(output, own_id):
    return output and \
           'text' in output and \
           'channel' in output and \
           'type' in output and \
           'user' in output and \
           output['user'] != own_id and \
           output['type'] == 'message' and \
           output['channel'].startswith('D')


def parse_output(output_list):
    if output_list and len(output_list) > 0:
        for output in output_list:
            if is_direct_message(output, BOT_ID):
                return output['text'], output['channel'], output['user']
    return None, None, None


def resolve_bot_id():
    res = sc.api_call("users.list")
    if res.get('ok'):
        users = res.get('members')
        for user in users:
            if 'name' in user and user.get('name') == config.BOT_NAME:
                return user.get('id')
    raise Exception("Failed to find bot named '{}'!".format(config.BOT_NAME))


def run_daily_meeting():
    users = storage.get_users_for_daily_meeting()
    print("Run daily meeting:")
    for user in users:
        print(user['name'])
        channel = "@{}".format(user['name'])
        first_name = re.split(" +", user['real_name'])[0].strip()
        post(channel,
             "Hi {}! Time for the standup metting. Please answer the following questions:"
             .format(first_name))
        handler.handle(channel, user, 'report')
        storage.save_user(user)


if __name__ == "__main__":
    if not sc.rtm_connect():
        raise Exception("Connection failed! Please check your Slack Token")

    BOT_ID = resolve_bot_id()
    print("Bot {} connected and running!".format(BOT_ID))

    schedule \
        .every().day \
        .at(config.TIME) \
        .do(run_daily_meeting)

    while True:
        msg, channel, user_id = parse_output(sc.rtm_read())
        if msg and channel and user_id:
            user = sc.api_call("users.info", user=user_id)['user']
            user = storage.get_user(user['id'], user)
            handler.handle(channel, user, msg)
            storage.save_user(user)
        schedule.run_pending()
        time.sleep(1)
Python
0
@@ -1176,73 +1176,104 @@ def -parse_output(output_list):%0A +fetch_messages():%0A try:%0A messages = sc.rtm_read()%0A + if -output_list and len(output_list +messages and len(messages ) %3E @@ -1287,50 +1287,139 @@ + + for -output in output_list:%0A +m in messages:%0A handle_message(m)%0A except TimeoutError:%0A pass%0A%0A%0Adef handle_message(m):%0A if +not is_d @@ -1432,22 +1432,17 @@ message( -output +m , BOT_ID @@ -1456,29 +1456,46 @@ - - return output +return%0A%0A text, user_id, channel = m %5B'te @@ -1500,22 +1500,17 @@ text'%5D, -output +m %5B'channe @@ -1514,22 +1514,17 @@ nnel'%5D, -output +m %5B'user'%5D @@ -1528,35 +1528,451 @@ r'%5D%0A +%0A -return None, None, None +if text and channel and user_id:%0A user = get_user(user_id)%0A handler.handle(channel, user, text)%0A storage.save_user(user)%0A%0A%0A%22%22%22Get the user cached in local storage or fetch from API (It'll be cached later)%22%22%22%0Adef get_user(user_id):%0A user = storage.get_user(user_id, None)%0A # TODO: update this user from API once in while%0A if user:%0A return user%0A return sc.api_call(%22users.info%22, user=user_id)%5B'user'%5D %0A%0A%0Ad @@ -3145,302 +3145,23 @@ -msg, channel, user_id = parse_output(sc.rtm_read())%0A if msg and channel and user_id:%0A user = sc.api_call(%22users.info%22, user=user_id)%5B'user'%5D%0A user = storage.get_user(user%5B'id'%5D, user)%0A handler.handle(channel, user, msg)%0A storage.save_user(user +fetch_messages( )%0A
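Reassembled from the character-level hunks (whitespace approximate), the refactor replaces parse_output() with a small pipeline: fetch_messages() guards sc.rtm_read() against the TimeoutError named in the subject, handle_message() filters and dispatches a single event, and get_user() prefers the locally cached user. The names sc, storage, handler, BOT_ID and is_direct_message all come from the file above:

def fetch_messages():
    try:
        messages = sc.rtm_read()
        if messages and len(messages) > 0:
            for m in messages:
                handle_message(m)
    except TimeoutError:
        pass


def handle_message(m):
    if not is_direct_message(m, BOT_ID):
        return

    text, user_id, channel = m['text'], m['channel'], m['user']

    if text and channel and user_id:
        user = get_user(user_id)
        handler.handle(channel, user, text)
        storage.save_user(user)


"""Get the user cached in local storage or fetch from API (It'll be cached later)"""
def get_user(user_id):
    user = storage.get_user(user_id, None)
    # TODO: update this user from API once in while
    if user:
        return user
    return sc.api_call("users.info", user=user_id)['user']

Read literally, the hunk assigns m['channel'] to user_id and m['user'] to channel; that ordering is reproduced above exactly as the diff gives it.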
b367ff9e032d01f15aaaedc7e93446e9dda2649a
Fix outputing
bot.py
bot.py
import evaluation

settings = {}
current_grid = [[0]]
current_round = 0
me = -1
op = -1


def play(grid, column, color):
    grid = [x[:] for x in grid]
    for row in reversed(grid):
        if row[column] == 0:
            row[column] = color
            return grid
    # Can't play there
    return None


def nodes(grid, player):
    for i in range(settings['field_columns']):
        new_grid = play(grid, i, player)
        if new_grid:
            yield i, new_grid


def minimax(grid, depth, is_max_player):
    depth -= 1
    if is_max_player:
        best = evaluation.LOSE
        for i, new_grid in nodes(grid, me):
            current_value = evaluation.scan(new_grid, me ,op)
            if current_value == evaluation.WIN or depth == 0:
                return current_value
            v = minimax(new_grid, depth, False)
            best = max(best, v)
            if best == evaluation.WIN:
                break
        return best
    else:
        best = evaluation.WIN
        for i, new_grid in nodes(grid, op):
            current_value = evaluation.scan(new_grid, me ,op)
            if current_value == evaluation.LOSE or depth == 0:
                return current_value
            v = minimax(new_grid, depth, True)
            best = min(best, v)
            if best == evaluation.LOSE:
                break
        return best


if __name__ == '__main__':
    while True:
        line = raw_input()
        if not line:
            continue
        content = line.split()

        if content[0] == 'settings':
            try:
                settings[content[1]] = int(content[2])
            except:
                settings[content[1]] = content[2]
            if content[1] == 'your_botid':
                me = int(content[2])
                # assuming the ids are always 1 and 2?
                op = [2,1][me - 1]
        elif content[0] == 'update':
            if content[2] == 'field':
                current_grid = [[int(x) for x in y.split(',')] for y in content[3].split(';')]
            elif content[2] == 'round':
                current_round = int(content[3])
        elif content[0] == 'action':
            if current_round == 1:
                print('place_disk %d' % (settings['field_columns'] // 2))
                continue
            values = sorted((minimax(g, 2, False), i) for i, g in nodes(current_grid, me))
            print('place_disk %d' % values[-1][1])
            # TODO get the remaining time?
            # TODO get the per-turn time?
Python
0.999986
@@ -10,16 +10,27 @@ aluation +%0Aimport sys %0A%0Asettin @@ -1360,16 +1360,29 @@ n best%0A%0A +first = True%0A if __nam @@ -2179,49 +2179,78 @@ if -current_round == 1:%0A print +first:%0A first = False%0A sys.stdout.write( ('pl @@ -2252,25 +2252,25 @@ (('place_dis -k +c %25d' %25 (sett @@ -2297,16 +2297,59 @@ %5D // 2)) + + '%5Cn')%0A sys.stdout.flush() %0A @@ -2406,17 +2406,17 @@ imax(g, -2 +3 , False) @@ -2473,13 +2473,25 @@ -print +sys.stdout.write( ('pl @@ -2497,17 +2497,17 @@ lace_dis -k +c %25d' %25 v @@ -2519,16 +2519,55 @@ %5B-1%5D%5B1%5D) + + '%5Cn')%0A sys.stdout.flush() %0A
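Two things happen in these hunks: the replies change from print('place_disk ...') to sys.stdout.write(('place_disc %d' % ...) + '\n') followed by sys.stdout.flush(), and the search depth passed to minimax() goes from 2 to 3. The flush matters because a game referee reading the bot's stdout line by line can otherwise stall on Python's output buffering. A runnable sketch of the pattern:

import sys

def emit(command):
    # Write the move and flush immediately so the referee process sees it
    # right away instead of waiting for Python's buffer to fill.
    sys.stdout.write(command + '\n')
    sys.stdout.flush()

emit('place_disc 3')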
fae0989a5dc6886b11896f6ba5c6484cd1c1f735
Fix error on unknown command and blank game name
bot.py
bot.py
import asyncio

import discord

import text_adventure


class Bot(object):
    def __init__(self, client, config):
        self.client = client
        self.config = config
        self.game_obj = None

    @asyncio.coroutine
    def do_command(self, message, command, *args):
        yield from getattr(self, command)(message, *args)

    @asyncio.coroutine
    def game(self, message, command, *args):
        yield from getattr(self, 'game_' + command)(message, *args)

    @asyncio.coroutine
    def game_start(self, message, name):
        if self.game_obj is not None:
            return
        self.game_obj = text_adventure.Game(self.config, name)
        yield from self.client.change_presence(game = discord.Game(name = name))
        yield from self.client.send_message(message.channel, self.game_obj.output())

    @asyncio.coroutine
    def game_input(self, message, inp):
        if self.game_obj is None:
            return
        self.game_obj.inp(inp)
        yield from self.client.send_message(message.channel, self.game_obj.output())

    @asyncio.coroutine
    def game_end(self, message):
        if self.game_obj is None:
            return
        self.game_obj.stop()
        self.game_obj = None
        yield from self.client.change_presence(game = discord.Game(name = ''))

    @asyncio.coroutine
    def parse_chatter(self, message):
        if message.content.lower() == 'so' or ':so:' in message.content.lower():
            yield from self.client.send_message(message.channel, 'so')
        elif message.content.startswith(self.config['game_prefix']) and self.game_obj is not None:
            yield from self.game_input(message, message.content[1:])
Python
0.000011
@@ -259,32 +259,49 @@ ommand, *args):%0A + try:%0A yield fr @@ -333,32 +333,80 @@ (message, *args) +%0A except AttributeError:%0A pass %0A%0A @asyncio.c @@ -1337,31 +1337,12 @@ e = -discord.Game(name = '') +None )%0A%0A
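The fix wraps the getattr() dispatch in do_command() in try/except AttributeError, so an unknown command name becomes a no-op instead of crashing the bot, and game_end() now clears the presence with game=None rather than discord.Game(name=''). A standalone sketch of the guarded-dispatch idea; note the patch wraps the whole call, which also swallows AttributeErrors raised inside a handler, while splitting lookup from call, as below, avoids that side effect:

class Dispatcher(object):
    def do_command(self, name, *args):
        try:
            handler = getattr(self, name)
        except AttributeError:
            return None  # unknown command: ignore instead of raising
        return handler(*args)

    def ping(self):
        return 'pong'

d = Dispatcher()
print(d.do_command('ping'))     # pong
print(d.do_command('missing'))  # None, no crash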
0958e4760264fcf232e655c47d88a03bf38896b0
Renamed subreddit command to reddit
bot.py
bot.py
import praw
import discord
from discord.ext import commands
import os
from dotenv import load_dotenv, find_dotenv

load_dotenv(find_dotenv())

reddit = praw.Reddit(client_id = os.environ.get("REDDIT_CLIENT_ID"),
                     client_secret = os.environ.get("REDDIT_CLIENT_SECRET"),
                     user_agent = "aySH Bot")
print(reddit.read_only)

async def top_subreddit(subreddit, time):
    tops = reddit.subreddit(subreddit).top(time, limit = 1)
    for top in tops:
        await client.say(top.url)

# Bot config
prefix = '!'
des = 'aySH'
client = commands.Bot(description=des, command_prefix=prefix)

# Make a startup command
@client.event
async def on_ready():
    print("[*]I'm in")
    print('[*] Name: {}'.format(client.user.name))

# subreddit
@client.command(pass_context=True)
async def subreddit(ctx, subreddit = "all", time = "day"):
    # await print(top)
    await top_subreddit(subreddit, time)

# random
@client.command(pass_context=True)
async def random(ctx):
    # await print(top)
    await top_subreddit('random', 'all')

# probuild
@client.command(pass_context=True)
async def build(ctx, champion = "janna"):
    await client.say('http://www.probuilds.net/champions/details/' + champion)

# counter
@client.command(pass_context=True)
async def counter(ctx, champion = "janna"):
    await client.say('http://lolcounter.com/champions/' + champion)

client.run(os.environ.get("DISCORD_CLIENT_TOKEN"))
Python
0.999985
@@ -807,19 +807,16 @@ ync def -sub reddit(c @@ -1424,8 +1424,9 @@ TOKEN%22)) +%0A
af7af25ed5a13a4ce45f358ec5548c2f9e6a492e
remove wiki from DNL
bot.py
bot.py
import json
import traceback
from datetime import datetime
from pathlib import Path

import aiohttp
import aredis
import asyncpg
from discord.ext import commands

from utils.custom_context import CustomContext


class QTBot(commands.Bot):
    def __init__(self, config_file, *args, **kwargs):
        self.config_file = config_file
        self.description = "qtbot is a big qt written in python3 and love."
        self.do_not_load = ("league", "poll", "music", "timer", "ris", "timer", "wiki")

        with open(self.config_file) as f:
            self.api_keys = json.load(f)

        self.token = self.api_keys["discord"]

        super().__init__(
            command_prefix=self.get_prefix,
            description=self.description,
            help_command=commands.DefaultHelpCommand(dm_help=True),
            case_insensitive=True,
            *args,
            **kwargs,
        )

        self.aio_session = aiohttp.ClientSession(loop=self.loop)
        # self.rune_client = lolrune.AioRuneClient()
        self.redis_client = aredis.StrictRedis(host="localhost", decode_responses=True)
        self.startup_extensions = [x.stem for x in Path("cogs").glob("*.py")]
        self.loop.run_until_complete(self.create_db_pool())
        self.loop.run_until_complete(self.load_all_prefixes())

    def run(self):
        super().run(self.token)

    async def load_all_prefixes(self):
        pres = await self.pg_con.fetch("SELECT * from custom_prefix")
        # Load custom prefixes into a dict
        self.pre_dict = {r["guild_id"]: r["prefix"] for r in pres}

    async def get_prefix(self, message):
        try:
            return ("qt.", self.pre_dict[message.guild.id])
        except (KeyError, AttributeError):
            return "qt."

    async def create_db_pool(self):
        with open(self.config_file) as f:
            self.pg_pw = json.load(f)["postgres"]
        self.pg_con = await asyncpg.create_pool(
            user="james", password=self.pg_pw, database="discord_testing"
        )

    async def on_message(self, message):
        ctx = await self.get_context(message, cls=CustomContext)
        await self.invoke(ctx)

    async def on_ready(self):
        if not hasattr(self, "start_time"):
            self.start_time = datetime.now()
            self.start_time_str = self.start_time.strftime("%B %d %H:%M:%S")

        for extension in self.startup_extensions:
            if extension not in self.do_not_load:
                try:
                    self.load_extension(f"cogs.{extension}")
                except:
                    print(f"Failed Extension: {extension}")
                    traceback.print_exc()
                else:
                    print(f"Loaded Extension: {extension}")

        print(f"Client logged in at {self.start_time_str}")
        print(self.user.name)
        print(self.user.id)
        print("----------")
Python
0
@@ -500,16 +500,8 @@ mer%22 -, %22wiki%22 )%0D%0A%0D
b8fb30a06ff15000a2d7542e7089b6c8ac1074e5
Add --allow-drilled flag to cli.py, and increase recursion limit
cli.py
cli.py
# Copyright (c) 2015 Matthew Earl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.

"""
Command line interface for solving a placement problem.

"""

__all__ = (
    'main',
)

import argparse
import sys

import placer
import svg


def main(board, components, nets, args=None):
    parser = argparse.ArgumentParser(
        description='Find circuit placements.')
    parser.add_argument('--first-only', action='store_true',
                        help="Only output the first solution")
    parser.add_argument('--svg', nargs='?', const=True,
                        help="Output SVG for the solutions")

    parsed_args = parser.parse_args(args if args is not None else sys.argv[1:])

    placement_iter = placer.place(board, components, nets)

    if parsed_args.first_only:
        placement_iter = [next(placement_iter)]

    if not parsed_args.svg:
        count = 0
        for placement in placement_iter:
            placement.print_solution()
            print()
            count += 1
        print("{} solutions".format(count))
    else:
        if isinstance(parsed_args.svg, str):
            with open(parsed_args.svg, "w") as f:
                svg.print_svg(placement_iter, file=f)
        else:
            svg.print_svg(placement_iter, file=sys.stdout)
Python
0.000001
@@ -1317,16 +1317,51 @@ =None):%0A + sys.setrecursionlimit(100000)%0A%0A pars @@ -1430,16 +1430,16 @@ ents.')%0A - pars @@ -1554,16 +1554,142 @@ ution%22)%0A + parser.add_argument('--allow-drilled', action='store_true',%0A help=%22Allow holes to be drilled out%22)%0A pars @@ -1878,16 +1878,16 @@ v%5B1:%5D)%0A%0A - plac @@ -1935,16 +1935,91 @@ ts, nets +,%0A allow_drilled=parsed_args.allow_drilled )%0A%0A i
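The patch raises CPython's recursion limit at the top of main() and threads a new boolean flag through to placer.place(..., allow_drilled=parsed_args.allow_drilled). Backtracking placers recurse once per placement decision, so the default limit of roughly 1000 frames is easy to exhaust on large boards. A self-contained sketch of both pieces (returning the flag instead of calling the real placer is an assumption for illustration):

import argparse
import sys

def main(args=None):
    # Deep backtracking would otherwise hit CPython's ~1000-frame default.
    sys.setrecursionlimit(100000)

    parser = argparse.ArgumentParser(description='Find circuit placements.')
    parser.add_argument('--allow-drilled', action='store_true',
                        help="Allow holes to be drilled out")
    parsed_args = parser.parse_args(args if args is not None else sys.argv[1:])

    # The real code forwards this to placer.place(); just echo it here.
    return parsed_args.allow_drilled

print(main(['--allow-drilled']))  # True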
2352ce413cebb9f0fd7b1f26bb33bd0325abedfd
make more pylint friendly
csw.py
csw.py
#!/usr/bin/python -u
# -*- coding: ISO-8859-15 -*-
# =================================================================
#
# $Id$
#
# Authors: Tom Kralidis <[email protected]>
#
# Copyright (c) 2010 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================

import os
from server import server

config = 'default.cfg'

if os.environ['QUERY_STRING'].lower().find('config') != -1:
    for kvp in os.environ['QUERY_STRING'].split('&'):
        if kvp.lower().find('config') != -1:
            config = kvp.split('=')[1]

# get runtime configuration
CSW = server.Csw(config)

# go!
CSW.dispatch()
Python
0.000002
@@ -1380,22 +1380,22 @@ server%0A%0A -config +CONFIG = 'defa @@ -1575,22 +1575,22 @@ -config +CONFIG = kvp.s @@ -1648,22 +1648,22 @@ ver.Csw( -config +CONFIG )%0A%0A# go!
12c7d473e2a270d46722b936a8fe9b62eb7548f1
Add test for issue 203
h5py/_hl/tests/test_slicing.py
h5py/_hl/tests/test_slicing.py
import numpy as np

from .common import ut, TestCase

import h5py
from h5py.highlevel import File

class BaseSlicing(TestCase):

    def setUp(self):
        self.f = File(self.mktemp(), 'w')

    def tearDown(self):
        if self.f:
            self.f.close()

class TestSingleElement(BaseSlicing):

    """
        Feature: Retrieving a single element works with NumPy semantics
    """

    def test_single_index(self):
        """ Single-element selection with [index] yields array scalar """
        dset = self.f.create_dataset('x', (1,), dtype='i1')
        out = dset[0]
        self.assertIsInstance(out, np.int8)

    def test_single_null(self):
        """ Single-element selection with [()] yields ndarray """
        dset = self.f.create_dataset('x', (1,), dtype='i1')
        out = dset[()]
        self.assertIsInstance(out, np.ndarray)
        self.assertEqual(out.shape, (1,))

    def test_scalar_index(self):
        """ Slicing with [...] yields scalar ndarray """
        dset = self.f.create_dataset('x', shape=(), dtype='f')
        out = dset[...]
        self.assertIsInstance(out, np.ndarray)
        self.assertEqual(out.shape, ())

    def test_scalar_null(self):
        """ Slicing with [()] yields array scalar """
        dset = self.f.create_dataset('x', shape=(), dtype='i1')
        out = dset[()]
        self.assertIsInstance(out, np.int8)

    def test_compound(self):
        """ Compound scalar is numpy.void, not tuple (issue 135) """
        dt = np.dtype([('a','i4'),('b','f8')])
        v = np.ones((4,), dtype=dt)
        dset = self.f.create_dataset('foo', (4,), data=v)
        self.assertEqual(dset[0], v[0])
        self.assertIsInstance(dset[0], np.void)

class TestObjectIndex(BaseSlicing):

    """
        Feauture: numpy.object_ subtypes map to real Python objects
    """

    def test_reference(self):
        """ Indexing a reference dataset returns a h5py.Reference instance """
        dset = self.f.create_dataset('x', (1,), dtype=h5py.special_dtype(ref=h5py.Reference))
        dset[0] = self.f.ref
        self.assertEqual(type(dset[0]), h5py.Reference)

    def test_regref(self):
        """ Indexing a region reference dataset returns a h5py.RegionReference """
        dset1 = self.f.create_dataset('x', (10,10))
        regref = dset1.regionref[...]
        dset2 = self.f.create_dataset('y', (1,), dtype=h5py.special_dtype(ref=h5py.RegionReference))
        dset2[0] = regref
        self.assertEqual(type(dset2[0]), h5py.RegionReference)

    def test_scalar(self):
        """ Indexing returns a real Python object on scalar datasets """
        dset = self.f.create_dataset('x', (), dtype=h5py.special_dtype(ref=h5py.Reference))
        dset[()] = self.f.ref
        self.assertEqual(type(dset[()]), h5py.Reference)

    def test_bytestr(self):
        """ Indexing a byte string dataset returns a real python byte string """
        dset = self.f.create_dataset('x', (1,), dtype=h5py.special_dtype(vlen=bytes))
        dset[0] = b"Hello there!"
        self.assertEqual(type(dset[0]), bytes)
Python
0
@@ -3076,16 +3076,568 @@ bytes)%0A%0A +class TestSimpleSlicing(TestCase):%0A%0A %22%22%22%0A Feature: Simple NumPy-style slices (start:stop:step) are supported.%0A %22%22%22%0A%0A def setUp(self):%0A self.f = File(self.mktemp(), 'w')%0A self.arr = np.arange(10)%0A self.dset = self.f.create_dataset('x', data=self.arr)%0A%0A def tearDown(self):%0A if self.f:%0A self.f.close()%0A%0A @ut.expectedFailure%0A def test_negative_stop(self):%0A %22%22%22 Negative stop indexes work as they do in NumPy %22%22%22%0A self.assertArrayEqual(self.dset%5B2:-2%5D, self.arr%5B2:-2%5D)%0A%0A %0A%0A%0A%0A%0A%0A%0A%0A
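This hunk is almost entirely an addition: a new TestSimpleSlicing case whose only test is decorated with @ut.expectedFailure, documenting that negative stop indexes (issue 203) do not yet behave like NumPy. Reassembled from the character diff (indentation inferred from the surrounding classes), the added code reads:

class TestSimpleSlicing(TestCase):

    """
        Feature: Simple NumPy-style slices (start:stop:step) are supported.
    """

    def setUp(self):
        self.f = File(self.mktemp(), 'w')
        self.arr = np.arange(10)
        self.dset = self.f.create_dataset('x', data=self.arr)

    def tearDown(self):
        if self.f:
            self.f.close()

    @ut.expectedFailure
    def test_negative_stop(self):
        """ Negative stop indexes work as they do in NumPy """
        self.assertArrayEqual(self.dset[2:-2], self.arr[2:-2])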
d72e6cc3069e280756c59d42406a070529ee8498
Switch to using non-deprecated method
pskc/xml.py
pskc/xml.py
# xml.py - module for parsing and writing XML for PSKC files
# coding: utf-8
#
# Copyright (C) 2014-2016 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

"""Module for parsing XML in PSKC files.

This module provides some utility functions for parsing XML files.
"""

from __future__ import absolute_import

# try to find a usable ElementTree module
try:
    from lxml import etree
except ImportError:  # pragma: no cover (different implementations)
    import xml.etree.ElementTree as etree


# the relevant XML namespaces for PSKC
namespaces = dict(
    # the XML namespace URI for version 1.0 of PSKC
    pskc='urn:ietf:params:xml:ns:keyprov:pskc',
    # the XML Signature namespace
    ds='http://www.w3.org/2000/09/xmldsig#',
    # the XML Encryption namespace
    xenc='http://www.w3.org/2001/04/xmlenc#',
    # the XML Encryption version 1.1 namespace
    xenc11='http://www.w3.org/2009/xmlenc11#',
    # the PKCS #5 namespace
    pkcs5='http://www.rsasecurity.com/rsalabs/pkcs/schemas/pkcs-5v2-0#',
)


# register the namespaces so the correct short names will be used
for ns, namespace in namespaces.items():
    etree.register_namespace(ns, namespace)


def parse(source):
    """Parse the provided file and return an element tree."""
    return etree.parse(source)


def remove_namespaces(tree):
    """Remove namespaces from all elements in the tree."""
    import re
    for elem in tree.getiterator():
        if isinstance(elem.tag, ''.__class__):  # pragma: no branch
            elem.tag = re.sub(r'^\{[^}]*\}', '', elem.tag)


def findall(tree, *matches):
    """Find the child elements."""
    for match in matches:
        for element in tree.findall(match, namespaces=namespaces):
            yield element


def find(tree, *matches):
    """Find a child element that matches any of the patterns (or None)."""
    try:
        return next(findall(tree, *matches))
    except StopIteration:
        pass


def findtext(tree, *matches):
    """Get the text value of an element (or None)."""
    element = find(tree, *matches)
    if element is not None:
        return element.text.strip()


def findint(tree, *matches):
    """Return an element value as an int (or None)."""
    value = findtext(tree, *matches)
    if value:
        return int(value)


def findtime(tree, *matches):
    """Return an element value as a datetime (or None)."""
    value = findtext(tree, *matches)
    if value:
        import dateutil.parser
        return dateutil.parser.parse(value)


def findbin(tree, *matches):
    """Return the binary element value base64 decoded."""
    value = findtext(tree, *matches)
    if value:
        import base64
        return base64.b64decode(value)


def getint(tree, attribute):
    """Return an attribute value as an integer (or None)."""
    value = tree.get(attribute)
    if value:
        return int(value)


def getbool(tree, attribute, default=None):
    """Return an attribute value as a boolean (or None)."""
    value = tree.get(attribute)
    if value:
        value = value.lower()
        if value in ('1', 'true'):
            return True
        elif value in ('0', 'false'):
            return False
        else:
            raise ValueError('invalid boolean value: %r' % value)
    return default


def _format(value):
    import datetime
    if isinstance(value, datetime.datetime):
        value = value.isoformat()
        if value.endswith('+00:00'):
            value = value[:-6] + 'Z'
        return value
    elif value is True:
        return 'true'
    elif value is False:
        return 'false'
    return str(value)


def mk_elem(parent, tag=None, text=None, empty=False, **kwargs):
    """Add element as a child of parent."""
    # special-case the top-level element
    if tag is None:
        tag = parent
        parent = None
        empty = True
    # don't create empty elements
    if not empty and text is None and \
       all(x is None for x in kwargs.values()):
        return
    # replace namespace identifier with URL
    if ':' in tag:
        ns, name = tag.split(':', 1)
        tag = '{%s}%s' % (namespaces[ns], name)
    if parent is None:
        element = etree.Element(tag)
    else:
        element = etree.SubElement(parent, tag)
    # set text of element
    if text is not None:
        element.text = _format(text)
    # set kwargs as attributes
    for k, v in kwargs.items():
        if v is not None:
            element.set(k, _format(v))
    return element


def tostring(element):
    """Return a serialised XML document for the element tree."""
    from xml.dom import minidom
    # if we are using lxml.etree move namespaces to toplevel element
    if hasattr(element, 'nsmap'):  # pragma: no cover (only on lxml)
        # get all used namespaces
        nsmap = {}
        for e in element.iter():
            nsmap.update(e.nsmap)
        # replace toplevel element with all namespaces
        e = etree.Element(element.tag, attrib=element.attrib, nsmap=nsmap)
        for a in element:
            e.append(a)
        element = e
    xml = etree.tostring(element, encoding='UTF-8')
    return minidom.parseString(xml).toprettyxml(
        indent=' ', encoding='UTF-8').strip()
Python
0
@@ -2095,18 +2095,11 @@ ree. -getiterato +ite r():
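The whole change is Element.getiterator() to Element.iter() inside remove_namespaces(). getiterator() had been deprecated for years and was eventually removed in Python 3.9, while iter() is a drop-in replacement on both lxml and the standard library. A quick runnable check:

import xml.etree.ElementTree as etree

root = etree.fromstring('<a><b/><c/></a>')
# iter() walks the element and all of its descendants, exactly what the
# deprecated getiterator() used to do.
print([elem.tag for elem in root.iter()])  # ['a', 'b', 'c']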
8c87da20876c6b633988063eac81ff2b0f602dbb
Fix url encoding
ptscrape.py
ptscrape.py
#=======================================================================
# Screen-scraping framework
#=======================================================================

import logging
try:
    import bs4 as soup
except ImportError:
    import BeautifulSoup as soup
import urllib2
from urllib import urlencode
from urlparse import urljoin
import cookielib
import os
import re

_log = logging.getLogger(__name__)

class PageSource(object):
    def __init__(self, cachedir=None, replay=False):
        self.cachedir = cachedir
        self.replay = replay
        self.jar = cookielib.CookieJar()
        self.agent = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.jar))
                                          # urllib2.HTTPRedirectHandler())

    def get(self, url, query=None, tag=None):
        '''HTTP GET request on a URL with optional query'''
        if query:
            url += '?' + query.urlencode()
        _log.info('GET %s', url)
        return self._transact(url, tag=tag)

    def post(self, url, query=None, tag=None):
        '''HTTP POST request on a URL with optional query'''
        _log.info('POST %s', url)
        data = ''
        if query:
            data = urlencode(query)
        return self._transact(url, data, tag=tag)

    def _transact(self, url, data=None, tag=None):
        '''Perform an HTTP request, or fetch page from cache'''
        if tag is None:
            tag = os.path.basename(url)
        if self.replay:
            content = self.read_cache(tag)
        else:
            doc = self.agent.open(url, data)
            _log.info('info %r', doc.info())
            content = doc.read()
            if self.cachedir:
                self.write_cache(tag, content)
        doc = soup.BeautifulSoup(content)
        return Page(url, doc)

    def read_cache(self, tag):
        cachefile = os.path.join(os.path.expanduser(self.cachedir), tag)
        with open(cachefile, 'rb') as f:
            content = f.read()
        return content

    def write_cache(self, tag, content):
        cachefile = os.path.join(os.path.expanduser(self.cachedir), tag)
        with open(cachefile, 'wb') as f:
            f.write(content)

class Page(object):
    def __init__(self, url, doc):
        self.url = url
        self.doc = doc

def bs_cdata(tag):
    '''Get the character data inside a BeautifulSoup element, ignoring all markup'''
    return ''.join(tag.findAll(text=True))

if __name__=='__main__':
    import argparse
    ap = argparse.ArgumentParser()
    ap.add_argument('--replay', action='store_true')
    ap.add_argument('url')
    args = ap.parse_args()
    logging.basicConfig(level=logging.INFO)
Python
0.998718
@@ -907,14 +907,8 @@ ' + -query. urle @@ -913,16 +913,21 @@ lencode( +query )%0A
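The bug: PageSource.get() called query.urlencode(), but callers pass an ordinary dict (or sequence of pairs), which has no such method, so any GET with a query raised AttributeError. The fix uses the urlencode() function already imported from urllib, matching what post() was doing. An illustration (Python 2, like the module above):

from urllib import urlencode

query = {'q': 'screen scraping', 'page': 2}
# The module-level function accepts a dict or pair sequence and returns
# a properly escaped query string.
url = 'http://example.com/search' + '?' + urlencode(query)
print(url)  # e.g. http://example.com/search?q=screen+scraping&page=2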
51f46c57e209e35063f67055e45ff6e26f8aa552
Format error on urlfetch.get fail
heat/engine/resources/stack.py
heat/engine/resources/stack.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from heat.common import exception
from heat.common import template_format
from heat.common import urlfetch
from heat.engine.properties import Properties
from heat.engine import stack_resource

from heat.openstack.common import log as logging

logger = logging.getLogger(__name__)

(PROP_TEMPLATE_URL,
 PROP_TIMEOUT_MINS,
 PROP_PARAMETERS) = ('TemplateURL', 'TimeoutInMinutes', 'Parameters')


class NestedStack(stack_resource.StackResource):
    '''
    A Resource representing a child stack to allow composition of templates.
    '''

    properties_schema = {
        PROP_TEMPLATE_URL: {
            'Type': 'String',
            'Required': True,
            'Description': _('The URL of a template that specifies the stack'
                             ' to be created as a resource.')},
        PROP_TIMEOUT_MINS: {
            'Type': 'Number',
            'Description': _('The length of time, in minutes, to wait for the'
                             ' nested stack creation.')},
        PROP_PARAMETERS: {
            'Type': 'Map',
            'Description': _('The set of parameters passed to this nested'
                             ' stack.')}}

    update_allowed_keys = ('Properties',)
    update_allowed_properties = (PROP_TEMPLATE_URL, PROP_TIMEOUT_MINS,
                                 PROP_PARAMETERS)

    def handle_create(self):
        template_data = urlfetch.get(self.properties[PROP_TEMPLATE_URL])
        template = template_format.parse(template_data)

        return self.create_with_template(template,
                                         self.properties[PROP_PARAMETERS],
                                         self.properties[PROP_TIMEOUT_MINS])

    def handle_delete(self):
        return self.delete_nested()

    def FnGetAtt(self, key):
        if key and not key.startswith('Outputs.'):
            raise exception.InvalidTemplateAttribute(resource=self.name,
                                                     key=key)
        return self.get_output(key.partition('.')[-1])

    def FnGetRefId(self):
        return self.nested().identifier().arn()

    def handle_update(self, json_snippet, tmpl_diff, prop_diff):
        # Nested stack template may be changed even if the prop_diff is empty.
        self.properties = Properties(self.properties_schema,
                                     json_snippet.get('Properties', {}),
                                     self.stack.resolve_runtime_data,
                                     self.name)

        template_data = urlfetch.get(self.properties[PROP_TEMPLATE_URL])
        template = template_format.parse(template_data)

        return self.update_with_template(template,
                                         self.properties[PROP_PARAMETERS],
                                         self.properties[PROP_TIMEOUT_MINS])


def resource_mapping():
    return {
        'AWS::CloudFormation::Stack': NestedStack,
    }
Python
0.000002
@@ -613,16 +613,49 @@ cense.%0A%0A +from requests import exceptions%0A%0A from hea @@ -680,16 +680,16 @@ ception%0A - from hea @@ -1993,32 +1993,49 @@ e_create(self):%0A + try:%0A template @@ -2083,32 +2083,250 @@ _TEMPLATE_URL%5D)%0A + except (exceptions.RequestException, IOError) as r_exc:%0A raise ValueError(%22Could not fetch remote template '%25s': %25s%22 %25%0A (self.properties%5BPROP_TEMPLATE_URL%5D, str(r_exc)))%0A%0A template @@ -3379,16 +3379,33 @@ .name)%0A%0A + try:%0A @@ -3461,32 +3461,250 @@ _TEMPLATE_URL%5D)%0A + except (exceptions.RequestException, IOError) as r_exc:%0A raise ValueError(%22Could not fetch remote template '%25s': %25s%22 %25%0A (self.properties%5BPROP_TEMPLATE_URL%5D, str(r_exc)))%0A%0A template
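Both handle_create() and handle_update() gain the same guard: urlfetch.get() is wrapped so that network failures surface as a ValueError naming the template URL instead of an unhandled exception. Reassembled from the hunks (whitespace approximate), the patched handle_create() reads roughly as below; it is a fragment of the NestedStack class above, with the new import at module level:

# at module level:
from requests import exceptions

# inside NestedStack:
    def handle_create(self):
        try:
            template_data = urlfetch.get(self.properties[PROP_TEMPLATE_URL])
        except (exceptions.RequestException, IOError) as r_exc:
            raise ValueError("Could not fetch remote template '%s': %s" %
                             (self.properties[PROP_TEMPLATE_URL], str(r_exc)))

        template = template_format.parse(template_data)

        return self.create_with_template(template,
                                         self.properties[PROP_PARAMETERS],
                                         self.properties[PROP_TIMEOUT_MINS])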
22aead72594e5aa7047858c04beb3018e93c59fe
Revert "started 0.2.x"
api/apps.py
api/apps.py
from __future__ import unicode_literals

from django.apps import AppConfig


APP_NAME = 'vsemionov.notes.api'
APP_VERSION = '0.2'


class ApiConfig(AppConfig):
    name = 'api'
Python
0
@@ -124,9 +124,11 @@ '0. -2 +1.0 '%0A%0A%0A
492005db9a7c34b2648de8b7335bdbdd18ffb13b
Update setup.py with release version.
py/setup.py
py/setup.py
# Lint as: python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Config file for distributing package via Pypi server."""

import setuptools

# It is assumed that this file will moved to gps_building_block/py/setup.py,
# while the README resides at gps_building_blocks/README.md.
with open("../README.md", "r") as fh:
  long_description = fh.read()

setuptools.setup(
    name="gps-building-blocks",
    version="0.1.12",
    author="gPS Team",
    author_email="[email protected]",
    description="Modules and tools useful for use with advanced data solutions on Google Ads, Google Marketing Platform and Google Cloud.",
    long_description=long_description,
    long_description_tpye="text/markdown",
    url="https://github.com/google/gps_building_blocks",
    license="Apache Software License",
    packages=setuptools.find_packages(),
    install_requires=[
        "absl-py==0.9.0",
        "google-api-core==1.17.0",
        "google-api-python-client==1.9.1",
        "google-auth==1.16.0",
        "google-cloud-bigquery==1.22.0",
        "google-cloud-storage==1.28.1",
        "requests==2.23.0",
        "dataclasses; python_version<'3.7'"
    ],
    classifiers=[
        "Intended Audience :: Developers",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Internet",
        "Topic :: Scientific/Engineering",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Operating System :: OS Independent",
    ],
)
Python
0
@@ -945,14 +945,13 @@ on=%22 -0.1.12 +1.0.0 %22,%0A
31fc2949deff9af411cbdf5144362052d08fc276
add comments to qrscreen.py
qrscreen.py
qrscreen.py
import web
import PyQRNative
import pygame
import os
urls = (
    '/q', 'QR',
    '/', 'Index',
    '/Dream', 'Dream'
)

os.environ['SDL_VIDEODRIVER'] = 'fbcon'
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDEV"] = "/dev/input/touchscreen"
os.environ["SDL_MOUSEDRV"] = "TSLIB"

pygame.init()
screen = pygame.display.set_mode((0,0), pygame.FULLSCREEN)
screen.fill((255,255,255))
pygame.mouse.set_visible(False)
pygame.display.update()

"""
Helper method that will try to create the smallest version QR possible given the input data string.
For more info on versions see here: http://www.qrcode.com/en/about/version.html
"""
def makeQR(data_string,path,level=2, boxSize=4):
    quality={1: PyQRNative.QRErrorCorrectLevel.L,
             2: PyQRNative.QRErrorCorrectLevel.M,
             3: PyQRNative.QRErrorCorrectLevel.Q,
             4: PyQRNative.QRErrorCorrectLevel.H}
    size=1
    while 1:
        try:
            print "trying size %s" % size
            q = PyQRNative.QRCode(size,quality[level], boxSize=boxSize)
            q.addData(data_string)
            q.make()
            im=q.makeImage()
            im.save(path,format="png")
            break
        except TypeError:
            print "failed increasing size"
            size+=1

"""
url: /
detail: serve the form which will allow you to choose data and parameters for creating a QR.
"""
class Index:
    def GET(self):
        _index = '''<html>
        <head>
        <title>Pi-QReator</title>
        </head>
        <body>
        <h1>Welcome to Pi-QReator</h1>
        <form action="/q" method="get">
        Data: <input type="text" name="data"></input><br />
        Size: <select name="size">
        <option value="1">1</option>
        <option value="2">2</option>
        <option value="3">3</option>
        <option value="4" selected="selected">4</option>
        <option value="5">5</option>
        <option value="6">6</option>
        </select><br />
        Error Correction: <select name="lvl">
        <option value="1">L</option>
        <option value="2">M</option>
        <option value="3">Q</option>
        <option value="4">H</option>
        </select><br />
        <input type="submit" value="Submit"></input>
        </form>
        </body>
        </html>'''
        return _index

"""
url: /Dream
detail: Currently unused. Except to clear the screen, and deliver a positive message =)
"""
class Dream:
    def GET(self):
        screen.fill((20,100,200))
        pygame.display.update()
        return "DREAM BIG"

"""
url: /q
detail: endpoint used to create QR code and show it on the screen.
args:
    data - string which will get encoded in the QR
    size - size in px of each 'box' within the QR. 4 works pretty well.
    lvl - Error correction level, acceptable values 1-4
"""
class QR:
    def GET(self):
        args = web.input(name = 'web')
        print 'making qr'
        try:
            pixelSize = args.size
        except:
            pixelSize = 5
        try:
            lvl = args.lvl
        except:
            lvl = 2
        try:
            data = args.data
            makeQR(args.data, 'QRfile.png', level=int(lvl), boxSize=int(pixelSize))
            qr_img = pygame.image.load("QRfile.png")
            x = (screen.get_width()/2) - (qr_img.get_rect().size[0]/2)
            y = (screen.get_height()/2) - (qr_img.get_rect().size[1]/2)
            screen.fill((255,255,255))
            screen.blit(qr_img,(x,y))
            pygame.display.update()
            return '''<html>
            <head>
            <title>Pi-QReator</title>
            </head>
            <body>
            <script type="text/javascript">
            history.go(-1);
            </script>
            </body>
            </html>'''
        except Exception as e:
            if str(e) == "'data'":
                return "You must pass parameter 'data'"
            return str(e)

if __name__ == "__main__":
    # Start the server
    app = web.application(urls, globals())
    app.run()
Python
0
@@ -45,16 +45,49 @@ mport os +%0A%0A# List of URLs for webpy server %0Aurls = @@ -141,16 +141,60 @@ eam'%0A)%0A%0A +# Variables needed to enable the TFT screen%0A os.envir @@ -352,16 +352,37 @@ %22TSLIB%22 +%0A%0A# Initialize pygame %0Apygame.
862d61569a4e7bc06acd4a676072fed79a1905e8
Switch to checking upvotes/downvotes.
replybot.py
replybot.py
#/u/GoldenSights
import traceback
import praw # simple interface to the reddit API, also handles rate limiting of requests
import time
import sqlite3

'''USER CONFIGURATION'''

APP_ID = ""
APP_SECRET = ""
APP_URI = ""
APP_REFRESH = ""
# https://www.reddit.com/comments/3cm1p8/how_to_make_your_bot_use_oauth2/
USERAGENT = ""
# This is a short description of what the bot does.
# For example "Python automatic replybot v2.0 (by /u/GoldenSights)"
SUBREDDIT = "pics"
# This is the sub or list of subs to scan for new posts. For a single sub, use "sub1". For multiple subreddits, use "sub1+sub2+sub3+..."
KEYWORDS = ["phrase 1", "phrase 2", "phrase 3", "phrase 4"]
# These are the words you are looking for
KEYAUTHORS = []
# These are the names of the authors you are looking for
# The bot will only reply to authors on this list
# Keep it empty to allow anybody.
REPLYSTRING = "Hi hungry, I'm dad."
# This is the word you want to put in reply
MAXPOSTS = 100
# This is how many posts you want to retrieve all at once. PRAW can download 100 at a time.
WAIT = 30
# This is how many seconds you will wait between cycles. The bot is completely inactive during this time.
MINSCORE = 100
# Minimum comment score must have to trigger reply
CLEANCYCLES = 10
# After this many cycles, the bot will clean its database
# Keeping only the latest (2*MAXPOSTS) items

'''All done!'''

try:
    import bot
    USERAGENT = bot.aG
except ImportError:
    pass

print('Opening SQL Database')
sql = sqlite3.connect('sql.db')
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(id TEXT)')

print('Logging in...')
r = praw.Reddit(USERAGENT)
r.set_oauth_app_info(APP_ID, APP_SECRET, APP_URI)
r.refresh_access_information(APP_REFRESH)

def replybot():
    print('Searching %s.' % SUBREDDIT)
    subreddit = r.get_subreddit(SUBREDDIT)
    posts = list(subreddit.get_comments(limit=MAXPOSTS))
    posts.reverse()
    for post in posts:
        # Anything that needs to happen every loop goes here.
        pid = post.id

        try:
            pauthor = post.author.name
        except AttributeError:
            # Author is deleted. We don't care about this post.
            continue

        if pauthor.lower() == r.user.name.lower():
            # Don't reply to yourself, robot!
            print('Will not reply to myself.')
            continue

        if KEYAUTHORS != [] and all(auth.lower() != pauthor for auth in KEYAUTHORS):
            # This post was not made by a keyauthor
            continue

        if post.score < MINSCORE:
            # Comment does not meet minimum score requirement
            continue

        cur.execute('SELECT * FROM oldposts WHERE ID=?', [pid])
        if cur.fetchone():
            # Post is already in the database
            continue

        pbody = post.body.lower()
        if not any(key.lower() in pbody for key in KEYWORDS):
            # Does not contain our keyword
            continue

        cur.execute('INSERT INTO oldposts VALUES(?)', [pid])
        sql.commit()
        print('Replying to %s by %s' % (pid, pauthor))
        try:
            post.reply(REPLYSTRING)
        except praw.errors.Forbidden:
            print('403 FORBIDDEN - is the bot banned from %s?' % post.subreddit.display_name)

cycles = 0
while True:
    try:
        replybot()
        cycles += 1
    except Exception as e:
        traceback.print_exc()
    if cycles >= CLEANCYCLES:
        print('Cleaning database')
        cur.execute('DELETE FROM oldposts WHERE id NOT IN (SELECT id FROM oldposts ORDER BY id DESC LIMIT ?)', [MAXPOSTS * 2])
        sql.commit()
        cycles = 0
    print('Running again in %d seconds \n' % WAIT)
    time.sleep(WAIT)
Python
0
@@ -1162,18 +1162,18 @@ %0AMIN -SCORE +UPVOTES = -10 0%0A# @@ -1184,21 +1184,93 @@ mum +upvotes comment must have to trigger reply%0AMINDOWNVOTES = 0%0A# Minimum upvotes comment - score mus @@ -2587,24 +2587,147 @@ ost. -score %3C MINSCORE +ups %3C MINUPVOTES:%0A # Comment does not meet minimum upvote requirement%0A continue%0A%0A if post.downs %3C MINDOWNVOTES :%0A @@ -2772,12 +2772,15 @@ mum -scor +downvot e re
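The single MINSCORE threshold splits into MINUPVOTES and MINDOWNVOTES, and the filter in replybot() checks post.ups and post.downs separately (this is old PRAW, where comments exposed ups and downs directly). A best-effort reassembly of the patched configuration and filter, whitespace approximate and the comment wording partly inferred from the hunks:

MINUPVOTES = 0
# Minimum upvotes comment must have to trigger reply
MINDOWNVOTES = 0
# Minimum downvotes comment must have to trigger reply

# ... inside the per-post loop of replybot(), the other filters unchanged ...
        if post.ups < MINUPVOTES:
            # Comment does not meet minimum upvote requirement
            continue

        if post.downs < MINDOWNVOTES:
            # Comment does not meet minimum downvote requirement
            continue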
71b93971486ac4bf80284de43962d4704642a890
add missing _ on line 37
riskroll.py
riskroll.py
from sys import exit
from app.RollDice import roll


def get_number_of_combatants():
    """Take no input and return tuple of ints."""
    num_of_attackers = [1,2,3]
    num_of_defenders = [1,2]
    attackers = 0
    defenders = 0

    while attackers not in num_of_attackers:
        attackers = int(raw_input('How many attackers? [1,2,3]\n>'))
    while defenders not in num_of_defenders:
        defenders = int(raw_input('How many defenders? [1,2]\n>'))

    return (attackers, defenders)


def fight(combatants):
    """Input tuple of ints and return tuple of lists of ints."""
    attackers = combatants[0]
    defenders = combatants[1]
    attack_rolls = []
    defence_rolls = []

    attack_rolls = roll.main((attackers, 6))
    defence_rolls = roll.main((defenders, 6))

    return (attack_rolls, defence_rolls)


def divine_winner(attack_rolls, defence_rolls):
    """Take two lists of ints and return tuple."""
    attackrolls.sort()
    defence_rolls.sort()
    attack_wins = 0
    attack_losses = 0
    defence_wins = 0
    defence_losses = 0

    for i in xrange(len(defence_rolls), 0, -1):
        if defence_rolls[i] >= attack_rolls[i]:
            defence_wins = defence_wins + 1
            attack_losses = attack_losses + 1
        else:
            attack_wins = attack_wins + 1
            defence_losses = defence_losses + 1

    attack_wl = (attack_wins,attack_losses)
    defence_wl = (defence_wins,defence_losses)

    return (attack_wl, defence_wl)


def print_results(attack_rolls, defence_rolls, attack_wl, defence_wl):
    print 'Attacker rolls %r' % (attack_rolls)
    print 'Defender rolls %r' % (defence_rolls)
    print '\n'
    print 'Attacker wins %d and loses %d' % (attack_wl[0], attack_wl[1])
    print 'Defender wins %d and loses %d' % (defence_wl[0], defence_wl[1])
    print '\n'


def restart():
    """Determine if another go is needed."""
    options = ['s', 'd', 'x']

    while again not in options:
        again = raw_input('Roll the [s]ame, [d]ifferent, or e[x]it...\n>')

    if again == 's':
        return True
    elif again == 'd':
        return False
    else:
        exit()


if __name__ == '__main__':
    repeat = False

    while True:
        if repeat == False:
            num_combatants = get_number_of_combatants()

        attack_rolls, defence_rolls = fight(num_combatants)
        attack_wl, defence_wl = divine_winner(attack_rolls, defence_rolls)
        print_results(attack_rolls, defence_rolls, attack_wl, defence_wl)
        repeat = restart()
Python
0.999982
@@ -932,16 +932,17 @@ attack +_ rolls.so
d31d767ec4c4452e8a1d5f9dd896ade19e4ac645
Fix tests
run_test.py
run_test.py
import asynctwitch as at


class Bot(at.CommandBot, at.RankedBot):
    pass

bot = Bot(
    user='justinfan100' # read-only client
)

@bot.command("test", desc="Some test command")
async def test(m, arg1:int):
    pass

bot.add_rank("test rank", points=10)

@bot.override
async def raw_event(data):
    print(data)

@bot.override
async def event_roomstate(tags):
    bot.stop(exit=True)
    print('Failed to exit!')

bot.start()
Python
0.000003
@@ -349,16 +349,25 @@ omstate( +channel, tags):%0A
38a086d2c5ebf73f7ad0108def2304262a2e0452
Add trailing comma
runtests.py
runtests.py
#!/usr/bin/env python
import os
import sys

import django

from django.conf import settings


DEFAULT_SETTINGS = dict(
    INSTALLED_APPS=[
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sessions",
        "django.contrib.sites",
        "pinax.likes",
        "pinax.likes.tests"
    ],
    MIDDLEWARE_CLASSES=[],
    DATABASES={
        "default": {
            "ENGINE": "django.db.backends.sqlite3",
            "NAME": ":memory:",
        }
    },
    SITE_ID=1,
    ROOT_URLCONF="pinax.likes.tests.urls",
    SECRET_KEY="notasecret",
    PINAX_LIKES_LIKABLE_MODELS={
        "auth.User": {
            "like_text_on": "unlike",
            "css_class_on": "fa-heart",
            "like_text_off": "like",
            "css_class_off": "fa-heart-o",
            "allowed": lambda user, obj: True
        },
        "tests.Demo": {
            "like_text_on": "unlike",
            "css_class_on": "fa-heart",
            "like_text_off": "like",
            "css_class_off": "fa-heart-o"
        }
    },
    AUTHENTICATION_BACKENDS=[
        "pinax.likes.auth_backends.CanLikeBackend"
    ]
)


def runtests(*test_args):
    if not settings.configured:
        settings.configure(**DEFAULT_SETTINGS)

    django.setup()

    parent = os.path.dirname(os.path.abspath(__file__))
    sys.path.insert(0, parent)

    try:
        from django.test.runner import DiscoverRunner
        runner_class = DiscoverRunner
        test_args = ["pinax.likes.tests"]
    except ImportError:
        from django.test.simple import DjangoTestSuiteRunner
        runner_class = DjangoTestSuiteRunner
        test_args = ["tests"]

    failures = runner_class(verbosity=1, interactive=True, failfast=False).run_tests(test_args)
    sys.exit(failures)


if __name__ == "__main__":
    runtests(*sys.argv[1:])
Python
0.999944
@@ -1135,16 +1135,17 @@ d%22%0A %5D +, %0A)%0A%0A%0Adef
3cc083e08a586e61a8e89a549ba63c6bc5ede2bb
Add :mod:`firmant.writers.staticrst` to tests
runtests.py
runtests.py
#!/usr/bin/python

import gettext
import unittest
import doctest
import sys
from optparse import OptionParser

from minimock import Mock
from pprint import pprint
from pysettings.modules import get_module

gettext.install('firmant')

def safe_displayhook(s):
    if s is not None:
        sys.stdout.write('%r\n' % s)
sys.displayhook = safe_displayhook

if __name__ == '__main__':
    suite = unittest.TestSuite()

    modules = ['firmant.application',
               'firmant.chunks',
               'firmant.du',
               'firmant.paginate',
               'firmant.parsers',
               'firmant.parsers.feeds',
               'firmant.parsers.posts',
               'firmant.parsers.tags',
               'firmant.parsers.static',
               'firmant.routing',
               'firmant.routing.components',
               'firmant.utils',
               'firmant.utils.exceptions',
               'firmant.utils.paths',
               'firmant.writers',
               'firmant.writers.feeds',
               'firmant.writers.posts',
               'firmant.writers.static',
               'firmant.writers.j2'
              ]

    if len(sys.argv[1:]) > 0:
        modules = sys.argv[1:]

    for module in modules:
        mod = get_module(module)
        args = {}
        extraglobs = {'Mock': Mock
                     ,'pprint': pprint
                     }
        for arg, attr in [('module_relative', '_module_relative')
                         ,('package', '_package')
                         ,('setUp', '_setup')
                         ,('tearDown', '_teardown')
                         ,('globs', '_globs')
                         ,('optionflags', '_optionflags')
                         ,('parser', '_parser')
                         ,('encoding', '_encoding')
                         ]:
            if hasattr(mod, attr):
                args[arg] = getattr(mod, attr)
        extraglobs.update(args.get('extraglobs', dict()))
        args['extraglobs'] = extraglobs
        suite.addTest(doctest.DocTestSuite(mod, **args))

    results = unittest.TextTestRunner(verbosity=2).run(suite)
    if not results.wasSuccessful():
        sys.exit(1)
Python
0
@@ -1079,32 +1079,76 @@ riters.static',%0A + 'firmant.writers.staticrst',%0A '
98109c41048bb8330348cd0ab51a175328b056d6
make runtests executable
runtests.py
runtests.py
#!/usr/bin/env python
import sys

from django.conf import settings
from django.core.management import execute_from_command_line

if not settings.configured:
    settings.configure(
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
            }
        },
        INSTALLED_APPS=(
            ## if you use auth.User:
            #'django.contrib.auth',
            ## if you use contenttypes
            # 'django.contrib.contenttypes',
            'my_app',
            'tests',
        ),
        TEST_RUNNER='django_nose.NoseTestSuiteRunner',
        # etc
    )


def runtests():
    argv = sys.argv[:1] + ['test', 'tests']
    execute_from_command_line(argv)


if __name__ == '__main__':
    runtests()
Python
0.000003
8830fece0992a6e1360440b51956c6ae6a4b034a
Add `SECRET_KEY` to django config
runtests.py
runtests.py
#!/usr/bin/env python

import sys
from os.path import abspath, dirname

import django
from django.conf import settings
import django

sys.path.insert(0, abspath(dirname(__file__)))


if not settings.configured:
    settings.configure(
        INSTALLED_APPS=(
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.messages',
            'django.contrib.auth',
            'django.contrib.admin',
            'email_log',
            'email_log.tests',
        ),
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
            }
        },
        EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
        MIDDLEWARE=[
            'django.contrib.sessions.middleware.SessionMiddleware',
            'django.middleware.common.CommonMiddleware',
            'django.contrib.auth.middleware.AuthenticationMiddleware',
            'django.contrib.messages.middleware.MessageMiddleware',
        ],
        ROOT_URLCONF='email_log.tests.urls',
        TEMPLATES=[
            {
                'BACKEND': 'django.template.backends.django.DjangoTemplates',
                'APP_DIRS': True,
                'OPTIONS': {
                    "context_processors": [
                        'django.contrib.auth.context_processors.auth',
                        'django.contrib.messages.context_processors.messages',
                    ]
                }
            },
        ],
    )


def runtests():
    if hasattr(django, 'setup'):
        django.setup()
    try:
        from django.test.runner import DiscoverRunner
        runner_class = DiscoverRunner
        test_args = ['email_log.tests']
    except ImportError:
        from django.test.simple import DjangoTestSuiteRunner
        runner_class = DjangoTestSuiteRunner
        test_args = ['tests']

    failures = runner_class(failfast=False).run_tests(test_args)
    sys.exit(failures)


if __name__ == "__main__":
    runtests()
Python
0.000009
@@ -111,30 +111,65 @@ ettings%0A -import django%0A +from django.utils.crypto import get_random_string %0A%0Asys.pa @@ -210,17 +210,16 @@ e__)))%0A%0A -%0A if not s @@ -238,16 +238,16 @@ igured:%0A - sett @@ -262,16 +262,56 @@ figure(%0A + SECRET_KEY=get_random_string(),%0A @@ -737,19 +737,17 @@ _BACKEND - = += 'django.
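The patch imports django.utils.crypto.get_random_string, drops the duplicate import django, and adds SECRET_KEY=get_random_string() to settings.configure(), since Django refuses to run with an empty secret key. A minimal sketch; note the patch calls get_random_string() with no argument, which older Django allowed, while Django 3.1+ requires an explicit length, hence the 12 below:

from django.conf import settings
from django.utils.crypto import get_random_string

if not settings.configured:
    # A throwaway random key is fine for a standalone test-runner script.
    settings.configure(SECRET_KEY=get_random_string(12))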
d9caaf949e9fe59a656c5986180038b9b3dc34fe
remove league alias
bot/module/commands/command_processor.py
bot/module/commands/command_processor.py
import logging

from bot.module.commands.calendar.calendar_processor import CalendarProcessor
from bot.module.commands.info.info_processor import InfoProcessor
from bot.module.commands.crs.crs_processor import CrsProcessor
from bot.module.commands.wiki.wiki_processor import WikiProcessor


class CommandProcessor(InfoProcessor, CalendarProcessor, CrsProcessor, WikiProcessor):
    """Class processing all commands sent into the chat.

    Attributes:
        grenouille_bot: master class with all modules.
        commands: list of all commands managed by the command processor.
    """

    def __init__(self, grenouille_bot):
        """Define all commands the bot will process.

        Args:
            grenouille_bot: master class with all modules.
        """
        InfoProcessor.__init__(self)
        CalendarProcessor.__init__(self)
        CrsProcessor.__init__(self)
        WikiProcessor.__init__(self)

        self.grenouille_bot = grenouille_bot

        self.commands = [{
            'aliases': ['grenouille', 'help', 'aide'],
            'command': self.help
        }, {
            'aliases': ['motd', 'mdj'],
            'command': self.motd
        }, {
            'aliases': ['who', 'qui'],
            'command': self.who
        }, {
            'aliases': ['youtube', 'y'],
            'command': self.youtube
        }, {
            'aliases': ['instagram', 'i'],
            'command': self.instagram
        }, {
            'aliases': ['twitter', 't'],
            'command': self.twitter
        }, {
            'aliases': ['now'],
            'command': self.now
        }, {
            'aliases': ['next'],
            'command': self.next
        }, {
            'aliases': ['update', 'u'],
            'command': self.update
        }, {
            'aliases': ['toolmix'],
            'command': self.toolmix
        }, {
            'aliases': ['league', 'ligue', 'ftvleague', 'ftvligue'],
            'command': self.league
        }, {
            'aliases': ['wiki'],
            'command': self.wiki
        }]

    def process(self, command_line, sender, is_admin):
        """Process a command.

        Args:
            command_line: Full command line without the ! stating a command.
            sender: String sender of the command.
            is_admin: Boolean representing user rights.
        """
        command_split = command_line.split(' ', maxsplit=1)
        command = self.find_command(command_split[0])
        if command is None:
            return

        if len(command_split) == 1 or command_split[1] == '':
            param_line = None
        else:
            param_line = command_split[1]

        # Call the command
        command(param_line=param_line, sender=sender, is_admin=is_admin)

    def find_command(self, name):
        """Find if asked command exists and returns it.

        Args:
            name: Name of the command object to find.
        Returns:
            The command method responsible to process the command, or None if no
            object is able to process it.
        """
        for command in self.commands:
            if name in command['aliases']:
                return command['command']

        return None
Python
0.000395
@@ -1887,18 +1887,8 @@ ': %5B -'league', 'lig
cc626bef4bb9ad4888362476a3ce9f92154f7d53
Resolve #74 -- Use result.get instastad of ready
health_check/contrib/celery/plugin_health_check.py
health_check/contrib/celery/plugin_health_check.py
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from time import sleep

from django.conf import settings

from health_check.backends.base import (
    BaseHealthCheckBackend,
    ServiceUnavailable
)
from health_check.plugins import plugin_dir

from .tasks import add


class CeleryHealthCheck(BaseHealthCheckBackend):

    def check_status(self):
        timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)

        try:
            result = add.apply_async(
                args=[4, 4],
                expires=datetime.now() + timedelta(seconds=timeout)
            )
            now = datetime.now()
            while (now + timedelta(seconds=3)) > datetime.now():
                print("  checking....")
                if result.ready():
                    try:
                        result.forget()
                    except NotImplementedError:
                        pass
                    return True
                sleep(0.5)
        except IOError:
            raise ServiceUnavailable("IOError")
        except:
            raise ServiceUnavailable("Unknown error")

        raise ServiceUnavailable("Unknown error")


plugin_dir.register(CeleryHealthCheck)
Python
0
@@ -61,31 +61,8 @@ elta -%0Afrom time import sleep %0A%0Afr @@ -180,17 +180,53 @@ vailable -%0A +,%0A ServiceReturnedUnexpectedResult )%0Afrom h @@ -337,17 +337,16 @@ ckend):%0A -%0A def @@ -613,142 +613,34 @@ -now = datetime.now()%0A while (now + timedelta(seconds=3)) %3E datetime.now():%0A print(%22 checking....%22 +result.get(timeout=timeout )%0A @@ -649,20 +649,16 @@ - if resul @@ -665,214 +665,111 @@ t.re -ady():%0A try:%0A result.forget()%0A except NotImplementedError:%0A pass%0A return True%0A sleep(0.5 +sult != 8:%0A self.add_error(ServiceReturnedUnexpectedResult(%22Celery return wrong result%22) )%0A @@ -788,16 +788,21 @@ IOError + as e :%0A @@ -799,38 +799,47 @@ e:%0A -raise +self.add_error( ServiceUnavailab @@ -851,16 +851,20 @@ OError%22) +, e) %0A @@ -874,79 +874,56 @@ cept -:%0A raise ServiceUnavailable(%22Unknown error%22)%0A%0A raise + BaseException as e:%0A self.add_error( Serv @@ -953,16 +953,20 @@ error%22) +, e) %0A%0A%0Aplugi
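Instead of polling result.ready() in a sleep loop, the patched check_status() blocks on result.get(timeout=timeout), verifies the task's answer, and converts failures into add_error() calls (the health_check reporting API) rather than raising directly; the import list gains ServiceReturnedUnexpectedResult and the sleep import goes away. Reassembled from the hunks (whitespace approximate), the new method body, a fragment of the class above, reads roughly:

    def check_status(self):
        timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)

        try:
            result = add.apply_async(
                args=[4, 4],
                expires=datetime.now() + timedelta(seconds=timeout)
            )
            # Block until the worker answers or the timeout expires.
            result.get(timeout=timeout)
            if result.result != 8:
                self.add_error(ServiceReturnedUnexpectedResult("Celery return wrong result"))
        except IOError as e:
            self.add_error(ServiceUnavailable("IOError"), e)
        except BaseException as e:
            self.add_error(ServiceUnavailable("Unknown error"), e)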
b6130ccccf386e542dbe167214ea92fd31b48920
Version up
esipy/__init__.py
esipy/__init__.py
# -*- encoding: utf-8 -*-
""" Entry point of EsiPy, also contains shortcuts for all required objects """
from __future__ import absolute_import

try:
    from .client import EsiClient  # noqa
    from .security import EsiSecurity  # noqa
    from .app import EsiApp  # noqa
    from pyswagger import App  # noqa
except ImportError:  # pragma: no cover
    # Not installed or in install (not yet installed) so ignore
    pass

__version__ = '0.4.2'
Python
0
@@ -463,8 +463,8 @@ 0.4. -2 +3 '%0D%0A
ac6302f506299ed881ad4971ec30367e083c9433
remove unneeded lower() call on repo name in require.rpm.repo(), as we're doing it early in the method
fabtools/require/rpm.py
fabtools/require/rpm.py
""" Rpm packages =============== This module provides high-level tools for managing CentOS/RHEL/SL packages and repositories. """ from __future__ import with_statement from fabtools.system import get_arch from fabtools.rpm import * def package(pkg_name, repos=None, yes=None, options=None): """ Require a rpm package to be installed. Example:: from fabtools import require require.rpm.package('emacs') """ if not is_installed(pkg_name): install(pkg_name, repos, yes, options) def packages(pkg_list, repos=None, yes=None, options=None): """ Require several rpm packages to be installed. Example:: from fabtools import require require.rpm.packages([ 'nano', 'unzip', 'vim', ]) """ pkg_list = [pkg for pkg in pkg_list if not is_installed(pkg)] if pkg_list: install(pkg_list, repos, yes, options) def nopackage(pkg_name, options=None): """ Require a rpm package to be uninstalled. Example:: from fabtools import require require.rpm.nopackage('emacs') """ if is_installed(pkg_name): uninstall(pkg_name, options) def nopackages(pkg_list, options=None): """ Require several rpm packages to be uninstalled. Example:: from fabtools import require require.rpm.nopackages([ 'unzip', 'vim', 'emacs', ]) """ pkg_list = [pkg for pkg in pkg_list if is_installed(pkg)] if pkg_list: uninstall(pkg_list, options) def repository(name): """ Require a repository. Aimed for 3rd party repositories. *Name* currently only supports EPEL and RPMforge. Example:: from fabtools import require # RPMforge packages for CentOS 6 require.rpm.repository('rpmforge') """ name = name.lower() epel_url = 'http://download.fedoraproject.org/pub/epel' rpmforge_url = 'http://packages.sw.be/rpmforge-release/rpmforge-release' rpmforge_version = '0.5.2-2' arch = get_arch() try: release = int(str(distrib_release())) except ValueError: release = int(float(str(distrib_release()))) if release == 6: epel_version = '6-8' elif release == 5: epel_version = '5-4' if name.lower() == 'rpmforge' and arch == 'i386': arch = 'i686' supported = { 'rpmforge': {'%(arch)s' % locals(): { '6': '%(rpmforge_url)s-%(rpmforge_version)s.el6.rf.i686.rpm' % locals(), '5': '%(rpmforge_url)s-%(rpmforge_version)s.el5.rf.x86_64.rpm' % locals()}, 'epel': { '%(arch)s' % locals(): { '6': '%(epel_url)s/6/%(arch)s/epel-release-%(epel_version)s.noarch.rpm' % locals(), '5': '%(epel_url)s/5/%(arch)s/epel-release-%(epel_version)s.noarch.rpm' % locals()}} }} keys = { 'rpmforge': 'http://apt.sw.be/RPM-GPG-KEY.dag.txt', 'epel': '%(epel_url)s/RPM-GPG-KEY-EPEL-%(release)s' % locals() } repo = supported[name][str(arch)][str(release)] key = keys[name] with settings(hide('warnings'), warn_only=True): sudo('rpm --import %(key)s' % locals()) sudo('rpm -Uh %(repo)s' % locals())
Python
0
@@ -2338,24 +2338,16 @@ if name -.lower() == 'rpm
8295c59deae9ddf103f843f93a04dd0f800702df
set default protocol specific data
api/push.py
api/push.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2012, Dongsheng Cai
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the Dongsheng Cai nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL DONGSHENG CAI BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from httplib import BAD_REQUEST, FORBIDDEN, \
    INTERNAL_SERVER_ERROR, ACCEPTED
from routes import route
from api import APIBaseHandler, EntityBuilder
import random
import time
from importlib import import_module
from constants import DEVICE_TYPE_IOS, DEVICE_TYPE_ANDROID, DEVICE_TYPE_WNS, \
    DEVICE_TYPE_MPNS
from pushservices.gcm import GCMUpdateRegIDsException, \
    GCMInvalidRegistrationException, GCMNotRegisteredException, GCMException


@route(r"/api/v2/push[\/]?")
class PushHandler(APIBaseHandler):
    def validate_data(self, data):
        data.setdefault('channel', 'default')
        data.setdefault('sound', None)
        data.setdefault('badge', None)
        data.setdefault('wns', {})
        data.setdefault('gcm', {})
        data.setdefault('mpns', {})
        data.setdefault('apns', {})
        data.setdefault('extra', {})
        return data

    def get_apns_conn(self):
        if not self.apnsconnections.has_key(self.app['shortname']):
            self.send_response(INTERNAL_SERVER_ERROR, dict(error="APNs is offline"))
            return
        count = len(self.apnsconnections[self.app['shortname']])
        # Find an APNS instance
        random.seed(time.time())
        instanceid = random.randint(0, count - 1)
        return self.apnsconnections[self.app['shortname']][instanceid]

    def post(self):
        try:
            """ Send notifications """
            if not self.can("send_notification"):
                self.send_response(FORBIDDEN, dict(error="No permission to send notification"))
                return

            # if request body is json entity
            data = self.json_decode(self.request.body)
            data = self.validate_data(data)

            # Hook
            if 'extra' in data:
                if 'processor' in data['extra']:
                    try:
                        proc = import_module('hooks.'
                                             + data['extra']['processor'])
                        data = proc.process_pushnotification_payload(data)
                    except Exception, ex:
                        self.send_response(BAD_REQUEST, dict(error=str(ex)))

            if not self.token:
                self.token = data.get('token', None)

            # iOS and Android shared params (use sliptlines trick to remove line ending)
            alert = ''.join(data['alert'].splitlines())

            # application specific data
            extra = data.get('extra', {})

            device = data.get('device', DEVICE_TYPE_IOS).lower()
            channel = data.get('channel', 'default')

            token = self.db.tokens.find_one({'token': self.token})
            if not token:
                token = EntityBuilder.build_token(self.token, device, self.appname, channel)
                if not self.can("create_token"):
                    self.send_response(BAD_REQUEST, dict(error="Unknow token and you have no permission to create"))
                    return
                try:
                    # TODO check permission to insert
                    self.db.tokens.insert(token, safe=True)
                except Exception as ex:
                    self.send_response(INTERNAL_SERVER_ERROR, dict(error=str(ex)))
            logmessage = 'Message length: %s, Access key: %s' %(len(alert), self.appkey)
            self.add_to_log('%s notification' % self.appname, logmessage)
            if device == DEVICE_TYPE_IOS:
                self.get_apns_conn().process(token=self.token, alert=alert, extra=extra, apns=data['apns'])
                self.send_response(ACCEPTED)
            elif device == DEVICE_TYPE_ANDROID:
                try:
                    gcm = self.gcmconnections[self.app['shortname']][0]
                    response = gcm.process(token=[self.token], alert=alert, extra=data['extra'], gcm=data['gcm'])
                    responsedata = response.json()
                    if responsedata['failure'] == 0:
                        self.send_response(ACCEPTED)
                except GCMUpdateRegIDsException as ex:
                    self.send_response(ACCEPTED)
                except GCMInvalidRegistrationException as ex:
                    self.send_response(BAD_REQUEST, dict(error=str(ex), regids=ex.regids))
                except GCMNotRegisteredException as ex:
                    self.send_response(BAD_REQUEST, dict(error=str(ex), regids=ex.regids))
                except GCMException as ex:
                    self.send_response(INTERNAL_SERVER_ERROR, dict(error=str(ex)))
            elif device == DEVICE_TYPE_WNS:
                wns = self.wnsconnections[self.app['shortname']][0]
                wns.process(token=data['token'], alert=data['alert'], extra=extra, wns=data['wns'])
                self.send_response(ACCEPTED)
            elif device == DEVICE_TYPE_MPNS:
                mpns = self.mpnsconnections[self.app['shortname']][0]
                mpns.process(token=data['token'], alert=data['alert'], extra=extra, mpns=data['mpns'])
                self.send_response(ACCEPTED)
            else:
                self.send_response(BAD_REQUEST, dict(error='Invalid device type'))
        except Exception, ex:
            self.send_response(INTERNAL_SERVER_ERROR, dict(error=str(ex)))
Python
0.000001
@@ -2225,150 +2225,8 @@ ne)%0A - data.setdefault('wns', %7B%7D)%0A data.setdefault('gcm', %7B%7D)%0A data.setdefault('mpns', %7B%7D)%0A data.setdefault('apns', %7B%7D)%0A @@ -4801,16 +4801,60 @@ PE_IOS:%0A + data.setdefault('apns', %7B%7D)%0A @@ -5046,16 +5046,59 @@ NDROID:%0A + data.setdefault('gcm', %7B%7D)%0A @@ -6023,24 +6023,67 @@ E_TYPE_WNS:%0A + data.setdefault('wns', %7B%7D)%0A @@ -6324,24 +6324,68 @@ _TYPE_MPNS:%0A + data.setdefault('mpns', %7B%7D)%0A
f641c4be6e88aac1e1968ca8f07c5294d4dfe6fa
Bump version
facturapdf/__about__.py
facturapdf/__about__.py
__title__ = 'facturapdf'
__summary__ = 'Create PDF invoice according to Spanish regulations.'
__version__ = '0.0.2'

__license__ = 'BSD 3-Clause License'
__uri__ = 'https://github.com/initios/factura-pdf'

__author__ = 'Carlos Goce'
__email__ = '[email protected]'
Python
0
@@ -110,9 +110,9 @@ 0.0. -2 +3 '%0A%0A_
32f06d1225eb578d3aaee52c8ece4b9bde6a23d7
Version 0.0.7
fastforward/__init__.py
fastforward/__init__.py
__version__ = '0.0.6'
__author__ = 'nofdev'
Python
0.000001
@@ -16,9 +16,9 @@ 0.0. -6 +7 '%0A__
bd5b1c6506c7cf7d38eae31d4250300e1953ae26
Version bump
featureflow/__init__.py
featureflow/__init__.py
__version__ = '0.5.5'

from model import BaseModel
from feature import Feature, JSONFeature, TextFeature, CompressedFeature, \
    PickleFeature
from extractor import Node, Graph, Aggregator, NotEnoughData
from bytestream import ByteStream, ByteStreamFeature
from data import \
    IdProvider, UuidProvider, UserSpecifiedIdProvider, StaticIdProvider, \
    KeyBuilder, StringDelimitedKeyBuilder, Database, FileSystemDatabase, \
    InMemoryDatabase
from datawriter import DataWriter
from database_iterator import DatabaseIterator
from encoder import IdentityEncoder
from decoder import Decoder
from lmdbstore import LmdbDatabase
from persistence import PersistenceSettings

try:
    from nmpy import StreamingNumpyDecoder, NumpyMetaData, NumpyFeature
except ImportError:
    pass
Python
0.000001
@@ -12,17 +12,17 @@ = '0.5. -5 +6 '%0A%0Afrom
1e3199618f55be86fa5e4259d1c6e4a7074e57ca
Update environment.py
features/environment.py
features/environment.py
""" Copyright 2017 Raul Alvarez Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """import Selenium def before_all(context): runner = Selenium.Selenium() context.runner = runner def after_all(context): context.runner.quit()
Python
0.000001
@@ -580,16 +580,17 @@ nse.%0A%22%22%22 +%0A import S
44a41555d4f2ec3eed090711f34b233085e1aebf
add missing config entries
feedservice/settings.py
feedservice/settings.py
# -*- coding: utf-8 -*-

import os, os.path


def bool_env(val, default):
    """Replaces string based environment values with Python booleans"""
    if not val in os.environ:
        return default
    return True if os.environ.get(val) == 'True' else False


DEBUG = bool_env('MYGPOFS_DEBUG', True)
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    ('Stefan Kögl', '[email protected]'),
)

MANAGERS = ADMINS

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Static asset configuration
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.join(BASE_DIR, '../htdocs')
STATIC_ROOT = 'static'
STATIC_URL = '/media/'

STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'media'),
)

# Make this unique, and don't share it with anybody.
SECRET_KEY = 'm6jkg5lzard@k^p(wui4gtx_zu4s=26c+c0bk+k1xsik6+derf'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.app_directories.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

ROOT_URLCONF = 'feedservice.urls'

TEMPLATE_DIRS = (
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'feedservice.parse',
    'feedservice.urlstore',
    'feedservice.webservice',
)

SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

BASE_URL='http://localhost:8080/'

import dj_database_url
DATABASES = {'default': dj_database_url.config()}

SOUNDCLOUD_CONSUMER_KEY = os.getenv('MYGPOFS_SOUNDCLOUD_CONSUMER_KEY', '')

FLATTR_THING = ''

ALLOWED_HOSTS = filter(None, os.getenv('MYGPOFS_ALLOWED_HOSTS', '').split(';'))


try:
    from settings_prod import *
except ImportError, e:
    import sys
    print >> sys.stderr, 'create settings_prod.py with your customized settings'
Python
0.000002
@@ -1990,16 +1990,67 @@ .urls'%0A%0A +WSGI_APPLICATION = 'feedservice.wsgi.application'%0A%0A TEMPLATE @@ -2643,16 +2643,873 @@ ';'))%0A%0A%0A +# A sample logging configuration. The only tangible logging%0A# performed by this configuration is to send an email to%0A# the site admins on every HTTP 500 error when DEBUG=False.%0A# See http://docs.djangoproject.com/en/dev/topics/logging for%0A# more details on how to customize your logging configuration.%0ALOGGING = %7B%0A 'version': 1,%0A 'disable_existing_loggers': False,%0A 'filters': %7B%0A 'require_debug_false': %7B%0A '()': 'django.utils.log.RequireDebugFalse'%0A %7D%0A %7D,%0A 'handlers': %7B%0A 'mail_admins': %7B%0A 'level': 'ERROR',%0A 'filters': %5B'require_debug_false'%5D,%0A 'class': 'django.utils.log.AdminEmailHandler'%0A %7D%0A %7D,%0A 'loggers': %7B%0A 'django.request': %7B%0A 'handlers': %5B'mail_admins'%5D,%0A 'level': 'ERROR',%0A 'propagate': True,%0A %7D,%0A %7D%0A%7D%0A%0A%0A try:%0A
576ea74646935c00e051a46244b8f56165710df0
Add multicast send
circuits/node/server.py
circuits/node/server.py
# Module:   server
# Date:     ...
# Author:   ...

"""Server

...
"""

from circuits.net.sockets import TCPServer
from circuits import handler, BaseComponent

from .protocol import Protocol


class Server(BaseComponent):
    """Server

    ...
    """

    channel = 'node'
    __protocol = {}

    def __init__(self, bind, channel=channel, **kwargs):
        super(Server, self).__init__(channel=channel, **kwargs)

        self.server = TCPServer(bind, channel=self.channel, **kwargs)
        self.server.register(self)
        self.__receive_event_firewall = kwargs.get(
            'receive_event_firewall',
            None
        )
        self.__send_event_firewall = kwargs.get(
            'send_event_firewall',
            None
        )

    def send(self, event, sock):
        return self.__protocol[sock].send(event)

    def send_all(self, event):
        for sock in self.__protocol:
            self.__protocol[sock].send(event)

    @handler('read')
    def _on_read(self, sock, data):
        self.__protocol[sock].add_buffer(data)

    @property
    def host(self):
        if hasattr(self, 'server'):
            return self.server.host

    @property
    def port(self):
        if hasattr(self, 'server'):
            return self.server.port

    @handler('connect')
    def __connect_peer(self, sock, host, port):
        self.__protocol[sock] = Protocol(
            sock=sock,
            server=self.server,
            receive_event_firewall=self.__receive_event_firewall,
            send_event_firewall=self.__send_event_firewall
        ).register(self)

    @handler('disconnect')
    def __disconnect_peer(self, sock):
        for s in self.__protocol.copy():
            try:
                s.getpeername()
            except:
                del(self.__protocol[s])
Python
0.000001
@@ -824,24 +824,124 @@ end(event)%0A%0A + def send_to(self, event, socks):%0A for sock in socks:%0A self.send(event, sock)%0A%0A def send
8c68031928f54d38c92308504bc93bf61ead57f5
Update clashcallerbot_reply.py added get list of messages older than current datetime, updated outline
clashcallerbot_reply.py
clashcallerbot_reply.py
#! python3
# -*- coding: utf-8 -*-

"""Checks messages in database and sends PM if expiration time passed.

This module checks messages saved in a MySQL-compatible database and sends
a reminder via PM if the expiration time has passed. If so, the message is
removed from the database.
"""

import praw
import praw.exceptions

import logging.config
import datetime

import clashcallerbot_database as db

# Logger
logging.config.fileConfig('logging.conf', disable_existing_loggers=False)
logging.raiseExceptions = True  # Production mode if False (no console sys.stderr output)
logger = logging.getLogger('reply')

# Generate reddit instance
reddit = praw.Reddit('clashcallerreply')  # Section name in praw.ini
subreddit = reddit.subreddit('ClashCallerBot')  # Limit scope for testing purposes


def main():
    while True:
        # TODO: Get list of messages ordered by expiration date (in MySQL)

        # TODO: Compare each message expiration datetime to current datetime (in MySQL?)

        # TODO: If current datetime is after expiration datetime, send PM

        # TODO: Delete message from database
        pass


# If run directly, instead of imported as a module, run main():
if __name__ == '__main__':
    main()
Python
0
@@ -823,22 +823,16 @@ # - TODO: Get lis @@ -850,206 +850,211 @@ es o -rdered by expiration date (in MySQL)%0A%0A # TODO: Compare each message expiration datetime to current datetime (in MySQL?)%0A%0A # TODO: If current datetime is after expiration datetime, send +lder than current datetime%0A now = datetime.datetime.now(datetime.timezone.utc)%0A messages = db.get_messages(now)%0A%0A if not messages:%0A continue%0A%0A # TODO: Send reminder PM%0A @@ -1103,21 +1103,8 @@ ase%0A - pass%0A %0A%0A#
f7f16611754181c28b1c8c6a3e5942731f851c46
add some docstring
fileparser/extractor.py
fileparser/extractor.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
r"""
# .---.                      .-----------
#  /     \  __  /    ------
# / /     \(  )/    -----     (`-') _            _(`-')      <-.    (`-')_
# ////// '\/ `   ---          ( OO).-/(          (OO ).->     .->    \( OO) )    .->
# //// / // :    : ---       (,------. \         .'_    (`-')----. ,--./ ,--/ ,--.' ,-.
# // /   /  / `\/    '--      |  .---' '`'-..__)( OO).-.  '  |   \ |  |  (`-')'.'  /
# // //..\\                  (|  '--.   |  |  ' |(  _) |  |  |  |  . '|  |)(OO  \    /
# ============UU====UU====    |  .--'   |  |  / :  \|  |)|  |  |  |\    |   |  /   /)
# '//||\\`                    |  `---.  |  '-'  /  '  '-'  '  |  |  \   |   `-/   /`
#   ''``                      `------'  `------'   `-----'   `--'   `--'      `--'
# ######################################################################################
#
# Author: edony - [email protected]
#
# twitter : @edonyzpc
#
# Last modified: 2015-05-28 22:20
#
# Filename: extractor.py
#
# Description: All Rights Are Reserved
#
# ******
# Extract the specific content from text with the given keys.
"""

import os
import re

class Extractor(object):
    """
    Extract the specific content with keys.
    """
    def __init__(self, keys, extracted_file, output_file=None, flag=None):
        if type(keys) is list:
            self.keys = keys
        elif type(keys) is str:
            self.keys =[keys]
        else:
            raise ValueError("Wrong Key type")
        if output_file:
            self.output_file = output_file
        else:
            self.output_file = 'EXTRACT'
        if flag is None:
            self.flag = [0, 0] #flag are controlling add the keys into write file
        else:
            self.flag = flag
        self.pattern = Extractor.re_pattern(self.keys)
        self.extracted_file = extracted_file

    @staticmethod
    def re_pattern(keys):
        if len(keys) > 2:
            raise ValueError("The keys are too much, simplify them less than 2.\n")
        regular_expression = keys[0] + '(?P<con>.*)'
        if len(keys) == 2:
            regular_expression += keys[1]
        return re.compile(regular_expression)

    def parser(self):
        with open(self.output_file, 'w') as out:
            with open(self.extracted_file) as exfile:
                for line in exfile.readlines():
                    g = self.pattern.search(line)
                    if g:
                        if self.flag[0]:
                            out.write(self.keys[0])
                        out.write(g.group('con'))
                        if self.flag[1]:
                            out.write(self.keys[1])
                        out.write('\n')
        print('Finish Extract')

if __name__ == '__main__':
    tmp = Extractor('http:', 'career_old', flag=[1,0])
    tmp.parser()
Python
0.000041
@@ -2206,16 +2206,90 @@ (self):%0A + %22%22%22%0A Extract the content between keys(if has).%0A %22%22%22%0A
42923355855a53cd7d6df23f666c2c74a07c7068
fix healpix_helper to take of nans
lib/healpix_helper.py
lib/healpix_helper.py
import pyfits
import numpy as np
try:
    import pywcs
except ImportError:
    import astropy.pywcs as pywcs

import healpy
import warnings

class HealpixData(object):
    def __init__(self, nside, data, coord=None, nested=False, flipy=False):
        self._nside = nside
        self._data = data
        self._nested = nested
        self._flipy = flipy
        self._coord = coord
        
    def get_projected_map(self, header):
        map_shape = (header["naxis2"], header["naxis1"])
        iy, ix = np.indices(map_shape)
        wcs = pywcs.WCS(header)
        phi, theta = wcs.wcs_pix2sky(ix, iy, 0)

        if self._coord is not None:
            from pywcsgrid2.wcs_helper import coord_system_guess, sky2sky
            map_coord = coord_system_guess(header["ctype1"], header["ctype2"], equinox=header["equinox"])
            if (map_coord is not None) and (map_coord != self._coord):
                warnings.warn(" doing the conversion " + map_coord)
                phi, theta = sky2sky(map_coord, self._coord)(phi, theta)
        
        if self._flipy:
            theta -= 90
            theta *= -np.pi/180.
        else:
            theta += 90
            theta *= np.pi/180.

        phi *= np.pi/180

        if self._nested:
            ang2pix = healpy._healpy_pixel_lib._ang2pix_nest
        else:
            ang2pix = healpy._healpy_pixel_lib._ang2pix_ring

        ipix = ang2pix(self._nside, theta, phi)

        map_data = self._data[ipix].reshape(map_shape)

        return map_data

if __name__ == '__main__':
    
    fname = "LAB_fullvel.fits"
    f = pyfits.open(fname)
    header = f[1].header
    
    ordering = header["ordering"]
    nside = header["nside"]
    data = f[1].data["temperature"]
    
    healpix_data = HealpixData(nside, data.flat, nested=False)

    fits_name = "lambda_mollweide_halpha_fwhm06_0512.fits"
    f2 = pyfits.open(fits_name)
    d = healpix_data.get_projected_map(f2[1].header)

    #data2 = f2[1].data
    #header2 = f2[1].header
Python
0
@@ -379,24 +379,16 @@ = coord%0A - %0A def @@ -1117,41 +1117,9 @@ ta)%0A - %0A +%0A %0A @@ -1455,16 +1455,177 @@ x_ring%0A%0A +%0A # some values could be NaNs. Maske those out before calling%0A # ang2pix and recover them.%0A mask = np.isfinite(theta) & np.isfinite(theta)%0A%0A @@ -1661,13 +1661,25 @@ heta -, phi +%5Bmask%5D, phi%5Bmask%5D )%0A%0A @@ -1693,16 +1693,17 @@ map_data +_ = self. @@ -1717,27 +1717,140 @@ pix%5D -.reshape(map_shape) +%0A map_data = np.empty(map_shape, dtype=map_data_.dtype)%0A map_data.fill(np.nan)%0A map_data.flat%5Bmask%5D = map_data_ %0A%0A @@ -1900,20 +1900,16 @@ ain__':%0A - %0A fna @@ -1984,20 +1984,16 @@ .header%0A - %0A ord @@ -2083,20 +2083,16 @@ ature%22%5D%0A - %0A hea @@ -2293,20 +2293,16 @@ header)%0A - %0A #da
2cbae4650422f7982ef50e564e9e27e7fd294be8
Add ability to nix product to admin
fjord/feedback/admin.py
fjord/feedback/admin.py
from django.contrib import admin
from django.core.exceptions import PermissionDenied

from fjord.feedback.models import Product, Response


class ProductAdmin(admin.ModelAdmin):
    list_display = (
        'id', 'enabled', 'on_dashboard', 'display_name', 'db_name',
        'translation_system', 'notes', 'slug')
    list_filter = ('enabled', 'on_dashboard')


class EmptyFriendlyAVFLF(admin.AllValuesFieldListFilter):
    def choices(self, cl):
        """Displays empty string as <Empty>

        This makes it possible to choose Empty in the filter list.
        Otherwise empty strings display as '' and don't get any height
        and thus aren't selectable.

        """
        for choice in super(EmptyFriendlyAVFLF, self).choices(cl):
            if choice.get('display') == '':
                choice['display'] = '<Empty>'
            yield choice


class ResponseFeedbackAdmin(admin.ModelAdmin):
    list_display = ('created', 'product', 'channel', 'version', 'happy',
                    'description', 'user_agent', 'locale')
    list_filter = ('happy', ('product', EmptyFriendlyAVFLF),
                   ('locale', EmptyFriendlyAVFLF))
    search_fields = ('description',)

    def queryset(self, request):
        # Note: This ignores the super() queryset and uses the
        # uncached manager.
        return Response.uncached.all()

    def has_add_permission(self, request, obj=None):
        # Prevent anyone from adding feedback in the admin.
        return False

    def change_view(self, request, *args, **kwargs):
        # We don't want anyone (including superusers) to change
        # feedback. It's either keep it or delete it.
        #
        # That's sort of difficult with Django without writing a bunch
        # of stuff, so I'm lazily preventing POST here.
        #
        # TODO: Make this better, but push off any changes until other
        # non-superuser people have access to this view and it becomes
        # a relevant issue.
        if request.method == 'POST':
            raise PermissionDenied()
        return super(ResponseFeedbackAdmin, self).change_view(
            request, *args, **kwargs)


admin.site.register(Product, ProductAdmin)
admin.site.register(Response, ResponseFeedbackAdmin)
Python
0
@@ -1231,16 +1231,303 @@ ption',) +%0A actions = %5B'nix_product'%5D%0A list_per_page = 200%0A%0A def nix_product(self, request, queryset):%0A ret = queryset.update(product=u'')%0A self.message_user(request, '%25s responses updated.' %25 ret)%0A nix_product.short_description = u'Remove product for selected responses' %0A%0A de
08c2a121365f6d78a50117957db5dde2076e6263
update description
flaskext/mongoobject.py
flaskext/mongoobject.py
# -*- coding: utf-8 -*-
"""
flaskext.mongoobject
~~~~~~~~~~~~~~~~~~~~

Flask Extension for MongoDB

Inspiration:
https://github.com/slacy/minimongo/

:copyright: (c) 2011 by dqminh.
:license: MIT, see LICENSE for more details.
"""
from __future__ import with_statement, absolute_import

from bson.dbref import DBRef
from bson.son import SON
from pymongo import Connection
from pymongo.collection import Collection
from pymongo.cursor import Cursor
from pymongo.son_manipulator import AutoReference, NamespaceInjector
from flask import abort


class AttrDict(dict):
    def __init__(self, initial=None, **kwargs):
        # Make sure that during initialization, that we recursively apply
        # AttrDict. Maybe this could be better done with the builtin
        # defaultdict?
        if initial:
            for key, value in initial.iteritems():
                # Can't just say self[k] = v here b/c of recursion.
                self.__setitem__(key, value)

        # Process the other arguments (assume they are also default values).
        # This is the same behavior as the regular dict constructor.
        for key, value in kwargs.iteritems():
            self.__setitem__(key, value)

        super(AttrDict, self).__init__()

    # These lines make this object behave both like a dict (x['y']) and like
    # an object (x.y). We have to translate from KeyError to AttributeError
    # since model.undefined raises a KeyError and model['undefined'] raises
    # a KeyError. we don't ever want __getattr__ to raise a KeyError, so we
    # 'translate' them below:
    def __getattr__(self, attr):
        try:
            return super(AttrDict, self).__getitem__(attr)
        except KeyError as excn:
            raise AttributeError(excn)

    def __setattr__(self, attr, value):
        try:
            # Okay to set directly here, because we're not recursing.
            self[attr] = value
        except KeyError as excn:
            raise AttributeError(excn)

    def __delattr__(self, key):
        try:
            return super(AttrDict, self).__delitem__(key)
        except KeyError as excn:
            raise AttributeError(excn)

    def __setitem__(self, key, value):
        # Coerce all nested dict-valued fields into AttrDicts
        new_value = value
        if isinstance(value, dict) and not isinstance(value, AttrDict):
            new_value = AttrDict(value)
        return super(AttrDict, self).__setitem__(key, new_value)


class MongoCursor(Cursor):
    """
    A cursor that will return an instance of :attr:`wrapper_class`
    instead of `dict`
    """
    def __init__(self, *args, **kwargs):
        self.document_class = kwargs.pop('as_class')
        super(MongoCursor, self).__init__(*args, **kwargs)

    def next(self):
        data = super(MongoCursor, self).next()
        return self.document_class(data)

    def __getitem__(self, index):
        item = super(MongoCursor, self).__getitem__(index)
        if isinstance(index, slice):
            return item
        else:
            return self.document_class(item)


class AutoReferenceObject(AutoReference):
    def __init__(self, mongo):
        self.mongo = mongo
        self.db = mongo.session

    def transform_outgoing(self, son, collection):
        def transform_value(value):
            if isinstance(value, DBRef):
                data = self.__database.dereference(value)
                return self.mongo.models_map[data['_ns']](data)
            elif isinstance(value, list):
                return [transform_value(v) for v in value]
            elif isinstance(value, dict):
                if value.get('_ns', None):
                    return self.mongo.models_map[value['_ns']](
                        transform_dict(SON(value)))
                return transform_dict(SON(value))
            return value

        def transform_dict(object):
            for (key, value) in object.items():
                object[key] = transform_value(value)
            return object

        value = transform_dict(SON(son))
        return value


class BaseQuery(Collection):

    def __init__(self, *args, **kwargs):
        self.document_class = kwargs.pop('document_class')
        super(BaseQuery, self).__init__(*args, **kwargs)

    def find_one(self, *args, **kwargs):
        kwargs['as_class'] = self.document_class
        return super(BaseQuery, self).find_one(*args, **kwargs)

    def find(self, *args, **kwargs):
        kwargs['as_class'] = self.document_class
        return MongoCursor(self, *args, **kwargs)

    def find_and_modify(self, *args, **kwargs):
        kwargs['as_class'] = self.document_class
        return super(BaseQuery, self).find_and_modify(*args, **kwargs)

    def get_or_404(self, id):
        item = self.find_one(id, as_class=self.document_class)
        if not item:
            abort(404)
        return item


class _QueryProperty(object):

    def __init__(self, mongo):
        self.mongo = mongo

    def __get__(self, instance, owner):
        self.mongo.models_map[owner.__collection__] = owner
        return owner.query_class(database=self.mongo.session,
                                 name=owner.__collection__,
                                 document_class=owner)


class Model(AttrDict):
    """Base class for custom user models."""

    #: Query class
    query_class = BaseQuery

    #: instance of :attr:`query_class`
    query = None

    #: name of this model collection
    __collection__ = None

    def __init__(self, *args, **kwargs):
        assert 'query_class' not in kwargs
        assert 'query' not in kwargs
        assert '__collection__' not in kwargs
        super(Model, self).__init__(*args, **kwargs)

    def save(self, *args, **kwargs):
        self.query.save(self, *args, **kwargs)
        return self

    def remove(self):
        return self.query.remove(self._id)

    def __str__(self):
        return '%s(%s)' % (self.__class__.__name__, super(Model, self).__str__())

    def __unicode__(self):
        return str(self).decode('utf-8')


class MongoObject(object):
    def __init__(self, app=None):
        if app is not None:
            self.app = app
            self.init_app(app)
        self.Model = self.make_model()

    def init_app(self, app):
        app.config.setdefault('MONGODB_HOST', "mongodb://localhost:27017")
        app.config.setdefault('MONGODB_DATABASE', "")
        app.config.setdefault('MONGODB_AUTOREF', True)
        # initialize connection and Model properties
        self.app = app
        self.models_map = {}
        self.init_connection()

    def init_connection(self):
        self.connection = Connection(self.app.config['MONGODB_HOST'])

    def make_model(self):
        model = Model
        model.query = _QueryProperty(self)
        return model

    @property
    def session(self):
        if not getattr(self, "db", None):
            self.db = self.connection[self.app.config['MONGODB_DATABASE']]
            if self.app.config['MONGODB_AUTOREF']:
                self.db.add_son_manipulator(NamespaceInjector())
                self.db.add_son_manipulator(AutoReferenceObject(self))
        return self.db

    def close_connection(self, response):
        self.connecton.end_request()
        return response

    def clear(self):
        self.connection.drop_database(self.app.config['MONGODB_DATABASE'])
        self.connection.end_request()
Python
0.000001
@@ -141,16 +141,62 @@ nimongo/ +%0Ahttps://github.com/mitsuhiko/flask-sqlalchemy %0A%0A:copyr
53d26896f9bd65a791d2b1967535e107254c269e
Use built-in HTML parser for django-compressor.
flicks/settings/base.py
flicks/settings/base.py
# This is your project's main settings file that can be committed to your
# repo. If you need to override a setting locally, use settings_local.py

from funfactory.settings_base import *

PROD_LANGUAGES = ('de', 'en-US', 'es', 'fr', 'nl', 'pl', 'pt-BR', 'sl', 'sq',
                  'zh-TW')

# Defines the views served for root URLs.
ROOT_URLCONF = 'flicks.urls'

AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
)

# Paths that do not need a locale
SUPPORTED_NONLOCALES += ['admin', 'robots.txt']

INSTALLED_APPS = list(INSTALLED_APPS) + [
    'flicks.base',
    'flicks.users',
    'flicks.videos',

    'django.contrib.admin',

    'compressor',
    'csp',
    'jingo_offline_compressor',
    'django_statsd',
    'jingo_minify',
    'south',
    'waffle',
]

MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES) + [
    'commonware.response.middleware.StrictTransportMiddleware',
    'csp.middleware.CSPMiddleware',
    'django_statsd.middleware.GraphiteRequestTimingMiddleware',
    'django_statsd.middleware.GraphiteMiddleware',
    'waffle.middleware.WaffleMiddleware',
]

AUTH_PROFILE_MODULE = 'flicks.UserProfile'

# Because Jinja2 is the default template loader, add any non-Jinja templated
# apps here:
JINGO_EXCLUDE_APPS = [
    'admin',
    'registration',
]

# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
DOMAIN_METHODS = {
    'messages': [
        ('**/flicks/**.py',
            'tower.management.commands.extract.extract_tower_python'),
        ('**/flicks/**/templates/**.html',
            'tower.management.commands.extract.extract_tower_template')
    ],
}

# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['lhtml'] = [
#    ('**/templates/**.lhtml',
#        'tower.management.commands.extract.extract_tower_template'),
# ]

# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['javascript'] = [
#    # Make sure that this won't pull in strings from external libraries you
#    # may use.
#    ('media/js/**.js', 'javascript'),
# ]

# Always generate a CSRF token for anonymous users
ANON_ALWAYS = True

# Email Settings
DEFAULT_FROM_EMAIL = '[email protected]'

# Secure Cookies
SESSION_COOKIE_SECURE = True

# Django-CSP
CSP_IMG_SRC = ("'self'", 'data:',
               'https://d3fenhwk93s16g.cloudfront.net',
               'https://www.gravatar.com',
               'https://secure.gravatar.com',
               'http://www.google-analytics.com',
               'https://ssl.google-analytics.com',
               'http://*.mozilla.org',
               'https://*.mozilla.org',
               'http://*.mozilla.net',
               'https://*.mozilla.net',)
CSP_STYLE_SRC = ("'self'",
                 'https://fonts.googleapis.com',
                 'http://*.mozilla.org',
                 'https://*.mozilla.org',
                 'http://*.mozilla.net',
                 'https://*.mozilla.net',
                 'http://*.vimeo.com',
                 'https://*.vimeo.com',)
CSP_FONT_SRC = ("'self'",
                'https://themes.googleusercontent.com',
                'http://*.mozilla.org',
                'https://*.mozilla.org',
                'http://*.mozilla.net',
                'https://*.mozilla.net',)
CSP_SCRIPT_SRC = ("'self'",
                  'http://browserid.org',
                  'https://browserid.org',
                  'http://login.persona.org',
                  'https://login.persona.org',
                  'https://platform.twitter.com',
                  'https://connect.facebook.net',
                  'http://www.google-analytics.com',
                  'https://ssl.google-analytics.com',
                  'http://*.mozilla.org',
                  'https://*.mozilla.org',
                  'http://*.mozilla.net',
                  'https://*.mozilla.net',
                  'http://*.vimeo.com',
                  'https://*.vimeo.com',
                  'https://*.vimeocdn.com',)
CSP_FRAME_SRC = ('https://vid.ly',
                 'http://platform.twitter.com',
                 'https://platform.twitter.com',
                 'https://www.facebook.com',
                 'http://*.vimeo.com',
                 'https://*.vimeo.com',
                 'https://*.vimeocdn.com',)
CSP_OPTIONS = ('eval-script', 'inline-script')

# Activate statsd patches to time database and cache hits.
STATSD_PATCHES = [
    'django_statsd.patches.db',
    'django_statsd.patches.cache',
]

# Video preview settings
PREVIEW_PATH = lambda inst, filename: 'previews/images/%s_%s' % (inst.id,
                                                                 filename)
MAX_FILEPATH_LENGTH = 100

# Google Analytics
GA_ACCOUNT_CODE = ''

# Allow robots to crawl the site.
ENGAGE_ROBOTS = True

# Gravatar Settings
GRAVATAR_URL = 'https://secure.gravatar.com'
DEFAULT_GRAVATAR = MEDIA_URL + 'img/anon_user.png'

# Promo video shortlinks
PROMO_VIDEOS = {
    'noir': {
        'en-us': '3q4s0q',
        'fr': '9j6k9j',
        'de': '7r0d1f',
        'es': '5m9i4w',
        'ja': '8r9w3d',
        'lij': '8y4r4v',
        'nl': '8d0f4b',
        'pl': '8u7s6j',
        'sl': '6e3t9x',
        'sq': '7c9p0d',
        'zh-cn': '0i8v1n',
        'zh-tw': '3r1o8k'
    },
    'dance': {
        'en-us': '3x8n2e',
        'fr': '2s8o4r',
        'de': '5i1u9r',
        'es': '8r3y6e',
        'ja': '5o7b0l',
        'lij': '7a8r6a',
        'nl': '0m4s3u',
        'pl': '4v1w8v',
        'sl': '6v3h2g',
        'sq': '0o5k7n',
        'zh-cn': '9w8d4k',
        'zh-tw': '5q2v4y'
    },
    'twilight': {
        'en-us': '6d9t7l',
        'fr': '4k0a3w',
        'de': '8n1f7u',
        'es': '0y9t0e',
        'ja': '3f9o1c',
        'lij': '5i0n9p',
        'nl': '8c5a2f',
        'pl': '3d8u9p',
        'sl': '9e2i0u',
        'sq': '3c8y0t',
        'zh-cn': '4w9f9x',
        'zh-tw': '3m0y4x'
    }
}
Python
0
@@ -4865,16 +4865,86 @@ r.png'%0A%0A +# django-compressor%0ACOMPRESS_PARSER = 'compressor.parser.HtmlParser'%0A%0A # Promo
e19e2d69baabac3adedfae4e7a8c6ef5bb3d6f53
Fix alembic script
alembic/versions/4e0500347ce7_add_multigame_tables.py
alembic/versions/4e0500347ce7_add_multigame_tables.py
"""add multigame tables Revision ID: 4e0500347ce7 Revises: 29344aa34d9 Create Date: 2016-03-30 12:26:36.632566 """ # revision identifiers, used by Alembic. revision = '4e0500347ce7' down_revision = '29344aa34d9' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'publisher', sa.Column('id', sa.Integer, primary_key=True,autoincrement=True), sa.Column('name', sa.String(50), nullable=False), sa.Column('description', sa.Unicode(200)), ) op.create_table( 'game', sa.Column('id', sa.Integer, primary_key=True,autoincrement=True), sa.Column('name', sa.String(50), nullable=False), sa.Column('description', sa.Unicode(200)), sa.Column('theme', sa.Integer, nullable=True), sa.Column('publisher', sa.Integer, nullable=False), ) op.create_table( 'theme', sa.Column('id', sa.Integer, primary_key=True,autoincrement=True), sa.Column('name', sa.String(50), nullable=False), sa.Column('css', sa.String(50), nullable=False), sa.Column('description', sa.Unicode(200)), ) op.add_column('gameversion', sa.Column('game', sa.Integer, nullable=False)) op.add_column('mod', sa.Column('game', sa.Integer, nullable=False)) def downgrade(): op.drop_table('publisher') op.drop_table('game') op.drop_table('theme') op.drop_column('gameversion', 'game') op.drop_column('mod', 'game')
Python
0.000033
@@ -88,28 +88,28 @@ 16-0 -3-30 12:26:36.632566 +4-05 23:51:58.647657 %0A%0A%22%22 @@ -281,127 +281,100 @@ -op.create_table(%0A 'publisher',%0A sa.Column('id', sa.Integer, primary_key=True,autoincrement=True),%0A +### commands auto generated by Alembic - please adjust! ###%0A op.add_column('gameversion', sa. @@ -381,35 +381,37 @@ Column(' -n +g ame +_id ', sa. -String(50 +Integer( ), nulla @@ -418,300 +418,137 @@ ble= -False),%0A sa.Column('description', sa.Unicode(200)),%0A )%0A op.create_table(%0A 'game',%0A sa.Column('id', sa.Integer, primary_key=True,autoincrement=True),%0A sa.Column('name', sa.String(50), nullable=False),%0A sa.Column('description', sa.Unicode(200)),%0A +True))%0A op.create_foreign_key('gameversion_game_id_fkey', 'gameversion', 'game', %5B'game_id'%5D, %5B'id'%5D)%0A op.add_column('mod', sa. @@ -555,21 +555,23 @@ Column(' -theme +game_id ', sa.In @@ -567,32 +567,34 @@ _id', sa.Integer +() , nullable=True) @@ -597,121 +597,120 @@ rue) -, +) %0A - sa.Column('publisher', sa.Integer, nullable=False),%0A )%0A op.create_table(%0A 'theme',%0A +op.create_foreign_key('mod_game_id_fkey', 'mod', 'game', %5B'game_id'%5D, %5B'id'%5D)%0A op.add_column('modlist', sa. @@ -717,16 +717,21 @@ Column(' +game_ id', sa. @@ -741,466 +741,457 @@ eger -, primary_key=True,autoincrement +(), nullable =True) -, +) %0A - sa.Column('name', sa.String(50), nullable=False),%0A%09 sa.Column('css', sa.String(50), nullable=False),%0A sa.Column('description', sa.Unicode(200)),%0A )%0A op.add_column('gameversion', sa.Column('game', sa.Integer, nullable=False))%0A op.add_column('mod', sa.Column('game', sa.Integer, nullable=False))%0A%0A%0Adef downgrade():%0A op.drop_table('publisher')%0A op.drop_table('game')%0A op.drop_table('theme +op.create_foreign_key('modlist_game_id_fkey', 'modlist', 'game', %5B'game_id'%5D, %5B'id'%5D)%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_constraint('modlist_game_id_fkey', 'modlist', type_='foreignkey')%0A op.drop_column('modlist', 'game_id')%0A op.drop_constraint('mod_game_id_fkey', 'mod', type_='foreignkey')%0A op.drop_column('mod', 'game_id ')%0A @@ -1203,20 +1203,24 @@ .drop_co -lumn +nstraint ('gameve @@ -1228,50 +1228,133 @@ sion -', 'game')%0A op.drop_column('mod', 'game')%0A +_game_id_fkey', 'gameversion', type_='foreignkey')%0A op.drop_column('gameversion', 'game_id')%0A ### end Alembic commands ### %0A
10f8deb343d17e73185bef916396a80c73b718ed
Add link to migration guide (#10821)
conans/pylint_plugin.py
conans/pylint_plugin.py
"""Pylint plugin for ConanFile""" import re import astroid from astroid import MANAGER from pylint.checkers import BaseChecker from pylint.interfaces import IRawChecker def register(linter): """required method to auto register this checker""" linter.register_checker(ConanDeprecatedImportsChecker(linter)) def transform_conanfile(node): """Transform definition of ConanFile class so dynamic fields are visible to pylint""" str_class = astroid.builtin_lookup("str") info_class = MANAGER.ast_from_module_name("conans.model.info").lookup( "ConanInfo") build_requires_class = MANAGER.ast_from_module_name( "conans.client.graph.graph_manager").lookup("_RecipeBuildRequires") file_copier_class = MANAGER.ast_from_module_name( "conans.client.file_copier").lookup("FileCopier") file_importer_class = MANAGER.ast_from_module_name( "conans.client.importer").lookup("_FileImporter") python_requires_class = MANAGER.ast_from_module_name( "conans.client.graph.python_requires").lookup("PyRequires") dynamic_fields = { "conan_data": str_class, "build_requires": build_requires_class, "info_build": info_class, "info": info_class, "copy": file_copier_class, "copy_deps": file_importer_class, "python_requires": [str_class, python_requires_class], "recipe_folder": str_class, } for f, t in dynamic_fields.items(): node.locals[f] = [t] MANAGER.register_transform( astroid.ClassDef, transform_conanfile, lambda node: node.qname() == "conans.model.conan_file.ConanFile") def _python_requires_member(): return astroid.parse(""" from conans.client.graph.python_requires import ConanPythonRequire python_requires = ConanPythonRequire() """) astroid.register_module_extender(astroid.MANAGER, "conans", _python_requires_member) class ConanDeprecatedImportsChecker(BaseChecker): """ Check "from conans*" imports which disappears in Conan 2.x. Only "from conan*" is valid """ __implements__ = IRawChecker deprecated_imports_pattern = re.compile(r"(from|import)\s+conans[\.|\s].*") name = "conan_deprecated_imports" msgs = { "E9000": ( "Using deprecated imports from 'conans'", "conan1.x-deprecated-imports", ( "Use imports from 'conan' instead of 'conans'" " because 'conan' will be the root package for Conan 2.x" ) ) } options = () def process_module(self, node): """ Processing the module's content that is accessible via node.stream() function """ with node.stream() as stream: for (index, line) in enumerate(stream): if self.deprecated_imports_pattern.match(line.decode('utf-8')): self.add_message("conan1.x-deprecated-imports", line=index + 1)
Python
0
@@ -2303,16 +2303,88 @@ 'conans' +. Check migration guide at https://docs.conan.io/en/latest/conan_v2.html %22,%0A