Columns:

    commit        stringlengths  40 – 40
    subject       stringlengths  1 – 3.25k
    old_file      stringlengths  4 – 311
    new_file      stringlengths  4 – 311
    old_contents  stringlengths  0 – 26.3k
    lang          stringclasses  3 values
    proba         float64        0 – 1
    diff          stringlengths  0 – 7.82k
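
A minimal sketch of how rows with this schema might be consumed, assuming the dump comes from a HuggingFace-hosted dataset; the repository id below is a placeholder, not the real one:

    # Hypothetical usage sketch: "user/commit-diffs" is a placeholder id --
    # substitute the actual repository this dump was taken from.
    from datasets import load_dataset

    ds = load_dataset("user/commit-diffs", split="train")
    for row in ds:
        # Each row pairs a commit subject with the pre-change file contents
        # ("old_contents") and a compact encoded delta ("diff").
        print(row["commit"][:8], row["subject"])
        print(row["old_file"], "->", row["new_file"], row["lang"], row["proba"])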

commit: 2d60ef3a9ff53c1623747fd1a00df4d788dd3777
subject: fix tobler init
old_file: pysal/model/tobler/__init__.py
new_file: pysal/model/tobler/__init__.py
old_contents:
    from tobler import area_weighted
    from tobler import data
    from tobler import dasymetric
lang: Python
proba: 0.000124
diff:
    @@ -51,10 +51,16 @@ t da -ta +symetric %0Afro @@ -63,31 +63,26 @@ %0Afrom tobler import -dasymetric +model %0A

commit: d1c88387a129d64488a5ca2dee56d7fac36ffbf1
subject: Disable GCC fallback, add time logging.
old_file: clang_wrapper.py
new_file: clang_wrapper.py
old_contents:
    #!/usr/bin/env python

    import optparse
    import os
    import subprocess
    import sys

    WORLD_PATH = os.path.dirname(os.path.abspath(__file__))
    COMPILER_PATH = {'gcc': 'gcc',
                     'clang': WORLD_PATH + '/third_party/llvm-build/Release+Asserts/bin/clang'
                     }
    FILTER = {'gcc': ['-Qunused-arguments',
                      '-no-integrated-as',
                      '-mno-global-merge',
                      '-Wdate-time',
                      '-Wno-unknown-warning-option',
                      '-Wno-initializer-overrides',
                      '-Wno-tautological-compare',
                      '-Wincompatible-pointer-types',
                      '-Wno-gnu',
                      '-Wno-format-invalid-specifier',
                      '-Werror=date-time',
                      '-Werror=incompatible-pointer-types',
                      ],'clang': []}
    SOURCE = 'source'
    WRAPPER_LOG = WORLD_PATH + '/wrapper.log'
    LOG = sys.stderr

    def compiler(flags):
      path = 'clang'
      if SOURCE in flags:
        source = flags[SOURCE]
        print >>LOG, source
        # kernel/* ok
        # kernel/[st] broken
        # kernel/[kmpstuw] broken
        # kernel/[abckmpstuw] broken
        # kernel/[abcdefgkmpstuw] ok
        # kernel/[defgkmpstuw] ok
        # kernel/[defgkm] ok
        # kernel/[defg] ok
        # kernel/[de] broken
        # kernel/[fg] ok
        # kernel/[f] broken
        # kernel/[g] ok -- that's kernel/groups.h
        if source.startswith('kernel/'):
          pieces = source.split('/')
          if pieces[1][0] in ['g']:
            path = 'gcc'
      print >>LOG, path
      return path

    def filter_args(argv, cname):
      new_argv = []
      for arg in argv:
        if arg not in FILTER[cname]:
          new_argv.append(arg)
      return new_argv

    def compiler_argv(flags, argv):
      cname = compiler(flags)
      new_argv = [COMPILER_PATH[cname]] + filter_args(argv, cname)
      return new_argv

    def make_flags(argv):
      flags = {}
      argv = argv[1:]
      for arg in argv:
        if arg.endswith('.c'):
          flags[SOURCE] = arg
      return flags, argv

    def main(argv):
      global LOG
      LOG = file(WRAPPER_LOG, 'a+')
      #print >>LOG, ' '.join(argv)
      flags, argv = make_flags(argv)
      new_argv = compiler_argv(flags, argv)
      #print >>LOG, ' '.join(new_argv)
      ret = subprocess.call(new_argv)
      #print >>LOG, ret
      LOG.close()
      return ret

    if __name__ == '__main__':
      sys.exit(main(sys.argv))
lang: Python
proba: 0
diff:
    @@ -69,16 +69,28 @@ port sys +%0Aimport time %0A%0AWORLD_ @@ -675,46 +675,133 @@ err%0A -%0Adef compiler(flags):%0A path = 'clang' +LOG_OPTIONS = %7B'time': True, 'argv': True%7D%0A%0Adef compiler(flags):%0A path = 'clang'%0A return path # no need to use GCC for now %0A i @@ -846,24 +846,25 @@ SOURCE%5D%0A +# print %3E%3ELOG, @@ -1331,16 +1331,17 @@ cc'%0A +# print %3E%3E @@ -1860,13 +1860,41 @@ a+') - %0A # +%0A if 'argv' in LOG_OPTIONS:%0A prin @@ -2025,16 +2025,43 @@ w_argv)%0A + start_time = time.time()%0A ret = @@ -2084,33 +2084,87 @@ new_argv)%0A -# +end_time = time.time()%0A if 'time' in LOG_OPTIONS:%0A print %3E%3E LOG, ret%0A L @@ -2155,16 +2155,74 @@ t %3E%3E + LOG, -ret +'Time elapsed: %7B:.3f%7D seconds'.format(end_time - start_time) %0A L

commit: deebd351b09108d95b4759b179ad84b48b6c933e
subject: Fix typo in random-seed's help
old_file: pytest_test_groups/__init__.py
new_file: pytest_test_groups/__init__.py
old_contents:
    from random import Random
    import math


    def get_group_size(total_items, total_groups):
        return int(math.ceil(float(total_items) / total_groups))


    def get_group(items, group_size, group_id):
        start = group_size * (group_id - 1)
        end = start + group_size

        if start >= len(items) or start < 0:
            raise ValueError("Invalid test-group argument")

        return items[start:end]


    def pytest_addoption(parser):
        group = parser.getgroup('split your tests into evenly sized groups and run them')
        group.addoption('--test-group-count', dest='test-group-count', type=int,
                        help='The number of groups to split the tests into')
        group.addoption('--test-group', dest='test-group', type=int,
                        help='The group of tests that should be executed')
        group.addoption('--test-group-random-seed', dest='random-seed', type=int,
                        help='Integer to seed psuedo-random test ordering')


    def pytest_collection_modifyitems(session, config, items):
        group_count = config.getoption('test-group-count')
        group_id = config.getoption('test-group')
        seed = config.getoption('random-seed', False)

        if not group_count or not group_id:
            return

        if seed:
            seeded = Random(seed)
            seeded.shuffle(items)

        total_items = len(items)

        group_size = get_group_size(total_items, group_count)
        tests_in_group = get_group(items, group_size, group_id)
        del items[:]
        items.extend(tests_in_group)

        print('Running test group #{0} ({1} tests)'.format(group_id, len(items)))
lang: Python
proba: 0.002549
diff:
    @@ -909,18 +909,18 @@ seed ps -u e +u do-rando

commit: 2eca98c216a590c6163c8236c392f19ddd8d85d9
subject: update to 4.4.12
old_file: tensorgraph/__init__.py
new_file: tensorgraph/__init__.py
old_contents:
    # import json
    # from os.path import dirname
    #
    # with open(dirname(__file__) + '/pkg_info.json') as fp:
    #     _info = json.load(fp)
    # __version__ = _info['version']

    __version__ = "4.4.10"

    from .stopper import EarlyStopper
    from .sequential import Sequential
    from .graph import Graph
    from .node import StartNode, HiddenNode, EndNode
    from .progbar import ProgressBar
    from .data_iterator import SequentialIterator, StepIterator, SimpleBlocks, DataBlocks
    from . import cost
    from . import utils
    from .dataset.preprocess import *
lang: Python
proba: 0
diff:
    @@ -182,9 +182,9 @@ .4.1 -0 +2 %22%0A%0Af

commit: c986507b9c020a2a81a290299f7ce74748641254
subject: update linkedinviewer
old_file: linkedinviewer.py
new_file: linkedinviewer.py
old_contents:
    from linkedin import linkedin
    import oauthlib


    class Linkedinviewer (object):
        def __init__ (self, cred_file):
            self.cred_file = cred_file
            self.authentication = None
            self.application = None

        def authenticate(self):
            # Authenticate with LinkedIn app credential
            cred_list = None
            with open(self.cred_file, 'r') as f:
                cred_data = f.readlines()
            for line in cred_data:
                try:
                    cred_temp = line.split('=')[1]
                except:
                    print "Bad credentials for LinkedIn api authentication"
                if cred_list is None:
                    cred_list = []
                cred_list.append(cred_temp.strip(' \t\n\r'))
            try:
                self.authentication = linkedin.LinkedInDeveloperAuthentication(cred_list[0], cred_list[1], cred_list[2], cred_list[3], cred_list[4], linkedin.PERMISSIONS.enums.values())
                self.application = application = linkedin.LinkedInApplication(self.authentication)
            except:
                print "Failed to authenticate with LinkedIn"
            return None

        def retrieve_profile(self):
            # Get profile information
            profile = self.application.get_profile()
            print profile
            return profile

        def retrieve_company(self, company_ids=None, universal_names=None, selectors=None):
            # Get company information
            companies = None
            count = 0
            if company_ids is not None:
                for company_id in company_ids:
                    try:
                        company_temp = self.application.get_companies(company_ids=[company_id], selectors=selectors)
                        if companies is None:
                            companies = {}
                            companies['values'] = []
                        companies['values'].append(company_temp['values'][0])
                        count = count + 1
                    except:
                        print "Unable to retrieve company id:", company_id
            if universal_names is not None:
                for universal_name in universal_names:
                    try:
                        company_temp = self.application.get_companies(universal_names=[universal_name], selectors=selectors)
                        if companies is None:
                            companies = {}
                            companies['values'] = []
                        companies['values'].append(company_temp['values'][0])
                        count = count + 1
                    except:
                        print "Unable to retrieve universal name:", universal_name
            if count > 0:
                companies['_total'] = count
                for company in companies['values']:
                    print '========================\n'
                    print company
                    print '\n========================'
            return companies

        def retrieve_company_updates(self, companies=None, count=1):
            # Get company updates
            company_list = None
            company_updates_dict = None
            if companies is not None:
                for i in range(companies['_total']):
                    if company_list is None:
                        company_list = []
                    company_list.append(companies['values'][i])
                for company in company_list:
                    if company_updates_dict is None:
                        company_updates_dict = {}
                    company_updates_dict[company['name']] = self.application.get_company_updates(company['id'], params={'count': count})
                for company_name, company_updates in company_updates_dict.iteritems():
                    print '\n************************', company_name, '************************\n'
                    for i in range(company_updates['_count']):
                        print '========================\n'
                        print company_updates['values'][i]
                        print '\n========================'
            return company_updates_dict


    if __name__ == "__main__":
        lviewer = Linkedinviewer('linkedincred.conf')
        lviewer.authenticate()
        lviewer.retrieve_profile()
        selectors = ['id', 'name', 'company-type', 'stock-exchange', 'ticker',
                     'industries', 'employee-count-range', 'locations',
                     'founded-year', 'num-followers'
                    ]
        companies = lviewer.retrieve_company(universal_names=['sciencelogic', 'splunk'], selectors=selectors)
        company_updates_dict = lviewer.retrieve_company_updates(companies=companies, count=3)
lang: Python
proba: 0
diff:
    @@ -4579,8 +4579,13 @@ count=3) +%0A

commit: 4f5d81b48a5bb48771b82f30e3853472550ee65c
subject: add demo about using file iterator
old_file: python/src/file_iter.py
new_file: python/src/file_iter.py
old_contents:
    # Copyright (c) 2014 ASMlover. All rights reserved.
    #
    # Redistribution and use in source and binary forms, with or without
    # modification, are permitted provided that the following conditions
    # are met:
    #
    #  * Redistributions of source code must retain the above copyright
    #    notice, this list ofconditions and the following disclaimer.
    #
    #  * Redistributions in binary form must reproduce the above copyright
    #    notice, this list of conditions and the following disclaimer in
    #    the documentation and/or other materialsprovided with the
    #    distribution.
    #
    # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
    # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
    # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
    # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
    # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
    # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
    # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
    # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
    # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
    # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
    # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
    # POSSIBILITY OF SUCH DAMAGE.
    #
    #!/usr/bin/env python
    # -*- encoding: utf-8 -*-

    import sys
    import fileinput


    def process(string):
        print 'Processing: ', string


    def file_iter_by_ch(filename):
        f = open(filename)
        while True:
            char = f.read(1)
            if not char:
                break
            process(char)
        f.close()


    def file_iter_by_line(filename):
        f = open(filename)
        while True:
            line = f.readline()
            if not line:
                break
            process(line)
        f.close()


    def file_iter_by_ch_all(filename):
        f = open(filename)
        for char in f.read():
            process(char)
        f.close()


    def file_iter_by_line_all(filename):
        f = open(filename)
        for line in f.readlines():
            process(line)
        f.close()


    def file_iter_by_lazy(filename):
        for line in fileinput.input(filename):
            process(line)




    if __name__ == '__main__':
        if len(sys.argv) < 2:
            print 'invalid arguments'
            exit(1)

        filename = 'file_iter.py'
        if sys.argv[1] == 'c':
            file_iter_by_ch(filename)
        elif sys.argv[1] == 'l':
            file_iter_by_line(filename)
        elif sys.argv[1] == 'ca':
            file_iter_by_ch_all(filename)
        elif sys.argv[1] == 'la':
            file_iter_by_line_all(filename)
        elif sys.argv[1] == 'lazy':
            file_iter_by_lazy(filename)
        else:
            print 'error'
lang: Python
proba: 0
diff:
    @@ -2101,16 +2101,110 @@ line)%0A%0A%0A +def file_iter(filename):%0A f = open(filename)%0A for line in f:%0A process(line)%0A f.close()%0A%0A %0A%0Aif __n @@ -2637,18 +2637,24 @@ -print 'error' +file_iter(filename) %0A

commit: a0f2e921edac34b82d3773e94ba88a1a28d296a7
subject: Fix the expected "Dot not know how to make" error message.
old_file: test/Win32/bad-drive.py
new_file: test/Win32/bad-drive.py
old_contents:
    #!/usr/bin/env python
    #
    # __COPYRIGHT__
    #
    # Permission is hereby granted, free of charge, to any person obtaining
    # a copy of this software and associated documentation files (the
    # "Software"), to deal in the Software without restriction, including
    # without limitation the rights to use, copy, modify, merge, publish,
    # distribute, sublicense, and/or sell copies of the Software, and to
    # permit persons to whom the Software is furnished to do so, subject to
    # the following conditions:
    #
    # The above copyright notice and this permission notice shall be included
    # in all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
    # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
    # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    #

    """
    This test verifies (on Windows systems) that we fail gracefully and
    provide informative messages if someone tries to use a path name
    with an invalid drive letter.
    """

    __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

    import os
    import string
    import sys

    import TestSCons

    test = TestSCons.TestSCons()

    if sys.platform != 'win32':
        msg = "Skipping drive-letter test on non-Windows platform '%s'\n" % sys.platform
        test.skip_test(msg)

    bad_drive = None
    for i in range(len(string.uppercase)-1, -1, -1):
        d = string.uppercase[i]
        if not os.path.isdir(d + ':' + os.sep):
            bad_drive = d + ':'
            break

    if bad_drive is None:
        print "All drive letters appear to be in use."
        print "Cannot test SCons handling of invalid Windows drive letters."
        test.no_result(1);

    test.write('SConstruct', """
    def cat(env, source, target):
        target = str(target[0])
        source = map(str, source)
        print 'cat(%%s) > %%s' %% (source, target)
        f = open(target, "wb")
        for src in source:
            f.write(open(src, "rb").read())
        f.close()

    bad_drive = '%s'
    env = Environment(BUILDERS={'Build':Builder(action=cat)})
    env.Build('aaa.out', 'aaa.in')
    env.Build(bad_drive + 'no_target_1', 'bbb.exists')
    env.Build(bad_drive + 'no_target_2', 'ccc.does_not_exist')
    env.Build('ddd.out', bad_drive + 'no_source')
    """ % (bad_drive + '\\' + os.sep))

    bad_drive = bad_drive + os.sep
    test.write("aaa.in", "aaa.in\n")
    test.write("bbb.exists", "bbb.exists\n")
    test.write("no_target_1", "no_target_1\n")
    test.write("no_target_2", "no_target_2\n")
    test.write("no_source", "no_source\n")

    test.run(arguments = 'aaa.out')
    test.fail_test(test.read('aaa.out') != "aaa.in\n")

    # This next test used to provide a slightly different error message:
    #   "scons: *** Do not know how to make File target `%snot_mentioned'.  Stop.\n"
    # Right now, it doesn't seem important enough to track down exactly
    # why this changed and fix it, but we'll preserve it here in case it
    # becomes an issue or some refactoring restores the old behavior.
    test.run(arguments = bad_drive + 'not_mentioned',
             stderr = "scons: *** Do not know how to make File target `%snot_mentioned'.  Stop.\n" % (bad_drive),
             status = 2)

    expect = "scons: *** [%sno_target_1] No drive `%s' for target `%sno_target_1'.\n" % (bad_drive, bad_drive, bad_drive)

    test.run(arguments=bad_drive + 'no_target_1', stderr=expect, status=2)

    expect = "scons: *** [%sno_target_2] Source `ccc.does_not_exist' not found, needed by target `%sno_target_2'.\n" % (bad_drive, bad_drive)

    test.run(arguments=bad_drive + 'no_target_2', stderr=expect, status=2)

    expect = "scons: *** [ddd.out] Source `%sno_source' not found, needed by target `ddd.out'.\n" % bad_drive

    test.run(arguments='ddd.out', stderr=expect, status=2)

    test.pass_test()

    # Local Variables:
    # tab-width:4
    # indent-tabs-mode:nil
    # End:
    # vim: set expandtab tabstop=4 shiftwidth=4:
lang: Python
proba: 0.000553
diff:
    @@ -3265,32 +3265,50 @@ %25snot_mentioned' + (%25snot_mentioned) . Stop.%5Cn%22 %25 (b @@ -3298,32 +3298,43 @@ ). Stop.%5Cn%22 %25 ( +bad_drive, bad_drive),%0A

commit: 61c693005de95557172ff78c85de2d5dc4be66f1
subject: use N for missing nucleotides
old_file: vcfkit/phylo.py
new_file: vcfkit/phylo.py
old_contents:
    #! /usr/bin/env python
    """
    usage:
      vk phylo fasta <vcf> [<region>]
      vk phylo tree (nj|upgma) [--plot] <vcf> [<region>]

    options:
      -h --help                   Show this screen.
      --version                   Show version.
    """
    from docopt import docopt
    from vcfkit import __version__
    from utils.vcf import *
    from subprocess import Popen, PIPE
    from utils import check_program_exists
    from clint.textui import colored, indent, puts_err
    import os
    from pkgutil import get_data
    import sys
    import numpy as np


    def main(debug=None):
        args = docopt(__doc__,
                      argv=debug,
                      options_first=False,
                      version=__version__)

        def first(s):
            return s[0].replace(".", "-")

        firstv = np.vectorize(first)
        v = vcf(args["<vcf>"])
        if len(v.samples) <= 1:
            exit(puts_err(colored.red("\n\tVCF must have at least two samples.\n")))
        if args["<region>"]:
            variant_set = v(args["<region>"])
        else:
            variant_set = v
        if args["fasta"] or args["tree"]:
            """
                Generate an aligned fasta from a VCF file.
            """
            gt_set = np.chararray((0,len(v.samples)))
            gt_set = []
            for line in variant_set:
                if line.is_snp:
                    gt_set.append(firstv(line.gt_bases))
            gt_set = np.vstack(gt_set)
            seqs = zip(v.samples, np.transpose(gt_set))
            if args["fasta"]:
                for sample, seq in seqs:
                    print(">" + sample)
                    print(''.join(seq))
            elif args["tree"]:
                """
                    Generate a phylogenetic tree using an aligned fasta with muscle.
                """
                # Check for muscle dependency
                check_program_exists("muscle")
                fasta = ""
                with indent(4):
                    puts_err(colored.blue("\nGenerating Fasta\n"))
                for sample, seq in seqs:
                    fasta += ">" + sample + "\n" + ''.join(seq) + "\n"
                tree_type = "upgma"  # default is upgma
                if args["nj"]:
                    tree_type = "neighborjoining"
                with indent(4):
                    puts_err(colored.blue("\nGenerating " + tree_type + " Tree\n"))
                comm = ["muscle", "-maketree", "-in", "-", "-cluster", tree_type]
                tree, err = Popen(comm, stdin=PIPE, stdout=PIPE).communicate(input=fasta)
                # output tree
                print(tree)
                if args["--plot"]:
                    from jinja2 import Template
                    import webbrowser
                    import tempfile
                    prefix = os.path.dirname(os.path.abspath(sys.modules['vcfkit'].__file__)) + "/static"
                    template = open(prefix + "/tree.html",'r').read()
                    tree_template = Template(template)
                    html_out = tempfile.NamedTemporaryFile(suffix=".html", delete=False)
                    with html_out as f:
                        tree = tree.replace("\n", "")
                        sample_len = len(v.samples)
                        f.write(tree_template.render(**locals()))
                    webbrowser.open("file://" + html_out.name)


    if __name__ == '__main__':
        main()
lang: Python
proba: 0.004823
diff:
    @@ -708,17 +708,17 @@ e(%22.%22, %22 -- +N %22)%0A%0A @@ -1292,16 +1292,88 @@ bases))%0A + if len(gt_set) == 0:%0A exit(puts_err(%22No genotypes%22))%0A

commit: 6aead3bfc4ef7a0140238855e118e4017af1ab73
subject: Change order of tests
old_file: pywikibot/comms/http.py
new_file: pywikibot/comms/http.py
old_contents:
    # -*- coding: utf-8 -*-
    """
    Basic HTTP access interface.

    This module handles communication between the bot and the HTTP threads.

    This module is responsible for
        - Setting up a connection pool
        - Providing a (blocking) interface for HTTP requests
        - Translate site objects with query strings into urls
        - Urlencoding all data
        - Basic HTTP error handling
    """

    #
    # (C) Pywikipedia bot team, 2007
    #
    # Distributed under the terms of the MIT license.
    #

    __version__ = '$Id$'
    __docformat__ = 'epytext'

    import Queue
    import urllib
    import urlparse
    import logging
    import atexit

    from pywikibot import config
    from pywikibot.exceptions import Server504Error
    import pywikibot
    import cookielib
    import threadedhttp

    logger = logging.getLogger("comms.http")

    # global variables
    useragent = 'Pywikipediabot/2.0'  # This should include some global version string
    numthreads = 1
    threads = []

    connection_pool = threadedhttp.ConnectionPool()
    http_queue = Queue.Queue()

    cookie_jar = threadedhttp.LockableCookieJar(
                     config.datafilepath("pywikibot.lwp"))
    try:
        cookie_jar.load()
    except (IOError, cookielib.LoadError):
        logger.debug("Loading cookies failed.")
    else:
        logger.debug("Loaded cookies from file.")

    # Build up HttpProcessors
    pywikibot.output('Starting %(numthreads)i threads...' % locals(),
                     level=pywikibot.VERBOSE)
    for i in range(numthreads):
        proc = threadedhttp.HttpProcessor(http_queue, cookie_jar, connection_pool)
        proc.setDaemon(True)
        threads.append(proc)
        proc.start()

    # Prepare flush on quit
    def _flush():
        for i in threads:
            http_queue.put(None)
        pywikibot.output(u'Waiting for threads to finish... ',
                         level=pywikibot.VERBOSE)
        for i in threads:
            i.join()
        logger.debug('All threads finished.')
    atexit.register(_flush)

    # export cookie_jar to global namespace
    import pywikibot
    pywikibot.cookie_jar = cookie_jar


    def request(site, uri, *args, **kwargs):
        """Queue a request to be submitted to Site.

        All parameters not listed below are the same as
        L{httplib2.Http.request}, but the uri is relative

        @param site: The Site to connect to
        @return: The received data (a unicode string).

        """
        baseuri = "%s://%s/" % (site.protocol(), site.hostname())
        uri = urlparse.urljoin(baseuri, uri)

        # set default user-agent string
        kwargs.setdefault("headers", {})
        kwargs["headers"].setdefault("user-agent", useragent)
        request = threadedhttp.HttpRequest(uri, *args, **kwargs)
        http_queue.put(request)
        request.lock.acquire()

        #TODO: do some error correcting stuff
        if request.data[0].status == 504:
            raise Server504Error("Server %s timed out" % site.hostname())

        #if all else fails
        if isinstance(request.data, Exception):
            raise request.data

        if request.data[0].status != 200:
            pywikibot.output(u"Http response status %(status)s" % {'status': request.data[0].status}, level=pywikibot.WARNING)

        return request.data[1]
lang: Python
proba: 0.000008
diff:
    @@ -2618,16 +2618,111 @@ g stuff%0A + #if all else fails%0A if isinstance(request.data, Exception):%0A raise request.data%0A%0A if r @@ -2826,103 +2826,8 @@ ))%0A%0A - #if all else fails%0A if isinstance(request.data, Exception):%0A raise request.data%0A%0A

commit: 68e58114919208b69a01880f52e8b8e2918a4edb
subject: make failing ogr/shape comparison a todo
old_file: tests/python_tests/ogr_and_shape_geometries_test.py
new_file: tests/python_tests/ogr_and_shape_geometries_test.py
old_contents:
    #!/usr/bin/env python

    from nose.tools import *
    from utilities import execution_path

    import os, sys, glob, mapnik

    def setup():
        # All of the paths used are relative, if we run the tests
        # from another directory we need to chdir()
        os.chdir(execution_path('.'))

    # TODO - fix truncation in shapefile...
    polys = ["POLYGON ((30 10, 10 20, 20 40, 40 40, 30 10))",
             "POLYGON ((35 10, 10 20, 15 40, 45 45, 35 10),(20 30, 35 35, 30 20, 20 30))",
             "MULTIPOLYGON (((30 20, 10 40, 45 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"
             "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)),((20 35, 45 20, 30 5, 10 10, 10 30, 20 35),(30 20, 20 25, 20 15, 30 20)))"
            ]

    plugins = mapnik.DatasourceCache.instance().plugin_names()
    if 'shape' in plugins and 'ogr' in plugins:

        def test_geometries_are_interpreted_equivalently():
            shapefile = '../data/shp/wkt_poly.shp'
            ds1 = mapnik.Ogr(file=shapefile,layer_by_index=0)
            ds2 = mapnik.Shapefile(file=shapefile)
            fs1 = ds1.featureset()
            fs2 = ds2.featureset()
            count = 0;
            while(True):
                count += 1
                feat1 = fs1.next()
                feat2 = fs2.next()
                if not feat1:
                    break
                #import pdb;pdb.set_trace()
                #print feat1
                eq_(str(feat1),str(feat2))
                eq_(feat1.geometries().to_wkt(),feat2.geometries().to_wkt())

    if __name__ == "__main__":
        setup()
        [eval(run)() for run in dir() if 'test_' in run]
lang: Python
proba: 0.000004
diff:
    @@ -78,16 +78,22 @@ ion_path +, Todo %0A%0Aimport @@ -1058,32 +1058,132 @@ s2.featureset()%0A + raise Todo(%22output will differ between ogr and shape, may not matter, needs a closer look%22)%0A count =

commit: 8e57bac9ca41bfcccfabc8524ddc2a8730ac4609
subject: Update quality_score_filter.py
old_file: python/quality_score_filter.py
new_file: python/quality_score_filter.py
old_contents:
    from Bio import SeqIO
    import math
    from Tkinter import Tk
    import sys

    name = sys.argv[1]
    qs = float(sys.argv[3])
    output = sys.argv[2]

    count = 0
    for rec in SeqIO.parse(name, "fastq"):
        count += 1
    print("%i reads in fastq file" % count)

    qual_sequences = [] # Setup an empty list

    cnt = 0
    for rec in SeqIO.parse(name, "fastq"):
        rec.letter_annotations["phred_quality"]
        probs = []
        for q in rec.letter_annotations["phred_quality"]:
            e = float(math.pow(10.0,-1*(float(q)/10.0)))
            # print q, e
            probs.append(e)
        av_prob = float(sum(probs))/float(len((rec.letter_annotations["phred_quality"])))
        # print av_prob
        av_q = float(-10.0*(math.log10(float(av_prob))))
        # print av_prob, av_q
        if av_q >= qs:
            cnt += 1
            qual_sequences.append(rec)

    print cnt,'Quality reads saved'

    output_handle = open(output +'.fa', "w")
    SeqIO.write(qual_sequences, output_handle, "fasta")
    output_handle.close()

    output_handle = open(output +'.fq', "w")
    SeqIO.write(qual_sequences, output_handle, "fastq")
    output_handle.close()
lang: Python
proba: 0.000002
diff:
    @@ -195,90 +195,28 @@ = 1%0A -print(%22%25i reads in fastq file%22 %25 count)%0A%0Aqual_sequences = %5B%5D # Setup an empty list +%0Aqual_sequences = %5B%5D %0A%0Acn @@ -722,41 +722,8 @@ c)%0A%0A -print cnt,'Quality reads saved'%0A%0A outp

commit: 9307163aa70b18af32af0b0e7b17245b33569756
subject: Stop changed
old_file: homework.py
new_file: homework.py
old_contents:
    from allgo_utils import PCA9685,ultrasonic,ir_sens
    import wiringpi as wp
    import time

    DIR_DISTANCE_ALERT = 20
    preMillis = 0
    ULTRASONIC_TRIG = 3   # TRIG port is to use as output signal
    ULTRASONIC_ECHO = 23  # ECHO port is to use as input signal
    OUT = {'front_left_led':5, 'front_right_led':0, 'rear_right_led':1, 'rear_left_led':2, 'ultra_trig':3}  # 5:front_left_led, 0:front_right_led, 1:rear_right_led, 2:rear_left_led, 3:ultra_trig
    IN = {'left_IR':21, 'center_IR':22, 'right_IR':26, 'ultra_echo':23}  # 21:left_IR, 22:center_IR, 26:right_IR, 23:ultra_echo
    LOW = 0
    HIGH = 1
    OUTPUT = wp.OUTPUT
    INPUT = wp.INPUT

    pca = PCA9685()
    ultra = ultrasonic(ULTRASONIC_TRIG,ULTRASONIC_ECHO)

    def setup():
        wp.wiringPiSetup()  # Initialize wiringPi to load Raspbarry Pi PIN numbering scheme
        for key in OUT:
            wp.pinMode(OUT[key],OUTPUT)
            wp.digitalWrite(OUT[key], LOW)
        for key in IN:
            wp.pinMode(IN[key],INPUT)

    def warn(times=3):
        for i in range(times):
            wp.digitalWrite(OUT['front_right_led'], HIGH)
            wp.digitalWrite(OUT['front_left_led'], HIGH)
            wp.digitalWrite(OUT['rear_right_led'], HIGH)
            wp.digitalWrite(OUT['rear_left_led'], HIGH)
            time.sleep(0.15)
            wp.digitalWrite(OUT['front_right_led'], LOW)
            wp.digitalWrite(OUT['front_left_led'], LOW)
            wp.digitalWrite(OUT['rear_right_led'], LOW)
            wp.digitalWrite(OUT['rear_left_led'], LOW)
            time.sleep(0.15)

    def ex1():
        """1. DC Motor Application
        Create a program that:
        - Turn smoothly"""
        while(True):
            pca.go_left(speed_cur=120,turning_rate=0.65)
            time.sleep(2)
            pca.stop()
            time.sleep(2)
            pca.go_right(speed_cur=120,turning_rate=0.65)
            time.sleep(2)
            pca.stop()
            time.sleep(2)
            pass
        pass

    def ex2():
        """2.Ultrasonic sensor application
        Create a program that
        1. Go forward
        2. Stop and flicker warning light when an Object is closer than 30cm"""
        pca.stop()
        pca.set_normal_speed(80)
        while True:
            dist = ultra.distance()
            print 'Distance(cm):%.2f'%dist
            if dist>45:
                pca.set_normal_speed(120)
                pca.go_forward()
            elif dist>40:
                pca.set_normal_speed(65)
                pca.go_forward()
            else:
                pca.stop()
                warn()
            #time.sleep(0.001)
        pass

    def ex3():
        """3.Ultrasonic Sensor Application
        Create a program that
        Keep the 50cm distance with an object"""
        pca.stop()
        pca.set_normal_speed(85)
        while True:
            dist = ultra.distance()
            print 'Distance(cm):%.2f' % dist
            if dist > 60:
                pca.go_forward()
            elif dist<50:
                pca.go_back()
            else:
                pca.stop_extreme()
            time.sleep(0.2)
        pass

    def ex4():
        """4.IR sensor application
        Create a program with TCRT 5000 IR sensor
        1. Go straight until it detect the 2nd black belt
        2. Stop"""
        count=0
        state = False
        while(count!=2):
            l_ir = wp.digitalRead(IN['left_IR'])
            c_ir = wp.digitalRead(IN['center_IR'])
            r_ir = wp.digitalRead(IN['right_IR'])
            pca.go_forward()
            print 'left:%d center:%d right:%d '%(l_ir,c_ir,r_ir)
            if (bool(l_ir) and bool(c_ir) and bool(r_ir)) is True:
                if(state!=True):
                    count+=1
                    state = True
            if(count ==1):
                if(state!=False):
                    count+=1
                    state=False
            time.sleep(0.2)
        pca.stop_extreme()
        pass

    def ex4_demo():
        count = 0
        state = False
        while (True):
            l_ir = wp.digitalRead(IN['left_IR'])
            c_ir = wp.digitalRead(IN['center_IR'])
            r_ir = wp.digitalRead(IN['right_IR'])
            print 'left:%d center:%d right:%d ' % (l_ir, c_ir, r_ir)
            if bool(c_ir)==True:
                warn(1)
            time.sleep(0.2)

    def ex5():
        """5.Multiple Sensor Application
        Create a program that
        Trace the line
        Stop, beep the buzzer and flicker warning light when obstacle is detected
        Wait until no object detected on the line.
        Stop on stop line"""
        pass

    def ex2_demo():
        """2.Ultrasonic sensor application
        Create a program that
        1. Go forward
        2. Stop and flicker warning light when an Object is closer than 30cm"""
        pca.stop()
        pca.go_forward(speed_cur=255)
        time.sleep(1)
        start_ = time.time()

        print 'stop started: ', start_
        dist=ultra.distance()
        print dist
        pca.stop()
        end_= time.time()

        print 'stop stopped', end_ , ultra.distance()
        pass

    def main():
        setup()
        while(True):
            print 'Welcome to the Home work assignment'
            print 'Please select exercise to run:\n' \
                  '1 - DC Motor Application\n' \
                  '2 - Ultrasonic sensor application\n' \
                  '3 - Ultrasonic sensor application\n' \
                  '4 - IR sensor application\n' \
                  '5 - Multiple Sensor Application\n'
            menu=int(input())
            if menu == 1:
                ex1()
            elif menu == 2:
                ex2()
            elif menu == 3:
                ex3()
            elif menu == 4:
                ex4()
            elif menu == 5:
                ex5()
            elif menu == 6:
                ex4_demo()
            elif menu ==7:
                ex2_demo()
        pass

    if __name__ == "__main__":
        main()
lang: Python
proba: 0.000005
diff:
    @@ -4706,24 +4706,80 @@ ime.time()%0A%0A + for i in range(10):%0A print ultra.distance()%0A%0A print 's

commit: fa2c69bf4399f3a96505fe33050433f275ff6e0b
subject: Bump version to 0.0.3
old_file: streamer/__init__.py
new_file: streamer/__init__.py
old_contents:
    __version__ = "0.0.2"
lang: Python
proba: 0.000001
diff:
    @@ -16,8 +16,8 @@ 0.0. -2 +3 %22%0A%0A

commit: 71f991d88fc115d53fd156f3788525e35fcf405e
subject: Remove dispatch_meta from Payload signature
old_file: stoq/data_classes.py
new_file: stoq/data_classes.py
old_contents:
    #!/usr/bin/env python3

    import uuid
    from typing import Dict, List, Optional, Union

    import stoq.helpers as helpers


    class PayloadMeta():
        def __init__(self,
                     should_archive: bool = True,
                     extra_data: Optional[Dict] = None,
                     dispatch_to: List[str] = None) -> None:
            self.should_archive = should_archive
            self.extra_data = extra_data
            self.dispatch_to = [] if dispatch_to is None else dispatch_to


    class Payload():
        def __init__(self,
                     content: bytes,
                     payload_meta: Optional[PayloadMeta] = None,
                     extracted_by: Optional[str] = None,
                     extracted_from: Optional[str] = None,
                     dispatch_meta: Optional[Dict[str, Dict]] = None,
                     payload_id: Optional[str] = None) -> None:
            self.content = content
            self.payload_meta = PayloadMeta() if payload_meta is None else payload_meta
            self.extracted_by = extracted_by
            self.extracted_from = extracted_from
            self.dispatch_meta: Dict[str, Dict] = {}
            self.deep_dispatch_meta: Dict[str, Dict] = {}
            self.worker_results: List[Dict[str, Dict]] = [{}]  # Empty dict for first round
            self.plugins_run: Dict[str, Union[List[List[str]], List[str]]] = {'workers': [[]], 'archivers': []}
            self.payload_id = str(uuid.uuid4()) if payload_id is None else payload_id


    class RequestMeta():
        def __init__(self,
                     archive_payloads: bool = True,
                     source: Optional[str] = None,
                     extra_data: Optional[Dict] = None) -> None:
            self.archive_payloads = archive_payloads
            self.source = source
            self.extra_data = {} if extra_data is None else extra_data


    class PayloadResults():
        def __init__(self,
                     payload_id: str,
                     md5: str,
                     sha1: str,
                     sha256: str,
                     sha512: str,
                     size: int,
                     payload_meta: PayloadMeta,
                     workers: List[Dict[str, Dict]],
                     plugins_run: Dict[str, Union[List[List[str]], List[str]]],
                     extracted_from: Optional[str] = None,
                     extracted_by: Optional[str] = None) -> None:
            self.payload_id = payload_id
            self.md5 = md5
            self.sha1 = sha1
            self.sha256 = sha256
            self.sha512 = sha512
            self.size = size
            self.payload_meta = payload_meta
            self.workers: List[Dict[str, Dict]] = workers
            self.archivers: Dict[str, Dict] = {}
            self.plugins_run = plugins_run
            self.extracted_from = extracted_from  # payload_id of parent payload, if applicable
            self.extracted_by = extracted_by  # plugin name that extracted this payload, if applicable

        @classmethod
        def from_payload(cls, payload: Payload) -> 'PayloadResults':
            md5 = helpers.get_md5(payload.content)
            sha1 = helpers.get_sha1(payload.content)
            sha256 = helpers.get_sha256(payload.content)
            sha512 = helpers.get_sha512(payload.content)
            size = len(payload.content)
            return cls(payload.payload_id, md5, sha1, sha256, sha512, size,
                       payload.payload_meta, payload.worker_results,
                       payload.plugins_run, payload.extracted_from,
                       payload.extracted_by)


    class StoqResponse():
        def __init__(self,
                     time: str,
                     results: List[PayloadResults],
                     request_meta: RequestMeta,
                     errors: List[str],
                     decorators: Optional[Dict[str, Dict]] = None
                     ) -> None:
            self.time = time
            self.results = results
            self.request_meta = request_meta
            self.errors = errors
            self.decorators: Dict[str, Dict] = {} if decorators is None else decorators
            self.scan_id = str(uuid.uuid4())


    class ExtractedPayload():
        def __init__(self,
                     content: bytes,
                     payload_meta: Optional[PayloadMeta] = None) -> None:
            self.content = content
            self.payload_meta: PayloadMeta = PayloadMeta() if payload_meta is None else payload_meta


    class WorkerResponse():
        def __init__(self,
                     results: Optional[Dict] = None,
                     extracted: List[ExtractedPayload] = None,
                     errors: List[str] = None) -> None:
            self.results = results
            self.extracted = [] if extracted is None else extracted
            self.errors = [] if errors is None else errors


    class ArchiverResponse():
        def __init__(self, results: Optional[Dict] = None, errors: List[str] = None) -> None:
            self.results = results
            self.errors = [] if errors is None else errors


    class DispatcherResponse():
        def __init__(self,
                     plugin_names: Optional[List[str]] = None,
                     meta: Optional[Dict] = None,
                     errors: List[str] = None) -> None:
            self.plugin_names = [] if plugin_names is None else plugin_names
            self.meta = {} if meta is None else meta
            self.errors = [] if errors is None else errors


    class DeepDispatcherResponse():
        def __init__(self,
                     plugin_names: Optional[List[str]] = None,
                     meta: Optional[Dict] = None,
                     errors: List[str] = None) -> None:
            self.plugin_names = [] if plugin_names is None else plugin_names
            self.meta = {} if meta is None else meta
            self.errors = [] if errors is None else errors


    class DecoratorResponse():
        def __init__(self, results: Optional[Dict] = None, errors: List[str] = None) -> None:
            self.results = results
            self.errors = [] if errors is None else errors
lang: Python
proba: 0
diff:
    @@ -709,74 +709,8 @@ ne,%0A - dispatch_meta: Optional%5BDict%5Bstr, Dict%5D%5D = None,%0A

commit: bbb911c8d4f512c598451dd8361ac3f64ac9d79b
subject: Add the diagnosis date.
old_file: qirest_client/model/subject.py
new_file: qirest_client/model/subject.py
old_contents:
    """
    The qiprofile subject Mongodb data model.
    """

    import re
    import mongoengine
    from mongoengine import (fields, signals)
    from .. import choices
    from .common import Encounter
    from .imaging import (Scan, Session)
    from .clinical import Treatment


    class Project(mongoengine.Document):
        """The imaging project."""

        meta = dict(collection='qiprofile_project')

        name = fields.StringField(required=True)
        """The required project name."""

        description = fields.StringField()
        """The optional short description."""


    class ImagingCollection(mongoengine.Document):
        """The imaging collection."""

        meta = dict(collection='qiprofile_collection')

        project = fields.StringField(required=True)
        """The required project name."""

        name = fields.StringField(required=True)
        """The required collection name."""

        description = fields.StringField()
        """The optional short description."""

        url = fields.StringField()
        """The hyperlink url to additional information."""


    class Subject(mongoengine.Document):
        """The patient."""

        RACE_CHOICES = [('White', 'White'),
                        ('Black', 'Black or African American'),
                        ('Asian', 'Asian'),
                        ('AIAN', 'American Indian or Alaska Native'),
                        ('NHOPI', 'Native Hawaiian or Other Pacific Islander')]
        """The standard FDA race categories, in order of US incidence."""

        ETHNICITY_CHOICES = [('Hispanic' , 'Hispanic or Latino'),
                             ('Non-Hispanic' , 'Not Hispanic or Latino')]
        """The standard FDA ethnicity categories."""

        GENDER_CHOICES = ['Male', 'Female']
        """The Male and Female genders."""

        meta = dict(collection='qiprofile_subject')

        project = fields.StringField(required=True)
        """The required project name."""

        collection = fields.StringField(required=True)
        """The required collection name."""

        number = fields.IntField(required=True)
        """The required subject number."""

        birth_date = fields.DateTimeField()
        """The subject date of birth, anonymized to July 7 for deidentified data."""

        races = fields.ListField(
            fields.StringField(max_length=choices.max_length(RACE_CHOICES),
                               choices=RACE_CHOICES))
        """The :const:`RACE_CHOICES` controlled value."""

        ethnicity = fields.StringField(
            max_length=choices.max_length(ETHNICITY_CHOICES),
            choices=ETHNICITY_CHOICES)
        """The :const:`ETHNICITY_CHOICES` controlled value."""

        gender = fields.StringField(
            max_length=choices.max_length(GENDER_CHOICES),
            choices=GENDER_CHOICES)
        """The :const:`GENDER_CHOICES` controlled value."""

        treatments = fields.ListField(field=fields.EmbeddedDocumentField(Treatment))
        """The list of subject treatments."""

        encounters = fields.ListField(field=fields.EmbeddedDocumentField(Encounter))
        """The list of subject encounters in temporal order."""

        @property
        def sessions(self):
            """
            :return: the :class:`qirest_client.imaging.Session` encounters
            """
            return (enc for enc in self.encounters if self._is_session(enc))

        def add_encounter(self, encounter):
            """
            Inserts the given encounter to this :class:`Subject` encounters
            list in temporal order by encounter date.
            """
            index = next((i for i, enc in enumerate(self.encounters)
                          if enc.date > encounter.date),
                         len(self.encounters))
            # Add the new encounter to the subject encounters list.
            self.encounters.insert(index, encounter)

        @property
        def clinical_encounters(self):
            """
            :return: the non-:class:`qirest_client.imaging.Session` encounters
            """
            return (enc for enc in self.encounters if not self._is_session(enc))

        def pre_delete(cls, sender, document, **kwargs):
            """Cascade delete the subject's sessions."""
            for sess in self.sessions:
                sess.delete()

        def _is_session(self, encounter):
            return isinstance(encounter, Session)

        def __str__(self):
            return ("%s %s Subject %d" %
                    (self.project, self.collection, self.number))

    signals.pre_delete.connect(Subject.pre_delete, sender=Subject)
lang: Python
proba: 0.000221
diff:
    @@ -2104,24 +2104,128 @@ d data.%22%22%22%0A%0A + diagnosis_date = fields.DateTimeField()%0A %22%22%22The date of the first recorded lesion diagnosis.%22%22%22%0A%0A races =

commit: be1c89f107bb569fce9372c2bc051da84e875192
subject: Update exception log message
old_file: volt/logging.py
new_file: volt/logging.py
old_contents:
    """Logging-related functionalities."""
    # (c) 2012-2022 Wibowo Arindrarto <[email protected]>

    import os
    import sys
    from dataclasses import dataclass
    from logging.config import dictConfig
    from pathlib import Path
    from typing import Optional

    import better_exceptions
    import structlog
    from click import style
    from structlog.contextvars import bind_contextvars, merge_contextvars


    def _get_exceptions_max_length(default: int = 65) -> Optional[int]:
        value = os.environ.get("VOLT_EXCEPTIONS_MAX_LENGTH", None)
        if isinstance(value, str) and value.isdigit():
            return int(value)
        return None


    better_exceptions.MAX_LENGTH = _get_exceptions_max_length()


    @dataclass
    class _LogLabel:
        bg: str
        text: str

        @property
        def styled(self) -> str:
            return style(f" {self.text} ", fg=self.bg, bold=True, reverse=True)


    _level_styles = {
        "notset": _LogLabel(text="???", bg="white"),
        "debug": _LogLabel(text="DBG", bg="magenta"),
        "info": _LogLabel(text="INF", bg="cyan"),
        "warn": _LogLabel(text="WRN", bg="yellow"),
        "error": _LogLabel(text="ERR", bg="red"),
        "critical": _LogLabel(text="CRT", bg="red"),
        "exception": _LogLabel(text="EXC", bg="red"),
    }

    _default_style = _level_styles["notset"]


    class _ConsoleLogRenderer:
        def __call__(
            self,
            _logger: structlog.types.WrappedLogger,
            _name: str,
            event_dict: structlog.types.EventDict,
        ) -> str:
            level = event_dict.pop("level", "notset")
            label = _level_styles.get(level, _default_style)
            logstr = f"{label.styled} "

            event = event_dict.pop("event", "")
            if isinstance(event, Exception):
                event = f"{event.__class__.__name__}: {event}"
            else:
                event = f"{event}"
            logstr += style(f"{event[0].upper() + event[1:]}", bold=True)

            exc_info = event_dict.pop("exc_info", None)

            if event_dict.keys():
                logstr += " ·"

            for key in event_dict.keys():
                value = event_dict[key]
                if not isinstance(value, (str, Path)):
                    value = repr(value)
                else:
                    value = f"{value}"
                logstr += style(f" {key}", fg="bright_black")
                logstr += style("=", fg="bright_white")
                logstr += style(f"{value}", fg="yellow")

            if exc_info is not None:
                if not isinstance(exc_info, tuple):
                    exc_info = sys.exc_info()
                logstr += "\n"
                logstr += "".join(better_exceptions.format_exception(*exc_info))

            return logstr


    def bind_drafts_context(drafts: bool) -> None:
        if not drafts:
            return None
        bind_contextvars(drafts=True)
        return None


    def init_logging(log_level: str) -> None:
        proc_chain: list[structlog.types.Processor] = [
            merge_contextvars,
            structlog.stdlib.add_log_level,
        ]

        log_config = {
            "version": 1,
            "disable_existing_loggers": False,
            "root": {
                "level": log_level.upper(),
                "handlers": ["err_console"],
            },
            "handlers": {
                "err_console": {
                    "class": "logging.StreamHandler",
                    "formatter": "console_formatter",
                    "stream": "ext://sys.stderr",
                }
            },
            "formatters": {
                "console_formatter": {
                    "()": "structlog.stdlib.ProcessorFormatter",
                    "processor": _ConsoleLogRenderer(),
                    "foreign_pre_chain": proc_chain,
                },
            },
        }

        processors = proc_chain + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter]
        structlog.configure_once(
            processors=processors,
            logger_factory=structlog.stdlib.LoggerFactory(),
            wrapper_class=structlog.stdlib.BoundLogger,
            cache_logger_on_first_use=True,
        )

        dictConfig(log_config)
lang: Python
proba: 0.000002
diff:
    @@ -1690,16 +1690,25 @@ f%22%7Bevent +%7D (%7Bevent .__class @@ -1723,17 +1723,9 @@ e__%7D -: %7Bevent%7D +) %22%0A

commit: 2304dcf3ebf189d7c3b1a00211a288e359c4cbb5
subject: Rename signals for consistency
old_file: volt/signals.py
new_file: volt/signals.py
old_contents:
    """Signals for hooks."""
    # Copyright (c) 2012-2022 Wibowo Arindrarto <[email protected]>
    # SPDX-License-Identifier: BSD-3-Clause

    from typing import Any

    import structlog
    from blinker import signal, NamedSignal
    from structlog.contextvars import bound_contextvars


    log = structlog.get_logger(__name__)


    post_site_load_engines = signal("post-site-load-engines")
    post_site_collect_targets = signal("post-site-collect-targets")
    pre_site_write = signal("pre-site-write")


    def send(signal: NamedSignal, *args: Any, **kwargs: Any) -> None:
        with bound_contextvars(signal=f"{signal.name}"):
            log.debug("sending to signal")
            rvs = signal.send(*args, **kwargs)
            log.debug("sent to signal", num_receiver=len(rvs))
        return None


    def _clear() -> None:
        for s in (
            post_site_load_engines,
            post_site_collect_targets,
            pre_site_write,
        ):
            log.debug("clearing receivers", signal=s.name)
            s.receivers.clear()
        return None
lang: Python
proba: 0.000011
diff:
    @@ -343,19 +343,19 @@ post -- +_ site -- +_ load -- +_ engi @@ -404,22 +404,22 @@ post -- +_ site -- +_ collect -- +_ targ @@ -456,14 +456,14 @@ %22pre -- +_ site -- +_ writ

commit: b58dcf4ce81b234de6701468296f4185ed63a8e2
subject: Add filters to the admin interface
old_file: voting/admin.py
new_file: voting/admin.py
old_contents:
    # -*- coding: utf-8 -*-
    from __future__ import unicode_literals

    from django.contrib import admin

    from voting.models import Position, SACYear, Nomination

    def make_rejected(ModelAdmin, request, queryset):
        queryset.update(is_rejected=True)
    make_rejected.short_description = "رفض المرشحـ/ين المختار/ين"

    class NominationAdmin(admin.ModelAdmin):
        list_filter = ['position', 'is_rejected']
        list_display = ['__unicode__', 'cv', 'plan', 'is_rejected']
        search_fields = ['position__title', 'user__username', 'user__email',
                         'user__profile__ar_first_name',
                         'user__profile__ar_middle_name',
                         'user__profile__ar_last_name',
                         'user__profile__en_first_name',
                         'user__profile__en_middle_name',
                         'user__profile__en_last_name',
                         'user__profile__student_id',
                         'user__profile__mobile_number']
        actions = [make_rejected]

    admin.site.register(Nomination, NominationAdmin)
    admin.site.register(SACYear)
    admin.site.register(Position)
lang: Python
proba: 0
diff:
    @@ -977,16 +977,93 @@ ected%5D%0A%0A +class PositionAdmin(admin.ModelAdmin):%0A list_filter = %5B'entity', 'year'%5D%0A%0A admin.si @@ -1123,23 +1123,39 @@ egister( -SACYear +Position, PositionAdmin )%0Aadmin. @@ -1168,18 +1168,17 @@ egister( -Position +SACYear )%0A

commit: cba218d658d3feb5b0953edd44db7f88bd728077
subject: Add is_default and user current bar id
old_file: mixmind/models.py
new_file: mixmind/models.py
old_contents:
    # -*- coding: utf-8 -*-
    from sqlalchemy.orm import relationship, backref
    from sqlalchemy import Boolean, DateTime, Column, Integer, String, ForeignKey, Enum, Float, Text, Unicode
    import pendulum
    from flask_security import UserMixin, RoleMixin

    from . import db
    from .util import VALID_UNITS

    class RolesUsers(db.Model):
        id = Column(Integer(), primary_key=True)
        user_id = Column('user_id', Integer(), ForeignKey('user.id'))
        role_id = Column('role_id', Integer(), ForeignKey('role.id'))

    class Role(db.Model, RoleMixin):
        id = Column(Integer(), primary_key=True)
        name = Column(String(80), unique=True)
        description = Column(String(255))

    class User(db.Model, UserMixin):
        id = Column(Integer, primary_key=True)
        email = Column(Unicode(length=127), unique=True)
        first_name = Column(Unicode(length=127))
        last_name = Column(Unicode(length=127))
        nickname = Column(Unicode(length=127))
        password = Column(Unicode(length=127))
        # TODO timezone rip
        last_login_at = Column(DateTime())
        current_login_at = Column(DateTime())
        last_login_ip = Column(String(63))
        current_login_ip = Column(String(63))
        login_count = Column(Integer)
        active = Column(Boolean())
        confirmed_at = Column(DateTime())
        roles = relationship('Role', secondary='roles_users', backref=backref('users', lazy='dynamic')) # many to many
        orders = relationship('Order', back_populates="user", foreign_keys='Order.user_id')# primaryjoin="User.id==Order.user_id") # one to many
        orders_served = relationship('Order', back_populates="bartender", foreign_keys='Order.bartender_id')#primaryjoin="User.id==Order.bartender_id") # one to many (for bartenders)
        works_at = relationship('Bar', secondary='bartenders', backref=backref('bartenders', lazy='dynamic')) # many to many
        owns = relationship('Bar', back_populates="owner", foreign_keys='Bar.owner_id') # one to many
        venmo_id = Column(String(63)) # venmo id as a string

        def get_name(self, short=False):
            if short:
                if self.nickname:
                    return unicode(self.nickname)
                else:
                    return unicode(self.first_name)
            return u'{} {}'.format(self.first_name, self.last_name)

        def get_name_with_email(self):
            return u'{} ({})'.format(self.get_name(short=True), self.email)

        def get_role_names(self):
            return u', '.join([role.name for role in self.roles])

        def get_bar_names(self):
            return u', '.join([bar.cname for bar in self.works_at])

    class Order(db.Model):
        id = Column(Integer, primary_key=True)
        bar_id = Column(Integer, ForeignKey('bar.id'))
        user_id = Column(Integer, ForeignKey('user.id'))
        bartender_id = Column(Integer, ForeignKey('user.id'))
        user = relationship('User', back_populates="orders", foreign_keys=[user_id])
        bartender = relationship('User', back_populates="orders_served", foreign_keys=[bartender_id])
        timestamp = Column(DateTime())
        confirmed = Column(DateTime())
        recipe_name = Column(Unicode(length=127))
        recipe_html = Column(Text())

        def where(self):
            bar = Bar.query.filter_by(id=self.bar_id).one_or_none()
            if bar:
                return bar.name

        def time_to_confirm(self):
            if not self.confirmed:
                return "N/A"
            diff = pendulum.instance(self.confirmed) - pendulum.instance(self.timestamp)
            return "{} minutes, {} seconds".format(diff.minutes, diff.remaining_seconds)

    class Bar(db.Model):
        id = Column(Integer(), primary_key=True)
        cname = Column(Unicode(length=63), unique=True) # unique name for finding the bar
        name = Column(Unicode(length=63))
        tagline = Column(Unicode(length=255), default=u"Tips – always appreciated, never required")
        is_active = Column(Boolean(), default=False)
        bartender_on_duty = Column(Integer(), ForeignKey('user.id'))
        owner_id = Column(Integer(), ForeignKey('user.id'))
        owner = relationship('User', back_populates="owns", foreign_keys=[owner_id])
        ingredients = relationship('Ingredient') # one to many
        orders = relationship('Order') # one to many
        # browse display settings
        markup = Column(Float(), default=1.10)
        prices = Column(Boolean(), default=True)
        stats = Column(Boolean(), default=False)
        examples = Column(Boolean(), default=False)
        convert = Column(Enum(*(['']+VALID_UNITS)), default='oz')
        prep_line = Column(Boolean(), default=False)
        origin = Column(Boolean(), default=False)
        info = Column(Boolean(), default=True)
        variants = Column(Boolean(), default=False)
        summarize = Column(Boolean(), default=True)

    class Bartenders(db.Model):
        id = Column(Integer(), primary_key=True)
        user_id = Column(Integer(), ForeignKey('user.id'))
        bar_id = Column(Integer(), ForeignKey('bar.id'))
lang: Python
proba: 0.000001
diff:
    @@ -725,24 +725,56 @@ y_key=True)%0A + # flask-security attributes%0A email = @@ -993,32 +993,8 @@ 7))%0A - # TODO timezone rip%0A @@ -1183,16 +1183,18 @@ (Integer +() )%0A ac @@ -1252,24 +1252,86 @@ DateTime())%0A + # other attributes%0A current_bar_id = Column(Integer())%0A roles = @@ -3864,24 +3864,100 @@ ault=False)%0A + is_default = Column(Boolean(), default=False) # the current default bar%0A bartende
a6d037609d659a237783a3504e7090066a02fa76
make metaval have more elasticity
clime/command.py
clime/command.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from re import compile from inspect import getdoc, isbuiltin from .helpers import getargspec, getoptmetas, autotype, smartlyadd class ScanError(Exception): pass class Command(object): '''Make a function, a built-in function or a bound method accept arguments from command line. .. versionchanged:: 0.1.4 It is almost rewritten.''' arg_desc_re = compile(r'^-') arg_re = compile(r'--?(?P<key>[^ =,]+)[ =]?(?P<meta>[^ ,]+)?') arg_meta_map = { 'N': int, 'NUM': int, 'NUMBER': int, 'S': str, 'STR': str, 'STRING': str, '<n>': int, '<num>': int, '<number>': int, '<s>': str, '<str>': str, '<string>': str, None: autotype } def __init__(self, func): self.func = func arg_names, vararg_name, keyarg_name, arg_defaults = getargspec(func) # copy the argument spec info to instance self.arg_names = arg_names self.vararg_name = vararg_name self.keyarg_name = keyarg_name self.arg_defaults = arg_defaults # additional information self.arg_name_set = set(arg_names) self.arg_default_map = dict((k, v) for k, v in zip( *map(reversed, (arg_names or [], arg_defaults or [])) )) # try to find the metas and aliases out doc = getdoc(func) if not doc: return self.arg_alias_map = {} self.arg_meta_map = {} for line in doc.splitlines(): if self.arg_desc_re.match(line): meta_map = {} aliases_set = set() for m in self.arg_re.finditer(line): key, meta = m.group('key', 'meta') aliases_set.add(key) meta_map[key] = meta arg_name_set = self.arg_name_set & aliases_set aliases_set -= arg_name_set if arg_names: arg_name = arg_name_set.pop() self.arg_meta_map[arg_name] = meta_map[arg_name] for alias in aliases_set: self.arg_alias_map[alias] = arg_name def cast(self, key, val, meta=None): return self.arg_meta_map.get(meta)(val) def merge(self, key, val, new_val): return smartlyadd(val, new_val) def scan(self, raw_args): '''Scan the `raw_args`, and return a tuple (`pargs`, `kargs`). `raw_args` can be `string` or `list`. Uses *keyword-first resolving* -- If keyword and positional arguments are at same place, the keyword argument takes this place and pushes the positional argument to next one. Example: >>> def files(mode='r', *paths): ... print mode, paths ... >>> files_cmd = Command(files) >>> files_cmd.scan('--mode w f1.txt f2.txt') (['w', 'f1.txt', 'f2.txt'], {}) >>> files_cmd.execute('--mode w f1.txt f2.txt') w ('f1.txt', 'f2.txt') If an no-value options is given a function in which a default value is boolean type, it will put the opposite boolean into `optargs`. If no option is given to a function in which a default value is boolean type, it will put the opposite boolean value into `optargs`. >>> def test(b=True, x=None): ... print b, x ... >>> test_cmd = Command(test) >>> test_cmd.execute('-b') False None On the other hand, if more than one options are given to a function and 1. the default of function is boolean: it will count this options; 2. otherwise: it will put the value into a list. >>> test_cmd.execute('-bbb -x first -x second -x third') 3 ['first', 'second', 'third'] .. versionchanged:: 0.1.4 Use custom parser instead of `getopt`. .. versionchanged:: 0.1.4 It is rewritten from `Command.parse` (0.1.3). ''' pass def execute(self, raw_args): '''Execute this command with `raw_args`.''' pargs, kargs = self.scan(raw_args) return self.func(*pargs, **kargs) def get_usage(self, is_default=False): '''Return the usage of this command. Example: :: files [--mode VAL] [PATHS]... If `isdefault` is True, it will render usage without function name. 
''' rbindings = {} for opt, target in self.bindings.iteritems(): shortcuts = rbindings.setdefault(target, []) shortcuts.append(opt) usage = [] for optarg in optargs: opts = [optarg] opts.extend( rbindings.get(optarg, []) ) for i, opt in enumerate(opts): opts[i] ='%s%s' % ('-' * (1+(len(opt)>1)), opt.replace('_', '-')) meta = self.metavars.get(opt, None) if meta: opts[i] += ' '+meta usage.append('[%s]' % ' | '.join(opts)) posargs = self.args[:-len(optargs) or None] usage.extend( map(str.upper, posargs) ) if self.vararg: usage.append('[%s]... ' % self.vararg.upper()) if is_default: return '%s' % ' '.join(usage) else: name = self.func.__name__ return '%s %s' % (name, ' '.join(usage)) if __name__ == '__main__': def f(number, message='default messagea', switcher=False): '''It is just a test function. -n=<n>, --number=<n> The number. -m=<str>, --message=<str> The default. -s, --switcher The switcher. ''' return number, message cmd = Command(f) print cmd.arg_names print cmd.arg_name_set print cmd.vararg_name print cmd.keyarg_name print cmd.arg_default_map print cmd.arg_meta_map print cmd.arg_alias_map
Python
0.000001
@@ -529,115 +529,21 @@ ' -N': int, 'NUM': int, 'NUMBER': int,%0A 'S': str, 'STR': str, 'STRING': str,%0A '%3Cn%3E': int, '%3C +n': int, ' num -%3E ': i @@ -551,16 +551,14 @@ t, ' -%3C number -%3E ': i @@ -574,11 +574,9 @@ ' -%3Cs%3E +s ': s @@ -580,21 +580,19 @@ : str, ' -%3C str -%3E ': str, @@ -596,16 +596,14 @@ r, ' -%3C string -%3E ': s @@ -2064,16 +2064,110 @@ g_name%0A%0A + def get_type(self, meta):%0A return self.arg_meta_map.get(meta.strip('%3C%3E').lower())%0A%0A def @@ -2219,32 +2219,24 @@ rn self. -arg_meta_map.get +get_type (meta)(v

commit: 4ea4f12fe589d44b2f27f6e8a645f463b15d146a
subject: Use raw_id_fields for TeamMembership inline to avoid select field with *all* users.
old_file: studygroups/admin.py
new_file: studygroups/admin.py
old_contents:
    from django.contrib import admin

    # Register your models here.
    from studygroups.models import Course
    from studygroups.models import StudyGroup
    from studygroups.models import Meeting
    from studygroups.models import Application
    from studygroups.models import Reminder
    from studygroups.models import Profile
    from studygroups.models import Team
    from studygroups.models import TeamMembership
    from studygroups.models import TeamInvitation

    class ApplicationInline(admin.TabularInline):
        model = Application

    class StudyGroupAdmin(admin.ModelAdmin):
        inlines = [ ApplicationInline ]
        list_display = ['course', 'city', 'facilitator', 'start_date', 'day', 'signup_open']

    class TeamMembershipInline(admin.TabularInline):
        model = TeamMembership

    class TeamAdmin(admin.ModelAdmin):
        list_display = ('name', 'page_slug')
        inlines = [ TeamMembershipInline ]

    class ApplicationAdmin(admin.ModelAdmin):
        list_display = ('name', 'study_group', 'email', 'mobile', 'created_at')

    def reminder_course_title(obj):
        return obj.study_group.course.title

    class ReminderAdmin(admin.ModelAdmin):
        list_display = (reminder_course_title, 'email_subject', 'sent_at')

    class StudyGroupInline(admin.TabularInline):
        model = StudyGroup
        fields = ('venue_name', 'city', 'start_date', 'day')
        readonly_fields = fields

        def has_add_permission(self, request, obj=None):
            return False

        def has_delete_permission(self, request, obj=None):
            return False

    class CourseAdmin(admin.ModelAdmin):

        def get_queryset(self, request):
            qs = super(CourseAdmin, self).get_queryset(request)
            return qs.active()

        def created_by(course):
            def display_user(user):
                return '{} {}'.format(user.first_name, user.last_name)
            return display_user(course.created_by) if course.created_by else 'P2PU'

        def email(course):
            return course.created_by.email if course.created_by else '-'

        def learning_circles(course):
            return course.studygroup_set.active().count()

        def listed(course):
            return not course.unlisted
        listed.boolean = True

        list_display = ('id', 'title', 'provider', 'on_demand', 'topics', learning_circles, created_by, email, listed, 'license')
        exclude = ('deleted_at',)
        inlines = [StudyGroupInline]
        search_fields = ['title', 'provider', 'topics', 'created_by__email', 'license']

    class ProfileAdmin(admin.ModelAdmin):
        def user(profile):
            return " ".join([profile.user.first_name, profile.user.last_name])
        list_display = [user, 'mailing_list_signup', 'communication_opt_in']
        search_fields = ['user__email']

    admin.site.register(Course, CourseAdmin)
    admin.site.register(StudyGroup, StudyGroupAdmin)
    admin.site.register(Meeting)
    admin.site.register(Application, ApplicationAdmin)
    admin.site.register(Reminder, ReminderAdmin)
    admin.site.register(Team, TeamAdmin)
    admin.site.register(TeamInvitation)
    admin.site.register(Profile, ProfileAdmin)
lang: Python
proba: 0
diff:
    @@ -744,16 +744,46 @@ mbership +%0A raw_id_fields = (%22user%22,) %0A%0Aclass
847fc43b572384f8afcd395ada275b053e24a193
Fix aiohttp test
tests/test_aiohttp.py
tests/test_aiohttp.py
try:
    import aiohttp
    import aiohttp.server
except ImportError:
    skip_tests = True
else:
    skip_tests = False


import asyncio
import unittest

from uvloop import _testbase as tb


class _TestAioHTTP:

    def test_aiohttp_basic_1(self):
        PAYLOAD = b'<h1>It Works!</h1>' * 10000

        class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):

            async def handle_request(self, message, payload):
                response = aiohttp.Response(
                    self.writer, 200, http_version=message.version
                )
                response.add_header('Content-Type', 'text/html')
                response.add_header('Content-Length', str(len(PAYLOAD)))
                response.send_headers()
                response.write(PAYLOAD)
                await response.write_eof()

        asyncio.set_event_loop(self.loop)
        f = self.loop.create_server(
            lambda: HttpRequestHandler(keepalive_timeout=1),
            '0.0.0.0', '0')
        srv = self.loop.run_until_complete(f)

        port = srv.sockets[0].getsockname()[1]

        async def test():
            for addr in (('localhost', port),
                         ('127.0.0.1', port)):
                async with aiohttp.ClientSession() as client:
                    async with client.get('http://{}:{}'.format(*addr)) as r:
                        self.assertEqual(r.status, 200)
                        self.assertEqual(len(await r.text()), len(PAYLOAD))

        self.loop.run_until_complete(test())

        srv.close()
        self.loop.run_until_complete(srv.wait_closed())


@unittest.skipIf(skip_tests, "no aiohttp module")
class Test_UV_AioHTTP(_TestAioHTTP, tb.UVTestCase):
    pass


@unittest.skipIf(skip_tests, "no aiohttp module")
class Test_AIO_AioHTTP(_TestAioHTTP, tb.AIOTestCase):
    pass
Python
0.000009
@@ -36,22 +36,19 @@ aiohttp. -server +web %0Aexcept @@ -257,17 +257,16 @@ YLOAD = -b '%3Ch1%3EIt @@ -291,82 +291,8 @@ 00%0A%0A - class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):%0A%0A @@ -309,14 +309,10 @@ def -handle +on _req @@ -320,298 +320,71 @@ est( -self, message, payload):%0A response = aiohttp.Response(%0A self.writer, 200, http_version=message.version%0A )%0A response.add_header('Content-Type', 'text/html')%0A response.add_header('Content-Length', str(len( +request):%0A return aiohttp.web.Response(text= PAYLOAD) ))%0A @@ -379,18 +379,17 @@ PAYLOAD) -)) +%0A %0A @@ -393,164 +393,138 @@ - response.send_headers()%0A response.write(PAYLOAD)%0A await response.write_eof()%0A%0A asyncio.set_event_loop(self.loop +asyncio.set_event_loop(self.loop)%0A app = aiohttp.web.Application(loop=self.loop)%0A app.router.add_get('/', on_request )%0A%0A @@ -575,54 +575,25 @@ -lambda: HttpRequestHandler(keepalive_timeout=1 +app.make_handler( ),%0A @@ -1057,58 +1057,89 @@ -self.assertEqual(len(await r.text()), len( +result = await r.text()%0A self.assertEqual(result, PAYLOAD) )%0A%0A @@ -1134,17 +1134,16 @@ PAYLOAD) -) %0A%0A @@ -1194,17 +1194,50 @@ s -rv.close( +elf.loop.run_until_complete(app.shutdown() )%0A @@ -1275,23 +1275,19 @@ ete( -srv.wait_closed +app.cleanup ())%0A
61969ac21d7eda1162cdedd3f066aa8e396fb5ba
Fix test output
raven/scripts/runner.py
raven/scripts/runner.py
""" raven.scripts.runner ~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from __future__ import print_function import logging import os import sys import time from optparse import OptionParser from raven import Client, get_version from raven.utils.json import json def store_json(option, opt_str, value, parser): try: value = json.loads(value) except ValueError: print("Invalid JSON was used for option %s. Received: %s" % (opt_str, value)) sys.exit(1) setattr(parser.values, option.dest, value) def get_loadavg(): if hasattr(os, 'getloadavg'): return os.getloadavg() return None def get_uid(): try: import pwd except ImportError: return None return pwd.getpwuid(os.geteuid())[0] def send_test_message(client, options): sys.stdout.write("Client configuration:\n") for k in ('servers', 'project', 'public_key', 'secret_key'): sys.stdout.write(' %-15s: %s\n' % (k, getattr(client, k))) sys.stdout.write('\n') remote_config = client.remote if not remote_config.is_active(): sys.stdout.write("Error: DSN configuration is not valid!\n") sys.exit(1) if not client.is_enabled(): sys.stdout.write('Error: Client reports as being disabled!\n') sys.exit(1) data = options.get('data', { 'culprit': 'raven.scripts.runner', 'logger': 'raven.test', 'request': { 'method': 'GET', 'url': 'http://example.com', } }) sys.stdout.write('Sending a test message... ') sys.stdout.flush() ident = client.get_ident(client.captureMessage( message='This is a test message generated using ``raven test``', data=data, level=logging.INFO, stack=True, tags=options.get('tags', {}), extra={ 'user': get_uid(), 'loadavg': get_loadavg(), }, )) sys.stdout.write('Event ID was %r\n' % (ident,)) def main(): root = logging.getLogger('sentry.errors') root.setLevel(logging.DEBUG) # if len(root.handlers) == 0: # root.addHandler(logging.StreamHandler()) parser = OptionParser(version=get_version()) parser.add_option("--data", action="callback", callback=store_json, type="string", nargs=1, dest="data") parser.add_option("--tags", action="callback", callback=store_json, type="string", nargs=1, dest="tags") (opts, args) = parser.parse_args() dsn = ' '.join(args[1:]) or os.environ.get('SENTRY_DSN') if not dsn: print("Error: No configuration detected!") print("You must either pass a DSN to the command, or set the SENTRY_DSN environment variable.") sys.exit(1) print("Using DSN configuration:") print(" ", dsn) print() client = Client(dsn, include_paths=['raven']) send_test_message(client, opts.__dict__) # TODO(dcramer): correctly support async models time.sleep(3) if client.state.did_fail(): sys.stdout.write('error!\n') sys.exit(1) sys.stdout.write('success!\n')
Python
0.988755
@@ -1006,15 +1006,16 @@ n (' -servers +base_url ', ' @@ -1114,16 +1114,23 @@ r(client +.remote , k)))%0A
44ce9fbb63a2b764e759c63b3bd6f20d2e506a5c
Use from-import for ykman.descriptor
test/on_yubikey/util.py
test/on_yubikey/util.py
from __future__ import print_function
import click
import functools
import os
import sys
import test.util
import unittest
import ykman.descriptor

from ykman.util import TRANSPORT


_skip = True

_test_serials = os.environ.get('DESTRUCTIVE_TEST_YUBIKEY_SERIALS')
_no_prompt = os.environ.get('DESTRUCTIVE_TEST_DO_NOT_PROMPT') == 'TRUE'
_versions = {}

if _test_serials is not None:
    _test_serials = set(int(s) for s in _test_serials.split(','))
    _serials_present = set()

    for dev in ykman.descriptor.list_devices():
        _serials_present.add(dev.serial)
        _versions[dev.serial] = dev.version
        dev.close()

    _unwanted_serials = _serials_present.difference(_test_serials)

    if len(_unwanted_serials) != 0:
        print('Encountered YubiKeys not listed in serial numbers to be used '
              'for the test: {}'.format(_unwanted_serials),
              file=sys.stderr)
        sys.exit(1)

    if _serials_present != _test_serials:
        print('Test YubiKeys missing: {}'
              .format(_test_serials.difference(_serials_present)),
              file=sys.stderr)
        sys.exit(1)

    _skip = False

    if not _no_prompt:
        click.confirm(
            'Run integration tests? This will erase data on the YubiKeys'
            ' with serial numbers: {}. Make sure these are all keys used for'
            ' development.'.format(_serials_present),
            abort=True)


def _specialize_ykman_cli(dev, _transports):
    '''
    Creates a specialized version of ykman_cli preset with the serial number
    of the given device.
    '''
    return functools.partial(test.util.ykman_cli, '--device', dev.serial)


def _specialize_open_device(dev, transports):
    '''
    Creates a specialized version of open_device which will open the given
    device using the given transport(s).
    '''
    return functools.partial(
        ykman.descriptor.open_device,
        transports=transports,
        serial=dev.serial
    )


def _make_skipped_original_test_cases(create_test_classes):
    for test_class in create_test_classes(None):
        yield unittest.skip('No YubiKey available for test')(test_class)


def _device_satisfies_test_conditions(dev, test_method):
    if '_yubikey_conditions' in dir(test_method):
        conditions = getattr(test_method, '_yubikey_conditions')
        return all(cond(dev) for cond in conditions)
    else:
        return True


def _delete_inapplicable_test_methods(dev, test_class):
    for method_name in _get_test_method_names(test_class):
        method = getattr(test_class, method_name)
        if not _device_satisfies_test_conditions(dev, method):
            delattr(test_class, method_name)
    return test_class


def _add_suffix_to_class_name(transport, dev, test_class):
    setattr(test_class, '_original_test_name', test_class.__qualname__)
    fw_version = '.'.join(str(v) for v in dev.version)
    test_class.__qualname__ = f'{test_class.__qualname__}_{transport.name}_{fw_version}_{dev.serial}'  # noqa: E501
    return test_class


def _create_test_classes_for_device(
        transport,
        dev,
        create_test_classes,
        create_test_class_context
):
    context = create_test_class_context(dev, transport)
    for test_class in create_test_classes(context):
        _delete_inapplicable_test_methods(dev, test_class)
        _add_suffix_to_class_name(transport, dev, test_class)
        yield test_class


def _get_test_method_names(test_class):
    return set(
        attr_name for attr_name in dir(test_class)
        if attr_name.startswith('test')
    )


def _multiply_test_classes_by_devices(
        transports,
        create_test_classes,
        create_test_class_context
):
    tests = []
    covered_test_names = {}

    for transport in (t for t in TRANSPORT if transports & t):
        for serial in _test_serials or []:
            with ykman.descriptor.open_device(
                    transports=transport,
                    serial=serial
            ) as dev:
                for test_class in _create_test_classes_for_device(
                        transport,
                        dev,
                        create_test_classes,
                        create_test_class_context
                ):
                    orig_name = test_class._original_test_name
                    test_names = _get_test_method_names(test_class)
                    covered_test_names[orig_name] = (
                        covered_test_names.get(orig_name, set())
                        .union(test_names))
                    for test_method_name in test_names:
                        tests.append(test_class(test_method_name))

    return tests, covered_test_names


def _make_skips_for_uncovered_tests(create_test_classes, covered_test_names):
    for original_test_class in _make_skipped_original_test_cases(
            create_test_classes):
        original_test_names = _get_test_method_names(original_test_class)
        uncovered_test_names = original_test_names.difference(
            covered_test_names.get(
                original_test_class.__qualname__, set()))

        for uncovered_test_name in uncovered_test_names:
            yield original_test_class(uncovered_test_name)


def _make_test_suite(transports, create_test_class_context):
    def decorate(create_test_classes):
        def additional_tests():
            (tests, covered_test_names) = _multiply_test_classes_by_devices(
                transports,
                create_test_classes,
                create_test_class_context
            )

            skipped_tests = _make_skips_for_uncovered_tests(
                create_test_classes, covered_test_names)

            suite = unittest.TestSuite()
            suite.addTests(tests)
            suite.addTests(skipped_tests)

            return suite
        return additional_tests
    return decorate


def device_test_suite(transports):
    if not (isinstance(transports, TRANSPORT) or isinstance(transports, int)):
        raise ValueError('Argument to @device_test_suite must be a TRANSPORT value.')  # noqa: E501

    return _make_test_suite(transports, _specialize_open_device)


def cli_test_suite(transports):
    if not (isinstance(transports, TRANSPORT) or isinstance(transports, int)):
        raise ValueError('Argument to @cli_test_suite must be a TRANSPORT value.')  # noqa: E501

    return _make_test_suite(transports, _specialize_ykman_cli)


destructive_tests_not_activated = (
    _skip, 'DESTRUCTIVE_TEST_YUBIKEY_SERIALS == None')


@unittest.skipIf(*destructive_tests_not_activated)
class DestructiveYubikeyTestCase(unittest.TestCase):
    pass
Python
0
@@ -115,22 +115,21 @@ nittest%0A -import +%0Afrom ykman.d @@ -137,17 +137,49 @@ scriptor -%0A + import list_devices, open_device %0Afrom yk @@ -516,33 +516,16 @@ dev in -ykman.descriptor. list_dev @@ -1887,33 +1887,16 @@ -ykman.descriptor. open_dev @@ -3871,58 +3871,20 @@ ith -ykman.descriptor.open_device(%0A +open_device( tran @@ -3900,36 +3900,16 @@ ansport, -%0A serial= @@ -3914,29 +3914,16 @@ l=serial -%0A ) as dev
8b6cbdbae4dedfbbf025a7ecb20c7d7b3959ed11
support to overwrite position in board
rbgomoku/core/player.py
rbgomoku/core/player.py
from core.board import Piece


class AIPlayer:
    """ Abstract AI players.
        To construct an AI player:
        Construct an instance (of its subclass) with the game Board
    """
    def __init__(self, board, piece):
        self._board = board
        self.my_piece = piece
        self.opponent = Piece.WHITE if piece == Piece.BLACK else Piece.BLACK

    # Abstract method to get next move. Return int[2] of {row, col} */
    def play(self, row, col):
        raise NotImplemented


class HumanPlayer(AIPlayer):
    """ Human Player """
    def __init__(self, board, piece):
        super(HumanPlayer, self).__init__(board, piece)

    def play(self, row, col):
        self._board.play_piece(self.my_piece, row, col)
        self._board.has_winner(self.my_piece, row, col)
        return self._board.winner
Python
0
@@ -1,8 +1,52 @@ +from core import OverwritePositionException%0A from cor @@ -401,79 +401,8 @@ CK%0A%0A - # Abstract method to get next move. Return int%5B2%5D of %7Brow, col%7D */%0A @@ -542,32 +542,44 @@ lf, board, piece +, first=True ):%0A super @@ -620,16 +620,47 @@ , piece) +%0A self.first = not first %0A%0A de @@ -683,32 +683,35 @@ , col):%0A +if self._board.play @@ -710,43 +710,87 @@ ard. -play +get _piece( -self.my_piece, row, col) +row, col) != Piece.NONE:%0A raise OverwritePositionException %0A @@ -786,32 +786,39 @@ ception%0A +return self._board.has_ @@ -817,18 +817,18 @@ ard. -has_winner +play_piece (sel @@ -853,38 +853,121 @@ ol)%0A - return self._board.winner%0A +%0A def __repr__(self):%0A player_number = int(self.first) + 1%0A return 'Player %7B%7D'.format(player_number)
f0b7eea8a603e331be6db71beb2766d022dacb23
Refactor the method that checks for changes in user agent
tests/test_browser.py
tests/test_browser.py
# -*- coding: utf-8 -*-

from __future__ import with_statement

import __builtin__

try:
    import unittest2 as unittest
except ImportError:
    import unittest

import warnings

from splinter.exceptions import DriverNotFoundError
from splinter.utils import deprecate_driver_class
from fake_webapp import EXAMPLE_APP


class BrowserTest(unittest.TestCase):

    def patch_driver(self, pattern):
        self.old_import = __builtin__.__import__

        def custom_import(name, *args, **kwargs):
            if pattern in name:
                return None
            return self.old_import(name, *args, **kwargs)

        __builtin__.__import__ = custom_import

    def unpatch_driver(self, module):
        __builtin__.__import__ = self.old_import
        reload(module)

    def test_should_work_even_without_zope_testbrowser(self):
        self.patch_driver('zope')
        from splinter import browser
        reload(browser)
        assert 'zope.testbrowser' not in browser._DRIVERS, 'zope.testbrowser driver should not be registered when zope.testbrowser is not installed'
        self.unpatch_driver(browser)

    def test_should_raise_an_exception_when_browser_driver_is_not_found(self):
        with self.assertRaises(DriverNotFoundError):
            from splinter.browser import Browser
            Browser('unknown-driver')

    def test_firefox_should_be_able_to_receive_user_agent(self):
        from splinter.browser import Browser
        browser = Browser(driver_name='firefox', user_agent="iphone")
        browser.visit(EXAMPLE_APP + "useragent")
        self.assertTrue(browser.is_text_present('iphone'))
        browser.quit()

    def test_chrome_should_be_able_to_receive_user_agent(self):
        from splinter.browser import Browser
        browser = Browser(driver_name='chrome', user_agent="iphone")
        browser.visit(EXAMPLE_APP + "useragent")
        self.assertTrue(browser.is_text_present('iphone'))
        browser.quit()


class BrowserDeprecationTest(unittest.TestCase):

    class Foo(object):
        pass

    def test_should_deprecate_with_the_given_message(self):
        with warnings.catch_warnings(record=True) as warnings_list:
            warnings.simplefilter('default')
            cls = deprecate_driver_class(self.Foo, message="Foo was deprecated")
            cls()
            warning = warnings_list[0]
            assert type(warning.message) is DeprecationWarning
            self.assertEquals("Foo was deprecated", warning.message.args[0])

    def test_should_prepend_a_Deprecated_to_class(self):
        with warnings.catch_warnings(record=True):
            warnings.simplefilter('default')
            cls = deprecate_driver_class(self.Foo, message="Foo was deprecated")
            self.assertEquals("DeprecatedFoo", cls.__name__)

    def test_webdriverfirefox_should_be_deprecated(self):
        with warnings.catch_warnings(record=True) as warnings_list:
            warnings.simplefilter('default')
            from splinter.browser import Browser
            browser = Browser('webdriver.firefox')
            browser.quit()
            warning_message = warnings_list[0].message.args[0]
            self.assertEquals("'webdriver.firefox' is deprecated, use just 'firefox'", warning_message)

    def test_webdriverchrome_should_be_deprecated(self):
        with warnings.catch_warnings(record=True) as warnings_list:
            warnings.simplefilter('default')
            from splinter.browser import Browser
            browser = Browser('webdriver.chrome')
            browser.quit()
            warning_message = warnings_list[0].message.args[0]
            self.assertEquals("'webdriver.chrome' is deprecated, use just 'chrome'", warning_message)
Python
0.00001
@@ -769,24 +769,343 @@ ad(module)%0A%0A + def browser_can_receive_user_agent(self, webdriver):%0A from splinter.browser import Browser%0A browser = Browser(driver_name=webdriver, user_agent=%22iphone%22)%0A browser.visit(EXAMPLE_APP + %22useragent%22)%0A result = browser.is_text_present('iphone')%0A browser.quit()%0A%0A return result%0A%0A def test @@ -1733,554 +1733,206 @@ -from splinter.browser import Browser%0A browser = Browser(driver_name='firefox', user_agent=%22iphone%22)%0A browser.visit(EXAMPLE_APP + %22useragent%22)%0A self.assertTrue(browser.is_text_present('iphone'))%0A browser.quit()%0A%0A def test_chrome_should_be_able_to_receive_user_agent(self):%0A from splinter.browser import Browser%0A browser = Browser(driver_name='chrome', user_agent=%22iphone%22)%0A browser.visit(EXAMPLE_APP + %22 +self.assertTrue(self.browser_can_receive_user_agent('firefox'))%0A%0A def test_chrome_should_be_able_to_receive_user_agent(self):%0A self.assertTrue(self.browser_can_receive_ user +_ agent -%22)%0A self.assertTrue(browser.is_text_present('iphone'))%0A browser.quit( +('chrome') )%0A%0Ac
2b7d52369206f6a6b9f0ceb4afe28e73e652e806
Fix typo s/router/route
loafer/consumer.py
loafer/consumer.py
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
import asyncio
import json
from functools import partial
import logging

import boto3
import botocore.exceptions

from .conf import settings
from .exceptions import ConsumerError

logger = logging.getLogger(__name__)


class SQSConsumer(object):
    def __init__(self, loop=None):
        self._loop = loop or asyncio.get_event_loop()
        self._semaphore = asyncio.Semaphore(settings.MAX_JOBS)
        self._client = boto3.client('sqs')

    async def process_message(self, route, message):
        logger.info('Message received, handling to route={}'.format(route))
        logger.debug('Processing Message={}', message)

        # TODO: better heuristic
        try:
            body = json.loads(message['Body'])
        except json.decoder.JSONDecodeError:
            body = message['Body']

        content = body
        if isinstance(body, dict):
            if 'Message' in body:
                content = body['Message']

        # Since we don't know what will happen on message handler, use semaphore
        # to protect scheduling or executing too many coroutines/threads
        with await self._semaphore:
            # TODO: depending on content type, we should pass as *args or **kwargs
            logger.info('Message content data type is {!r}'.format(type(content)))
            await route.deliver(content)

        await self.ack_message(route.queue_url, message['ReceiptHandle'])

    async def ack_message(self, queue, receipt):
        logger.info('Acking message')
        logger.debug('receipt={}'.format(receipt))

        fn = partial(self._client.delete_message, QueueUrl=queue, ReceiptHandle=receipt)
        # XXX: Refactor this when boto support asyncio
        return await self._loop.run_in_executor(None, fn)

    async def consume(self, routes):
        while True:
            for router in routes:
                try:
                    messages = await router.fetch_messages()
                except botocore.exceptions.ClientError as exc:
                    logger.exception(exc)
                    raise ConsumerError('Error when fetching messages') from exc

                for message in messages:
                    await self.process_message(router, message)
Python
0.999975
@@ -1875,17 +1875,16 @@ or route -r in rout @@ -1950,17 +1950,16 @@ it route -r .fetch_m @@ -2248,17 +2248,16 @@ ge(route -r , messag
cc78aef74876049a4548398133bad64e405351de
Remove redundant parameters from wagtailuserbar tag; trigger a DeprecationWarning if people are still passing a css path
wagtail/wagtailadmin/templatetags/wagtailuserbar.py
wagtail/wagtailadmin/templatetags/wagtailuserbar.py
from django import template
from wagtail.wagtailadmin.views import userbar
from wagtail.wagtailcore.models import Page

register = template.Library()


@register.simple_tag(takes_context=True)
def wagtailuserbar(context, current_page=None, items=None):
    # Find request object
    request = context['request']

    # Don't render if user doesn't have permission to access the admin area
    if not request.user.has_perm('wagtailadmin.access_admin'):
        return ''

    # Find page object
    if 'self' in context and isinstance(context['self'], Page) and context['self'].id is not None:
        pass
    else:
        return ''

    # Render edit bird
    return userbar.render_edit_frame(request, context) or ''
Python
0
@@ -1,12 +1,29 @@ +import warnings%0A%0A from django @@ -38,16 +38,17 @@ emplate%0A +%0A from wag @@ -236,38 +236,231 @@ t, c -urrent_page=None, items=None): +ss_path=None):%0A if css_path is not None:%0A warnings.warn(%0A %22Passing a CSS path to the wagtailuserbar tag is no longer required; use %7B%25 wagtailuserbar %25%7D instead%22,%0A DeprecationWarning%0A ) %0A%0A
42162048981e26aecb942ca936de86dc1dd82041
Fix #23 actors.Worker identity sent on polling for activity task
swf/actors/worker.py
swf/actors/worker.py
#! -*- coding:utf-8 -*-

from swf.actors import Actor
from swf.models import ActivityTask
from swf.exceptions import PollTimeout


class ActivityWorker(Actor):
    """Activity task worker actor implementation

    Once started, will start polling for activity task,
    to process, and emitting heartbeat until it's stopped
    or crashes for some reason.

    :param  domain: Domain the Actor should interact with
    :type   domain: swf.models.Domain

    :param  task_list: task list the Actor should watch for tasks on
    :type   task_list: string

    :param  last_token: last seen task token
    :type   last_token: string
    """
    def __init__(self, domain, task_list):
        super(ActivityWorker, self).__init__(
            domain,
            task_list
        )

    def cancel(self, task_token, details=None):
        """Responds to ``swf`` that the activity task was canceled

        :param  task_token: canceled activity task token
        :type   task_token: string

        :param  details: provided details about cancel
        :type   details: string
        """
        return self.connection.respond_activity_task_canceled(task_token)

    def complete(self, task_token, result=None):
        """Responds to ``swf` that the activity task is completed

        :param  task_token: completed activity task token
        :type   task_token: string

        :param  result: The result of the activity task.
        :type   result: string
        """
        return self.connection.respond_activity_task_completed(
            task_token,
            result
        )

    def fail(self, task_token, details=None, reason=None):
        """Replies to ``swf`` that the activity task failed

        :param  task_token: canceled activity task token
        :type   task_token: string

        :param  details: provided details about cancel
        :type   details: string

        :param  reason: Description of the error that may assist in diagnostics
        :type   reason: string
        """
        return self.connection.respond_activity_task_failed(
            task_token,
            details,
            reason
        )

    def heartbeat(self, task_token, details=None):
        """Records activity task heartbeat

        :param  task_token: canceled activity task token
        :type   task_token: string

        :param  details: provided details about cancel
        :type   details: string
        """
        return self.connection.record_activity_task_heartbeat(
            task_token,
            details
        )

    def poll(self, task_list=None, **kwargs):
        """Polls for an activity task to process from current
        actor's instance defined ``task_list``

        if no activity task was polled, raises a PollTimeout
        exception.

        :param  task_list: task list the Actor should watch for tasks on
        :type   task_list: string

        :raises: PollTimeout

        :returns: polled activity task
        :type: swf.models.ActivityTask
        """
        task_list = task_list or self.task_list

        polled_activity_data = self.connection.poll_for_activity_task(
            self.domain.name,
            task_list,
        )

        if not 'taskToken' in polled_activity_data:
            raise PollTimeout("Activity Worker poll timed out")

        activity_task = ActivityTask.from_poll(self.domain, self.task_list, polled_activity_data)
        task_token = activity_task.task_token

        return task_token, activity_task
Python
0
@@ -563,63 +563,341 @@ am -last_token: last seen task token%0A :type last_token +identity: Identity of the worker making the request,%0A which is recorded in the ActivityTaskStarted%0A event in the workflow history. This enables%0A diagnostic tracing when problems arise.%0A The form of this identity is user defined.%0A :type identity : st @@ -949,16 +949,31 @@ ask_list +, identity=None ):%0A @@ -1058,32 +1058,66 @@ list%0A )%0A%0A + self.identity = identity%0A%0A def cancel(s @@ -2915,16 +2915,21 @@ ne, -**kwargs +identity=None ):%0A @@ -3218,32 +3218,411 @@ k_list: string%0A%0A + :param identity: Identity of the worker making the request,%0A which is recorded in the ActivityTaskStarted%0A event in the workflow history. This enables%0A diagnostic tracing when problems arise.%0A The form of this identity is user defined.%0A :type identity: string%0A%0A :raises: @@ -3772,16 +3772,61 @@ ask_list +%0A identity = identity or self.identity %0A%0A @@ -3939,24 +3939,54 @@ task_list,%0A + identity=identity%0A )%0A%0A @@ -4152,20 +4152,45 @@ oll( -self.domain, +%0A self.domain,%0A sel @@ -4201,16 +4201,28 @@ sk_list, +%0A polled_ @@ -4234,16 +4234,25 @@ ity_data +%0A )%0A
153f7b28e5b4763dd41f95b4840dcf56d9895393
Update bot.py
code/bot1/bot.py
code/bot1/bot.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tweepy, time, sys # pip install tweepy
import sys
sys.path.append("..")
from course_config import *

argfile = str(sys.argv[1])


# need CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET 
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)

filename=open(argfile,'r')
f=filename.readlines()
filename.close()

for line in f:
    api.update_status(line)
    time.sleep(30) #Tweet every 15 minutes
Python
0.000001
@@ -182,16 +182,78 @@ %5B1%5D)%0A%0A%0A# + go to https://dev.twitter.com/ and register application%0A# you need CO @@ -311,16 +311,17 @@ RET %0A +%0A %0Aauth
0bc98e3cbab019af6f0543c6618387511e354f5f
Add unittests for WhisperFinder
tests/test_finders.py
tests/test_finders.py
import random
import time

from . import TestCase

from graphite_api.intervals import Interval, IntervalSet
from graphite_api.node import LeafNode, BranchNode
from graphite_api.storage import Store


class FinderTest(TestCase):
    def test_custom_finder(self):
        store = Store([DummyFinder()])
        nodes = list(store.find("foo"))
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].path, 'foo')

        nodes = list(store.find('bar.*'))
        self.assertEqual(len(nodes), 10)
        node = nodes[0]
        self.assertEqual(node.path.split('.')[0], 'bar')

        time_info, series = node.fetch(100, 200)
        self.assertEqual(time_info, (100, 200, 10))
        self.assertEqual(len(series), 10)

    def test_multi_finder(self):
        store = Store([DummyFinder(), DummyFinder()])
        nodes = list(store.find("foo"))
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].path, 'foo')

        nodes = list(store.find('bar.*'))
        self.assertEqual(len(nodes), 10)
        node = nodes[0]
        self.assertEqual(node.path.split('.')[0], 'bar')

        time_info, series = node.fetch(100, 200)
        self.assertEqual(time_info, (100, 200, 10))
        self.assertEqual(len(series), 10)


class DummyReader(object):
    __slots__ = ('path',)

    def __init__(self, path):
        self.path = path

    def fetch(self, start_time, end_time):
        npoints = (end_time - start_time) // 10
        return (start_time, end_time, 10), [
            random.choice([None, 1, 2, 3]) for i in range(npoints)
        ]

    def get_intervals(self):
        return IntervalSet([Interval(time.time() - 3600, time.time())])


class DummyFinder(object):
    def find_nodes(self, query):
        if query.pattern == 'foo':
            yield BranchNode('foo')
        elif query.pattern == 'bar.*':
            for i in range(10):
                path = 'bar.{0}'.format(i)
                yield LeafNode(path, DummyReader(path))
Python
0
@@ -1,12 +1,22 @@ +import os%0A import rando @@ -52,17 +52,63 @@ TestCase -%0A +, WHISPER_DIR%0A%0Afrom graphite_api.app import app %0Afrom gr @@ -247,16 +247,57 @@ t Store%0A +from graphite_api._vendor import whisper%0A %0A%0Aclass @@ -2082,8 +2082,1479 @@ (path))%0A +%0A%0Aclass WhisperFinderTest(TestCase):%0A _listdir_counter = 0%0A _original_listdir = os.listdir%0A%0A def test_whisper_finder(self):%0A for db in (%0A ('whisper_finder', 'foo.wsp'),%0A ('whisper_finder', 'foo', 'bar', 'baz.wsp'),%0A ('whisper_finder', 'bar', 'baz', 'baz.wsp'),%0A ):%0A db_path = os.path.join(WHISPER_DIR, *db)%0A if not os.path.exists(os.path.dirname(db_path)):%0A os.makedirs(os.path.dirname(db_path))%0A whisper.create(db_path, %5B(1, 60)%5D)%0A%0A def listdir_mock(d):%0A self._listdir_counter += 1%0A return self._original_listdir(d)%0A%0A try:%0A os.listdir = listdir_mock%0A store = app.config%5B'GRAPHITE'%5D%5B'store'%5D%0A print(%22store = %25s%22 %25 store)%0A%0A self._listdir_counter = 0%0A nodes = store.find('whisper_finder.foo')%0A self.assertEqual(len(list(nodes)), 2)%0A self.assertEqual(self._listdir_counter, 2)%0A%0A self._listdir_counter = 0%0A nodes = store.find('whisper_finder.foo.bar.baz')%0A self.assertEqual(len(list(nodes)), 1)%0A self.assertEqual(self._listdir_counter, 4)%0A%0A self._listdir_counter = 0%0A nodes = store.find('whisper_finder.*.ba?.%7Bbaz,foo%7D')%0A self.assertEqual(len(list(nodes)), 2)%0A self.assertEqual(self._listdir_counter, 6)%0A%0A finally:%0A os.listdir = self._original_listdir%0A
c7f284e710b06af04c0c7142f29a169fff680b4b
Add a slash at the end of form URL
website/urls.py
website/urls.py
##
#    Copyright (C) 2013 Jessica Tallon & Matt Molyneaux
#
#    This file is part of Inboxen.
#
#    Inboxen is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    Inboxen is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##

import os

from django.conf import settings, urls
from django.contrib import admin
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext as _

from two_factor.views import core as twofactor

from website import views
from website.views import error
from website.forms import PlaceHolderPasswordChangeForm

urls.handler400 = error.BadRequest.as_view()
urls.handler403 = error.PermissionDenied.as_view()
urls.handler404 = error.NotFound.as_view()
urls.handler500 = error.ServerError.as_view()

# If you're debugging regex, test it out on http://www.debuggex.com/ first - M
urlpatterns = urls.patterns('',
    urls.url(r'^$', views.Index.as_view(), name='index'),
    urls.url(r'^huh', views.TemplateView.as_view(template_name='huh.html', headline=_('Huh?')), name='huh'),
    urls.url(r'^stats', views.StatsView.as_view(), name='stats'),

    # inbox views
    urls.url(r'^inbox/add/', views.InboxAddView.as_view(), name='inbox-add'),
    urls.url(r'^inbox/edit/(?P<inbox>[a-zA-Z0-9\.]+)@(?P<domain>[a-zA-Z0-9\.]+)', views.InboxEditView.as_view(), name='inbox-edit'),
    urls.url(r'^inbox/attachment/(?P<attachmentid>\d+)/(?P<method>\w+)', views.AttachmentDownloadView.as_view(), name='email-attachment'),
    urls.url(r'^inbox/(?P<inbox>[a-zA-Z0-9\.]+)@(?P<domain>[a-zA-Z0-9\.]+)/email/(?P<id>[a-fA-F0-9]+)', views.EmailView.as_view(), name='email-view'),
    urls.url(r'^inbox/(?P<inbox>[a-zA-Z0-9\.]+)@(?P<domain>[a-zA-Z0-9\.]+)/(?P<page>\d+)', views.SingleInboxView.as_view(), name='single-inbox'),
    urls.url(r'^inbox/(?P<inbox>[a-zA-Z0-9\.]+)@(?P<domain>[a-zA-Z0-9\.]+)/', views.SingleInboxView.as_view(), name='single-inbox'),
    urls.url(r'^inbox/(?P<page>\d+)', views.UnifiedInboxView.as_view(), name='unified-inbox'),
    urls.url(r'^inbox/', views.UnifiedInboxView.as_view(), name='unified-inbox'),

    # form inlines
    urls.url(r'^forms/inbox/edit/(?P<inbox>[a-zA-Z0-9\.]+)@(?P<domain>[a-zA-Z0-9\.]+)', views.FormInboxEditView.as_view(), name='form-inbox-edit'),

    # user views
    urls.url(r'^user/login/', views.LoginView.as_view(), name='user-login'),
    urls.url(r'^user/home/(?P<page>\d+)', views.UserHomeView.as_view(), name='user-home'),
    urls.url(r'^user/home/', views.UserHomeView.as_view(), name='user-home'),
    urls.url(r'^user/search/(?P<q>.*)/(?P<page>\d+)', views.SearchView.as_view(), name='user-search'),
    urls.url(r'^user/search/(?P<q>.*)/', views.SearchView.as_view(), name='user-search'),
    urls.url(r'^user/search/', views.SearchView.as_view(), name='user-search'),
    urls.url(r'^user/searchapi/(?P<q>.*)/', views.SearchApiView.as_view(), name='user-searchapi'),
    urls.url(r'^user/account/security/password', 'django.contrib.auth.views.password_change',
        {
            'template_name': 'user/account/password.html',
            'post_change_redirect': reverse_lazy('user-security'),
            'password_change_form': PlaceHolderPasswordChangeForm,
            'extra_context': {
                'headline': _('Change Password'),
            },
        },
        name='user-password',
    ),
    urls.url(r'^user/account/security/setup', views.TwoFactorSetupView.as_view(), name='user-twofactor-setup'),
    urls.url(r'^user/account/security/backup', views.TwoFactorBackupView.as_view(), name='user-twofactor-backup'),
    urls.url(r'^user/account/security/disable', views.TwoFactorDisableView.as_view(), name='user-twofactor-disable'),
    urls.url(r'^user/account/security/qrcode', twofactor.QRGeneratorView.as_view(), name='user-twofactor-qrcode'),
    urls.url(r'^user/account/security', views.TwoFactorView.as_view(), name='user-security'),
    urls.url(r'^user/account/liberate/download', views.LiberationDownloadView.as_view(), name='user-liberate-get'),
    urls.url(r'^user/account/liberate', views.LiberationView.as_view(), name='user-liberate'),
    urls.url(r'^user/account/delete', views.AccountDeletionView.as_view(), name='user-delete'),
    urls.url(r'^user/account/username', views.UsernameChangeView.as_view(), name='user-username'),
    urls.url(r'^user/account/', views.GeneralSettingsView.as_view(), name='user-settings'),
    urls.url(r'^user/logout/', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='user-logout'),

    # other apps
    urls.url(r'^blog/', urls.include("blog.urls")),
    urls.url(r'^click/', urls.include("redirect.urls")),
    urls.url(r'^help/tickets/', urls.include("tickets.urls")),
    urls.url(r'^help/', urls.include("termsofservice.urls")),
    urls.url(r'^source/', urls.include("source.urls")),
)

if settings.ENABLE_REGISTRATION:
    urlpatterns += urls.patterns('',
        urls.url(r'^user/register/status', views.TemplateView.as_view(template_name='user/register/software-status.html', headline=_('We\'re not stable!')), name='user-status'),
        urls.url(r'^user/register/success', views.TemplateView.as_view(template_name='user/register/success.html', headline=_('Welcome!')), name='user-success'),
        urls.url(r'^user/register/', views.UserRegistrationView.as_view(), name='user-registration'),
    )

if ("INBOXEN_ADMIN_ACCESS" in os.environ and os.environ["INBOXEN_ADMIN_ACCESS"]) or settings.DEBUG:
    admin.autodiscover()
    urlpatterns += urls.patterns('',
        urls.url(r'^admin/', urls.include(admin.site.urls)),
    )
Python
0
@@ -2717,32 +2717,33 @@ %3E%5Ba-zA-Z0-9%5C.%5D+) +/ ', views.FormInb
50e5d5c1993b164a3e6035b1572336c7969413de
Clarify wording based on whippit feedback.
synapse/lib/queue.py
synapse/lib/queue.py
from __future__ import absolute_import, unicode_literals

import threading
import collections

from synapse.eventbus import EventBus


class QueueShutdown(Exception):
    pass


class Queue(EventBus):
    '''
    A simple custom queue to address python Queue() issues.
    '''
    def __init__(self, items=()):
        EventBus.__init__(self)
        self.deq = collections.deque()
        self.lock = threading.Lock()
        self.event = threading.Event()

        self._que_done = False
        self.onfini(self.event.set)

    def __exit__(self, exc, cls, tb):
        self.done()
        self.waitfini()

    def __iter__(self):
        while not self.isfini:
            ret = self.get()
            if ret is None:
                return

            yield ret

    def done(self):
        '''
        Gracefully mark this Queue as done.

        This still allows a Queue consumer to finish consuming it. The
        Queue functions ``get()``, ``slice()`` and ``slices()`` will not
        block when .done() has been called on a Queue.

        Returns:
            None
        '''
        self._que_done = True
        self.event.set()

    def get(self, timeout=None):
        '''
        Get the next item from the queue.

        Args:
            timeout (int): Duration, in seconds, to wait for items to be
                available to the queue before returning.

        Notes:
            This will block if the queue is empty and no timeout value is
            specified, or .done() has not been called on the Queue.

        Examples:
            Get an item and do stuff with it::

                item = q.get(timeout=30)
                dostuff(item)

        Returns:
            Item from the queue, or None if the queue is fini() or timeout
            occurs.
        '''
        while not self.isfini:

            # for perf, try taking a lockless shot at it
            try:
                return self.deq.popleft()
            except IndexError as e:
                pass

            with self.lock:
                try:
                    return self.deq.popleft()
                except IndexError as e:
                    if self._que_done:
                        self.fini()
                        return None

                self.event.clear()

            self.event.wait(timeout=timeout)
            if not self.event.is_set():
                return None

    def put(self, item):
        '''
        Add an item to the queue and wake any consumers waiting on the queue.

        Args:
            item: Item to add to the queue.

        Examples:
            Put a string in a queue::

                q.put('woot')

        Returns:
            None
        '''
        with self.lock:
            self.deq.append(item)
            self.event.set()

    def slice(self, size, timeout=None):
        '''
        Get a slice of the next items from the queue.

        Args:
            size (int): Maximum number of items to get from the queue.
            timeout (int): Duration, in seconds, to wait for items to be
                available to the queue before returning.

        Examples:
            Return 3 items on a 30 second timeout from the queue::

                item = q.slice(3, timeout=30)

        Notes:
            This will block if the queue is empty and no timeout value is
            specified, or .done() has not been called on the Queue.

        Returns:
            list: A list of items from the queue. This will return None on
                fini() or timeout.
        '''
        while not self.isfini:

            with self.lock:

                ret = []
                while len(ret) < size and self.deq:
                    ret.append(self.deq.popleft())

                if ret:
                    return ret

                if self._que_done and not self.deq:
                    self.fini()
                    return None

                self.event.clear()

            self.event.wait(timeout=timeout)
            if not self.event.is_set():
                return None

    def slices(self, size, timeout=None):
        '''
        Yields slices of items from the queue.

        Args:
            size (int): Maximum number of items to yield at a time.
            timeout (int): Duration, in seconds, to wait for items to be
                added to the queue before exiting.

        Examples:
            Yield 2 items at a time with a 1 second time::

                for items in q.slices(2, timeout=1):
                    dostuff(items)

        Notes:
            This will block if the queue is empty and no timeout value is
            specified, or .done() has not been called on the Queue.

        Yields:
            list: This generator yields a list of items.
        '''
        ret = self.slice(size, timeout=timeout)
        while ret is not None:
            yield ret
            ret = self.slice(size, timeout=timeout)
Python
0
@@ -3159,16 +3159,22 @@ Return +up to 3 items @@ -3230,24 +3230,25 @@ item +s = q.slice(3
091f960c8f0415acf543b40cb468d8cf33c0539e
Add name attribute to test series
tests/test_geocode.py
tests/test_geocode.py
from __future__ import absolute_import

from fiona.crs import from_epsg
import pandas as pd
import pandas.util.testing as tm
from shapely.geometry import Point

import geopandas as gpd
import nose

from geopandas import GeoSeries
from geopandas.tools import geocode, reverse_geocode
from geopandas.tools.geocoding import _prepare_geocode_result
from .util import unittest, mock, assert_geoseries_equal


def _skip_if_no_geopy():
    try:
        import geopy
    except ImportError:
        raise nose.SkipTest("Geopy not installed. Skipping tests.")
    except SyntaxError:
        raise nose.SkipTest("Geopy is known to be broken on Python 3.2. "
                            "Skipping tests.")


class ForwardMock(mock.MagicMock):
    """
    Mock the forward geocoding function.
    Returns the passed in address and (p, p+.5) where p increases
    at each call
    """
    def __init__(self, *args, **kwargs):
        super(ForwardMock, self).__init__(*args, **kwargs)
        self._n = 0.0

    def __call__(self, *args, **kwargs):
        self.return_value = args[0], (self._n, self._n + 0.5)
        self._n += 1
        return super(ForwardMock, self).__call__(*args, **kwargs)


class ReverseMock(mock.MagicMock):
    """
    Mock the reverse geocoding function.
    Returns the passed in point and 'address{p}' where p increases
    at each call
    """
    def __init__(self, *args, **kwargs):
        super(ReverseMock, self).__init__(*args, **kwargs)
        self._n = 0

    def __call__(self, *args, **kwargs):
        self.return_value = 'address{0}'.format(self._n), args[0]
        self._n += 1
        return super(ReverseMock, self).__call__(*args, **kwargs)


class TestGeocode(unittest.TestCase):
    def setUp(self):
        _skip_if_no_geopy()
        self.locations = ['260 Broadway, New York, NY',
                          '77 Massachusetts Ave, Cambridge, MA']
        self.points = [Point(-71.0597732, 42.3584308),
                       Point(-77.0365305, 38.8977332)]

    def test_prepare_result(self):
        # Calls _prepare_result with sample results from the geocoder call
        # loop
        p0 = Point(12.3, -45.6)  # Treat these as lat/lon
        p1 = Point(-23.4, 56.7)
        d = {'a': ('address0', p0.coords[0]),
             'b': ('address1', p1.coords[0])}

        df = _prepare_geocode_result(d)
        assert type(df) is gpd.GeoDataFrame
        self.assertEqual(from_epsg(4326), df.crs)
        self.assertEqual(len(df), 2)
        self.assert_('address' in df)

        coords = df.loc['a']['geometry'].coords[0]
        test = p0.coords[0]
        # Output from the df should be lon/lat
        self.assertAlmostEqual(coords[0], test[1])
        self.assertAlmostEqual(coords[1], test[0])

        coords = df.loc['b']['geometry'].coords[0]
        test = p1.coords[0]
        self.assertAlmostEqual(coords[0], test[1])
        self.assertAlmostEqual(coords[1], test[0])

    def test_prepare_result_none(self):
        p0 = Point(12.3, -45.6)  # Treat these as lat/lon
        d = {'a': ('address0', p0.coords[0]),
             'b': (None, None)}

        df = _prepare_geocode_result(d)
        assert type(df) is gpd.GeoDataFrame
        self.assertEqual(from_epsg(4326), df.crs)
        self.assertEqual(len(df), 2)
        self.assert_('address' in df)

        row = df.loc['b']
        self.assertEqual(len(row['geometry'].coords), 0)
        self.assert_(pd.np.isnan(row['address']))

    def test_bad_provider_forward(self):
        with self.assertRaises(ValueError):
            geocode(['cambridge, ma'], 'badprovider')

    def test_bad_provider_reverse(self):
        with self.assertRaises(ValueError):
            reverse_geocode(['cambridge, ma'], 'badprovider')

    def test_forward(self):
        with mock.patch('geopy.geocoders.googlev3.GoogleV3.geocode',
                        ForwardMock()) as m:
            g = geocode(self.locations, provider='googlev3', timeout=2)
            self.assertEqual(len(self.locations), m.call_count)

        n = len(self.locations)
        self.assertIsInstance(g, gpd.GeoDataFrame)
        expected = GeoSeries([Point(float(x)+0.5, float(x)) for x in range(n)],
                             crs=from_epsg(4326))
        assert_geoseries_equal(expected, g['geometry'])
        tm.assert_series_equal(g['address'],
                               pd.Series(self.locations, name='address'))

    def test_reverse(self):
        with mock.patch('geopy.geocoders.googlev3.GoogleV3.reverse',
                        ReverseMock()) as m:
            g = reverse_geocode(self.points, provider='googlev3', timeout=2)
            self.assertEqual(len(self.points), m.call_count)

        self.assertIsInstance(g, gpd.GeoDataFrame)
        expected = GeoSeries(self.points, crs=from_epsg(4326))
        assert_geoseries_equal(expected, g['geometry'])
        tm.assert_series_equal(g['address'],
                               pd.Series('address' + str(x)
                                         for x in range(len(self.points))))
Python
0
@@ -4867,75 +4867,17 @@ -tm.assert_series_equal(g%5B'address'%5D,%0A +address = pd. @@ -4883,16 +4883,17 @@ .Series( +%5B 'address @@ -4907,76 +4907,137 @@ (x) -%0A for x in range(len(self.points))) +for x in range(len(self.points))%5D,%0A name='address')%0A tm.assert_series_equal(g%5B'address'%5D, address )%0A
a318fed2c0bf66b607b319703d03f81a15b4bd57
fix sorting of last 20 entries for min/max calculation
weight/views.py
weight/views.py
""" Part of weight_app :copyright: (c) 2012 by Andreas Madsack. :license: BSD, see LICENSE for more details. """ from flask import Blueprint, Response, request, abort, redirect, flash, \ url_for, render_template from jinja2 import TemplateNotFound from flask.ext.login import login_required, login_user, logout_user, \ current_user from forms import LoginForm, ProfileForm, WeightForm, ScaleForm import datetime from main import db, DbUser weight_pages = Blueprint('weight_app', __name__, template_folder='templates') @weight_pages.route('/favicon.ico') def favicon(): abort(404) @weight_pages.route('/') def index(): return render_template('index.html') @weight_pages.route('/about') def about(): return render_template('about.html') @weight_pages.route("/login", methods=["GET", "POST"]) def login(): form = LoginForm() if form.validate_on_submit(): username = request.form['username'] password = request.form['password'] from models import User u1 = User.query.get(username) if login_user(DbUser(u1.username)): flash("You have logged in", "info") next = request.args.get('next') return redirect(next or url_for('.index')) return render_template('login.html', form=form) @weight_pages.route('/logout') def logout(): logout_user() flash('You have logged out', "info") return(redirect(url_for('.login'))) @weight_pages.route("/profile", methods=["GET", "POST"]) @login_required def profile(): from models import User, Scale u1 = User.query.get(current_user._user) form = ProfileForm(obj=u1) form.default_scale.choices = [(g.name, g.name) for g in Scale.query.order_by('name')] form.default_scale.choices.insert(0, ("", "Select...")) if form.validate_on_submit(): if 'firstname' in request.form: u1.firstname = request.form['firstname'] if 'lastname' in request.form: u1.lastname = request.form['lastname'] if 'email' in request.form: u1.email = request.form['email'] if 'password' in request.form: u1.set_password(request.form['password']) if 'default_scale' in request.form: u1.default_scale_name = request.form['default_scale'] db.session.add(u1) db.session.commit() flash('Data saved', 'info') if u1.default_scale_name: form.default_scale.data = u1.default_scale_name return render_template('profile.html', form=form) @weight_pages.route("/weight/") @weight_pages.route("/weight/<wid>/", methods=["GET","POST"]) @login_required def weight(wid=None): from models import Weight, Scale, User import math if not wid and 'wid' in request.args: wid = request.args.get('wid') if wid: # edit weight elem = Weight.query.get(wid) # get min/max for buttons x = Weight.query.order_by(Weight.wdate).limit(20).all() if x: wmin = int(math.floor(min([i.weight for i in x])) - 1) wmax = int(math.ceil(max([i.weight for i in x])) + 2) else: wmin=70 wmax=75 if elem: # is this weight from logged_in user? or is user admin? 
if elem.user_username == current_user._user or \ current_user._user == 'admin': form = WeightForm(obj=elem) else: # unauthorized abort(401) else: # add form = WeightForm() # get scales list form.scale_name.choices = [(g.name, g.name) for g in Scale.query.order_by('name')] form.scale_name.choices.insert(0, ("", "Select...")) if form.validate_on_submit(): if not elem: elem = Weight(weight=request.form['weight']) if 'weight' in request.form: elem.weight = request.form['weight'] if 'wdate' in request.form: elem.wdate = datetime.datetime.strptime(request.form['wdate'], '%Y-%m-%d') if 'scale_name' in request.form: elem.scale_name = request.form['scale_name'] elem.user_username = current_user._user db.session.add(elem) db.session.commit() flash('Data saved [%s with %s]' % (elem.wdate, elem.weight), 'info') if elem: if elem.scale_name: form.scale_name.data = elem.scale_name else: u1 = User.query.get(current_user._user) if u1.default_scale_name: form.scale_name.data = u1.default_scale_name return render_template('weight_edit.html', form=form, wrange=range(wmin,wmax),) else: # show table of weights page = request.args.get('page', '') if page.isdigit(): page = int(page) else: page = 1 elements = Weight.query.order_by('wdate desc').filter_by( user_username=unicode(current_user._user)).paginate( page, per_page=10) return render_template('weight_list.html', elements=elements.items, paginate=elements, show_comment=False,) @weight_pages.route("/scale/") @weight_pages.route("/scale/<sid>/", methods=["GET","POST"]) @login_required def scale(sid=None): from models import Scale if not sid and 'sid' in request.args: sid = request.args.get('sid') if sid: # edit weight elem = Scale.query.get(sid) if elem: form = ScaleForm(obj=elem) else: # add form = ScaleForm() if form.validate_on_submit(): if not elem: elem = Scale(name=request.form['name']) if 'name' in request.form: elem.name = request.form['name'] if 'owner' in request.form: elem.owner = request.form['owner'] if 'model' in request.form: elem.model = request.form['model'] if 'comment' in request.form: elem.comment = request.form['comment'] db.session.add(elem) db.session.commit() flash('Data saved', 'info') return render_template('scale_edit.html', form=form,) else: # show table of weights page = request.args.get('page', '') if page.isdigit(): page = int(page) else: page = 1 elements = Scale.query.order_by('name').paginate( page, per_page=10) return render_template('scale_list.html', elements=elements.items, paginate=elements,)
Python
0
@@ -3068,16 +3068,23 @@ ht.wdate +.desc() ).limit(
ff89630886fdb931576a4a4781340bc0066c2789
Add tests for Parameters
tests/test_grammar.py
tests/test_grammar.py
import viper.grammar as vg
import viper.lexer as vl

from viper.grammar.languages import (
    SPPF,
    ParseTreeEmpty as Empty,
    ParseTreeChar as Char,
    ParseTreePair as Pair,
    ParseTreeRep as Repeat
)

import pytest


@pytest.mark.parametrize('line,sppf', [
    ('foo',
     SPPF(Char(vl.Name('foo')))),
    ('42',
     SPPF(Char(vl.Number('42')))),
    ('...',
     SPPF(Char(vl.Operator('...')))),
    ('Zilch',
     SPPF(Char(vl.Class('Zilch')))),
    ('True',
     SPPF(Char(vl.Class('True')))),
    ('False',
     SPPF(Char(vl.Class('False')))),
    ('()',
     SPPF(Pair(SPPF(Char(vl.OpenParen())),
               SPPF(Char(vl.CloseParen()))))),
    ('(foo)',
     SPPF(Pair(SPPF(Char(vl.OpenParen())),
               SPPF(Pair(SPPF(Char(vl.Name('foo'))),
                         SPPF(Char(vl.CloseParen()))))))),
])
def test_atom(line: str, sppf: SPPF):
    lexemes = vl.lex_line(line)
    assert sppf == vg.GRAMMAR.parse_rule('atom', lexemes)


@pytest.mark.parametrize('line,sppf', [
    ('foo',
     SPPF(Char(vl.Name('foo')))),
    ('foo.bar',
     SPPF(Pair(SPPF(Char(vl.Name('foo'))),
               SPPF(Pair(SPPF(Char(vl.Period())),
                         SPPF(Char(vl.Name('bar')))))))),
    ('foo.bar.baz',
     SPPF(Pair(SPPF(Char(vl.Name('foo'))),
               SPPF(Pair(SPPF(Pair(SPPF(Char(vl.Period())),
                                   SPPF(Char(vl.Name('bar'))))),
                         SPPF(Pair(SPPF(Char(vl.Period())),
                                   SPPF(Char(vl.Name('baz')))))))))),
    ('foo.bar(baz)',
     SPPF(Pair(SPPF(Char(vl.Name('foo'))),
               SPPF(Pair(SPPF(Pair(SPPF(Char(vl.Period())),
                                   SPPF(Char(vl.Name('bar'))))),
                         SPPF(Pair(SPPF(Char(vl.OpenParen())),
                                   SPPF(Pair(SPPF(Char(vl.Name('baz'))),
                                             SPPF(Char(vl.CloseParen()))))))))))),
    ('foo.bar(baz, qux)',
     SPPF(Pair(SPPF(Char(vl.Name('foo'))),
               SPPF(Pair(SPPF(Pair(SPPF(Char(vl.Period())),
                                   SPPF(Char(vl.Name('bar'))))),
                         SPPF(Pair(SPPF(Char(vl.OpenParen())),
                                   SPPF(Pair(SPPF(Pair(SPPF(Char(vl.Name('baz'))),
                                                       SPPF(Pair(SPPF(Char(vl.Comma())),
                                                                 SPPF(Char(vl.Name('qux'))))))),
                                             SPPF(Char(vl.CloseParen()))))))))))),
    ('2.foo',
     SPPF(Pair(SPPF(Char(vl.Number('2'))),
               SPPF(Pair(SPPF(Char(vl.Period())),
                         SPPF(Char(vl.Name('foo')))))))),
])
def test_expr(line: str, sppf: SPPF):
    lexemes = vl.lex_line(line)
    assert sppf == vg.GRAMMAR.parse_rule('expr', lexemes)


@pytest.mark.parametrize('line,sppf', [
    ('def foo() -> Bar: pass',
     SPPF(Pair(SPPF(Char(vl.Name('def'))),
               SPPF(Pair(SPPF(Char(vl.Name('foo'))),
                         SPPF(Pair(SPPF(Pair(SPPF(Char(vl.OPEN_PAREN)),
                                             SPPF(Char(vl.CLOSE_PAREN)))),
                                   SPPF(Pair(SPPF(Char(vl.ARROW)),
                                             SPPF(Pair(SPPF(Char(vl.Class('Bar'))),
                                                       SPPF(Pair(SPPF(Char(vl.COLON)),
                                                                 SPPF(Pair(SPPF(Char(vl.Name('pass')),
                                                                                Char(vl.Name('pass'))),
                                                                           SPPF(Char(vl.NEWLINE))))))))))))))))),
])
def test_func_def(line: str, sppf: SPPF):
    lexemes = vl.lex_line(line)
    lexemes.append(vl.NEWLINE)
    assert sppf == vg.GRAMMAR.parse_rule('func_def', lexemes)
Python
0
@@ -2825,32 +2825,1069 @@ pr', lexemes)%0A%0A%0A [email protected]('line,sppf', %5B%0A ('()',%0A SPPF(Pair(SPPF(Char(vl.OPEN_PAREN)),%0A SPPF(Char(vl.CLOSE_PAREN))))),%0A ('(foo: Bar)',%0A SPPF(Pair(SPPF(Char(vl.OPEN_PAREN)),%0A SPPF(Pair(SPPF(Pair(SPPF(Char(vl.Name('foo'))),%0A SPPF(Pair(SPPF(Char(vl.COLON)),%0A SPPF(Char(vl.Class('Bar'))))))),%0A SPPF(Char(vl.CLOSE_PAREN))))))),%0A ('(foo bar: Baz)',%0A SPPF(Pair(SPPF(Char(vl.OPEN_PAREN)),%0A SPPF(Pair(SPPF(Pair(SPPF(Char(vl.Name('foo'))),%0A SPPF(Pair(SPPF(Char(vl.Name('bar'))),%0A SPPF(Pair(SPPF(Char(vl.COLON)),%0A SPPF(Char(vl.Class('Baz'))))))))),%0A SPPF(Char(vl.CLOSE_PAREN))))))),%0A%5D)%0Adef test_parameters(line: str, sppf: SPPF):%0A lexemes = vl.lex_line(line)%0A assert sppf == vg.GRAMMAR.parse_rule('parameters', lexemes)%0A%0A%0A @pytest.mark.par
850b3dea5f11d20714677e621a35b5ddb227e8c1
correct TwoColumnLayout compatibility
widgy/models.py
widgy/models.py
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.template import Context
from django.template.loader import render_to_string

from mezzanine.pages.models import Page

from treebeard.mp_tree import MP_Node


class ContentPage(Page):
    root_node = models.ForeignKey('Node', null=True)

    def to_json(self):
        return {
                'title': self.title,
                'root_node': self.root_node.to_json(),
                }

    def render(self):
        return render_to_string('widgy/content_page.html', {
            'title': self.title,
            'root_node': self.root_node,
            })


class Node(MP_Node):
    content_type = models.ForeignKey(ContentType)
    content_id = models.PositiveIntegerField()
    content = generic.GenericForeignKey('content_type', 'content_id')

    def save(self, *args, **kwargs):
        new = not self.id
        super(Node, self).save(*args, **kwargs)
        if new:
            self.content.post_create()

    def to_json(self):
        children = [child.to_json() for child in self.get_children()]
        return {
                'url': self.get_api_url(),
                'content': self.content.to_json(),
                'children': children,
                }

    def render(self):
        return self.content.render()

    @models.permalink
    def get_api_url(self):
        return ('widgy.views.node', (), {'node_pk': self.pk})

    @staticmethod
    def validate_parent_child(parent, child):
        return parent.content.valid_parent_of(child.content) and child.content.valid_child_of(parent.content)


class Content(models.Model):
    _node = generic.GenericRelation(Node,
                                    content_type_field='content_type',
                                    object_id_field='content_id')

    @property
    def node(self):
        return self._node.all()[0]

    class Meta:
        abstract = True

    def valid_child_of(self, content):
        return True

    def valid_parent_of(self, content):
        return False

    def add_child(self, cls, **kwargs):
        obj = cls.objects.create(**kwargs)
        node = self.node.add_child(
                content=obj
                )
        return obj

    @classmethod
    def add_root(cls, **kwargs):
        obj = cls.objects.create(**kwargs)
        node = Node.add_root(
                content=obj
                )
        return obj

    def post_create(self):
        """
        Hook for custom code which needs to be run after creation.  Since the
        `Node` must be created after the widget, any tree based actions have
        to happen after the save event has finished.
        """
        pass

    def to_json(self):
        return {
                'url': self.get_api_url(),
                'model': self._meta.module_name,
                'object_name': self._meta.object_name,
                }

    @models.permalink
    def get_api_url(self):
        return ('widgy.views.content', (), {
            'object_name': self._meta.module_name,
            'object_pk': self.pk})


class Bucket(Content):
    title = models.CharField(max_length=255)

    def valid_parent_of(self, content):
        return True

    def to_json(self):
        json = super(Bucket, self).to_json()
        json['title'] = self.title
        return json

    def render(self):
        return render_to_string('widgy/bucket.html', {
            'nodes': self.node.get_children(),
            })


class TwoColumnLayout(Content):
    """
    On creation, creates a left and right bucket.
    """

    buckets = {
            'left': Bucket,
            'right': Bucket,
            }

    def valid_child_of(self, content):
        return isinstance(content, ContentPage)

    def valid_parent_of(self, content):
        return isinstance(content, Bucket) and len(self.node.get_children()) < 2

    def post_create(self):
        for bucket_title, bucket_class in self.buckets.iteritems():
            bucket = self.add_child(bucket_class,
                                    title=bucket_title,
                                    )

    @property
    def left_bucket(self):
        return [i for i in self.node.get_children() if i.content.title=='left'][0]

    @property
    def right_bucket(self):
        return [i for i in self.node.get_children() if i.content.title=='right'][0]

    def render(self):
        return render_to_string('widgy/two_column_layout.html', {
            'left_bucket': self.left_bucket.content,
            'right_bucket': self.right_bucket.content,
            })


class TextContent(Content):
    content = models.TextField()

    def to_json(self):
        json = super(TextContent, self).to_json()
        json['content'] = self.content
        return json

    def render(self):
        return render_to_string('widgy/text_content.html', {
            'content': self.content,
            })
Python
0
@@ -3867,16 +3867,17 @@ et) and +( len(self @@ -3901,16 +3901,83 @@ n()) %3C 2 + or content.id in %5Bi.content.id for i in self.node.get_children()%5D) %0A%0A de
9e2bdfece7f5cd9e02b15e9fe11c432e10a12418
update api tests
test/test_naarad_api.py
test/test_naarad_api.py
# coding=utf-8
"""
© 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
import os
import sys
import time

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'src')))

from naarad import Naarad
import naarad.naarad_constants as CONSTANTS

naarad_obj = None


def setup_module():
  global naarad_obj
  naarad_obj = Naarad()


def test_naarad_start_stop():
  """
  :return: None
  """
  examples_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'examples')
  global naarad_obj
  test_id_1 = naarad_obj.signal_start(os.path.join(os.path.join(examples_directory, 'conf'),'config-gc'))
  time.sleep(60)
  naarad_obj.signal_stop(test_id_1)
  test_id_2 = naarad_obj.signal_start(os.path.join(os.path.join(examples_directory, 'conf'),'config-gc'))
  time.sleep(60)
  naarad_obj.signal_stop(test_id_2)
  if naarad_obj.analyze(os.path.join(examples_directory,'logs'), 'test_api_temp') != CONSTANTS.OK :
    naarad_obj.get_failed_analyses()
  naarad_obj.get_sla_data(test_id_1)
  naarad_obj.get_stats_data(test_id_1)
  naarad_obj.get_sla_data(test_id_2)
  naarad_obj.get_stats_data(test_id_2)
  if naarad_obj.diff(test_id_1, test_id_2, None) != CONSTANTS.OK:
    print 'Error encountered during diff'
  print 'Please inspect the generated reports manually'

setup_module()
test_naarad_start_stop()
Python
0.000001
@@ -1686,16 +1686,175 @@ g diff'%0A + if naarad_obj.diff_reports_by_location('test_api_temp/0', 'test_api_temp/1', 'test_api_temp/diff_location', None):%0A print 'Error encountered during diff'%0A print
628a1418e64ba45890daee2d85223277f3a11a54
insert asset_specific_data into deck_spawn test
test/test_peerassets.py
test/test_peerassets.py
import pytest
import pypeerassets as pa


@pytest.mark.parametrize("prov", [pa.Explorer, pa.Cryptoid])
def test_find_deck(prov):

    provider = prov(network="tppc")
    deck = pa.find_deck(provider, 'b6a95f94fef093ee9009b04a09ecb9cb5cba20ab6f13fe0926aeb27b8671df43', 1)

    assert deck.__dict__ == {'asset_specific_data': b'',
                             'id': 'b6a95f94fef093ee9009b04a09ecb9cb5cba20ab6f13fe0926aeb27b8671df43',
                             'issue_mode': 4,
                             'issue_time': 1488840533,
                             'issuer': 'msYThv5bf7KjhHT1Cj5D7Y1tofyhq9vhWM',
                             'name': 'hopium_v2',
                             'network': 'peercoin-testnet',
                             'number_of_decimals': 2,
                             'production': True,
                             'testnet': True,
                             'version': 1
                             }


@pytest.mark.parametrize("prov", [pa.Explorer, pa.Cryptoid])
def test_find_cards(prov):

    provider = prov(network="tppc")
    deck = pa.find_deck(provider, 'b6a95f94fef093ee9009b04a09ecb9cb5cba20ab6f13fe0926aeb27b8671df43', 1)
    cards = pa.find_card_transfers(provider, deck)

    assert cards
    assert isinstance(next(cards)[0], pa.CardTransfer)


def test_deck_spawn():

    provider = pa.Explorer(network='tppc')
    inputs = provider.select_inputs("mthKQHpr7zUbMvLcj8GHs33mVcf91DtN6L", 0.02)
    change_address = "mthKQHpr7zUbMvLcj8GHs33mVcf91DtN6L"
    deck = pa.Deck(name="just-testing.", number_of_decimals=1, issue_mode=1,
                   network='tppc', production=True, version=1)

    deck_spawn = pa.deck_spawn(provider, deck, inputs, change_address)

    assert isinstance(deck_spawn, pa.Transaction)


def test_card_transfer():

    provider = pa.Explorer(network='tppc')
    address = "mthKQHpr7zUbMvLcj8GHs33mVcf91DtN6L"
    inputs = provider.select_inputs(address, 0.02)
    change_address = address
    deck = pa.find_deck(provider, '078f41c257642a89ade91e52fd484c141b11eda068435c0e34569a5dfcce7915', 1, True)
    card = pa.CardTransfer(deck=deck,
                           receiver=['n12h8P5LrVXozfhEQEqg8SFUmVKtphBetj',
                                     'n422r6tcJ5eofjsmRvF6TcBMigmGbY5P7E'],
                           amount=[1, 2]
                           )

    card_transfer = pa.card_transfer(provider, card, inputs, change_address)

    assert isinstance(card_transfer, pa.Transaction)
Python
0.000001
@@ -1639,16 +1639,85 @@ ersion=1 +,%0A asset_specific_data='https://talk.peercoin.net/' )%0A%0A d
0085e36491aa14f80c8979ee25c1ad0039bc3f00
Extend the 'test_parse_to_audio_requirement_bug' test case
tests/test_parsers.py
tests/test_parsers.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-

"""Contains test cases for the parsers module."""

from __future__ import unicode_literals

import sys
import os.path
import unittest

PATH = os.path.realpath(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(os.path.dirname(PATH)))

try:
    from youtube_dl_gui.parsers import OptionsParser
except ImportError as error:
    print error
    sys.exit(1)


class TestParse(unittest.TestCase):

    """Test case for OptionsParser parse method."""

    def test_parse_to_audio_requirement_bug(self):
        """Test case for the 'to_audio' requirement."""
        options_dict = {  # Extracted from youtube-dlG settings.json
            'keep_video': False,
            'opts_win_size': (640, 490),
            'open_dl_dir': False,
            'second_video_format': '0',
            'native_hls': False,
            'write_subs': False,
            'workers_number': 3,
            'max_downloads': 0,
            'max_filesize': 0,
            'youtube_dl_debug': False,
            'shutdown': False,
            'selected_format': 'mp3',
            'write_all_subs': False,
            'enable_log': True,
            'embed_thumbnail': True,
            'audio_quality': '9',
            'subs_lang': 'en',
            'audio_format': 'mp3',
            'restrict_filenames': False,
            'min_filesize_unit': '',
            'selected_audio_formats': ['mp3', 'm4a', 'vorbis'],
            'selected_video_formats': ['webm', 'mp4'],
            'save_path': '/home/user/Workplace/test/youtube',
            'output_template': '%(uploader)s/%(title)s.%(ext)s',
            'show_completion_popup': True,
            'locale_name': 'en_US',
            'to_audio': False,
            'confirm_deletion': True,
            'min_filesize': 0,
            'save_path_dirs': ['/home/user/Downloads', '/home/user/Desktop', '/home/user/Videos', '/home/user/Music', '/home/user/Workplace/test/youtube'],
            'sudo_password': '',
            'video_password': '',
            'output_format': 1,
            'embed_subs': False,
            'write_auto_subs': False,
            'video_format': '0',
            'confirm_exit': False,
            'referer': '',
            'proxy': '',
            'add_metadata': False,
            'ignore_errors': False,
            'log_time': True,
            'password': '',
            'playlist_end': 0,
            'write_description': False,
            'retries': 10,
            'cmd_args': '',
            'write_thumbnail': False,
            'playlist_start': 1,
            'nomtime': False,
            'write_info': False,
            'username': '',
            'main_win_size': (930, 560),
            'user_agent': '',
            'max_filesize_unit': '',
            'ignore_config': False,
            'youtubedl_path': '/home/user/.config/youtube-dlg'
        }

        expected_cmd_list = ["--newline",
                             "-x",
                             "--audio-format",
                             "mp3",
                             "--embed-thumbnail",
                             "--audio-quality",
                             "9",
                             "-o",
                             "/home/user/Workplace/test/youtube/%(title)s.%(ext)s"]

        options_parser = OptionsParser()

        self.assertItemsEqual(options_parser.parse(options_dict), expected_cmd_list)


def main():
    unittest.main()


if __name__ == '__main__':
    main()
Python
0.999918
@@ -3422,16 +3422,276 @@ _list)%0A%0A + # Setting 'to_audio' to True should return the same results%0A # since the '-x' flag is already set on audio extraction%0A options_dict%5B%22to_audio%22%5D = True%0A%0A self.assertItemsEqual(options_parser.parse(options_dict), expected_cmd_list)%0A%0A %0Adef mai
3c811b3f0a0fd974cdac2e53dfe0a6cb1ee44e55
update process tests, move to using example_resume.yml
tests/test_process.py
tests/test_process.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Christopher C. Strelioff <[email protected]>
#
# Distributed under terms of the MIT license.

"""test_process.py

Test (non-command line) methods in the process.py module.

"""
import unittest
import os
import tempfile
import shutil
import yaml

from resumepy import process_html
from resumepy import process_pdf
from resumepy import process_text
from resumepy import resumepy_path


class ResumepyProcessTest(unittest.TestCase):
    """Test the elements of process.py"""

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        self.cwd = os.getcwd()

        self.yaml = """contact:
  name: Jane Doe
  address: 555 Beverly Hills Blvd.
  city: Beverly Hills
  state: CA
  zip: 90210
  email: [email protected]
  phone: 555.555.5555
  jobtitle: Astronaut
website:
  label: mysite
  link: 'http://mysite.com'
objective:
  Reclaim Mars from invaders.
work:
  - organization: European Space Agency
    start: Fall 2056
    stop: Spring 2093
    position: Captain
    location: Space
    notes:
      - Destroyed alien battleship
      - Recovered from engine failure
  - organization: McDonald's
    start: July 2012
    stop: January 2014
    position: Assistant to the Regional Manager
    location: Detriot
    notes:
      - Record for the fastest cheeseburger made
      - Employee of the year
      - Helped lead an amazing team
"""
        self.resume = yaml.load(self.yaml)

        # put bad character (for LaTeX) if yaml
        # the `&` in the email field
        self.yaml_bad = """contact:
  name: Jane Doe
  address: 555 Beverly Hills Blvd.
  city: Beverly Hills
  state: CA
  zip: 90210
  email: [email protected] & [email protected]
  phone: 555.555.5555
  jobtitle: Astronaut
objective:
  Reclaim Mars from invaders.
work:
  - organization: European Space Agency
    start: Fall 2056
    stop: Spring 2093
    position: Captain
    location: Space
    notes:
      - Destroyed alien battleship
      - Recovered from engine failure
  - organization: McDonald's
    start: July 2012
    stop: January 2014
    position: Assistant to the Regional Manager
    location: Detriot
    notes:
      - Record for the fastest cheeseburger made
      - Employee of the year
      - Helped lead an amazing team
"""
        self.resume_bad = yaml.load(self.yaml_bad)

        os.chdir(self.tempdir)

    def tearDown(self):
        shutil.rmtree(self.tempdir)
        os.chdir(self.cwd)

    def test_process_html_created(self):
        """* test_process_html_created -- build/html/resume.html created"""
        process_html(self.resume, os.path.join(resumepy_path, 'data', 'templates'))
        self.assertTrue(os.path.exists('build/html/resume.html'))

    def test_process_pdf_bad(self):
        """* test_process_pdf_bad -- bad LaTeX character"""
        with self.assertRaises(Exception):
            process_pdf(self.resume_bad,
                        os.path.join(resumepy_path, 'data', 'templates'),
                        'template.tex')

    def test_process_pdf_created(self):
        """* test_process_pdf_created -- build/pdf/resume.pdf created"""
        process_pdf(self.resume, os.path.join(resumepy_path, 'data', 'templates'),
                    'template.tex')
        self.assertTrue(os.path.exists('build/pdf/resume.pdf'))

    def test_process_text_created(self):
        """* test_process_text_created -- build/pdf/resume.txt created"""
        process_text(self.resume, os.path.join(resumepy_path, 'data', 'templates'))
        self.assertTrue(os.path.exists('build/text/resume.txt'))
Python
0
@@ -666,831 +666,120 @@ -self.yaml = %22%22%22contact:%0A name: Jane Doe%0A address: 555 Beverly Hills Blvd.%0A city: Beverly Hills%0A state: CA%0A zip: 90210%0A email: [email protected]%0A phone: 555.555.5555%0A jobtitle: Astronaut%0Awebsite:%0A label: mysite%0A link: 'http://mysite.com'%0Aobjective:%0A Reclaim Mars from invaders.%0Awork:%0A - organization: European Space Agency%0A start: Fall 2056%0A stop: Spring 2093%0A position: Captain%0A location: Space%0A notes:%0A - Destroyed alien battleship%0A - Recovered from engine failure%0A - organization: McDonald's%0A start: July 2012%0A stop: January 2014%0A position: Assistant to the Regional Manager%0A location: Detriot%0A notes:%0A - Record for the fastest cheeseburger made%0A - Employee of the year%0A - Helped lead an amazing team%0A %22%22%22%0A self.resume = yaml.load(self +os.chdir(self.tempdir)%0A%0A self.yaml = os.path.join(resumepy_path, 'data', 'examples', 'example_resume .y -a ml +' )%0A%0A @@ -789,845 +789,39 @@ -# put bad character (for LaTeX) if yaml%0A # the %60&%60 in the email field%0A self.yaml_bad = %22%22%22contact:%0A name: Jane Doe%0A address: 555 Beverly Hills Blvd.%0A city: Beverly Hills%0A state: CA%0A zip: 90210%0A email: [email protected] & [email protected]%0A phone: 555.555.5555%0A jobtitle: Astronaut%0Aobjective:%0A Reclaim Mars from invaders.%0Awork:%0A - organization: European Space Agency%0A start: Fall 2056%0A stop: Spring 2093%0A position: Captain%0A location: Space%0A notes:%0A - Destroyed alien battleship%0A - Recovered from engine failure%0A - organization: McDonald's%0A start: July 2012%0A stop: January 2014%0A position: Assistant to the Regional Manager%0A location: Detriot%0A notes:%0A - Record for the fastest cheeseburger made%0A - Employee of the year%0A - Helped lead an amazing team%0A %22%22%22%0A +with open(self.yaml) as f:%0A @@ -835,20 +835,16 @@ f.resume -_bad = yaml. @@ -852,53 +852,9 @@ oad( -self.yaml_bad)%0A%0A os.chdir(self.tempdir +f )%0A%0A @@ -984,33 +984,49 @@ lf):%0A %22%22%22 -* +%0A process: test_process_ht @@ -1039,42 +1039,19 @@ ated - -- build/html/resume.html created +()%0A %22%22%22%0A @@ -1261,33 +1261,49 @@ lf):%0A %22%22%22 -* +%0A process: test_process_pd @@ -1311,31 +1311,19 @@ _bad - -- bad LaTeX character +()%0A %22%22%22%0A @@ -1564,33 +1564,49 @@ lf):%0A %22%22%22 -* +%0A process: test_process_pd @@ -1618,40 +1618,19 @@ ated - -- build/pdf/resume.pdf created +()%0A %22%22%22%0A @@ -1885,17 +1885,33 @@ %22%22%22 -* +%0A process: test_pr @@ -1932,40 +1932,19 @@ ated - -- build/pdf/resume.txt created +()%0A %22%22%22%0A @@ -2038,32 +2038,69 @@ a', 'templates') +,%0A 'template.txt' )%0A self.a
39db6c67fe4fc21abe23d4f801b559a0610b07ef
fix kwarg/arg order for python 2.4
conary/verify.py
conary/verify.py
#
# Copyright (c) 2004-2009 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#

"""
Provides the output for the "conary verify" command
"""

from conary import showchangeset, trove
from conary import versions
from conary import conaryclient
from conary.conaryclient import cmdline
from conary.deps import deps
from conary.lib import log
from conary.local import defaultmap, update
from conary.repository import changeset
from conary import errors

def LocalChangeSetCommand(db, cfg, item, changeSetPath = None):
    cs = _verify([ item ], db, cfg, display = False)

    if not [ x for x in cs.iterNewTroveList() ]:
        log.error("there have been no local changes")
    else:
        cs.writeToFile(changeSetPath)

    return

def verify(troveNameList, db, cfg, all=False, changesetPath = None,
           forceHashCheck = False):
    cs = _verify(troveNameList, db, cfg, all=all,
                 forceHashCheck = forceHashCheck,
                 display = (changesetPath is None))

    if changesetPath:
        # verify doesn't display changes in collections because those, by
        # definition, match the database
        for trvCs in list(cs.iterNewTroveList()):
            if trove.troveIsCollection(trvCs.getName()):
                cs.delNewTrove(*trvCs.getNewNameVersionFlavor())

        cs.writeToFile(changesetPath)

def _verify(troveNameList, db, cfg, all=False, forceHashCheck = False,
            display = False):
    if display:
        # save memory by not keeping the changeset around; this is
        # particularly useful when all=True
        cs = None
    else:
        cs = changeset.ReadOnlyChangeSet()

    troveNames = [ cmdline.parseTroveSpec(x) for x in troveNameList ]
    if all:
        assert(not troveNameList)
        client = conaryclient.ConaryClient(cfg)
        troveInfo = client.getUpdateItemList()
        troveInfo.sort()
    else:
        troveInfo = []

        for (troveName, versionStr, flavor) in troveNames:
            try:
                troveInfo += db.findTrove(None,
                                          (troveName, versionStr, flavor))
            except errors.TroveNotFound:
                if versionStr:
                    if flavor is not None and not flavor.isEmpty():
                        flavorStr = deps.formatFlavor(flavor)
                        log.error("version %s with flavor '%s' of trove %s is "
                                  "not installed", versionStr,
                                  flavorStr, troveName)
                    else:
                        log.error("version %s of trove %s is not installed",
                                  versionStr, troveName)
                elif flavor is not None and not flavor.isEmpty():
                    flavorStr = deps.formatFlavor(flavor)
                    log.error("flavor '%s' of trove %s is not installed",
                              flavorStr, troveName)
                else:
                    log.error("trove %s is not installed", troveName)

    defaultMap = defaultmap.DefaultMap(db, troveInfo)
    troves = db.getTroves(troveInfo, withDeps = False, withFileObjects = True,
                          pristine = False)

    seen = set()
    for trv in troves:
        newCs = _verifyTrove(trv, db, cfg, defaultMap, display,
                             forceHashCheck = forceHashCheck,
                             duplicateFilterSet = seen)
        if cs and newCs:
            cs.merge(newCs)
            cs.addPrimaryTrove(trv.getName(),
                               trv.getVersion().createShadow(
                                   versions.LocalLabel()),
                               trv.getFlavor())

    return cs

def _verifyTroveList(db, troveList, cfg, display = True,
                     forceHashCheck = False):
    log.info('Verifying %s' % " ".join(x[1].getName() for x in troveList))
    changedTroves = set()

    try:
        result = update.buildLocalChanges(db, troveList, root = cfg.root,
                                          #withFileContents=False,
                                          forceSha1=forceHashCheck,
                                          ignoreTransient=True,
                                          updateContainers=True)
        if not result: return
        cs = result[0]
        changed = False
        for (changed, trv) in result[1]:
            if changed:
                changedTroves.add(trv.getNameVersionFlavor())
    except OSError, err:
        if err.errno == 13:
            log.warning("Permission denied creating local changeset for"
                        " %s " % str([ x[0].getName() for x in l ]))
        return

    trovesChanged = [ x.getNameVersionFlavor() for (changed, x) in result[1]
                      if changed ]
    if not trovesChanged:
        return None

    if display and trovesChanged:
        troveSpecs = [ '%s=%s[%s]' % x for x in trovesChanged ]
        showchangeset.displayChangeSet(db, cs, troveSpecs, cfg,
                                       ls=True, showChanges=True,
                                       asJob=True)

    return cs

def _verifyTrove(trv, db, cfg, defaultMap, display = True,
                 forceHashCheck = False, duplicateFilterSet = None,
                 allMachineChanges = False):
    collections = []
    if trove.troveIsCollection(trv.getName()):
        collections.append(trv)

    cs = changeset.ReadOnlyChangeSet()
    verifyList = []

    queue = [ trv ]

    duplicateFilterSet.add(trv.getNameVersionFlavor())

    for thisTrv in db.walkTroveSet(trv):
        if verifyList and (verifyList[-1][0].getName().split(':')[0] !=
                           thisTrv.getName().split(':')[0]):
            # display output as soon as we're done processing one named
            # trove; this works because walkTroveSet is guaranteed to
            # be depth first
            subCs = _verifyTroveList(db, verifyList, cfg, display = display)
            if subCs:
                cs.merge(subCs)

            verifyList = []

        if allMachineChanges:
            origTrv = db.getTrove(*thisTrv.getNameVersionFlavor(),
                                  pristine = True)
        else:
            origTrv = thisTrv

        ver = thisTrv.getVersion().createShadow(versions.LocalLabel())
        verifyList.append((thisTrv, thisTrv, ver, update.UpdateFlags()))

    subCs = _verifyTroveList(db, verifyList, cfg, display = display,
                             forceHashCheck = forceHashCheck)
    if subCs:
        cs.merge(subCs)

    return cs
Python
0.004628
@@ -6559,39 +6559,23 @@ ove( -*thisTrv.getNameVersionFlavor() +pristine = True ,%0A @@ -6606,31 +6606,47 @@ -pristine = True +*thisTrv.getNameVersionFlavor() )%0A
ee1532cc226987904666eeb0bda61445455d04e3
Increase test timeout
tests/test_run_app.py
tests/test_run_app.py
import ssl
from unittest import mock

from aiohttp import web


def test_run_app_http(loop, mocker):
    mocker.spy(loop, 'create_server')
    loop.call_later(0.02, loop.stop)

    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')

    web.run_app(app, print=lambda *args: None)

    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080,
                                          ssl=None, backlog=128)
    app.startup.assert_called_once_with()


def test_run_app_https(loop, mocker):
    mocker.spy(loop, 'create_server')
    loop.call_later(0.02, loop.stop)

    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')

    ssl_context = ssl.create_default_context()

    web.run_app(app, ssl_context=ssl_context, print=lambda *args: None)

    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8443,
                                          ssl=ssl_context, backlog=128)
    app.startup.assert_called_once_with()


def test_run_app_nondefault_host_port(loop, unused_port, mocker):
    port = unused_port()
    host = 'localhost'

    mocker.spy(loop, 'create_server')
    loop.call_later(0.02, loop.stop)

    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')

    web.run_app(app, host=host, port=port, print=lambda *args: None)

    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, host, port,
                                          ssl=None, backlog=128)
    app.startup.assert_called_once_with()


def test_run_app_custom_backlog(loop, mocker):
    mocker.spy(loop, 'create_server')
    loop.call_later(0.02, loop.stop)

    app = web.Application(loop=loop)
    mocker.spy(app, 'startup')

    web.run_app(app, backlog=10, print=lambda *args: None)

    assert loop.is_closed()
    loop.create_server.assert_called_with(mock.ANY, '0.0.0.0', 8080,
                                          ssl=None, backlog=10)
    app.startup.assert_called_once_with()
Python
0.000001
@@ -147,33 +147,33 @@ p.call_later(0.0 -2 +5 , loop.stop)%0A%0A @@ -584,33 +584,33 @@ p.call_later(0.0 -2 +5 , loop.stop)%0A%0A @@ -1178,33 +1178,33 @@ p.call_later(0.0 -2 +5 , loop.stop)%0A%0A @@ -1649,17 +1649,17 @@ ater(0.0 -2 +5 , loop.s
c4b3a09886b146c8e33fec3871b0459e838a2421
test `get_fixture` helper for doctests
testing/test_doctest.py
testing/test_doctest.py
from _pytest.doctest import DoctestModule, DoctestTextfile
import py, pytest


class TestDoctests:

    def test_collect_testtextfile(self, testdir):
        w = testdir.maketxtfile(whatever="")
        checkfile = testdir.maketxtfile(test_something="""
            alskdjalsdk
            >>> i = 5
            >>> i-1
            4
        """)
        for x in (testdir.tmpdir, checkfile):
            #print "checking that %s returns custom items" % (x,)
            items, reprec = testdir.inline_genitems(x)
            assert len(items) == 1
            assert isinstance(items[0], DoctestTextfile)
        items, reprec = testdir.inline_genitems(w)
        assert len(items) == 1

    def test_collect_module(self, testdir):
        path = testdir.makepyfile(whatever="#")
        for p in (path, testdir.tmpdir):
            items, reprec = testdir.inline_genitems(p, '--doctest-modules')
            assert len(items) == 1
            assert isinstance(items[0], DoctestModule)

    def test_simple_doctestfile(self, testdir):
        p = testdir.maketxtfile(test_doc="""
            >>> x = 1
            >>> x == 1
            False
        """)
        reprec = testdir.inline_run(p, )
        reprec.assertoutcome(failed=1)

    def test_new_pattern(self, testdir):
        p = testdir.maketxtfile(xdoc ="""
            >>> x = 1
            >>> x == 1
            False
        """)
        reprec = testdir.inline_run(p, "--doctest-glob=x*.txt")
        reprec.assertoutcome(failed=1)

    def test_doctest_unexpected_exception(self, testdir):
        p = testdir.maketxtfile("""
            >>> i = 0
            >>> 0 / i
            2
        """)
        result = testdir.runpytest("--doctest-modules")
        result.stdout.fnmatch_lines([
            "*unexpected_exception*",
            "*>>> i = 0*",
            "*>>> 0 / i*",
            "*UNEXPECTED*ZeroDivision*",
        ])

    def test_doctest_unex_importerror(self, testdir):
        testdir.tmpdir.join("hello.py").write(py.code.Source("""
            import asdalsdkjaslkdjasd
        """))
        p = testdir.maketxtfile("""
            >>> import hello
            >>>
        """)
        result = testdir.runpytest("--doctest-modules")
        result.stdout.fnmatch_lines([
            "*>>> import hello",
            "*UNEXPECTED*ImportError*",
            "*import asdals*",
        ])

    def test_doctestmodule(self, testdir):
        p = testdir.makepyfile("""
            '''
            >>> x = 1
            >>> x == 1
            False
            '''
        """)
        reprec = testdir.inline_run(p, "--doctest-modules")
        reprec.assertoutcome(failed=1)

    def test_doctestmodule_external_and_issue116(self, testdir):
        p = testdir.mkpydir("hello")
        p.join("__init__.py").write(py.code.Source("""
            def somefunc():
                '''
                >>> i = 0
                >>> i + 1
                2
                '''
        """))
        result = testdir.runpytest(p, "--doctest-modules")
        result.stdout.fnmatch_lines([
            '004 *>>> i = 0',
            '005 *>>> i + 1',
            '*Expected:',
            "* 2",
            "*Got:",
            "* 1",
            "*:5: DocTestFailure"
        ])

    def test_txtfile_failing(self, testdir):
        p = testdir.maketxtfile("""
            >>> i = 0
            >>> i + 1
            2
        """)
        result = testdir.runpytest(p, "-s")
        result.stdout.fnmatch_lines([
            '001 >>> i = 0',
            '002 >>> i + 1',
            'Expected:',
            " 2",
            "Got:",
            " 1",
            "*test_txtfile_failing.txt:2: DocTestFailure"
        ])
Python
0
@@ -3732,28 +3732,320 @@ DocTestFailure%22%0A %5D)%0A +%0A def test_txtfile_with_fixtures(self, testdir, tmpdir):%0A p = testdir.maketxtfile(%22%22%22%0A %3E%3E%3E dir = get_fixture('tmpdir')%0A %3E%3E%3E type(dir).__name__%0A 'LocalPath'%0A %22%22%22)%0A reprec = testdir.inline_run(p, )%0A reprec.assertoutcome(passed=1)%0A
0c231b10fcff5ea970998a9313a2ce25ecc71d7c
Add test for invalid storage string
tests/test_storage.py
tests/test_storage.py
import time

import pytest

from limits.storage import (
    MemcachedStorage,
    MemoryStorage,
    MongoDBStorage,
    RedisClusterStorage,
    RedisSentinelStorage,
    RedisStorage,
    Storage,
    storage_from_string,
)
from limits.strategies import MovingWindowRateLimiter


class TestBaseStorage:
    @pytest.mark.parametrize(
        "uri, args, expected_instance, fixture",
        [
            ("memory://", {}, MemoryStorage, None),
            (
                "redis://localhost:7379",
                {},
                RedisStorage,
                pytest.lazy_fixture("redis_basic"),
            ),
            (
                "redis+unix:///tmp/limits.redis.sock",
                {},
                RedisStorage,
                pytest.lazy_fixture("redis_uds"),
            ),
            (
                "redis+unix://:password/tmp/limits.redis.sock",
                {},
                RedisStorage,
                pytest.lazy_fixture("redis_uds"),
            ),
            (
                "memcached://localhost:22122",
                {},
                MemcachedStorage,
                pytest.lazy_fixture("memcached"),
            ),
            (
                "memcached://localhost:22122,localhost:22123",
                {},
                MemcachedStorage,
                pytest.lazy_fixture("memcached_cluster"),
            ),
            (
                "memcached:///tmp/limits.memcached.sock",
                {},
                MemcachedStorage,
                pytest.lazy_fixture("memcached_uds"),
            ),
            (
                "redis+sentinel://localhost:26379",
                {"service_name": "localhost-redis-sentinel"},
                RedisSentinelStorage,
                pytest.lazy_fixture("redis_sentinel"),
            ),
            (
                "redis+sentinel://localhost:26379/localhost-redis-sentinel",
                {},
                RedisSentinelStorage,
                pytest.lazy_fixture("redis_sentinel"),
            ),
            (
                "redis+sentinel://:sekret@localhost:26379/localhost-redis-sentinel",
                {},
                RedisSentinelStorage,
                pytest.lazy_fixture("redis_sentinel_auth"),
            ),
            (
                "redis+cluster://localhost:7001/",
                {},
                RedisClusterStorage,
                pytest.lazy_fixture("redis_cluster"),
            ),
            (
                "mongodb://localhost:37017/",
                {},
                MongoDBStorage,
                pytest.lazy_fixture("mongodb"),
            ),
        ],
    )
    def test_storage_string(self, uri, args, expected_instance, fixture):
        assert isinstance(storage_from_string(uri, **args), expected_instance)

    @pytest.mark.parametrize(
        "uri, args, fixture",
        [
            ("memory://", {}, None),
            ("redis://localhost:7379", {}, pytest.lazy_fixture("redis_basic")),
            (
                "redis+unix:///tmp/limits.redis.sock",
                {},
                pytest.lazy_fixture("redis_uds"),
            ),
            (
                "redis+unix://:password/tmp/limits.redis.sock",
                {},
                pytest.lazy_fixture("redis_uds"),
            ),
            ("memcached://localhost:22122", {}, pytest.lazy_fixture("memcached")),
            (
                "memcached://localhost:22122,localhost:22123",
                {},
                pytest.lazy_fixture("memcached_cluster"),
            ),
            (
                "memcached:///tmp/limits.memcached.sock",
                {},
                pytest.lazy_fixture("memcached_uds"),
            ),
            (
                "redis+sentinel://localhost:26379",
                {"service_name": "localhost-redis-sentinel"},
                pytest.lazy_fixture("redis_sentinel"),
            ),
            (
                "redis+sentinel://localhost:26379/localhost-redis-sentinel",
                {},
                pytest.lazy_fixture("redis_sentinel"),
            ),
            (
                "redis+sentinel://:sekret@localhost:36379/localhost-redis-sentinel",
                {},
                pytest.lazy_fixture("redis_sentinel_auth"),
            ),
            (
                "redis+cluster://localhost:7001/",
                {},
                pytest.lazy_fixture("redis_cluster"),
            ),
            ("mongodb://localhost:37017/", {}, pytest.lazy_fixture("mongodb")),
        ],
    )
    def test_storage_check(self, uri, args, fixture):
        assert storage_from_string(uri, **args).check()

    def test_pluggable_storage_no_moving_window(self):
        class MyStorage(Storage):
            STORAGE_SCHEME = ["mystorage"]

            def incr(self, key, expiry, elastic_expiry=False):
                return

            def get(self, key):
                return 0

            def get_expiry(self, key):
                return time.time()

            def reset(self):
                return

            def check(self):
                return

            def clear(self):
                return

        storage = storage_from_string("mystorage://")
        assert isinstance(storage, MyStorage)
        with pytest.raises(NotImplementedError):
            MovingWindowRateLimiter(storage)

    def test_pluggable_storage_moving_window(self):
        class MyStorage(Storage):
            STORAGE_SCHEME = ["mystorage"]

            def incr(self, key, expiry, elastic_expiry=False):
                return

            def get(self, key):
                return 0

            def get_expiry(self, key):
                return time.time()

            def reset(self):
                return

            def check(self):
                return

            def clear(self):
                return

            def acquire_entry(self, *a, **k):
                return True

            def get_moving_window(self, *a, **k):
                return (time.time(), 1)

        storage = storage_from_string("mystorage://")
        assert isinstance(storage, MyStorage)
        MovingWindowRateLimiter(storage)
Python
0.00001
@@ -21,16 +21,61 @@ pytest%0A%0A +from limits.errors import ConfigurationError%0A from lim @@ -2834,24 +2834,289 @@ _instance)%0A%0A + @pytest.mark.parametrize(%0A %22uri, args%22, %5B(%22blah://%22, %7B%7D), (%22redis+sentinel://localhost:26379%22, %7B%7D)%5D%0A )%0A def test_invalid_storage_string(self, uri, args):%0A with pytest.raises(ConfigurationError):%0A storage_from_string(uri, **args)%0A%0A @pytest.
c94ee5d045179fb56c172a3f4ebc7d28dfae9585
add INSTALLED_APP
myapp/settings.py
myapp/settings.py
""" Django settings for myapp project. Generated by 'django-admin startproject' using Django 1.11.6. For more information on this file, see https://docs.djangoproject.com/en/1.11/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.11/ref/settings/ """ import os from myapp import settings_local SECRET_KEY = settings_local.SECRET_KEY # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) TEMPLATE_DIR = os.path.join(BASE_DIR, "templates/") # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False ALLOWED_HOSTS = ["*"] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'myapp.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [TEMPLATE_DIR], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'myapp.wsgi.application' # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases if 'RDS_DB_NAME' in os.environ: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': os.environ['RDS_DB_NAME'], 'USER': os.environ['RDS_USERNAME'], 'PASSWORD': os.environ['RDS_PASSWORD'], 'HOST': os.environ['RDS_HOSTNAME'], 'PORT': os.environ['RDS_PORT'], } } else: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': "", "USER": "", "PASSWORD": "", "HOST": "localhost", 'PORT': '5432', } } # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.11/topics/i18n/ LANGUAGE_CODE = 'ja' TIME_ZONE = 'Asia/Tokyo' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' # STATICFILES_DIRS = [ # os.path.join(BASE_DIR, 'static'), # ] # 本番環境用 Linux 絶対パス ローカルではいらない STATIC_ROOT = os.path.join(BASE_DIR, "static")
Python
0.000002
@@ -1056,16 +1056,28 @@ files',%0A + %22myapp%22%0A %5D%0A%0AMIDDL @@ -3692,37 +3692,8 @@ # %5D%0A -# %E6%9C%AC%E7%95%AA%E7%92%B0%E5%A2%83%E7%94%A8%E3%80%80Linux %E7%B5%B6%E5%AF%BE%E3%83%91%E3%82%B9%E3%80%80%E3%83%AD%E3%83%BC%E3%82%AB%E3%83%AB%E3%81%A7%E3%81%AF%E3%81%84%E3%82%89%E3%81%AA%E3%81%84 %0ASTA
e5b3de7ef4b068d1ce01e8fc9aec59b9182d8662
fix error in wizard tests
tests/test_wizards.py
tests/test_wizards.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

import sys
from distutils.version import LooseVersion

import cms

from .base import BaseTest

try:
    from unittest import skipIf
except ImportError:
    from unittest2 import skipIf


class WizardTest(BaseTest):

    def setUp(self):
        try:
            from cms.wizards.wizard_pool import wizard_pool
            delete = [
                'djangocms_blog',
                'djangocms_blog.cms_wizards',
            ]
            for module in delete:
                if module in sys.modules:
                    del sys.modules[module]
            wizard_pool._reset()
        except ImportError:
            # Not in django CMS 3.2+, no cleanup needed
            pass

    @skipIf(LooseVersion(cms.__version__) < LooseVersion('3.2'),
            reason='Wizards not available for django CMS < 3.2')
    def test_wizard(self):
        """
        Test that Blog wizard is present and contains all items
        """
        from cms.wizards.wizard_pool import wizard_pool
        self.get_pages()
        titles = [entry.title for entry in wizard_pool.get_entries()]
        self.assertTrue('New Blog' in titles)
        self.assertTrue('New Article' in titles)

    @skipIf(LooseVersion(cms.__version__) < LooseVersion('3.2'),
            reason='Wizards not available for django CMS < 3.2')
    def test_wizard_init(self):
        from cms.utils.permissions import current_user
        from cms.wizards.wizard_pool import wizard_pool
        from djangocms_blog.models import Post
        self.get_pages()

        with current_user(self.user_staff):
            wizs = [entry for entry in wizard_pool.get_entries() if entry.model == Post]
            for index, wiz in enumerate(wizs):
                app_config = self.app_config_1.pk if wiz.title == 'New Blog' else self.app_config_2.pk
                form = wiz.form()
                self.assertTrue(form.initial.get('app_config', False), app_config)
                self.assertTrue(form.fields['app_config'].widget.attrs['disabled'])

                form = wiz.form(data={
                    '1-title': 'title{0}'.format(index),
                    '1-abstract': 'abstract{0}'.format(index),
                    '1-categories': [self.category_1.pk],
                }, prefix=1)
                self.assertEqual(form.default_appconfig, app_config)
                self.assertTrue(form.is_valid())
                self.assertTrue(form.cleaned_data['app_config'], app_config)
                instance = form.save()
                self.assertEqual(instance.author, self.user_staff)

            with self.settings(BLOG_AUTHOR_DEFAULT='normal'):
                for index, wiz in enumerate(wizs):
                    app_config = self.app_config_1.pk if wiz.title == 'New Blog' else self.app_config_2.pk
                    form = wiz.form(data={
                        '1-title': 'title-2{0}'.format(index),
                        '1-abstract': 'abstract-2{0}'.format(index),
                        '1-categories': [self.category_1.pk],
                    }, prefix=1)
                    self.assertEqual(form.default_appconfig, app_config)
                    self.assertTrue(form.is_valid())
                    self.assertTrue(form.cleaned_data['app_config'], app_config)
                    instance = form.save()
                    self.assertEqual(instance.author, self.user_normal)

    def test_wizard_import(self):
        # The following import should not fail in any django CMS version
        from djangocms_blog import cms_wizards  # NOQA
Python
0.000001
@@ -2480,36 +2480,37 @@ self.assert -True +Equal (form.cleaned_da @@ -2517,32 +2517,35 @@ ta%5B'app_config'%5D +.pk , app_config)%0A @@ -3292,28 +3292,29 @@ self.assert -True +Equal (form.cleane @@ -3333,16 +3333,19 @@ config'%5D +.pk , app_co
d8b322439a5fdaf31ec52dc7c2a2ff9e18c12316
solve import error on install magpie
magpie/__init__.py
magpie/__init__.py
# -*- coding: utf-8 -*-
from magpie import constants
import logging
import sys

LOGGER = logging.getLogger(__name__)


def includeme(config):
    LOGGER.info("Adding MAGPIE_MODULE_DIR='{}' to path.".format(constants.MAGPIE_MODULE_DIR))
    sys.path.insert(0, constants.MAGPIE_MODULE_DIR)
    # include magpie components (all the file which define includeme)
    config.include('cornice')
    config.include('cornice_swagger')
    config.include('pyramid_chameleon')
    config.include('pyramid_mako')
    config.include('magpie.definitions')
    config.include('magpie.api')
    config.include('magpie.db')
    config.include('magpie.ui')
Python
0.000006
@@ -21,37 +21,8 @@ -*-%0A -from magpie import constants%0A impo @@ -82,16 +82,16 @@ me__)%0A%0A%0A - def incl @@ -105,16 +105,162 @@ onfig):%0A + # import needs to be here, otherwise ImportError happens during setup.py install (modules not yet installed)%0A from magpie import constants%0A LOGG
5dfcd4ea8633a6bc658cccd654fce2cc7c217269
Add helpful message to end of installer.
nbdiff/install.py
nbdiff/install.py
from . import __path__ as NBDIFF_PATH
import subprocess
import re
import os
import shutil
import sys


def install():
    profile_name = 'nbdiff'
    create_cmd = ['ipython', 'profile', 'create', profile_name]
    message = subprocess.Popen(create_cmd, stderr=subprocess.PIPE)
    message_str = message.stderr.read()
    re_msgline = \
        re.compile(r'^.ProfileCre.*u\'(?P<profilepath>.*)ipython_config\.py.$')
    profile_paths = [
        re_msgline.match(line).groups()[0]
        for line in message_str.splitlines()
        if re_msgline.match(line)
    ]
    if len(profile_paths) == 0:
        sys.stderr.write("It looks like creating the ipython profile "
                         "didn't work. Maybe you've already installed it?\n")
        sys.exit(-1)
    profile_path = profile_paths[0]
    extension_copy_from = os.path.join(NBDIFF_PATH[0], 'extension/static')
    extension_copy_dest = os.path.join(profile_path, 'static')
    print extension_copy_from
    print extension_copy_dest
    shutil.copytree(extension_copy_from, extension_copy_dest)
    print profile_path
Python
0
@@ -1,12 +1,50 @@ +from __future__ import print_function%0A from . impor @@ -654,16 +654,29 @@ r.write( +%0A %22It look @@ -776,16 +776,25 @@ d it?%5Cn%22 +%0A )%0A @@ -987,68 +987,8 @@ c')%0A - print extension_copy_from%0A print extension_copy_dest%0A @@ -1054,22 +1054,70 @@ print - profile_path +(%22Finished installing NBDiff extension in profile %60nbdiff%60.%22) %0A
390fa07c191d79290b1ef83c268f38431f68093a
Fix import in test client.
tests/clients/simple.py
tests/clients/simple.py
# -*- coding: utf-8 -*-


from base import jsonrpyc


class MyClass(object):

    def one(self):
        return 1

    def twice(self, n):
        return n * 2

    def arglen(self, *args, **kwargs):
        return len(args) + len(kwargs)


if __name__ == "__main__":
    rpc = jsonrpyc.RPC(MyClass())
Python
0
@@ -19,16 +19,111 @@ 8 -*-%0A%0A%0A +import os%0Aimport sys%0A%0Abase = os.path.dirname(os.path.abspath(__file__))%0Asys.path.append(base)%0A%0A from bas
3b706a6fb345d1b6c33c3ab8d438949fc35887d3
NotImplementedException should be called NotImplementedError
nbviewer/index.py
nbviewer/index.py
#-----------------------------------------------------------------------------
# Copyright (C) 2014 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

'''
Classes for Indexing Notebooks
'''

from tornado.log import app_log

import uuid

from elasticsearch import Elasticsearch

class Indexer():
    def index_notebook(self, notebook_url, notebook_contents):
        raise NotImplementedException("index_notebook not implemented")

class NoSearch(Indexer):
    def __init__(self):
        pass

    def index_notebook(self, notebook_url, notebook_contents):
        app_log.debug("Totally not indexing \"{}\"".format(notebook_url))
        pass

class ElasticSearch(Indexer):
    def __init__(self, host="127.0.0.1", port=9200):
        self.elasticsearch = Elasticsearch([{'host':host, 'port':port}])

    def index_notebook(self, notebook_url, notebook_contents, public=False):
        notebook_url = notebook_url.encode('utf-8')
        notebook_id = uuid.uuid5(uuid.NAMESPACE_URL, notebook_url)

        # Notebooks API Model
        # https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#notebooks-api
        body = {
            "content": notebook_contents,
            "public": public
        }

        resp = self.elasticsearch.index(index='notebooks', doc_type='ipynb',
                                        body=body, id=notebook_id.hex)

        if(resp['created']):
            app_log.info("Created new indexed notebook={}, public={}".format(notebook_url, public))
        else:
            app_log.info("Indexing old notebook={}, public={}".format(notebook_url, public, resp))
Python
0.998718
@@ -578,16 +578,12 @@ tedE -xception +rror (%22in
7c6754a439f8fa1c7ebe5c12b9c51651c02c35c4
Modify post parameters; add global editor config
manage/new_post.py
manage/new_post.py
import datetime
import json
import os.path
import re
import shutil

from pypinyin import lazy_pinyin

from common import file
from manage import get_excerpt


def get_name(nameinput):
    name_raw = re.sub("[\s+\.\!\/_,$%^*(+\"\']+|[+——!,。?、~@#¥%……&*()]+", "", nameinput)
    namelist = lazy_pinyin(name_raw)
    name = ""
    for item in namelist:
        name = name + "-" + item
    return name[1:len(name)]


def new_post(name, title, filename, editor):
    if len(name) == 0:
        name = get_name(title)
    if os.path.isfile(filename):
        shutil.copyfile(filename, "./document/{0}.md".format(name))
    else:
        if editor is not None:
            os.system("{0} ./document/{1}.md".format(editor, name))
    excerpt = get_excerpt.get_excerpt("./document/{0}.md".format(name))
    post_info = {"name": name,
                 "title": title,
                 "excerpt": excerpt,
                 "time": str(datetime.date.today())}
    if os.path.isfile("./config/page.json"):
        page_list = json.loads(file.read_file("./config/page.json"))
    else:
        page_list = list()
    page_list.insert(0, post_info)
    file.write_file("./config/page.json", json.dumps(page_list, ensure_ascii=False))


def new_post_init(config_file=None, editor="vim"):
    if config_file is not None and os.path.exists(config_file):
        config = json.loads(file.read_file(config_file))
        title = config["title"]
        name = config["name"]
        filename = config["file"]
    else:
        title = input("Please enter the title of the article:")
        name = input("Please enter the URL (Leave a blank use pinyin):")
        filename = input("Please enter the file path to copy (blank or Non-existent will be new):")
    new_post(name, title, filename, editor)
    print("Success!")
Python
0
@@ -1211,11 +1211,12 @@ or=%22 -vim +None %22):%0A @@ -1675,24 +1675,235 @@ be new):%22)%0A + if editor==%22None%22:%0A system_info = json.loads(file.read_file(%22./config/system.json%22))%0A if %22Editor%22 in system_info:%0A editor=system_info%5B%22Editor%22%5D%0A else:%0A editor=None%0A new_post
7bd2bfa8deb59c97f7630ed10fe70fd7e8bd8587
Update dependency bazelbuild/bazel to latest version
third_party/bazel.bzl
third_party/bazel.bzl
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file is autogenerated by copybara, please do not edit.
bazel_version = "d9ebac9c2bcca821902b86cdb5f1297790bba2f9"
bazel_sha256 = "f648383e43e4172a6787dcde60365091ff4dbced0485bbf9c4b515d5f2c96139"
Python
0.000007
@@ -655,128 +655,128 @@ = %22 -d9ebac9c2bcca821902b86cdb5f1297790bba2f9%22%0Abazel_sha256 = %22f648383e43e4172a6787dcde60365091ff4dbced0485bbf9c4b515d5f2c96139 +50ce3f973cbc96a0326560a31b736a4f0ca8dc62%22%0Abazel_sha256 = %22977e63bacdec2cc29192ed52ea251915d4eda12c0cc666b4e71aade947404442 %22%0A
73d0be7a432340b4ecd140ad1cc8792d3f049779
Use SelfAttribute instead of explicit lambda
tests/factories/user.py
tests/factories/user.py
# -*- coding: utf-8 -*-
# Copyright (c) 2016 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import factory
from factory.faker import Faker

from pycroft.model.user import User

from .base import BaseFactory
from .facilities import RoomFactory
from .finance import AccountFactory


class UserFactory(BaseFactory):
    class Meta:
        model = User

    login = Faker('user_name')
    name = Faker('name')
    registered_at = Faker('date_time')
    password = Faker('password')
    email = Faker('email')
    account = factory.SubFactory(AccountFactory, type="USER_ASSET")
    room = factory.SubFactory(RoomFactory)
    address = factory.LazyAttribute(lambda o: o.room.address)


class UserWithHostFactory(UserFactory):
    host = factory.RelatedFactory('tests.factories.host.HostFactory', 'owner')
Python
0
@@ -774,12 +774,12 @@ ory. -Lazy +Self Attr @@ -788,20 +788,9 @@ ute( -lambda o: o. +' room @@ -797,16 +797,17 @@ .address +' )%0A%0A%0Aclas
469d5e545e3c78aed5dad3e4957956cfe0eae991
Add a missing test for Analysis.wait_until_finished().
tests/fileinfo_tests.py
tests/fileinfo_tests.py
#
# Project: retdec-python
# Copyright: (c) 2015 by Petr Zemek <[email protected]> and contributors
# License: MIT, see the LICENSE file for more details
#

"""Tests for the :mod:`retdec.fileinfo` module."""

import io
from unittest import mock

from retdec.exceptions import AnalysisFailedError
from retdec.file import File
from retdec.fileinfo import Analysis
from retdec.fileinfo import Fileinfo
from tests.file_tests import AnyFile
from tests.resource_tests import ResourceTestsBase
from tests.service_tests import BaseServiceTests


class FileinfoRunAnalysisTests(BaseServiceTests):
    """Tests for :func:`retdec.fileinfo.Fileinfo.run_analysis()`."""

    def setUp(self):
        super().setUp()

        self.input_file_mock = mock.MagicMock(spec_set=File)

        self.fileinfo = Fileinfo(api_key='KEY')

    def test_creates_api_connection_with_correct_url_and_api_key(self):
        self.fileinfo.run_analysis(input_file=self.input_file_mock)

        self.APIConnectionMock.assert_called_once_with(
            'https://retdec.com/service/api/fileinfo/analyses',
            self.fileinfo.api_key
        )

    def test_verbose_is_set_to_0_when_not_given(self):
        self.fileinfo.run_analysis(input_file=self.input_file_mock)

        self.conn_mock.send_post_request.assert_called_once_with(
            '',
            params={'verbose': 0},
            files={'input': AnyFile()}
        )

    def test_verbose_is_set_to_0_when_given_but_false(self):
        self.fileinfo.run_analysis(
            input_file=self.input_file_mock,
            verbose=False
        )

        self.conn_mock.send_post_request.assert_called_once_with(
            '',
            params={'verbose': 0},
            files={'input': AnyFile()}
        )

    def test_verbose_is_set_to_1_when_given_and_true(self):
        self.fileinfo.run_analysis(
            input_file=self.input_file_mock,
            verbose=True
        )

        self.conn_mock.send_post_request.assert_called_once_with(
            '',
            params={'verbose': 1},
            files={'input': AnyFile()}
        )

    def test_uses_returned_id_to_initialize_analysis(self):
        self.conn_mock.send_post_request.return_value = {'id': 'ID'}

        analysis = self.fileinfo.run_analysis(
            input_file=self.input_file_mock
        )

        self.assertTrue(analysis.id, 'ID')

    def test_repr_returns_correct_value(self):
        self.assertEqual(
            repr(self.fileinfo),
            "<Fileinfo api_url='https://retdec.com/service/api'>"
        )


class AnalysisTestsBase(ResourceTestsBase):
    """Base class for all tests of :class:`retdec.decompiler.Analysis`."""


class AnalysisTests(AnalysisTestsBase):
    """Tests for :class:`retdec.fileinfo.Analysis`."""


class AnalysisWaitUntilFinishedTests(AnalysisTestsBase):
    """Tests for :func:`retdec.resource.Analysis.wait_until_finished()`."""

    def test_sends_correct_request_and_returns_when_resource_is_finished(self):
        self.conn_mock.send_get_request.return_value = self.status_with({
            'finished': True
        })
        a = Analysis('ID', self.conn_mock)

        a.wait_until_finished()

        self.conn_mock.send_get_request.assert_called_once_with('/ID/status')

    def test_raises_exception_by_default_when_resource_failed(self):
        self.conn_mock.send_get_request.return_value = self.status_with({
            'finished': True,
            'failed': True,
            'error': 'error message'
        })
        a = Analysis('ID', self.conn_mock)

        with self.assertRaises(AnalysisFailedError):
            a.wait_until_finished()

    def test_calls_on_failure_when_it_is_callable(self):
        self.conn_mock.send_get_request.return_value = self.status_with({
            'finished': True,
            'succeeded': False,
            'failed': True,
            'error': 'error message'
        })
        a = Analysis('ID', self.conn_mock)
        on_failure_mock = mock.Mock()

        a.wait_until_finished(on_failure=on_failure_mock)

        on_failure_mock.assert_called_once_with('error message')

    def test_does_not_raise_exception_when_on_failure_is_none(self):
        self.conn_mock.send_get_request.return_value = self.status_with({
            'finished': True,
            'failed': True,
            'error': 'error message'
        })
        a = Analysis('ID', self.conn_mock)

        a.wait_until_finished(on_failure=None)


class AnalysisGetOutputTests(AnalysisTestsBase):
    """Tests for :func:`retdec.resource.Analysis.get_output()`."""

    def test_accesses_correct_url_and_returns_its_data(self):
        self.conn_mock.get_file.return_value = io.BytesIO(b'data')
        a = Analysis('ID', self.conn_mock)

        output = a.get_output()

        self.assertEqual(output, 'data')
        self.conn_mock.get_file.assert_called_once_with('/ID/output')
Python
0.000475
@@ -3256,24 +3256,436 @@ D/status')%0A%0A + def test_waits_until_analysis_finishes(self):%0A self.conn_mock.send_get_request.side_effect = %5B%0A self.status_with(%7B%0A 'finished': False,%0A 'succeeded': False%0A %7D), self.status_with(%7B%0A 'finished': True,%0A 'succeeded': True%0A %7D)%0A %5D%0A a = Analysis('ID', self.conn_mock)%0A%0A a.wait_until_finished()%0A%0A def test
5c1a404353a0cdcd49610a21d7d19b79898ac7e3
make mpi example a little more verbose
tests/helloworld_mpi.py
tests/helloworld_mpi.py
#!/usr/bin/env python

# This is an example MPI4Py program that is used
# by different examples and tests.

from mpi4py import MPI
import time

SLEEP = 10

name = MPI.Get_processor_name()
comm = MPI.COMM_WORLD

print "mpi rank %d/%d/%s" % (comm.rank, comm.size, name)

time.sleep(SLEEP)

comm.Barrier()   # wait for everybody to synchronize here
Python
0.000069
@@ -105,46 +105,102 @@ s.%0A%0A -from mpi4py import MPI%0Aimport time%0A%0A +import sys%0Aimport time%0Aimport traceback%0Afrom mpi4py import MPI%0A%0Atry :%0A print %22start%22%0A SLEE @@ -206,16 +206,20 @@ EP = 10%0A + name = @@ -243,16 +243,20 @@ _name()%0A + comm = @@ -271,16 +271,20 @@ _WORLD%0A%0A + print %22m @@ -334,16 +334,20 @@ name)%0A%0A + time.sle @@ -357,16 +357,20 @@ SLEEP)%0A%0A + comm.Bar @@ -420,8 +420,149 @@ e here%0A%0A +except Exception as e :%0A traceback.print_exc ()%0A print %22error : %25s%22 %25 s%0A sys.exit (1)%0A%0Afinally :%0A print %22done%22%0A sys.exit (0)%0A%0A
3bb6017897f9b8c859c2d3879c2e9d51b899f57c
Increase number of iterations for xor neural net
neuralnets/xor.py
neuralnets/xor.py
import numpy as np
from net import NeuralNet

net = NeuralNet(2, 1, 3, 1, 342047)
output_dot = True

inputs = np.array([[1,1], [0,0], [1,0], [0,1]])
outputs = np.array([[0], [0], [1], [1]])

for i in xrange(50000):
    if i % 100 == 0 and output_dot:
        open("/tmp/xor{:05d}graph".format(i), mode="w").write(net.output_dot((inputs,outputs)))
    net.learn(inputs, outputs, 0.05)

print("trained")
print("error: {}".format(net.error(inputs, outputs)))
for inpt in inputs:
    print(net.forward(inpt))
Python
0.000002
@@ -318,17 +318,17 @@ xrange( -5 +8 0000):%0A
2c37ed091baf12e53885bfa06fdb835bb8de1218
Add Bitbucket to skipif marker reason
tests/skipif_markers.py
tests/skipif_markers.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
skipif_markers
--------------

Contains pytest skipif markers to be used in the suite.
"""

import pytest
import os

try:
    os.environ[u'TRAVIS']
except KeyError:
    travis = False
else:
    travis = True

try:
    os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
    no_network = False
else:
    no_network = True

# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = 'Fails on Travis or else there is no network connection to GitHub'

skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
Python
0
@@ -640,16 +640,22 @@ reason = + (%0A 'Fails @@ -710,15 +710,35 @@ to -GitHub' +'%0A 'GitHub/Bitbucket.'%0A) %0A%0Ask
44dac786339716ad8cc05f6790b73b5fc47be812
Remove extra comma to avoid flake8 test failure in CircleCI
config/jinja2.py
config/jinja2.py
from django.urls import reverse
from django.utils import translation

from django.template.backends.jinja2 import Jinja2
from jinja2 import Environment


class FoodsavingJinja2(Jinja2):
    app_dirname = 'templates'


def environment(**options):
    env = Environment(extensions=['jinja2.ext.i18n',], **options)
    env.globals.update({
        'url': reverse,
    })
    env.install_gettext_translations(translation)
    env.install_null_translations()
    return env
Python
0.000001
@@ -289,17 +289,16 @@ xt.i18n' -, %5D, **opt
d912db90ff6f1d5cfd4d3acf863467d6c66a79b5
fix the tests to use correct urls for v4
tests/test_api_usage.py
tests/test_api_usage.py
import logging
import subprocess
import sys
import os
from nose.tools import raises

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

from luminoso_api import LuminosoClient
from luminoso_api.errors import LuminosoAPIError, LuminosoError
from luminoso_api.json_stream import open_json_or_csv_somehow

ROOT_CLIENT = None
RELOGIN_CLIENT = None
PROJECT = None
USERNAME = None

PROJECT_NAME = os.environ.get('USER', 'jenkins') + '-test'
PROJECT_ID = None
EXAMPLE_DIR = os.path.dirname(__file__) + '/examples'

ROOT_URL = 'http://localhost:5000/v4'


def fileno_monkeypatch(self):
    return sys.__stdout__.fileno()

import StringIO
StringIO.StringIO.fileno = fileno_monkeypatch


def setup():
    """
    Make sure we're working with a fresh database. Build a client for
    interacting with that database and save it as a global.
    """
    global ROOT_CLIENT, PROJECT, USERNAME, RELOGIN_CLIENT, PROJECT_ID

    user_info_str = subprocess.check_output('tellme lumi-test', shell=True)
    user_info = eval(user_info_str)
    USERNAME = user_info['username']

    ROOT_CLIENT = LuminosoClient.connect(ROOT_URL,
                                         username=USERNAME,
                                         password=user_info['password'])
    RELOGIN_CLIENT = LuminosoClient.connect(ROOT_URL,
                                            username=USERNAME,
                                            password=user_info['password'],
                                            auto_login=True)

    # check to see if the project exists
    projects = ROOT_CLIENT.get(USERNAME + '/projects')
    projdict = dict((proj['name'], proj['project_id']) for proj in projects)

    if PROJECT_NAME in projdict:
        logger.warn('The test database existed already. '
                    'We have to clean it up.')
        ROOT_CLIENT.delete(USERNAME + '/projects',
                           project_id=projdict[PROJECT_NAME])

    # create the project and client
    logger.info("Creating project: " + PROJECT_NAME)
    logger.info("Existing projects: %r" % projdict.keys())
    creation = ROOT_CLIENT.post(USERNAME + '/projects', name=PROJECT_NAME)
    PROJECT_ID = creation['project_id']
    PROJECT = ROOT_CLIENT.change_path(USERNAME + '/projects/' + PROJECT_ID)
    PROJECT.get()


def test_noop():
    """
    Sometimes you just need to do nothing.
    """
    assert ROOT_CLIENT.get('ping') == 'pong'
    assert ROOT_CLIENT.post('ping') == 'pong'
    assert ROOT_CLIENT.put('ping') == 'pong'
    assert ROOT_CLIENT.delete('ping') == 'pong'


def test_paths():
    """
    Without interacting with the network, make sure our path logic works.
    """
    client1 = ROOT_CLIENT.change_path('foo')
    assert client1.url == ROOT_CLIENT.url + 'foo/'
    client2 = client1.change_path('bar')
    assert client2.url == ROOT_CLIENT.url + 'foo/bar/'
    client3 = client2.change_path('/baz')
    assert client3.url == ROOT_CLIENT.url + 'baz/'


@raises(LuminosoAPIError)
def test_error_raising():
    """
    The project was just created, so it shouldn't have any terms in it.
    """
    PROJECT.get('terms')


def test_upload_and_wait_for():
    """
    Upload three documents and wait for the result.
    """
    docs = open_json_or_csv_somehow(EXAMPLE_DIR + '/example1.stream.json')
    job_id = PROJECT.upload('docs', docs)
    job_result = PROJECT.wait_for(job_id)
    assert job_result['success'] is True


def test_post_with_parameters():
    """
    Test post with parameters via topics.
    """
    topics = PROJECT.get('topics')
    assert topics == []

    PROJECT.post('topics',
                 name='Example topic',
                 role='topic',
                 color='#aabbcc',
                 surface_texts=['Examples']
                 )

    result = PROJECT.get('topics')
    assert len(result) == 1

    topic = result[0]
    assert topic['name'] == 'Example topic'
    assert topic['surface_texts'] == ['Examples']
    assert topic['color'] == '#aabbcc'

    topic_id = topic['_id']
    topic2 = PROJECT.get('topics/id/%s' % topic_id)
    assert topic2 == topic, '%s != %s' % (topic2, topic)


def test_auto_login():
    """Test auto-login after 401 responses."""
    RELOGIN_CLIENT._session.auth._key_id = ''
    assert RELOGIN_CLIENT.get('ping') == 'pong'


def teardown():
    """
    Pack everything up, we're done.
    """
    if ROOT_CLIENT is not None:
        ROOT_CLIENT.delete(USERNAME + '/projects', project_id=PROJECT_ID)
        PROJECT = ROOT_CLIENT.change_path(USERNAME + '/projects/' + PROJECT_ID)
        try:
            got = PROJECT.get()
        except LuminosoError:
            # it should be an error, we just deleted the project
            return
        else:
            assert False, got
Python
0.000005
@@ -1587,38 +1587,38 @@ ENT.get( -USERNAME + '/projects' +'projects/' + USERNAME )%0A pr @@ -1854,38 +1854,38 @@ .delete( -USERNAME + '/projects' +'projects/' + USERNAME ,%0A @@ -2121,38 +2121,38 @@ NT.post( -USERNAME + '/projects' +'projects/' + USERNAME , name=P @@ -2234,32 +2234,46 @@ ENT.change_path( +'projects/' + USERNAME + '/pro @@ -2265,33 +2265,24 @@ SERNAME + '/ -projects/ ' + PROJECT_
cac21e16693a27a7f8c3b4de60e43e316ef8791d
Remove old, less specific character tests
tests/test_character.py
tests/test_character.py
import npc import pytest def test_append_rank(): char = npc.Character() char.append_rank("Knights of the Round Table", "Dancer") assert char["rank"]["Knights of the Round Table"] == ["Dancer"] class TestCreation: """Test different instantiation behaviors""" def test_dict(self): char = npc.Character({"name": ["hello"]}) assert char["name"] == ["hello"] def test_kwargs(self): char = npc.Character(name=["hello"]) assert char["name"] == ["hello"] def test_both(self): char = npc.Character(attributes={"name": ["hello"], "profession": ["tailor"]}, name=["nope"]) assert char["name"] == ["nope"] assert char["profession"] == ["tailor"] class TestGetFirst: def test_normal(self): char = npc.Character(name=["hello", "goodbye"]) assert char.get_first("name") == "hello" def test_desc(self): char = npc.Character(description="Fee Fie Foe Fum") assert char.get_first("description") == "Fee Fie Foe Fum" def test_not_present(self): char = npc.Character() assert char.get_first("nope") == None class TestGetRemaining: def test_normal(self): char = npc.Character(name=["hello", "goodbye"]) assert char.get_remaining("name") == ["goodbye"] def test_desc(self): char = npc.Character(description="Fee Fie Foe Fum") assert char.get_remaining("description") == "Fee Fie Foe Fum" def test_not_present(self): char = npc.Character() assert char.get_remaining("nope") == [] class TestAppend: def test_normal(self): char = npc.Character() char.append("title", "The Stern") assert char["title"] == ["The Stern"] def test_desc(self): char = npc.Character() char.append("description", "Hello hello") char.append("description", " baby, you called") assert char["description"] == "Hello hello baby, you called" class TestTypeKey: def test_lowercase(self): char = npc.Character(type=['Fish', 'Great Ape']) assert char.type_key == 'fish' def test_empty(self): char = npc.Character() assert char.type_key is None class TestGetFirst: def test_normal(self): char = npc.Character(name=["hello", "goodbye"]) assert char.get_first('name') == 'hello' @pytest.mark.parametrize('keyname', npc.Character.STRING_TAGS) def test_string_tags(self, keyname): char = npc.Character() char.append(keyname, "hello") char.append(keyname, " friend") assert char.get_first(keyname) == "hello friend" def test_missing(self): char = npc.Character() assert char.get_first('nope', 'negative') == 'negative' assert char.get_first('nope') is None class TestGetRemaining: def test_normal(self): char = npc.Character(name=["hello", "goodbye"]) assert char.get_remaining('name') == ['goodbye'] @pytest.mark.parametrize('keyname', npc.Character.STRING_TAGS) def test_string_tags(self, keyname): char = npc.Character() char.append(keyname, "hello") char.append(keyname, " friend") assert char.get_remaining(keyname) == "hello friend" def test_missing(self): char = npc.Character() assert char.get_remaining('nope') == [] class TestHasItems: @pytest.mark.parametrize('limit', [(1, True), (5, True), (10, True), (11, False)]) def test_thresholds(self, limit): char = npc.Character() for thing in range(0, 10): char.append('things', thing) assert char.has_items('things', limit[0]) == limit[1] def test_bad_threshold(self): char = npc.Character() with pytest.raises(npc.util.OutOfBoundsError) as ex: char.has_items('things', 0) class TestCopyAndAlter: def titleize(self, text): return text.title() def test_single(self): char = npc.Character() char.append('snoot', 'booped') new_char = char.copy_and_alter(self.titleize) assert new_char.get('snoot') == ['Booped'] def test_multiple(self): char = npc.Character() 
char.append('hands', 'raised') char.append('hands', 'jazzy') new_char = char.copy_and_alter(self.titleize) assert new_char.get('hands') == ['Raised', 'Jazzy'] @pytest.mark.parametrize('keyname', npc.Character.STRING_TAGS) def test_string(self, keyname): char = npc.Character() char.append(keyname, 'hello hello') new_char = char.copy_and_alter(self.titleize) assert new_char.get(keyname) == "Hello Hello" def test_rank(self): char = npc.Character() char.append_rank('restaurant', 'chef') char.append_rank('restaurant', 'newb') new_char = char.copy_and_alter(self.titleize) assert new_char.get('rank') == {'restaurant': ['Chef', 'Newb']} # tests to do: # validation # changeling validation # valid # build header
Python
0.000139
@@ -721,856 +721,8 @@ %22%5D%0A%0A -class TestGetFirst:%0A def test_normal(self):%0A char = npc.Character(name=%5B%22hello%22, %22goodbye%22%5D)%0A assert char.get_first(%22name%22) == %22hello%22%0A%0A def test_desc(self):%0A char = npc.Character(description=%22Fee Fie Foe Fum%22)%0A assert char.get_first(%22description%22) == %22Fee Fie Foe Fum%22%0A%0A def test_not_present(self):%0A char = npc.Character()%0A assert char.get_first(%22nope%22) == None%0A%0Aclass TestGetRemaining:%0A def test_normal(self):%0A char = npc.Character(name=%5B%22hello%22, %22goodbye%22%5D)%0A assert char.get_remaining(%22name%22) == %5B%22goodbye%22%5D%0A%0A def test_desc(self):%0A char = npc.Character(description=%22Fee Fie Foe Fum%22)%0A assert char.get_remaining(%22description%22) == %22Fee Fie Foe Fum%22%0A%0A def test_not_present(self):%0A char = npc.Character()%0A assert char.get_remaining(%22nope%22) == %5B%5D%0A%0A clas
b447fa44ca1dd2e9d21af4ce61ee6092fe3c94ec
Update test_cmatrices to new interface
tests/test_cmatrices.py
tests/test_cmatrices.py
# to run this test, from directory above: # setenv PYTHONPATH /path/to/pyradiomics/radiomics # nosetests --nocapture -v tests/test_features.py import logging from nose_parameterized import parameterized import numpy import six from radiomics import cMatsEnabled, getFeatureClasses from testUtils import custom_name_func, RadiomicsTestUtils testUtils = RadiomicsTestUtils() testCases = ('brain1', 'brain2', 'breast1', 'lung1', 'lung2') featureClasses = getFeatureClasses() class TestFeatures: def generate_scenarios(): global testCases, featureClasses for testCase in testCases: for className, featureClass in six.iteritems(featureClasses): assert(featureClass is not None) if "_calculateCMatrix" in dir(featureClass) or className == "shape": logging.debug('generate_scenarios: featureClass = %s', className) yield testCase, className global testUtils @parameterized.expand(generate_scenarios(), testcase_func_name=custom_name_func) def test_scenario(self, testCase, featureClassName): print("") global testUtils, featureClasses logging.debug('test_scenario: testCase = %s, featureClassName = %s', testCase, featureClassName) assert cMatsEnabled() testUtils.setFeatureClassAndTestCase(featureClassName, testCase) testImage = testUtils.getImage() testMask = testUtils.getMask() featureClass = featureClasses[featureClassName](testImage, testMask, **testUtils.getSettings()) if featureClassName == 'shape': cSA = getattr(featureClass, 'SurfaceArea') # pre-calculated value by C extension assert (cSA is not None) pySA = getattr(featureClass, '_calculateSurfaceArea')() # Function, call to calculate SA in full-python mode assert (pySA is not None) # Check if the calculated values match assert (numpy.abs(pySA - cSA)) < 1e-3 else: assert "_calculateMatrix" in dir(featureClass) cMat = featureClass._calculateCMatrix() assert cMat is not None pyMat = featureClass._calculateMatrix() assert pyMat is not None # Check if the calculated arrays match assert numpy.max(numpy.abs(pyMat - cMat)) < 1e-3
Python
0
@@ -1019,28 +1019,24 @@ o(self, test -Case , featureCla @@ -1173,20 +1173,16 @@ s', test -Case , featur @@ -1284,20 +1284,16 @@ me, test -Case )%0A%0A t @@ -1322,16 +1322,26 @@ etImage( +'original' )%0A te @@ -1367,16 +1367,26 @@ getMask( +'original' )%0A%0A f
3b408ed7702100b7f1755f819e05bb61b1740957
add medialab events search - left TODO: JSON and date
media_lab_prado.py
media_lab_prado.py
# http://medialab-prado.es/events/2016-12-01
Python
0
@@ -37,8 +37,914 @@ 16-12-01 +%0A# -*- coding: utf-8 -*-%0Afrom bs4 import BeautifulSoup%0Aimport urllib.request%0Aimport datetime%0A%0Adate = %222017-01-02%22%0A%0Aurl = %22http://medialab-prado.es/events/%22 + date%0Arequest = urllib.request.urlopen(url)%0A%0Aif request.getcode() == 200:%0A%09request = request.read()%0A%09soup = BeautifulSoup(request, %22html.parser%22)%0A%0Apageevents = soup.find(%22ul%22, %7B %22class%22 : %22lista%22%7D).findChildren(%22a%22)%0A%0A%0Afor event in pageevents:%0A%09%09if event.text == %22Seguir leyendo%E2%80%A6%22:%0A%09%09%09event_url = event%5B'href'%5D%0A%09%09%09request2 = urllib.request.urlopen(event_url)%0A%09%09%09%0A%09%09%09if request2.getcode() == 200:%0A%09%09%09%09request2 = request2.read()%0A%09%09%09%09soup = BeautifulSoup(request2, %22html.parser%22)%0A%0A%09%09%09%09location = soup.find(%22div%22, %7B %22class%22 : %22lugar%22%7D)%0A%09%09%09%09if location == None:%0A%09%09%09%09%09location = %22MediaLab%22%0A%09%09%09%09else:%0A%09%09%09%09%09location = location.find(%22p%22)%0A%09%09%09%09print (location)%0A%09%09%09%09%0A%09%09%09%09description = soup.find(%22div%22, %7B %22class%22 : %22entradilla%22%7D)%0A%09%09%09%09print(description.text)%0A%0A%0A%0A%0A%0A
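The escaped patch above is easier to follow decoded: it grows the one-line stub into a small scraper. An abridged sketch of the added flow (the original indents with tabs and also extracts div.lugar and div.entradilla from each event page):

# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
import urllib.request

date = "2017-01-02"
url = "http://medialab-prado.es/events/" + date
request = urllib.request.urlopen(url)

if request.getcode() == 200:
    soup = BeautifulSoup(request.read(), "html.parser")
    pageevents = soup.find("ul", {"class": "lista"}).findChildren("a")
    for event in pageevents:
        # every "Seguir leyendo…" anchor links to an event detail page
        if event.text == "Seguir leyendo…":
            event_url = event['href']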
769e6209db066b8b5908426850fd300fd29098e8
Fix codemirror mode and language name
tcl_kernel/kernel.py
tcl_kernel/kernel.py
from ipykernel.kernelbase import Kernel

try:
    import Tkinter
except ImportError:
    import tkinter as Tkinter

__version__ = '0.0.1'


class TclKernel(Kernel):
    implementation = 'tcl_kernel'
    implementation_version = __version__
    language_info = {'name': 'bash',
                     'codemirror_mode': 'shell',
                     'mimetype': 'text/x-script.tcl',
                     'file_extension': '.tcl'}
    banner = "Tcl Kernel"

    def __init__(self, **kwargs):
        Kernel.__init__(self, **kwargs)
        self.tcl = Tkinter.Tcl()
        self.execution_count = 0
        putsredef = 'rename puts original_puts \nproc puts {args} {\n if {[llength $args] == 1} {\n return "=> [lindex $args 0]"\n } else {\n eval original_puts $args\n }\n}\n'
        self.tcl.eval(putsredef)

    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        try:
            output = self.tcl.eval(code.rstrip())
            if not silent:
                stream_content = {'name': 'stdout', 'text': output[3:]}
                self.send_response(self.iopub_socket, 'stream', stream_content)
        except Tkinter.TclError as scripterr:
            output = "Tcl Error: " + scripterr.args[0]
            if not silent:
                stream_content = {'name': 'stderr', 'text': output}
                self.send_response(self.iopub_socket, 'stream', stream_content)
        return {'status': 'ok', 'execution_count': self.execution_count,
                'payload': [], 'user_expressions': {}}
Python
0.00003
@@ -265,12 +265,11 @@ ': ' -bash +Tcl ',%0A @@ -312,12 +312,10 @@ ': ' -shel +Tc l',%0A
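Decoded, the fix is confined to the language_info dict: the kernel had advertised itself as bash with a shell CodeMirror mode, which broke syntax highlighting for Tcl notebooks. Post-patch the dict reads:

language_info = {'name': 'Tcl',
                 'codemirror_mode': 'Tcl',
                 'mimetype': 'text/x-script.tcl',
                 'file_extension': '.tcl'}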
c517eb40b73151a9b14f46f1991ab692d8b81702
Add docstring for simulation class methods
teemof/simulation.py
teemof/simulation.py
# Date: August 2017
# Author: Kutay B. Sezginel
"""
Simulation class for reading and initializing Lammps simulations
"""
import pprint

from teemof.read import read_run, read_trial, read_trial_set
from teemof.parameters import k_parameters, plot_parameters
from teemof.visualize import plot_thermal_conductivity, plot_distance_histogram


class Simulation:
    """
    Reading and initializing Lammps simulations
    """
    def __init__(self, read=None, setup=None, parameters=k_parameters.copy()):
        self.parameters = parameters
        self.plot_parameters = plot_parameters.copy()
        if read is not None and setup is not None:
            self.read(read, setup)
            self.setup = setup

    def read(self, sim_dir, setup='run'):
        self.setup = setup
        if setup == 'run':
            self.run = read_run(sim_dir, k_par=self.parameters)
        elif setup == 'trial':
            self.trial = read_trial(sim_dir, k_par=self.parameters)
        elif setup == 'trial_set':
            self.trial_set = read_trial_set(sim_dir, k_par=self.parameters)
        else:
            print('Select setup: "run" | "trial" | "trial_set"')

    def initialize(self):
        pass

    def plot(self, selection):
        if selection == 'k':
            plot_data = {}
            plot_data['x'] = self.trial['data']['Run1']['time']
            plot_data['y'] = [self.trial['data'][run]['k']['iso'] for run in self.trial['runs']]
            plot_data['legend'] = self.trial['runs']
            plot_thermal_conductivity(plot_data, self.plot_parameters['k'])
        elif selection == 'hist':
            plot_data = {}
            plot_distance_histogram(plot_data, self.plot_parameters['hist'])
        else:
            print('Select plot: "k" | "k_est" | "hist"')

    def show_parameters(self):
        pprint.pprint(self.parameters)

    def show_plot_parameters(self):
        pprint.pprint(self.plot_parameters)
Python
0
@@ -492,16 +492,83 @@ opy()):%0A + %22%22%22%0A Create a Lammps simulation object.%0A %22%22%22%0A @@ -810,16 +810,101 @@ 'run'):%0A + %22%22%22%0A Read Lammps simulation results from given directory.%0A %22%22%22%0A @@ -1341,44 +1341,188 @@ -pass%0A%0A def plot(self, selection): +%22%22%22%0A Initialize input files for a Lammps simulation.%0A %22%22%22%0A pass%0A%0A def plot(self, selection):%0A %22%22%22%0A Plot Lammps simulation results.%0A %22%22%22 %0A @@ -2097,32 +2097,102 @@ rameters(self):%0A + %22%22%22%0A Show thermal conductivity parameters.%0A %22%22%22%0A pprint.p @@ -2243,32 +2243,86 @@ rameters(self):%0A + %22%22%22%0A Show plot parameters.%0A %22%22%22%0A pprint.p
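Decoded, the patch adds one short triple-quoted summary per public method, all in the same style, e.g.:

def read(self, sim_dir, setup='run'):
    """
    Read Lammps simulation results from given directory.
    """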
b646e4f376db710101e2c1825bd384b2727e6a79
Disable on win32
tests/test_dateentry.py
tests/test_dateentry.py
import datetime
import unittest

from kiwi.ui.dateentry import DateEntry


class TestDateEntry(unittest.TestCase):
    def setUp(self):
        self.date = datetime.date.today()

    def testGetSetDate(self):
        entry = DateEntry()
        entry.set_date(self.date)
        self.assertEqual(entry.get_date(), self.date)


if __name__ == '__main__':
    unittest.main()
Python
0.000002
@@ -1,12 +1,23 @@ +import sys%0A import datet @@ -204,32 +204,87 @@ tSetDate(self):%0A + if sys.platform == 'win32':%0A return%0A entry =
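Decoded, the guard is an early return: import sys at the top, then if sys.platform == 'win32': return as the first statement of testGetSetDate. A more declarative idiom for the same intent — not what this commit does, just the common alternative — is unittest's skip decorator:

import sys
import unittest

class TestDateEntry(unittest.TestCase):
    @unittest.skipIf(sys.platform == 'win32', 'known-broken on win32')
    def testGetSetDate(self):
        pass  # body elided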
9033c3e95b07219f49171dd88f55a951852c9b8f
Reset the beacon rather than stopping it, in the hope that it will avoid bind-related issues & delays on the CI tests
tests/test_discovery.py
tests/test_discovery.py
try: import queue except ImportError: import Queue as queue import random import re import socket import threading import time import uuid import pytest import networkzero as nw0 _logger = nw0.core.get_logger("networkzero.tests") nw0.core._enable_debug_logging() is_valid_port = nw0.core.is_valid_port is_valid_address = nw0.core.is_valid_address class SupportThread(threading.Thread): """Fake the other end of the message/command/notification chain NB we use as little as possible of the nw0 machinery here, mostly to avoid the possibility of complicated cross-thread interference but also to test our own code. """ def __init__(self, context): threading.Thread.__init__(self) self.context = context self.queue = queue.Queue() self.setDaemon(True) def run(self): try: while True: test_name, args = self.queue.get() if test_name is None: break function = getattr(self, "support_test_" + test_name) function(*args) except: _logger.exception("Problem in thread") def support_test_discover_before_advertise(self, service): time.sleep(1) nw0.advertise(service) @pytest.fixture def support(request): thread = SupportThread(nw0.sockets.context) def finalise(): thread.queue.put((None, None)) thread.join() thread.start() return thread @pytest.fixture def beacon(request): port = random.choice(nw0.config.DYNAMIC_PORTS) nw0.discovery._start_beacon(port=port) request.addfinalizer(nw0.discovery._stop_beacon) def test_beacon_already_running(): # # NB this one has to run without the beacon fixture # # Bind a socket on a random port before attempting # to start a beacon on that same port. # port = random.choice(nw0.config.DYNAMIC_PORTS) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) s.bind(("", port)) try: assert nw0.discovery._beacon is None nw0.discovery._start_beacon(port=port) assert nw0.discovery._beacon is nw0.discovery._remote_beacon finally: s.close() # # Make sure any future beacon use assumes it's not # already running. 
# nw0.discovery._stop_beacon() def test_advertise_no_address(beacon): service = uuid.uuid4().hex address = nw0.advertise(service) assert is_valid_address(address) assert [service, address] in nw0.discover_all() def test_advertise_no_port(beacon): service = uuid.uuid4().hex address = nw0.advertise(service) assert is_valid_address(address, port_range=nw0.config.DYNAMIC_PORTS) assert [service, address] in nw0.discover_all() def test_advertise_full_address(beacon): service = uuid.uuid4().hex service_address = "192.168.1.1:1234" address = nw0.advertise(service, service_address) assert address == service_address assert [service, address] in nw0.discover_all() def test_discover(beacon): service = uuid.uuid4().hex address = nw0.advertise(service) assert address == nw0.discover(service) def test_discover_not_exists_with_timeout(beacon): service = uuid.uuid4().hex address = nw0.advertise(service) assert None is nw0.discover(uuid.uuid4().hex, wait_for_s=2) def test_discover_exists_with_timeout(beacon): service = uuid.uuid4().hex address = nw0.advertise(service) assert address == nw0.discover(service, wait_for_s=2) def test_discover_all(beacon): service1 = uuid.uuid4().hex address1 = nw0.advertise(service1) service2 = uuid.uuid4().hex address2 = nw0.advertise(service2) services = dict(nw0.discover_all()) assert services == {service1:address1, service2:address2} def test_discover_before_advertise(beacon, support): service1 = uuid.uuid4().hex support.queue.put(("discover_before_advertise", [service1])) address1 = nw0.discover(service1, wait_for_s=5) assert address1 is not None def test_discover_group(beacon): group = uuid.uuid4().hex service1 = "%s/%s" % (group, uuid.uuid4().hex) service2 = "%s/%s" % (group, uuid.uuid4().hex) service3 = "%s/%s" % (uuid.uuid4().hex, uuid.uuid4().hex) address1 = nw0.advertise(service1) address2 = nw0.advertise(service2) address3 = nw0.advertise(service3) discovered_group = nw0.discover_group(group) assert set(discovered_group) == set([(service1, address1), (service2, address2)])
Python
0
@@ -1654,29 +1654,29 @@ 0.discovery. -_stop +reset _beacon)%0A%0Ade
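Decoded, the change is a single identifier in the fixture teardown: request.addfinalizer(nw0.discovery.reset_beacon) replaces the _stop_beacon finalizer. The general pytest shape, with a stand-in resource (names here are illustrative, not networkzero API):

import pytest

@pytest.fixture
def beacon_like(request):
    state = {'running': True}      # stand-in for the real beacon

    def reset():
        state['running'] = False   # reset state rather than tearing down sockets

    request.addfinalizer(reset)    # runs even if the test fails
    return state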
78320df85a99b309c192a685f16f7c36b8b86c7c
add test to check that "in bounds" lat/lng do not throw.
tests/test_haversine.py
tests/test_haversine.py
from haversine import haversine, Unit from math import pi import pytest from tests.geo_ressources import LYON, PARIS, NEW_YORK, LONDON, EXPECTED_LYON_PARIS def haversine_test_factory(unit): def test(): expected = EXPECTED_LYON_PARIS[unit] assert haversine(LYON, PARIS, unit=unit) == expected assert isinstance(unit.value, str) assert haversine(LYON, PARIS, unit=unit.value) == expected return test test_kilometers = haversine_test_factory(Unit.KILOMETERS) test_meters = haversine_test_factory(Unit.METERS) test_miles = haversine_test_factory(Unit.MILES) test_nautical_miles = haversine_test_factory(Unit.NAUTICAL_MILES) test_feet = haversine_test_factory(Unit.FEET) test_inches = haversine_test_factory(Unit.INCHES) test_radians = haversine_test_factory(Unit.RADIANS) test_degrees = haversine_test_factory(Unit.DEGREES) def test_units_enum(): from haversine.haversine import _CONVERSIONS assert all(unit in _CONVERSIONS for unit in Unit) def test_haversine_deg_rad(): """ Test makes sure that one time around earth matches sphere circumference in degrees / radians. """ p1, p2 = (45, 0), (-45, 180) assert haversine(p1, p2, unit=Unit.RADIANS) == pi assert round(haversine(p1, p2, unit=Unit.DEGREES), 13) == 180.0 @pytest.mark.parametrize( "oob_from,oob_to,proper_from,proper_to", [ ((-90.0001, 0), (0, 0), (-89.9999, 180), (0, 0)), ((-90.0001, 30), (0, 0), (-89.9999, -150), (0, 0)), ((0, 0), (90.0001, 0), (0, 0), (89.9999, -180)), ((0, 0), (90.0001, 30), (0, 0), (89.9999, -150)), ((0, -180.0001), (0, 0), (0, 179.9999), (0, 0)), ((30, -180.0001), (0, 0), (30, 179.9999), (0, 0)), ((0, 0), (0, 180.0001), (0, 0), (0, -179.9999)), ((0, 0), (30, 180.0001), (0, 0), (30, -179.9999)), ] ) def test_normalization(oob_from, oob_to, proper_from, proper_to): """ Test makes sure that normalization works as expected by comparing distance of out of bounds points cases to equal cases where all points are within lat/lon ranges. The results are expected to be equal (within some tolerance to account for numerical issues). """ normalized_during, normalized_already = ( haversine(oob_from, oob_to, Unit.DEGREES, normalize=True), haversine(proper_from, proper_to, Unit.DEGREES, normalize=True), ) assert normalized_during == pytest.approx(normalized_already, abs=1e-10) @pytest.mark.parametrize( "oob_from,oob_to", [ ((-90.0001, 0), (0, 0)), ((0, 0), (90.0001, 0)), ((0, -180.0001), (0, 0)), ((0, 0), (0, 180.0001)), ] ) def test_out_of_bounds(oob_from, oob_to): """ Test makes sure that a ValueError is raised when latitude or longitude values are out of bounds. """ with pytest.raises(ValueError): haversine(oob_from, oob_to) with pytest.raises(ValueError): haversine(oob_from, oob_to, normalize=False) def test_haversine_deg_rad_great_circle_distance(): """ Test makes sure the haversine functions returns the great circle distance (https://en.wikipedia.org/wiki/Great-circle_distance) between two points on a sphere. See https://github.com/mapado/haversine/issues/45 """ p1, p2 = (0, -45), (0, 45) assert haversine(p1, p2, Unit.DEGREES) == 89.99999999999997
Python
0
@@ -2974,16 +2974,422 @@ False)%0A%0A [email protected](%0A %22in_bounds_from,in_bounds_to%22, %5B%0A ((-90, 0), (0, 0)),%0A ((0, 0), (90, 0)),%0A ((0, -180), (0, 0)),%0A ((0, 0), (0, 180)),%0A %5D%0A)%0Adef test_in_bounds(in_bounds_from, in_bounds_to):%0A %22%22%22%0A Test makes sure that a ValueError is NOT raised when latitude or longitude values are in bounds.%0A %22%22%22%0A assert haversine(in_bounds_from, in_bounds_to) %3E 0%0A%0A %0Adef tes
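Decoded, the added test complements the existing out-of-bounds cases: it parametrizes the four exact boundary pairs and asserts the call simply returns a distance instead of raising:

@pytest.mark.parametrize(
    "in_bounds_from,in_bounds_to", [
        ((-90, 0), (0, 0)),
        ((0, 0), (90, 0)),
        ((0, -180), (0, 0)),
        ((0, 0), (0, 180)),
    ]
)
def test_in_bounds(in_bounds_from, in_bounds_to):
    # latitude/longitude exactly on the bounds must not raise ValueError
    assert haversine(in_bounds_from, in_bounds_to) > 0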
17ad68fe77b124fa760857c9e93cbd3d4f9d293e
Write XML of input file to tempdir as well
tests/test_hintfonts.py
tests/test_hintfonts.py
from __future__ import print_function, division, absolute_import import glob from os.path import basename import pytest from fontTools.misc.xmlWriter import XMLWriter from fontTools.cffLib import CFFFontSet from fontTools.ttLib import TTFont from psautohint.autohint import ACOptions, hintFiles from .differ import main as differ from . import DATA_DIR class Options(ACOptions): def __init__(self, inpath, outpath): super(Options, self).__init__() self.inputPaths = [inpath] self.outputPaths = [outpath] self.hintAll = True self.verbose = False @pytest.mark.parametrize("ufo", glob.glob("%s/*/*/font.ufo" % DATA_DIR)) def test_ufo(ufo, tmpdir): out = str(tmpdir / basename(ufo)) options = Options(ufo, out) hintFiles(options) assert differ([ufo, out]) @pytest.mark.parametrize("otf", glob.glob("%s/*/*/font.otf" % DATA_DIR)) def test_otf(otf, tmpdir): out = str(tmpdir / basename(otf)) options = Options(otf, out) hintFiles(options) for path in (otf, out): font = TTFont(path) assert "CFF " in font writer = XMLWriter(path + ".xml") font["CFF "].toXML(writer, font) del writer del font assert differ([otf + ".xml", out + ".xml"]) @pytest.mark.parametrize("cff", glob.glob("%s/*/*/font.cff" % DATA_DIR)) def test_cff(cff, tmpdir): out = str(tmpdir / basename(cff)) options = Options(cff, out) hintFiles(options) for path in (cff, out): font = CFFFontSet() writer = XMLWriter(path + ".xml") with open(path, "rb") as fp: font.decompile(fp, None) font.toXML(writer) del writer del font assert differ([cff + ".xml", out + ".xml"])
Python
0
@@ -952,17 +952,26 @@ me(otf)) + + %22.out%22 %0A - opti @@ -1123,36 +1123,60 @@ ter = XMLWriter( +str(tmpdir / basename( path +)) + %22.xml%22)%0A @@ -1267,19 +1267,43 @@ differ(%5B +str(tmpdir / basename( otf +)) + %22.xml @@ -1296,36 +1296,79 @@ (otf)) + %22.xml%22, - +%0A str(tmpdir / basename( out +)) + %22.xml%22%5D)%0A%0A%0A@p @@ -1502,16 +1502,25 @@ me(cff)) + + %22.out%22 %0A opt @@ -1651,20 +1651,44 @@ LWriter( +str(tmpdir / basename( path +)) + %22.xml @@ -1851,19 +1851,43 @@ differ(%5B +str(tmpdir / basename( cff +)) + %22.xml @@ -1888,20 +1888,63 @@ %22.xml%22, - +%0A str(tmpdir / basename( out +)) + %22.xml
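Decoded, the patch renames the transient output to <name>.out and points every XMLWriter at pytest's tmpdir, so the input font's XML dump lands there as well. The pattern, as a small sketch (the helper name is hypothetical):

from os.path import basename
from fontTools.misc.xmlWriter import XMLWriter

def dump_cff_xml(font, src_path, tmpdir):
    # <tmpdir>/<fontname>.xml keeps the source tree clean between runs
    writer = XMLWriter(str(tmpdir / basename(src_path)) + '.xml')
    font['CFF '].toXML(writer, font)
    del writer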
6e67a9e8eedd959d9d0193e746a375099e9784ef
Use bytes instead of str where appropriate for Python 3
toodlepip/consoles.py
toodlepip/consoles.py
class Console(object):
    def __init__(self, shell, stdout):
        self._shell = shell
        self._stdout = stdout

    def run(self, description, command, **kwargs):
        return self.run_all(description, [command], **kwargs)

    def run_all(self, description, commands, quiet=False, cwd=None):
        stdout = None if quiet else self._stdout
        # TODO: Test printing description
        # TODO: detect terminal
        self._stdout.write('\033[1m')
        self._stdout.write(description)
        self._stdout.write("\n")
        self._stdout.write('\033[0m')
        self._stdout.flush()
        for command in commands:
            # TODO: print command
            result = self._shell.run(
                command,
                stdout=stdout,
                stderr=stdout,
                cwd=cwd,
                allow_error=True
            )
            if result.return_code != 0:
                return Result(result.return_code)
        return Result(0)


class Result(object):
    def __init__(self, return_code):
        self.return_code = return_code
Python
0.000561
@@ -459,24 +459,25 @@ tdout.write( +b '%5C033%5B1m')%0A @@ -513,16 +513,31 @@ cription +.encode(%22utf8%22) )%0A @@ -557,16 +557,17 @@ t.write( +b %22%5Cn%22)%0A @@ -591,16 +591,17 @@ t.write( +b '%5C033%5B0m
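Decoded, the fix prefixes the ANSI escape literals with b and encodes the description, since on Python 3 a binary stream rejects str. A self-contained sketch of the resulting pattern:

import io

def write_heading(stdout, description):
    stdout.write(b'\033[1m')                  # bold on
    stdout.write(description.encode('utf8'))  # str -> bytes before writing
    stdout.write(b'\n')
    stdout.write(b'\033[0m')                  # bold off

write_heading(io.BytesIO(), 'Building project')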
8034a521692d9857b0d36e2efced40bb69f5efda
Refactor test for and operator
tests/test_operators.py
tests/test_operators.py
from pytest import mark from intervals import IntInterval class TestComparisonOperators(object): def test_eq_operator(self): assert IntInterval([1, 3]) == IntInterval([1, 3]) assert not IntInterval([1, 3]) == IntInterval([1, 4]) def test_ne_operator(self): assert not IntInterval([1, 3]) != IntInterval([1, 3]) assert IntInterval([1, 3]) != IntInterval([1, 4]) def test_gt_operator(self): assert IntInterval([1, 3]) > IntInterval([0, 2]) assert not IntInterval([2, 3]) > IntInterval([2, 3]) @mark.parametrize(('comparison', 'result'), ( (IntInterval([1, 3]) >= IntInterval([0, 2]), True), (IntInterval((1, 4)) >= 1, False), (IntInterval((1, 6)) >= [1, 6], False), (IntInterval((1, 6)) >= 0, True) )) def test_ge_operator(self, comparison, result): assert comparison == result def test_lt_operator(self): assert IntInterval([0, 2]) < IntInterval([1, 3]) assert not IntInterval([2, 3]) < IntInterval([2, 3]) def test_le_operator(self): assert IntInterval([0, 2]) <= IntInterval([1, 3]) assert IntInterval([1, 3]) >= IntInterval([1, 3]) def test_integer_comparison(self): assert IntInterval([2, 2]) <= 3 assert IntInterval([1, 3]) >= 0 assert IntInterval([2, 2]) == 2 assert IntInterval([2, 2]) != 3 @mark.parametrize('value', ( IntInterval([0, 2]), 1, (-1, 1), )) def test_contains_operator_for_inclusive_interval(self, value): assert value in IntInterval([-1, 2]) @mark.parametrize('value', ( IntInterval([0, 2]), 2, '[-1, 1]', )) def test_contains_operator_for_non_inclusive_interval(self, value): assert value not in IntInterval((-1, 2)) class TestDiscreteRangeComparison(object): @mark.parametrize(('interval', 'interval2'), ( ([1, 3], '[1, 4)'), ('(1, 5]', '[2, 5]'), ('(1, 6)', '[2, 5]'), )) def test_eq_operator(self, interval, interval2): assert IntInterval(interval) == IntInterval(interval2) class TestBinaryOperators(object): @mark.parametrize(('interval1', 'interval2', 'result', 'empty'), ( ((2, 3), (3, 4), (3, 3), True), ((2, 3), [3, 4], '[3, 3)', True), ((2, 5), (3, 10), (3, 5), False), ('(2, 3]', '[3, 4)', [3, 3], False), ('(2, 10]', '[3, 40]', [3, 10], False), ((2, 10), (3, 8), (3, 8), False), )) def test_and_operator(self, interval1, interval2, result, empty): assert ( IntInterval(interval1) & IntInterval(interval2) == IntInterval(result) ) assert IntInterval(result).empty == empty
Python
0
@@ -2223,17 +2223,8 @@ ult' -, 'empty' ), ( @@ -2255,22 +2255,16 @@ , (3, 3) -, True ),%0A @@ -2291,22 +2291,16 @@ '%5B3, 3)' -, True ),%0A @@ -2326,23 +2326,16 @@ , (3, 5) -, False ),%0A @@ -2364,23 +2364,16 @@ , %5B3, 3%5D -, False ),%0A @@ -2405,23 +2405,16 @@ %5B3, 10%5D -, False ),%0A @@ -2440,23 +2440,16 @@ , (3, 8) -, False ),%0A ) @@ -2514,15 +2514,8 @@ sult -, empty ):%0A @@ -2639,41 +2639,353 @@ )%0A +%0A - assert IntInterval(result [email protected](('interval1', 'interval2', 'empty'), (%0A ((2, 3), (3, 4), True),%0A ((2, 3), %5B3, 4%5D, True),%0A (%5B2, 3%5D, (3, 4), True),%0A ('(2, 3%5D', '%5B3, 4)', False),%0A ))%0A def test_and_operator_for_empty_results(self, interval1, interval2, empty):%0A assert (IntInterval(interval1) & IntInterval(interval2) ).em
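Decoded, the refactor splits one parametrized test in two: the intersection-value cases lose their 'empty' column, and a dedicated test covers emptiness on its own:

@mark.parametrize(('interval1', 'interval2', 'empty'), (
    ((2, 3), (3, 4), True),
    ((2, 3), [3, 4], True),
    ([2, 3], (3, 4), True),
    ('(2, 3]', '[3, 4)', False),
))
def test_and_operator_for_empty_results(self, interval1, interval2, empty):
    assert (IntInterval(interval1) & IntInterval(interval2)).empty == empty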
5a817413b91adece6f5191d7fe0bf5b4baa430af
Fix test
tests/test_retrieval.py
tests/test_retrieval.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy from numpy.testing import assert_allclose from theano import tensor from dictlearn.vocab import Vocabulary from dictlearn.retrieval import ( vec2str, Dictionary, Retrieval) from dictlearn.ops import RetrievalOp from tests.util import ( TEST_VOCAB, TEST_DICT_JSON, temporary_content_path) def test_vec2str(): vector = map(ord, 'abc') + [0, 0] assert vec2str(vector) == 'abc' def test_retrieval(): with temporary_content_path(TEST_VOCAB) as path: vocab = Vocabulary(path) with temporary_content_path(TEST_DICT_JSON) as path: dict_ = Dictionary(path) # check a super simple case batch = [['a']] defs, def_map = Retrieval(vocab, dict_).retrieve(batch) assert defs == [[8, 4, 5, 9], [8, 6, 7, 9]] assert def_map == [(0, 0, 0), (0, 0, 1)] # check that vectors are handled correctly batch = numpy.array([ord('d'), ord(' '), ord('c'), 0, 0])[None, None, :] defs, def_map = Retrieval(vocab, dict_).retrieve(batch) assert defs == [[8, 3, 4, 9]] assert def_map == [(0, 0, 0)] # check a complex case batch = [['a', 'b', 'b'], ['d c', 'a', 'b']] defs, def_map = Retrieval(vocab, dict_).retrieve(batch) assert defs == [[8, 4, 5, 9], [8, 6, 7, 9], [8, 7, 6, 9], [8, 3, 4, 9]] assert def_map == [(0, 0, 0), (0, 0, 1), (0, 1, 2), (0, 2, 2), (1, 0, 3), (1, 1, 0), (1, 1, 1), (1, 2, 2)] # check a complex case with exclude top k batch = [['a', 'b', 'c', 'd'], ['a', 'e', 'b']] exclude_top_k = 4 # should exclude 'a', 'b', 'c', 'd' and only define 'e' defs, def_map = Retrieval(vocab, dict_, exclude_top_k=exclude_top_k).retrieve(batch) assert defs == [[8, 4, 5, 6, 9]] assert def_map == [(1, 1, 0)] # check the op retrieval_op = RetrievalOp(Retrieval(vocab, dict_)) batch = tensor.as_tensor_variable( [[[ord('d'), ord(' '), ord('c'), 0, 0], [ord('e'), 0, 0, 0, 0]]]) defs_var, mask_var, def_map_var = retrieval_op(batch) assert defs_var.eval().tolist() == [[8, 3, 4, 9, 0], [8, 4, 5, 6, 9]] assert_allclose(mask_var.eval(), [[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]]) assert def_map_var.eval().tolist() == [[0, 0, 0], [0, 1, 1]]
Python
0.000004
@@ -1788,17 +1788,17 @@ top_k = -4 +7 # shoul
006e933a44241e30e1e54c24966d0859aa7c853d
test hub via vanilla, to check imports
tests/unit/test_core.py
tests/unit/test_core.py
import time import vanilla.core def test_lazy(): class C(object): @vanilla.core.lazy def now(self): return time.time() c = C() want = c.now time.sleep(0.01) assert c.now == want def test_Scheduler(): s = vanilla.core.Scheduler() s.add(4, 'f2') s.add(9, 'f4') s.add(3, 'f1') item3 = s.add(7, 'f3') assert 0.003 - s.timeout() < 0.001 assert len(s) == 4 s.remove(item3) assert 0.003 - s.timeout() < 0.001 assert len(s) == 3 assert s.pop() == ('f1', ()) assert 0.004 - s.timeout() < 0.001 assert len(s) == 2 assert s.pop() == ('f2', ()) assert 0.009 - s.timeout() < 0.001 assert len(s) == 1 assert s.pop() == ('f4', ()) assert not s class TestHub(object): def test_spawn(self): h = vanilla.core.Hub() a = [] h.spawn_later(10, lambda: a.append(1)) h.spawn(lambda: a.append(2)) h.sleep(1) assert a == [2] h.sleep(10) assert a == [2, 1] def test_exception(self): h = vanilla.core.Hub() def raiser(): raise Exception() h.spawn(raiser) h.sleep(1) a = [] h.spawn(lambda: a.append(2)) h.sleep(1) assert a == [2] def test_stop(self): h = vanilla.core.Hub() @h.spawn def _(): h.sleep(20) h.stop()
Python
0
@@ -6,16 +6,31 @@ t time%0A%0A +import vanilla%0A import v @@ -839,27 +839,22 @@ vanilla. -core. Hub()%0A + @@ -1080,37 +1080,32 @@ h = vanilla. -core. Hub()%0A%0A d @@ -1304,32 +1304,32 @@ est_stop(self):%0A + h = vani @@ -1332,21 +1332,16 @@ vanilla. -core. Hub()%0A%0A
4f2de7a3ef26e26089626ded498e304df328591f
remove psutil checks (#5283)
tests/unit/test_info.py
tests/unit/test_info.py
import os import re import shutil import pytest from dvc.info import get_dvc_info, psutil # Python's version is in the shape of: # <major>.<minor>.<patch>[{a|b|rc}N][.postN][.devN] # `patch` is more than enough for the tests. # Refer PEP-0440 for complete regex just in-case. PYTHON_VERSION_REGEX = r"Python \d\.\d+\.\d+\S*" @pytest.mark.parametrize("scm_init", [True, False]) def test_info_in_repo(scm_init, tmp_dir): tmp_dir.init(scm=scm_init, dvc=True) # Create `.dvc/cache`, that is needed to check supported link types. os.mkdir(tmp_dir.dvc.cache.local.cache_dir) dvc_info = get_dvc_info() assert re.search(r"DVC version: \d+\.\d+\.\d+.*", dvc_info) assert re.search(f"Platform: {PYTHON_VERSION_REGEX} on .*", dvc_info) assert re.search(r"Supports: .*", dvc_info) assert re.search(r"Cache types: .*", dvc_info) if scm_init: assert "Repo: dvc, git" in dvc_info else: assert "Repo: dvc (no_scm)" in dvc_info def test_info_in_subdir(tmp_dir, scm, caplog): dvc_subdir = tmp_dir / "subdir" dvc_subdir.mkdir() with dvc_subdir.chdir(): dvc_subdir.init(scm=False, dvc=True) with dvc_subdir.dvc.config.edit() as conf: del conf["core"]["no_scm"] dvc_info = get_dvc_info() assert "Repo: dvc (subdir), git" in dvc_info def test_info_in_broken_git_repo(tmp_dir, dvc, scm, caplog): shutil.rmtree(dvc.scm.dir) dvc_info = get_dvc_info() assert "Repo: dvc, git (broken)" in dvc_info def test_caches(tmp_dir, dvc, caplog): tmp_dir.add_remote( name="sshcache", url="ssh://example.com/path", default=False ) with tmp_dir.dvc.config.edit() as conf: conf["cache"]["ssh"] = "sshcache" dvc_info = get_dvc_info() # Order of cache types is runtime dependent assert re.search("Caches: (local, ssh|ssh, local)", dvc_info) def test_remotes_empty(tmp_dir, dvc, caplog): # No remotes are configured dvc_info = get_dvc_info() assert "Remotes: None" in dvc_info def test_remotes(tmp_dir, dvc, caplog): tmp_dir.add_remote(name="server", url="ssh://localhost", default=False) tmp_dir.add_remote( name="r1", url="azure://example.com/path", default=False ) tmp_dir.add_remote(name="r2", url="remote://server/path", default=False) dvc_info = get_dvc_info() assert re.search("Remotes: (ssh, azure|azure, ssh)", dvc_info) @pytest.mark.skipif(psutil is None, reason="No psutil.") def test_fs_info_in_repo(tmp_dir, dvc, caplog): os.mkdir(dvc.cache.local.cache_dir) dvc_info = get_dvc_info() assert re.search(r"Cache directory: .* on .*", dvc_info) assert re.search(r"Workspace directory: .* on .*", dvc_info) def test_info_outside_of_repo(tmp_dir, caplog): dvc_info = get_dvc_info() assert re.search(r"DVC version: \d+\.\d+\.\d+.*", dvc_info) assert re.search(f"Platform: {PYTHON_VERSION_REGEX} on .*", dvc_info) assert re.search(r"Supports: .*", dvc_info) assert not re.search(r"Cache types: .*", dvc_info) assert "Repo:" not in dvc_info @pytest.mark.skipif(psutil is None, reason="No psutil.") def test_fs_info_outside_of_repo(tmp_dir, caplog): dvc_info = get_dvc_info() assert re.search(r"DVC version: \d+\.\d+\.\d+.*", dvc_info) assert re.search(f"Platform: {PYTHON_VERSION_REGEX} on .*", dvc_info) assert re.search(r"Supports: .*", dvc_info)
Python
0
@@ -80,16 +80,8 @@ info -, psutil %0A%0A# @@ -2409,65 +2409,8 @@ )%0A%0A%0A [email protected](psutil is None, reason=%22No psutil.%22)%0A def @@ -3013,65 +3013,8 @@ o%0A%0A%0A [email protected](psutil is None, reason=%22No psutil.%22)%0A def
f20e76034eef1ea8b7b7f98ace521a3a6346103c
remove default 0.0.0.0 for ip address to pave the way for a unique constraint on the ip address column. Of course this means that network_id needs to be nullable. All of this weakens this table in a way that is making me unhappy. This can and will be solved with more clever check constraints (i.e. network_id can't be null if ip address is not null) AND by transforming IP address into a many-to-many assignment (i.e. an interface does not HAVE an ip as an intrinsic characteristic of itself; rather, it is assigned after the fact).
1.2.1/src/lib/python2.5/aquilon/aqdb/hw/interface.py
1.2.1/src/lib/python2.5/aquilon/aqdb/hw/interface.py
#!/ms/dist/python/PROJ/core/2.5.0/bin/python # ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*- # $Header$ # $Change$ # $DateTime$ # $Author$ # Copyright (C) 2008 Morgan Stanley # # This module is part of Aquilon """Classes and Tables relating to network interfaces""" from datetime import datetime import sys import os if __name__ == '__main__': DIR = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(0, os.path.realpath(os.path.join(DIR, '..', '..', '..'))) import aquilon.aqdb.depends from sqlalchemy import (Column, Table, Integer, Sequence, String, Index, Boolean, CheckConstraint, UniqueConstraint, DateTime, ForeignKey, PrimaryKeyConstraint, insert, select) from sqlalchemy.orm import mapper, relation, deferred from aquilon.aqdb.column_types.aqstr import AqStr from aquilon.aqdb.column_types.IPV4 import IPV4 from aquilon.aqdb.db_factory import Base from aquilon.aqdb.net.network import Network #TODO: column type for MAC #reg = re.compile('^([a-f0-9]{2,2}:){5,5}[a-f0-9]{2,2}$') #if (not reg.match(self.mac)): # raise ArgumentError ('Invalid MAC address: '+self.mac) class Interface(Base): __tablename__ = 'interface' id = Column(Integer, Sequence('interface_id_seq'), primary_key=True) interface_type = Column(AqStr(32), nullable = False) #TODO: index mac = Column(AqStr(18), nullable = False) ip = Column(IPV4, default='0.0.0.0') network_id = Column(Integer, ForeignKey(Network.__table__.c.id, name = 'iface_net_id_fk'), nullable = False) creation_date = deferred(Column('creation_date', DateTime, default = datetime.now, nullable = False)) comments = deferred(Column( 'comments',String(255))) #TODO FK to IP table) network = relation(Network, backref = 'interfaces' ) __mapper_args__ = {'polymorphic_on' : interface_type} interface = Interface.__table__ interface.primary_key.name = 'interface_pk' interface.append_constraint(UniqueConstraint('mac', name = 'mac_addr_uk')) Index('iface_ip_idx', interface.c.ip) Index('iface_net_id_idx', interface.c.network_id) def populate(*args, **kw): from aquilon.aqdb.db_factory import db_factory, Base from sqlalchemy import insert dbf = db_factory() Base.metadata.bind = dbf.engine if 'debug' in args: Base.metadata.bind.echo = True s = dbf.session() interface.create(checkfirst=True) if len(s.query(Interface).all()) < 1: #print 'no interfaces yet' pass if Base.metadata.bind.echo == True: Base.metadata.bind.echo == False
Python
0.000001
@@ -1548,25 +1548,23 @@ V4, -default='0.0.0.0' +nullable = True )%0A @@ -1747,20 +1747,19 @@ lable = -Fals +Tru e)%0A%0A @@ -2312,16 +2312,22 @@ name = ' +iface_ mac_addr @@ -2337,44 +2337,86 @@ '))%0A -Index('iface_ip_idx', interface.c.ip +interface.append_constraint(UniqueConstraint('ip', name = 'iface_ip_addr_uk') )%0AIn
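Decoded, the patch makes ip nullable with no default (dropping '0.0.0.0'), relaxes network_id to nullable, and replaces the plain ip index with a unique constraint named iface_ip_addr_uk. In generic SQLAlchemy terms (a sketch — the real code uses Aquilon's IPV4 column type, not String):

from sqlalchemy import Column, Integer, MetaData, String, Table, UniqueConstraint

metadata = MetaData()
interface = Table(
    'interface', metadata,
    Column('id', Integer, primary_key=True),
    Column('ip', String(15), nullable=True),          # was default='0.0.0.0'
    Column('network_id', Integer, nullable=True),     # was nullable=False
    UniqueConstraint('ip', name='iface_ip_addr_uk'),  # replaces the ip index
)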
bae05fd5c15e9360d09dd9456b6d4f1122ddf213
Print the URL of dependency JARs being downloaded in buck build
tools/download_jar.py
tools/download_jar.py
#!/usr/bin/python # Copyright (C) 2013 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from hashlib import sha1 from optparse import OptionParser from os import link, makedirs, path import shutil from subprocess import check_call, CalledProcessError from sys import stderr from zipfile import ZipFile, BadZipfile, LargeZipFile def hashfile(p): d = sha1() with open(p, 'rb') as f: while True: b = f.read(8192) if not b: break d.update(b) return d.hexdigest() def safe_mkdirs(d): if path.isdir(d): return try: makedirs(d) except OSError as err: if not path.isdir(d): raise err opts = OptionParser() opts.add_option('-o', help='local output file') opts.add_option('-u', help='URL to download') opts.add_option('-v', help='expected content SHA-1') opts.add_option('-x', action='append', help='file to delete from ZIP') opts.add_option('--exclude_java_sources', action='store_true') args, _ = opts.parse_args() root_dir = args.o while root_dir: root_dir, n = path.split(root_dir) if n == 'buck-out': break cache_ent = path.join( root_dir, 'buck-cache', '%s-%s' % (path.basename(args.o), sha1(args.u).hexdigest())) if not path.exists(cache_ent): try: safe_mkdirs(path.dirname(cache_ent)) check_call(['curl', '-sfo', cache_ent, args.u]) except (OSError, CalledProcessError) as err: print >>stderr, "error using curl: %s" % str(err) exit(1) if args.v: have = hashfile(cache_ent) if args.v != have: o = cache_ent[len(root_dir) + 1:] print >>stderr, ( '%s:\n' + 'expected %s\n' + 'received %s\n' + ' %s\n') % (args.u, args.v, have, o) exit(1) exclude = [] if args.x: exclude += args.x if args.exclude_java_sources: try: zf = ZipFile(cache_ent, 'r') try: for n in zf.namelist(): if n.endswith('.java'): exclude.append(n) finally: zf.close() except (BadZipfile, LargeZipFile) as err: print >>stderr, "error opening %s: %s" % (cache_ent, str(err)) exit(1) safe_mkdirs(path.dirname(args.o)) if exclude: shutil.copyfile(cache_ent, args.o) check_call(['zip', '-d', args.o] + exclude) else: try: link(cache_ent, args.o) except OSError as err: symlink(cache_ent, args.o)
Python
0.000603
@@ -1794,16 +1794,59 @@ e_ent))%0A + print %3E%3Estderr, %22Download %25s%22 %25 args.u%0A chec
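Decoded, the patch adds one status line just before the curl invocation (the script is Python 2, hence the stream-redirect print form); in context the hunk becomes:

safe_mkdirs(path.dirname(cache_ent))
print >>stderr, "Download %s" % args.u   # announce the URL being fetched
check_call(['curl', '-sfo', cache_ent, args.u])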
659db558712a49c7d288b48e9f4b47b7f5b41f6a
Fix PyTorch wrapper
thinc/extra/wrappers.py
thinc/extra/wrappers.py
import contextlib from ..compat import BytesIO from ..neural._classes.model import Model try: import cupy import torch.autograd import torch.optim import torch import torch.utils.dlpack except ImportError: torch = None cupy = None pass def xp2torch(xp_tensor): if hasattr(xp_tensor, 'toDlpack'): return torch.utils.dlpack.from_dlpack(xp_tensor.toDlpack()) else: return torch.Tensor(xp_tensor) def torch2xp(torch_tensor): if torch_tensor.is_cuda: return cupy.fromDlpack(torch.utils.dlpack.to_dlpack(torch_tensor)) else: return torch_tensor.detach().numpy() class PyTorchWrapper(Model): '''Wrap a PyTorch model, so that it has the same API as Thinc models. To optimize the model, you'll need to create a PyTorch optimizer and call optimizer.step() after each batch --- see examples/wrap_pytorch.py ''' def __init__(self, model): Model.__init__(self) self._model = model self._optimizer = None def begin_update(self, x_data, drop=0.): '''Return the output of the wrapped PyTorch model for the given input, along with a callback to handle the backward pass. ''' x_var = torch.autograd.Variable(xp2torch(x_data), requires_grad=True) # Make prediction y_var = self._model(x_var) def backward_pytorch(dy_data, sgd=None): dy_var = xp2torch(dy_data) torch.autograd.backward((y_var,), grad_tensors=(dy_var,)) if sgd is not None: if self._optimizer is None: self._optimizer = self._create_optimizer(sgd) self._optimizer.step() self._optimizer.zero_grad() return torch2xp(x_var.grad) return torch2xp(y_var), backward_pytorch def _create_optimizer(self, sgd): params = self._model.parameters() if sgd.b1 != 0 and sgd.b2 != 0: optimizer = torch.optim.Adam(params, lr=sgd.alpha, betas=(sgd.b1, sgd.b2)) elif sgd.b2 == 0: optimizer = torch.optim.SGD(params, lr=sgd.alpha, momentum=sgd.b1) else: raise NotImplementedError return optimizer def to_disk(self, path): # TODO: Untested torch.save(self._model.state_dict(), str(path)) def from_disk(self, path): # TODO: Untested self._model.load_state_dict(torch.load(path)) def to_bytes(self): # TODO: Untested filelike = BytesIO() torch.save(self._model.state_dict(), filelike) filelike.seek(0) return filelike.getvalue() def from_bytes(self, data): # TODO: Untested filelike = BytesIO(data) filelike.seek(0) self._model.load_state_dict(torch.load(filelike)) def to_gpu(self, device_num): self._model.cuda(device_num) def to_cpu(self): self._model.cpu() def resize_output(self, new_dim): #self.weight = nn.Parameter(F.pad(self.weight, ...)) # add classes #self.weight = nn.Parameter(F.pad(model.weight, ...)) # add classes raise NotImplementedError def resize_input(self): raise NotImplementedError @contextlib.contextmanager def use_params(self, params): # pragma: no cover if self.id in params: backup = self.to_bytes() self.from_bytes(params[self.id]) else: backup = None yield if backup is not None: self.from_bytes(backup) class PyTorchWrapperRNN(PyTorchWrapper): '''Wrap a PyTorch RNN model ''' def __call__(self, x_data, h_0=None): x_var = torch.autograd.Variable(xp2torch(x_data), requires_grad=False) # Make prediction out, h_n = self._model(x_var, h_0) return (self.ops.asarray(out.data), h_n) def begin_update(self, x_data, h_0=None, drop=0.): '''Return the output of the wrapped PyTorch model for the given input, along with a callback to handle the backward pass. 
''' x_var = torch.autograd.Variable(xp2torch(x_data), requires_grad=True) # Make prediction out, h_n = self._model(x_var, h_0) # Shapes will be: # out = seq_len, batch, hidden_size * num_directions # h_n = num_layers * num_directions, batch, hidden_size def backward_pytorch_rnn(d_data, sgd=None): dy_data, _ = d_data dout = xp2torch(dy_data) torch.autograd.backward((out,), grad_tensors=(dout,)) if sgd is not None: if self._optimizer is None: self._optimizer = self._create_optimizer(sgd) self._optimizer.step() self._optimizer.zero_grad() return torch2xp(x_var.grad) return (torch2xp(out), h_n), backward_pytorch_rnn def resize_output(self, new_dim): #self.weight = nn.Parameter(F.pad(self.weight, ...)) # add classes #self.weight = nn.Parameter(F.pad(model.weight, ...)) # add classes raise NotImplementedError def resize_input(self): raise NotImplementedError @contextlib.contextmanager def use_params(self, params): # pragma: no cover if self.id in params: backup = self.to_bytes() self.from_bytes(params[self.id]) else: backup = None yield if backup is not None: self.from_bytes(backup)
Python
0.00004
@@ -104,16 +104,58 @@ rt cupy%0A +except ImportError:%0A cupy = None%0A%0Atry:%0A impo @@ -282,33 +282,8 @@ None -%0A cupy = None%0A pass %0A%0Ade @@ -447,14 +447,18 @@ rch. -Tensor +from_numpy (xp_ @@ -1222,32 +1222,67 @@ ss.%0A '''%0A + x_torch = xp2torch(x_data)%0A x_var =
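Decoded, the patch splits the single guarded import so each optional dependency degrades independently — previously a missing torch also forced cupy to None:

try:
    import cupy
except ImportError:
    cupy = None

try:
    import torch.autograd
    import torch.optim
    import torch
    import torch.utils.dlpack
except ImportError:
    torch = None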
5d8b1a62882f37bb79e8d000e2b2ecb69742cce4
Add API endpoint for generating authentication token
timeside/server/urls.py
timeside/server/urls.py
# -*- coding: utf-8 -*- from django.conf.urls import include, url from django.contrib import admin from django.conf import settings from rest_framework import routers from rest_framework.documentation import include_docs_urls from timeside.server import views from timeside.server.utils import TS_ENCODERS_EXT EXPORT_EXT = "|".join(TS_ENCODERS_EXT.keys()) admin.autodiscover() api_router = routers.DefaultRouter() api_router.register(r'selections', views.SelectionViewSet) api_router.register(r'items', views.ItemViewSet) api_router.register(r'experiences', views.ExperienceViewSet) api_router.register(r'processors', views.ProcessorViewSet) api_router.register(r'subprocessors', views.SubProcessorViewSet) api_router.register(r'results', views.ResultViewSet) api_router.register(r'presets', views.PresetViewSet) api_router.register(r'tasks', views.TaskViewSet) api_router.register(r'users', views.UserViewSet) api_router.register(r'analysis', views.AnalysisViewSet) api_router.register(r'analysis_tracks', views.AnalysisTrackViewSet, base_name='analysistrack') api_router.register(r'annotation_tracks', views.AnnotationTrackViewSet) api_router.register(r'annotations', views.AnnotationViewSet) urlpatterns = [ # ----- ADMIN ------- url(r'^admin/', include(admin.site.urls)), # ----- API --------- url(r'^api/', include(api_router.urls)), # Docs url(r'^api/docs/', include_docs_urls(title='Timeside Web API')), # Items url(r'^api/items/(?P<uuid>[0-9a-z-]+)/', include([ url(r'^waveform/', views.ItemWaveView.as_view(), name="item-waveform"), # Get transcoded audio # Ex: /api/item/--<uuid>--/download/ogg url(r'^download/(?P<extension>' + EXPORT_EXT + ')$', views.ItemTranscode.as_view(), name="item-transcode-api"), ]), ), # ----- Timeside ------ url(r'^$', views.ItemList.as_view(), name="timeside-item-list"), # Items # ex: /item/5/ url(r'^items/(?P<uuid>[0-9a-z-]+)/', include([ url(r'^$', views.ItemDetail.as_view(), name="timeside-item-detail"), url(r'^export/$', views.ItemDetailExport.as_view(), name='timeside-item-export'), url(r'^download/(?P<extension>' + EXPORT_EXT + ')$', views.ItemTranscode.as_view(), name="item-transcode"), ]) ), # Results url(r'^api/results/(?P<uuid>[0-9a-z-]+)/visual/', views.ResultVisualizationViewSet.as_view(), name="timeside-result-visualization"), url(r'^results/(?P<pk>.*)/json/$', views.ResultAnalyzerView.as_view(), name="timeside-result-json"), url(r'^results/(?P<pk>.*)/png/$', views.ResultGrapherView.as_view(), name="timeside-result-png"), url(r'^results/(?P<pk>.*)/audio/$', views.ResultEncoderView.as_view(), name="timeside-result-audio"), url(r'^results/(?P<pk>.*)/(?P<res_id>.*)/elan/$', views.ResultAnalyzerToElanView.as_view(), name="timeside-result-elan"), url(r'^results/(?P<pk>.*)/(?P<res_id>.*)/sonic/$', views.ResultAnalyzerToSVView.as_view(), name="timeside-result-sonic"), # Player url(r'^player/$', views.PlayerView.as_view(), name="timeside-player"), ] if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
Python
0.000001
@@ -161,16 +161,79 @@ routers%0A +from rest_framework.authtoken import views as authtoken_views%0A%0A from res @@ -1517,16 +1517,139 @@ API')),%0A + # API endpoint for Generating Authentification token%0A url(r'%5Eapi/token-auth/', authtoken_views.obtain_auth_token),%0A # Ite
e3832f6301ea7e043552502f88c7b3a59e35d3bd
Improve formatting
tools/glidein_status.py
tools/glidein_status.py
#!/bin/env python # # glidein_status.py # # Description: # Equivalent to condor_status, but with glidein specific info # # Usage: # glidein_status.py [-help] [-gatekeeper] [-glidecluster] [-withmonitor] # # Author: # Igor Sfiligoi # import time import sys,os.path sys.path.append(os.path.join(sys.path[0],"../lib")) import condorMonitor pool_name=None constraint=None want_gk=False want_glidecluster=False want_monitor=False for arg in sys.argv: if arg=='-gatekeeper': want_gk=True elif arg=='-glidecluster': want_glidecluster=True elif arg=='-withmonitor': want_monitor=True elif arg in ('-h','-help'): print "glidein_status.py [-help] [-gatekeeper] [-glidecluster] [-withmonitor]" sys.exit(1) if not want_monitor: constraint='IS_MONITOR_VM =!= TRUE' format_list=[('Machine','s'),('State','s'),('Activity','s'), ('GLIDEIN_Site','s'), ('GLIDEIN_Factory','s'),('GLIDEIN_Name','s'),('GLIDEIN_Entry_Name','s'),('EnteredCurrentActivity','i')] attrs=['State','Activity','GLIDEIN_Site','GLIDEIN_Factory','GLIDEIN_Name','GLIDEIN_Entry_Name','EnteredCurrentActivity'] if want_gk: format_list.append(('GLIDEIN_Gatekeeper','s')) format_list.append(('GLIDEIN_GridType','s')) attrs.append('GLIDEIN_Gatekeeper') attrs.append('GLIDEIN_GridType') if want_glidecluster: format_list.append(('GLIDEIN_ClusterId','i')) format_list.append(('GLIDEIN_ProcId','i')) format_list.append(('GLIDEIN_Schedd','s')) attrs.append('GLIDEIN_ClusterId') attrs.append('GLIDEIN_ProcId') attrs.append('GLIDEIN_Schedd') cs=condorMonitor.CondorStatus(pool_name=pool_name) cs.load(constraint=constraint,format_list=format_list) data=cs.stored_data keys=data.keys() # sort on the Machine attribute def machine_cmp(x,y): res=cmp(data[x]['Machine'],data[y]['Machine']) if res==0: res=cmp(x,y) return res keys.sort(machine_cmp) counts_header=('Total','Owner','Claimed/Busy','Claimed/Retiring','Claimed/Other','Unclaimed','Matched','Other') now=long(time.time()) def fmt_time(t): diff=now-t diff_secs=diff%60 diff=diff/60 diff_mins=diff%60 diff=diff/60 diff_hours=diff%24 diff_days=diff/24 return "%i+%02i:%02i:%02i"%(diff_days,diff_hours,diff_mins,diff_secs) print_mask="%-39s %-9s" if want_gk: print_mask+=" %-5s %-43s" print_mask+=" %-24s %-14s" if want_glidecluster: print_mask+=" %-39s %-14s" print_mask+=" %-9s %-8s %-10s" header=('Name','Site') if want_gk: header+=('Grid','Gatekeeper') header+=('Factory','Entry') if want_glidecluster: header+=('GlideSchedd','GlideCluster') header+=('State','Activity','ActvtyTime') print print print_mask%header print counts={'Total':{}} for c in counts_header: counts['Total'][c]=0 for vm_name in keys: el=data[vm_name] cel={} # this will have all the needed attributes (??? if nothing else) for a in attrs: if el.has_key(a): cel[a]=el[a] else: cel[a]='???' 
if cel['EnteredCurrentActivity']!='???': cel['EnteredCurrentActivity']=fmt_time(long(cel['EnteredCurrentActivity'])) state=cel['State'] activity=cel['Activity'] for t in ('Total',): ct=counts[t] ct['Total']+=1 if state in ('Owner','Unclaimed','Matched'): ct[state]+=1 elif state=='Claimed': if activity in ('Busy','Retiring'): ct['%s/%s'%(state,activity)]+=1 else: ct['Claimed/Other']+=1 else: ct['Other']+=1 print_arr=(vm_name,cel['GLIDEIN_Site']) if want_gk: print_arr+=(cel['GLIDEIN_GridType'],cel['GLIDEIN_Gatekeeper']) print_arr+=("%s@%s"%(cel['GLIDEIN_Name'],cel['GLIDEIN_Factory']),cel['GLIDEIN_Entry_Name']) if want_glidecluster: print_arr+=(cel['GLIDEIN_Schedd'],"%i.%i"%(cel['GLIDEIN_ClusterId'],cel['GLIDEIN_ProcId'])) print_arr+=(state,activity,cel['EnteredCurrentActivity']) print print_mask%print_arr print count_print_mask="%39s" for c in counts_header: count_print_mask+=" %%%is"%len(c) print count_print_mask%(('',)+counts_header) for t in ('Total',): count_print_val=[t] for c in counts_header: count_print_val.append(counts[t][c]) print
Python
0.647729
@@ -4137,17 +4137,16 @@ %25len(c)%0A -%0A print co @@ -4301,15 +4301,59 @@ %5Bt%5D%5Bc%5D)%0A + print count_print_mask%25count_print_val%0A%0A print%0A%0A
4043468de4fc448b6fda670f33b7f935883793a7
add a test to ensure False is never passed to Git.execute
test/git/test_git.py
test/git/test_git.py
import os from test.testlib import * from git import Git, GitCommandError class TestGit(object): def setup(self): base = os.path.join(os.path.dirname(__file__), "../..") self.git = Git(base) @patch(Git, 'execute') def test_method_missing_calls_execute(self, git): git.return_value = '' self.git.version() assert_true(git.called) # assert_equal(git.call_args, ((("%s version " % self.git_bin_base),), {})) def test_it_transforms_kwargs_into_git_command_arguments(self): assert_equal(["-s"], self.git.transform_kwargs(**{'s': True})) assert_equal(["-s5"], self.git.transform_kwargs(**{'s': 5})) assert_equal(["--max-count"], self.git.transform_kwargs(**{'max_count': True})) assert_equal(["--max-count=5"], self.git.transform_kwargs(**{'max_count': 5})) assert_equal(["-s", "-t"], self.git.transform_kwargs(**{'s': True, 't': True})) def test_it_executes_git_to_shell_and_returns_result(self): assert_match('^git version [\d\.]{2}.*$', self.git.execute(["git","version"])) def test_it_accepts_stdin(self): filename = fixture_path("cat_file_blob") fh = open(filename, 'r') assert_equal("70c379b63ffa0795fdbfbc128e5a2818397b7ef8", self.git.hash_object(istream=fh, stdin=True)) fh.close() def test_it_returns_status_and_ignores_stderr(self): assert_equal((1, ""), self.git.this_does_not_exist(with_status=True)) @raises(GitCommandError) def test_it_raises_errors(self): self.git.this_does_not_exist(with_exceptions=True) def test_it_returns_stderr_in_output(self): # Note: no trailiing newline assert_match(r"^git: 'this-does-not-exist' is not a git-command", self.git.this_does_not_exist(with_stderr=True)) def test_it_does_not_strip_output_when_using_with_raw_output(self): # Note: trailing newline assert_match(r"^git: 'this-does-not-exist' is not a git-command" \ r"(\. See 'git --help'\.)?" + os.linesep, self.git.this_does_not_exist(with_stderr=True, with_raw_output=True)) def test_it_handles_large_input(self): output = self.git.execute(["cat", "/bin/bash"]) assert_true(len(output) > 4096) # at least 4k
Python
0
@@ -2390,8 +2390,283 @@ east 4k%0A +%0A @patch(Git, 'execute')%0A def test_it_ignores_false_kwargs(self, git):%0A # this_should_not_be_ignored=False implies it *should* be ignored%0A output = self.git.version( pass_this_kwarg=False )%0A assert_true( %22pass_this_kwarg%22 not in git.call_args%5B1%5D )%0A
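Decoded, the added test patches Git.execute and checks that a keyword argument passed as False is dropped before the call ever reaches git:

@patch(Git, 'execute')
def test_it_ignores_false_kwargs(self, git):
    # this_should_not_be_ignored=False implies it *should* be ignored
    output = self.git.version(pass_this_kwarg=False)
    assert_true("pass_this_kwarg" not in git.call_args[1])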
90f057e0cd2bd0a50f2daed799aa902bdb31eab1
fix TypeError with blank reqs
test/test_package.py
test/test_package.py
import os import sys import pytest from shutil import rmtree from os import path from lambda_uploader import package TESTING_TEMP_DIR = '.testing_temp' WORKING_TEMP_DIR = path.join(TESTING_TEMP_DIR, '.lambda_uploader_temp') PACKAGE_TEMP_DIR = path.join(WORKING_TEMP_DIR, 'lambda_package') def setup_module(module): print('calling setup') os.mkdir(TESTING_TEMP_DIR) def teardown_module(module): print('calling teardown') rmtree(TESTING_TEMP_DIR) def test_package_zip_location(): pkg = package.Package(TESTING_TEMP_DIR) assert pkg.zip_file == '.testing_temp/lambda_function.zip' def test_package_clean_workspace(): temp_workspace = path.join(TESTING_TEMP_DIR, package.TEMP_WORKSPACE_NAME) os.mkdir(temp_workspace) pkg = package.Package(TESTING_TEMP_DIR) pkg.clean_workspace() assert path.isdir(temp_workspace) is False def test_prepare_workspace(): temp_workspace = path.join(TESTING_TEMP_DIR, package.TEMP_WORKSPACE_NAME) pkg = package.Package(TESTING_TEMP_DIR) pkg.requirements(['pytest']) pkg.install_dependencies() assert path.isdir(temp_workspace) assert path.isdir(path.join(temp_workspace, 'venv')) if sys.platform == 'win32' or sys.platform == 'cygwin': assert path.isfile(path.join(temp_workspace, "venv\\Scripts\\pip.exe")) else: assert path.isfile(path.join(temp_workspace, 'venv/bin/pip')) def test_install_requirements(): temp_workspace = path.join(TESTING_TEMP_DIR, package.TEMP_WORKSPACE_NAME) pkg = package.Package(TESTING_TEMP_DIR) pkg.requirements(['pytest']) pkg.install_dependencies() site_packages = path.join(temp_workspace, 'venv/lib/python2.7/site-packages') if sys.platform == 'win32' or sys.platform == 'cygwin': site_packages = path.join(temp_workspace, "venv\\lib\\site-packages") assert path.isdir(path.join(site_packages, '_pytest')) def test_default_virtualenv(): temp_workspace = path.join(TESTING_TEMP_DIR, package.TEMP_WORKSPACE_NAME) reqs = ['pytest'] pkg = package.Package(TESTING_TEMP_DIR) pkg.requirements = reqs pkg._build_new_virtualenv() # ensure we picked a real venv path if using default behavior assert pkg._pkg_venv == ("%s/venv" % temp_workspace) def test_existing_virtualenv(): venv_dir = "virtualenv_test" temp_virtualenv = path.join(TESTING_TEMP_DIR, venv_dir) os.mkdir(temp_virtualenv) pkg = package.Package(TESTING_TEMP_DIR, temp_virtualenv) pkg.virtualenv(temp_virtualenv) pkg.install_dependencies() assert pkg._pkg_venv == temp_virtualenv def test_bad_existing_virtualenv(): pkg = package.Package(TESTING_TEMP_DIR) with pytest.raises(Exception): pkg.virtualenv('abc') def test_omit_virtualenv(): pkg = package.Package(TESTING_TEMP_DIR) pkg.virtualenv(False) pkg.install_dependencies() assert pkg._pkg_venv is False def test_package(): pkg = package.Package(TESTING_TEMP_DIR) pkg.package() assert path.isfile(path.join(TESTING_TEMP_DIR, 'lambda_function.zip')) def test_package_with_extras(): pkg = package.Package(TESTING_TEMP_DIR) pkg.extra_file(path.join('test', 'extra')) pkg.extra_file(path.join('test', 'dummyfile')) pkg.package() # test a single file expected_extra_file1 = path.join(PACKAGE_TEMP_DIR, 'dummyfile') assert path.isfile(expected_extra_file1) # test a recursive directory expected_extra_file2 = path.join(PACKAGE_TEMP_DIR, 'extra/foo/__init__.py') assert path.isfile(expected_extra_file2) def test_package_name(): pkg = package.Package(TESTING_TEMP_DIR, zipfile_name='test.zip') pkg.package() assert path.isfile(path.join(TESTING_TEMP_DIR, 'test.zip'))
Python
0.000001
@@ -2023,24 +2023,583 @@ pytest'))%0A%0A%0A +def test_install_no_requirements():%0A temp_workspace = path.join(TESTING_TEMP_DIR,%0A package.TEMP_WORKSPACE_NAME)%0A%0A pkg = package.Package(TESTING_TEMP_DIR)%0A pkg.requirements(%5B%5D)%0A pkg.install_dependencies()%0A%0A site_packages = path.join(temp_workspace,%0A 'venv/lib/python2.7/site-packages')%0A if sys.platform == 'win32' or sys.platform == 'cygwin':%0A site_packages = path.join(temp_workspace, %22venv%5C%5Clib%5C%5Csite-packages%22)%0A%0A assert path.isdir(path.join(site_packages, '_pytest'))%0A%0A%0A def test_def
374e10b908fbedf73f3ad40634bb680206da0652
Add setUp
test/test_quality.py
test/test_quality.py
# -*- coding: utf-8 -*- import unittest from pychord import QualityManager, Chord class TestQuality(unittest.TestCase): def setUp(self): self.quality_manager = QualityManager() def test_eq(self): q1 = self.quality_manager.get_quality("m7-5") q2 = self.quality_manager.get_quality("m7-5") self.assertEqual(q1, q2) def test_eq_alias_maj9(self): q1 = self.quality_manager.get_quality("M9") q2 = self.quality_manager.get_quality("maj9") self.assertEqual(q1, q2) def test_eq_alias_m7b5(self): q1 = self.quality_manager.get_quality("m7-5") q2 = self.quality_manager.get_quality("m7b5") self.assertEqual(q1, q2) def test_eq_alias_min(self): q1 = self.quality_manager.get_quality("m") q2 = self.quality_manager.get_quality("min") q3 = self.quality_manager.get_quality("-") self.assertEqual(q1, q2) self.assertEqual(q1, q3) def test_invalid_eq(self): q = self.quality_manager.get_quality("m7") with self.assertRaises(TypeError): print(q == 0) class TestQualityManager(unittest.TestCase): def test_singleton(self): quality_manager = QualityManager() quality_manager2 = QualityManager() self.assertIs(quality_manager, quality_manager2) class TestOverwriteQuality(unittest.TestCase): def test_overwrite(self): quality_manager = QualityManager() quality_manager.set_quality("11", (0, 4, 7, 10, 14, 17)) chord = Chord("C11") self.assertEqual(chord.components(), ['C', 'E', 'G', 'Bb', 'D', 'F']) def test_keep_existing_chord(self): chord = Chord("C11") quality_manager = QualityManager() quality_manager.set_quality("11", (0, 4, 7, 10, 14, 17)) self.assertEqual(chord.components(), ['C', 'G', 'Bb', 'D', 'F']) if __name__ == '__main__': unittest.main()
Python
0.000002
@@ -1375,33 +1375,32 @@ .TestCase):%0A -%0A def test_overwri @@ -1391,22 +1391,13 @@ def -test_overwrite +setUp (sel @@ -1400,32 +1400,37 @@ (self):%0A +self. quality_manager @@ -1444,32 +1444,68 @@ tyManager()%0A +%0A - +def test_overwrite(self):%0A self. quality_mana @@ -1738,51 +1738,13 @@ -quality_manager = QualityManager()%0A +self. qual
3707ed6b193a5eed9ec4505f6a283fdaff07ad5e
fix deprecated method
mifiel/api_auth.py
mifiel/api_auth.py
""" [ApiAuth](https://github.com/mgomes/api_auth) for python Based on https://github.com/pd/httpie-api-auth by Kyle Hargraves Usage: import requests requests.get(url, auth=ApiAuth(app_id, secret_key)) """ import hmac, base64, hashlib, datetime from requests.auth import AuthBase from urllib.parse import urlparse class ApiAuth(AuthBase): def __init__(self, access_id, secret_key): self.access_id = access_id self.secret_key = secret_key.encode('ascii') def __call__(self, request): method = request.method.upper() content_type = request.headers.get('content-type') if not content_type: content_type = '' content_md5 = request.headers.get('content-md5') if not content_md5: m = hashlib.md5() body = request.body if not body: body = '' m.update(body.encode('ascii')) content_md5 = base64.b64encode(m.digest()).decode() request.headers['content-md5'] = content_md5 httpdate = request.headers.get('date') if not httpdate: now = datetime.datetime.utcnow() httpdate = now.strftime('%a, %d %b %Y %H:%M:%S GMT') request.headers['Date'] = httpdate url = urlparse(request.url) path = url.path if url.query: path = path + '?' + url.query canonical_string = '%s,%s,%s,%s,%s' % (method, content_type, content_md5, path, httpdate) digest = hmac.new( self.secret_key, canonical_string.encode('ascii'), hashlib.sha1 ).digest() signature = base64.encodestring(digest).rstrip().decode() request.headers['Authorization'] = 'APIAuth %s:%s' % (self.access_id, signature) return request
Python
0.000053
@@ -1491,22 +1491,21 @@ 4.encode -string +bytes (digest)
c0894d3c14b8273364454dfa13c94311578ff698
update for diverse usage
mk-1strecurring.py
mk-1strecurring.py
#!/usr/bin/env python3 # (C) Mikhail Kolodin, 2018, ver. 1.0 # class ic test task: find 1st recurring character in a string import random import string MINSIZE = 1 # min size of test string MAXSIZE = 9 # its max size TESTS = 10 # no of tests alf = string.ascii_uppercase # test alphabet arr = [] size = 0 def prepare(): """organize tests""" global arr, size size = random.randint(MINSIZE, MAXSIZE) arr = "".join([random.choice(alf) for i in range(size)]) def solve(): """find char""" global arr found = "" for c in arr: if c in found: return c else: found += c else: return "None" def main(): """run all""" global arr, szie for test in range(TESTS): prepare() print ("test =", test, ", size =", size, ", arr =", arr.ljust(MAXSIZE), ", found recurrent:", solve()) main()
Python
0
@@ -53,12 +53,23 @@ ver. + 2018-05-31 1. -0 +1 %0A# c @@ -216,10 +216,10 @@ E = +1 9 - @@ -335,16 +335,17 @@ ze = 0%0A%0A +%0A def prep @@ -397,16 +397,17 @@ r, size%0A +%0A size @@ -504,16 +504,17 @@ ize)%5D)%0A%0A +%0A def solv @@ -534,34 +534,38 @@ ind char -%22%22%22%0A global arr +, reusable function%22%22%22 %0A%0A fo @@ -698,16 +698,132 @@ return +%22%22%0A%0A%0Adef show():%0A %22%22%22find and show char, function to show result only%22%22%22%0A%0A c = solve()%0A return c if c else %22None%22%0A%0A @@ -856,29 +856,8 @@ l%22%22%22 -%0A global arr, szie %0A%0A @@ -942,23 +942,30 @@ , size = -%22, + %252d%22 %25 ( size +) , %22, arr @@ -1016,17 +1016,48 @@ %22, s -olve +how ())%0A%0A +%0Aif __name__ == %22__main__%22:%0A main
e379f35a15956204f09aa593979fe0a0186cf56e
Update the upload tool
tools/upload_build.py
tools/upload_build.py
"""This script upload a newly-build version of CocoMUD for Windows. The Download wiki page on Redmine are updated. Requirements: This script needs 'python-redmine', which you can obtain with pip install python-redmine """ import argparse from json import dumps import os import re import sys import urllib2 from redminelib import Redmine from redminelib.exceptions import ResourceNotFoundError # Create an argument parser parser = argparse.ArgumentParser( description="upload a new CocoMUD build") parser.add_argument("key", help="the API key to upload to Redmine") args = parser.parse_args() # Configure the system key = args.key # Connects to the REST API redmine = Redmine("https://cocomud.plan.io", key=key) # Check that the file exists path = os.path.abspath("../src/build/CocoMUD.zip") if not os.path.exists(path): print "The file {} cannot be found.".format(path) sys.exit(1) # Then upload this file print "Retrieving the Download wiki page on 'cocomud-client'..." page = redmine.wiki_page.get("Download", project_id="cocomud-client") print "Uploading {}...".format(path) text = page.text page.uploads = [{"path": path, "filename": "CocoMUD.zip"}] page.text = text print "Saving the page...", page.save() # Get the new resource URL url = list(page.attachments)[-1].content_url # Retrieve the version number with open("../src/version.py") as file: content = file.read() version = content.partition("=")[2].strip() # Now we get ALL wiki pages with the title 'Download' and replace the URL for project in redmine.project.all(): identifier = project.identifier # Try to get the 'Download' page try: page = redmine.wiki_page.get("Download", project_id=identifier) except ResourceNotFoundError: pass else: print "Updating the Download page for the {} project...".format( identifier) text = page.text text = re.sub(r"https\://cocomud\.plan\.io/attachments/" \ r"download/\d+/CocoMUD\.zip", url, text) text = re.sub(r"\+\*\d+\*\+", "+*" + version + "*+", text) page.text = text success = page.save() if success: print "Correctly saved the wiki page." else: print "Error while saving the wiki page." # Update the build information in the custom field build = dumps({version: {"windows": url}}) print "Updating the custom field" redmine.project.update(resource_id=2, custom_fields=[{"id": 3, "value": build}]) print "URL", url
Python
0
@@ -306,22 +306,34 @@ sys%0A -import +from urllib -2 + import request %0A%0Afr @@ -865,17 +865,17 @@ print - +( %22The fil @@ -909,16 +909,17 @@ at(path) +) %0A sys @@ -957,17 +957,17 @@ le%0Aprint - +( %22Retriev @@ -1016,16 +1016,17 @@ ent'...%22 +) %0Apage = @@ -1093,17 +1093,17 @@ %22)%0Aprint - +( %22Uploadi @@ -1124,16 +1124,17 @@ at(path) +) %0Atext = @@ -1224,17 +1224,17 @@ xt%0Aprint - +( %22Saving @@ -1258,16 +1258,17 @@ e.save() +) %0A%0A# Get @@ -1393,16 +1393,34 @@ sion.py%22 +, encoding=%22utf-8%22 ) as fil @@ -1827,25 +1827,25 @@ print - +( %22Updating th @@ -1914,24 +1914,25 @@ identifier) +) %0A tex @@ -2228,17 +2228,17 @@ print - +( %22Correct @@ -2261,16 +2261,17 @@ i page.%22 +) %0A @@ -2294,17 +2294,17 @@ print - +( %22Error w @@ -2330,16 +2330,17 @@ i page.%22 +) %0A%0A# Upda @@ -2432,17 +2432,17 @@ %7D)%0Aprint - +( %22Updatin @@ -2460,16 +2460,17 @@ m field%22 +) %0Aredmine @@ -2556,17 +2556,17 @@ %5D)%0Aprint - +( %22URL%22, u @@ -2567,9 +2567,10 @@ RL%22, url +) %0A
9442f3375ee20baf677cbdcafdce0c1d5c1007a0
update stopping cluster
xpaw/cluster.py
xpaw/cluster.py
# coding=utf-8 import time import pickle import asyncio import threading import logging.config from collections import deque from xpaw.downloader import Downloader from xpaw.http import HttpRequest, HttpResponse from xpaw.loader import TaskLoader log = logging.getLogger(__name__) class LocalCluster: def __init__(self, proj_dir, config): self._config = config self._queue = deque() self._downloader_loop = asyncio.new_event_loop() self._downloader_loop.set_exception_handler(self._handle_coro_error) self._downloader = Downloader(loop=self._downloader_loop) self._task_loader = TaskLoader(proj_dir, base_config=self._config, downloader_loop=self._downloader_loop) self._is_running = False self._last_request = None def start(self): log.info("Task ID: {}".format(self._task_loader.config.get("task_id"))) self._is_running = True self._task_loader.open_spider() self._start_downloader_loop() def _handle_coro_error(self, loop, context): log.error("Unexpected error occurred when run the event loop: {}".format(context["message"])) async def _push_start_requests(self): for res in self._task_loader.spidermw.start_requests(self._task_loader.spider): if isinstance(res, HttpRequest): self._push_request(res) elif isinstance(res, Exception): log.warning("Unexpected error occurred when handle start requests", exc_info=True) await asyncio.sleep(0.01, loop=self._downloader_loop) def _push_request(self, req): r = pickle.dumps(req) self._queue.append(r) def _start_downloader_loop(self): def _start(): asyncio.set_event_loop(self._downloader_loop) try: self._downloader_loop.run_forever() except Exception: log.error("Unexpected error occurred when run loop", exc_info=True) raise finally: self._downloader_loop.close() asyncio.ensure_future(self._push_start_requests(), loop=self._downloader_loop) asyncio.ensure_future(self._supervisor(), loop=self._downloader_loop) for i in range(self._config.getint("downloader_clients")): asyncio.ensure_future(self._pull_requests(i), loop=self._downloader_loop) t = threading.Thread(target=_start) t.start() async def _supervisor(self): timeout = self._task_loader.config.getfloat("downloader_timeout") task_finished_delay = 2 * timeout self._last_request = time.time() while self._is_running: await asyncio.sleep(5, loop=self._downloader_loop) if time.time() - self._last_request > task_finished_delay: self._is_running = False try: self._task_loader.close_spider() except Exception: log.warning("Unexpected error occurred when close spider", exc_info=True) log.info("Event loop will be stopped after 5 seconds") await asyncio.sleep(5, loop=self._downloader_loop) self._downloader_loop.stop() async def _pull_requests(self, coro_id): timeout = self._task_loader.config.getfloat("downloader_timeout") while self._is_running: if len(self._queue) > 0: data = self._queue.popleft() req = pickle.loads(data) self._last_request = time.time() log.debug("The request (url={}) has been pulled by coro[{}]".format(req.url, coro_id)) try: result = await self._task_loader.downloadermw.download(self._downloader, req, timeout=timeout) except Exception: log.warning("Unexpected error occurred when request '{}'".format(req.url), exc_info=True) else: self._handle_result(req, result) else: await asyncio.sleep(3, loop=self._downloader_loop) def _handle_result(self, request, result): if isinstance(result, HttpRequest): r = pickle.dumps(result) self._queue.append(r) elif isinstance(result, HttpResponse): # bind HttpRequest result.request = request try: for res in self._task_loader.spidermw.parse(self._task_loader.spider, result): if isinstance(res, HttpRequest): r = pickle.dumps(res) self._queue.append(r) except Exception: log.warning("Unexpected error occurred when parse response of '{}'".format(request.url), exc_info=True)
Python
0
@@ -729,33 +729,34 @@ self._ -is_running = Fals +last_request = Non e%0A @@ -759,36 +759,31 @@ self._ -last_request +futures = None%0A%0A @@ -884,40 +884,8 @@ )))%0A - self._is_running = True%0A @@ -2022,32 +2022,63 @@ close()%0A%0A + self._futures = %5B%5D%0A f = asyncio.ensure_ @@ -2133,32 +2133,64 @@ ownloader_loop)%0A + self._futures.append(f)%0A asyncio. @@ -2321,32 +2321,36 @@ %22)):%0A + f = asyncio.ensure_ @@ -2400,32 +2400,68 @@ ownloader_loop)%0A + self._futures.append(f)%0A t = thre @@ -2711,32 +2711,20 @@ while -self._is_running +True :%0A @@ -2871,32 +2871,13 @@ -self._is_running = False +break %0A @@ -3035,32 +3035,154 @@ exc_info=True)%0A + if self._futures:%0A for f in self._futures:%0A f.cancel()%0A self._futures = None%0A log.info @@ -3216,17 +3216,17 @@ d after -5 +1 seconds @@ -3248,33 +3248,33 @@ t asyncio.sleep( -5 +1 , loop=self._dow @@ -3462,24 +3462,12 @@ ile -self._is_running +True :%0A
3e988a7c4b0d5407989af8c5ece6ed77a3646afd
Update __init__.py
otter/__init__.py
otter/__init__.py
""" Otter - output interruption library. Allows you to define output streams with the following characteristics: * streams start on a new line. * streams output to a sink. * other outputs to the sink constitute an interruption to the stream. Streams observe the sink to know when it is called. * interruptions to a stream start on a new line. * output to the stream after an interruption starts on a new line, and reprints the entire stream so far, adding the new output. * writing to a stream with output which ends in a new line resets the stream, including data and registrations. * writing to a stream with output which contains a newline resets the data to only what is after the final newline, but retains registrations. * streams can observe multple sinks. """ import sys class FunctionSink: """Function sink.""" def __init__(self, func): """obj init.""" self.on_newline = None self.observers = [] self.last_output = None self.func = func self.other_sinks = [] def register_observer(self, observer): """register an observer.""" self.observers.append(observer) def unregister_observer(self, observer): """unregister an observer.""" if observer in self.observers: self.observers.remove(observer) def write(self, output, writer=None): """write the output. Also notify observers.""" needs_newline = False for observer in self.observers: if observer(output, writer): needs_newline = True if needs_newline and not self.on_newline: self.last_output = '\n' + output else: self.last_output = output self.func(self.last_output) if output: self.on_newline = output.endswith('\n') for sink in self.other_sinks: sink.on_newline = self.on_newline class Stream: """A stream object.""" def __init__(self): """obj init.""" self.sink = None self.interrupted = False self.started = False self.data = '' self.other_sinks = [] def register_sink(self, sink): """Register the sink to send output to.""" if self.sink is None: self.sink = sink else: self.other_sinks.append(sink) sink.register_observer(self.observe_sink) def write(self, output): """Write the output to the sink.""" self.data += output if self.interrupted: self.sink.write(self.data, self) else: self.sink.write(output, self) self.started = True self.interrupted = False if output.endswith('\n'): self.reset() if '\n' in self.data: _, new = self.data.rsplit('\n', 1) self.data = new def reset(self): """reset the stream.""" self.sink.unregister_observer(self.observe_sink) self.sink = None for sink in self.other_sinks: sink.unregister_observer(self.observe_sink) self.other_sinks = [] self.interrupted = False self.started = False self.data = '' def observe_sink(self, output, writer): """observe a change in a sink.""" new_interruption = False fresh_output = False post_interruption = False if writer is not self: if not self.interrupted: new_interruption = True self.interrupted = True elif not self.started: fresh_output = True elif self.interrupted: post_interruption = True return new_interruption or fresh_output or post_interruption DEFAULT_SINKS = [] def DefaultStream(): """Get the default stream.""" stream = Stream() for sink in DEFAULT_SINKS: stream.register_sink(sink) return stream def use_stds(): """Use the standard out/err streams as the default sinks.""" global DEFAULT_SINKS original_stdout_write = sys.stdout.write def write_and_flush(output): """write and flush.""" original_stdout_write(output) sys.stdout.flush() std_out_sink = FunctionSink(write_and_flush) std_err_sink = FunctionSink(sys.stderr.write) std_out_sink.other_sinks.append(std_err_sink) 
std_err_sink.other_sinks.append(std_out_sink) sys.stdout.write = std_out_sink.write sys.stderr.write = std_err_sink.write DEFAULT_SINKS = [std_out_sink, std_err_sink]
Python
0.000005
@@ -3426,16 +3426,27 @@ not self + and output :%0A
3d331ecdb9cb0e64050eb3e4ece27242e1714b3e
Update C_Temperature_Vertical_sections.py
Cas_1/Temperature/C_Temperature_Vertical_sections.py
Cas_1/Temperature/C_Temperature_Vertical_sections.py
import numpy as np import matplotlib.pyplot as plt from xmitgcm import open_mdsdataset plt.ion() dir1 = '/homedata/bderembl/runmit/test_southatlgyre' ds1 = open_mdsdataset(dir1,iters='all',prefix=['T']) Height = ds1.T.Z print(Height) nx = int(len(ds1.T.XC)/2) print(nx) ny = int(len(ds1.T.YC)/2) print(ny) nt = -1 # Vertical Section of Temperature plt.figure(1) ds1['T'].where(ds1.hFacC>0)[nt,:,ny,:].plot() plt.title('Case 1 : Temperature (t=-1 ; YC = 30S)') plt.savefig('T_Temperature_Vertical_section_xz_cas4'+'.png') plt.clf() plt.figure(2) ds1['T'].where(ds1.hFacC>0)[nt,:,:,nx].plot() plt.title('Case 1 : Temperature (t=-1 ; XC = 0E)') plt.savefig('T_Temperature_Vertical_section_yz_cas4'+'.png') plt.clf()
Python
0.000001
@@ -508,25 +508,25 @@ ction_xz_cas -4 +1 '+'.png')%0Apl @@ -699,9 +699,9 @@ _cas -4 +1 '+'.
2bcca13d5861c4ad1d65514f06c4d2fddfa473b8
replace distutils.spawn.find_executable with shutil.which
mod/tools/clion.py
mod/tools/clion.py
'''CLion helper functions''' import subprocess, os, shutil from mod import util, log, verb, dep from mod.tools import cmake from distutils.spawn import find_executable name = 'clion' platforms = ['osx','linux','win'] optional = True not_found = 'used as IDE with clion configs' #------------------------------------------------------------------------------ def check_exists(fips_dir) : """test if 'clion' is in the path :returns: True if clion is in the path """ host = util.get_host_platform() if host == 'linux': # See if CLion was installed from a tar.gz and manually added to the path ("clion.sh"), # or added to the path using the "create launcher" command in CLion, which would by default # create a symlink from clion.sh to /usr/local/bin/clion. # This will also pick up CLion if it was installed using snap. if find_executable("clion.sh") is not None or find_executable("clion") is not None: return True else: return False elif host == 'osx': try: subprocess.check_output("mdfind -name CLion.app | grep 'CLion'", shell=True) return True except (OSError, subprocess.CalledProcessError): return False else: return False #------------------------------------------------------------------------------ def match(build_tool): return build_tool == 'clion' #------------------------------------------------------------------------------ def run(proj_dir): host = util.get_host_platform() if host == 'linux': try: if find_executable("clion.sh") is not None: subprocess.Popen('clion.sh {}'.format(proj_dir), cwd=proj_dir, shell=True) else: subprocess.Popen('clion {}'.format(proj_dir), cwd=proj_dir, shell=True) except OSError: log.error("Failed to run JetBrains CLion as 'clion' or 'clion.sh'") elif host == 'osx': try: subprocess.Popen('open /Applications/CLion.app --args {}'.format(proj_dir), cwd=proj_dir, shell=True) except OSError: log.error("Failed to run JetBrains CLion as '/Applications/CLion.app'") else: log.error("Not supported on this platform") #------------------------------------------------------------------------------- def write_clion_module_files(fips_dir, proj_dir, cfg): '''write misc.xml, modules.xml, *.iml''' proj_name = util.get_project_name_from_dir(proj_dir) iml_path = '{}/.idea/{}.iml'.format(proj_dir, proj_name) if os.path.exists(iml_path): return with open(iml_path, 'w') as f: f.write('<?xml version="1.0" encoding="UTF-8"?>\n') f.write('<module classpath="CMake" type="CPP_MODULE" version="4" />') ws_path = '{}/.idea/misc.xml'.format(proj_dir) with open(ws_path, 'w') as f: f.write('<?xml version="1.0" encoding="UTF-8"?>\n') f.write('<project version="4">\n') f.write(' <component name="CMakeWorkspace" IGNORE_OUTSIDE_FILES="true" PROJECT_DIR="$PROJECT_DIR$" />\n') f.write('</project>') ws_path = '{}/.idea/modules.xml'.format(proj_dir) with open(ws_path, 'w') as f: f.write('<?xml version="1.0" encoding="UTF-8"?>\n') f.write('<project version="4">\n') f.write(' <component name="ProjectModuleManager">\n') f.write(' <modules>\n') f.write(' <module fileurl="file://$PROJECT_DIR$/.idea/{}.iml" filepath="$PROJECT_DIR$/.idea/{}.iml" />\n'.format(proj_name, proj_name)) f.write(' </modules>\n') f.write(' </component>\n') f.write('</project>') #------------------------------------------------------------------------------- def write_clion_workspace_file(fips_dir, proj_dir, cfg): '''write bare-bone workspace.xml config file''' proj_name = util.get_project_name_from_dir(proj_dir) gen_options = '-DFIPS_CONFIG={}'.format(cfg['name']) gen_dir = '$PROJECT_DIR$/../fips-build/{}/{}'.format(proj_name, cfg['name']) ws_path = '{}/.idea/workspace.xml'.format(proj_dir) # do not overwrite existing .xml if os.path.exists(ws_path): return with open(ws_path, 'w') as f: f.write('<?xml version="1.0" encoding="UTF-8"?>\n') f.write('<project version="4">\n') # TODO: CMakeRunConfigurationManager f.write(' <component name="CMakeSettings">\n') f.write(' <configurations>\n') f.write(' <configuration PROFILE_NAME="Debug" CONFIG_NAME="Debug" GENERATION_OPTIONS="{}" GENERATION_DIR="{}" />\n'.format(gen_options, gen_dir)) f.write(' </configurations>\n') f.write(' </component>\n') # TODO: RunManager f.write('</project>') #------------------------------------------------------------------------------- def write_workspace_settings(fips_dir, proj_dir, cfg): '''write the CLion *.xml files required to open the project ''' log.info("=== writing JetBrains CLion config files...") clion_dir = proj_dir + '/.idea' if not os.path.isdir(clion_dir): os.makedirs(clion_dir) write_clion_module_files(fips_dir, proj_dir, cfg) write_clion_workspace_file(fips_dir, proj_dir, cfg) #------------------------------------------------------------------------------- def cleanup(fips_dir, proj_dir): '''deletes the .idea directory''' clion_dir = proj_dir + '/.idea' if os.path.isdir(clion_dir): log.info(log.RED + 'Please confirm to delete the following directory:' + log.DEF) log.info(' {}'.format(clion_dir)) if util.confirm(log.RED + 'Delete this directory?' + log.DEF): if os.path.isdir(clion_dir): log.info(' deleting {}'.format(clion_dir)) shutil.rmtree(clion_dir) log.info('Done.') else: log.info('Nothing deleted, done.') else: log.info('Nothing to delete.')
Python
0.000001
@@ -126,46 +126,27 @@ rom -distutils.spawn import find_executable +shutil import which %0A%0Ana @@ -854,39 +854,29 @@ %0A if -find_executable +which (%22clion.sh%22) @@ -891,31 +891,21 @@ None or -find_executable +which (%22clion%22
b2542f8c3625150f9716eb0b1fcb44ee15520ae8
fix path to nvim files
mod/vim/install.py
mod/vim/install.py
import packages import util def run(): spell_dir = '~/.config/vim/spell/' choices = [ 'vim', 'gvim', # gvim supports for X11 clipboard, but has more dependencies ] choice = None while choice not in choices: choice = input('Which package to install? (%s) ' % choices).lower() packages.try_install(choice) packages.try_install('fzf') for name in ['undo', 'backup', 'swap', 'spell', 'autoload']: util.create_dir('~/.config/vim/' + name) for path in util.find('./../mod-nvim/*.vim'): util.create_symlink(path, '~/.config/vim/') util.create_symlink('./../mod-nvim/spell/pl.utf-8.add', spell_dir) util.create_symlink('./../mod-nvim/spell/en.utf-8.add', spell_dir) util.download( 'ftp://ftp.vim.org/pub/vim/runtime/spell/en.utf-8.spl', '~/.config/vim/spell/') util.download( 'ftp://ftp.vim.org/pub/vim/runtime/spell/pl.utf-8.spl', '~/.config/vim/spell/') util.download( 'https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim', '~/.config/vim/autoload/plug.vim') util.create_file( '~/.config/zsh/editor.sh', 'export EDITOR=vim', overwrite=True) util.create_symlink('~/.config/vim/', '~/.vim') util.create_symlink('~/.config/vim/init.vim', '~/.vimrc') commands = ['PlugInstall'] for path in util.find(spell_dir): if 'add' in path and 'spl' not in path: commands.append('mkspell! ' + path) util.run_verbose(['vim'] + sum([['-c', cmd] for cmd in commands], []))
Python
0
@@ -525,28 +525,24 @@ .find('./../ -mod- nvim/*.vim') @@ -617,36 +617,32 @@ e_symlink('./../ -mod- nvim/spell/pl.ut @@ -696,12 +696,8 @@ /../ -mod- nvim
06912580de8ef1e781e8ebae46a4b886da38b8fe
add --verbose, --onetime options
trypython/__main__.py
trypython/__main__.py
""" trypython パッケージのメインファイルです. python インタープリターにて $ python -m trypython と起動した場合に実行されます。 """ import importlib import os import pathlib import sys from typing import Dict class ExampleLoadError(Exception): """サンプルのロード中にエラーが発生した場合の例外 Attributes: example_name (str): エラーが発生したモジュール名 """ def __init__(self, example_name: str): self.example_name = example_name def load_examples(raise_error: bool = False) -> Dict[str, object]: """サンプルをロードします. Returns: dict[str, object]: Key:サンプル名、Value: モジュール の辞書 Raises: ExampleLoadError: ロード中にエラーが発生した場合 """ examples = {} basedir = pathlib.Path(os.getcwd()) failed_modules = [] for p in basedir.rglob('*.py'): real_path = str(p) # 読み込み不要なものを除外 if any(ignore in real_path for ignore in ('__init__', '__main__')): continue # モジュール名を構築 # - trypython以降の文字列を取得 # - ファイル名の末尾の ".py" を除去 # - "/" を "." に置換 index = real_path.find('trypython') mod_name = real_path[index:-3].replace(os.path.sep, '.') if not mod_name: continue # windows特有のモジュールはOS判定を実施 if 'windows' in mod_name: if os.name != 'nt': continue # モジュールをロード try: m = importlib.import_module(mod_name) examples[mod_name[mod_name.rfind('.') + 1:]] = m except ModuleNotFoundError as not_found_ex: # モジュールが見つからない print(f'[警告] モジュールがロードできませんでした: {not_found_ex}') except Exception as e: print(f'[エラー] モジュールがロード中にエラー発生: {e}') failed_modules.append(mod_name) if failed_modules: print('以下のモジュールはロードに失敗しました') for m in failed_modules: print(f'\t{m}') if raise_error: raise ExampleLoadError(','.join(failed_modules)) return examples def main(): """メイン処理. 存在するサンプルをロードしてユーザに入力を要求します.""" # サンプルをロード try: examples = {k.lower(): v for k, v in load_examples().items()} except ExampleLoadError as e: print(f'サンプルのロード中にエラーが発生しました. [{e.example_name}]') sys.exit(-1) while True: # ユーザにサンプル名の入力を行ってもらう user_input: str = input('ENTER EXAMPLE NAME: ').strip() if not user_input: continue if user_input.lower() == 'quit': break # 候補リストを構築 candidates = [x for x in examples if user_input in x] if not candidates: print(f'no hit...try again.') continue if 1 < len(candidates): print(f'hit {len(candidates)} counts.') for x in candidates: print(f'\t{x}') continue # 実行対象を検査 target = candidates[0] if target not in examples: print(f'sorry...not exists - {target}. try again') continue m = examples[target] if not hasattr(m, 'go'): print(f'サンプルとして実行するには go() が実装されている必要があります.') continue # サンプル実行 try: print(f'\n[START] ==== {m.__name__} ====') m.go() print(f'[END ] ==== {m.__name__} ====\n') except Exception as e: print(f'サンプル実行中にエラーが発生しました [{e}]') print('\nDONE') if __name__ == '__main__': main()
Python
0.000403
@@ -87,16 +87,32 @@ %E3%81%BE%E3%81%99%E3%80%82%0A%22%22%22%0A +import argparse%0A import i @@ -181,16 +181,49 @@ t Dict%0A%0A +onetime = False%0Averbose = False%0A%0A %0Aclass E @@ -1542,16 +1542,44 @@ %E3%81%8C%E8%A6%8B%E3%81%A4%E3%81%8B%E3%82%89%E3%81%AA%E3%81%84%0A + if verbose:%0A @@ -1654,32 +1654,60 @@ Exception as e:%0A + if verbose:%0A prin @@ -1800,32 +1800,56 @@ failed_modules:%0A + if verbose:%0A print('%E4%BB%A5 @@ -1873,24 +1873,28 @@ %E3%81%9F')%0A + for m in fai @@ -1898,32 +1898,36 @@ failed_modules:%0A + prin @@ -3472,16 +3472,358 @@ ain__':%0A + parser = argparse.ArgumentParser(description='trypython -- example executor')%0A parser.add_argument('-o', '--onetime', action='store_true', help='%E3%82%B5%E3%83%B3%E3%83%97%E3%83%AB%E3%82%92%E4%B8%80%E5%BA%A6%E3%81%A0%E3%81%91%E5%AE%9F%E8%A1%8C%E3%81%97%E3%81%A6%E7%B5%82%E4%BA%86%E3%81%99%E3%82%8B')%0A parser.add_argument('-v', '--verbose', action='store_true', help='%E5%86%97%E9%95%B7%E3%81%AA%E3%83%AD%E3%82%B0%E5%87%BA%E5%8A%9B%E3%83%A2%E3%83%BC%E3%83%89')%0A%0A args = parser.parse_args()%0A%0A onetime = args.onetime%0A verbose = args.verbose%0A main
6d2e66ab5b9b452474701ffc5035e4a8106db637
Add test_Record unit tests
tests/test_Record.py
tests/test_Record.py
# import unittest # # import os, shutil # # from GeometrA.src.Record import * # from GeometrA.src.File.WorkSpace import WorkSpace # # RECORD_FILE = './tests/record.log' # # class RecordTestSuite(unittest.TestCase): # @classmethod # def setUpClass(cls): # path = './tests/Project0' # if os.path.isdir(path): # shutil.rmtree(path, True) # # def setUp(self): # self.recordFile = './tests/record.log' # self.path = os.getcwd() # shutil.copytree('./tests/File/Project0', './tests/Project0') # # def tearDown(self): # if os.path.isfile(self.recordFile): # os.remove(self.recordFile) # # def tearDown(self): # path = './tests/Project0' # if os.path.isdir(path): # shutil.rmtree('path', True) # # def testExportLog(self): # p = ['Project0', {'Project0':{'Suite1': ['case1', 'case2'], # 'Suite2': ['case2']}}] # path = self.path # ws = WorkSpace(self.path, p) # # exportLog(workspace = ws) # self.assertTrue(os.path.isfile(self.recordFile)) # # def testLog(self): # p = ['Project0', {'Project0':{'Suite1': ['case1', 'case2'], # 'Suite2': ['case2']}}] # path = self.path # ws1 = WorkSpace(self.path, p) # # exportLog(workspace = ws1) # # ws = WorkSpace() # loadLog(ws) # # log = [os.getcwd() + '/tests/Project0/Project0.json'] # self.assertEqual(log, ws.log())
Python
0.000001
@@ -1,14 +1,12 @@ -# import unitt @@ -9,20 +9,17 @@ nittest%0A -#%0A# +%0A import o @@ -28,20 +28,17 @@ shutil%0A -#%0A# +%0A from Geo @@ -63,18 +63,16 @@ mport *%0A -# from Geo @@ -113,20 +113,17 @@ rkSpace%0A -#%0A# +%0A RECORD_F @@ -149,20 +149,17 @@ rd.log'%0A -#%0A# +%0A class Re @@ -188,26 +188,24 @@ .TestCase):%0A -# @classme @@ -209,18 +209,16 @@ smethod%0A -# def @@ -234,18 +234,16 @@ s(cls):%0A -# @@ -264,33 +264,32 @@ s/Project0'%0A -# if os.path. @@ -268,33 +268,32 @@ oject0'%0A - if os.path.isdir @@ -292,34 +292,32 @@ th.isdir(path):%0A -# shut @@ -330,36 +330,33 @@ ree(path, True)%0A -#%0A# +%0A def setUp(se @@ -352,34 +352,32 @@ ef setUp(self):%0A -# self.rec @@ -407,18 +407,16 @@ rd.log'%0A -# @@ -439,18 +439,16 @@ etcwd()%0A -# @@ -508,20 +508,17 @@ ject0')%0A -#%0A# +%0A def @@ -525,34 +525,32 @@ tearDown(self):%0A -# if os.pa @@ -577,17 +577,16 @@ dFile):%0A -# @@ -589,17 +589,16 @@ - os.remov @@ -616,24 +616,21 @@ rdFile)%0A -#%0A# +%0A - def tear @@ -641,25 +641,24 @@ (self):%0A -# path = @@ -645,25 +645,24 @@ f):%0A - path = './te @@ -675,18 +675,16 @@ oject0'%0A -# @@ -707,18 +707,16 @@ (path):%0A -# @@ -747,20 +747,17 @@ , True)%0A -#%0A# +%0A def @@ -769,34 +769,32 @@ xportLog(self):%0A -# p = %5B'Pr @@ -837,34 +837,32 @@ se1', 'case2'%5D,%0A -# @@ -886,34 +886,32 @@ ': %5B'case2'%5D%7D%7D%5D%0A -# path = s @@ -911,34 +911,32 @@ ath = self.path%0A -# ws = Wor @@ -952,35 +952,33 @@ lf.path, p)%0A -#%0A# +%0A exportLog(w @@ -957,33 +957,32 @@ th, p)%0A%0A - exportLog(worksp @@ -991,18 +991,16 @@ e = ws)%0A -# @@ -1048,24 +1048,21 @@ dFile))%0A -#%0A# +%0A - def test @@ -1072,18 +1072,16 @@ (self):%0A -# @@ -1140,18 +1140,16 @@ ase2'%5D,%0A -# @@ -1189,25 +1189,24 @@ e2'%5D%7D%7D%5D%0A -# path = @@ -1193,25 +1193,24 @@ %7D%7D%5D%0A - path = self. @@ -1214,18 +1214,16 @@ lf.path%0A -# @@ -1252,27 +1252,25 @@ ath, p)%0A -#%0A# +%0A exportL @@ -1257,25 +1257,24 @@ p)%0A%0A - exportLog(wo @@ -1288,27 +1288,25 @@ = ws1)%0A -#%0A# +%0A ws = Wo @@ -1293,25 +1293,24 @@ 1)%0A%0A - ws = WorkSpa @@ -1314,18 +1314,16 @@ Space()%0A -# @@ -1338,12 +1338,9 @@ ws)%0A -#%0A# +%0A @@ -1401,10 +1401,8 @@ n'%5D%0A -#
36e066ae645eb9b874ff1ce814708bd024c519e0
add support to get git revision from toymaker.
toymakerlib/toymaker.py
toymakerlib/toymaker.py
#! /usr/bin/env python import sys import os import getopt import optparse import traceback import toydist from toydist.core.utils import \ subst_vars, pprint from toydist.core.platforms import \ get_scheme from toydist.core.descr_parser import \ ParseError from toydist.commands.core import \ Command, HelpCommand, get_usage from toydist.commands.configure import \ ConfigureCommand from toydist.commands.build import \ BuildCommand from toydist.commands.install import \ InstallCommand from toydist.commands.parse import \ ParseCommand from toydist.commands.convert import \ ConvertCommand from toydist.commands.sdist import \ SdistCommand from toydist.commands.detect_type import \ DetectTypeCommand from toydist.commands.build_pkg_info import \ BuildPkgInfoCommand from toydist.commands.build_egg import \ BuildEggCommand from toydist.commands.core import \ register_command, UsageException, \ MyOptionParser, get_command_names, get_command, \ get_public_command_names if os.environ.get("TOYMAKER_DEBUG", None) is not None: TOYMAKER_DEBUG = True else: TOYMAKER_DEBUG = False SCRIPT_NAME = 'toymaker' #================================ # Create the command line UI #================================ register_command("help", HelpCommand) register_command("configure", ConfigureCommand) register_command("build", BuildCommand) register_command("install", InstallCommand) register_command("convert", ConvertCommand) register_command("sdist", SdistCommand) register_command("build_egg", BuildEggCommand) register_command("build_pkg_info", BuildPkgInfoCommand, public=False) register_command("parse", ParseCommand, public=False) register_command("detect_type", DetectTypeCommand, public=False) def main(argv=None): if argv is None: argv = sys.argv[1:] show_usage = False show_version = False cmd_name = None cmd_opts = None try: opts, pargs = getopt.getopt(argv, "hv", ["help", "version"]) for opt, arg in opts: if opt in ("--help", "-h"): show_usage = True if opt in ("--version", "-v"): show_version = True if len(pargs) > 0: cmd_name = pargs.pop(0) cmd_opts = pargs except getopt.GetoptError, e: emsg = "%s: illegal global option -- %s" % (SCRIPT_NAME, e.opt) print emsg print get_usage() return 1 if show_version: print toydist.__version__ return 0 if show_usage: cmd = get_command('help')() cmd.run([]) return 0 if not cmd_name: print "Type '%s help' for usage." % SCRIPT_NAME return 1 else: if not cmd_name in get_command_names(): raise UsageException("%s: Error: unknown command %s" % (SCRIPT_NAME, cmd_name)) else: cmd = get_command(cmd_name)() cmd.run(cmd_opts) def noexc_main(argv=None): try: ret = main(argv) except UsageException, e: pprint('RED', e) sys.exit(1) except ParseError, e: pprint('RED', "".join(e.args)) sys.exit(2) except Exception, e: if TOYMAKER_DEBUG: tb = sys.exc_info()[2] traceback.print_tb(tb) pprint('RED', "%s: Error: %s crashed (uncaught exception %s: %s)." % \ (SCRIPT_NAME, SCRIPT_NAME, e.__class__, str(e))) sys.exit(1) sys.exit(ret) if __name__ == '__main__': noexc_main()
Python
0
@@ -1945,24 +1945,54 @@ ion = False%0A + show_full_version = False%0A cmd_name @@ -2095,16 +2095,32 @@ version%22 +, %22full-version%22 %5D)%0A @@ -2296,16 +2296,99 @@ n = True +%0A if opt in (%22--full-version%22):%0A show_full_version = True %0A%0A @@ -2711,24 +2711,137 @@ return 0%0A%0A + if show_full_version:%0A print toydist.__version__ + %22git%22 + toydist.__git_revision__%0A return 0%0A%0A if show_
d9407ebda411d49212da35e27f08718dade1cd02
Support Info is unable to read package version and git version
modules/support.py
modules/support.py
# -*- coding: utf-8 -*- """Support Information module. The module provides functions to gain information to be included in issues. It neither contains normal functionality nor is it used by GitGutter. """ import os import subprocess import textwrap import sublime import sublime_plugin PACKAGE = os.path.basename(os.path.dirname(os.path.dirname( os.path.abspath(__file__)))) def git(*args): """Read version of git binary.""" if os.name == 'nt': startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW else: startupinfo = None proc = subprocess.Popen( args=['git'] + [arg for arg in args], startupinfo=startupinfo, stdout=subprocess.PIPE, stdin=subprocess.PIPE, # run command in package directory if exists. cwd='/'.join((sublime.packages_path(), PACKAGE))) stdout, _ = proc.communicate() return stdout.decode('utf-8').strip() if stdout else None def git_version(): """Read version of git binary.""" try: return git('--version') except Exception as exception: print('%s: %s' % (PACKAGE, exception)) return 'git version could not be acquired!' def gitgutter_version(): """Read commit hash or version of GitGutter.""" try: return git('rev-parse', 'HEAD')[:7] except: try: return sublime.load_resource( 'Packages/%s/release_messages/dest/VERSION' % PACKAGE) except Exception as exception: print('%s: %s' % (PACKAGE, exception)) return 'GitGutter version could not be acquired!' def module_version(module, attr): """Format the module version.""" try: version = getattr(module, attr) if callable(version): version = version() except Exception as exception: print('%s: %s' % (PACKAGE, exception)) version = 'version could not be acquired!' if not isinstance(version, str): version = '.'.join((str(x) for x in version)) return version def is_installed_by_package_control(): """Check if installed by package control.""" settings = sublime.load_settings('Package Control.sublime-settings') return str(PACKAGE in set(settings.get('installed_packages', []))) class GitGutterSupportInfoCommand(sublime_plugin.ApplicationCommand): """Support Information Command.""" @staticmethod def run(): """Run command.""" info = { 'platform': sublime.platform(), 'st_version': sublime.version(), 'arch': sublime.arch(), 'package_version': gitgutter_version(), 'pc_install': is_installed_by_package_control(), 'git_version': git_version() } try: import markdown info['markdown'] = module_version(markdown, 'version') except ImportError: info['markdown'] = 'not installed!' try: import mdpopups info['mdpopups'] = module_version(mdpopups, 'version') except ImportError: info['mdpopups'] = 'not installed!' try: import jinja2 info['jinja'] = module_version(jinja2, '__version__') except ImportError: info['jinja'] = 'not installed!' try: import pygments info['pygments'] = module_version(pygments, '__version__') except ImportError: info['pygments'] = 'not installed!' msg = textwrap.dedent( """\ - Sublime Text %(st_version)s - Platform: %(platform)s - Arch: %(arch)s - GitGutter %(package_version)s - Install via PC: %(pc_install)s - %(git_version)s - mdpopups %(mdpopups)s - markdown %(markdown)s - pygments %(pygments)s - jinja2 %(jinja)s """ % info ) sublime.message_dialog(msg + '\nInfo has been copied to clipboard.') sublime.set_clipboard(msg)
Python
0
@@ -282,23 +282,63 @@ plugin%0A%0A +# get absolute path of the package%0A PACKAGE +_PATH = os.pa @@ -344,12 +344,11 @@ ath. -base +dir name @@ -376,48 +376,384 @@ ath. -dir +abspath(__file__)))%0Aif os.path.isfile(PACKAGE_PATH):%0A # Package is a PACKAGE.sublime-package so get its file name -( %0A -os.path.abspath(__file__))) +PACKAGE, _ = os.path.splitext(os.path.basename(PACKAGE_PATH))%0Aelif os.path.isdir(PACKAGE_PATH):%0A # Package is a directory, so get its basename%0A PACKAGE = os.path.basename(PACKAGE_PATH)%0Aelse:%0A raise ValueError('Package is no file and no directory!' )%0A%0A%0A @@ -1202,52 +1202,61 @@ cwd= -'/'.join((sublime.packages_path(), PACKAGE)) +PACKAGE_PATH if os.path.isdir(PACKAGE_PATH) else None )%0A @@ -1968,19 +1968,9 @@ rn ' -GitGutter v +V ersi
659bd0a11bb1460784afc50818be376d112bbfc8
Test _ravel_shape_indices
tests/test__utils.py
tests/test__utils.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import operator import pytest import numpy as np import dask.array as da import dask.array.utils as dau import dask_ndmeasure._utils def test__norm_input_labels_index_err(): shape = (15, 16) chunks = (4, 5) ind = None a = np.random.random(shape) d = da.from_array(a, chunks=chunks) lbls = (a < 0.5).astype(np.int64) d_lbls = da.from_array(lbls, chunks=d.chunks) lbls = lbls[:-1] d_lbls = d_lbls[:-1] with pytest.raises(ValueError): dask_ndmeasure._utils._norm_input_labels_index(d, d_lbls, ind) def test__norm_input_labels_index(): shape = (15, 16) chunks = (4, 5) ind = None a = np.random.random(shape) d = da.from_array(a, chunks=chunks) lbls = (a < 0.5).astype(np.int64) d_lbls = da.from_array(lbls, chunks=d.chunks) d_n, d_lbls_n, ind_n = dask_ndmeasure._utils._norm_input_labels_index( d, d_lbls, ind ) assert isinstance(d_n, da.Array) assert isinstance(d_lbls_n, da.Array) assert isinstance(ind_n, da.Array) dau.assert_eq(d_n, d) dau.assert_eq(d_lbls_n, d_lbls) dau.assert_eq(ind_n, np.array([1], dtype=np.int64)) @pytest.mark.parametrize( "shape, chunks, ind", [ ((15, 16), (4, 5), 0), ((15, 16), (4, 5), 1), ((15, 16), (4, 5), [1]), ((15, 16), (4, 5), [1, 2]), ((15, 16), (4, 5), [1, 100]), ((15, 16), (4, 5), [[1, 2, 3, 4]]), ((15, 16), (4, 5), [[1, 2], [3, 4]]), ((15, 16), (4, 5), [[[1], [2], [3], [4]]]), ] ) def test__get_label_matches(shape, chunks, ind): a = np.random.random(shape) d = da.from_array(a, chunks=chunks) lbls = np.zeros(a.shape, dtype=np.int64) lbls += ( (a < 0.5).astype(lbls.dtype) + (a < 0.25).astype(lbls.dtype) + (a < 0.125).astype(lbls.dtype) + (a < 0.0625).astype(lbls.dtype) ) d_lbls = da.from_array(lbls, chunks=d.chunks) ind = np.array(ind) d_ind = da.from_array(ind, chunks=1) lbl_mtch = operator.eq( ind[(Ellipsis,) + lbls.ndim * (None,)], lbls[ind.ndim * (None,)] ) input_i_mtch = ( lbl_mtch.astype(np.int64)[ind.ndim * (slice(None),) + (None,)] * np.indices(a.shape, dtype=np.int64)[ind.ndim * (None,)] ) input_mtch = lbl_mtch.astype(a.dtype) * a[ind.ndim * (None,)] d_lbl_mtch = dask_ndmeasure._utils._get_label_matches(d_lbls, ind) assert issubclass(d_lbl_mtch.dtype.type, np.bool8) dau.assert_eq(d_lbl_mtch, lbl_mtch)
Python
0
@@ -2586,8 +2586,432 @@ l_mtch)%0A +%0A%[email protected](%0A %22shape, chunks%22, %5B%0A ((15,), (4,)),%0A ((15, 16), (4, 5)),%0A ((15, 1, 16), (4, 1, 5)),%0A ((15, 12, 16), (4, 5, 6)),%0A %5D%0A)%0Adef test___ravel_shape_indices(shape, chunks):%0A a = np.arange(int(np.prod(shape)), dtype=np.int64).reshape(shape)%0A d = dask_ndmeasure._utils._ravel_shape_indices(%0A shape, dtype=np.int64, chunks=chunks%0A )%0A%0A dau.assert_eq(d, a)%0A
3425c2c9d19c1d0a54dafde6cc70d571421c82a9
Fix string app import error for python 3.5
tests/test_config.py
tests/test_config.py
import logging import socket import pytest from uvicorn import protocols from uvicorn.config import Config from uvicorn.middleware.debug import DebugMiddleware from uvicorn.middleware.wsgi import WSGIMiddleware async def asgi_app(): pass def wsgi_app(): pass def test_debug_app(): config = Config(app=asgi_app, debug=True) config.load() assert config.debug is True assert isinstance(config.loaded_app, DebugMiddleware) def test_wsgi_app(): config = Config(app=wsgi_app, interface="wsgi") config.load() assert isinstance(config.loaded_app, WSGIMiddleware) assert config.interface == "wsgi" def test_proxy_headers(): config = Config(app=asgi_app, proxy_headers=True) config.load() assert config.proxy_headers is True def test_app_unimportable(): config = Config(app="no.such:app") with pytest.raises(ModuleNotFoundError): config.load() def test_concrete_http_class(): config = Config(app=asgi_app, http=protocols.http.h11_impl.H11Protocol) config.load() assert config.http_protocol_class is protocols.http.h11_impl.H11Protocol def test_logger(): logger = logging.getLogger("just-for-tests") config = Config(app=asgi_app, logger=logger) config.load() assert config.logger is logger def test_socket_bind(): config = Config(app=asgi_app) config.load() assert isinstance(config.bind_socket(), socket.socket) def test_ssl_config(certfile_and_keyfile): certfile, keyfile = certfile_and_keyfile config = Config(app=asgi_app, ssl_certfile=certfile.name, ssl_keyfile=keyfile.name) config.load() assert config.is_ssl is True
Python
0.999224
@@ -872,22 +872,14 @@ ses( -ModuleNotFound +Import Erro
dcfda9f906d417ee3a62d3125f052bfad074100c
Add case-match label
yargy/labels.py
yargy/labels.py
GENDERS = ("masc", "femn", "neut", "Ms-f", "GNdr") NUMBERS = ("sing", "plur", "Pltm") def get_token_features(candidate, case, grammemes): return ((g in t["grammemes"] for g in grammemes) for t in (case, candidate)) def is_lower_label(token, _, stack): return token[1].islower() def is_upper_label(token, _, stack): return token[1].isupper() def is_title_label(token, _, stack): return token[1].istitle() def is_capitalized_label(token, _, stack): """ http://bugs.python.org/issue7008 """ return token[1][0].isupper() and token[1][-1].islower() def eq_label(token, value, stack): return token[1] == value def gt_label(token, value, stack): return token[1] > value def lt_label(token, value, stack): return token[1] < value def gte_label(token, value, stack): return token[1] >= value def lte_label(token, value, stack): return token[1] <= value def gram_label(token, value, stack): for form in token[3]: if value in form["grammemes"]: return True return False def gram_any_label(token, values, stack): return any(gram_label(token, value, stack) for value in values) def gram_in_label(token, values, stack): return all(gram_label(token, value, stack) for value in values) def gram_not_label(token, value, stack): return not gram_label(token, value, stack) def gram_not_in_label(token, values, stack): return all(gram_not_label(token, value, stack) for value in values) def gender_match_label(token, index, stack, genders=GENDERS): for candidate_form in token[3]: for case_form in stack[index][3]: results = get_token_features(candidate_form, case_form, genders) *case_token_genders, case_token_msf, case_token_gndr = next(results) *candidate_token_genders, candidate_token_msf, candidate_token_gndr = next(results) if not candidate_token_genders == case_token_genders: if case_token_msf or candidate_token_msf: if any(case_token_genders[:2]) or any(candidate_token_genders[:2]): return True elif case_token_gndr or candidate_token_gndr: return True elif "plur" in case_form["grammemes"] and "plur" in candidate_form["grammemes"]: return True else: if (case_token_genders[0] and candidate_token_genders[0]) or \ (case_token_genders[1] and candidate_token_genders[1]) or \ (case_token_genders[2] and candidate_token_genders[2]): return True else: return True return False def number_match_label(token, index, stack, numbers=NUMBERS): for candidate_form in token[3]: for case_form in stack[index][3]: results = get_token_features(candidate_form, case_form, numbers) *case_form_features, case_form_only_plur = next(results) *candidate_form_features, candidate_form_only_plur = next(results) if case_form_features == candidate_form_features: return True elif case_form_only_plur or candidate_form_only_plur: if case_form_only_plur: if candidate_form_features[1]: return True else: if case_form_features[1]: return True return False def dictionary_label(token, values, stack): return any((form["normal_form"] in values) for form in token[3]) LABELS_LOOKUP_MAP = { "gram": gram_label, "gram-any": gram_any_label, "gram-in": gram_in_label, "gram-not": gram_not_label, "gram-not-in": gram_not_in_label, "gender-match": gender_match_label, "number-match": number_match_label, "is-lower": is_lower_label, "is-upper": is_upper_label, "is-title": is_title_label, "is-capitalized": is_capitalized_label, "eq": eq_label, "gt": gt_label, "lt": lt_label, "gte": gte_label, "lte": lte_label, "dictionary": dictionary_label, }
Python
0.000483
@@ -78,16 +78,105 @@ %22Pltm%22) +%0ACASES = (%22nomn%22, %22gent%22, %22datv%22, %22accs%22, %22ablt%22, %22loct%22, %22voct%22, %22gen2%22, %22acc2%22, %22loc2%22) %0A%0Adef ge @@ -3560,24 +3560,452 @@ turn False%0A%0A +def case_match_label(token, index, stack, cases=CASES):%0A for candidate_form in token%5B3%5D:%0A for case_form in stack%5Bindex%5D%5B3%5D:%0A results = get_token_features(candidate_form, case_form, cases)%0A case_form_features = list(next(results))%0A candidate_form_features = list(next(results))%0A if case_form_features == candidate_form_features:%0A return True%0A return False%0A%0A def dictiona @@ -4360,24 +4360,60 @@ match_label, +%0A %22case-match%22: case_match_label, %0A%0A %22is-lo
1df8efb63333e89777820a96d78d5a59252b303d
Rename test specific to with gpg
tests/test_config.py
tests/test_config.py
import unittest import figgypy.config import sys import os class TestConfig(unittest.TestCase): def test_config_load(self): os.environ['FIGGY_GPG_HOME']='tests/resources/test-keys' c = figgypy.config.Config('tests/resources/test-config.yaml') self.assertEqual(c.db['host'], 'db.heck.ya') self.assertEqual(c.db['pass'], 'test password') if __name__ == '__main__': unittest.main()
Python
0
@@ -115,16 +115,25 @@ fig_load +_with_gpg (self):%0A
7fc5fdba58880c782af54b669070592277b67583
Test decode binary with negative length
tests/test_decode.py
tests/test_decode.py
# coding: utf-8 import termformat from unittest import TestCase class TermFormatDecoderTest(TestCase): def test_decode_atom(self): result = termformat.decode(b"\x83d\x00\x03foo") self.assertEqual(result, ":foo") def test_decode_incomplete_atom(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83d\x00\x03fo") self.assertEqual(result, ":foo") def test_decode_small_int(self): result = termformat.decode(b"\x83a\x14") self.assertEqual(result, 20) def test_decode_incomplete_small_int(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83a") self.assertEqual(result, 20) def test_decode_medium_int(self): result = termformat.decode(b"\x83b\x00\x00\x01,") self.assertEqual(result, 300) def test_decode_incomplete_medium_int(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83b\x00\x00\x01") self.assertEqual(result, 300) def test_decode_large_int(self): result = termformat.decode(b"\x83n\x05\x00\x00\x00\x00\x00\x01") self.assertEqual(result, 4294967296) def test_decode_large_negative_int(self): result = termformat.decode(b"\x83n\x05\x01\x00\x00\x00\x00\x01") self.assertEqual(result, -4294967296) def test_decode_new_float(self): result = termformat.decode(b"\x83F@\t\x1e\xb8Q\xeb\x85\x1f") self.assertEqual(result, 3.14) def test_decode_float(self): result = termformat.decode(b"\x83c3.14000000000000012434e+00\x00\x00\x00\x00\x00") self.assertEqual(result, 3.14) def test_encode_zero_float(self): bytes = termformat.decode(b"\x83c0.00000000000000000000e+00\x00\x00\x00\x00\x00") self.assertEqual(bytes, 0.0) def test_decode_incomplete_float(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83F@\t\x1e\xb8Q\xeb\x85") self.assertEqual(result, 3.14) def test_decode_binary(self): result = termformat.decode(b"\x83m\x00\x00\x00\x03foo") self.assertEqual(result, "foo") def test_decode_unicode_binary(self): bytes = termformat.decode(b"\x83m\x00\x00\x00\x08\xd1\x82\xd0\xb5\xd1\x81\xd1\x82") self.assertEqual(bytes, u"тест") def test_decode_incomplete_binary(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83m\x00\x00\x00\x03fo") self.assertEqual(result, "foo") def test_decode_string(self): result = termformat.decode(b"\x83k\x00\x03foo") self.assertEqual(result, "foo") def test_decode_incomplete_string(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83k\x00\x03fo") self.assertEqual(result, "foo") def test_decode_empty_list(self): result = termformat.decode(b"\x83j") self.assertEqual(result, []) def test_decode_small_tuple(self): result = termformat.decode(b"\x83h\x03a\x01a\x02a\x03") self.assertEqual(result, (1, 2, 3)) def test_decode_complex_tuple(self): result = termformat.decode(b"\x83h\na\x01b\x00\x00\x059c3.14000000000000012434e+00" b"\x00\x00\x00\x00\x00m\x00\x00\x00\x06binaryd\x00\x04" b"atomd\x00\x04trued\x00\x05falsed\x00\tundefinedl\x00" b"\x00\x00\x02a\x02l\x00\x00\x00\x01a\x02jjh\x03a\x01a" b"\x02a\x03") self.assertEqual(result, (1, 1337, 3.14, "binary", ":atom", ":true", ":false", ":undefined", [2, [2]], (1, 2, 3))) def test_decode_large_tuple(self): bytes = termformat.encode((1, 2, 3) * 256) result = termformat.decode(bytes) self.assertEqual(result, (1, 2, 3) * 256) def test_decode_incomplete_tuple(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83h\x03a\x01a\x02") self.assertEqual(result, (1, 2, 3)) def test_decode_list(self): result = termformat.decode(b"\x83l\x00\x00\x00\x03a\x01a\x02a\x03j") self.assertEqual(result, [1, 2, 3]) def test_decode_incomplete_list(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83l\x00\x00\x00\x03a\x01j") self.assertEqual(result, [1, 2, 3]) def test_decode_unknown_type(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x83z") self.assertEqual(result, None) def test_decode_invalid_magic(self): with self.assertRaises(ValueError): result = termformat.decode(b"\x84") self.assertEqual(result, None)
Python
0.999996
@@ -2377,32 +2377,226 @@ result, %22foo%22)%0A%0A + def test_decode_binary_with_negative_length(self):%0A with self.assertRaises(ValueError):%0A result = termformat.decode(b%22%5Cx83m%5Cxff%5Cxff%5Cxff%5Cxfdfoo%22)%0A self.assertEqual(result, %22foo%22)%0A%0A def test_decod
c5f44c9dda9905e9aa817c1945d49892e686f9cd
Fix failing test
tests/test_models.py
tests/test_models.py
# -*- coding: utf-8 -*- import datetime as dt import pytest from doorman.models import Node, Pack, Query, Tag, FilePath from .factories import NodeFactory, PackFactory, QueryFactory, TagFactory @pytest.mark.usefixtures('db') class TestNode: def test_factory(self, db): node = NodeFactory(host_identifier='foo') db.session.commit() assert node.node_key is not None assert node.host_identifier == 'foo' def test_tags(self): tag = Tag.create(value='foo') node = NodeFactory(host_identifier='foo') node.tags.append(tag) node.save() assert tag in node.tags def test_config(self): node = NodeFactory(host_identifier='foo') tag = Tag.create(value='foo') node.tags.append(tag) node.save() query1 = Query.create(name='bar', sql='select * from osquery_info;') query2 = Query.create(name='foobar', sql='select * from system_info;') query2.tags.append(tag) query2.save() pack = Pack.create(name='baz') pack.queries.append(query1) pack.tags.append(tag) pack.save() file_path = FilePath.create(category='foobar', target_paths=[ '/home/foobar/%%', ]) file_path.tags.append(tag) file_path.save() assert tag in pack.tags assert tag in query2.tags assert tag in file_path.tags assert tag not in query1.tags assert query1 in pack.queries assert query2 not in pack.queries assert pack in node.packs assert query2 in node.queries assert query1 not in node.queries config = node.get_config() assert node.host_identifier == config['options']['host_identifier'] assert pack.name in config['packs'] assert query1.name in config['packs'][pack.name]['queries'] assert query1.sql == config['packs'][pack.name]['queries'][query1.name]['query'] assert query2.name not in config['packs'] assert query2.name in config['schedule'] assert query2.sql == config['schedule'][query2.name]['query'] assert file_path.category in config['file_paths'] @pytest.mark.usefixtures('db') class TestQuery: def test_factory(self, db): query = QueryFactory(name='foobar', query='select * from foobar;') db.session.commit() assert query.name == 'foobar' assert query.sql == 'select * from foobar;' @pytest.mark.usefixtures('db') class TestFilePath: def test_create(self): target_paths = [ '/root/.ssh/%%', '/home/.ssh/%%', ] file_path = FilePath.create(category='foobar', target_paths=target_paths) assert file_path.to_dict() == {'foobar': target_paths} target_paths.append('/etc/%%') file_path.target_paths = '!!'.join(target_paths) file_path.save() assert file_path.to_dict() == {'foobar': target_paths}
Python
0.000209
@@ -1689,85 +1689,8 @@ ()%0A%0A - assert node.host_identifier == config%5B'options'%5D%5B'host_identifier'%5D%0A%0A