commit (stringlengths 40-40) | subject (stringlengths 1-3.25k) | old_file (stringlengths 4-311) | new_file (stringlengths 4-311) | old_contents (stringlengths 0-26.3k) | lang (stringclasses, 3 values) | proba (float64, 0-1) | diff (stringlengths 0-7.82k)
---|---|---|---|---|---|---|---|
a7728b466f5cacb662566e9e71ebc661ae40271a | Create max_end3.py | Python/CodingBat/max_end3.py | Python/CodingBat/max_end3.py | Python | 0.000011 | @@ -0,0 +1,189 @@
+# http://codingbat.com/prob/p135290%0A%0Adef max_end3(nums):%0A max = nums%5B0%5D if (nums%5B0%5D %3E nums%5B-1%5D) else nums%5B-1%5D # or use max(arg1, arg2)%0A for i in range(3):%0A nums%5Bi%5D = max%0A return nums%0A
|
|
6ae82ecdd749b936289b496a10faa2caf1aa94c6 | Add first version of the code | bibsort.py | bibsort.py | Python | 0.000002 | @@ -0,0 +1,2718 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport re%0Afrom collections import OrderedDict%0Aimport codecs%0A%0Aclass BibEntry:%0A %0A def __init__(self, **kwargs):%0A self.data = %7B%7D%0A for key, value in kwargs.iteritems():%0A self.data%5Bkey%5D = value%0A %0A def entry(self):%0A data = OrderedDict(sorted(self.data.items(), key=lambda t: t%5B0%5D))%0A result = u'@%7B0%7D%7B%7B%7B1%7D,%5Cn'.format(self.data%5B'type'%5D.upper(), self.data%5B'key'%5D)%0A for key, value in data.items():%0A if key in %5B'type','key'%5D:%0A continue%0A result += u'%5Ct%7B0%7D = %7B%7B%7B1%7D%7D%7D,%5Cn'.format(key, value)%0A result = result%5B:-2%5D + u'%5Cn%7D%5Cn'%0A return result%0A %0A%0A %0Adef must_omit(i):%0A return re.match(%22comment%22, i) or re.match(%22%25%25%22, i)%0A%0Adef entries_from_file(file):%0A%0A keywords = %5B'address', 'annote', 'author', 'booktitle', 'chapter', 'crossref',%0A 'doi', 'edition', 'editor', 'eprint', 'eprintclass', 'eprinttype',%0A 'howpublished', 'institution', 'journal', 'month', 'note', 'number', %0A 'organization', 'pages', 'publisher', 'school', 'series', 'title', %0A 'type', 'url', 'urldate', 'volume', 'year'%5D%0A%0A with codecs.open(file, %22r%22, %22utf-8%22) as f:%0A text = f.read()%0A%0A entries = %5B%5D%0A%0A entry_blocks = %5Bi for i in re.split(%22%5Cn@%22, text) if not must_omit(i)%5D%0A%0A for entry in entry_blocks:%0A %0A entry_dict = %7B%7D%0A %0A search = re.match(%22(?P%3Ctype%3E.*)%7B(?P%3Ckey%3E.*)%22, entry)%0A if search:%0A%0A key = search.group(%22key%22)%5B:-1%5D%0A%0A if search.group(%22type%22).startswith('@'):%0A type = search.group(%22type%22)%5B1:%5D%0A else:%0A type = search.group(%22type%22)%0A%0A entry_dict%5B%22key%22%5D = key%0A entry_dict%5B%22type%22%5D = type%0A%0A for keyword in keywords:%0A string = %22%5Cs*%22+keyword+%22%5Cs*=%5Cs*%5B%7B%5D?(?P%3C%22+keyword+%22%3E%5CS.*),?%5Cn%22%0A search = re.search(string, entry)%0A if search:%0A # Prohibits that 'eprinttype' overrides 'type'%0A if keyword in entry_dict.keys():%0A continue%0A value = search.group(keyword)%0A if value.endswith(','):%0A value = value%5B:-1%5D%0A if value.endswith('%7D%7D'):%0A value = value%5B:-1%5D%0A if value.endswith('%7D') and not value.startswith('%7B'):%0A value = value%5B:-1%5D%0A entry_dict%5Bkeyword%5D = value%0A%0A if entry_dict != %7B%7D:%0A entries.append(BibEntry(**entry_dict))%0A%0A return entries%0A%0A%0ABibEntries = entries_from_file('bibliography.bib')%0ABibEntries.sort(key=lambda x: x.data%5B'key'%5D.lower())%0A%0Afor _ in BibEntries:%0A print _.entry()
|
|
2428467d8c0d9c70a4931e1bd1b5971c9f45a0b7 | add function | function.py | function.py | Python | 0.000355 | @@ -0,0 +1,39 @@
+def foo(x,y):%0A print(x+y)%0A%0Afoo(3,4)%0A
|
|
9873891a9f26edc51a22e51b5910615a7e08d410 | Create WaterLevel.py | device/src/WaterLevel.py | device/src/WaterLevel.py | Python | 0.000001 | @@ -0,0 +1,311 @@
+#Water level sensor.%0A#VCC %0A#GND%0A#AO %3C--%3E ADC Port(A7) Analog data%0A%0A#AO is the specific value.%0A%0Aimport pyb%0A%0Aadc = pyb.ADC(Pin('A7')) # create an analog object from a pin%0Aadc = pyb.ADC(pyb.Pin.board.A7)%0A# read an analog value%0Adef getWaterLevel(): %0A print('WaterLevel Ao')%0A return adc.read()%0A
|
|
d43d4f29752bfae8a4d2e337f5523cd5fc7888d8 | add Trimplementation of Kadane's algorithm | dp/kadane-_algorithm/py/TrieToSucceed_kadane.py | dp/kadane-_algorithm/py/TrieToSucceed_kadane.py | Python | 0 | @@ -0,0 +1,1315 @@
+#!/usr/bin/python3%0A%22%22%22%0AThis module contains an implementation of Kadane's algorithm to determine the%0Amaximum sum of a subarray.%0A%22%22%22%0A%0Adef kadane(list_obj=None):%0A %22%22%22%0A Find maximum sum of a subarray%0A%0A :param list list_int: list of objs%0A :return: maximum sum of subarray%0A :rtype: int%0A%0A DOCTESTS%0A --------%0A Test 1 (list of ints):%0A %3E%3E%3E print(kadane(%5B-1, 2, 3, -4, 5, -6%5D))%0A 6%0A%0A Test 2 (list of ints):%0A %3E%3E%3E print(kadane(%5B-1, 2, 3, -6, 5, -6%5D))%0A 5%0A%0A Test 3 (list of ints):%0A %3E%3E%3E print(kadane(%5B3, 2, 3, -7, 5, -6%5D))%0A 11%0A%0A Test 4 (invalid argument type):%0A %3E%3E%3E print(kadane())%0A Traceback (most recent call last):%0A ...%0A TypeError: input must be of type list%0A%0A Test 5 (empty list):%0A %3E%3E%3E print(kadane(%5B%5D))%0A Traceback (most recent call last):%0A ...%0A ValueError: list must not be empty%0A %22%22%22%0A %0A if type(list_obj) is not list:%0A raise TypeError(%22input must be of type list%22)%0A if not list_obj:%0A raise ValueError(%22list must not be empty%22)%0A max_sum, cur_max = list_obj%5B0%5D, list_obj%5B0%5D%0A size = len(list_obj)%0A%0A for idx, val in enumerate(list_obj):%0A cur_max = max(val, val + cur_max)%0A max_sum = max(max_sum, cur_max)%0A%0A return max_sum%0A%0Aif __name__ == '__main__':%0A import doctest%0A doctest.testmod()%0A
|
|
3b1b708b739f43bdac86784b27838c80d179572b | solved day 17 | 17/main.py | 17/main.py | Python | 0.999067 | @@ -0,0 +1,2269 @@
+import collections%0Aimport unittest%0A%0A%0Adef gen_valid_combinations(liters, container_sizes):%0A first_container_capacity = container_sizes%5B0%5D%0A if len(container_sizes) == 1:%0A if liters == first_container_capacity:%0A yield %5B1%5D%0A elif liters == 0:%0A yield %5B0%5D%0A elif liters == 0:%0A yield %5B0 for _ in xrange(len(container_sizes))%5D%0A else:%0A if liters %3E= first_container_capacity:%0A for combination in gen_valid_combinations(liters - first_container_capacity,%0A container_sizes%5B1:%5D):%0A yield %5B1%5D + combination%0A for combination in gen_valid_combinations(liters, container_sizes%5B1:%5D):%0A yield %5B0%5D + combination%0A%0A%0A# ANSWERS ---------------------------%0A%0Acontainer_sizes = %5B%0A 43,%0A 3,%0A 4,%0A 10,%0A 21,%0A 44,%0A 4,%0A 6,%0A 47,%0A 41,%0A 34,%0A 17,%0A 17,%0A 44,%0A 36,%0A 31,%0A 46,%0A 9,%0A 27,%0A 38%0A%5D%0A%0Acombinations = %5Bcombination%0A for combination in gen_valid_combinations(150, container_sizes)%0A%5D%0Acombinations_by_size = collections.defaultdict(lambda:list())%0Afor combination in combinations:%0A combinations_by_size%5Bsum(combination)%5D.append(combination)%0Afor size, combinations in combinations_by_size.iteritems():%0A print size, len(combinations)%0A%0A%0A# TESTS -----------------------------%0A%0Aclass MyTests(unittest.TestCase):%0A%0A def test_one_container_impossible(self):%0A container_sizes = %5B10%5D%0A combinations = %5Bcombination%0A for combination in gen_valid_combinations(11, container_sizes)%5D%0A self.assertEqual(combinations, %5B%5D)%0A%0A def test_one_container_possible(self):%0A container_sizes = %5B10%5D%0A combinations = %5Bcombination%0A for combination in gen_valid_combinations(10, container_sizes)%5D%0A self.assertEqual(combinations, %5B%5B1%5D%5D)%0A%0A def test_two_container_possible(self):%0A container_sizes = %5B10, 5%5D%0A combinations = %5Bcombination%0A for combination in gen_valid_combinations(15, container_sizes)%5D%0A self.assertEqual(combinations, %5B%5B1, 1%5D%5D)%0A%0A def test_examples(self):%0A container_sizes = %5B20, 15, 10, 5, 5%5D%0A combinations = %5Bcombination%0A for combination in gen_valid_combinations(25, container_sizes)%5D%0A self.assertEqual(combinations, %5B%0A %5B1, 0, 0, 1, 0%5D,%0A %5B1, 0, 0, 0, 1%5D,%0A %5B0, 1, 1, 0, 0%5D,%0A %5B0, 1, 0, 1, 1%5D%5D)%0A%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()
|
|
4d196f4f897ac6d2c590803d491192e340ec475e | fetch option order example | examples/py/async-binance-fetch-option-order.py | examples/py/async-binance-fetch-option-order.py | Python | 0.999999 | @@ -0,0 +1,875 @@
+# -*- coding: utf-8 -*-%0A%0Aimport asyncio%0Aimport os%0Aimport sys%0Afrom pprint import pprint%0A%0Aroot = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))%0Asys.path.append(root + '/python')%0A%0Aimport ccxt.async_support as ccxt # noqa: E402%0A%0A%0Aasync def main():%0A exchange = ccxt.binance(%7B%0A 'apiKey': 'YOUR_API_KEY',%0A 'secret': 'YOUR_SECRET',%0A # 'verbose': True, # for debug output%0A %7D)%0A await exchange.load_markets()%0A market_id = 'ETH-221028-1700-C'%0A order_id = 4612098335294532880%0A try:%0A response = await exchange.eapiPrivateGetOpenOrders(%7B%0A # 'symbol': market_id, # optional%0A # 'orderId': order_id, # optional%0A %7D)%0A pprint(response)%0A except Exception as e:%0A print('eapiPrivateGetOpenOrders() failed')%0A print(e)%0A await exchange.close()%0A%0A%0Aasyncio.run(main())%0A
|
|
08e43e8bfd150252b3e05ff62ee25cdf0e519f20 | Revert #830 because it broke the case when the main script is not in path. | meson.py | meson.py | #!/usr/bin/env python3
# Copyright 2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mesonbuild import mesonmain
import sys, os
sys.exit(mesonmain.run(sys.argv[0], sys.argv[1:]))
| Python | 0 | @@ -654,16 +654,380 @@
ys, os%0A%0A
+def main():%0A thisfile = __file__%0A if not os.path.isabs(thisfile):%0A thisfile = os.path.normpath(os.path.join(os.getcwd(), thisfile))%0A if __package__ == '':%0A thisfile = os.path.dirname(thisfile)%0A%0A # The first argument *must* be an absolute path because%0A # the user may have launched the program from a dir%0A # that is not in path.%0A
sys.exit
@@ -1045,19 +1045,16 @@
run(
-sys.argv%5B0%5D
+thisfile
, sy
@@ -1066,8 +1066,48 @@
v%5B1:%5D))%0A
+%0Aif __name__ == '__main__':%0A main()%0A%0A
|
27b2e87a8653961fbba45962e9e6ec1d20904a03 | Create demo_lcd.py | 20x4LCD/demo_lcd.py | 20x4LCD/demo_lcd.py | Python | 0.000001 | @@ -0,0 +1,228 @@
+import lcddriver%0Afrom time import *%0A%0Alcd = lcddriver.lcd()%0A%0Alcd.lcd_display_string(%22Hello world%22, 1)%0Alcd.lcd_display_string(%22My name is%22, 2)%0Alcd.lcd_display_string(%22picorder%22, 3)%0Alcd.lcd_display_string(%22I am a Raspberry Pi%22, 4)%0A
|
|
e4a4e8d43c1b4c63ac32467a8e49a5b81f8f2fa3 | Create roundrobin.py | roundrobin.py | roundrobin.py | Python | 0.000369 | @@ -0,0 +1,1698 @@
+import string%0Afrom game import Game%0A%0Aclass RoundRobin(object):%0A %0A def __init__(self, teams_count):%0A self.teams = generateTeams(teams_count)%0A self.current_round = 0%0A %0A def getRound(self):%0A games = %5B%5D%0A teams_count = len(self.teams)%0A home_away_index = self.current_round // (teams_count-1)%0A for i in range(0, teams_count, 2):%0A if home_away_index%252 == 0:%0A game = Game( self.teams%5Bi%5D, self.teams%5Bi+1%5D )%0A else:%0A game = Game( self.teams%5Bi+1%5D, self.teams%5Bi%5D )%0A games.append( game )%0A return games%0A %0A def getNextRound(self):%0A self.rotate()%0A return self.getRound()%0A %0A def rotate(self):%0A head = self.teams%5B0%5D%0A tail = self.teams%5B1: len(self.teams)-1%5D%0A second = self.teams%5Blen(self.teams)-1%5D%0A self.teams = %5B%5D%0A self.teams.append(head)%0A self.teams.append(second)%0A self.teams = self.teams + tail%0A self.current_round += 1%0A %0A def getSchedule(self, rounds_count):%0A schedule = %5B%5D%0A for i in range(rounds_count):%0A games = self.getRound()%0A schedule.append(games)%0A self.rotate()%0A return schedule%0A %0A def printSchedule(self, rounds_count):%0A schedule = self.getSchedule(rounds_count)%0A for day in range(len(schedule)):%0A print %22== Day #%22 + str(day+1)%0A games = schedule%5Bday%5D%0A for game in games:%0A print game%0A self.rotate()%0A %0Adef generateTeams(teams_count):%0A teams = list(string.ascii_uppercase)%5B:teams_count%5D%0A if teams_count%252 != 0:%0A teams.append(%22 %22)%0A return teams%0A
|
|
6787d62bf6f77dbf0fd5cbfeb3fad98298d596af | Add track_instance_changes note in disable_group_policy_check_upcall | nova/conf/workarounds.py | nova/conf/workarounds.py | # Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The 'workarounds' group is for very specific reasons.
If you're:
- Working around an issue in a system tool (e.g. libvirt or qemu) where the
fix is in flight/discussed in that community.
- The tool can be/is fixed in some distributions and rather than patch the
code those distributions can trivially set a config option to get the
"correct" behavior.
Then this is a good place for your workaround.
.. warning::
Please use with care! Document the BugID that your workaround is paired with.
"""
from oslo_config import cfg
workarounds_group = cfg.OptGroup(
'workarounds',
title='Workaround Options',
help="""
A collection of workarounds used to mitigate bugs or issues found in system
tools (e.g. Libvirt or QEMU) or Nova itself under certain conditions. These
should only be enabled in exceptional circumstances. All options are linked
against bug IDs, where more information on the issue can be found.
""")
ALL_OPTS = [
cfg.BoolOpt(
'disable_rootwrap',
default=False,
help="""
Use sudo instead of rootwrap.
Allow fallback to sudo for performance reasons.
For more information, refer to the bug report:
https://bugs.launchpad.net/nova/+bug/1415106
Possible values:
* True: Use sudo instead of rootwrap
* False: Use rootwrap as usual
Interdependencies to other options:
* Any options that affect 'rootwrap' will be ignored.
"""),
cfg.BoolOpt(
'disable_libvirt_livesnapshot',
default=True,
help="""
Disable live snapshots when using the libvirt driver.
Live snapshots allow the snapshot of the disk to happen without an
interruption to the guest, using coordination with a guest agent to
quiesce the filesystem.
When using libvirt 1.2.2 live snapshots fail intermittently under load
(likely related to concurrent libvirt/qemu operations). This config
option provides a mechanism to disable live snapshot, in favor of cold
snapshot, while this is resolved. Cold snapshot causes an instance
outage while the guest is going through the snapshotting process.
For more information, refer to the bug report:
https://bugs.launchpad.net/nova/+bug/1334398
Possible values:
* True: Live snapshot is disabled when using libvirt
* False: Live snapshots are always used when snapshotting (as long as
there is a new enough libvirt and the backend storage supports it)
"""),
cfg.BoolOpt(
'handle_virt_lifecycle_events',
default=True,
help="""
Enable handling of events emitted from compute drivers.
Many compute drivers emit lifecycle events, which are events that occur when,
for example, an instance is starting or stopping. If the instance is going
through task state changes due to an API operation, like resize, the events
are ignored.
This is an advanced feature which allows the hypervisor to signal to the
compute service that an unexpected state change has occurred in an instance
and that the instance can be shutdown automatically. Unfortunately, this can
race in some conditions, for example in reboot operations or when the compute
service or when host is rebooted (planned or due to an outage). If such races
are common, then it is advisable to disable this feature.
Care should be taken when this feature is disabled and
'sync_power_state_interval' is set to a negative value. In this case, any
instances that get out of sync between the hypervisor and the Nova database
will have to be synchronized manually.
For more information, refer to the bug report:
https://bugs.launchpad.net/bugs/1444630
Interdependencies to other options:
* If ``sync_power_state_interval`` is negative and this feature is disabled,
then instances that get out of sync between the hypervisor and the Nova
database will have to be synchronized manually.
"""),
cfg.BoolOpt(
'disable_group_policy_check_upcall',
default=False,
help="""
Disable the server group policy check upcall in compute.
In order to detect races with server group affinity policy, the compute
service attempts to validate that the policy was not violated by the
scheduler. It does this by making an upcall to the API database to list
the instances in the server group for one that it is booting, which violates
our api/cell isolation goals. Eventually this will be solved by proper affinity
guarantees in the scheduler and placement service, but until then, this late
check is needed to ensure proper affinity policy.
Operators that desire api/cell isolation over this check should
enable this flag, which will avoid making that upcall from compute.
"""),
]
def register_opts(conf):
conf.register_group(workarounds_group)
conf.register_opts(ALL_OPTS, group=workarounds_group)
def list_opts():
return {workarounds_group: ALL_OPTS}
| Python | 0.000104 | @@ -5206,16 +5206,155 @@
compute.
+%0A%0ARelated options:%0A%0A* %5Bfilter_scheduler%5D/track_instance_changes also relies on upcalls from the%0A compute service to the scheduler service.
%0A%22%22%22),%0A%5D
|
d1eac9803adbf9b91b22ce62a4bdf5db790b6265 | Create ShodanToCSV.py | ShodanToCSV.py | ShodanToCSV.py | Python | 0 | @@ -0,0 +1,1693 @@
+#!/usr/bin/env python%0A#%0A# Search shodan, output to CSV%0A# To ensure comma as seperator, all comma's in os and header field (if any) are replaced for ;;;%0A# To ensure row integrity all newlines (%5Cn) are replaced by #NWLN%0A# Author: Jeroen%0A%0Aimport shodan%0Aimport sys%0Aimport os%0Afrom optparse import OptionParser%0A%0A#Initialize userinput%0Aoparser = OptionParser(%22usage: %25prog %5Boptions%5D %5Bcommand%5D*%22, version=%22v%25d.%25d.%25d%22 %25 (1, 0, 0))%0Aoparser.add_option(%22-d%22, %22--debug%22, dest=%22debug%22, action = %22store_true%22, help=%22Be extremely verbose%22, default=False)%0Aoparser.add_option(%22-k%22, %22--key%22, dest=%22AKEY%22, help=%22Use your personal API key%22,default=%22GETYOUROWNKEY%22)%0Aoparser.add_option(%22-s%22, %22--search%22, dest=%22searchQuery%22, help=%22Insert shodan search query%22,default=False)%0Aoparser.add_option(%22-o%22, %22--output%22, dest=%22outputFileName%22, help=%22output filename%22,default=%22output.csv%22)%0A%0A(options,args) = oparser.parse_args(sys.argv)%0A%0Aif (options.searchQuery == False):%0Aprint 'Type shodanToCSV.py --help for syntax'%0Asys.exit(1)%0A%0Atry:%0A# Setup the api%0Aapi = shodan.WebAPI(options.AKEY)%0A%0A# Perform the search%0Aresult = api.search(options.searchQuery)%0AcsvHeader = %22ip,port,os,country,lastupdate,header%5Cn%22%0Afo = open(options.outputFileName, 'w')%0Afo.write(str(csvHeader))%0A# Loop through the matches and print each IP%0Afor result in result%5B'matches'%5D:%0Arow = result%5B'ip'%5D + ',' + str(result%5B'port'%5D) + ',' + str(result%5B'os'%5D).replace(%22,%22,%22;;;%22) + ',' + result%5B'country_name'%5D + ',' + result%5B'updated'%5D + ',' + str(result%5B'data'%5D).replace(%22,%22,%22;;;%22)%0Arow = row.replace(%22%5Cr%5Cn%22,%22%22).replace(%22%5Cn%22,%22%22) + str(os.linesep)%0Aif(options.debug != False):%0Aprint str(row)%0Afo.write(str(row))%0Afo.close()%0Aexcept Exception, e:%0Aprint 'Error: %25s' %25 e%0Aexit(1)%0A
|
|
9be177007ce95f2b9e47225a46effe7b7682ba38 | Create StockReader.py | StockReader.py | StockReader.py | Python | 0 | @@ -0,0 +1,825 @@
+#econogee, 1/28/2016%0A#Stock Data Retrieval Script%0A%0Aimport os%0Aimport numpy as np%0Aimport urllib2%0A%0Astartday = str(0)%0Astartmonth = str(1)%0Astartyear = str(2005)%0A%0Aendday = str(30)%0Aendmonth = str(1)%0Aendyear = str(2016)%0A%0A%0Asymbols = %5B%5D%0Awith open('stocklist.csv') as f:%0A content = f.readlines()%0A for l in content:%0A symbols.append(l.split(%22,%22)%5B0%5D)%0A %0Afor s in symbols:%0A response = urllib2.urlopen('http://real-chart.finance.yahoo.com/table.csv?s='+str(s)+%5C%0A '&a=' + startday + '&b=' + startmonth + '&c=' + startyear + %5C%0A '&d=' + endday + '&e=' + endmonth + '&f=' + endyear + %5C%0A '&g=d&ignore=.csv')%0A html = response.read()%0A html = html.split('%5Cn')%0A html = np.array(html)%0A np.savetxt(str(s),html,fmt='%25s',delimiter=',')%0A
|
|
706da9008e8101c03bb2c7754b709209897cd952 | Add Organization Administrator model. | app/soc/models/org_admin.py | app/soc/models/org_admin.py | Python | 0 | @@ -0,0 +1,870 @@
+#!/usr/bin/python2.5%0A#%0A# Copyright 2008 the Melange authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A# %0A# http://www.apache.org/licenses/LICENSE-2.0%0A# %0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22This module contains the Organization Administrator Model.%22%22%22%0A%0A__authors__ = %5B%0A '%22Pawel Solyga%22 %[email protected]%3E',%0A%5D%0A%0A%0Aimport soc.models.role%0A%0A%0Aclass OrgAdmin(soc.models.role.Role):%0A %22%22%22Adminitrator details for a specific Organization.%0A %22%22%22%0A pass%0A
|
|
2e3349b75fffb9a9f3906d065bc8f141eef02d38 | Add run_wsgi | run_wsgi.wsgi | run_wsgi.wsgi | Python | 0.000005 | @@ -0,0 +1,277 @@
+#!/usr/bin/env python%0A%0Aimport os%0Aimport sys%0A%0Asys.stdout = sys.stderr%0A%0AINTELLIDATA_DIR = os.path.dirname(__file__)%0A%0Asys.path.insert(0, INTELLIDATA_DIR)%0Aos.chdir(INTELLIDATA_DIR)%0A%0Aimport config%0A%0Afrom intellidata import app as application%0Aapplication.config.from_object('config')%0A
|
|
b8bb92e083983ad6a01ab41702846865d2cf2f4d | fix the auth_version | rally/plugins/openstack/verification/tempest/config.py | rally/plugins/openstack/verification/tempest/config.py | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os
from oslo_config import cfg
import six
from six.moves import configparser
from six.moves.urllib import parse
from rally.verification import utils
CONF = cfg.CONF
class TempestConfigfileManager(object):
"""Class to create a Tempest config file."""
def __init__(self, deployment):
self.credential = deployment.get_credentials_for("openstack")["admin"]
self.clients = self.credential.clients()
self.available_services = self.clients.services().values()
self.conf = configparser.ConfigParser()
def _get_service_type_by_service_name(self, service_name):
for s_type, s_name in self.clients.services().items():
if s_name == service_name:
return s_type
def _configure_auth(self, section_name="auth"):
self.conf.set(section_name, "admin_username",
self.credential.username)
self.conf.set(section_name, "admin_password",
self.credential.password)
self.conf.set(section_name, "admin_project_name",
self.credential.tenant_name)
# Keystone v3 related parameter
self.conf.set(section_name, "admin_domain_name",
self.credential.user_domain_name or "Default")
# Sahara has two service types: 'data_processing' and 'data-processing'.
# 'data_processing' is deprecated, but it can be used in previous OpenStack
# releases. So we need to configure the 'catalog_type' option to support
# environments where 'data_processing' is used as service type for Sahara.
def _configure_data_processing(self, section_name="data-processing"):
if "sahara" in self.available_services:
self.conf.set(section_name, "catalog_type",
self._get_service_type_by_service_name("sahara"))
def _configure_identity(self, section_name="identity"):
self.conf.set(section_name, "region",
self.credential.region_name)
auth_url = self.credential.auth_url
if "/v2" not in auth_url and "/v3" not in auth_url:
auth_version = "v2"
auth_url_v2 = parse.urljoin(auth_url, "/v2.0")
else:
url_path = parse.urlparse(auth_url).path
auth_version = url_path[1:3]
auth_url_v2 = auth_url.replace(url_path, "/v2.0")
self.conf.set(section_name, "auth_version", auth_version)
self.conf.set(section_name, "uri", auth_url_v2)
self.conf.set(section_name, "uri_v3",
auth_url_v2.replace("/v2.0", "/v3"))
self.conf.set(section_name, "disable_ssl_certificate_validation",
str(self.credential.https_insecure))
self.conf.set(section_name, "ca_certificates_file",
self.credential.https_cacert)
# The compute section is configured in context class for Tempest resources.
# Options which are configured there: 'image_ref', 'image_ref_alt',
# 'flavor_ref', 'flavor_ref_alt'.
def _configure_network(self, section_name="network"):
if "neutron" in self.available_services:
neutronclient = self.clients.neutron()
public_nets = [net for net
in neutronclient.list_networks()["networks"]
if net["status"] == "ACTIVE" and
net["router:external"] is True]
if public_nets:
net_id = public_nets[0]["id"]
net_name = public_nets[0]["name"]
self.conf.set(section_name, "public_network_id", net_id)
self.conf.set(section_name, "floating_network_name", net_name)
else:
novaclient = self.clients.nova()
net_name = next(net.human_id for net in novaclient.networks.list()
if net.human_id is not None)
self.conf.set("compute", "fixed_network_name", net_name)
self.conf.set("validation", "network_for_ssh", net_name)
def _configure_network_feature_enabled(
self, section_name="network-feature-enabled"):
if "neutron" in self.available_services:
neutronclient = self.clients.neutron()
extensions = neutronclient.list_ext("extensions", "/extensions",
retrieve_all=True)
aliases = [ext["alias"] for ext in extensions["extensions"]]
aliases_str = ",".join(aliases)
self.conf.set(section_name, "api_extensions", aliases_str)
def _configure_object_storage(self, section_name="object-storage"):
self.conf.set(section_name, "operator_role",
CONF.tempest.swift_operator_role)
self.conf.set(section_name, "reseller_admin_role",
CONF.tempest.swift_reseller_admin_role)
def _configure_service_available(self, section_name="service_available"):
services = ["cinder", "glance", "heat", "ironic", "neutron", "nova",
"sahara", "swift"]
for service in services:
# Convert boolean to string because ConfigParser fails
# on attempt to get option with boolean value
self.conf.set(section_name, service,
str(service in self.available_services))
def _configure_validation(self, section_name="validation"):
if "neutron" in self.available_services:
self.conf.set(section_name, "connect_method", "floating")
else:
self.conf.set(section_name, "connect_method", "fixed")
def _configure_orchestration(self, section_name="orchestration"):
self.conf.set(section_name, "stack_owner_role",
CONF.tempest.heat_stack_owner_role)
self.conf.set(section_name, "stack_user_role",
CONF.tempest.heat_stack_user_role)
def create(self, conf_path, extra_options=None):
self.conf.read(os.path.join(os.path.dirname(__file__), "config.ini"))
for name, method in inspect.getmembers(self, inspect.ismethod):
if name.startswith("_configure_"):
method()
if extra_options:
utils.add_extra_options(extra_options, self.conf)
with open(conf_path, "w") as configfile:
self.conf.write(configfile)
raw_conf = six.StringIO()
raw_conf.write("# Some empty values of options will be replaced while "
"creating required resources (images, flavors, etc).\n")
self.conf.write(raw_conf)
return raw_conf.getvalue()
| Python | 0.000137 | @@ -2934,20 +2934,52 @@
n =
-url_path%5B1:3
+%5Bv for v in url_path.split(%22/%22) if v%5D%5B-1%5D%5B:2
%5D%0A
|
a086e7328ca920f269812a87be095ce638467f95 | Add youtube-dl library sample of operation | crawler/youtube_dl_op_sample.py | crawler/youtube_dl_op_sample.py | Python | 0 | @@ -0,0 +1,994 @@
+#!/usr/bin/env python2%0A#-*- coding: utf-8 -*-%0A%0Aimport sys%0Aimport youtube_dl%0A%0A%0Adef main():%0A if len(sys.argv) %3C 2:%0A print(%22Usage: youtube_dl_op_sample.py URL%22)%0A return%0A%0A opts = %7B %0A 'forceurl': True,%0A 'quiet': True,%0A 'simulate': True,%0A %7D%0A%0A url = sys.argv%5B1%5D%0A%0A try:%0A with youtube_dl.YoutubeDL(opts) as ydl:%0A extract_info = ydl.extract_info(url)%0A resource_uri = extract_info.get('url')%0A%0A if not resource_uri:%0A format_id = extract_info.get('format_id')%0A for fmt in extract_info.get('formats'):%0A if format_id != fmt.get('format_id'):%0A continue%0A resource_uri = fmt.get('url')%0A except Exception as e:%0A print(e)%0A resource_uri = None%0A%0A if resource_uri:%0A print(%22resource_uri: %25s%22 %25 resource_uri)%0A else:%0A print(%22Nothing at all.%22)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
a083baddd853514a5697e3a98eea4251c2ce5487 | Create __openerp__.py | __openerp__.py | __openerp__.py | Python | 0.005291 | @@ -0,0 +1,442 @@
+%7B%0A %22name%22: %22Product price based on margin with formula sale_price=cost_price/margin%22,%0A %22version%22: %228.0.0.1%22,%0A %22author%22: %223nodus%22,%0A 'category': 'Product',%0A %22website%22: %22http://www.3nodus.com/%22,%0A %22license%22: %22AGPL-3%22,%0A %22depends%22: %5B%0A %22product%22,%0A %5D,%0A %22demo%22: %5B%0A%0A %5D,%0A %22data%22: %5B%0A %0A %5D,%0A %22test%22: %5B%5D,%0A %22js%22: %5B%5D,%0A %22css%22: %5B%5D,%0A %22qweb%22: %5B%5D,%0A %22installable%22: True,%0A %22auto_install%22: False,%0A%7D%0A
|
|
22a3a6aa70c2960983887717b98cab2149a18d89 | Fix #121: don't accept tells to bot | plugins/tell.py | plugins/tell.py | " tell.py: written by sklnd in July 2009"
" 2010.01.25 - modified by Scaevolus"
import time
from util import hook, timesince
def db_init(db):
"check to see that our db has the tell table and return a dbection."
db.execute("create table if not exists tell"
"(user_to, user_from, message, chan, time,"
"primary key(user_to, message))")
db.commit()
return db
def get_tells(db, user_to):
return db.execute("select user_from, message, time, chan from tell where"
" user_to=lower(?) order by time",
(user_to.lower(),)).fetchall()
@hook.singlethread
@hook.event('PRIVMSG')
def tellinput(paraml, input=None, db=None):
if 'showtells' in input.msg.lower():
return
db_init(db)
tells = get_tells(db, input.nick)
if tells:
user_from, message, time, chan = tells[0]
reltime = timesince.timesince(time)
reply = "%s said %s ago in %s: %s" % (user_from, reltime, chan,
message)
if len(tells) > 1:
reply += " (+%d more, .showtells to view)" % (len(tells) - 1)
db.execute("delete from tell where user_to=lower(?) and message=?",
(input.nick, message))
db.commit()
input.pm(reply)
@hook.command(autohelp=False)
def showtells(inp, nick='', chan='', pm=None, db=None):
".showtells -- view all pending tell messages (sent in PM)."
db_init(db)
tells = get_tells(db, nick)
if not tells:
pm("You have no pending tells.")
return
for tell in tells:
user_from, message, time, chan = tell
past = timesince.timesince(time)
pm("%s said %s ago in %s: %s" % (user_from, past, chan, message))
db.execute("delete from tell where user_to=lower(?)",
(nick,))
db.commit()
@hook.command
def tell(inp, nick='', chan='', db=None):
".tell <nick> <message> -- relay <message> to <nick> when <nick> is around"
query = inp.split(' ', 1)
if len(query) != 2:
return tell.__doc__
user_to = query[0].lower()
message = query[1].strip()
user_from = nick
if chan.lower() == user_from.lower():
chan = 'a pm'
if user_to == user_from.lower():
return "No."
db_init(db)
if db.execute("select count() from tell where user_to=?",
(user_to,)).fetchone()[0] >= 5:
return "That person has too many things queued."
try:
db.execute("insert into tell(user_to, user_from, message, chan,"
"time) values(?,?,?,?,?)", (user_to, user_from, message,
chan, time.time()))
db.commit()
except db.IntegrityError:
return "Message has already been queued."
return "I'll pass that along."
| Python | 0 | @@ -1934,32 +1934,43 @@
chan='', db=None
+, conn=None
):%0A %22.tell %3Cn
@@ -2285,19 +2285,20 @@
user_to
-==
+in (
user_fro
@@ -2298,32 +2298,52 @@
ser_from.lower()
+, conn.nick.lower())
:%0A return
|
ca83457b4a003527cad9c9d57402c53e4571299c | add python opt and logging boilerplate code | sandbox/python/boilerplate_code/python_opt_log.py | sandbox/python/boilerplate_code/python_opt_log.py | Python | 0 | @@ -0,0 +1,1230 @@
+#!/usr/bin/env python%0A%0Aimport argparse%0Aimport logging%0Aimport os%0Aimport sys%0Aimport re%0Alogger = None%0A%0Adef my_function(blah):%0A return%0Aif __name__ == %22__main__%22:%0A FORMAT = '%25(levelname)s %25(asctime)-15s %25(name)-20s %25(message)s'%0A%0A parser = argparse.ArgumentParser(description=%22program name%22, formatter_class=argparse.ArgumentDefaultsHelpFormatter)%0A%0A parser.add_argument(%22input1%22, type = file)%0A parser.add_argument(%22input2%22, type = file)%0A parser.add_argument(%22--selection%22, type = str, default = 'a', choices = %5B'a', 'b', 'c'%5D, help = 'choose from a,b,c')%0A parser.add_argument(%22--cutoff%22, type = int, default = 1, help = 'cutoff score')%0A parser.add_argument(%22--variable_args%22, type = float, action = 'append', nargs = 3, %0A%09 default = %5B1.0,2.0,1.2%5D, help = '3 scores')%0A parser.add_argument(%22--verbose%22,%22-v%22, action = 'count', help='increase verbosity')%0A args = parser.parse_args()%0A%0A if args.verbose %3E= 1:%0A logging.basicConfig(level=logging.DEBUG, format = FORMAT)%0A else:%0A logging.basicConfig(level=logging.INFO, format=FORMAT)%0A logger = logging.getLogger(__name__)%0A%0A logger.info(%22working hard ...%22)%0A my_function(args.input1, args.input2)%0A logger.info(%22Done.%22)%0A
|
|
1058a9cb6e667c850f56b6003038496b77c359c5 | Add tool to fix links. | website/tools/append_index_html_to_internal_links.py | website/tools/append_index_html_to_internal_links.py | Python | 0 | @@ -0,0 +1,2555 @@
+%22%22%22Script to fix the links in the staged website.%0AFinds all internal links which do not have index.html at the end and appends%0Aindex.html in the appropriate place (preserving anchors, etc).%0A%0AUsage:%0A From root directory, after running the jekyll build, execute%0A 'python tools/append_index_html_to_internal_links.py'.%0A%0ADependencies:%0A beautifulsoup4%0A Installable via pip as 'sudo pip install beautifulsoup4' or apt via%0A 'sudo apt-get install python-beautifulsoup4'.%0A%0A%22%22%22%0A%0Aimport fnmatch%0Aimport os%0Aimport re%0Afrom bs4 import BeautifulSoup%0A%0A# Original link match. Matches any string which starts with '/' and doesn't%0A# have a file extension.%0AlinkMatch = r'%5E%5C/(.*%5C.(?!(%5B%5E%5C/%5D+)$))?%5B%5E.%5D*$'%0A%0A# Regex which matches strings of type /internal/link/#anchor. Breaks into two%0A# groups for ease of inserting 'index.html'.%0AanchorMatch1 = r'(.+%5C/)(#%5B%5E%5C/%5D+$)'%0A%0A# Regex which matches strings of type /internal/link#anchor. Breaks into two%0A# groups for ease of inserting 'index.html'.%0AanchorMatch2 = r'(.+%5C/%5Ba-zA-Z0-9%5D+)(#%5B%5E%5C/%5D+$)'%0A%0A%0Amatches = %5B%5D%0A# Recursively walk content directory and find all html files.%0Afor root, dirnames, filenames in os.walk('content'):%0A for filename in fnmatch.filter(filenames, '*.html'):%0A # Javadoc does not have the index.html problem, so omit it.%0A if 'javadoc' not in root:%0A matches.append(os.path.join(root, filename))%0A%0Aprint 'Matches: ' + str(len(matches))%0A# Iterates over each matched file looking for link matches.%0Afor match in matches:%0A print 'Fixing links in: ' + match%0A mf = open(match)%0A soup = BeautifulSoup(mf, %22lxml%22)%0A # Iterates over every %3Ca%3E%0A for a in soup.findAll('a'):%0A try:%0A hr = a%5B'href'%5D%0A if re.match(linkMatch, hr) is not None:%0A if hr.endswith('/'):%0A # /internal/link/%0A a%5B'href'%5D = hr + 'index.html'%0A elif re.match(anchorMatch1, hr) is not None:%0A # /internal/link/#anchor%0A mat = re.match(anchorMatch1, hr)%0A a%5B'href'%5D = mat.group(1) + 'index.html' + mat.group(2)%0A elif re.match(anchorMatch2, hr) is not None:%0A # /internal/link#anchor%0A mat = re.match(anchorMatch2, hr)%0A a%5B'href'%5D = mat.group(1) + '/index.html' + mat.group(2)%0A else:%0A # /internal/link%0A a%5B'href'%5D = hr + '/index.html'%0A mf.close()%0A%0A html = soup.prettify(%22utf-8%22)%0A # Write back to the file.%0A with open(match, %22wb%22) as f:%0A print 'Replacing ' + hr + ' with: ' + a%5B'href'%5D%0A f.write(html)%0A except KeyError as e:%0A # Some %3Ca%3E tags don't have an href.%0A continue%0A
|
|
ede7a61e1c1a77438bc027b41a5a9cb03eb6328c | raise a timeout in nrpe_poller test, so windows connect() has enought time | test/test_modules_nrpe_poller.py | test/test_modules_nrpe_poller.py | #!/usr/bin/env python
# Copyright (C) 2009-2010:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
import os
from Queue import Empty
from multiprocessing import Queue, Manager, active_children
from shinken_test import *
from shinken.log import logger
from shinken.objects.module import Module
from shinken.modules import nrpe_poller
from shinken.modules.nrpe_poller import get_instance
modconf = Module()
modconf.module_name = "NrpePoller"
modconf.module_type = nrpe_poller.properties['type']
modconf.properties = nrpe_poller.properties.copy()
class TestNrpePoller(ShinkenTest):
# Uncomment this is you want to use a specific configuration
# for your test
#def setUp(self):
# self.setup_with_file('etc/nagios_module_hack_cmd_poller_tag.cfg')
def test_nrpe_poller(self):
mod = nrpe_poller.Nrpe_poller(modconf)
sl = get_instance(mod)
# Look if we really change our commands
print sl.__dict__
sl.id = 1
sl.i_am_dying = False
manager = Manager()
to_queue = manager.Queue()
from_queue = manager.Queue() # list()
control_queue = Queue()
# We prepare a check in the to_queue
status = 'queue'
command = "$USER1$/check_nrpe -H localhost33 -n -u -t 1 -c check_load3 -a 20" # -a arg1 arg2 arg3"
ref = None
t_to_to = time.time()
c = Check(status, command, ref, t_to_to)
msg = Message(id=0, type='Do', data=c)
to_queue.put(msg)
# The worker will read a message by loop. We want it to
# do 2 loops, so we fake a message, adn the Number 2 is a real
# exit one
msg1 = Message(id=0, type='All is good, continue')
msg2 = Message(id=0, type='Die')
control_queue.put(msg1)
for _ in xrange(1, 2):
control_queue.put(msg1)
# control_queue.put(msg1)
# control_queue.put(msg1)
# control_queue.put(msg1)
# control_queue.put(msg1)
control_queue.put(msg2)
sl.work(to_queue, from_queue, control_queue)
o = from_queue.get() # pop()
print "O", o
print o.__dict__
self.assert_(o.status == 'done')
self.assert_(o.exit_status == 2)
# to_queue.close()
# control_queue.close()
if __name__ == '__main__':
unittest.main()
| Python | 0 | @@ -2054,17 +2054,17 @@
n -u -t
-1
+5
-c chec
|
8bf248f304e7188e279a37ff06c8fc41f54e1df8 | Add console log | Logging.py | Logging.py | Python | 0.000003 | @@ -0,0 +1,249 @@
+%0Afrom GulpServer.Settings import Settings%0A%0A%0A%0Auser_settings = None%0A%0A%0A%0Adef plugin_loaded():%0A%09global user_settings%0A%09user_settings = Settings()%0A%0A%0A%0A%0A%0Aclass Console(object):%0A%0A%09def log(self, *args):%0A%0A%09%09if user_settings.get('dev'):%0A%09%09%09print(*args)%0A%0A%0A%0A%0A%0A%0A%0A%0A%0A
|
|
8f84ea073b24589327a89368ef7488ebaacbab75 | Drop Python 2.6 workaround. | gcloud/exceptions.py | gcloud/exceptions.py | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom exceptions for :mod:`gcloud` package.
See: https://cloud.google.com/storage/docs/json_api/v1/status-codes
"""
import copy
import json
import six
_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module
class GCloudError(Exception):
"""Base error class for gcloud errors (abstract).
Each subclass represents a single type of HTTP error response.
"""
code = None
"""HTTP status code. Concrete subclasses *must* define.
See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
"""
def __init__(self, message, errors=()):
super(GCloudError, self).__init__()
# suppress deprecation warning under 2.6.x
self.message = message
self._errors = errors
def __str__(self):
return '%d %s' % (self.code, self.message)
@property
def errors(self):
"""Detailed error information.
:rtype: list(dict)
:returns: a list of mappings describing each error.
"""
return [copy.deepcopy(error) for error in self._errors]
class Redirection(GCloudError):
"""Base for 3xx responses
This class is abstract.
"""
class MovedPermanently(Redirection):
"""Exception mapping a '301 Moved Permanently' response."""
code = 301
class NotModified(Redirection):
"""Exception mapping a '304 Not Modified' response."""
code = 304
class TemporaryRedirect(Redirection):
"""Exception mapping a '307 Temporary Redirect' response."""
code = 307
class ResumeIncomplete(Redirection):
"""Exception mapping a '308 Resume Incomplete' response."""
code = 308
class ClientError(GCloudError):
"""Base for 4xx responses
This class is abstract
"""
class BadRequest(ClientError):
"""Exception mapping a '400 Bad Request' response."""
code = 400
class Unauthorized(ClientError):
"""Exception mapping a '401 Unauthorized' response."""
code = 401
class Forbidden(ClientError):
"""Exception mapping a '403 Forbidden' response."""
code = 403
class NotFound(ClientError):
"""Exception mapping a '404 Not Found' response."""
code = 404
class MethodNotAllowed(ClientError):
"""Exception mapping a '405 Method Not Allowed' response."""
code = 405
class Conflict(ClientError):
"""Exception mapping a '409 Conflict' response."""
code = 409
class LengthRequired(ClientError):
"""Exception mapping a '411 Length Required' response."""
code = 411
class PreconditionFailed(ClientError):
"""Exception mapping a '412 Precondition Failed' response."""
code = 412
class RequestRangeNotSatisfiable(ClientError):
"""Exception mapping a '416 Request Range Not Satisfiable' response."""
code = 416
class TooManyRequests(ClientError):
"""Exception mapping a '429 Too Many Requests' response."""
code = 429
class ServerError(GCloudError):
"""Base for 5xx responses: (abstract)"""
class InternalServerError(ServerError):
"""Exception mapping a '500 Internal Server Error' response."""
code = 500
class MethodNotImplemented(ServerError):
"""Exception mapping a '501 Not Implemented' response."""
code = 501
class ServiceUnavailable(ServerError):
"""Exception mapping a '503 Service Unavailable' response."""
code = 503
def make_exception(response, content, error_info=None, use_json=True):
"""Factory: create exception based on HTTP response code.
:type response: :class:`httplib2.Response` or other HTTP response object
:param response: A response object that defines a status code as the
status attribute.
:type content: string or dictionary
:param content: The body of the HTTP error response.
:type error_info: string
:param error_info: Optional string giving extra information about the
failed request.
:type use_json: bool
:param use_json: Flag indicating if ``content`` is expected to be JSON.
:rtype: instance of :class:`GCloudError`, or a concrete subclass.
:returns: Exception specific to the error response.
"""
if isinstance(content, six.binary_type):
content = content.decode('utf-8')
if isinstance(content, six.string_types):
payload = None
if use_json:
try:
payload = json.loads(content)
except ValueError:
# Expected JSON but received something else.
pass
if payload is None:
payload = {'error': {'message': content}}
else:
payload = content
message = payload.get('error', {}).get('message', '')
errors = payload.get('error', {}).get('errors', ())
if error_info is not None:
message += ' (%s)' % (error_info,)
try:
klass = _HTTP_CODE_TO_EXCEPTION[response.status]
except KeyError:
error = GCloudError(message, errors)
error.code = response.status
else:
error = klass(message, errors)
return error
def _walk_subclasses(klass):
"""Recursively walk subclass tree."""
for sub in klass.__subclasses__():
yield sub
for subsub in _walk_subclasses(sub):
yield subsub
# Build the code->exception class mapping.
for _eklass in _walk_subclasses(GCloudError):
code = getattr(_eklass, 'code', None)
if code is not None:
_HTTP_CODE_TO_EXCEPTION[code] = _eklass
| Python | 0 | @@ -1210,60 +1210,16 @@
t__(
-)%0A # suppress deprecation warning under 2.6.x
+message)
%0A
|
9451bfccaf9e2782dc0b1e7670f61ce765b8e7c2 | Update for Issue #163 | tamper/nonrecursivereplacement.py | tamper/nonrecursivereplacement.py | Python | 0 | @@ -0,0 +1,1147 @@
+#!/usr/bin/env python%0A%0A%22%22%22%0ACopyright (c) 2006-2012 sqlmap developers (http://sqlmap.org/)%0ASee the file 'doc/COPYING' for copying permission%0A%22%22%22%0A%0Aimport random%0Aimport re%0A%0Afrom lib.core.common import singleTimeWarnMessage%0Afrom lib.core.enums import PRIORITY%0A%0A__priority__ = PRIORITY.NORMAL%0A%0Adef tamper(payload, headers):%0A %22%22%22%0A Replaces predefined SQL keywords with representations%0A suitable for replacement (e.g. .replace(%22SELECT%22, %22%22)) filters%0A%0A Example:%0A * Input: 1 UNION SELECT 2--%0A * Output: 1 UNUNIONION SELSELECTECT 2--%0A%0A Notes:%0A * Useful to bypass very weak custom filters%0A %22%22%22%0A%0A keywords = (%22UNION%22, %22SELECT%22, %22INSERT%22, %22UPDATE%22, %22FROM%22, %22WHERE%22)%0A retVal = payload%0A%0A warnMsg = %22currently only couple of keywords are being processed %25s. %22 %25 str(keywords)%0A warnMsg += %22You can set it manually according to your needs%22%0A singleTimeWarnMessage(warnMsg)%0A%0A if payload:%0A for keyword in keywords:%0A _ = random.randint(1, len(keyword) - 1)%0A retVal = re.sub(r%22(?i)%5Cb%25s%5Cb%22 %25 keyword, %22%25s%25s%25s%22 %25 (keyword%5B:_%5D, keyword, keyword%5B_:%5D), retVal)%0A%0A return retVal, headers%0A
|
|
ae3005089da6edc4d4488b8619dcbee9e556fc22 | Fix typo | pylxd/client.py | pylxd/client.py |
# Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from . import connection
from . import container
from . import certificate
from . import hosts
from . import image
from . import network
from . import profiles
class Client(object):
def __init__(self, base_url, host):
self.unix_socket = '/var/lib/lxd/unix.socket'
self.base_url = base_url
self.host = host
if base_url == 'https':
self.connection = connection.HTTPSConnection(host, port="8443")
else:
self.connection = connection.UnixHTTPConnection(self.unix_socket)
self.hosts = hosts.LXDHost(self.connection)
self.certificate = certificate.LXDCertificate(self.connection)
self.image = image.LXDImage(self.connecton)
self.network = network.LXDNetwork(self.connection)
self.container = container.LXDContainer(self.connection)
self.profile = profiles.LXDProfile(self.connection)
# host
def host_ping(self):
pass
def host_info(self):
pass
# images
def image_list(self):
pass
def image_list_by_key(self):
pass
def image_upload(self):
pass
def image_info(self):
pass
def image_delete(self):
pass
def image_export(self):
pass
# alias
def alias_list(self):
pass
def alias_create(self):
pass
def alias_update(self):
pass
def alias_delete(self):
pass
# containers:
def container_init(self):
pass
def container_start(self):
pass
def container_stop(self):
pass
def container_destroy(self):
pass
def container_suspend(self):
pass
def container_reboot(self):
pass
def container_info(self):
pass
def container_resume(self):
pass
def get_container_log(self):
pass
def get_container_console(self):
pass
def get_container_syslog(self):
pass
# container state
def get_container_state(self):
pass
def update_container_state(self):
pass
# file operations
def get_container_file(self):
pass
def put_container_file(self):
pass
# snapshots
def container_snapshot_list(self):
pass
def container_snapshot_create(self):
pass
def container_snapshot_info(self):
pass
def container_snaphsot_delete(self):
pass
def container_run_command(self):
pass
# certificates
def certificate_list(self):
pass
def certificate_show(self):
pass
# profiles
def profile_init(self):
pass
def profile_show(self):
pass
def profile_update(self):
pass
def profile_delete(self):
pass
# lxd operations
def list_operations(self):
pass
def get_container_operation(self):
pass
# networks
def network_list(self):
pass
def network_show(self):
pass
| Python | 0.999189 | @@ -1325,16 +1325,17 @@
.connect
+i
on)%0A
|
9e090675765a2c0c6412ee51d1e0e007404a30fd | Create k-diff-pairs-in-an-array.py | Python/k-diff-pairs-in-an-array.py | Python/k-diff-pairs-in-an-array.py | Python | 0.002165 | @@ -0,0 +1,1484 @@
+# Time: O(n)%0A# Space: O(n)%0A%0A# Total Accepted: 5671%0A# Total Submissions: 20941%0A# Difficulty: Easy%0A# Contributors: murali.kf370%0A# Given an array of integers and an integer k, %0A# you need to find the number of unique k-diff pairs in the array.%0A# Here a k-diff pair is defined as an integer pair (i, j),%0A# where i and j are both numbers in the array and their absolute difference is k.%0A#%0A# Example 1:%0A# Input: %5B3, 1, 4, 1, 5%5D, k = 2%0A# Output: 2%0A# Explanation: There are two 2-diff pairs in the array, (1, 3) and (3, 5).%0A# Although we have two 1s in the input, we should only return the number of unique pairs.%0A# Example 2:%0A# Input:%5B1, 2, 3, 4, 5%5D, k = 1%0A# Output: 4%0A# Explanation: There are four 1-diff pairs in the array, (1, 2), (2, 3), (3, 4) and (4, 5).%0A# Example 3:%0A# Input: %5B1, 3, 1, 5, 4%5D, k = 0%0A# Output: 1%0A# Explanation: There is one 0-diff pair in the array, (1, 1).%0A# Note:%0A# The pairs (i, j) and (j, i) count as the same pair.%0A# The length of the array won't exceed 10,000.%0A# All the integers in the given input belong to the range: %5B-1e7, 1e7%5D.%0A%0Aclass Solution(object):%0A def findPairs(self, nums, k):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :type k: int%0A :rtype: int%0A %22%22%22%0A if k %3C 0: return 0%0A result, lookup = set(), set()%0A for num in nums:%0A if num-k in lookup:%0A result.add(num-k)%0A if num+k in lookup:%0A result.add(num)%0A lookup.add(num)%0A return len(result)%0A
|
|
9f66f31d42a16d8b9536a9cb160e454118ff4369 | Add tests for UninstallPathSet | tests/unit/test_req_uninstall.py | tests/unit/test_req_uninstall.py | Python | 0 | @@ -0,0 +1,1367 @@
+import os%0Aimport shutil%0Aimport sys%0Aimport tempfile%0A%0Aimport pytest%0Afrom mock import Mock%0A%0Afrom pip.locations import running_under_virtualenv%0Afrom pip.req.req_uninstall import UninstallPathSet%0A%0Aclass TestUninstallPathSet(object):%0A def setup(self):%0A if running_under_virtualenv():%0A # Construct tempdir in sys.prefix, otherwise UninstallPathSet%0A # will reject paths.%0A self.tempdir = tempfile.mkdtemp(prefix=sys.prefix)%0A else:%0A self.tempdir = tempfile.mkdtemp()%0A%0A def teardown(self):%0A shutil.rmtree(self.tempdir, ignore_errors=True)%0A%0A def test_add(self):%0A file_extant = os.path.join(self.tempdir, 'foo')%0A file_nonexistant = os.path.join(self.tempdir, 'nonexistant')%0A with open(file_extant, 'w'): pass%0A%0A ups = UninstallPathSet(dist=Mock())%0A assert ups.paths == set()%0A ups.add(file_extant)%0A assert ups.paths == set(%5Bfile_extant%5D)%0A%0A ups.add(file_nonexistant)%0A assert ups.paths == set(%5Bfile_extant%5D)%0A%0A @pytest.mark.skipif(%22sys.platform == 'win32'%22)%0A def test_add_symlink(self):%0A f = os.path.join(self.tempdir, 'foo')%0A with open(f, 'w'): pass%0A l = os.path.join(self.tempdir, 'foo_link')%0A os.symlink(f, l)%0A%0A ups = UninstallPathSet(dist=Mock())%0A ups.add(l)%0A assert ups.paths == set(%5Bl%5D)%0A
|
|
8d8f89c82511b86fb87cef5db3bad633283283cc | Add missing migrations in develop branch | modelview/migrations/0044_auto_20191007_1227.py | modelview/migrations/0044_auto_20191007_1227.py | Python | 0.000001 | @@ -0,0 +1,1445 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.25 on 2019-10-07 10:27%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('modelview', '0043_merge_20190425_1036'),%0A %5D%0A%0A operations = %5B%0A migrations.RemoveField(%0A model_name='energyscenario',%0A name='networks_electricity_gas_electricity',%0A ),%0A migrations.RemoveField(%0A model_name='energyscenario',%0A name='networks_electricity_gas_gas',%0A ),%0A migrations.AlterField(%0A model_name='basicfactsheet',%0A name='logo',%0A field=models.ImageField(help_text='If a logo for the model exists load it up', null=True, upload_to='logos', verbose_name='Logo'),%0A ),%0A migrations.AlterField(%0A model_name='basicfactsheet',%0A name='methodical_focus_1',%0A field=models.CharField(help_text='1-3 Keyords describing the main methodical focus of the model e.g.%22open source%22, %22sector coupling%22', max_length=50, verbose_name='Methodical Focus'),%0A ),%0A migrations.AlterField(%0A model_name='basicfactsheet',%0A name='source_of_funding',%0A field=models.CharField(help_text='What is the main source of funding for the development of the model?', max_length=200, null=True, verbose_name='Source of funding'),%0A ),%0A %5D%0A
|
|
7612b8012a6550d207111c853ffbb59c1d2c9e4c | Add Go to platform list | src/sentry/constants.py | src/sentry/constants.py | """
sentry.constants
~~~~~~~~~~~~~~~~
These settings act as the default (base) settings for the Sentry-provided
web-server
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import os.path
from django.conf import settings
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
def get_all_languages():
results = []
for path in os.listdir(os.path.join(MODULE_ROOT, 'locale')):
if path.startswith('.'):
continue
results.append(path)
return results
MODULE_ROOT = os.path.dirname(__import__('sentry').__file__)
DATA_ROOT = os.path.join(MODULE_ROOT, 'data')
SORT_OPTIONS = SortedDict((
('priority', _('Priority')),
('date', _('Last Seen')),
('new', _('First Seen')),
('freq', _('Frequency')),
('tottime', _('Total Time Spent')),
('avgtime', _('Average Time Spent')),
))
SEARCH_SORT_OPTIONS = SortedDict((
('score', _('Score')),
('date', _('Last Seen')),
('new', _('First Seen')),
))
# XXX: Deprecated: use GroupStatus instead
STATUS_UNRESOLVED = 0
STATUS_RESOLVED = 1
STATUS_MUTED = 2
STATUS_CHOICES = {
'resolved': STATUS_RESOLVED,
'unresolved': STATUS_UNRESOLVED,
'muted': STATUS_MUTED,
}
MEMBER_OWNER = 0
MEMBER_ADMIN = 25
MEMBER_USER = 50
MEMBER_SYSTEM = 100
MEMBER_TYPES = (
(MEMBER_OWNER, _('Owner')),
(MEMBER_ADMIN, _('Admin')),
(MEMBER_USER, _('User')),
(MEMBER_SYSTEM, _('System Agent')),
)
# A list of values which represent an unset or empty password on
# a User instance.
EMPTY_PASSWORD_VALUES = ('!', '', '$')
PLATFORM_LIST = (
'csharp',
'connect',
'django',
'express',
'flask',
'ios',
'java',
'java_log4j',
'java_log4j2',
'java_logback',
'java_logging',
'javascript',
'node.js',
'php',
'pyramid',
'python',
'r',
'ruby',
'rails3',
'rails4',
'sidekiq',
'sinatra',
'tornado',
)
PLATFORM_ROOTS = {
'rails3': 'ruby',
'rails4': 'ruby',
'sinatra': 'ruby',
'sidekiq': 'ruby',
'django': 'python',
'flask': 'python',
'pyramid': 'python',
'tornado': 'python',
'express': 'node.js',
'connect': 'node.js',
'java_log4j': 'java',
'java_log4j2': 'java',
'java_logback': 'java',
'java_logging': 'java',
}
PLATFORM_TITLES = {
'rails3': 'Rails 3 (Ruby)',
'rails4': 'Rails 4 (Ruby)',
'php': 'PHP',
'ios': 'iOS',
'express': 'Express (Node.js)',
'connect': 'Connect (Node.js)',
'django': 'Django (Python)',
'flask': 'Flask (Python)',
'pyramid': 'Pyramid (Python)',
'csharp': 'C#',
'java_log4j': 'Log4j (Java)',
'java_log4j2': 'Log4j 2.x (Java)',
'java_logback': 'Logback (Java)',
'java_logging': 'java.util.logging',
}
# Normalize counts to the 15 minute marker. This value MUST be less than 60. A
# value of 0 would store counts for every minute, and is the lowest level of
# accuracy provided.
MINUTE_NORMALIZATION = 15
MAX_TAG_KEY_LENGTH = 32
MAX_TAG_VALUE_LENGTH = 200
MAX_CULPRIT_LENGTH = 200
# Team slugs which may not be used. Generally these are top level URL patterns
# which we don't want to worry about conflicts on.
RESERVED_ORGANIZATION_SLUGS = (
'admin', 'manage', 'login', 'account', 'register', 'api',
'organizations', 'teams', 'projects', 'help',
)
RESERVED_TEAM_SLUGS = RESERVED_ORGANIZATION_SLUGS
LOG_LEVELS = {
logging.DEBUG: 'debug',
logging.INFO: 'info',
logging.WARNING: 'warning',
logging.ERROR: 'error',
logging.FATAL: 'fatal',
}
DEFAULT_LOG_LEVEL = 'error'
DEFAULT_LOGGER_NAME = 'root'
# Default alerting threshold values
DEFAULT_ALERT_PROJECT_THRESHOLD = (500, 25) # 500%, 25 events
DEFAULT_ALERT_GROUP_THRESHOLD = (1000, 25) # 1000%, 25 events
# Default paginator value
EVENTS_PER_PAGE = 15
# Default sort option for the group stream
DEFAULT_SORT_OPTION = 'date'
# Setup languages for only available locales
LANGUAGE_MAP = dict(settings.LANGUAGES)
LANGUAGES = [(k, LANGUAGE_MAP[k]) for k in get_all_languages() if k in LANGUAGE_MAP]
# TODO(dcramer): We eventually want to make this user-editable
TAG_LABELS = {
'exc_type': _('Exception Type'),
'sentry:user': _('User'),
'sentry:filename': _('File'),
'sentry:function': _('Function'),
'sentry:release': _('Release'),
'os': _('OS'),
'url': _('URL'),
'server_name': _('Server'),
}
# TODO(dcramer): once this is more fleshed out we want this to be extendable
SENTRY_RULES = (
'sentry.rules.actions.notify_event.NotifyEventAction',
'sentry.rules.actions.notify_event_service.NotifyEventServiceAction',
'sentry.rules.conditions.every_event.EveryEventCondition',
'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
'sentry.rules.conditions.regression_event.RegressionEventCondition',
'sentry.rules.conditions.tagged_event.TaggedEventCondition',
'sentry.rules.conditions.event_frequency.EventFrequencyCondition',
)
# methods as defined by http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html + PATCH
HTTP_METHODS = ('GET', 'POST', 'PUT', 'OPTIONS', 'HEAD', 'DELETE', 'TRACE', 'CONNECT', 'PATCH')
| Python | 0 | @@ -1812,24 +1812,34 @@
'flask',%0A
+ 'go',%0A
'ios',%0A
|
4a25572283448a820cf55008e81405f3eb84a072 | Add test for unicode in env (#345) | tests/system/verbs/catkin_build/test_unicode_in_env.py | tests/system/verbs/catkin_build/test_unicode_in_env.py | Python | 0 | @@ -0,0 +1,531 @@
+import os%0A%0Afrom ....utils import catkin_success%0A%0Afrom ...workspace_factory import workspace_factory%0A%0A%0Adef test_catkin_build_with_unicode_in_env():%0A with workspace_factory() as wf:%0A wf.create_package('foo', depends=%5B'bar'%5D)%0A wf.create_package('bar')%0A wf.build()%0A%0A print('Workspace: %7B0%7D'.format(wf.workspace))%0A%0A assert os.path.isdir(wf.workspace)%0A%0A env = %7B'NON_ASCII': '%5Cxc3%5Cxb6'%7D%0A cmd = %5B'build', '--no-status', '--no-notify', '--verbose'%5D%0A assert catkin_success(cmd, env)%0A
|
|
53f91164ce93a01c2ad628fd49109a5fa8917ecb | Extend datasource model schema (#2342) | timesketch/migrations/versions/180a387da650_extend_datasource_model_with_total_file_.py | timesketch/migrations/versions/180a387da650_extend_datasource_model_with_total_file_.py | Python | 0 | @@ -0,0 +1,759 @@
+%22%22%22Extend datasource model with total file events field%0A%0ARevision ID: 180a387da650%0ARevises: 75af34d75b1e%0ACreate Date: 2022-09-26 13:04:10.336534%0A%0A%22%22%22%0A# This code is auto generated. Ignore linter errors.%0A# pylint: skip-file%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '180a387da650'%0Adown_revision = '75af34d75b1e'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('datasource', sa.Column('total_file_events', sa.BigInteger(), nullable=True))%0A # ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.drop_column('datasource', 'total_file_events')%0A # ### end Alembic commands ###%0A
|
|
63dc7fb2586824b6a6de52b1ba80e6196d80ff42 | Create credentials.py | credentials.py | credentials.py | Python | 0.000001 | @@ -0,0 +1,126 @@
+# add your primary statsnz key here%0A# available from https://statisticsnz.portal.azure-api.net/%0Astatsnz_key = %22MY_SECRET_KEY%22%0A
|
|
41e21884418cdd2b525b4f02d1cfa4ed9ea2c000 | Add bug test for 9268 (#65) | bugs/issue_9268.py | bugs/issue_9268.py | Python | 0 | @@ -0,0 +1,421 @@
+# RUN: %25PYTHON %25s%0A# XFAIL: *%0A%0Aimport iree.compiler.tools.tflite as iree_tflite%0A%0A# https://github.com/iree-org/iree/issues/9268%0Air = '''%0Afunc.func @main(%25a : tensor%3Cf32%3E, %25b : tensor%3Cf32%3E) -%3E tensor%3C*xf32%3E %7B%0A %25val = %22tfl.add%22(%25a, %25b) %7Bfused_activation_function = %22NONE%22%7D : (tensor%3Cf32%3E, tensor%3Cf32%3E) -%3E tensor%3C*xf32%3E%0A return %25val : tensor%3C*xf32%3E%0A%7D%0A'''%0Aprint(ir)%0Air = iree_tflite.compile_str(ir, target_backends=%5B%22cpu%22%5D)%0A
|
|
8c401af5bb7c3678de4091b88d81e04ddf248705 | Remove unused 'fahrenheit' config option | src/collectors/lmsensors/lmsensors.py | src/collectors/lmsensors/lmsensors.py | # coding=utf-8
"""
This class collects data from libsensors. It should work against libsensors 2.x
and 3.x, pending support within the PySensors Ctypes binding:
[http://pypi.python.org/pypi/PySensors/](http://pypi.python.org/pypi/PySensors/)
Requires: 'sensors' to be installed, configured, and the relevant kernel
modules to be loaded. Requires: PySensors, which requires Python 2.6+.
If you're having issues, check your version of 'sensors'. This collector was
written against sensors version 3.1.2 with libsensors version 3.1.2.
#### Dependencies
* [PySensors](http://pypi.python.org/pypi/PySensors/)
"""
import diamond.collector
try:
import sensors
sensors # workaround for pyflakes issue #13
except ImportError:
sensors = None
class LMSensorsCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(LMSensorsCollector, self).get_default_config_help()
config_help.update({
'fahrenheit': "True/False",
'send_zero': 'Send sensor data even when there is no value'
})
return config_help
def get_default_config(self):
"""
Returns default collector settings.
"""
config = super(LMSensorsCollector, self).get_default_config()
config.update({
'path': 'sensors',
'fahrenheit': 'True',
'send_zero': 'False'
})
return config
def collect(self):
if sensors is None:
self.log.error('Unable to import module sensors')
return {}
sensors.init()
try:
for chip in sensors.iter_detected_chips():
for feature in chip:
label = feature.label.replace(' ', '-')
try:
value = feature.get_value()
except:
if self.config['send_zero']:
value = 0
if value is not None:
self.publish(".".join([str(chip), label]), value)
finally:
sensors.cleanup()
| Python | 0.000004 | @@ -945,48 +945,8 @@
e(%7B%0A
- 'fahrenheit': %22True/False%22,%0A
@@ -1283,42 +1283,8 @@
s',%0A
- 'fahrenheit': 'True',%0A
|
f3e1b1404f32cd0195aa8148d1ab4285cf9ad352 | Add class BaseSpider | Spiders.py | Spiders.py | Python | 0.000001 | @@ -0,0 +1,359 @@
+'''%0D%0ACreated on 2 %D1%81%D0%B5%D0%BD%D1%82. 2016 %D0%B3.%0D%0A%0D%0A@author: garet%0D%0A'''%0D%0A%0D%0A%0D%0Aclass BaseSpider():%0D%0A %0D%0A def __init__(self):%0D%0A pass%0D%0A%0D%0A def AddUrls(self, urls):%0D%0A pass%0D%0A%0D%0A def Routing(self, url):%0D%0A pass%0D%0A%0D%0A def SaveCache(self, url, data=None):%0D%0A pass%0D%0A%0D%0A def GetCache(self, url):%0D%0A pass%0D%0A%0D%0A def Run(self):%0D%0A pass%0D%0A%0D%0A
|
|
e5be29bc3c5a77493fe64bb3fc8b52611cc13469 | Add tests for Generic Interface. | zerver/tests/test_outgoing_webhook_interfaces.py | zerver/tests/test_outgoing_webhook_interfaces.py | Python | 0 | @@ -0,0 +1,2369 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import absolute_import%0Afrom __future__ import print_function%0Afrom typing import Any%0A%0Aimport mock%0Aimport json%0A%0Afrom requests.models import Response%0Afrom zerver.lib.test_classes import ZulipTestCase%0Afrom zerver.outgoing_webhooks.generic import GenericOutgoingWebhookService%0A%0Aclass Test_GenericOutgoingWebhookService(ZulipTestCase):%0A%0A def setUp(self):%0A # type: () -%3E None%0A self.event = %7B%0A u'command': '@**test**',%0A u'message': %7B%0A 'content': 'test_content',%0A %7D%0A %7D%0A self.handler = GenericOutgoingWebhookService(service_name='test-service',%0A base_url='http://example.domain.com',%0A token='abcdef',%0A user_profile=None)%0A%0A def test_process_event(self):%0A # type: () -%3E None%0A rest_operation, request_data = self.handler.process_event(self.event)%0A request_data = json.loads(request_data)%0A self.assertEqual(request_data%5B'data'%5D, %22@**test**%22)%0A self.assertEqual(request_data%5B'token'%5D, %22abcdef%22)%0A self.assertEqual(rest_operation%5B'base_url'%5D, %22http://example.domain.com%22)%0A self.assertEqual(rest_operation%5B'method'%5D, %22POST%22)%0A self.assertEqual(request_data%5B'message'%5D, self.event%5B'message'%5D)%0A%0A def test_process_success(self):%0A # type: () -%3E None%0A%0A response = mock.Mock(spec=Response)%0A response.text = json.dumps(%7B%22response_not_required%22: True%7D)%0A success_response = self.handler.process_success(response, self.event)%0A self.assertEqual(success_response, None)%0A%0A response.text = json.dumps(%7B%22response_string%22: 'test_content'%7D)%0A success_response = self.handler.process_success(response, self.event)%0A self.assertEqual(success_response, 'test_content')%0A%0A response.text = json.dumps(%7B%7D)%0A success_response = self.handler.process_success(response, self.event)%0A self.assertEqual(success_response, %22%22)%0A%0A def test_process_failure(self):%0A # type: () -%3E None%0A response = mock.Mock(spec=Response)%0A response.text = 'test_content'%0A success_response = self.handler.process_failure(response, self.event)%0A self.assertEqual(success_response, 'test_content')%0A
|
|
1f80f3cc606d9c42e41e30108e97f776b02803c5 | Create abcprob.py | abcprob.py | abcprob.py | Python | 0.99997 | @@ -0,0 +1,1526 @@
+# by beepingmoon, 2014-07-22%0A# abc problem, http://rosettacode.org/wiki/ABC_Problem%0A%0Aimport time%0A%0Aclass Blok:%0A%09def __init__(self, znaki, czyDostepny = True):%0A%09%09self.znaki = znaki%0A%09%09self.czyDostepny = czyDostepny%0A%0A%09def sprawdzZnaki(self, znak):%0A%09%09for z in self.znaki:%0A%09%09%09if z == znak:%0A%09%09%09%09return True%0A%09%09return False%0A%0Abloki = %5BBlok('ob'),Blok('xk'),Blok('dq'),Blok('cp'),Blok('na'),%0A Blok('gt'),Blok('re'),Blok('tg'),Blok('qd'),Blok('fs'),Blok('jw'),%0A Blok('hu'),Blok('vi'),Blok('an'),Blok('ob'),Blok('er'),Blok('fs'),%0A Blok('ly'),Blok('pc'),Blok('zm')%5D%0A%0Adef resetuj():%0A%09for b in bloki:%0A%09%09b.czyDostepny = True%0A%0Adef funkcjaABC(bloki, slowo, indeks):%0A%09if indeks == len(slowo):%0A%09%09return True%0A%09for blok in bloki:%0A%09%09if blok.czyDostepny == False:%0A%09%09%09continue%0A%09%09if blok.sprawdzZnaki(slowo%5Bindeks%5D) == True:%0A%09%09%09blok.czyDostepny = False%0A%09%09%09if funkcjaABC(bloki, slowo, indeks+1):%0A%09%09%09%09return True%0A%09%09%09blok.czyDostepny = True%0A%09return False%0A%0A# check long arbitrary string in this file%0Af = open(%22slowo.txt%22,'r')%0Adata = f.read()%0Af.close()%0A%0Astart = time.time()%0Aprint funkcjaABC(bloki, data, 0)%0Aprint %22Czas szukania: %25f sekund %22 %25 (time.time() - start)%0Aresetuj()%0A%0A#print funkcjaABC(bloki, 'a', 0)%09%09%09# true%0A#resetuj()%0A#print funkcjaABC(bloki, 'bark', 0)%09%09# true%0A#resetuj()%0A#print funkcjaABC(bloki, 'book', 0)%09%09# false%0A#resetuj()%0A#print funkcjaABC(bloki, 'treat', 0)%09%09# true%0A#resetuj()%0A#print funkcjaABC(bloki, 'common', 0)%09# false%0A#resetuj()%0A#print funkcjaABC(bloki, 'squad', 0)%09%09# true%0A#resetuj()%0A#print funkcjaABC(bloki, 'confuse', 0)%09# true%0A
|
|
b7b29a00b1a2e448d78c8f3c4333753668589e16 | Create __init__.py | etc/__init__.py | etc/__init__.py | Python | 0.000429 | @@ -0,0 +1 @@
+%0A
|
|
e1ea3859b08a14c80ccd65fc5551336bdc760f96 | add biggan projukti blog | corpus_builder/spiders/public_blog/biggan_projukti.py | corpus_builder/spiders/public_blog/biggan_projukti.py | Python | 0 | @@ -0,0 +1,921 @@
+# -*- coding: utf-8 -*-%0Aimport scrapy%0Afrom scrapy.linkextractors import LinkExtractor%0Afrom scrapy.spiders import Rule%0A%0Afrom corpus_builder.templates.spider import CommonSpider%0A%0A%0Aclass BigganProjuktiSpider(CommonSpider):%0A name = 'biggan_projukti'%0A allowed_domains = %5B'www.bigganprojukti.com', 'bigganprojukti.com'%5D%0A base_url = 'http://www.bigganprojukti.com/'%0A start_request_url = base_url%0A%0A content_body = %7B%0A 'css': 'div.td-post-content p::text'%0A %7D%0A%0A rules = (%0A Rule(LinkExtractor(%0A restrict_css='div.td-main-content h3.entry-title'%0A ),%0A callback='parse_content'),%0A )%0A%0A allowed_configurations = %5B%0A %5B'start_page'%5D,%0A %5B'start_page', 'end_page'%5D%0A %5D%0A%0A def request_index(self, response):%0A for page in range(self.start_page + 1, self.end_page + 1):%0A yield scrapy.Request(self.base_url + 'page/%7Bpage%7D'.format(page=page))%0A
|
|
204e6fc49bcc739f1e5c53bfbfc3eb7e86a7640c | Add windows autostart. | StartAtBoot.py | StartAtBoot.py | Python | 0 | @@ -0,0 +1,333 @@
+import sys%0A%0Aif sys.platform.startswith('win'):%0A from PyQt4.QtCore import QSettings%0A RUN_PATH = %22HKEY_CURRENT_USER%5C%5CSoftware%5C%5CMicrosoft%5C%5CWindows%5C%5CCurrentVersion%5C%5CRun%22%0A%0A settings = QSettings(RUN_PATH, QSettings.NativeFormat)%0A settings.setValue(%22Anki%22, sys.argv%5B0%5D)%0A%0A # to remove that:%0A # self.settings.remove(%22Anki%22)%0A
|
|
7919fa239e597c0358b518740aa2657b49caddbf | add oop_advance | src/python27/oop_advance/slots.py | src/python27/oop_advance/slots.py | Python | 0.000133 | @@ -0,0 +1,603 @@
+# -*- coding: utf-8 -*-%0Aclass Student(object):%0A pass%0A%0As = Student()%0As.name = 'Tim Ho'%0Aprint s.name%0A%0A%0Adef set_age(self, age):%0A self.age = age%0A%0Afrom types import MethodType%0As.set_age = MethodType(set_age, s, Student)%0As.set_age(25)%0Aprint s.age%0A%0As2 = Student()%0A# s2.set_age(25)%0A%0A%0Adef set_score(self, score):%0A self.score = score%0A%0AStudent.set_score = MethodType(set_score, None, Student)%0A%0As.set_score(100)%0Aprint s.score%0As2.set_score(99)%0Aprint s2.score%0A%0A%0Aclass Student2(object):%0A __slots__ = ('name', 'age')%0A%0As3 = Student2()%0As3.name = 'Tim Ho'%0As3.age = 25%0A#s3.score = 99%0Aprint s3.name%0Aprint s3.age%0A
|
|
f576b7b151c6c74eea668e66fff54ab2c33f39d6 | add 100 | Volume2/100.py | Volume2/100.py | Python | 0.999998 | @@ -0,0 +1,144 @@
+if __name__ == %22__main__%22:%0A b, n, L = 85, 120, 10 ** 12%0A while n %3C= L:%0A b, n = 3 * b + 2 * n - 2, 4 * b + 3 * n - 3%0A print b, n%0A
|
|
68efa8a0fb206da8cd1410d74572520f558ebded | Create apriori.py | apriori.py | apriori.py | Python | 0.000006 | @@ -0,0 +1,1960 @@
+def preprocessing(data):%0A %22%22%22 preprocesses data to be applicable to apriori%0A %0A Parameters%0A ----------%0A data : tbd%0A %0A Returns%0A ---------%0A list of sets%0A %22%22%22%0A pass%0A%0A%0Aclass apriori():%0A %22%22%22 Frequent Itemsets using the apriori algorithm%0A %0A Parameters%0A ----------%0A baskets : list of sets%0A %0A max_set_size : int, default None%0A determine frequent item sets up to max_set_size items%0A if None, determine alls frequent item sets%0A %0A s : float %3E0 and %3C=1%0A minimum threshold for item sets to count as frequent%0A %0A rules : boolen%0A if True return association rules additionally to frequent item sets%0A %0A confidence : boolean%0A if True compute confidence of association rule. Only viable if rules is True%0A %0A interest : boolean%0A if True compute interest of association rule. Only viable if rules is True%0A %22%22%22%0A def __init__(self, baskets, max_set_size = None, s = 0.1, %0A rules = False, confidence=False, interest=False):%0A self.baskets = baskets%0A self.max_set_size = max_set_size%0A self.s = s%0A self.rules = rules%0A self.confidence = confidence%0A self.interest = interest%0A %0A def compute(self):%0A %22%22%22 Applies the apriori algorithm to baskets%0A %22%22%22%0A pass%0A %0A def _initialize(self):%0A pass%0A %0A def _construct(self):%0A pass%0A %0A def _filter(self):%0A pass%0A %0A def _construct_and_count(self, j, frequent_tuples):%0A if j == 1:%0A # count items ind baskets and return%0A if j %3E 1:%0A # for every basket, filter tuples subset of basket%0A # double loop through filtered tuples%0A # if tuple difference is j-2, unite and count unison%0A # if count(unison) = j add tuple to output and increase count%0A %0A #memoization? %0A %0A
|
|
8adf39f011d8290c07f01e807b65373e40b4c314 | Create score.py | score.py | score.py | Python | 0.000008 | @@ -0,0 +1,1775 @@
+%22%22%22 Requires sox and text2wave (via festival)%0A%22%22%22%0A%0Afrom pippi import dsp%0Afrom pippi import tune%0Aimport subprocess%0Aimport os%0A%0Adef sox(cmd, sound):%0A path = os.getcwd()%0A filename_in = '/proc-in'%0A filename_out = '/proc-out.wav'%0A%0A dsp.write(sound, filename_in)%0A%0A cmd = cmd %25 (path + filename_in + '.wav', path + filename_out)%0A subprocess.call(cmd, shell=True)%0A%0A sound = dsp.read(path + filename_out).data%0A%0A return sound%0A%0Adef text2wave(lyrics):%0A path = os.getcwd() + '/bag.wav'%0A cmd = %22echo '%25s' %7C /usr/bin/text2wave -o %25s%22 %25 (lyrics, path)%0A%0A ret = subprocess.call(cmd, shell=True)%0A%0A words = dsp.read('bag.wav').data%0A%0A return words%0A%0Adef singit(lyrics, mult):%0A words = text2wave(lyrics)%0A%0A pitches = %5B dsp.randint(1, 10) for i in range(dsp.randint(2, 4)) %5D%0A pitches = tune.fromdegrees(pitches, octave=dsp.randint(1, 4), root='a')%0A%0A sings = %5B dsp.pine(words, dsp.flen(words) * mult, pitch) for pitch in pitches %5D%0A sings = dsp.mix(sings)%0A%0A sings = sox(%22sox %25s %25s tempo 5.0%22, sings)%0A%0A return sings%0A%0Averses = %5B%0A 'sing a ling a ling a', %0A 'ding ling a sing ling ding a', %0A%0A 'ee oh ee oh see low', %0A 'me low see low tree low',%0A%0A 'ping a ding a ding a', %0A 'sling ding a bing ling ding a', %0A%0A 'ee oh ee oh see low', %0A 'me low see low tree low',%0A%0A 'sing a ling a ling a', %0A 'ding ling a sing ling ding a', %0A%0A 'ee oh ee oh see low', %0A 'me low see low tree low',%0A%0A%0A %5D%0A%0Alayers = %5B%5D%0A%0A# v1: 1 layers, 50 - 1000 mult%0A# v2: 3 layers, 50 - 1000 mult%0A# v3: 2 layers, 50 - 100 mult%0A%0Afor l in range(2):%0A out = ''.join(%5B singit(lyric, dsp.randint(50, 100)) for lyric in verses %5D)%0A%0A layers += %5B out %5D%0A%0Aout = dsp.mix(layers)%0A%0Adsp.write(out, 'sing')%0A
|
|
0af5dbce55d6e9cc2b53f4f2538001a4c1b7dfa4 | version 1.2 | setup.py | setup.py | #!/usr/bin/env python
#
# Copyright 2012-2014 Ravello Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_function
import os
from setuptools import setup
version_info = {
'name': 'ravello-sdk',
'version': '1.2.dev',
'description': 'Python SDK for the Ravello API',
'author': 'Geert Jansen',
'author_email': '[email protected]',
'url': 'https://github.com/ravello/python-sdk',
'license': 'Apache 2.0',
'classifiers': [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
]
}
if __name__ == '__main__':
setup(
package_dir = { '': 'lib' },
py_modules = ['ravello_sdk'],
**version_info
)
| Python | 0.000001 | @@ -773,12 +773,8 @@
'1.2
-.dev
',%0A
|
0ac53ef31a47c61382557b9fb3ba588fd4e1ae67 | Add first working setup.py script | setup.py | setup.py | Python | 0 | @@ -0,0 +1,1215 @@
+from setuptools import setup, find_packages%0A%0Asetup(name='pygame_maker',%0A version='0.1',%0A description='ENIGMA-like pygame-based game engine',%0A classifiers=%5B%0A 'Development Status :: 2 - Pre-Alpha',%0A 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPGv2)',%0A 'Progamming Language :: Python :: 2.7',%0A 'Topic :: Software Development :: Libraries :: pygame',%0A %5D,%0A keywords='pygame engine',%0A url='http://github.com/rlc2/pygame_maker',%0A author='Ron Lockwood-Childs',%0A author_email='[email protected]',%0A license='LGPL v2.1',%0A packages=%5B%0A 'pygame_maker',%0A 'pygame_maker.actions',%0A 'pygame_maker.actors',%0A 'pygame_maker.events',%0A 'pygame_maker.logic',%0A 'pygame_maker.scenes',%0A 'pygame_maker.sounds',%0A 'pygame_maker.support',%0A %5D,%0A package_data = %7B%0A '': %5B'script_data/*.png','script_data/*.wav','script_data/*.yaml','script_data/*.tmpl','tests/unittest_files/*'%5D%0A %7D,%0A scripts = %5B%0A 'scripts/pygame_maker_app.py'%0A %5D,%0A install_requires=%5B%0A 'numpy%3E=1.10.1',%0A 'yaml%3E=3.11',%0A 'pyparsing%3E=2.0.5',%0A 'pygame%3E=1.9.0',%0A %5D,%0A zip_safe=False)%0A
|
|
1c608e69ecf61484ea1210fe0d6dc8d116c583d3 | Update homepage in setup.py | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='linaro-django-pagination',
version=version,
description="linaro-django-pagination",
long_description=open("README").read(),
classifiers=[
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
],
keywords='pagination,django',
author='Zygmunt Krynicki',
author_email='[email protected]',
url='http://launchpad.net/linaro-django-pagination/',
license='BSD',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
| Python | 0 | @@ -555,32 +555,28 @@
http
+s
://
-launchpad.net/linaro-
+github.com/zyga/
djan
@@ -588,17 +588,16 @@
gination
-/
',%0A l
|
565ff051cabe9eaec6f24df6e8c31115e0a4eed8 | Add setup.py | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,314 @@
+#!/usr/bin/env python%0Afrom setuptools import setup%0A%0Asetup(name='VSTools',%0A version='0.1',%0A description='Easy use Visual Studio msbuild with python. ',%0A author='eternnoir',%0A author_email='[email protected]',%0A url='https://github.com/eternnoir/VSTools',%0A packages=%5B'VSTools'%5D,%0A )
|
|
b164ec6fae6ea9a6734ac58ddd8c3b89f73713fe | Add setup.py | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,248 @@
+%0Afrom distutils.core import setup%0A%0Asetup(%0A name='django-classy-settings',%0A version='0.1',%0A description='Simple class-based settings for Django',%0A author='Curtis Maloney',%0A author_email='[email protected]',%0A packages=%5B'cbs',%5D,%0A)%0A
|
|
fa4ce6dc15e8b47c5978c476db7801473820af0d | add setup.py | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-%0A%0A
|
|
8e8fbf8b63239915736b788b7f1c8ac21a48c190 | Add a basic setup.py script | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,348 @@
+from distutils.core import setup%0Afrom coil import __version__ as VERSION%0A%0Asetup(%0A name = 'coil',%0A version = VERSION,%0A author = 'Michael Marineau',%0A author_email = '[email protected]',%0A description = 'A powerful configuration language',%0A license = 'MIT',%0A packages = %5B'coil', 'coil.test'%5D,%0A scripts = %5B'bin/coildump'%5D,%0A )%0A
|
|
d074995f8ce5a62104525b1f3cfed10ace12c3bc | add setup.py | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,282 @@
+from setuptools import setup%0A%0Asetup(name=%22feature%22,%0A version=%220.1%22,%0A url=%22https://github.com/slyrz/feature%22,%0A description=%22Easy feature engineering.%22,%0A long_description=open('README.md').read(),%0A packages=%5B'feature', 'feature.plugin'%5D,%0A license='MIT')%0A
|
|
699ac33eec57fa49e2c1917d2bf17950bd6e6474 | Create setup script | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,844 @@
+%22%22%22Setup script of mots-vides%22%22%22%0Afrom setuptools import setup%0Afrom setuptools import find_packages%0A%0Aimport mots_vides%0A%0Asetup(%0A name='mots-vides',%0A version=mots_vides.__version__,%0A%0A description='Python library for managing stop words in many languages.',%0A long_description=open('README.rst').read(),%0A keywords='stop, words, text, parsing',%0A%0A author=mots_vides.__author__,%0A author_email=mots_vides.__email__,%0A url=mots_vides.__url__,%0A%0A license=open('LICENSE').read(),%0A%0A packages=find_packages(),%0A classifiers=%5B%0A 'Programming Language :: Python',%0A 'Programming Language :: Python :: 3',%0A 'Intended Audience :: Developers',%0A 'Operating System :: OS Independent',%0A 'License :: OSI Approved :: BSD License',%0A 'Topic :: Software Development :: Libraries :: Python Modules'%5D%0A)%0A
|
|
959580ea313e4445374e8ee9f32e1a8822dd5beb | add setup script for install | setup.py | setup.py | Python | 0 | @@ -0,0 +1,430 @@
+from setuptools import setup%0A%0Asetup(name='hlm_gibbs',%0A version='0.0.1',%0A description='Fit spatial multilevel models and diagnose convergence',%0A url='https://github.com/ljwolf/hlm_gibbs',%0A author='Levi John Wolf',%0A author_email='[email protected]',%0A license='3-Clause BSD',%0A packages=%5B'hlm_gibbs'%5D,%0A install_requires=%5B'numpy','scipy','pysal','pandas','seaborn'%5D%0A zip_safe=False)%0A
|
|
dca7a5f766b7e2fd5cfc346cbc358faafa1ec9f1 | add setup.py file | setup.py | setup.py | Python | 0 | @@ -0,0 +1,392 @@
+try:%0A from setuptools import setup%0Aexcept ImportError:%0A from distutils.core import setup%0A%0Afrom distutils.extension import Extension%0A%0Alibname=%22vgdl%22%0Asetup(%0Aname = libname,%0Aversion=%221.0%22,%0Adescription='A video game description language (VGDL) built on top pf pygame',%0Aauthor='Tom Schaul',%0Aurl='https://github.com/schaul/py-vgdl',%0Apackages= %5B'vgdl'%5D,%0Ainstall_requires=%5B'pygame'%5D%0A)%0A%0A
|
|
1618d8afeca1b667b4439d62b3727528dcba9159 | Add setup.py | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,151 @@
+from setuptools import setup%0A%0Asetup(%0A name='django-filebased-email-backend-ng',%0A packages=(%0A 'django_filebased_email_backend_ng',%0A )%0A)%0A
|
|
95d1f63ce4d9698f8ab4b64757e3669c75accbbd | throw on some more setup.py pypi classifiers | setup.py | setup.py | from distutils.core import setup
setup(
name='django-object-actions',
version='0.0.1',
author="The Texas Tribune",
author_email="[email protected]",
maintainer="Chris Chang",
# url
packages=['django_object_actions'],
include_package_data=True, # automatically include things from MANIFEST
license='Apache License, Version 2.0',
description='A Django app for adding object tools to models',
long_description=open('README.md').read(),
classifiers=[
"Development Status :: 3 - Alpha",
"Framework :: Django",
],
)
| Python | 0 | @@ -571,16 +571,214 @@
jango%22,%0A
+ %22Intended Audience :: Developers%22,%0A %22License :: OSI Approved :: Apache Software License%22,%0A %22Operating System :: OS Independent%22,%0A %22Programming Language :: Python :: 2%22,%0A
%5D,%0A)
|
591b9be8d03cf2ecd12eed1bd36f9d762e91195c | Add setup.py for package installation | setup.py | setup.py | Python | 0 | @@ -0,0 +1,602 @@
+from setuptools import setup%0A%0A%0Asetup(%0A name='simplio',%0A version='0.1',%0A description='Simplest-case command-line input/output',%0A long_description=(%0A 'Simplio is a Python function decorator that applies an input file '%0A 'object and an output file object as arguments to the decorated '%0A 'function. It determines this based on STDIN or the presence of '%0A 'command-line arguments.'),%0A url='https://github.com/josephl/simplio',%0A author='Joseph Lee',%0A author_email='[email protected]',%0A license='MIT',%0A keywords='input output file io',%0A)%0A
|
|
0abe1e173b73770b5f2ee81f57f21c41466e5c61 | Add setup script | setup.py | setup.py | Python | 0.000001 | @@ -0,0 +1,503 @@
+#!/usr/bin/env python%0A%0Aimport os.path%0Afrom setuptools import find_packages, setup%0A%0Asetup(%0A%09name = 'technic-solder-client',%0A%09version = '1.0',%0A%09description = 'Python implementation of a Technic Solder client',%0A%09author = 'Cadyyan',%0A%09url = 'https://github.com/cadyyan/technic-solder-client',%0A%09licensee = 'MIT',%0A%09packages = find_packages(),%0A%09install_requires = %5B%0A%09%09'tabulate',%0A%09%5D,%0A%09scripts = %5B%0A%09%09os.path.join('bin', 'solder'),%0A%09%5D,%0A)%0A%0A
|
|
af49ecf6ce12b2fa909733c17569c7231c343190 | add simple sql shell | shell.py | shell.py | Python | 0.000003 | @@ -0,0 +1,1055 @@
+# simple interactive shell for MSSQL server%0Aimport pytds%0Aimport os%0A%0A%0Adef main():%0A conn = pytds.connect(dsn=os.getenv(%22HOST%22, %22localhost%22), user=os.getenv(%22SQLUSER%22, %22sa%22), password=os.getenv(%22SQLPASSWORD%22))%0A while True:%0A try:%0A sql = input(%22sql%3E %22)%0A except KeyboardInterrupt:%0A return%0A with conn.cursor() as cursor:%0A try:%0A cursor.execute(sql)%0A except pytds.ProgrammingError as e:%0A print(%22Error: %22 + str(e))%0A else:%0A for _, msg in cursor.messages:%0A print(msg.text)%0A if cursor.description:%0A print('%5Ct'.join(col%5B0%5D for col in cursor.description))%0A print('-' * 80)%0A count = 0%0A for row in cursor:%0A print('%5Ct'.join(str(col) for col in row))%0A count += 1%0A print('-' * 80)%0A print(%22Returned %7B%7D rows%22.format(count))%0A print()%0A%0A%0Amain()
|
|
93e2d3d72099b854f854abc44a79b2c4edb74af8 | add basic file splitter | split.py | split.py | Python | 0 | @@ -0,0 +1,499 @@
+#!/usr/bin/python%0A%0A# This Source Code Form is subject to the terms of the Mozilla Public%0A# License, v. 2.0. If a copy of the MPL was not distributed with this%0A# file, You can obtain one at http://mozilla.org/MPL/2.0/.%0A%0A%0A# Echo all output starting with the line after the line that starts with splitStart.%0A%0A%0Aimport sys%0A%0A%0AsplitStart = %22QQQQQQQQQ%22%0A%0A%0AfoundLine = False%0A%0Afor l in sys.stdin:%0A if foundLine:%0A print l,%0A continue%0A%0A if l.startswith(splitStart):%0A foundLine = True%0A%0A%0A
|
|
3d44701308fe1c32d8ae2efab609d5e7bcd563c0 | Create ajastin.py | ajastin.py | ajastin.py | Python | 0.000115 | @@ -0,0 +1,740 @@
+def downloader():%0A%09#import downloader%0A #downloader.main()%0A return 0%0A%0Adef lampotila():%0A%09Tnow = 15%0A%09#import lampotila%0A%09#lampotila.main()%0A%09return Tnow%0A%09%0A%0Adef main():%0A%09%0A%09import time %0A%09from datetime import datetime %0A%09n = 0%0A%09ret1 = 0%0A%09t0 = time.time()%0A%09try:%0A%09%09while ret1 == 0:%0A%09%09%09time.sleep(10)%0A%09%09%09#t%C3%A4h%C3%A4n tulee PID funktio%0A%09%09%09now = datetime.now()%0A%09%09%09print(%22%7B:d%7D:%7B:d%7D:%7B:d%7D%22.format(now.hour, now.minute, now.second))%0A%09%09%09if now.minute == 0 and now.hour == 0:%0A%09%09%09%09downloader()%0A%09%09%09%09while now.minute == 0:%0A%09%09%09%09%09time.sleep(1)%0A%09%09%09%09%09now = datetime.now()%0A%0A%09%09%09if now.minute %25 30 == 0: %0A%09%09%09%09lampotila() %0A%09%09%09%09while now.minute %25 30:%0A%09%09%09%09%09time.sleep(1)%0A%09%09%09%09%09datetime.now()%0A%09%09%09%09%09%0A%09except KeyboardInterrupt:%0A%09%09return%0A%09%0Amain()%09%0A
|
|
c45da8544bd3e4f85073e61cfba417862ce66fc2 | add 'Appeaser' strategy | axelrod/strategies/appeaser.py | axelrod/strategies/appeaser.py | Python | 0.004212 | @@ -0,0 +1,632 @@
+from axelrod import Player%0A%0Aclass Appeaser(Player):%0A %22%22%22%0A A player who tries to guess what the opponent wants, switching his %0A behaviour every time the opponent plays 'D'.%0A %22%22%22%0A def strategy(self, opponent):%0A %22%22%22%0A Start with 'C', switch between 'C' and 'D' when opponent plays 'D'.%0A %22%22%22%0A if len(self.history) == 0:%0A%09 self.str = 'C'%0A if opponent.history%5B-1%5D == 'D':%0A%09 if self.str == 'C':%0A%09%09self.str = 'D'%0A %09 else:%0A%09%09self.str = 'C'%0A return self.str%0A%0A def __repr__(self):%0A %22%22%22%0A The string method for the strategy:%0A %22%22%22%0A return 'Appeaser'%0A
|
|
d29a94809f6f58e053a646d796fe9e55a51b334e | Initialize Ch. 1 caesarHacker | books/CrackingCodesWithPython/Chapter01/caesarHacker.py | books/CrackingCodesWithPython/Chapter01/caesarHacker.py | Python | 0.00232 | @@ -0,0 +1,1444 @@
+# Caesar Hacker improved%0A# Rewritten as function for importing%0A# SPOILERS: Chapter 6 (caesarHacker), Chapter 7 (functions)%0A%0Aimport books.CrackingCodesWithPython.Chapter01.config%0A%0A%0Adef hackCaesar(message):%0A%0A # Loop through every possible key:%0A for key in range(len(books.CrackingCodesWithPython.Chapter01.config.SYMBOLS)):%0A # It is important to set translated to the blank string so that the%0A # previous iteration's value for translated is cleared:%0A translated = ''%0A%0A # The rest of the program is almost the same as the Caesar program:%0A%0A # Loop through each symbol in message:%0A for symbol in message:%0A if symbol in books.CrackingCodesWithPython.Chapter01.config.SYMBOLS:%0A symbolIndex = books.CrackingCodesWithPython.Chapter01.config.SYMBOLS.find(symbol)%0A translatedIndex = symbolIndex - key%0A%0A # Handle the wraparound:%0A if translatedIndex %3C 0:%0A translatedIndex += len(books.CrackingCodesWithPython.Chapter01.config.SYMBOLS)%0A%0A # Append the decrypted symbol:%0A translated += books.CrackingCodesWithPython.Chapter01.config.SYMBOLS%5BtranslatedIndex%5D%0A%0A else:%0A # Append the symbol without encrypting/decrypting:%0A translated += symbol%0A%0A # Display every possible decryption:%0A print('Key #%25s: %25s' %25 (key, translated))%0A return None%0A
|
|
03fce72b60eb8cad2368447cf23f72f8084f4a4b | Add py solution for 575. Distribute Candies | py/distribute-candies.py | py/distribute-candies.py | Python | 0.000002 | @@ -0,0 +1,199 @@
+class Solution(object):%0A def distributeCandies(self, candies):%0A %22%22%22%0A :type candies: List%5Bint%5D%0A :rtype: int%0A %22%22%22%0A return min(len(candies) / 2, len(set(candies)))%0A
|
|
d34dcf1179e6e5c2b864627266ae1788d10142aa | Add Chuanping Yu's solutions to Problem02 | Week01/Problem02/cyu_02.py | Week01/Problem02/cyu_02.py | Python | 0 | @@ -0,0 +1,312 @@
+#!/usr/bin/env python3%0A%0A%22%22%22This script is written by Chuanping Yu, on Jul 24, 2017,%0Afor the Assignment#1 in IDEaS workshop%22%22%22%0A%0A#Problem 2%0AFIB = %5B%5D%0AF = 1%0AS = 0%0AFIB.append(F)%0AFIB.append(F)%0Awhile F %3C= 4000000:%0A F = FIB%5B-1%5D + FIB%5B-2%5D%0A FIB.append(F)%0A if F%252 == 0 and F %3C= 4000000:%0A S = S + F%0Aprint(S)%0A
|
|
5eb9a910096f3e0000499390541a83bc50fb73ce | add binheap | binheap.py | binheap.py | Python | 0.000841 | @@ -0,0 +1,1121 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0A%0Aclass BinHeap(object):%0A%0A def __init__(self, iterable=None):%0A self.list = %5B%5D%0A if iterable:%0A for item in iterable:%0A self.push(item)%0A%0A def push(self, value):%0A self.list.append(value)%0A self._bubble_up(len(self.list) - 1)%0A%0A def _bubble_up(self, index):%0A if self.list%5Bindex%5D %3C self.list%5B(index - 1) // 2%5D:%0A self.list%5Bindex%5D, self.list%5B(index - 1) // 2%5D = self.list%5B(index - 1) // 2%5D,%0A self.list%5Bindex%5D%0A self._bubble_up((index - 1) // 2)%0A%0A def pop(self):%0A return_val = self.list%5B0%5D%0A self.list%5B0%5D = self.list.pop()%0A self._bubble_down(0)%0A return return_val%0A%0A def _bubble_down(self, index):%0A child = None%0A if self.list%5B2 * index + 1%5D %3E self.list%5B2 * index + 2%5D:%0A child = 2 * index + 2%0A else:%0A child = 2 * index + 1%0A%0A if self.list%5Bindex%5D %3C self.list%5Bchild%5D:%0A self.list%5Bindex%5D, self.list%5Bchild%5D = self.list%5Bchild%5D, self.list%5Bindex%5D%0A self._bubble_down(child)%0A
|
|
4f0e9a14286f21d835e36e549ebee80419e46cec | test game | blocker.py | blocker.py | Python | 0.000006 | @@ -0,0 +1,170 @@
+#!/usr/bin/env python%0A%0Aclass Blocker:%0A def __init__(self):%0A print 'Blocker v1.0'%0A return%0A%0A def run(self):%0A return%0A%0Agame = Blocker()%0Agame.run()%0A
|
|
769019be1331fa58e363fba37957ec90ab6f8163 | add code for more precise arbtirage math (WiP) | arbmath.py | arbmath.py | Python | 0 | @@ -0,0 +1,2353 @@
+import decimal%0Afrom decimal import Decimal%0A%0Aclass ExchangeModel(object);%0A def __init__(self, depths, tradeApi):%0A self.depths = depths;%0A self.tradeApi = tradeApi%0A self.symbols = %5Bkey%5B:3%5D for key, value in depths%5D + %5Bkey%5B3:%5D for key, value in depths%5D%0A self.symbols = list(set(self.symbols))%0A %0A # returns (balance, remaining order)%0A def ModelL1Trade(balance, pair, type, price, amount):%0A depth = self.depths%5Bpair%5D%0A remainingOrder = %7B 'pair': pair, 'type': type, 'price': price, 'amount': amount %7D%0A remainder = remainingOrder%5B'amount'%5D%0A traded = False%0A if type == 'buy':%0A if(not depth%5B'ask'%5D):%0A return (balance, remainingOrder, traded)%0A %0A ask = depth%5B'ask'%5D%5B0%5D%0A if ask%5B'price'%5D %3E price:%0A return (balance, remainingOrder, traded)%0A %0A tradedAmount = min(amount, ask%5B'amount'%5D)%0A remainder = max(amount - ask%5B'amount'%5D, 0)%0A %0A ask%5B'amount'%5D -= tradedAmount%0A balance%5Bpair%5B:3%5D%5D += tradedAmount * k%0A balance%5Bpair%5B3:%5D%5D -= tradedAmount * ask%5B'price'%5D%0A traded = True%0A %0A if ask%5B'amount'%5D == Decimal('0'):%0A self.depths%5Bpair%5D%5B'ask'%5D = self.depths%5Bpair%5D%5B'ask'%5D%5B1:%5D%0A %0A elif type == 'sell':%0A if not depth%5B'bid'%5D:%0A return (balance, remainingOrder, traded)%0A %0A bid = depth%5B'bid'%5D%5B0%5D%0A if bid%5B'price'%5D %3C price:%0A return (balance, remainingOrder, traded)%0A %0A tradedAmount = min(amount, bid%5B'amount'%5D)%0A remainder = max(amount - bid%5B'amount'%5D, 0)%0A %0A bid%5B'amount'%5D -= tradedAmount%0A balance%5Bpair%5B:3%5D%5D -= tradedAmount%0A balance%5Bpair%5B3:%5D%5D += tradedAmount * bid%5B'price'%5D * k%0A traded = True%0A %0A if bid%5B'amount'%5D == Decimal('0'):%0A self.depths%5Bpair%5D%5B'bid'%5D = self.depths%5Bpair%5D%5B'bid'%5D%5B1:%5D%0A %0A remainingOrder%5B'amount'%5D = remainder%0A return (balance, remainingOrder, traded)%0A %0A %0A def ModelTrade(balance, pair, type, price, amount):%0A if not (pair in depths):%0A return None%0A %0A depth = depths%5Bpair%5D%0A %0A if type == 'buy':%0A ask = depth%5B'ask'%5D%0A %0A%0Adef CalculateArb(direction, price1, price2, price3, k):%0A %0Adef CalculateElemArb(direction, books, pair1, pair2, pair3, tradeApi, balance):%0A %0A %0A# returns (list of orders that produces immediate profit, balance)%0Adef CalculateArb(books, pair1, pair2, pair3, maxArbDepth, tradeApi, balance):%0A k =
|
|
cd5e6a14bb0a67d6558b691f6b55f7918c4d4970 | Create new package (#6384) | var/spack/repos/builtin/packages/r-fnn/package.py | var/spack/repos/builtin/packages/r-fnn/package.py | Python | 0 | @@ -0,0 +1,2106 @@
+##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass RFnn(RPackage):%0A %22%22%22Cover-tree and kd-tree fast k-nearest neighbor search algorithms and%0A related applications including KNN classification, regression and%0A information measures are implemented.%22%22%22%0A%0A homepage = %22https://cran.r-project.org/web/packages/FNN/index.html%22%0A url = %22https://cran.r-project.org/src/contrib/FNN_1.1.tar.gz%22%0A list_url = %22https://cran.rstudio.com/src/contrib/Archive/FNN%22%0A%0A version('1.1', '8ba8f5b8be271785593e13eae7b8c393')%0A version('1.0', 'e9a47dc69d1ba55165be0877b8443fe0')%0A version('0.6-4', '1c105df9763ceb7b13989cdbcb542fcc')%0A version('0.6-3', 'f0f0184e50f9f30a36ed5cff24d6cff2')%0A version('0.6-2', '20648ba934ea32b1b00dafb75e1a830c')%0A%0A depends_on('[email protected]:3.4.9')%0A depends_on('r-mvtnorm', type=('build', 'run'))%0A depends_on('r-chemometrics', type=('build', 'run'))%0A
|
|
f68175870692d128fb2a01795d20605bb2e17aa9 | Add initial functional tests | functional_tests/test_evexml.py | functional_tests/test_evexml.py | Python | 0.000001 | @@ -0,0 +1,1465 @@
+%22%22%22Functional tests for the xml api part of aniauth project.%0A%0AThis is a temporary app as EVE Online's xml api is deprecated and will be%0Adisabled March 2018.%0A%0A%22%22%22%0Afrom django.contrib.staticfiles.testing import StaticLiveServerTestCase%0Afrom django.test import tag%0Afrom django.shortcuts import reverse%0Afrom selenium import webdriver%0Afrom selenium.webdriver.common.keys import Keys%0A%0A%0AMAX_WAIT = 10%0A%0A%0A@tag('functional')%0Aclass SubmissionTest(StaticLiveServerTestCase):%0A %22%22%22Tests for users who are submitting xml api key.%0A%0A %22%22%22%0A @classmethod%0A def setUpClass(cls):%0A super(SubmissionTest, cls).setUpClass()%0A cls.browser = webdriver.Chrome()%0A cls.browser.maximize_window()%0A cls.browser.implicitly_wait(MAX_WAIT)%0A super(SubmissionTest, cls).setUpClass()%0A%0A @classmethod%0A def tearDownClass(cls):%0A cls.browser.refresh()%0A cls.browser.quit()%0A super(SubmissionTest, cls).tearDownClass()%0A%0A def tearDown(self):%0A self.browser.refresh()%0A%0A def test_user_can_see_apikey_form(self):%0A %22%22%22A user should be able to see the form for submitting api keys.%0A%0A %22%22%22%0A # They browse to the eve api keys page.%0A url = self.live_server_url + reverse('eveapi')%0A self.browser.get(self.live_server_url)%0A # They see input boxes for keyID and vCode.%0A keyid_input = self.browser.find_element_by_name('keyID')%0A vcode_input = self.browser.find_element_by_name('vCode')%0A
|
|
6a7b32e271a264aad763fbd28749ac1258cf041f | Add dialplan filestring module | wirecurly/dialplan/filestring.py | wirecurly/dialplan/filestring.py | Python | 0 | @@ -0,0 +1,789 @@
+import logging%0Afrom wirecurly.exc import *%0Afrom wirecurly.dialplan.expression import *%0Aimport os%0A%0Alog = logging.getLogger(__name__)%0A%0A__all__ = %5B'FileString'%5D%0A%0Aclass FileString(object):%0A%09'''%0A%09%09Filestring oject to use with playback app in dialplan.%0A%09'''%0A%0A%09def __init__(self,*argv):%0A%09%09super(FileString, self).__init__()%0A%09%09self.audios = %5B%5D%0A%09%09self.path = 'usr/share/freeswitch/sounds/en/us/callie/'%0A%09%09for i in argv:%0A%09%09%09self.addAudio(i)%0A%0A%0A%09def addAudio(self,audio):%0A%09%09'''%0A%09%09%09Add an audio file to FileString object%0A%09%09'''%0A%09%09self.audios.append(audio)%0A%0A%09def setPath(self,path):%0A%09%09'''%0A%09%09%09Set Path for audios%0A%09%09'''%0A%09%09self.path = path%0A%0A%09def toString(self):%0A%09%09'''%0A%09%09%09Return a string to use with playback app%0A%09%09'''%0A%09%09return 'file_string://%25s' %25 '!'.join(%5B'%25s%25s' %25 (self.path,a) for a in self.audios%5D)%0A%0A%09%09
|
|
6ce84d454ef18f7b7dfc988195bfacb4e69e8c3f | add CRUD test cases for Snippet | hackathon_starter/hackathon/unittests/testsnippets.py | hackathon_starter/hackathon/unittests/testsnippets.py | Python | 0 | @@ -0,0 +1,2095 @@
+from hackathon.models import Snippet%0Afrom rest_framework import status%0Afrom rest_framework.test import APITestCase%0A%0A%0Aclass SnippetViewTestCase(APITestCase):%0A def setUp(self):%0A self.s1 = Snippet.objects.create(title='t1', code=%22%22%22print(%22Hello, World.%22)%22%22%22)%0A self.s2 = Snippet.objects.create(title='t2', code=%22%22%22print(%22Goodbye, World.%22)%22%22%22)%0A super(SnippetViewTestCase, self).setUp()%0A%0A def test_list(self):%0A response = self.client.get('/hackathon/snippets/')%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A self.assertEqual(len(response.data), 2)%0A%0A def test_detail(self):%0A response = self.client.get('/hackathon/snippets/%7B%7D/'.format(self.s1.id))%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A self.assertEqual(response.data%5B'id'%5D, self.s1.id)%0A%0A def test_create(self):%0A payload = %7B'title': 't3', 'code': %22%22%22print(%22Create, World.%22)%22%22%22%7D%0A response = self.client.post('/hackathon/snippets/', payload)%0A self.assertEqual(response.status_code, status.HTTP_201_CREATED)%0A self.assertEqual(response.data%5B'title'%5D, 't3')%0A self.assertEqual(response.data%5B'code'%5D, %22%22%22print(%22Create, World.%22)%22%22%22)%0A%0A def test_update(self):%0A payload = %7B'title': 't666', 'code': '2 + 2'%7D%0A response = self.client.put('/hackathon/snippets/%7B%7D/'.format(self.s1.id), payload)%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A self.assertEqual(response.data%5B'title'%5D, 't666')%0A self.assertEqual(response.data%5B'code'%5D, '2 + 2')%0A%0A def test_partial_update(self):%0A payload = %7B'title': 't666'%7D%0A response = self.client.patch('/hackathon/snippets/%7B%7D/'.format(self.s1.id), payload)%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A self.assertEqual(response.data%5B'title'%5D, 't666')%0A%0A def test_delete(self):%0A response = self.client.delete('/hackathon/snippets/%7B%7D/'.format(self.s1.id))%0A self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)%0A self.assertEqual(Snippet.objects.count(), 1)%0A
|
|
f59749db263291f481c4bdc9f6ede2f6de6cb6d4 | Create foundation for input file generation (csv for connectivity table, etc.) | create_input_files.py | create_input_files.py | Python | 0 | @@ -0,0 +1,463 @@
+import csv%0Aimport argparse%0Aimport itertools%0A%0Afrom thermo_utils import csv_row_writer, read_csv_rows%0A%0A# Read input/output arguments%0Aparser = argparse.ArgumentParser()%0Aparser.add_argument('-o','--output',required=True)%0Aparser.add_argument('-d','--dof',required=True)%0A# parser.add_argument('-v','--version',required=False)%0Aargs = parser.parse_args()%0A%0A%0A# Write all rows to equations CSV file%0Acsv_row_writer(args.output,outRows)%0Aprint('Output file: %25s' %25 args.output)%0A
|
|
9f6df0b93a7a6911d9e7eee0e4fe87e34ea52832 | Create main entrypoint of cli | shub_cli/cli.py | shub_cli/cli.py | Python | 0 | @@ -0,0 +1,1778 @@
+%22%22%22%0AScrapinghub CLI%0A%0AUsage:%0A shub-cli jobs%0A shub-cli jobs %5B-t TAG1,TAG2%5D %5B-l LACK1,LACK2%5D %5B-s SPIDER%5D %5B-e STATE%5D %5B-c COUNT%5D%0A shub-cli job -id %3Cid%3E%0A%0AOptions:%0A -t TAG1,TAG2 Description.%0A -l LACK1,LACK2 Description.%0A -s SPIDER Description.%0A -e STATE Description.%0A -c COUNT Description.%0A%0AExamples:%0A shub-cli jobs%0A shub-cli jobs -c 100%0A shub-cli jobs -t fast,production -l consumed,dev -s spider1 state finished%0A shub-cli jobs tags consumed lacks teste spider my-spider state state count 1000%0A shub-cli job -id '10/10/1000'%0A%0A%0AHelp:%0A For help using this tool, please open an issue on the Github repository:%0A https://github.com/victormartinez/shub_cli%0A%22%22%22%0Afrom docopt import docopt%0Afrom shub_cli import __version__ as VERSION%0Afrom shub.config import load_shub_config%0Afrom shub_cli.commands.job import Job%0Afrom shub_cli.commands.jobs import Jobs%0Afrom shub_cli.util.display import display, display_jobs%0A%0Aconfig = load_shub_config()%0Aapi_keys = config.apikeys%0Aprojects = config.projects%0A%0A%0A# 70953/91/7817%0A%0Adef main():%0A %22%22%22Main CLI entrypoint.%22%22%22%0A default_api_key = api_keys%5B'default'%5D%0A default_project = projects%5B'default'%5D%0A options = dict(docopt(__doc__, version=VERSION).items())%0A%0A print('Connection: %7B%7D'.format(default_api_key))%0A print('Project: %7B%7D'.format(default_project))%0A%0A if 'job' in options.keys() and options%5B'job'%5D == True:%0A if '-id' in options.keys():%0A job = Job(options, api_key=default_api_key, project=default_project)%0A display(job.run())%0A else:%0A print('')%0A print('Wrong command.')%0A%0A if 'jobs' in options.keys() and options%5B'jobs'%5D == True:%0A jobs = Jobs(options, api_key=default_api_key, project=default_project)%0A display_jobs(jobs.run())%0A
|
|
6bce6ca2ae91b2eebad1d32ed970969ea5e423a2 | String reverse done | Text/reverse.py | Text/reverse.py | Python | 0.999408 | @@ -0,0 +1,219 @@
+# -*- coding: cp1252 -*-%0A%22%22%22%0AReverse a String Enter a string and the program%0Awill reverse it and print it out.%0A%22%22%22%0A%0Astring = raw_input(%22Whatchu wanna say to me? %22)%0Aprint %22You say %25s, I say %25s%22 %25 (string, string%5B::-1%5D)%0A
|
|
087829b024ea9c5b2028c3f13786578be6dfd702 | fix the bug of loading all cifar data | load_data.py | load_data.py | # encoding: utf-8
"""
@author: ouwj
@position: ouwj-win10
@file: load_data.py
@time: 2017/4/26 14:33
"""
from tensorflow.examples.tutorials.mnist import input_data
import numpy as np
def unpickle(file):
import pickle
with open(file, 'rb') as fo:
dict = pickle.load(fo, encoding='bytes')
return dict
def load_data(dataset='MNIST'):
if dataset == 'MNIST':
return input_data.read_data_sets('MNIST/')
elif dataset == 'CIFAR':
dirname = 'CIFAR/cifar-10-batches-py/'
# print(unpickle(dirname+'test_batch'))
data = unpickle(dirname+'test_batch')[b'data'] / 255.0
# for i in range(1, 6):
# data = np.vstack((data, unpickle(dirname+'data_batch_'+str(i))[b'data'] / 255.0))
return data
if __name__ == '__main__':
data = load_data('CIFAR')
print(data[0:5, :]) | Python | 0.000001 | @@ -621,18 +621,16 @@
%0A
- #
for i i
@@ -651,18 +651,16 @@
%0A
- #
dat
|
54404541913185a54fea75353d9fffc72ddc2ff6 | Create discovery_diag.py | python/discovery_diag.py | python/discovery_diag.py | Python | 0 | @@ -0,0 +1,893 @@
+import requests%0Aimport json%0A%0A%0Arequests.packages.urllib3.disable_warnings()%0As = requests.Session()%0A%0Adef netmriLogin( temp, querystring ):%0A username = %22admin%22%0A password = %22infioblox%22%0A url = %22https://demo-netmri.infoblox.com/api/3.3%22 + temp%0A response = s.request(%22GET%22, url, params=querystring, verify=False,%0A auth=(username, password))%0A t = response.text%0A return(t);%0A%0A%0At = netmriLogin(temp=%22/device_group_members/index%22, querystring=%7B%22GroupID%22:%2220%22,%22select%22:%22DeviceID%22%7D)%0Az = json.loads(t)%0A%0Afor entry in z%5B'device_group_members'%5D:%0A print(entry%5B'DeviceID'%5D)%0A filename = str(entry%5B'DeviceID'%5D) + %22.txt%22%0A device = %7B%22DeviceID%22: entry%5B'DeviceID'%5D%7D%0A with open(filename, %22w%22) as f:%0A p = netmriLogin(temp=%22/devices/diagnostic%22, querystring=device)%0A i = json.loads(p)%0A print(type(i))%0A print(i)%0A f.write(i%5B'text'%5D)%0A%0A%0A
|
|
b3c408845a6aba2e5bc15509f7d06800fb9e6c8b | multiples of 3 or 5 | 1-10/1.py | 1-10/1.py | Python | 0.999834 | @@ -0,0 +1,245 @@
+def sum_of_multiples_of_three_or_five(n):%0A result = sum(%5Bx for x in range(1, n) if x %25 3 == 0 or x %25 5 == 0%5D)%0A return result%0A%0Adef main():%0A n = 10**3%0A print(sum_of_multiples_of_three_or_five(n))%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
35f4f5bbea5b291b8204a2ca30acddebfad86d3e | Create 2004-4.py | 2004-4.py | 2004-4.py | Python | 0.000009 | @@ -0,0 +1,490 @@
+times = input()%0A%0Ai = 0%0Awhile i %3C times:%0A %0A length = input()%0A %0A ascents = 0%0A descents = 0%0A plateaus = 0%0A maxA = 0%0A maxD = 0%0A maxP = 0%0A %0A sequence = %5B%5D%0A %0A j = 0%0A while j %3C length:%0A %0A currentNum = input() %0A sequence.append(currentNum)%0A %0A if j != 0:%0A if currentNum %3C sequence%5Bj-1%5D:%0A #descent%0A else:%0A #first time you can do nothing except reset max length to 1 below%0A maxA += 1%0A maxD += 1%0A maxP += 1%0A %0A j += 1%0A %0A i += 1%0A
|
|
f228b0d76a5c619e45d40d4d0da12059cb2668e9 | Create warlock.py | hsgame/cards/minions/warlock.py | hsgame/cards/minions/warlock.py | Python | 0.000001 | @@ -0,0 +1,252 @@
+import hsgame.targeting%0Afrom hsgame.constants import CHARACTER_CLASS, CARD_RARITY, MINION_TYPE%0Afrom hsgame.game_objects import MinionCard, Minion, Card%0A#from hsgame.cards.battlecries import %0A%0A__author__ = 'randomflyingtaco'%0A#let the train wreck begin%0A%0A
|
|
97fcef753647bfbdab0381b30d1533bdce36aeb9 | fix admin | django-pyodbc/contrib/admin/models/models.py | django-pyodbc/contrib/admin/models/models.py | Python | 0 | @@ -0,0 +1,2084 @@
+from django.db import models%0Afrom django.contrib.contenttypes.models import ContentType%0Afrom django.contrib.auth.models import User%0Afrom django.utils.translation import ugettext_lazy as _%0Afrom django.utils.encoding import smart_unicode%0Afrom django.utils.safestring import mark_safe%0A%0AADDITION = 1%0ACHANGE = 2%0ADELETION = 3%0A%0Aclass LogEntryManager(models.Manager):%0A def log_action(self, user_id, content_type_id, object_id, object_repr, action_flag, change_message=''):%0A e = self.model(None, None, user_id, content_type_id, smart_unicode(object_id), object_repr%5B:200%5D, action_flag, change_message)%0A e.save()%0A%0Aclass LogEntry(models.Model):%0A action_time = models.DateTimeField(_('action time'), auto_now=True)%0A user = models.ForeignKey(User)%0A content_type = models.ForeignKey(ContentType, blank=True, null=True)%0A object_id = models.TextField(_('object id'), blank=True, null=True)%0A object_repr = models.CharField(_('object repr'), max_length=200)%0A action_flag = models.PositiveSmallIntegerField(_('action flag'))%0A change_message = models.TextField(_('change message'), blank=True)%0A objects = LogEntryManager()%0A class Meta:%0A verbose_name = _('log entry')%0A verbose_name_plural = _('log entries')%0A db_table = 'django_admin_log'%0A ordering = ('-action_time',)%0A%0A def __repr__(self):%0A return smart_unicode(self.action_time)%0A%0A def is_addition(self):%0A return self.action_flag == ADDITION%0A%0A def is_change(self):%0A return self.action_flag == CHANGE%0A%0A def is_deletion(self):%0A return self.action_flag == DELETION%0A%0A def get_edited_object(self):%0A %22Returns the edited object represented by this log entry%22%0A return self.content_type.get_object_for_this_type(pk=self.object_id)%0A%0A def get_admin_url(self):%0A %22%22%22%0A Returns the admin URL to edit the object represented by this log entry.%0A This is relative to the Django admin index page.%0A %22%22%22%0A return mark_safe(u%22%25s/%25s/%25s/%22 %25 (self.content_type.app_label, self.content_type.model, self.object_id))%0A
|
|
0ace48790374ea75ba2c6cbc51678e3240c22a88 | Create Differ.py | Differ.py | Differ.py | Python | 0 | @@ -0,0 +1,479 @@
+%0Afile1 = raw_input('%5Bfile1:%5D ')%0Amodified = open(file1,%22r%22).readlines()%5B0%5D%0A%0Afile2 = raw_input('%5Bfile2:%5D ')%0Api = open(file2, %22r%22).readlines()%5B0%5D # %5B:len(modified)%5D%0A%0Aresultado = %22%22.join( x for x,y in zip(modified, pi) if x != y)%0Aresultado2 = %22%22.join( x for x,y in zip(pi, modified) if x != y)%0A%0Aprint %22%5BDiffer:%5D%0Aprint '%5Cn-------------------------------------'%0Aprint %22%5Bfile1%5D -%3E %5Bfile2%5D%22, resultado%0Aprint '-------------------------------------'%0Aprint %22%5Bfile2%5D -%3E %5Bfile1%5D%22, resultado2%0A
|
|
8a4d5ccd64433994854815e6c20e1fde08ec1998 | remove self.query_get_clause from trial balance report | addons/account/report/account_balance.py | addons/account/report/account_balance.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
from common_report_header import common_report_header
class account_balance(report_sxw.rml_parse, common_report_header):
_name = 'report.account.account.balance'
def __init__(self, cr, uid, name, context=None):
super(account_balance, self).__init__(cr, uid, name, context=context)
self.sum_debit = 0.00
self.sum_credit = 0.00
self.date_lst = []
self.date_lst_string = ''
self.result_acc = []
self.localcontext.update({
'time': time,
'lines': self.lines,
'sum_debit': self._sum_debit,
'sum_credit': self._sum_credit,
'get_fiscalyear':self._get_fiscalyear,
'get_filter': self._get_filter,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period ,
'get_account': self._get_account,
'get_journal': self._get_journal,
'get_start_date':self._get_start_date,
'get_end_date':self._get_end_date,
})
self.context = context
def set_context(self, objects, data, ids, report_type=None):
new_ids = ids
if (data['model'] == 'ir.ui.menu'):
new_ids = 'chart_account_id' in data['form'] and [data['form']['chart_account_id']] or []
objects = self.pool.get('account.account').browse(self.cr, self.uid, new_ids)
self.query_get_clause = data['form'].get('query_line', False) or ''
return super(account_balance, self).set_context(objects, data, new_ids, report_type=report_type)
#def _add_header(self, node, header=1):
# if header == 0:
# self.rml_header = ""
# return True
def _get_account(self, data):
if data['model']=='account.account':
return self.pool.get('account.account').browse(self.cr, self.uid, data['form']['id']).company_id.name
return super(account_balance ,self)._get_account(data)
def lines(self, form, ids=[], done=None):#, level=1):
def _process_child(accounts, disp_acc, parent):
account_rec = [acct for acct in accounts if acct['id']==parent][0]
res = {
'id': account_rec['id'],
'type': account_rec['type'],
'code': account_rec['code'],
'name': account_rec['name'],
'level': account_rec['level'],
'debit': account_rec['debit'],
'credit': account_rec['credit'],
'balance': account_rec['balance'],
'parent_id': account_rec['parent_id'],
'bal_type': '',
}
self.sum_debit += account_rec['debit']
self.sum_credit += account_rec['credit']
if disp_acc == 'bal_movement':
if res['credit'] > 0 or res['debit'] > 0 or res['balance'] > 0 :
self.result_acc.append(res)
elif disp_acc == 'bal_solde':
if res['balance'] != 0:
self.result_acc.append(res)
else:
self.result_acc.append(res)
if account_rec['child_id']:
for child in account_rec['child_id']:
_process_child(accounts,disp_acc,child)
obj_account = self.pool.get('account.account')
if not ids:
ids = self.ids
if not ids:
return []
if not done:
done={}
ctx = self.context.copy()
ctx['fiscalyear'] = form['fiscalyear_id']
if form['filter'] == 'filter_period':
ctx['periods'] = form['periods']
elif form['filter'] == 'filter_date':
ctx['date_from'] = form['date_from']
ctx['date_to'] = form['date_to']
ctx['state'] = form['target_move']
parents = ids
child_ids = obj_account._get_children_and_consol(self.cr, self.uid, ids, ctx)
if child_ids:
ids = child_ids
accounts = obj_account.read(self.cr, self.uid, ids, ['type','code','name','debit','credit','balance','parent_id','level','child_id'], ctx)
for parent in parents:
if parent in done:
continue
done[parent] = 1
_process_child(accounts,form['display_account'],parent)
return self.result_acc
report_sxw.report_sxw('report.account.account.balance', 'account.account', 'addons/account/report/account_balance.rml', parser=account_balance, header="internal")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Python | 0 | @@ -2407,84 +2407,8 @@
ds)%0A
- self.query_get_clause = data%5B'form'%5D.get('query_line', False) or ''%0A
|
60de63d2fc53c020649bc21576765366f310cf56 | fix by adding migration | src/polls/migrations/0006_auto_20171114_1128.py | src/polls/migrations/0006_auto_20171114_1128.py | Python | 0.000001 | @@ -0,0 +1,748 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.5 on 2017-11-14 10:28%0Afrom __future__ import unicode_literals%0A%0Aimport django.contrib.postgres.fields.jsonb%0Aimport django.core.serializers.json%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('polls', '0005_poll_tags'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='poll',%0A name='rules',%0A field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder, help_text='Un object JSON d%C3%A9crivant les r%C3%A8gles. Actuellement, sont reconnues %60options%60,%60min_options%60 et %60max_options', verbose_name='Les r%C3%A8gles du vote'),%0A ),%0A %5D%0A
|
|
a5d5dde8c523aa28452d790e7f0291c1cf52aacb | Make sure setUpModule is called by the test framework. We brought in pytest-2.4.0.dev8 for that specific functionality. However, one time we regressed, and our tests started misbehaving. So, this test is here to keep us honest. | tests/external/py2/testfixture_test.py | tests/external/py2/testfixture_test.py | Python | 0 | @@ -0,0 +1,1233 @@
+#!/usr/bin/env python%0A# ----------------------------------------------------------------------%0A# Copyright (C) 2013 Numenta Inc. All rights reserved.%0A#%0A# The information and source code contained herein is the%0A# exclusive property of Numenta Inc. No part of this software%0A# may be used, reproduced, stored or distributed in any form,%0A# without explicit written authorization from Numenta Inc.%0A# ----------------------------------------------------------------------%0A%0A%22%22%22%0AUnit tests for our dependencies in the pytest package; at the time of this%0Awriting, we were using an unreleased version of pytest that added support for%0Athe unittest setUpModule fixture and friends. Some of our tests rely on%0AsetUpModule. Once, there was a conflict with pytest installation in our build%0Asystem, and an older version of pytest was installed that didn't support%0AsetUpModule, which resulted in suble side-effects in some of these tests.%0A%22%22%22%0A%0Aimport unittest2 as unittest%0A%0A%0Ag_setUpModuleCalled = False%0A%0A%0A%0Adef setUpModule():%0A global g_setUpModuleCalled%0A g_setUpModuleCalled = True%0A%0A%0A%0Aclass TestPytest(unittest.TestCase):%0A%0A %0A def testSetUpModuleCalled(self):%0A self.assertTrue(g_setUpModuleCalled)%0A%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
b0c74bcf7dd4120684a944a7cd8cc005bee039f5 | Create BogoBogo.py | Challenge-175/01-Easy/BogoBogo.py | Challenge-175/01-Easy/BogoBogo.py | Python | 0.000001 | @@ -0,0 +1,647 @@
+import random%0A%0Adef bogosort(n, m):%0A%09i = 0%0A%09while n != m:%0A%09%09n = ''.join(random.sample(n,len(n)))%0A%09%09i += 1%0A%09print(i, 'iterations')%0A%09return i%0A%0Adef bogobogosort(n, m):%0A%09i = 0 #number of iterations%0A%09j = 2 #number of elements%0A%09while n%5B:j%5D != m:%0A%09%09n = ''.join(random.sample(n,len(n)))%0A%09%09while n%5B:j%5D != m%5B:j%5D:%0A%09%09%09n = ''.join(random.sample(n,len(n)))%0A%09%09%09i += 1%0A%09%09%09if n%5B:j%5D != m%5B:j%5D:%0A%09%09%09%09j = 2 #Start over%0A%09%09j += 1%0A%09print(i, 'iterations')%0A%09return i%0A%0Aprint(%22BOGO SORT%5Cn==============================%22)%0Afor i in range(10):%0A%09bogosort(%22lolhe%22,%22hello%22)%0A%0Aprint(%22%5CnBOGOBOGO SORT%5Cn==============================%22)%0Afor i in range(10):%0A%09bogobogosort(%22lolhe%22,%22hello%22)%0A
|
|
84b932df5520901645c6d999abddea1191654a34 | create skeleton of a proper in place quicksort | algorithms/sorting/quicksort_ip.py | algorithms/sorting/quicksort_ip.py | Python | 0.00004 | @@ -0,0 +1,418 @@
+from random import randint%0A%0A%0Adef partition(unsorted, start, end, pivot):%0A pass%0A%0A%0Adef choose_pivot(start, end):%0A pass%0A%0A%0Adef quicksort(unsorted, start=0, end=None):%0A pass%0A%0A%0Aif __name__ == '__main__':%0A unsorted = %5B3,345,456,7,879,970,7,4,23,123,45,467,578,78,6,4,324,145,345,3456,567,5768,6589,69,69%5D%0A sorted = quicksort(unsorted)%0A%0A print '%25r %3C-- unsorted' %25 unsorted%0A print '%25r %3C-- sorted' %25 sorted%0A
|
|
60002062970a2f83725355911dde73673c5875a5 | Add a snippet. | python/pyqt/pyqt5/button_clic_event_as_class.py | python/pyqt/pyqt5/button_clic_event_as_class.py | Python | 0.000002 | @@ -0,0 +1,1642 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0A# Copyright (c) 2015 J%C3%A9r%C3%A9mie DECOCK (http://www.jdhp.org)%0A%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A %0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN%0A# THE SOFTWARE.%0A%0Aimport sys%0Afrom PyQt5.QtWidgets import QApplication, QMainWindow, QPushButton%0A%0A%0Aclass Window(QMainWindow):%0A def __init__(self):%0A super().__init__()%0A%0A self.resize(250, 150)%0A self.setWindowTitle('Hello')%0A%0A button = QPushButton('Hello', self)%0A button.clicked.connect(self.on_clic)%0A%0A self.show()%0A%0A def on_clic(self):%0A print(%22Hello!%22)%0A%0A%0Aapp = QApplication(sys.argv)%0A%0Awindow = Window()%0A%0Aexit_code = app.exec_()%0Asys.exit(exit_code)%0A
|
|
6f5843fb04cfa2ed2082b340f282223ec374f9f6 | copy group descriptions to text table | alembic/versions/49ed2a435cf_group_description.py | alembic/versions/49ed2a435cf_group_description.py | Python | 0 | @@ -0,0 +1,1474 @@
+revision = '49ed2a435cf'%0Adown_revision = '5927719682b'%0A%0Aimport uuid%0Afrom datetime import datetime%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy import sql%0Aimport jinja2%0A%0A%0Adef random_uuid():%0A return str(uuid.uuid4())%0A%0A%0Adef upgrade():%0A text = sql.table('text',%0A sql.column('id'),%0A sql.column('ns'),%0A sql.column('name'),%0A )%0A text_version = sql.table( 'text_version',%0A sql.column('id'),%0A sql.column('text_id'),%0A sql.column('time'),%0A sql.column('content'),%0A sql.column('more_content'),%0A )%0A time = datetime(2014, 9, 22, 11, 50, 0)%0A%0A conn = op.get_bind()%0A query = (%0A %22SELECT short_name, description FROM mp_group %22%0A %22WHERE year=2012 %22%0A %22AND description IS NOT NULL%22%0A )%0A data = list(conn.execute(query))%0A%0A for name, description in data:%0A text_id = random_uuid()%0A op.execute(text.insert().values(%7B%0A 'id': text_id,%0A 'ns': 'party',%0A 'name': name,%0A %7D))%0A op.execute(text_version.insert().values(%7B%0A 'id': random_uuid(),%0A 'text_id': text_id,%0A 'time': time,%0A 'content': '%3Cp%3E' + jinja2.escape(description) + '%3C/p%3E',%0A 'more_content': '',%0A %7D))%0A%0A%0Adef downgrade():%0A op.execute(%0A %22DELETE FROM text_version %22%0A %22WHERE text_id IN (SELECT id FROM text WHERE ns = 'party')%22%0A )%0A op.execute(%22DELETE FROM text WHERE ns = 'party'%22)%0A
|
|
f18fd5c4ad61adb56ac7524a006ce9977aa06a31 | Add worker to send queue mails | mailing/management/commands/send_queued_mails_worker.py | mailing/management/commands/send_queued_mails_worker.py | Python | 0 | @@ -0,0 +1,457 @@
+# -*- coding: utf-8 -*-%0A# Copyright (c) 2016 Aladom SAS & Hosting Dvpt SAS%0Afrom django.core.management.base import BaseCommand%0A%0Afrom ...utils import send_queued_mails%0Aimport time%0A%0Aclass Command(BaseCommand):%0A help = %22%22%22Send mails with %60status%60 Mail.STATUS_PENDING and having%0A %60scheduled_on%60 set on a past date. In daemon mode.%22%22%22%0A%0A def handle(self, *args, **options):%0A while True:%0A send_queued_mails()%0A time.sleep(15)%0A%0A
|
|
5a7081c5c46a050566477adda19d30844192ceb2 | Add migration to add authtokens for existing users | src/mmw/apps/user/migrations/0002_auth_tokens.py | src/mmw/apps/user/migrations/0002_auth_tokens.py | Python | 0 | @@ -0,0 +1,584 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0Afrom django.conf import settings%0Afrom django.contrib.auth.models import User%0Afrom rest_framework.authtoken.models import Token%0A%0A%0Adef add_auth_tokens_to_users(apps, schema_editor):%0A for user in User.objects.all():%0A Token.objects.create(user=user)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('authtoken', '0001_initial'),%0A ('user', '0001_initial')%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(add_auth_tokens_to_users)%0A %5D%0A
|
|
31b309c1f5981a10207e85950ef8139018afd37c | add roles urls | src/python/expedient/clearinghouse/roles/urls.py | src/python/expedient/clearinghouse/roles/urls.py | Python | 0.000001 | @@ -0,0 +1,303 @@
+'''%0ACreated on Jul 29, 2010%0A%0A@author: jnaous%0A'''%0Afrom django.conf.urls.defaults import patterns, url%0A%0Aurlpatterns = patterns(%22expedient.clearinghouse.roles.views%22,%0A url(r%22%5Econfirm/(?P%3Cproj_id%3E%5Cd+)/(?P%3Creq_id%3E%5Cd+)/(?P%3Callow%3E%5Cd)/(?P%3Cdelegate%3E%5Cd)/$%22, %22confirm_request%22, name=%22roles_confirm_request%22),%0A)%0A
|
|
14e55d45428c617507c5c161f4d33154849f63a5 | Create Endings.py | Edabit/Endings.py | Edabit/Endings.py | Python | 0.000002 | @@ -0,0 +1,164 @@
+#!/usr/bin/env python3%0A'''%0ACreate a function that adds a string ending to each member in a list.%0A'''%0Adef add_ending(lst, ending):%0A%09return %5Bi + ending for i in lst%5D%0A
|
|
ab53993b708b3f9cf3b5762664fef58bae99ea20 | Add some code to auto-remove Ltac | recursive_remove_ltac.py | recursive_remove_ltac.py | Python | 0.000001 | @@ -0,0 +1,856 @@
+import re%0A%0A__all__ = %5B%22recursively_remove_ltac%22%5D%0A%0ALTAC_REG = re.compile(r'%5E%5Cs*(?:Local%5Cs+%7CGlobal%5Cs+)?Ltac%5Cs+(%5B%5E%5Cs%5D+)', re.MULTILINE)%0A%0Adef recursively_remove_ltac(statements, exclude_n=3):%0A %22%22%22Removes any Ltac statement which is not used later in%0A statements. Does not remove any code in the last exclude_n%0A statements.%22%22%22%0A rtn = list(reversed(statements))%5B:exclude_n%5D%0A for statement in reversed(statements)%5Bexclude_n:%5D:%0A match = LTAC_REG.search(statement)%0A if match:%0A ltac_name = match.groups()%5B0%5D%0A # search for the name of the tactic, by itself%0A reg = re.compile('%5Cb%25s%5Cb' %25 ltac_name, re.MULTILINE)%0A if any(reg.search(other_statement) for other_statement in rtn):%0A rtn.append(statement)%0A else:%0A rtn.append(statement)%0A return list(reversed(rtn))%0A
|
|
cd6eebfecab9b93863e7e20acec1ba0481f6b95f | Fix benchmark naming in reporting | tensorflow/python/eager/benchmarks_test_base.py | tensorflow/python/eager/benchmarks_test_base.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Benchmark base to run and report benchmark results."""
from __future__ import absolute_import as _absolute_import
from __future__ import division as _division
from __future__ import print_function as _print_function
from tensorflow.python.eager import test
class MicroBenchmarksBase(test.Benchmark):
"""Run and report benchmark results."""
def run_report(self, run_benchmark, func, num_iters, execution_mode=None):
"""Run and report benchmark results."""
total_time = run_benchmark(func, num_iters, execution_mode)
mean_us = total_time * 1e6 / num_iters
extras = {
"examples_per_sec": float("{0:.3f}".format(num_iters / total_time)),
"us_per_example": float("{0:.3f}".format(total_time * 1e6 / num_iters))
}
self.report_benchmark(iters=num_iters, wall_time=mean_us, extras=extras)
| Python | 0.000478 | @@ -1439,16 +1439,64 @@
)%0A %7D%0A
+ benchmark_name = self._get_benchmark_name()%0A
self
@@ -1513,16 +1513,25 @@
nchmark(
+%0A
iters=nu
@@ -1571,10 +1571,31 @@
s=extras
+, name=benchmark_name
)%0A
|
19b77a282b1ade7788ae394f22ac0bd7b0a2ce76 | document target_column when Y is dataframe | tensorflow/python/estimator/inputs/pandas_io.py | tensorflow/python/estimator/inputs/pandas_io.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Methods to allow pandas.DataFrame."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.estimator.inputs.queues import feeding_functions
from tensorflow.python.util.tf_export import tf_export
try:
# pylint: disable=g-import-not-at-top
# pylint: disable=unused-import
import pandas as pd
HAS_PANDAS = True
except IOError:
# Pandas writes a temporary file during import. If it fails, don't use pandas.
HAS_PANDAS = False
except ImportError:
HAS_PANDAS = False
def _get_unique_target_key(features, target_column_name):
"""Returns a key that does not exist in the input DataFrame `features`.
Args:
features: DataFrame
target_column_name: Name of the target column as a `str`
Returns:
A unique key that can be used to insert the target into
features.
"""
while target_column_name in features:
target_column_name += '_n'
return target_column_name
@tf_export('estimator.inputs.pandas_input_fn')
def pandas_input_fn(x,
y=None,
batch_size=128,
num_epochs=1,
shuffle=None,
queue_capacity=1000,
num_threads=1,
target_column='target'):
"""Returns input function that would feed Pandas DataFrame into the model.
Note: `y`'s index must match `x`'s index.
Args:
x: pandas `DataFrame` object.
y: pandas `Series` object or `DataFrame`. `None` if absent.
batch_size: int, size of batches to return.
num_epochs: int, number of epochs to iterate over data. If not `None`,
read attempts that would exceed this value will raise `OutOfRangeError`.
shuffle: bool, whether to read the records in random order.
queue_capacity: int, size of the read queue. If `None`, it will be set
roughly to the size of `x`.
num_threads: Integer, number of threads used for reading and enqueueing. In
order to have predicted and repeatable order of reading and enqueueing,
such as in prediction and evaluation mode, `num_threads` should be 1.
target_column: str, name to give the target column `y`.
Returns:
Function, that has signature of ()->(dict of `features`, `target`)
Raises:
ValueError: if `x` already contains a column with the same name as `y`, or
if the indexes of `x` and `y` don't match.
TypeError: `shuffle` is not bool.
"""
if not HAS_PANDAS:
raise TypeError(
'pandas_input_fn should not be called without pandas installed')
if not isinstance(shuffle, bool):
raise TypeError('shuffle must be explicitly set as boolean; '
'got {}'.format(shuffle))
x = x.copy()
if y is not None:
if target_column in x:
raise ValueError(
'Cannot use name %s for target column: DataFrame already has a '
'column with that name: %s' % (target_column, x.columns))
if not np.array_equal(x.index, y.index):
raise ValueError('Index for x and y are mismatched.\nIndex for x: %s\n'
'Index for y: %s\n' % (x.index, y.index))
if isinstance(y, pd.DataFrame):
y_columns = [(column, _get_unique_target_key(x, column)) for column in list(y)]
target_column = [v for _, v in y_columns]
x[target_column] = y
else:
x[target_column] = y
# TODO(mdan): These are memory copies. We probably don't need 4x slack space.
# The sizes below are consistent with what I've seen elsewhere.
if queue_capacity is None:
if shuffle:
queue_capacity = 4 * len(x)
else:
queue_capacity = len(x)
min_after_dequeue = max(queue_capacity / 4, 1)
def input_fn():
"""Pandas input function."""
queue = feeding_functions._enqueue_data( # pylint: disable=protected-access
x,
queue_capacity,
shuffle=shuffle,
min_after_dequeue=min_after_dequeue,
num_threads=num_threads,
enqueue_size=batch_size,
num_epochs=num_epochs)
if num_epochs is None:
features = queue.dequeue_many(batch_size)
else:
features = queue.dequeue_up_to(batch_size)
assert len(features) == len(x.columns) + 1, ('Features should have one '
'extra element for the index.')
features = features[1:]
features = dict(zip(list(x.columns), features))
if y is not None:
if isinstance(target_column, list):
keys = [k for k, _ in y_columns]
values = [features.pop(column) for column in target_column]
target = {k: v for k, v in zip(keys, values)}
else:
target = features.pop(target_column)
return features, target
return features
return input_fn
| Python | 0.000011 | @@ -2905,16 +2905,76 @@
umn %60y%60.
+ This parameter%0A is not used when %60y%60 is a %60DataFrame%60.
%0A%0A Retu
|
dccb0f292c86da942c5e4493a5e117e5f3047a05 | add aiohttp exercise | aiohttp_ext.py | aiohttp_ext.py | Python | 0 | @@ -0,0 +1,752 @@
+import asyncio%0A%0Afrom aiohttp import web%0A%0Aasync def index(request):%0A await asyncio.sleep(0.5)%0A return web.Response(body=b'%3Ch1%3EIndex%3C/h1%3E',content_type='text/html')%0A%0Aasync def hello(request):%0A await asyncio.sleep(0.5)%0A text = '%3Ch1%3Ehello, %25s%3C/h1%3E' %25 request.match_info%5B'name'%5D%0A return web.Response(body=text.encode('utf-8'), content_type='text/html')%0A%0Aasync def init(loop):%0A app = web.Application(loop=loop)%0A app.router.add_route('GET', '/', index)%0A app.router.add_route('GET', '/hello/%7Bname%7D', hello)%0A srv = await loop.create_server(app.make_handler(), '127.0.0.1', 8000)%0A print('Server started at http://127.0.0.1:8000')%0A return srv%0A%0Aloop = asyncio.get_event_loop()%0Aloop.run_until_complete(init(loop))%0Aloop.run_forever()
|
|
5788864141c2b635a3c0b8358d868fa7e2b5e789 | Create Pedido_Cadastrar.py | backend/Models/Turma/Pedido_Cadastrar.py | backend/Models/Turma/Pedido_Cadastrar.py | Python | 0 | @@ -0,0 +1,460 @@
+from Framework.Pedido import Pedido%0Afrom Framework.ErroNoHTTP import ErroNoHTTP%0A%0Aclass PedidoCadastrar(Pedido):%0A%0A%09def __init__(self,variaveis_do_ambiente):%0A%09%09super(PedidoCadastrar, self).__init__(variaveis_do_ambiente)%0A try: %0A %0A%09%09%09self.letra = self.corpo%5B'letra'%5D%0A%09%09%09self.id_disciplina = self.corpo%5B'id_dsciplina'%5D%0A%09%09except:%0A%09%09%09raise ErroNoHTTP(400)%0A%0A%09def getLetra(self):%0A%09%09return self.letra%0A%0A%0A%09def getId_disciplina(self):%0A%09%09return self.id_disciplina%0A
|