commit | subject | old_file | new_file | old_contents | lang | proba | diff
---|---|---|---|---|---|---|---
79f119e81ab855164edd83408d0fc344801a2f64 | fix naming | tests/test_netgen.py | tests/test_netgen.py | import pathlib
import tempfile
import numpy as np
import pytest
import meshio
from . import helpers
test_set = [
helpers.empty_mesh,
helpers.line_mesh,
helpers.tri_mesh_2d,
helpers.tri_mesh,
helpers.triangle6_mesh,
helpers.quad_mesh,
helpers.quad8_mesh,
helpers.tri_quad_mesh,
helpers.tet_mesh,
helpers.tet10_mesh,
helpers.hex_mesh,
helpers.hex20_mesh,
helpers.pyramid_mesh,
helpers.wedge_mesh,
]
netgetn_mesh_directory = pathlib.Path("meshes/netgen")
PERIODIC_1D = "periodic_1d.vol"
PERIODIC_2D = "periodic_2d.vol"
PERIODIC_3D = "periodic_3d.vol"
netgen_meshes = [PERIODIC_1D, PERIODIC_2D, PERIODIC_3D]
@pytest.mark.parametrize("mesh", test_set)
def test(mesh):
helpers.write_read(
meshio.netgen.write, meshio.netgen.read, mesh, 1.0e-13, extension=".vol"
)
helpers.write_read(
meshio.netgen.write, meshio.netgen.read, mesh, 1.0e-13, extension=".vol.gz"
)
expected_identifications = {
PERIODIC_1D: np.array([[1, 51, 1]]),
PERIODIC_2D: np.array(
[[2, 1, 4], [3, 4, 4], [9, 17, 4], [10, 18, 4], [11, 19, 4], [12, 20, 4]]
),
PERIODIC_3D: np.array(
[
[1, 3, 1],
[2, 5, 1],
[4, 7, 1],
[6, 8, 1],
[9, 11, 1],
[10, 12, 1],
[15, 13, 1],
[16, 14, 1],
[21, 19, 1],
[22, 20, 1],
[25, 23, 1],
[26, 24, 1],
[38, 54, 1],
[39, 55, 1],
[40, 56, 1],
]
),
}
expected_identificationtypes = {
PERIODIC_1D: np.array([[2]]),
PERIODIC_2D: np.array([[1, 1, 1, 2]]),
PERIODIC_3D: np.array([[2]]),
}
expected_field_data = {
PERIODIC_1D: {},
PERIODIC_2D: {"outer": [3, 1], "periodic": [4, 1]},
PERIODIC_3D: {"outer": [6, 2], "default": [3, 2]},
}
@pytest.mark.parametrize("netgen_mesh", [PERIODIC_1D, PERIODIC_2D, PERIODIC_3D])
def test_advanced(netgen_mesh):
mesh = meshio.read(str(netgetn_mesh_directory / netgen_mesh))
with tempfile.TemporaryDirectory() as temp_dir:
p = pathlib.Path(temp_dir) / ("{:s}_out.vol".format(netgen_mesh))
mesh.write(p)
mesh_out = meshio.read(p)
assert np.all(
mesh.info["netgen:identifications"] == expected_identifications[netgen_mesh]
)
assert np.all(
mesh.info["netgen:identifications"] == mesh_out.info["netgen:identifications"]
)
assert np.all(
mesh.info["netgen:identificationtypes"]
== expected_identificationtypes[netgen_mesh]
)
assert np.all(
mesh.info["netgen:identificationtypes"]
== mesh_out.info["netgen:identificationtypes"]
)
for kk, vv in mesh.field_data.items():
assert np.all(vv == expected_field_data[netgen_mesh][kk])
assert np.all(vv == mesh_out.field_data[kk])
for cd, cd_out in zip(
mesh.cell_data["netgen:index"], mesh_out.cell_data["netgen:index"]
):
assert np.all(cd == cd_out)
| Python | 0.000001 | @@ -453,25 +453,24 @@
h,%0A%5D%0A%0A%0Anetge
-t
n_mesh_direc
@@ -2009,17 +2009,16 @@
tr(netge
-t
n_mesh_d
|
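A note on the `diff` column, inferred from the hunks above: the `@@ -453,25 +453,24 @@` headers give character offsets rather than line numbers, and whitespace inside hunk bodies is percent-encoded (`%0A` is a newline, `%5B`/`%5D` are brackets). A minimal sketch for turning one hunk line back into readable text, using only the standard library:

```python
from urllib.parse import unquote

hunk_line = " h,%0A%5D%0A%0A%0Anetge"  # a context line copied from the hunk above
op, body = hunk_line[0], hunk_line[1:]  # leading char is the op: ' ', '+' or '-'
print(op, repr(unquote(body)))          # ' ' 'h,\n]\n\n\nnetge'
```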
66eddf04efd46fb3dbeae34c4d82f673a88be70f | Test the ability to add phone to the person | tests/test_person.py | tests/test_person.py | from copy import copy
from unittest import TestCase
from address_book import Person
class PersonTestCase(TestCase):
def test_get_groups(self):
pass
def test_add_address(self):
basic_address = ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42']
person = Person(
'John',
'Doe',
copy(basic_address),
['+79834772053'],
['[email protected]']
)
person.add_address('new address')
self.assertEqual(
person.addresses,
basic_address + ['new address']
)
def test_add_phone(self):
pass
def test_add_email(self):
pass | Python | 0.000032 | @@ -643,36 +643,378 @@
(self):%0A
-pass
+basic_phone = %5B'+79237778492'%5D%0A person = Person(%0A 'John',%0A 'Doe',%0A copy(basic_phone),%0A %5B'+79834772053'%5D,%0A %5B'[email protected]'%5D%0A )%0A person.add_phone('+79234478810')%0A self.assertEqual(%0A person.addresses,%0A basic_phone + %5B'+79234478810'%5D%0A )
%0A%0A def test_a
|
5017ee713fd03902aa502836654e1961fb7575f1 | test form action url | tests/test_plugin.py | tests/test_plugin.py | from bs4 import BeautifulSoup
from cms.api import add_plugin
from cms.models import Placeholder
from django.test import TestCase
from cmsplugin_feedback.cms_plugins import FeedbackPlugin, \
DEFAULT_FORM_FIELDS_ID, DEFAULT_FORM_CLASS
from cmsplugin_feedback.forms import FeedbackMessageForm
class FeedbackPluginTests(TestCase):
def setUp(self):
self.placeholder = Placeholder.objects.create(slot='test')
def add_plugin(self, **kwargs):
model_instance = add_plugin(
self.placeholder,
FeedbackPlugin,
'en',
**kwargs)
return model_instance
def test_plugin_context(self):
model = self.add_plugin()
plugin = model.get_plugin_class_instance()
context = plugin.render({}, model, None)
self.assertIn('form', context)
self.assertIsInstance(context['form'], FeedbackMessageForm)
self.assertEqual(context['form'].auto_id, DEFAULT_FORM_FIELDS_ID)
self.assertIn('form_class', context)
self.assertEqual(context['form_class'], DEFAULT_FORM_CLASS)
def test_form_title(self):
title = 'Feedback Form'
plugin = self.add_plugin(title=title)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.h1.string, title)
def test_default_submit_button(self):
plugin = self.add_plugin()
self.assertTrue(plugin.submit)
default = plugin._meta.get_field_by_name('submit')[0].default
self.assertEqual(plugin.submit, default)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.find(type='submit').string, default)
def test_submit_button(self):
text = 'Send'
plugin = self.add_plugin(submit=text)
default = plugin._meta.get_field_by_name('submit')[0].default
self.assertNotEqual(text, default)
self.assertEqual(plugin.submit, text)
html = plugin.render_plugin({})
soup = BeautifulSoup(html)
self.assertEqual(soup.find(type='submit').string, text)
| Python | 0.000002 | @@ -89,16 +89,61 @@
eholder%0A
+from django.core.urlresolvers import reverse%0A
from dja
@@ -2132,16 +2132,278 @@
).string, text)%0A
+%0A def test_form_action_url(self):%0A plugin = self.add_plugin()%0A html = plugin.render_plugin(%7B%7D)%0A soup = BeautifulSoup(html)%0A self.assertEqual(%0A soup.form%5B'action'%5D,%0A reverse('feedback-form', args=%5Bplugin.id%5D))%0A
|
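The added test relies on Django's named-URL reversal to predict the form action. A minimal sketch of that lookup, assuming a URL pattern named `feedback-form` that takes the plugin id (the import path matches the Django version used above; newer Django moved it to `django.urls`):

```python
from django.core.urlresolvers import reverse

# Resolves the named pattern to a concrete path, e.g. '/feedback/42/';
# requires a configured URLconf containing the 'feedback-form' pattern.
url = reverse('feedback-form', args=[42])
```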
5e2b9410a7db019e4ad1056ec0a3d507374e5e4b | Make sure that get_user_config is called in replay.dump | tests/test_replay.py | tests/test_replay.py | # -*- coding: utf-8 -*-
"""
test_replay
-----------
"""
import json
import os
import pytest
from cookiecutter import replay, utils
from cookiecutter.config import get_user_config
@pytest.fixture
def replay_dir():
"""Fixture to return the expected replay directory."""
return os.path.expanduser('~/.cookiecutter_replay/')
def test_get_user_config(mocker, replay_dir):
"""Test that get_user_config holds the correct replay_dir."""
mocker.patch('os.path.exists', return_value=False)
config_dict = get_user_config()
assert 'replay_dir' in config_dict
assert config_dict['replay_dir'] == replay_dir
@pytest.fixture
def template_name():
"""Fixture to return a valid template_name."""
return 'cookiedozer'
@pytest.fixture
def context():
"""Fixture to return a valid context as known from a cookiecutter.json."""
return {
u'email': u'[email protected]',
u'full_name': u'Raphael Pierzina',
u'github_username': u'hackebrot',
u'version': u'0.1.0',
}
def test_dump_value_error_if_no_template_name(context):
"""Test that replay.dump raises if the tempate_name is not a valid str."""
with pytest.raises(ValueError):
replay.dump(None, context)
def test_dump_type_error_if_not_dict_context(template_name):
"""Test that replay.dump raises if the context is not of type dict."""
with pytest.raises(TypeError):
replay.dump(template_name, 'not_a_dict')
@pytest.fixture
def cleanup_replay_dir(request, replay_dir):
"""Fixture to remove the replay_dir that is created by replay.dump."""
def remove_dir():
if os.path.isdir(replay_dir):
utils.rmtree(replay_dir)
request.addfinalizer(remove_dir)
@pytest.mark.usefixtures('cleanup_replay_dir')
def test_raise_if_replay_dir_creation_fails(
mocker, template_name, context, replay_dir):
"""Test that replay.dump raises when the replay_dir cannot be created."""
mock_ensure = mocker.patch(
'cookiecutter.replay.make_sure_path_exists',
return_value=False
)
with pytest.raises(IOError):
replay.dump(template_name, context)
mock_ensure.assert_called_once_with(replay_dir)
@pytest.mark.usefixtures('cleanup_replay_dir')
def test_run_json_dump(
mocker, template_name, context, replay_dir):
"""Test that replay.dump runs json.dump under the hood and that the context
is correctly written to the expected file in the replay_dir.
"""
spy_ensure = mocker.spy(
'cookiecutter.replay.make_sure_path_exists',
)
spy_json_dump = mocker.spy('json.dump')
replay.dump(template_name, context)
spy_ensure.assert_called_once_with(replay_dir)
assert spy_json_dump.called == 1
replay_dir = os.path.expanduser('~/.cookiecutter_replay/')
replay_file = os.path.join(replay_dir, template_name)
with open(replay_file, 'r') as f:
dumped_context = json.load(f)
assert dumped_context == context
| Python | 0.000005 | @@ -2615,16 +2615,143 @@
dump')%0A%0A
+ mock_get_user_config = mocker.patch(%0A 'cookiecutter.config.get_user_config',%0A return_value=replay_dir%0A )%0A%0A
repl
@@ -2870,16 +2870,60 @@
led == 1
+%0A assert mock_get_user_config.called == 1
%0A%0A re
|
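The commit verifies the call through `pytest-mock`'s `mocker` fixture instead of touching real user config. The core of that pattern in isolation (the patched path is taken from the test above; the return value is a made-up stand-in):

```python
def test_dump_reads_user_config(mocker):
    fake_config = mocker.patch(
        'cookiecutter.config.get_user_config',
        return_value={'replay_dir': '/tmp/replay'},  # hypothetical config
    )
    # ... exercise replay.dump(...) here ...
    assert fake_config.called
```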
840691a6cb226472fff4a25d2b255483ab80cb4b | refactor outcomes to helper assertion | tests/test_socket.py | tests/test_socket.py | # -*- coding: utf-8 -*-
import pytest
from pytest_socket import enable_socket
PYFILE_SOCKET_USED_IN_TEST = """
import socket
def test_socket():
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
"""
@pytest.fixture(autouse=True)
def reenable_socket():
# The tests can leave the socket disabled in the global scope.
# Fix that by automatically re-enabling it after each test
yield
enable_socket()
def assert_socket_blocked(result):
result.stdout.fnmatch_lines("""
*SocketBlockedError: A test tried to use socket.socket.*
""")
def test_socket_enabled_by_default(testdir):
testdir.makepyfile(PYFILE_SOCKET_USED_IN_TEST)
result = testdir.runpytest("--verbose")
result.assert_outcomes(1, 0, 0)
with pytest.raises(BaseException):
assert_socket_blocked(result)
def test_global_disable_via_fixture(testdir):
testdir.makepyfile("""
import pytest
import pytest_socket
import socket
@pytest.fixture(autouse=True)
def disable_socket_for_all():
pytest_socket.disable_socket()
def test_socket():
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(0, 0, 1)
assert_socket_blocked(result)
def test_global_disable_via_cli_flag(testdir):
testdir.makepyfile(PYFILE_SOCKET_USED_IN_TEST)
result = testdir.runpytest("--verbose", "--disable-socket")
result.assert_outcomes(0, 0, 1)
assert_socket_blocked(result)
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
result.stdout.fnmatch_lines([
'socket:',
'*--disable-socket*Disable socket.socket by default to block network*'
])
def test_global_disable_via_config(testdir):
testdir.makepyfile(PYFILE_SOCKET_USED_IN_TEST)
testdir.makeini("""
[pytest]
addopts = --disable-socket
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(0, 0, 1)
assert_socket_blocked(result)
def test_disable_socket_marker(testdir):
testdir.makepyfile("""
import pytest
import socket
@pytest.mark.disable_socket
def test_socket():
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(0, 0, 1)
assert_socket_blocked(result)
def test_enable_socket_marker(testdir):
testdir.makepyfile("""
import pytest
import socket
@pytest.mark.enable_socket
def test_socket():
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
""")
result = testdir.runpytest("--verbose", "--disable-socket")
result.assert_outcomes(0, 0, 1)
assert_socket_blocked(result)
def test_urllib_succeeds_by_default(testdir):
testdir.makepyfile("""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
def test_disable_socket_urllib():
assert urlopen('http://httpbin.org/get').getcode() == 200
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(1, 0, 0)
def test_enabled_urllib_succeeds(testdir):
testdir.makepyfile("""
import pytest
import pytest_socket
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
@pytest.mark.enable_socket
def test_disable_socket_urllib():
assert urlopen('http://httpbin.org/get').getcode() == 200
""")
result = testdir.runpytest("--verbose", "--disable-socket")
result.assert_outcomes(0, 0, 1)
assert_socket_blocked(result)
def test_disabled_urllib_fails(testdir):
testdir.makepyfile("""
import pytest
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
@pytest.mark.disable_socket
def test_disable_socket_urllib():
assert urlopen('http://httpbin.org/get').getcode() == 200
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(0, 0, 1)
assert_socket_blocked(result)
def test_double_call_does_nothing(testdir):
testdir.makepyfile("""
import pytest
import pytest_socket
import socket
def test_double_enabled():
pytest_socket.enable_socket()
pytest_socket.enable_socket()
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def test_double_disabled():
pytest_socket.disable_socket()
pytest_socket.disable_socket()
with pytest.raises(pytest_socket.SocketBlockedError):
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def test_disable_enable():
pytest_socket.disable_socket()
pytest_socket.enable_socket()
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(3, 0, 0)
with pytest.raises(BaseException):
assert_socket_blocked(result)
def test_socket_enabled_fixture(testdir, socket_enabled):
testdir.makepyfile("""
import socket
def test_socket_enabled(socket_enabled):
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
""")
result = testdir.runpytest("--verbose")
result.assert_outcomes(1, 0, 0)
with pytest.raises(BaseException):
assert_socket_blocked(result)
def test_mix_and_match(testdir, socket_enabled):
testdir.makepyfile("""
import socket
def test_socket1():
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def test_socket_enabled(socket_enabled):
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def test_socket2():
socket.socket(socket.AF_INET, socket.SOCK_STREAM)
""")
result = testdir.runpytest("--verbose", "--disable-socket")
result.assert_outcomes(1, 0, 2)
| Python | 0.000007 | @@ -479,16 +479,74 @@
esult):%0A
+ result.assert_outcomes(passed=0, skipped=0, failed=1)%0A
resu
@@ -569,24 +569,24 @@
h_lines(%22%22%22%0A
-
*Soc
@@ -1317,44 +1317,8 @@
e%22)%0A
- result.assert_outcomes(0, 0, 1)%0A
@@ -1515,44 +1515,8 @@
t%22)%0A
- result.assert_outcomes(0, 0, 1)%0A
@@ -2005,44 +2005,8 @@
e%22)%0A
- result.assert_outcomes(0, 0, 1)%0A
@@ -2332,44 +2332,8 @@
e%22)%0A
- result.assert_outcomes(0, 0, 1)%0A
@@ -2677,44 +2677,8 @@
t%22)%0A
- result.assert_outcomes(0, 0, 1)%0A
@@ -3588,44 +3588,8 @@
t%22)%0A
- result.assert_outcomes(0, 0, 1)%0A
@@ -3988,32 +3988,32 @@
== 200%0A %22%22%22)%0A
+
result = tes
@@ -4044,44 +4044,8 @@
e%22)%0A
- result.assert_outcomes(0, 0, 1)%0A
|
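Besides folding the repeated outcome checks into `assert_socket_blocked`, the refactor switches `assert_outcomes` to keyword arguments. Both spellings below are equivalent for pytest's `RunResult`, but the keyword form states the expectation explicitly:

```python
# inside a test using pytest's testdir fixture
result.assert_outcomes(0, 0, 1)                        # positional: passed, skipped, failed
result.assert_outcomes(passed=0, skipped=0, failed=1)  # same check, self-documenting
```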
02bd3772fcf20d9dc54bd94c125c2efc6ae01537 | Make sure structs are pickleable | tests/test_struct.py | tests/test_struct.py | #!/usr/bin/env python
"""
Contains various tests for the `Struct` class of the base module.
:file: StructTests.py
:date: 30/08/2015
:authors:
- Gilad Naaman <[email protected]>
"""
from .utils import *
#########################
# "Structs" for testing #
#########################
class SmallStruct(Struct):
only_element = u8
class SimpleStruct(Struct):
b_first_variable = u8(0xDE)
a_second_variable = u16(0xCAFE)
x_third_variable = u8(0xAD)
class ComplicatedStruct(Struct):
other_struct = SmallStruct
some_field = SimpleStruct[3]
numeric = u32
##############
# Test Cases #
##############
class StructTests(HydrasTestCase):
""" A testcase checking for a few of `Struct`'s features. """
def test_serialize_simple(self):
""" Test serialization of a simple struct. """
obj = SimpleStruct()
raw_data = bytes(obj)
self.assertEqual(raw_data, b'\xDE\xFE\xCA\xAD')
def test_one_does_not_complicatedly(self):
""" Test serialization and deserialization of a more complicated struct."""
s = ComplicatedStruct()
s.numeric = 0xAEAEAEAE
data = s.serialize()
# Test serialization.
self.assertEqual(data, b'\x00\xDE\xFE\xCA\xAD\xDE\xFE\xCA\xAD\xDE\xFE\xCA\xAD\xAE\xAE\xAE\xAE')
# Test deserialization.
d_s = ComplicatedStruct.deserialize(data)
self.assertEqual(d_s, s)
def test_dict_conversion(self):
d = dict(ComplicatedStruct())
expected_dict = {
'other_struct': {'only_element': 0},
'some_field': [
{'b_first_variable': 0xDE, 'a_second_variable': 0xCAFE, 'x_third_variable': 0xAD},
{'b_first_variable': 0xDE, 'a_second_variable': 0xCAFE, 'x_third_variable': 0xAD},
{'b_first_variable': 0xDE, 'a_second_variable': 0xCAFE, 'x_third_variable': 0xAD},
],
'numeric': 0
}
self.assertEqual(d, expected_dict)
def test_derived_struct(self):
class DerivedStruct(SimpleStruct):
derived = u8
class DerivedStructEmpty(SimpleStruct):
pass
simple = SimpleStruct()
derived = DerivedStruct()
empty = DerivedStructEmpty()
self.assertEqual(simple.serialize() + b'\x00', derived.serialize())
self.assertEqual(simple.serialize(), empty.serialize())
def test_invalid_multiple_derives(self):
class A(Struct):
a = u8
class B(Struct):
b = u8
with self.assertRaises(TypeError):
class C(A, B):
pass
if __name__ == '__main__':
unittest.main()
| Python | 0.000225 | @@ -2659,16 +2659,276 @@
pass%0A%0A
+ def test_pickles(self):%0A import pickle%0A o = pickle.loads(pickle.dumps(SimpleStruct()))%0A self.assertEqual(o, SimpleStruct())%0A%0A o = pickle.loads(pickle.dumps(ComplicatedStruct()))%0A self.assertEqual(o, ComplicatedStruct())%0A%0A
%0Aif __na
|
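Classes built on a custom metaclass, like these `Struct` subclasses, are a common source of pickling failures, which is why the new test is a plain serialize/deserialize round trip. The idiom in isolation, independent of this library's types:

```python
import pickle

def assert_pickle_roundtrip(obj):
    # dumps followed by loads must reproduce an equal object
    restored = pickle.loads(pickle.dumps(obj))
    assert restored == obj
```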
28f6af7f84860535a1a82750df286f78320a6856 | Fix monkeypatching | tests/test_things.py | tests/test_things.py | from __future__ import division
import stft
import numpy
import pytest
@pytest.fixture(params=[1, 2])
def channels(request):
return request.param
@pytest.fixture(params=[0, 1, 4])
def padding(request):
return request.param
@pytest.fixture(params=[2048])
def length(request):
return request.param
@pytest.fixture
def signal(channels, length):
return numpy.squeeze(numpy.random.random((length, channels)))
@pytest.fixture(params=[512])
def framelength(request):
return request.param
def test_shape(length, framelength):
a = numpy.squeeze(numpy.random.random((length, 1)))
x = stft.spectrogram(a, framelength=framelength, halved=True)
assert x.shape[0] == framelength / 2 + 1
x_2 = stft.spectrogram(a, framelength=framelength, halved=False)
assert x_2.shape[0] == framelength
def test_windowlength_errors():
"""
Test if way too short signals can be transformed
"""
siglen = 512
framelen = 2048
stft.spectrogram(numpy.random.random(siglen), framelength=framelen)
def test_precision(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.allclose(a, y)
def test_rms(channels, padding, signal, framelength):
"""
Test if transform-inverse identity holds
"""
a = signal
x = stft.spectrogram(a, framelength=framelength, padding=padding)
y = stft.ispectrogram(x, framelength=framelength, padding=padding)
# Crop first and last frame
assert numpy.sqrt(numpy.mean((a - y) ** 2)) < 1e-8
def test_maxdim():
a = numpy.random.random((512, 2, 2))
with pytest.raises(ValueError):
stft.spectrogram(a)
b = numpy.random.random((512, 2, 2, 3))
with pytest.raises(ValueError):
stft.ispectrogram(b)
def test_issue1():
a = numpy.random.random((512, 1))
b = stft.spectrogram(a)
assert b.ndim == 2
def raiser(*args):
raise AttributeError
def test_fallback(monkeypatch):
# Try monkeypatching signal.cosine away.
# Ignore AttributeErrors during monkeypatching, for older scipy versions
try:
import scipy.signal
monkeypatch.setattr("scipy.signal.cosine", raiser)
except AttributeError:
pass
return test_windowlength_errors()
| Python | 0.000001 | @@ -2298,21 +2298,8 @@
ons%0A
- try:%0A
@@ -2318,16 +2318,25 @@
.signal%0A
+ try:%0A
@@ -2397,30 +2397,25 @@
except
-AttributeError
+Exception
:%0A
|
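The fix moves the import out of the `try` block and broadens the `except`, since on older scipy the failure surfaces while patching, not while importing. A hedged alternative with the same intent: pytest's `monkeypatch.setattr` accepts `raising=False`, which tolerates the attribute being absent in the first place:

```python
import scipy.signal

def test_fallback(monkeypatch):
    # raising=False: set the attribute even if scipy.signal lacks 'cosine'
    monkeypatch.setattr(scipy.signal, "cosine", raiser, raising=False)
    return test_windowlength_errors()
```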
551e0f44d9da2d5ac39912b8b5787505deb5588c | remove old tests | tests/test_timing.py | tests/test_timing.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import librosa
from amen.audio import Audio
from amen.utils import example_audio_file
from amen.utils import example_mono_audio_file
from amen.timing import TimeSlice
t = 5
d = 10
dummy_audio = None
time_slice = TimeSlice(t, d, dummy_audio)
def test_time():
assert(time_slice.time == pd.to_timedelta(t, 's'))
def test_duration():
assert(time_slice.duration == pd.to_timedelta(d, 's'))
def test_units():
time_slice = TimeSlice(t, d, dummy_audio, unit='ms')
assert(time_slice.time == pd.to_timedelta(t, 'ms'))
faux_samples = np.array([[1,-1, 0, 1, 0, -1, 1],
[1, -1, 0, 1, 0, -1, 1]])
faux_audio = Audio(raw_samples=faux_samples)
time_slice = TimeSlice(t, d, faux_audio)
def test_get_offsets():
left, right, zero_indexes = time_slice._get_offsets(3, 4, faux_audio.num_channels)
assert(left == (-1, 1))
def test_get_offsets_no_zero_indexes():
zero_index = np.array([0, 1, 2, 5, 6])
real_zero_indexes = [zero_index, zero_index]
left, right, zero_indexes = time_slice._get_offsets(3, 4, faux_audio.num_channels)
assert(zero_indexes[0].all() == real_zero_indexes[0].all())
def test_get_offsets_with_zero_indexes():
fake = np.array([1, 2, 3])
faux_zero_indexes = [fake, fake]
left, right, zero_indexes = time_slice._get_offsets(3, 4, faux_audio.num_channels, faux_zero_indexes)
assert(zero_indexes == faux_zero_indexes)
EXAMPLE_FILE = example_audio_file()
stereo_audio = Audio(EXAMPLE_FILE)
EXAMPLE_MONO_FILE = example_mono_audio_file()
mono_audio = Audio(EXAMPLE_FILE)
def test_offset_samples():
def __test():
res = audio.timings['beats'][0]._offset_samples(1, 2, (-1, 1), (-1, 1), audio.num_channels)
assert(res.shape == (2, 3))
for audio in [mono_audio, stereo_audio]:
yield __test
def test_get_samples_shape():
def __test():
beat = audio.timings['beats'][0]
start = beat.time.delta * 1e-9
duration = beat.duration.delta * 1e-9
starting_sample, ending_sample = librosa.time_to_samples([start, start + duration], beat.audio.sample_rate)
samples, left_offset, right_offset, _ = beat.get_samples()
left_offsets, right_offsets, _ = beat._get_offsets(starting_sample, ending_sample, beat.audio.num_channels)
duration = beat.duration.delta * 1e-9
starting_sample, ending_sample = librosa.time_to_samples([0, duration], audio.sample_rate)
initial_length = ending_sample - starting_sample
left_offset_length = initial_length - left_offsets[0] + left_offsets[1]
right_offset_length = initial_length - right_offsets[0] + right_offsets[1]
assert(len(samples[0]) == left_offset_length)
assert(len(samples[1]) == right_offset_length)
for audio in [mono_audio, stereo_audio]:
yield __test
def test_get_samples_audio():
def __test():
beat = audio.timings['beats'][0]
samples, left_offset, right_offset, _ = beat.get_samples()
start = beat.time.delta * 1e-9
duration = beat.duration.delta * 1e-9
starting_sample, ending_sample = librosa.time_to_samples([start, start + duration], beat.audio.sample_rate)
left_offsets, right_offsets, _ = beat._get_offsets(starting_sample, ending_sample, beat.audio.num_channels)
start_sample = left_offsets[0] * -1
end_sample = len(samples[0]) - left_offsets[1]
reset_samples = samples[0][start_sample : end_sample]
original_samples = audio.raw_samples[0, starting_sample : ending_sample]
assert(np.array_equiv(reset_samples, original_samples))
for audio in [mono_audio, stereo_audio]:
yield __test
| Python | 0.000743 | @@ -831,38 +831,24 @@
left, right
-, zero_indexes
= time_slic
@@ -927,557 +927,8 @@
) %0A%0A
-def test_get_offsets_no_zero_indexes():%0A zero_index = np.array(%5B0, 1, 2, 5, 6%5D)%0A real_zero_indexes = %5Bzero_index, zero_index%5D%0A%0A left, right, zero_indexes = time_slice._get_offsets(3, 4, faux_audio.num_channels)%0A assert(zero_indexes%5B0%5D.all() == real_zero_indexes%5B0%5D.all())%0A%0Adef test_get_offsets_with_zero_indexes():%0A fake = np.array(%5B1, 2, 3%5D)%0A faux_zero_indexes = %5Bfake, fake%5D%0A%0A left, right, zero_indexes = time_slice._get_offsets(3, 4, faux_audio.num_channels, faux_zero_indexes)%0A assert(zero_indexes == faux_zero_indexes)%0A%0A
EXAM
@@ -1648,35 +1648,32 @@
et, right_offset
-, _
= beat.get_samp
@@ -1705,35 +1705,32 @@
s, right_offsets
-, _
= beat._get_off
@@ -2468,19 +2468,16 @@
t_offset
-, _
= beat.
@@ -2731,11 +2731,8 @@
sets
-, _
= b
|
36200dea5889bdf4ad920adc1ab04ae3870f74ac | Edit varnet model (#5096) | tests/test_varnet.py | tests/test_varnet.py | # Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import torch
from parameterized import parameterized
from monai.apps.reconstruction.networks.nets.coil_sensitivity_model import CoilSensitivityModel
from monai.apps.reconstruction.networks.nets.complex_unet import ComplexUnet
from monai.apps.reconstruction.networks.nets.varnet import VariationalNetworkModel
from monai.networks import eval_mode
from tests.utils import test_script_save
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
coil_sens_model = CoilSensitivityModel(spatial_dims=2, features=[8, 16, 32, 64, 128, 8])
refinement_model = ComplexUnet(spatial_dims=2, features=[8, 16, 32, 64, 128, 8])
num_cascades = 12
TESTS = []
TESTS.append([coil_sens_model, refinement_model, num_cascades, (1, 10, 300, 200, 2), (1, 300, 200)]) # batch=1
TESTS.append([coil_sens_model, refinement_model, num_cascades, (2, 10, 300, 200, 2), (2, 300, 200)]) # batch=2
class TestVarNet(unittest.TestCase):
@parameterized.expand(TESTS)
def test_shape(self, coil_sens_model, refinement_model, num_cascades, input_shape, expected_shape):
net = VariationalNetworkModel(coil_sens_model, refinement_model, num_cascades).to(device)
mask_shape = [1 for _ in input_shape]
mask_shape[-2] = input_shape[-2]
mask = torch.zeros(mask_shape)
mask[..., mask_shape[-2] // 2 - 5 : mask_shape[-2] // 2 + 5, :] = 1
with eval_mode(net):
result = net(torch.randn(input_shape).to(device), mask.byte().to(device))
self.assertEqual(result.shape, expected_shape)
@parameterized.expand(TESTS)
def test_script(self, coil_sens_model, refinement_model, num_cascades, input_shape, expected_shape):
net = VariationalNetworkModel(coil_sens_model, refinement_model, num_cascades)
mask_shape = [1 for _ in input_shape]
mask_shape[-2] = input_shape[-2]
mask = torch.zeros(mask_shape)
mask[..., mask_shape[-2] // 2 - 5 : mask_shape[-2] // 2 + 5, :] = 1
test_data = torch.randn(input_shape)
test_script_save(net, test_data, mask.byte())
if __name__ == "__main__":
unittest.main()
| Python | 0.000004 | @@ -1232,11 +1232,10 @@
s =
-1
2%0A
+
TEST
@@ -1308,27 +1308,24 @@
es, (1,
-10, 300, 20
+3, 50, 5
0, 2), (
@@ -1327,23 +1327,21 @@
2), (1,
-300, 20
+50, 5
0)%5D) #
@@ -1419,19 +1419,16 @@
(2,
-10, 300, 20
+3, 50, 5
0, 2
@@ -1438,15 +1438,13 @@
(2,
-300, 20
+50, 5
0)%5D)
@@ -2029,19 +2029,19 @@
, mask.b
-yte
+ool
().to(de
@@ -2629,11 +2629,11 @@
sk.b
-yte
+ool
())%0A
|
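Alongside shrinking the test tensors to cut CI cost, the commit replaces `mask.byte()` with `mask.bool()`: PyTorch deprecated uint8 masks in favor of boolean tensors for indexing and masked ops. A tiny sketch of the relationship:

```python
import torch

mask = torch.zeros(6)
mask[2:4] = 1
bool_mask = mask.bool()  # dtype torch.bool, the supported mask type
byte_mask = mask.byte()  # dtype torch.uint8, deprecated for masking
assert torch.equal(bool_mask, byte_mask.bool())
```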
ddd3947514900d99bc644b8a791a92807bee4f2c | use mock | tests/test_worker.py | tests/test_worker.py | import unittest
import functools
from tornado import ioloop as ti, gen as tg
from acddl import worker
class TestWorker(unittest.TestCase):
def setUp(self):
self._worker = worker.Worker()
self._worker.start()
self.assertTrue(self._worker.is_alive())
def tearDown(self):
async_call(self._worker.stop)
self.assertFalse(self._worker.is_alive())
def testDo(self):
x = [1]
def fn():
x[0] = 2
async def fn_():
await self._worker.do(fn)
async_call(fn_)
self.assertEqual(x[0], 2)
class TestAsyncWorker(unittest.TestCase):
def setUp(self):
self._worker = worker.AsyncWorker()
self._worker.start()
self.assertTrue(self._worker.is_alive)
def tearDown(self):
async_call(self._worker.stop)
self.assertFalse(self._worker.is_alive)
def testDoWithSync(self):
x = [1]
def fn():
x[0] = 2
return 3
async def fn_():
return await self._worker.do(fn)
rv = async_call(fn_)
self.assertEqual(x[0], 2)
self.assertEqual(rv, 3)
def testDoWithAsync(self):
x = [1]
async def fn():
await tg.moment
x[0] = 2
return 3
async def fn_():
return await self._worker.do(fn)
rv = async_call(fn_)
self.assertEqual(x[0], 2)
self.assertEqual(rv, 3)
def testDoLaterWithSync(self):
x = [1]
def fn():
x[0] = 2
self._worker.do_later(fn)
async_call(functools.partial(tg.sleep, 0.001))
self.assertEqual(x[0], 2)
def testDoLaterWithAsync(self):
x = [1]
async def fn():
await tg.moment
x[0] = 2
self._worker.do_later(fn)
async_call(functools.partial(tg.sleep, 0.001))
self.assertEqual(x[0], 2)
def testDoWithSyncPartial(self):
x = [1]
def fn(rv):
x[0] = 2
return rv
async def fn_():
return await self._worker.do(functools.partial(fn, 3))
rv = async_call(fn_)
self.assertEqual(x[0], 2)
self.assertEqual(rv, 3)
def testDoWithAsyncPartial(self):
x = [1]
async def fn(rv):
await tg.moment
x[0] = 2
return rv
async def fn_():
return await self._worker.do(worker.AsyncTask(fn, 3))
rv = async_call(fn_)
self.assertEqual(x[0], 2)
self.assertEqual(rv, 3)
def testDoLaterWithSyncPartial(self):
x = [1]
def fn(v):
x[0] = v
self._worker.do_later(functools.partial(fn, 2))
async_call(functools.partial(tg.sleep, 0.001))
self.assertEqual(x[0], 2)
def testDoLaterWithAsyncPartial(self):
x = [1]
async def fn(v):
await tg.moment
x[0] = v
self._worker.do_later(worker.AsyncTask(fn, 2))
async_call(functools.partial(tg.sleep, 0.001))
self.assertEqual(x[0], 2)
def async_call(fn):
return ti.IOLoop.instance().run_sync(fn)
| Python | 0.000016 | @@ -4,24 +4,25 @@
ort
+f
un
-ittest
+ctools
%0Aimport
func
@@ -17,25 +17,56 @@
%0Aimport
-functools
+unittest%0Afrom unittest import mock as um
%0A%0Afrom t
@@ -925,30 +925,31 @@
def
-testDoWithSync
+_createSyncMock
(self):%0A
@@ -960,144 +960,105 @@
-x = %5B1%5D%0A def fn():%0A x%5B0%5D = 2%0A return 3%0A async def fn_():%0A return await self._worker.do(fn
+return um.Mock(return_value=42)%0A%0A def testDoWithSync(self):%0A fn = self._createSyncMock(
)%0A
@@ -1071,35 +1071,51 @@
rv = async_call(
+self._worker.do,
fn
-_
)%0A self.a
@@ -1112,32 +1112,35 @@
-sel
f
+n
.assert
-Equal(x%5B0%5D, 2
+_called_once_with(
)%0A
@@ -1158,33 +1158,34 @@
assertEqual(rv,
-3
+42
)%0A%0A def testD
@@ -3123,16 +3123,33 @@
_call(fn
+, *args, **kwargs
):%0A r
@@ -3185,11 +3185,47 @@
n_sync(f
-n
+unctools.partial(fn, *args, **kwargs)
)%0A
|
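The rewrite drops the hand-rolled `x = [1]` side-effect bookkeeping in favor of `unittest.mock.Mock`, which records calls and canned return values by itself. The essence of the pattern:

```python
from unittest import mock

m = mock.Mock(return_value=42)
assert m() == 42             # returns the configured value
m.assert_called_once_with()  # call tracking comes for free
```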
e0a4459f732d605bc1c3d528078c1ea7fd8eb916 | Add short wait function and call before each test. | tests/test_yummly.py | tests/test_yummly.py |
import os
import unittest
import json
import yummly
HERE = os.path.dirname(__file__)
class TestYummly( unittest.TestCase ):
def setUp( self ):
config_file = os.path.join( HERE, 'config.json' )
with open( config_file ) as f:
config = json.load(f)
self.yummly = yummly
self.yummly.api_id = config.get('api_id')
self.yummly.api_key = config.get('api_key')
self.test_recipe_id = 'Hot-Turkey-Salad-Sandwiches-Allrecipes'
# cache recipe results to decrease API hits
self.test_recipe = None
@staticmethod
def verify_fields( expected, actual ):
for field in expected:
assert( field in actual )
return True
def test_search( self ):
'''Test basic search functionality'''
q = 'chicken casserole'
limit = 5
results = self.yummly.search( q, limit=limit )
# verify fields present
expected_fields = [
'attribution',
'totalMatchCount',
'facetCounts',
'matches',
'criteria',
]
assert( TestYummly.verify_fields( expected_fields, results.keys() ) )
# sanity check that our search terms are included
for term in q.split():
assert( term in results['criteria']['terms'] )
# we got some matches
assert( len( results['matches'] ) > 0 )
def test_search_match( self ):
'''Test search match return'''
q = 'chicken'
limit = 1
results = self.yummly.search( q, limit=limit )
match = results['matches'][0]
# verify expected fields
expected_fields = [
'attributes',
'flavors',
'rating',
'id',
'smallImageUrls',
'sourceDisplayName',
'totalTimeInSeconds',
'ingredients',
'recipeName'
]
assert( TestYummly.verify_fields( expected_fields, match.keys() ) )
def test_search_pagination( self ):
'''Test search pagination'''
q = 'chicken casserole'
limit = 10
results = self.yummly.search( q, limit=limit )
# verify limit enforced
len_matches = len( results['matches'] )
assert( len_matches == limit )
# sanity check that grand total of matching recipes is at least as many as matches returned
assert( results['totalMatchCount'] >= len_matches )
# check that offsetting works as expected: offset is number of records to skip
offset = 5
offset_results = self.yummly.search( q, limit=limit, offset=offset )
assert( offset_results['matches'][0]['id'] == results['matches'][offset]['id'] )
def test_recipe( self ):
self.test_recipe = self.test_recipe or self.yummly.recipe( self.test_recipe_id )
# verify API returns expected fields: https://developer.yummly.com/wiki/get-recipe-response-sample
expected_fields = [
'attribution',
'ingredientLines',
'flavors',
'nutritionEstimates',
'images',
'name',
'yield',
'totalTime',
'attributes',
'totalTimeInSeconds',
'rating',
'numberOfServings',
'source',
'id',
]
# verify we received the expected fields
assert( TestYummly.verify_fields( expected_fields, self.test_recipe.keys() ) )
def test_recipe_nutrition( self ):
self.test_recipe = self.test_recipe or self.yummly.recipe( self.test_recipe_id )
nutrition = self.test_recipe['nutritionEstimates'][0]
expected_fields = [
'attribute',
'description',
'value',
'unit',
]
assert( TestYummly.verify_fields( expected_fields, nutrition.keys() ) )
nutrition_unit = nutrition['unit']
expected_unit_fields = [
'name',
'abbreviation',
'plural',
'pluralAbbreviation'
]
assert( TestYummly.verify_fields( expected_unit_fields, nutrition_unit.keys() ) )
def test_recipe_images( self ):
self.test_recipe = self.test_recipe or self.yummly.recipe( self.test_recipe_id )
images = self.test_recipe['images'][0]
expected_fields = [
'hostedLargeUrl',
'hostedSmallUrl',
]
assert( TestYummly.verify_fields( expected_fields, images.keys() ) )
def test_recipe_source( self ):
self.test_recipe = self.test_recipe or self.yummly.recipe( self.test_recipe_id )
source = self.test_recipe['source']
expected_fields = [
'sourceRecipeUrl',
'sourceSiteUrl',
'sourceDisplayName',
]
assert( TestYummly.verify_fields( expected_fields, source.keys() ) )
def test_recipe_from_search( self ):
q = 'chicken'
s = self.yummly.search( q, limit=1 )
search = s['matches'][0]
recipe = self.yummly.recipe( search['id'] )
# ids should match
assert( recipe['id'] == search['id'] )
# both should have same number of ingredients/lines
assert( len(recipe['ingredientLines']) == len(search['ingredients']) )
# same prep+cook time
assert( recipe['totalTimeInSeconds'] == search['totalTimeInSeconds'] )
# same recipe name
assert( recipe['name'] == search['recipeName'] )
# same attributes
assert( recipe['attributes'] == search['attributes'] )
# same display name
assert( recipe['source']['sourceDisplayName'] == search['sourceDisplayName'] )
| Python | 0 | @@ -4,17 +4,16 @@
port os%0A
-%0A
import u
@@ -31,16 +31,39 @@
ort json
+%0Afrom time import sleep
%0A%0Aimport
@@ -590,16 +590,131 @@
= None%0A%0A
+ @staticmethod%0A def wait():%0A # wait some time inbetween tests for throttling self%0A sleep(0.5)%0A%0A
@sta
@@ -929,24 +929,51 @@
onality'''%0A%0A
+ TestYummly.wait()%0A%0A
q
@@ -1755,32 +1755,59 @@
, limit=limit )%0A
+ TestYummly.wait()%0A%0A
%0A match =
@@ -2325,32 +2325,32 @@
cken casserole'%0A
-
limit
@@ -2354,16 +2354,43 @@
= 10%0A
+ TestYummly.wait()%0A%0A
%0A
@@ -5203,32 +5203,60 @@
search( self ):%0A
+%0A TestYummly.wait()%0A%0A
q = 'chi
|
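The added static `wait()` keeps the suite under the Yummly API rate limit by sprinkling sleeps into the tests. Since this suite is `unittest`-based, calling `wait()` from `setUp` would cover every test automatically; in a pytest-style suite the same effect is one autouse fixture (a sketch under that assumption):

```python
import time
import pytest

@pytest.fixture(autouse=True)
def throttle_api_calls():
    yield            # run the test body first
    time.sleep(0.5)  # then pause before the next test hits the API
```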
ec72440af0785b449e8a74b3fcadbfa214a4c304 | Extend tests for _spec_signature | tests/testhelpers.py | tests/testhelpers.py | # Copyright (C) 2007-2011 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from tests.support import unittest2
from mock import Mock, ANY, call, _spec_signature
class SomeClass(object):
def one(self, a, b):
pass
def two(self):
pass
def three(self, a=None):
pass
class AnyTest(unittest2.TestCase):
def test_any(self):
self.assertEqual(ANY, object())
mock = Mock()
mock(ANY)
mock.assert_called_with(ANY)
mock = Mock()
mock(foo=ANY)
mock.assert_called_with(foo=ANY)
def test_repr(self):
self.assertEqual(repr(ANY), '<ANY>')
self.assertEqual(str(ANY), '<ANY>')
class CallTest(unittest2.TestCase):
def test_repr(self):
self.assertEqual(repr(call), '<call>')
self.assertEqual(str(call), '<call>')
self.assertEqual(repr(call.foo), '<call foo>')
def test_call(self):
self.assertEqual(call(), ((), {}))
self.assertEqual(call('foo', 'bar', one=3, two=4),
(('foo', 'bar'), {'one': 3, 'two': 4}))
mock = Mock()
mock(1, 2, 3)
mock(a=3, b=6)
self.assertEqual(mock.call_args_list,
[call(1, 2, 3), call(a=3, b=6)])
def test_attribute_call(self):
self.assertEqual(call.foo(1), ('foo', (1,), {}))
self.assertEqual(call.bar.baz(fish='eggs'),
('bar.baz', (), {'fish': 'eggs'}))
mock = Mock()
mock.foo(1, 2 ,3)
mock.bar.baz(a=3, b=6)
self.assertEqual(mock.method_calls,
[call.foo(1, 2, 3), call.bar.baz(a=3, b=6)])
class SpecSignatureTest(unittest2.TestCase):
def _check_someclass_mock(self, mock):
self.assertRaises(AttributeError, getattr, mock, 'foo')
mock.one(1, 2)
mock.one.assert_called_with(1, 2)
self.assertRaises(AssertionError,
mock.one.assert_called_with, 3, 4)
self.assertRaises(TypeError, mock.one, 1)
mock.two()
mock.two.assert_called_with()
self.assertRaises(AssertionError,
mock.two.assert_called_with, 3)
self.assertRaises(TypeError, mock.two, 1)
mock.three()
mock.three.assert_called_with(None)
self.assertRaises(AssertionError,
mock.three.assert_called_with, 3)
self.assertRaises(TypeError, mock.three, 3, 2)
mock.three(1)
mock.three.assert_called_with(1)
mock.three(a=1)
mock.three.assert_called_with(1)
def test_basic(self):
for spec in (SomeClass, SomeClass()):
mock = _spec_signature(spec)
self._check_someclass_mock(mock)
def test_spec_as_list_fails(self):
self.assertRaises(TypeError, _spec_signature, [])
self.assertRaises(TypeError, _spec_signature, ['foo'])
def test_attributes(self):
class Sub(SomeClass):
attr = SomeClass()
sub_mock = _spec_signature(Sub)
for mock in (sub_mock, sub_mock.attr):
self._check_someclass_mock(mock)
def test_builtin_functions_types(self):
# we could replace builtin functions / methods with a mocksignature
# with *args / **kwargs signature. Using the builtin method type
# as a spec seems to work fairly well though.
class BuiltinSubclass(list):
def bar(self, arg):
pass
sorted = sorted
attr = {}
mock = _spec_signature(BuiltinSubclass)
mock.append(3)
mock.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.append, 'foo')
mock.bar('foo')
mock.bar.assert_called_with('foo')
self.assertRaises(TypeError, mock.bar, 'foo', 'bar')
self.assertRaises(AttributeError, getattr, mock.bar, 'foo')
mock.sorted([1, 2])
mock.sorted.assert_called_with([1, 2])
self.assertRaises(AttributeError, getattr, mock.sorted, 'foo')
mock.attr.pop(3)
mock.attr.pop.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.attr, 'foo')
def test_method_calls(self):
class Sub(SomeClass):
attr = SomeClass()
mock = _spec_signature(Sub)
mock.one(1, 2)
mock.two()
mock.three(3)
expected = [call.one(1, 2), call.two(), call.three(3)]
self.assertEqual(mock.method_calls, expected)
mock.attr.one(1, 2)
mock.attr.two()
mock.attr.three(3)
expected.extend(
[call.attr.one(1, 2), call.attr.two(), call.attr.three(3)]
)
self.assertEqual(mock.method_calls, expected)
def test_magic_methods(self):
pass
def test_spec_set(self):
# a flag indicating whether or not spec_set should be used
pass
def test_property(self):
pass
def test_classmethod(self):
pass
def test_staticmethod(self):
pass
| Python | 0.000001 | @@ -193,16 +193,27 @@
k import
+ MagicMock,
Mock, A
@@ -4281,17 +4281,16 @@
foo')%0A%0A%0A
-%0A
def
@@ -4871,36 +4871,428 @@
(self):%0A
-pass
+class BuiltinSubclass(list):%0A attr = %7B%7D%0A%0A mock = _spec_signature(BuiltinSubclass)%0A self.assertEqual(list(mock), %5B%5D)%0A self.assertRaises(TypeError, int, mock)%0A self.assertRaises(TypeError, int, mock.attr)%0A self.assertEqual(list(mock), %5B%5D)%0A%0A self.assertIsInstance(mock%5B'foo'%5D, MagicMock)%0A self.assertIsInstance(mock.attr%5B'foo'%5D, MagicMock)
%0A%0A%0A def test_
|
3e84dcb7b449db89ca6ce2b91b34a5e8f8428b39 | Allow sub- and superscript tags | core/markdown.py | core/markdown.py | from markdown.extensions import nl2br, sane_lists, fenced_code
from pymdownx import magiclink
from mdx_unimoji import UnimojiExtension
import utils.markdown
markdown_extensions = [
magiclink.MagiclinkExtension(),
nl2br.Nl2BrExtension(),
utils.markdown.ExtendedLinkExtension(),
sane_lists.SaneListExtension(),
fenced_code.FencedCodeExtension(),
utils.markdown.CuddledListExtension(),
UnimojiExtension()
]
content_allowed_tags = (
# text
'p', 'em', 'strong', 'br', 'a', 'img',
# citation
'blockquote', 'cite',
# headings
'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
# lists
'ol', 'ul', 'li',
# code
'pre', 'code'
)
content_allowed_attributes = {
'*': ['id', 'title'],
'a': ['href', 'title', 'data-component', 'data-grouplink-ref'],
'code': ['class'],
'img': ['src', 'alt']
}
| Python | 0.000006 | @@ -505,16 +505,30 @@
, 'img',
+ 'sub', 'sup',
%0A # c
|
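The `content_allowed_tags`/`content_allowed_attributes` pair reads like input to an HTML sanitizer such as bleach (an assumption; the sanitizer itself lives outside this file). With `sub` and `sup` whitelisted, sub- and superscript markup survives cleaning while disallowed tags are escaped:

```python
import bleach

dirty = "H<sub>2</sub>O and x<sup>2</sup> <script>bad()</script>"
print(bleach.clean(dirty, tags=["sub", "sup"]))
# H<sub>2</sub>O and x<sup>2</sup> &lt;script&gt;bad()&lt;/script&gt;
```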
e569f3590348eedd79b6e8e2b933d9919796c598 | Fix settings | core/settings.py | core/settings.py | """
Django settings for core project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
from email.utils import getaddresses
from pathlib import Path
import environ
env = environ.Env()
BASE_DIR = Path(__file__).resolve().parent.parent
if (dotenv := (BASE_DIR / ".env")).exists():
environ.Env.read_env(dotenv)
DEBUG = env.bool('DEBUG', default=False)
SECRET_KEY = env('SECRET_KEY')
DATABASES = {'default': env.db(default='sqlite:///' + (BASE_DIR / 'db.sqlite3').absolute().as_posix()), }
ALLOWED_HOSTS = env.list("ALLOWED_HOSTS", default=["127.0.0.1", "localhost"])
# These are for DigitalOcean Spaces
AWS_S3_REGION_NAME = env("AWS_S3_REGION_NAME", default=None)
AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", default=None)
AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", default=None)
AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY", default=None)
AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME", default=None)
DEFAULT_FILE_STORAGE = env("DEFAULT_FILE_STORAGE", default="django.core.files.storage.FileSystemStorage")
MEDIA_ROOT = BASE_DIR / "scratch" / "media"
MEDIA_URL = env.str("MEDIA_URL", "/media/")
STATIC_ROOT = BASE_DIR / "scratch" / "static"
STATIC_URL = '/static/'
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
EMAIL_BACKEND = env("EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend")
SERVER_EMAIL = env("SERVER_EMAIL", default="root@localhost")
ADMINS = getaddresses([env('DJANGO_ADMINS', default='root@localhost')])
MANAGERS = getaddresses([env('DJANGO_MANAGERS', default='root_localhost')])
TIME_ZONE = 'America/New_York'
LANGUAGE_CODE = 'en-US'
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'invoices',
'adminsortable2',
'storages',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
if DEBUG:
# Inject the debug toolbar
security_index = MIDDLEWARE.index('django.middleware.security.SecurityMiddleware')
MIDDLEWARE.insert(security_index+1, 'debug_toolbar.middleware.DebugToolbarMiddleware')
INSTALLED_APPS.append('debug_toolbar.apps.DebugToolbarConfig')
ROOT_URLCONF = 'core.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'core.wsgi.application'
USE_I18N = True
USE_L10N = True
USE_TZ = True
DEFAULT_AUTO_FIELD='django.db.models.BigAutoField' | Python | 0.000001 | @@ -359,16 +359,76 @@
ort Path
+%0Afrom secrets import choice%0Afrom string import ascii_letters
%0A%0Aimport
@@ -588,16 +588,476 @@
otenv)%0A%0A
+DOCKER_BUILD = env.bool('DOCKER_BUILD', default=False)%0A%0Aif DOCKER_BUILD:%0A # If we are in a Docker build, I want to set a random secret key.%0A # I consider this to be better than setting a secret key in the Dockerfile,%0A # since if it were to somehow get run in production we'd have a misbehaving%0A # system and not one with an exposed secret key%0A secret_default = ''.join(choice(ascii_letters) for _ in range(64))%0Aelse:%0A secret_default = None%0A%0A%0A
DEBUG =
@@ -1118,16 +1118,40 @@
RET_KEY'
+, default=secret_default
)%0A%0ADATAB
|
b3c55b059293d664d3e029b9c3d03203ff4af5a5 | remove ws | resturo/tests/models.py | resturo/tests/models.py | from ..models import Organization as BaseOrganization
from ..models import Membership as BaseMembership
class Organization(BaseOrganization):
"""
"""
class Membership(BaseMembership):
""" Provide non-abstract implementation for Membership model,
define some roles
"""
ROLE_MEMBER = 1
| Python | 0.000054 | @@ -309,9 +309,8 @@
BER = 1%0A
-%0A
|
56cb14754d2084f5660c8c7e16906e13d99973fd | Fix lstm | thinc/layers/lstm.py | thinc/layers/lstm.py | from typing import Optional, Tuple, Callable
from functools import partial
from ..model import Model
from ..backends import Ops
from ..backends.jax_ops import jax_jit
from ..config import registry
from ..util import get_width
from ..types import RNNState, Array2d, Array3d, Padded
from .bidirectional import bidirectional
from .clone import clone
from .linear import Linear
from .noop import noop
from ..initializers import xavier_uniform_init, zero_init
@registry.layers("LSTM.v0")
def LSTM(
nO: Optional[int] = None,
nI: Optional[int] = None,
*,
bi: bool = False,
depth: int = 1,
dropout: float = 0.0,
init_W=xavier_uniform_init,
init_b=zero_init
) -> Model[Padded, Padded]:
if dropout != 0.0:
msg = (
"LSTM dropout not implemented yet. In the meantime, use the "
"PyTorchWrapper and the torch.LSTM class."
)
raise NotImplementedError(msg)
if bi and nO is not None:
nO //= 2
model: Model[Padded, Padded] = Model(
"lstm",
forward,
dims={"nO": nO, "nI": nI},
attrs={"registry_name": "LSTM.v0"},
params={"W": None, "b": None, "c": None, "h": None},
init=partial(init, init_W, init_b)
)
if bi:
model = bidirectional(model)
return clone(model, depth)
@registry.layers("PyTorchLSTM.v0")
def PyTorchLSTM(
nO: int, nI: int, *, bi: bool = False, depth: int = 1, dropout: float = 0.0
) -> Model[Padded, Padded]:
import torch.nn
from .with_padded import with_padded
from .pytorchwrapper import PyTorchRNNWrapper
if depth == 0:
return noop() # type: ignore
if bi:
nO = nO // 2
return with_padded(
PyTorchRNNWrapper(
torch.nn.LSTM(nI, nO, depth, bidirectional=bi, dropout=dropout)
)
)
def init(
init_W: Callable, init_b: Callable, model: Model,
X: Optional[Padded] = None, Y: Optional[Padded] = None
) -> None:
if X is not None:
model.set_dim("nI", get_width(X))
if Y is not None:
model.set_dim("nO", get_width(Y))
nO = model.get_dim("nO")
nI = model.get_dim("nI")
model.set_param("W", init_W(model.ops, (nO*4, nO+nI)))
model.set_param("b", init_b(model.ops, (nO*4,)))
model.set_param("h", zero_init(model.ops, (nO,)))
model.set_param("c", zero_init(model.ops, (nO,)))
def forward(
model: Model[Array3d, Array3d], Xp: Padded, is_train: bool
) -> Tuple[Padded, Callable]:
X = Xp.data
W = model.get_param("W")
b = model.get_param("b")
h = model.get_param("h")
c = model.get_param("c")
# Initialize hiddens and cells
hiddens = model.ops.alloc_f2d(X.shape[1], h.shape[0])
cells = model.ops.alloc_f2d(X.shape[1], c.shape[0])
hiddens += h
cells += c
Y, fwd_state = model.ops.recurrent_lstm(W, b, hiddens, cells, X)
Yp = Padded(Y, Xp.size_at_t, Xp.lengths, Xp.indices)
def backprop(dYp: Padded) -> Padded:
dX, (dW, db, d_h, d_c) = model.ops.recurrent_lstm_backward(
dYp.data, fwd_state, (W, b))
model.inc_grad("W", dW)
model.inc_grad("b", db)
model.inc_grad("h", d_h)
model.inc_grad("c", d_c)
return Padded(X, dYp.size_at_t, dYp.lengths, dYp.indices)
return Yp, backprop
| Python | 0.001513 | @@ -37,16 +37,22 @@
Callable
+, cast
%0Afrom fu
@@ -250,16 +250,15 @@
ort
-RNNState
+Array1d
, Ar
@@ -2506,16 +2506,30 @@
%0A W =
+ cast(Array2d,
model.g
@@ -2545,16 +2545,17 @@
%22W%22)
+)
%0A b =
mod
@@ -2550,16 +2550,30 @@
%0A b =
+ cast(Array1d,
model.g
@@ -2589,16 +2589,17 @@
%22b%22)
+)
%0A h =
mod
@@ -2594,16 +2594,30 @@
%0A h =
+ cast(Array1d,
model.g
@@ -2633,16 +2633,17 @@
%22h%22)
+)
%0A c =
mod
@@ -2638,16 +2638,30 @@
%0A c =
+ cast(Array1d,
model.g
@@ -2673,16 +2673,17 @@
ram(%22c%22)
+)
%0A # I
|
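The lstm fix wraps each `get_param` result in `typing.cast` so the type checker sees the concrete array type; `cast` is purely a static hint and returns its argument unchanged at runtime:

```python
from typing import List, cast

def first(items: object) -> int:
    # cast() costs nothing at runtime; it only informs mypy
    return cast(List[int], items)[0]

assert first([7, 8]) == 7
```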
32dd33126c9fa0076c8d7c9e8024a709674f8614 | Bump Version 0.0.28 -> 0.0.29 | threebot/__init__.py | threebot/__init__.py | # -*- encoding: utf-8 -*-
__version__ = '0.0.28'
| Python | 0 | @@ -39,11 +39,11 @@
= '0.0.2
-8
+9
'%0A
|
25ae4d42a2d3c50007d369c3288c0482037d95e0 | Fix encoding error in ecr/models.py | moto/ecr/models.py | moto/ecr/models.py | from __future__ import unicode_literals
# from datetime import datetime
from random import random
from moto.core import BaseBackend, BaseModel
from moto.ec2 import ec2_backends
from copy import copy
import hashlib
class BaseObject(BaseModel):
def camelCase(self, key):
words = []
for i, word in enumerate(key.split('_')):
if i > 0:
words.append(word.title())
else:
words.append(word)
return ''.join(words)
def gen_response_object(self):
response_object = copy(self.__dict__)
for key, value in response_object.items():
if '_' in key:
response_object[self.camelCase(key)] = value
del response_object[key]
return response_object
@property
def response_object(self):
return self.gen_response_object()
class Repository(BaseObject):
def __init__(self, repository_name):
self.arn = 'arn:aws:ecr:us-east-1:012345678910:repository/{0}'.format(
repository_name)
self.name = repository_name
# self.created = datetime.utcnow()
self.uri = '012345678910.dkr.ecr.us-east-1.amazonaws.com/{0}'.format(
repository_name
)
self.registry_id = '012345678910'
self.images = []
@property
def physical_resource_id(self):
return self.name
@property
def response_object(self):
response_object = self.gen_response_object()
response_object['registryId'] = self.registry_id
response_object['repositoryArn'] = self.arn
response_object['repositoryName'] = self.name
response_object['repositoryUri'] = self.uri
# response_object['createdAt'] = self.created
del response_object['arn'], response_object['name']
return response_object
@classmethod
def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
ecr_backend = ecr_backends[region_name]
return ecr_backend.create_repository(
# RepositoryName is optional in CloudFormation, thus create a random
# name if necessary
repository_name=properties.get(
'RepositoryName', 'ecrrepository{0}'.format(int(random() * 10 ** 6))),
)
@classmethod
def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name):
properties = cloudformation_json['Properties']
if original_resource.name != properties['RepositoryName']:
ecr_backend = ecr_backends[region_name]
ecr_backend.delete_cluster(original_resource.arn)
return ecr_backend.create_repository(
# RepositoryName is optional in CloudFormation, thus create a
# random name if necessary
repository_name=properties.get(
'RepositoryName', 'RepositoryName{0}'.format(int(random() * 10 ** 6))),
)
else:
# no-op when nothing changed between old and new resources
return original_resource
class Image(BaseObject):
def __init__(self, tag, manifest, repository, registry_id="012345678910"):
self.image_tag = tag
self.image_manifest = manifest
self.image_size_in_bytes = 50 * 1024 * 1024
self.repository = repository
self.registry_id = registry_id
self.image_digest = None
self.image_pushed_at = None
def _create_digest(self):
image_contents = 'docker_image{0}'.format(int(random() * 10 ** 6))
self.image_digest = "sha256:%s" % hashlib.sha256(image_contents).hexdigest()
def get_image_digest(self):
if not self.image_digest:
self._create_digest()
return self.image_digest
@property
def response_object(self):
response_object = self.gen_response_object()
response_object['imageId'] = {}
response_object['imageId']['imageTag'] = self.image_tag
response_object['imageId']['imageDigest'] = self.get_image_digest()
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
return response_object
@property
def response_list_object(self):
response_object = self.gen_response_object()
response_object['imageTag'] = self.image_tag
response_object['imageDigest'] = "i don't know"
return response_object
@property
def response_describe_object(self):
response_object = self.gen_response_object()
response_object['imageTags'] = [self.image_tag]
response_object['imageDigest'] = self.get_image_digest()
response_object['imageManifest'] = self.image_manifest
response_object['repositoryName'] = self.repository
response_object['registryId'] = self.registry_id
response_object['imageSizeInBytes'] = self.image_size_in_bytes
response_object['imagePushedAt'] = '2017-05-09'
return response_object
class ECRBackend(BaseBackend):
def __init__(self):
self.repositories = {}
def describe_repositories(self, registry_id=None, repository_names=None):
"""
maxResults and nextToken not implemented
"""
repositories = []
for repository in self.repositories.values():
# If a registry_id was supplied, ensure this repository matches
if registry_id:
if repository.registry_id != registry_id:
continue
# If a list of repository names was supplied, esure this repository
# is in that list
if repository_names:
if repository.name not in repository_names:
continue
repositories.append(repository.response_object)
return repositories
def create_repository(self, repository_name):
repository = Repository(repository_name)
self.repositories[repository_name] = repository
return repository
def delete_repository(self, respository_name, registry_id=None):
if respository_name in self.repositories:
return self.repositories.pop(respository_name)
else:
raise Exception("{0} is not a repository".format(respository_name))
def list_images(self, repository_name, registry_id=None):
"""
maxResults and filtering not implemented
"""
images = []
for repository in self.repositories.values():
if repository_name:
if repository.name != repository_name:
continue
if registry_id:
if repository.registry_id != registry_id:
continue
for image in repository.images:
images.append(image)
return images
def describe_images(self, repository_name, registry_id=None, image_id=None):
if repository_name in self.repositories:
repository = self.repositories[repository_name]
else:
raise Exception("{0} is not a repository".format(repository_name))
response = []
for image in repository.images:
response.append(image)
return response
def put_image(self, repository_name, image_manifest, image_tag):
if repository_name in self.repositories:
repository = self.repositories[repository_name]
else:
raise Exception("{0} is not a repository".format(repository_name))
image = Image(image_tag, image_manifest, repository_name)
repository.images.append(image)
return image
ecr_backends = {}
for region, ec2_backend in ec2_backends.items():
ecr_backends[region] = ECRBackend()
| Python | 0.000068 | @@ -3734,16 +3734,32 @@
contents
+.encode('utf-8')
).hexdig
|
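The encoding fix reflects a Python 3 rule: `hashlib` digests consume bytes, so a `str` must be encoded first (under Python 2 the old code happened to work, which is how the bug crept in):

```python
import hashlib

payload = "docker_image123456"
digest = hashlib.sha256(payload.encode("utf-8")).hexdigest()  # ok
# hashlib.sha256(payload)  # TypeError on Python 3: strings must be encoded
```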
de9525f328373e180f8b5793620a0ca217a7434c | fix setting board | trellocardupdate/cli.py | trellocardupdate/cli.py | #!/usr/bin/env python
#TODO write bash completion scripts - matching is already there,
#the work is just figuring out the syntax again to 'complete'
import sys
import re
from operator import itemgetter
import Levenshtein
from external_editor import edit as external_edit
import trello_update
import simpledispatchargparse
def choose(s, possibilities, threshold=.6):
"""
Returns the closest match to string s if exceeds threshold, else returns None
"""
if s in possibilities:
return s
startswith = [x for x in possibilities if x.lower().startswith(s.lower())]
if len(startswith) == 1: return startswith[0]
contained = [x for x in possibilities if s.lower() in x.lower()]
    if contained: return contained[0]
close = sorted([(x, Levenshtein.jaro_winkler(s, x, .05)) for x in possibilities], key=itemgetter(1))
best = max([(x, Levenshtein.jaro_winkler(s, x, .05)) for x in possibilities], key=itemgetter(1))
if best[1] < threshold:
print 'returning None because', best, 'is below threshold of', threshold
print 'out of', close
return None
return best[0]
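# Illustrative matches for choose() (hypothetical card names):
#   choose('gro', ['groceries', 'errands'])      -> 'groceries' (unique prefix)
#   choose('grocerys', ['groceries', 'errands']) -> 'groceries' (fuzzy, above threshold)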
def suggestions(s, possibilities):
#TODO don't use jaro_winkler, or use it more intelligently;
# ie break up words and match on each of them
# jaro_winkler weighs the front more
startswith = [x for x in possibilities if x.lower().startswith(s.lower())]
if startswith: return startswith
contained = [x for x in possibilities if s.lower() in x.lower()]
if contained: return contained
jws = [(x, Levenshtein.jaro_winkler(s, x)) for x in possibilities]
jws.sort(key=lambda x:0-x[1])
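    # Gaps between consecutive scores; a big drop marks where suggestion quality falls off.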
diffs = [x[1] - y[1] for x, y in zip(jws[:-1], jws[1:])]
output = []
for (card_name, score), diff in zip(jws[:-1], diffs):
output.append(card_name)
if diff > .05: break
if len(output) > 5: break
return output
def print_card_completions(s):
cards = trello_update.get_cards(use_cache=True)
m = suggestions(unicode(s), [unicode(n) for n, _id in cards])
for x in m:
print x
def get_card_name_and_id(card_query):
cards = trello_update.get_cards()
match = choose(unicode(card_query), [unicode(name) for name, id_ in cards])
if match is None: return None, None
return [(name, id_) for (name, id_) in cards if name == match][0]
def get_message_from_external_editor(card_url, card_name, moved_down):
moved_down_message = "\n# card will be moved to bottom of stack"
prompt = """
# Please enter the message you'd like to add to card. Lines starting
# with '#' will be ignored, and an empty message aborts the commit.
# On card {card_name}
# Changes to be committed:
# (url of card: {card_url})
#
# message will be added to card{moved_down}
#""".format(card_name=card_name, card_url=card_url, moved_down=moved_down_message if moved_down else '')
from_external = external_edit(prompt)
message = '\n'.join([line for line in from_external.split('\n') if ((len(line) > 0 and line[0] != '#'))])
message = re.sub(r'[^\n]\n[^\n]', '', message)
return message
def getcompletion(args):
if len(args) == 2:
command, arg = args
elif len(args) == 3:
command, arg, prevarg = args
else:
raise Exception('Bad completions arguments')
doubledashes = ['--set-board', '--get-token', '--generate-token', '--test-token', '--list-cards']
singledashes = ['-d', '-m']
if arg in singledashes + doubledashes:
print arg
elif arg == '-':
print '\n'.join([x for x in singledashes if arg in x])
elif len(arg) > 1 and arg[:2] == '--':
print '\n'.join([x for x in doubledashes if arg in x])
elif prevarg == command:
print_card_completions(arg)
elif prevarg == '-m':
print 'TYPE_A_COMMENT_HERE'
elif prevarg == '-d':
print_card_completions(arg)
else:
pass
def CLI():
# argparse can't parse some arguments to getcompletion
if '--get-bash-completion' in sys.argv:
i = sys.argv.index('--get-bash-completion')
getcompletion(sys.argv[i+1:i+4])
sys.exit()
parser = simpledispatchargparse.ParserWithSimpleDispatch(
description='Trello card updater',
usage='%(prog)s cardname [...] [-h] [-d] [-m inlinemessage [...]]')
parser.add_argument('card', action="store", nargs='+')
parser.add_argument('-d', '--move-down', action="store_true", default=False)
parser.add_argument('-m', '--message', action="store", dest="message", nargs='+', help='inline message to add to card (instead of launching editor', default=[])
#TODO get rid of almost all of these, just good for testing
@parser.add_command
def list_cards(): print 'listing cards'; print '\n'.join(x[0] for x in trello_update.get_cards())
@parser.add_command(metavar='BOARD_ID')
def set_board(s): print 'setting board to', s
@parser.add_command
def get_token(): print 'token:'; print trello_update.get_user_token()
@parser.add_command
def generate_token(): print 'generating token...'; print trello_update.generate_token()
@parser.add_command
def test_token(): print 'testing token...'; print trello_update.test_token()
    # At this point we've bailed if we're not adding a comment to a card
args = parser.parse_args(sys.argv[1:])
message = ' '.join(args.message)
card = ' '.join(args.card)
#TODO handle when this doesn't get anything good, decide how lenient - fuzziness mostly happen during completion
card_name, card_id = get_card_name_and_id(card)
if card_id is None:
print "Can't find name for query", card
sys.exit()
print 'got', card_id, card_name
if not message:
#TODO populate trello card url
message = get_message_from_external_editor('NOT YET IMPLEMENTED', card_name, args.move_down)
if not message.strip():
print 'Aborting comment due to empty comment message.'
sys.exit()
trello_update.add_comment_to_card(card_id, message, args.move_down)
#TODO add ability to edit last comment
#TODO add ability to read all comments on a card
if __name__ == '__main__':
CLI()
| Python | 0.000001 | @@ -4852,26 +4852,8 @@
and(
-metavar='BOARD_ID'
)%0A
@@ -4868,17 +4868,16 @@
t_board(
-s
): print
@@ -4895,15 +4895,39 @@
oard
- to', s
+...'; trello_update.set_board()
%0A
|
d0e369fcf43dadf01a2f4d7ba4cd172f2ffebde5 | FIX rounding | account_voucher_double_validation/account_voucher.py | account_voucher_double_validation/account_voucher.py | # -*- coding: utf-8 -*-
from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning
class account_voucher(models.Model):
_inherit = "account.voucher"
company_double_validation = fields.Boolean(
related='company_id.double_validation'
)
state = fields.Selection(
selection=[
('draft', _('Draft')),
('confirmed', _('Confirmed')),
('cancel', _('Cancelled')),
('proforma', _('Pro-forma')),
('posted', _('Posted'))
])
# we need amount to be not readonly on confirmed in order to compute the value
amount = fields.Float(
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
account_id = fields.Many2one(
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
net_amount = fields.Float(
required=False,
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
    # did not work well
# net_amount_copy = fields.Float(
# related='net_amount',
# states={
# 'confirmed': [('readonly', False)]}
# )
journal_id = fields.Many2one(
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
received_third_check_ids = fields.One2many(
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
issued_check_ids = fields.One2many(
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
delivered_third_check_ids = fields.One2many(
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
withholding_ids = fields.One2many(
states={'draft': [('readonly', False)],
'confirmed': [('readonly', False)]}
)
date = fields.Date(
default=False,
)
payment_date = fields.Date(
string='Payment Date',
readonly=True,
states={'draft': [('readonly', False)]},
help='Payment can not be validated before this date',
)
to_pay_amount = fields.Float(
'Importe a Pagar',
# _('To Pay Amount'),
# waiting for a PR 9081 to fix computed fields translations
help='Importe a ser pagado',
# help=_('Amount To be Paid'),
compute='_get_to_pay_amount',
digits=dp.get_precision('Account'),
)
difference_amount = fields.Float(
compute='_get_to_pay_amount',
        help='Diferencia entre el importe a ser pagado y el importe pagado',
digits=dp.get_precision('Account'),
)
advance_amount = fields.Float(
'Advance Amount',
digits=dp.get_precision('Account'),
readonly=True,
states={'draft': [('readonly', False)]},
help='Amount to be advance and not conciliated with debts',
)
confirmation_date = fields.Date(
'Fecha de Confirmación',
readonly=True,
states={'draft': [('readonly', False)]},
copy=False,
)
@api.one
@api.depends('writeoff_amount', 'advance_amount')
def _get_to_pay_amount(self):
"""
On v8 it is only updated on save.
In v9 should be updated live
"""
# Can not use this way because old api
debit = sum([x.amount for x in self.line_cr_ids])
credit = sum([x.amount for x in self.line_dr_ids])
        # TODO we probably need to multiply by a sign depending on whether
        # this is a receipt or a payment
to_pay_amount = credit - debit + self.advance_amount
self.to_pay_amount = to_pay_amount
self.difference_amount = to_pay_amount - self.amount
@api.multi
def action_confirm(self):
for voucher in self:
if not voucher.confirmation_date:
voucher.write({
'state': 'confirmed',
'confirmation_date': fields.Datetime.now()
})
else:
voucher.write({
'state': 'confirmed',
})
@api.multi
def proforma_voucher(self):
"""Make two things:
* Check payment date valididy
* Fix not date on voucher error, set actual date.
"""
for voucher in self:
if not voucher.date:
voucher.date = fields.Date.context_today(self)
# only check payments for now
if (
voucher.type != 'payment' or
not voucher.company_double_validation
):
continue
if voucher.amount != voucher.to_pay_amount:
raise Warning(_(
'You can not validate a Voucher that has '
'Total Amount different from To Pay Amount'))
if voucher.payment_date > fields.Date.context_today(self):
raise Warning(_(
'You can not validate a Voucher that has '
'Payment Date before Today'))
return super(account_voucher, self).proforma_voucher()
def onchange_amount(
self, cr, uid, ids, amount, rate, partner_id, journal_id,
currency_id, ttype, date, payment_rate_currency_id, company_id,
context=None):
res = super(account_voucher, self).onchange_amount(
cr, uid, ids, amount, rate, partner_id, journal_id,
currency_id, ttype, date, payment_rate_currency_id, company_id,
context=context)
for voucher in self.browse(cr, uid, ids, context=context):
# if confirmed we clean voucher lines
if res.get('value') and voucher.state == 'confirmed':
if res['value'].get('line_cr_ids'):
del res['value']['line_cr_ids']
if res['value'].get('line_dr_ids'):
del res['value']['line_dr_ids']
return res
def onchange_journal(self, cr, uid, ids, journal_id, line_ids, tax_id,
partner_id, date, amount, ttype, company_id,
context=None):
res = super(account_voucher, self).onchange_journal(
cr, uid, ids, journal_id, line_ids, tax_id, partner_id, date,
amount, ttype, company_id, context=context)
if not res:
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
# if confirmed we clean voucher lines
if res.get('value') and voucher.state == 'confirmed':
if res['value'].get('line_cr_ids'):
del res['value']['line_cr_ids']
if res['value'].get('line_dr_ids'):
del res['value']['line_dr_ids']
return res
| Python | 0.000001 | @@ -4818,32 +4818,79 @@
%0A if
+voucher.currency_id.round(%0A
voucher.amount !
@@ -4888,18 +4888,17 @@
.amount
-!=
+-
voucher
@@ -4911,16 +4911,17 @@
y_amount
+)
:%0A
|
363583654998e404baba9b72860d2465bb3d339e | Remove convoluted meshgrid statement. | mplstyles/plots.py | mplstyles/plots.py | from matplotlib import cm
import matplotlib.pyplot as plt
from mplstyles import cmap as colormap
import numpy as np
import scipy.ndimage
def contour_image(x,y,Z,cmap=None,vmax=None,vmin=None,interpolation='nearest',contour_smoothing=0,contour_opts={},label_opts={},imshow_opts={},clegendlabels=[],label=False):
ax = plt.gca()
x_delta = float((x[-1]-x[0]))/(len(x)-1)/2.
y_delta = float((y[-1]-y[0]))/(len(y)-1)/2.
extent=(x[0],x[-1],y[0],y[-1])
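    # Pad the imshow extent by half a grid cell so pixel centers align with the data points.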
extent_delta = (x[0]-x_delta,x[-1]+x_delta,y[0]-y_delta,y[-1]+y_delta)
ax.set_xlim(x[0],x[-1])
ax.set_ylim(y[0],y[-1])
if cmap is None:
cmap = colormap.reverse(cm.Blues)
Z = Z.transpose()
#plt.contourf(X,Y,self.pdata,interpolation=interpolation)
cs = ax.imshow(Z,interpolation=interpolation,origin='lower',aspect='auto',extent=extent_delta,cmap=cmap,vmax=vmax,vmin=vmin, **imshow_opts)
# Draw contours
if contour_smoothing != 0:
Z = scipy.ndimage.zoom(Z, contour_smoothing)
X, Y = np.meshgrid(np.linspace(x[0],x[-1],Z.shape[1]), np.linspace(y[0],y[-1],Z.shape[0]))
CS = ax.contour(X, Y, Z, extent=extent, origin='lower', **contour_opts )
# Label contours
if label:
ax.clabel(CS, **label_opts)
# Show contours in legend if desired
if len(clegendlabels) > 0:
for i in range(len(clegendlabels)):
CS.collections[i].set_label(clegendlabels[i])
#ax.legend()
return cs, CS
| Python | 0.000005 | @@ -962,78 +962,12 @@
rid(
-np.linspace(x%5B0%5D,x%5B-1%5D,Z.shape%5B1%5D), np.linspace(y%5B0%5D,y%5B-1%5D,Z.shape%5B0%5D)
+x, y
)%0A%09C
|
35ff1a2b83c4251a818ae280db04c5ae4b8cdb0d | add option to set config file to check style | tools/check-style.py | tools/check-style.py | #!/usr/bin/env python3
# This file is part of the Soletta Project
#
# Copyright (C) 2015 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from difflib import unified_diff
from shutil import which
import argparse
import os
import re
import subprocess
import sys
DIFF_INC_COLOR = "\033[92m"
DIFF_REM_COLOR = "\033[31m"
DIFF_REF_COLOR = "\033[36m"
END_COLOR = '\033[0m'
UNCRUSTIFY_VERSION = "0.60"
def run_command(cmd):
try:
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT,
shell=True, universal_newlines=True)
return output
except subprocess.CalledProcessError as e:
try:
print("ERROR: %s" % e.output)
except BrokenPipeError as e:
return None
return None
def check_uncrustify():
uncrustify = which("uncrustify")
if not uncrustify:
print("Uncrustify tool is not present, can't check code format.")
return None
version = run_command("%s --version" % uncrustify)
if not version:
print("Could not run uncrustify command.")
return None
version = version.split()[1]
if version != UNCRUSTIFY_VERSION:
print("Uncrustify tool must be at %s version, exactly (found %s)." % \
(UNCRUSTIFY_VERSION, version))
return None
return uncrustify
def check_dirty(args):
cmd_diff = "git diff --diff-filter=ACMR --oneline --name-only -- '*.[ch]'"
cmd_cached = "git diff --cached --diff-filter=ACMR --oneline --name-only -- '*.[ch]'"
diff_list = run_command(cmd_diff)
diff_list += run_command(cmd_cached)
if not diff_list:
return None
print("Changes for (%s) marked to be checked, but the git tree is dirty " \
"-- checking these files instead" % args.target_refspec)
return diff_list
def check_commits(args):
cmd_check = "git diff --diff-filter=ACMR --oneline --name-only %s -- '*.[ch]'"
print("Working directory is clean, checking commit changes for (%s)" % args.target_refspec)
return run_command(cmd_check % args.target_refspec)
def run_check(args, uncrustify):
diff_list = check_dirty(args)
if not diff_list:
diff_list = check_commits(args)
if not diff_list:
print("No source files (*.[ch]) changed for: %s" % args.target_refspec)
return True
cmd = "%s -c data/schemas/uncrustify.schema -l C %s" % \
(uncrustify, diff_list.replace("\n", " "))
output = run_command(cmd)
if not output:
return False
passed = True
for f in diff_list.split():
unc_file = "%s.uncrustify" % f
if not os.path.exists(unc_file):
print("WARNING: Expected %s not found, skipping this file." % unc_file)
continue
with open(f) as fromf, open(unc_file) as tof:
fromlines, tolines = list(fromf), list(tof)
try:
gen = unified_diff(fromlines, tolines, f, unc_file)
for ln in gen:
passed = False
if args.color == "always":
out = re.sub("^\@", "%s@" % DIFF_REF_COLOR, \
re.sub("^\-", "%s-" % DIFF_REM_COLOR, \
re.sub("$", END_COLOR, \
re.sub("^\+", "%s+" % DIFF_INC_COLOR, ln))))
else:
out = ln
sys.stdout.write(out)
os.remove(unc_file)
except KeyboardInterrupt:
""" ignore keyboard interrupt and simply return """
return False
return passed
if __name__ == "__main__":
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("-b", help="The base commit", dest="base_commit",
type=str, metavar="<base-commit>")
group.add_argument("-r", help="Use a refspec instead of a base commit",
dest="refspec", type=str, metavar="<refspec>")
parser.add_argument("--color", metavar="WHEN", type=str, \
help="Use colors. WHEN can be always, auto and never.")
parser.set_defaults(color="auto")
args = parser.parse_args()
if args.base_commit:
args.target_refspec = "%s..HEAD" % args.base_commit
elif args.refspec:
args.target_refspec = args.refspec
else:
args.target_refspec = "HEAD~1"
if args.color == "auto":
if sys.stdout.isatty():
args.color = "always"
else:
args.color = "never"
uncrustify = check_uncrustify()
if not uncrustify:
exit(1)
if not run_check(args, uncrustify):
exit(1)
| Python | 0 | @@ -3608,24 +3608,34 @@
, uncrustify
+, cfg_file
):%0A diff_
@@ -3866,38 +3866,10 @@
-c
-data/schemas/uncrustify.schema
+%25s
-l
@@ -3900,16 +3900,26 @@
rustify,
+ cfg_file,
diff_li
@@ -5465,24 +5465,158 @@
%3Crefspec%3E%22)%0A
+ parser.add_argument(%22-c%22, help=%22Path for the config file%22, dest=%22cfg_file%22,%0A type=str, metavar=%22%3Cconfig%3E%22)%0A
parser.a
@@ -6236,16 +6236,100 @@
xit(1)%0A%0A
+ if not args.cfg_file:%0A args.cfg_file = %22data/schemas/uncrustify.schema%22%0A%0A
if n
@@ -6357,16 +6357,31 @@
crustify
+, args.cfg_file
):%0A
|
5846d9689f756d0552fbb887397f853c574c6bf8 | add number of windows to output | tools/get_texture.py | tools/get_texture.py | #!/usr/bin/env python2
"""Calculate texture properties for a masked area."""
from __future__ import absolute_import, division, print_function
import argparse
from collections import defaultdict
import numpy as np
import dwi.files
import dwi.mask
import dwi.standardize
import dwi.texture
import dwi.util
def parse_args():
"""Parse command-line arguments."""
p = argparse.ArgumentParser(description=__doc__)
p.add_argument('--verbose', '-v', action='count',
help='increase verbosity')
p.add_argument('--input', required=True,
help='input image')
p.add_argument('--mask', required=True,
help='mask file to use')
p.add_argument('--mode', metavar='MODE', required=True,
help='imaging mode specification')
p.add_argument('--method', metavar='METHOD', required=True,
help='method')
p.add_argument('--slices', default='maxfirst',
help='slice selection (maxfirst, max, all)')
p.add_argument('--winspec', default='5',
help='window specification (side length, all, mbb)')
p.add_argument('--portion', type=float, default=0,
help='portion of selected voxels required for each window')
p.add_argument('--voxel', choices=('all', 'mean'), default='all',
help='voxel to output (all, mean)')
p.add_argument('--output', metavar='FILENAME', required=True,
help='output texture map file')
return p.parse_args()
def max_mask(mask, winsize):
"""Return a mask that has the voxels selected that have the maximum number
of surrounding voxels selected in the original mask.
"""
d = defaultdict(list)
for pos, win in dwi.util.sliding_window(mask, winsize, mask=mask):
d[np.count_nonzero(win)].append(pos)
r = np.zeros_like(mask)
for pos in d[max(d)]:
r[pos] = True
return r
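# i.e. keep exactly those voxels that share the highest neighbour count found
# anywhere in the mask.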
def portion_mask(mask, winsize, portion=1, resort_to_max=True):
"""Return a mask that selects (only) voxels that have the window at each
selected voxel origin up to a minimum portion in the original mask selected
(1 means the whole window must be selected, 0 gives the original mask).
If resort_to_max is true, the window with maximum number of selected voxels
is used in case the resulting mask would otherwise be empty.
"""
r = np.zeros_like(mask)
for pos, win in dwi.util.sliding_window(mask, winsize, mask=mask):
if np.count_nonzero(win) / win.size >= portion:
r[pos] = True
if resort_to_max and np.count_nonzero(r) == 0:
r = max_mask(mask, winsize)
return r
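# In short: portion=1 demands a fully selected window, portion=0 degenerates to
# the original mask, and an empty result falls back to max_mask.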
def main():
args = parse_args()
if args.verbose:
print('Reading image: {}'.format(args.input))
img, attrs = dwi.files.read_pmap(args.input)
if args.mode == 'T2':
assert attrs['echotimes'][0] == 0 # TODO: Could be another?
img = img[..., 0]
assert img.ndim == 3
if args.mask is not None:
mask = dwi.mask.read_mask(args.mask)
if isinstance(mask, dwi.mask.Mask):
mask = mask.convert_to_3d(img.shape[0])
if args.verbose:
print('Using mask', args.mask)
# else:
# # A default mask for testing.
# mask = dwi.mask.Mask3D(np.zeros_like(img, dtype=np.bool))
# # mask.array[9:-9, 50:-50, 50:-50] = True
# tuples = dwi.util.bounding_box(img)
# print('Using minimum bounding box as mask: {}'.format(tuples))
# slices = [slice(*t) for t in tuples]
# mask.array[slices] = True
if img.shape != mask.shape():
raise Exception('Image shape {} does not match mask shape {}'.format(
img.shape, mask.shape()))
if mask.n_selected() == 0:
raise ValueError('Empty mask.')
if args.slices == 'maxfirst':
slice_indices = [mask.max_slices()[0]]
elif args.slices == 'max':
slice_indices = mask.max_slices()
elif args.slices == 'all':
slice_indices = mask.selected_slices()
else:
raise Exception('Invalid slice set specification', args.slices)
# Zero other slices in mask.
for i in range(len(mask.array)):
if i not in slice_indices:
mask.array[i, :, :] = 0
# Use only selected slices to save memory.
if args.voxel == 'mean':
img = img[slice_indices]
mask.array = mask.array[slice_indices]
dwi.texture.DTYPE = np.double
# dwi.texture.DTYPE = np.double
# Get portion mask.
if args.winspec in ('all', 'mbb'):
pmask = mask.array # Some methods don't use window.
elif args.winspec.isdigit():
winsize = int(args.winspec)
assert winsize > 0
winshape = (1, winsize, winsize)
pmask = portion_mask(mask.array, winshape, portion=args.portion)
else:
raise ValueError('Invalid window spec: {}'.format(args.winspec))
if args.verbose:
print('Image: {s}, slice: {i}, voxels: {n}, window: {w}'.format(
s=img.shape, i=slice_indices, n=np.count_nonzero(mask.array),
w=args.winspec))
if args.verbose:
print('Calculating {} texture features for {}...'.format(args.method,
args.mode))
avg = (args.voxel == 'mean')
dwi.texture.MODE = args.mode
path = None
if not avg and args.mode.startswith('T2w') and args.method == 'gabor':
print('Note: output array is manipulated on disk, this is slower')
path = args.output
tmap, names = dwi.texture.get_texture(img, args.method, args.winspec,
mask=pmask, avg=avg, path=path)
attrs['parameters'] = names
if args.verbose:
print('Writing shape {s}, type {t} to {o}'.format(s=tmap.shape,
t=tmap.dtype,
o=args.output))
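    # When a path was handed to get_texture, the texture routine already wrote
    # the map to disk; only the metadata still needs attaching.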
if path is None:
dwi.files.write_pmap(args.output, tmap, attrs)
else:
attrs['shape'] = tmap.shape
attrs['dtype'] = str(tmap.dtype)
tmap.attrs.update(attrs)
if __name__ == '__main__':
main()
| Python | 0.000008 | @@ -5731,16 +5731,136 @@
= names
+%0A # Number of windows, or resulting texture map volume in general.%0A attrs%5B'tmap_voxels'%5D = np.count_nonzero(pmask)
%0A%0A if
|
2da0c539817ee6e98d67a669a6f1351dbae146c0 | fix a spelling bug | myaccount/views.py | myaccount/views.py | from django.shortcuts import render,HttpResponse,HttpResponseRedirect
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.contrib import messages
from myaccount.models import bizCategory, bizGoal, bizProject, projectPic
from myaccount.forms import ProjectForm, projectPicform
from userprofile.models import Profile
from userprofile.forms import ProfileForm
import os
from django.conf import settings
# Create your views here.
@login_required
def index(request):
projectsCount = bizProject.objects.filter(post_by=request.user).count()
context = {'projectsCount': projectsCount}
return render(request, 'myaccount/index.html', context)
@login_required
def addProject(request):
# tflag = "add"
# print request.user.id
# A boolean value for telling the template whether the registration was successful.
# Set to False initially. Code changes value to True when registration succeeds.
# added = False
if request.method == 'POST':
project_form = ProjectForm(request.POST)
pic_form = projectPicform(request.POST)
if project_form.is_valid() :
project = project_form.save(commit=False)
project.post_by = request.user
project.save()
# added = True
# message = "Product: "+product.name+" is added successfullly."
messages.success(request, 'Project: %s is successfullly added.' % project.name)
if pic_form.is_valid():
if 'picture' in request.FILES:
pic = pic_form.save(commit=False)
pic.project = project
pic.picture = request.FILES['picture']
pic.save()
return HttpResponseRedirect(reverse('myaccount:myProjects'))
else:
project_form = ProjectForm()
pic_form = projectPicform()
context = {'project_form':project_form,'pic_form':pic_form}
return render(request, 'myaccount/addproject.html', context)
# return HttpResponse("Hello, world. You're at the Add product.")
@login_required
def editProject(request,project_id):
tflag = "edit"
# print request.user.id
# A boolean value for telling the template whether the registration was successful.
# Set to False initially. Code changes value to True when registration succeeds.
# added = False
p = bizProject.objects.get(pk=project_id)
try:
pic_pre = projectPic.objects.get(project_id = p)
except projectPic.DoesNotExist:
pic_pre = None
if p.post_by != request.user:
return HttpResponse("You don't have permission")
if request.method == 'POST':
project_form = ProjectForm(request.POST,instance=p)
pic_form = projectPicform(request.POST,instance=pic_pre)
if project_form.is_valid():
project = project_form.save(commit=False)
project.post_by = request.user
project.save()
# added = True
            messages.success(request, 'Project: %s is successfully updated.' % project.name)
if pic_form.is_valid():
pic_new = pic_form.save(commit=False)
# pic_new.project = project
if 'picture' in request.FILES:
# Delete the old picture
if pic_pre:
os.remove(os.path.join(settings.MEDIA_ROOT,pic_pre.picture.name))
pic_new.picture = request.FILES['picture']
pic_new.save()
return HttpResponseRedirect(reverse('myaccount:myProjects'))
else:
project_form = ProjectForm(instance=p)
pic_form = projectPicform(instance=pic_pre)
context = {'project_form':project_form,'pic_form':pic_form,'tflag':tflag, 'project_id':project_id}
return render(request, 'myaccount/addproject.html', context)
# return HttpResponse("Hello, world. You're at the Add product.")
def delProject(request,project_id):
project = bizProject.objects.get(pk=project_id)
try:
pic = projectPic.objects.get(project = project)
except projectPic.DoesNotExist:
pic = None
# pic = productPic.objects.get(product = product)
if project.post_by != request.user:
return HttpResponse("You don't have permission")
else:
if pic:
os.remove(os.path.join(settings.MEDIA_ROOT,pic.picture.name))
pic.delete()
        messages.success(request, 'Project: %s is successfully deleted.' % project.name)
project.delete()
return HttpResponseRedirect(reverse('myaccount:myprojects'))
@login_required
def myProjects(request):
projects = bizProject.objects.filter(post_by=request.user)
# paginator = Paginator(products,5)
# page = request.GET.get('page')
# try:
# li = paginator.page(page)
# except PageNotAnInteger:
# li = paginator.page(1)
# except EmptyPage:
# li = paginator.page(paginator.num_pages)
# context = {'listings':li}
context = {'listings':projects}
return render(request, 'myaccount/myprojects.html', context)
# return HttpResponse("Hello, world. You're at the polls index.")
@login_required
def editProfile(request):
# tflag = "edit"
# print request.user.id
# A boolean value for telling the template whether the registration was successful.
# Set to False initially. Code changes value to True when registration succeeds.
# added = False
p = request.user.myprofile
if request.method == 'POST':
profile_form = ProfileForm(request.POST,instance=p)
if profile_form.is_valid():
profile = profile_form.save()
# pro.post_by = request.user
profile.save()
# added = True
            messages.success(request, 'Your profile is successfully updated.')
return HttpResponseRedirect(reverse('myaccount:editProfile'))
else:
profile_form = ProfileForm(instance=p)
context = {'profile_form':profile_form}
return render(request, 'myaccount/editprofile.html', context)
# return HttpResponse("Hello, world. You're at the Add product.")
| Python | 0.003114 | @@ -4163,17 +4163,17 @@
count:my
-p
+P
rojects'
|
d1da925995de8a4fae070a8c6947985441fbfaa3 | remove localhost db | mysite/settings.py | mysite/settings.py | """
Django settings for mysite project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-%xk%*rd(8ug7-#fcpd)e1cf$cyb-zh&b%(yq(5ixpb6hi@w9^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# auth
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend"
)
# auth and allauth settings
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
SOCIALACCOUNT_QUERY_EMAIL = True
# SOCIALACCOUNT_PROVIDERS = {
# 'facebook': {
# 'SCOPE': ['email', 'publish_stream'],
# 'METHOD': 'js_sdk' # instead of 'oauth2'
# }
# }
# auth
TEMPLATE_CONTEXT_PROCESSORS = (
# Required by allauth template tags
"django.core.context_processors.request",
# allauth specific context processors
"django.contrib.auth.context_processors.auth",
"allauth.account.context_processors.account",
"allauth.socialaccount.context_processors.socialaccount",
)
# auth
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend",
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.webdesign',
'accounts',
'dproject',
'mysite',
# auth
'django.contrib.sites',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
)
# forms
CRISPY_TEMPLATE_PACK = 'uni_form'
# auth
SITE_ID = 1
# auth
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'mysite.urls'
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
""" # comment out for Heroku
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
"""
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/New_York'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
# auth login prefs
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
ACCOUNT_USERNAME_BLACKLIST = ['root',
'fake',
'bitch',
'ass',
'nigger',
'fuck',
'shit',
'admin',
'kngofwrld',
]
# """
#### start Heroku setup ####
## https://devcenter.heroku.com/articles/getting-started-with-django
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES = {}
DATABASES['default'] = dj_database_url.config()
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2",
"NAME": "[YOUR_DATABASE_NAME]",
"USER": "",
"PASSWORD": "",
"HOST": "localhost",
"PORT": "",
}
}
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static asset configuration
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
#### end Heroku setup
# """
| Python | 0.999813 | @@ -3011,16 +3011,20 @@
ent out
+sql
for Hero
@@ -3026,16 +3026,16 @@
Heroku%0A
-
DATABASE
@@ -3586,13 +3586,43 @@
= %5B
-'root
+%0A 'admin
',%0A
@@ -3652,20 +3652,15 @@
- 'fake
+'ass
',%0A
-
@@ -3712,38 +3712,37 @@
- 'ass
+'fake
',%0A
@@ -3760,17 +3760,13 @@
- 'nigger
+'fuck
',%0A
@@ -3784,39 +3784,42 @@
- 'fuck
+'kngofwrld
',%0A
@@ -3837,15 +3837,15 @@
- 'shit
+'nigger
',%0A
@@ -3867,32 +3867,29 @@
- 'admin
+'root
',%0A
@@ -3903,36 +3903,29 @@
-
- 'kngofwrld
+'shit
',%0A
@@ -3949,17 +3949,16 @@
%5D%0A%0A%0A%0A
-%0A
# %22%22%22%0A##
@@ -4190,16 +4190,34 @@
config()
+%0A%0A%22%22%22 localhost db
%0ADATABAS
@@ -4439,17 +4439,21 @@
,%0A %7D%0A
-
%7D
+%0A%22%22%22
%0A%0A# Hono
|
7177f7e0263d8a5f2adf458f9bfe33bff12137e0 | fix syntax error | n_sided_polygon.py | n_sided_polygon.py | import turtle
import turtlehack
import random
# A function that draws an n-sided polygon
def n_sided_polygon(turtle, n, color="#FFFFFF", line_thickness=1):
'''
Draw an n-sided polygon
    input: turtle, n, color, line_thickness
'''
# for n times:
# Draw a line, then turn 360/n degrees and draw another
# set initial parameters
turtle.degrees()
line_length=80
turtle.pensize(line_thickness)
turn_angle = (360/n)
i = 1
# Draw each line segment and turn
while (i <= n):
turtle.color(color)
turtle.pendown()
turtle.forward(line_length)
turtle.penup()
turtle.right(turn_angle)
i += 1
return 0
## MAIN ##
# set initial parameters
n=random.randint(3,12)
# create the Turle instance
graphic = turtle.Turtle()
turtlehack.n_sided_polygon(graphic, n, turtlehack.random_color(), random.randint(4,8))
ignore = input("hit any key to continue:")
graphic.done()
| Python | 0.000003 | @@ -749,20 +749,32 @@
e()%0A
-%0Aturtlehack.
+# Call the polygon code%0A
n_si
@@ -846,16 +846,33 @@
(4,8))%0A%0A
+# Close and exit%0A
ignore =
@@ -872,16 +872,20 @@
gnore =
+raw_
input(%22h
@@ -910,16 +910,17 @@
inue:%22)%0A
+#
graphic.
|
458d61ffb5161394f8080cea59716b2f9cb492f3 | Add error message for not implemented error | nbgrader_config.py | nbgrader_config.py | c = get_config()
c.CourseDirectory.db_assignments = [dict(name="1", duedate="2019-12-09 17:00:00 UTC")]
c.CourseDirectory.db_students = [
dict(id="foo", first_name="foo", last_name="foo")
]
c.ClearSolutions.code_stub = {'python': '##### Implement this part of the code #####\nraise NotImplementedError()'}
| Python | 0.000001 | @@ -227,16 +227,18 @@
ython':
+''
'##### I
@@ -277,10 +277,9 @@
####
-%5Cn
+%0A
rais
@@ -300,12 +300,62 @@
edError(
-)
+%22Code not implemented, follow the instructions.%22)''
'%7D%0A
|
460c6ca46524963f1c17d8773dfced0db14af521 | Version number -> 0.5 | nbopen/__init__.py | nbopen/__init__.py | """Open a notebook from the command line in the best available server"""
__version__ = '0.4.3'
from .nbopen import main
| Python | 0.000067 | @@ -88,11 +88,9 @@
'0.
-4.3
+5
'%0A%0Af
|
1cae5cf5b2874eb2bafc9486d4873abfa1a58366 | Add log_to_file method | toolsweb/__init__.py | toolsweb/__init__.py | # -*- coding: utf-8 -*-
import flask
import jinja2
import os.path
import oursql
def connect_to_database(database, host):
default_file = os.path.expanduser('~/replica.my.cnf')
if not os.path.isfile(default_file):
raise Exception('Database access not configured for this account!')
return oursql.connect(host=host, db=database,
read_default_file=default_file)
def connect_to_labsdb(project):
return connect_to_database(database=project + '_p',
host=project + '.labsdb')
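# e.g. connect_to_labsdb('enwiki') connects to database 'enwiki_p' on host
# 'enwiki.labsdb'.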
def create_app(name, template_package=None, template_path=None):
app = flask.Flask(name)
app_loader = app.jinja_loader
if template_package is not None:
app_loader = jinja2.PackageLoader(template_package)
elif template_path is not None:
app_loader = jinja2.FileSystemLoader(template_path)
app.jinja_loader = jinja2.ChoiceLoader([
app_loader,
jinja2.PackageLoader('toolsweb'),
])
return app
| Python | 0.000008 | @@ -45,16 +45,31 @@
jinja2%0A
+import logging%0A
import o
@@ -75,16 +75,16 @@
os.path%0A
-
import o
@@ -628,16 +628,46 @@
ath=None
+,%0A log_file=None
):%0A a
@@ -1037,16 +1037,16 @@
%5D)%0A%0A
-
retu
@@ -1053,8 +1053,159 @@
rn app%0A%0A
+def log_to_file(app, log_file):%0A handler = logging.FileHandler(log_file)%0A app.logger.setLevel(logging.DEBUG)%0A app.logger.addHandler(handler)%0A%0A
|
cf8cc12b9a3bb4cfb550db1c75b1fa24db3c357d | {{{config.options}}} returns a list in some circumstances. | trac/tests/config.py | trac/tests/config.py | # -*- coding: iso8859-1 -*-
#
# Copyright (C) 2005 Edgewall Software
# Copyright (C) 2005 Christopher Lenz <[email protected]>
#
# Trac is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Trac is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# Author: Christopher Lenz <[email protected]>
from trac.config import Configuration
import os
import tempfile
import time
import unittest
class ConfigurationTestCase(unittest.TestCase):
def setUp(self):
self.filename = os.path.join(tempfile.gettempdir(), 'trac-test.ini')
configfile = open(self.filename, 'w')
configfile.close()
def tearDown(self):
os.remove(self.filename)
def test_default(self):
config = Configuration(self.filename)
self.assertEquals('', config.get('a', 'option'))
self.assertEquals('value', config.get('a', 'option', 'value'))
config.setdefault('a', 'option', 'value')
self.assertEquals('value', config.get('a', 'option'))
def test_read_and_get(self):
configfile = open(self.filename, 'w')
configfile.writelines(['[a]\n', 'option = x\n', '\n'])
configfile.close()
config = Configuration(self.filename)
self.assertEquals('x', config.get('a', 'option'))
self.assertEquals('x', config.get('a', 'option', 'y'))
def test_set_and_save(self):
configfile = open(self.filename, 'w')
configfile.close()
config = Configuration(self.filename)
config.set('a', 'option', 'x')
self.assertEquals('x', config.get('a', 'option'))
config.save()
configfile = open(self.filename, 'r')
self.assertEquals(['[a]\n', 'option = x\n', '\n'],
configfile.readlines())
configfile.close()
def test_sections(self):
configfile = open(self.filename, 'w')
configfile.writelines(['[a]\n', 'option = x\n',
'[b]\n', 'option = y\n'])
configfile.close()
config = Configuration(self.filename)
self.assertEquals(['a', 'b'], config.sections())
def test_options(self):
configfile = open(self.filename, 'w')
configfile.writelines(['[a]\n', 'option = x\n',
'[b]\n', 'option = y\n'])
configfile.close()
config = Configuration(self.filename)
self.assertEquals(('option', 'x'), config.options('a').next())
self.assertEquals(('option', 'y'), config.options('b').next())
def test_reparse(self):
configfile = open(self.filename, 'w')
configfile.writelines(['[a]\n', 'option = x\n', '\n'])
configfile.close()
config = Configuration(self.filename)
self.assertEquals('x', config.get('a', 'option'))
time.sleep(1) # needed because of low mtime granularity
configfile = open(self.filename, 'w')
configfile.write('[a]\noption = y')
configfile.close()
config.parse_if_needed()
self.assertEquals('y', config.get('a', 'option'))
def suite():
return unittest.makeSuite(ConfigurationTestCase, 'test')
if __name__ == '__main__':
unittest.main()
| Python | 0.999999 | @@ -2936,24 +2936,29 @@
ion', 'x'),
+iter(
config.optio
@@ -2964,16 +2964,17 @@
ons('a')
+)
.next())
@@ -3017,16 +3017,21 @@
, 'y'),
+iter(
config.o
@@ -3041,16 +3041,17 @@
ons('b')
+)
.next())
|
9ff4fbcdf5b21d263e8b20abb0a3d0395ce28981 | Document the reason for accepting only `POST` requests on `/wiki_render`, and allow `GET` requests from `TRAC_ADMIN` for testing purposes. | trac/wiki/web_api.py | trac/wiki/web_api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
from trac.mimeview.api import Context
from trac.resource import Resource
from trac.web.api import IRequestHandler
from trac.wiki.formatter import format_to
class WikiRenderer(Component):
"""Wiki text renderer."""
implements(IRequestHandler)
# IRequestHandler methods
def match_request(self, req):
return req.path_info == '/wiki_render' and req.method == 'POST'
def process_request(self, req):
realm = req.args.get('realm', 'wiki')
id = req.args.get('id')
version = req.args.get('version')
if version is not None:
try:
version = int(version)
except ValueError:
version = None
text = req.args.get('text', '')
flavor = req.args.get('flavor')
options = {}
if 'escape_newlines' in req.args:
options['escape_newlines'] = bool(req.args['escape_newlines'])
if 'shorten' in req.args:
options['shorten'] = bool(req.args['shorten'])
resource = Resource(realm, id=id, version=version)
context = Context.from_request(req, resource)
rendered = format_to(self.env, flavor, context, text, **options)
req.send(rendered.encode('utf-8'))
| Python | 0 | @@ -878,33 +878,8 @@
der'
- and req.method == 'POST'
%0A%0A
@@ -903,32 +903,317 @@
uest(self, req):
+%0A # Allow all POST requests (with a valid __FORM_TOKEN, ensuring that%0A # the client has at least some permission). Additionally, allow GET%0A # requests from TRAC_ADMIN for testing purposes.%0A if req.method != 'POST':%0A req.perm.require('TRAC_ADMIN')
%0A realm =
|
bb696f7c5b97563339f04206e649b54759fc9c6b | add transform for in__id to base get method | actions/lib/action.py | actions/lib/action.py |
from st2actions.runners.pythonrunner import Action
import requests
__all__ = [
'NetboxBaseAction'
]
class NetboxBaseAction(Action):
"""Base Action for all Netbox API based actions
"""
def __init__(self, config):
super(NetboxBaseAction, self).__init__(config)
def get(self, endpoint_uri, **kwargs):
"""Make a get request to the API URI passed in
"""
self.logger.info("Calling base get with kwargs: {}".format(kwargs))
if self.config['use_https']:
url = 'https://'
else:
url = 'http://'
url = url + self.config['hostname'] + endpoint_uri
headers = {
'Authorization': 'Token ' + self.config['api_token'],
'Accept': 'application/json'
}
r = requests.get(url, verify=self.config['ssl_verify'], headers=headers, params=kwargs)
return {'raw': r.json()}
| Python | 0.000001 | @@ -778,16 +778,148 @@
%7D%0A%0A
+ # transform %60in__id%60 if present%0A if kwargs.get('in__id'):%0A kwargs%5B'in__id'%5D = ','.join(kwargs%5B'in__id'%5D)%0A%0A
|
e1074fbc814b238a8d6d878810a8ac665a169f03 | Fix template name in views | nomadblog/views.py | nomadblog/views.py | from django.views.generic import ListView, DetailView
from django.shortcuts import get_object_or_404
from django.conf import settings
from nomadblog.models import Blog, Category
from nomadblog import get_post_model
DEFAULT_STATUS = getattr(settings, 'PUBLIC_STATUS', 0)
POST_MODEL = get_post_model()
class NomadBlogMixin(object):
def dispatch(self, request, *args, **kwargs):
if self.kwargs.get('country_code'):
self.blog = get_object_or_404(Blog, countries__code__iexact=self.kwargs.get('country_code'), slug=self.kwargs.get('blog_slug'))
else:
self.blog = Blog.objects.get(slug=settings.DEFAULT_BLOG_SLUG)
return super(NomadBlogMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, *args, **kwargs):
context = super(NomadBlogMixin, self).get_context_data(*args, **kwargs)
context['blog'] = self.blog
return context
class PostList(NomadBlogMixin, ListView):
model = POST_MODEL
paginate_by = getattr(settings, 'POST_PAGINATE_BY', 25)
def get_queryset(self):
qs = super(PostList, self).get_queryset()
return qs.filter(bloguser__blog=self.blog).order_by('-pub_date')
class PostDetail(NomadBlogMixin, DetailView):
model = POST_MODEL
def get_object(self, queryset=None):
queryset = self.get_queryset().filter(bloguser__blog=self.blog)
return super(PostDetail, self).get_object(queryset)
class CategoriesList(NomadBlogMixin, ListView):
model = Category
paginate_by = getattr(settings, 'CATEGORY_PAGINATE_BY', 25)
class PostsByCategoryList(NomadBlogMixin, ListView):
model = POST_MODEL
template_name = 'nomadblog/post_list_by_category.html'
paginate_by = getattr(settings, 'POST_PAGINATE_BY', 25)
def get_queryset(self, *args, **kwargs):
qs = super(PostsByCategoryList, self).get_queryset()
self.category = get_object_or_404(Category, slug=self.kwargs.get('category_slug', ''))
return qs.filter(categories=self.category)
def get_context_data(self, *args, **kwargs):
context = super(PostsByCategoryList, self).get_context_data(*args, **kwargs)
context['category'] = self.category
return context
| Python | 0 | @@ -981,24 +981,71 @@
POST_MODEL%0A
+ template_name = 'nomadblog/post_list.html'%0A
paginate
@@ -1314,16 +1314,65 @@
ST_MODEL
+%0A template_name = 'nomadblog/post_detail.html'
%0A%0A de
|
44161337282d14a48bde278b6e1669e8b3c94e4e | Bump version to 0.1.7 | notify/__init__.py | notify/__init__.py | __version__ = "0.1.6"
| Python | 0.000001 | @@ -16,7 +16,7 @@
0.1.
-6
+7
%22%0A
|
72a827b8cca6dc100e7f0d2d92e0c69aa67ec956 | change name and docstring | apps/auth/iufOAuth.py | apps/auth/iufOAuth.py | from social.backends.oauth import BaseOAuth2
# see http://psa.matiasaguirre.net/docs/backends/implementation.html
class IUFOAuth2(BaseOAuth2):
"""Github OAuth authentication backend"""
name = 'github'
AUTHORIZATION_URL = 'https://iufinc.org/login/oauth/authorize'
ACCESS_TOKEN_URL = 'https://iufinc.org/login/oauth/access_token'
SCOPE_SEPARATOR = ','
EXTRA_DATA = [
('id', 'id'),
('expires', 'expires')
]
def get_user_details(self, response):
"""Returns user details from IUF account"""
return {'username': response.get('user'),
'email': response.get('email') or '',
'first_name': response.get('first_name')} | Python | 0.000002 | @@ -148,14 +148,11 @@
%22%22%22
-Github
+IUF
OAu
@@ -196,14 +196,11 @@
= '
-github
+iuf
'%0A
|
c8f0be88729bd8b18a7e58c092c17b606feed475 | Stop returning None from judge prediction function (#179) | atcodertools/constprediction/constants_prediction.py | atcodertools/constprediction/constants_prediction.py | import re
from typing import Tuple, Optional
from bs4 import BeautifulSoup
from atcodertools.client.models.problem_content import ProblemContent, InputFormatDetectionError, SampleDetectionError
from atcodertools.common.judgetype import ErrorType, NormalJudge, DecimalJudge, InteractiveJudge, Judge
from atcodertools.common.logging import logger
from atcodertools.constprediction.models.problem_constant_set import ProblemConstantSet
class YesNoPredictionFailedError(Exception):
pass
class MultipleModCandidatesError(Exception):
def __init__(self, cands):
self.cands = cands
class MultipleDecimalCandidatesError(Exception):
def __init__(self, cands):
self.cands = cands
MOD_ANCHORS = ["余り", "あまり", "mod", "割っ", "modulo"]
DECIMAL_ANCHORS = ["誤差", " error "]
MULTISOLUTION_ANCHORS = ["複数ある場合", "どれを出力しても構わない"]
INTERACTIVE_ANCHORS = ["インタラクティブ", "リアクティブ", "interactive", "reactive"]
MOD_STRATEGY_RE_LIST = [
re.compile("([0-9]+).?.?.?で割った"),
re.compile("modu?l?o?[^0-9]?[^0-9]?[^0-9]?([0-9]+)")
]
DECIMAL_STRATEGY_RE_LIST_KEYWORD = [
re.compile("(?:絶対|相対)誤差"),
re.compile("(?:absolute|relative)")
]
DECIMAL_STRATEGY_RE_LIST_VAL = [
re.compile("10\^(-[0-9]+)"),
]
def is_mod_context(sentence):
for kw in MOD_ANCHORS:
if kw in sentence:
return True
return False
def is_decimal_context(sentence):
for kw in DECIMAL_ANCHORS:
if kw in sentence:
return True
return False
def predict_modulo(html: str) -> Optional[int]:
def normalize(sentence):
return sentence.replace('\\', '').replace("{", "").replace("}", "").replace(",", "").replace(" ", "").replace(
"10^9+7", "1000000007").lower().strip()
soup = BeautifulSoup(html, "html.parser")
sentences = soup.get_text().split("\n")
sentences = [normalize(s) for s in sentences if is_mod_context(s)]
mod_cands = set()
for s in sentences:
for regexp in MOD_STRATEGY_RE_LIST:
m = regexp.search(s)
if m is not None:
extracted_val = int(m.group(1))
mod_cands.add(extracted_val)
if len(mod_cands) == 0:
return None
if len(mod_cands) == 1:
return list(mod_cands)[0]
raise MultipleModCandidatesError(mod_cands)
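# For instance, predict_modulo turns "print the answer modulo 10^9+7" into
# "...modulo1000000007" via normalize(), and the second regex extracts 1000000007.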
def predict_yes_no(html: str) -> Tuple[Optional[str], Optional[str]]:
try:
outputs = set()
for sample in ProblemContent.from_html(html).get_samples():
for x in sample.get_output().split("\n"):
outputs.add(x.strip())
except (InputFormatDetectionError, SampleDetectionError) as e:
raise YesNoPredictionFailedError(e)
yes_kws = ["yes", "possible"]
no_kws = ["no", "impossible"]
yes_str = None
no_str = None
for val in outputs:
if val.lower() in yes_kws:
yes_str = val
if val.lower() in no_kws:
no_str = val
return yes_str, no_str
def predict_judge_method(html: str) -> Optional[Judge]:
def normalize(sentence):
return sentence.replace('\\', '').replace("{", "").replace("}", "").replace(",", "").replace(" ", "").replace(
"−", "-").lower().strip()
soup = BeautifulSoup(html, "html.parser")
sentences = soup.get_text().split("\n")
interactive_sentences = []
for s in sentences:
for kw in INTERACTIVE_ANCHORS:
if kw in s:
interactive_sentences.append(s)
if len(interactive_sentences) > 0:
return InteractiveJudge()
decimal_sentences = [normalize(s)
for s in sentences if is_decimal_context(s)]
decimal_keyword_cands = set()
decimal_val_cands = set()
if len(decimal_sentences) > 0: # Decimal
is_absolute = False
is_relative = False
for s in decimal_sentences:
for regexp in DECIMAL_STRATEGY_RE_LIST_KEYWORD:
r = regexp.findall(s)
for t in r:
if t == "絶対誤差" or t == "absolute":
is_absolute = True
elif t == "相対誤差" or t == "relative":
is_relative = True
decimal_keyword_cands.add(t)
for s in decimal_sentences:
for regexp in DECIMAL_STRATEGY_RE_LIST_VAL:
r = regexp.findall(s)
for t in r:
decimal_val_cands.add(int(t))
if len(decimal_val_cands) == 0:
return None
if len(decimal_val_cands) == 1:
if is_absolute and is_relative:
error_type = ErrorType.AbsoluteOrRelative
elif is_absolute:
error_type = ErrorType.Absolute
else:
assert is_relative
error_type = ErrorType.Relative
return DecimalJudge(error_type, 10.0**(int(list(decimal_val_cands)[0])))
raise MultipleDecimalCandidatesError(decimal_val_cands)
return NormalJudge()
def predict_constants(html: str) -> ProblemConstantSet:
try:
yes_str, no_str = predict_yes_no(html)
except YesNoPredictionFailedError:
yes_str = no_str = None
try:
mod = predict_modulo(html)
except MultipleModCandidatesError as e:
logger.warning("Modulo prediction failed -- "
"two or more candidates {} are detected as modulo values".format(e.cands))
mod = None
try:
judge = predict_judge_method(html)
except MultipleModCandidatesError as e:
logger.warning("decimal prediction failed -- "
"two or more candidates {} are detected as decimal values".format(e.cands))
judge = NormalJudge()
return ProblemConstantSet(mod=mod, yes_str=yes_str, no_str=no_str, judge_method=judge)
| Python | 0.000001 | @@ -2998,23 +2998,13 @@
-%3E
-Optional%5B
Judge
-%5D
:%0A
@@ -4463,35 +4463,92 @@
-return None
+# No error value candidate is found%0A return NormalJudge()
%0A%0A if
|
140f96ab4cddebd465ad2fdcca4560c683ca5770 | add django-markdown url for tutorials app | oeplatform/urls.py | oeplatform/urls.py | """oeplatform URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from oeplatform import settings
handler500 = "base.views.handler500"
handler404 = "base.views.handler404"
urlpatterns = [
url(r"^api/", include("api.urls")),
url(r"^", include("base.urls")),
url(r"^user/", include("login.urls")),
url(r"^factsheets/", include("modelview.urls")),
url(r"^dataedit/", include("dataedit.urls")),
url(r"^literature/", include("literature.urls")),
url(r"^ontology/", include("ontology.urls")),
url(r"^captcha/", include("captcha.urls")),
url(r"^tutorials/", include("tutorials.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Python | 0 | @@ -768,98 +768,298 @@
gs%0A%0A
-handler500 = %22base.views.handler500%22%0Ahandler404 = %22base.views.handler404%22%0A%0Aurlpatterns = %5B
+# This is used for Markdown forms in the tutorials app%0Afrom markdownx import urls as markdownx%0A%0Ahandler500 = %22base.views.handler500%22%0Ahandler404 = %22base.views.handler404%22%0A%0Aurlpatterns = %5B%0A # This is used for Markdown forms in the tutorials app%0A url(r'%5Emarkdownx/', include(markdownx)),
%0A
|
7c77a7b14432a85447ff74e7aa017ca56c86e662 | Make api-tokens view exempt from CSRF checks | oidc_apis/views.py | oidc_apis/views.py | from django.http import JsonResponse
from django.views.decorators.http import require_http_methods
from oidc_provider.lib.utils.oauth2 import protected_resource_view
from .api_tokens import get_api_tokens_by_access_token
@require_http_methods(['GET', 'POST'])
@protected_resource_view(['openid'])
def get_api_tokens_view(request, token, *args, **kwargs):
"""
Get the authorized API Tokens.
:type token: oidc_provider.models.Token
:rtype: JsonResponse
"""
api_tokens = get_api_tokens_by_access_token(token, request=request)
response = JsonResponse(api_tokens, status=200)
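    # API tokens are fetched cross-origin by client apps, so allow any origin
    # but forbid caching of the response.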
response['Access-Control-Allow-Origin'] = '*'
response['Cache-Control'] = 'no-store'
response['Pragma'] = 'no-cache'
return response
| Python | 0.000001 | @@ -158,16 +158,69 @@
rce_view
+%0Afrom django.views.decorators.csrf import csrf_exempt
%0A%0Afrom .
@@ -270,16 +270,29 @@
token%0A%0A%0A
+@csrf_exempt%0A
@require
|
69fe8cce71a9046a99932045b1dbe57edc420cae | add __str__ | openhab/openhab.py | openhab/openhab.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Georges Toth (c) 2014 <[email protected]>
#
# python-openhab is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# python-openhab is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with python-openhab. If not, see <http://www.gnu.org/licenses/>.
#
import datetime
import json
import requests
import dateutil.parser
class Item(object):
def __init__(self, name):
self.name = name
self.type_ = ''
self.state_ = ''
def init_from_json(self, j):
self.name = j['name']
self.type_ = j['type']
self.__set_state(j['state'])
@staticmethod
def initj(base_url, j):
if j['type'] == 'SwitchItem':
i = SwitchItem(j)
elif j['type'] == 'DateTimeItem':
i = DateTimeItem(j)
elif j['type'] == 'NumberItem':
i = NumberItem(j)
else:
i = Item(j['name'])
i.init_from_json(j)
i.base_url = base_url
return i
@staticmethod
def init(base_url, name):
j = Item.__get_item(base_url, name)
if j['type'] == 'SwitchItem':
i = SwitchItem(j)
elif j['type'] == 'DateTimeItem':
i = DateTimeItem(j)
        elif j['type'] == 'NumberItem':
            i = NumberItem(j)
        else:
            i = Item(name)
i.state
i.base_url = base_url
return i
@property
def state(self):
j = Item.__get_item(self.base_url, self.name)
self.type_ = j['type']
self.__set_state(j['state'])
return self.state_
@state.setter
def state(self, value):
v = value
if self.type_ == 'DateTimeItem':
if not isinstance(v, datetime.datetime):
raise ValueError()
else:
v = value.strftime('%Y-%m-%d %H:%M:%S')
elif self.type_ == 'NumberItem':
if not (isinstance(value, float) or isinstance(value, int)):
raise ValueError()
else:
v = str(v)
elif self.type_ == 'SwitchItem':
if not (isinstance(value, str) or isinstance(value, unicode)) or not value in ['ON', 'OFF']:
raise ValueError()
else:
raise ValueError()
r = requests.post(self.base_url + '/items/' + self.name, data=v, headers={'accept': 'application/json'})
if r.status_code == requests.codes.ok:
return r.json()
else:
r.raise_for_status()
@staticmethod
def __get_item(base_url, name):
r = requests.get(base_url + '/items/' + name, headers={'accept': 'application/json'})
if r.status_code == requests.codes.ok:
return r.json()
else:
r.raise_for_status()
def __set_state(self, value):
if self.type_ == 'DateTimeItem':
self.state_ = dateutil.parser.parse(value)
elif self.type_ == 'NumberItem':
if value in ('Uninitialized', 'Undefined'):
self.state_ = None
else:
self.state_ = float(value)
else:
self.state_ = value
class DateTimeItem(Item):
def __init__(self, j):
super(DateTimeItem, self).init_from_json(j)
def __gt__(self, other):
return self.state_ > other
def __lt__(self, other):
        return self.state_ < other
def __eq__(self, other):
return self.state_ == other
def __ne__(self, other):
return not self.__eq__(other)
@Item.state.setter
def state(self, value):
if not isinstance(value, datetime.datetime):
raise ValueError()
Item.state.fset(self, value)
class SwitchItem(Item):
def __init__(self, j):
super(SwitchItem, self).init_from_json(j)
@Item.state.setter
def state(self, value):
if not value in ['ON', 'OFF']:
raise ValueError()
Item.state.fset(self, value)
def on(self):
self.state = 'ON'
def off(self):
self.state = 'OFF'
class NumberItem(Item):
def __init__(self, j):
super(NumberItem, self).init_from_json(j)
@Item.state.setter
def state(self, value):
if not (isinstance(value, float) or isinstance(value, int)):
raise ValueError()
Item.state.fset(self, value)
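# A minimal usage sketch, guarded so nothing runs on import. The server URL
# and item name are placeholders for a reachable openHAB instance, not values
# shipped with this module.
if __name__ == '__main__':
    switch = Item.init('http://localhost:8080/rest', 'Demo_Switch')
    print(switch.state)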
| Python | 0.020582 | @@ -3182,16 +3182,129 @@
value%0A%0A
+ def __str__(self):%0A return u'%3C%7B0%7D - %7B1%7D : %7B2%7D%3E'.format(self.type_, self.name, self.state_).encode('utf-8')%0A%0A
%0Aclass D
|
4c12bd87a9ebe10b7d42d09d25c83abad6103b19 | Fix default arguments for info command | scriptorium/__main__.py | scriptorium/__main__.py | #!/usr/bin/env python
#Script to build a scriptorium paper in a cross-platform friendly fashion
import argparse
import argcomplete
import shutil
import sys
import os
import os.path
import yaml
import scriptorium
def build_cmd(args):
"""Creates PDF from paper in the requested location."""
pdf = scriptorium.to_pdf(args.paper, use_shell_escape=args.shell_escape, flatten=args.flatten)
if args.output and pdf != args.output:
shutil.move(pdf, args.output)
def info(args):
"""Function to attempt to extract useful information from a specified paper."""
fname = scriptorium.paper_root(args.paper)
if not fname:
print('{0} does not contain a valid root document.'.format(args.paper))
sys.exit(1)
if args.template:
template = scriptorium.get_template(os.path.join(args.paper, fname))
if not template:
print('Could not find footer indicating template name.')
sys.exit(2)
print(template)
def template_cmd(args):
"""Prints out all installed templates."""
if args.update:
rev = args.update[1] if len(args.update) > 1 else None
scriptorium.update_template(args.update[0], args.template_dir, rev)
if args.list:
templates = scriptorium.all_templates(args.template_dir)
print('\n'.join(templates))
if args.readme:
template = scriptorium.find_template(args.readme, args.template_dir)
template_readme = os.path.join(template, 'README.md')
if template and os.path.exists(template_readme):
with open(template_readme, 'r') as readme:
print(readme.read())
if args.install:
scriptorium.install_template(args.install, args.template_dir)
if args.variables:
variables = scriptorium.list_variables(args.variables, args.template_dir)
print('\n'.join(variables))
if args.manifest:
manifest = scriptorium.get_manifest(args.manifest, args.template_dir)
for kk, vv in manifest.items():
print("{0} -> {1}".format(vv, kk))
def create_cmd(args):
"""Creates a new paper given flags."""
config = {kk:vv for kk, vv in args.config}
if not scriptorium.create(args.output, args.template, force=args.force, config=config):
sys.exit(3)
def doctor_cmd(_):
"""Command for checking the health of scriptorium."""
missing_packages = scriptorium.find_missing_packages()
if missing_packages:
for package in missing_packages:
print('Missing package {0}\n'.format(package))
def config_cmd(args):
"""Command to access configuration values."""
if args.list:
print(yaml.dump(scriptorium.CONFIG, default_flow_style=False))
elif len(args.value) == 1:
print(yaml.dump({args.value[0] : scriptorium.CONFIG[args.value[0]]}))
elif len(args.value) == 2:
scriptorium.CONFIG[args.value[0].upper()] = args.value[1]
scriptorium.save_config()
def clean_cmd(args):
"""Command to clean cruft from current directory."""
scriptorium.clean(args.paper)
def main():
"""Main function for executing scriptorium as a standalone script."""
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action="store_true")
subparsers = parser.add_subparsers()
# Build Command
build_parser = subparsers.add_parser('build')
build_parser.add_argument('paper', default='.', nargs='?',
help='Directory containing paper to build')
build_parser.add_argument('-o', '--output', help='Destination of PDF')
build_parser.add_argument('-s', '--shell-escape', action='store_true', default=False,
help='Flag indicating shell-escape should be used')
build_parser.add_argument('-f', '--flatten', action='store_true', default=False,
help='Flatten root LaTeX file output')
build_parser.set_defaults(func=build_cmd)
# Info Command
info_parser = subparsers.add_parser('info')
info_parser.add_argument('paper', default='.', help='Directory containing paper to make')
info_parser.add_argument('-t', '--template', action='store_true',
help='Flag to extract template')
info_parser.set_defaults(func=info)
# New Command
new_parser = subparsers.add_parser("new")
new_parser.add_argument("output", help="Directory to create paper in.")
new_parser.add_argument("-f", "--force", action="store_true",
help="Overwrite files in paper creation.")
new_parser.add_argument("-t", "--template", help="Template to use in paper.")
new_parser.add_argument("-c", "--config", nargs=2, action='append', default=[],
help='Provide "key" "value" to replace in default paper.')
new_parser.set_defaults(func=create_cmd)
# Template Command
template_parser = subparsers.add_parser("template")
template_parser.add_argument('-l', '--list', action='store_true', default=False,
help='List available templates')
template_parser.add_argument('-u', '--update', nargs='+',
help='Update the given template to the latest version')
template_parser.add_argument('-r', '--readme', help='Print README for the specified template')
template_parser.add_argument('-d', '--template_dir', default=None,
help='Overrides template directory used for listing templates')
template_parser.add_argument('-i', '--install',
help='Install repository at given URL in template directory')
template_parser.add_argument('-v', '--variables',
help='List variables available when using the new command')
template_parser.add_argument('-m', '--manifest',
help='List the manifest of files this template provides for new documents')
template_parser.set_defaults(func=template_cmd)
# Doctor Command
doctor_parser = subparsers.add_parser('doctor')
doctor_parser.set_defaults(func=doctor_cmd)
# Config Command
config_parser = subparsers.add_parser('config')
config_parser.add_argument('-l', '--list', action='store_true',
                               help='List available configuration options and current values')
config_parser.add_argument('value', nargs='*', help='Access configuration value')
config_parser.set_defaults(func=config_cmd)
#Clean Command
clean_parser = subparsers.add_parser('clean')
clean_parser.add_argument('paper', default='.', nargs='?', help='Directory containing paper to clean')
clean_parser.set_defaults(func=clean_cmd)
argcomplete.autocomplete(parser)
args = parser.parse_args()
if args.version:
print(scriptorium.__version__)
return 0
if 'func' in args:
args.func(args)
else:
parser.print_help()
if __name__ == '__main__':
# Only save configuration with main
main()
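# Typical invocations, for illustration only (paper paths and names below are
# placeholders):
#   scriptorium build mypaper -o mypaper.pdf
#   scriptorium info mypaper --template
#   scriptorium template --list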
| Python | 0.000002 | @@ -4069,16 +4069,27 @@
ult='.',
+ nargs='?',
help='D
|
2645159df7cd98fa3577eb8e729b1ed085ca87bd | Fix BuildHelpers to create all necessary directories | scripts/BuildHelpers.py | scripts/BuildHelpers.py | #coding=UTF-8
## Collection of helpers for Build scripts ##
import sys, argparse, subprocess, platform
from xml.etree import ElementTree
from os.path import join, isdir, isfile, basename, exists
from os import listdir, mkdir
from shutil import copy, rmtree
from glob import glob
# Staging repo base url
repo = "http://oss.sonatype.org/content/repositories/comvaadin-%d"
# Directory where the resulting war files are stored
# TODO: deploy results
resultPath = join("result", "demos")
if not exists(resultPath):
mkdir(resultPath)
elif not isdir(resultPath):
print("Result path is not a directory.")
sys.exit(1)
args = None
# Default argument parser
parser = argparse.ArgumentParser(description="Automated staging validation")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--version", help="Vaadin version to use")
group.add_argument("--artifactPath", help="Path to local folder with Vaadin artifacts")
parser.add_argument("--maven", help="Additional maven command line parameters", default=None)
# Parse command line arguments <version>
def parseArgs():
# If no args, give help
if len(sys.argv) == 1:
args = parser.parse_args(["-h"])
else:
args = parser.parse_args()
return args
# Function for determining the path for maven executable
def getMavenCommand():
    # Take only the first line of the where/which output; this handles the
    # case of multiple Maven installations being available on PATH.
if platform.system() == "Windows":
try:
return subprocess.check_output(["where", "mvn.cmd"], universal_newlines=True).split("\n")[0]
except:
try:
return subprocess.check_output(["where", "mvn.bat"], universal_newlines=True).split("\n")[0]
except:
print("Unable to locate mvn with where. Is the maven executable in your PATH?")
else:
try:
return subprocess.check_output(["which", "mvn"], universal_newlines=True).split("\n")[0]
except:
print("Unable to locate maven executable with which. Is the maven executable in your PATH?")
return None
mavenCmd = getMavenCommand()
# Get command line arguments. Parses arguments if needed.
def getArgs():
global args
if args is None:
args = parseArgs()
return args
# Maven Package and Validation
def mavenValidate(artifactId, mvnCmd = mavenCmd, logFile = sys.stdout, repoIds = None):
if repoIds is None:
repoIds = getArgs()
print("Do maven clean package validate")
cmd = [mvnCmd]
if hasattr(repoIds, "version") and repoIds.version is not None:
cmd.append("-Dvaadin.version=%s" % (repoIds.version))
if hasattr(repoIds, "maven") and repoIds.maven is not None:
cmd.extend(repoIds.maven.strip('"').split(" "))
cmd.extend(["clean", "package", "validate"])
print("executing: %s" % (" ".join(cmd)))
subprocess.check_call(cmd, cwd=join(resultPath, artifactId), stdout=logFile)
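# For example, mavenValidate("my-demo") with --version 7.3.0 on the command
# line boils down to running (artifact id and version illustrative):
#   mvn -Dvaadin.version=7.3.0 clean package validate
# inside result/demos/my-demo.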
# Collect .war files to given folder with given naming
def copyWarFiles(artifactId, resultDir = resultPath, name = None):
if name is None:
name = artifactId
copiedWars = []
warFiles = glob(join(resultDir, artifactId, "target", "*.war"))
warFiles.extend(glob(join(resultDir, artifactId, "*", "target", "*.war")))
for warFile in warFiles:
if len(warFiles) == 1:
deployName = "%s.war" % (name)
else:
deployName = "%s-%d.war" % (name, warFiles.index(warFile))
print("Copying .war file %s as %s to result folder" % (basename(warFile), deployName))
copy(warFile, join(resultDir, deployName))
copiedWars.append(join(resultDir, deployName))
return copiedWars
def readPomFile(pomFile):
# pom.xml namespace workaround
root = ElementTree.parse(pomFile).getroot()
nameSpace = root.tag[1:root.tag.index('}')]
print("Using namespace: %s" % (nameSpace))
ElementTree.register_namespace('', nameSpace)
# Read the pom.xml correctly
return ElementTree.parse(pomFile), nameSpace
# Recursive pom.xml update script
def updateRepositories(path, repoIds = None, repoUrl = repo):
# If versions are not supplied, parse arguments
if repoIds is None:
repoIds = getArgs()
# Read pom.xml
pomXml = join(path, "pom.xml")
if isfile(pomXml):
# Read the pom.xml correctly
tree, nameSpace = readPomFile(pomXml)
# NameSpace needed for finding the repositories node
repoNode = tree.getroot().find("{%s}repositories" % (nameSpace))
else:
return
if repoNode is not None:
print("Add staging repositories to " + pomXml)
if hasattr(repoIds, "framework") and repoIds.framework is not None:
# Add framework staging repository
addRepo(repoNode, "repository", "vaadin-%s-staging" % (repoIds.version), repoUrl % (repoIds.framework))
# Find the correct pluginRepositories node
pluginRepo = tree.getroot().find("{%s}pluginRepositories" % (nameSpace))
if pluginRepo is None:
# Add pluginRepositories node if needed
pluginRepo = ElementTree.SubElement(tree.getroot(), "pluginRepositories")
if hasattr(repoIds, "plugin") and repoIds.plugin is not None:
# Add plugin staging repository
addRepo(pluginRepo, "pluginRepository", "vaadin-%s-plugin-staging" % (repoIds.version), repoUrl % (repoIds.plugin))
# Overwrite the modified pom.xml
tree.write(pomXml, encoding='UTF-8')
# Recursive pom.xml search.
for i in listdir(path):
file = join(path, i)
if isdir(file):
updateRepositories(join(path, i), repoIds, repoUrl)
# Add a repository of repoType to given repoNode with id and URL
def addRepo(repoNode, repoType, id, url):
newRepo = ElementTree.SubElement(repoNode, repoType)
idElem = ElementTree.SubElement(newRepo, "id")
idElem.text = id
urlElem = ElementTree.SubElement(newRepo, "url")
urlElem.text = url
# Get a logfile for given artifact
def getLogFile(artifact, resultDir = resultPath):
return open(join(resultDir, "%s.log" % (artifact)), 'w')
def removeDir(subdir):
if '..' in subdir or '/' in subdir:
# Dangerous relative paths.
return
rmtree(join(resultPath, subdir))
def mavenInstall(pomFile, jarFile = None, mvnCmd = mavenCmd, logFile = sys.stdout):
cmd = [mvnCmd, "install:install-file"]
cmd.append("-Dfile=%s" % (jarFile if jarFile is not None else pomFile))
cmd.append("-DpomFile=%s" % (pomFile))
print("executing: %s" % (" ".join(cmd)))
subprocess.check_call(cmd, stdout=logFile)
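# A minimal driver sketch tying the helpers together (the artifact id is a
# placeholder, not a real demo):
#
#   if __name__ == "__main__":
#       artifactId = "my-demo"
#       updateRepositories(join(resultPath, artifactId))
#       mavenValidate(artifactId, logFile=getLogFile(artifactId))
#       copyWarFiles(artifactId)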
| Python | 0.000002 | @@ -215,20 +215,23 @@
stdir, m
-k
+ake
dir
+s
%0Afrom sh
@@ -517,12 +517,15 @@
:%0A%09m
-k
+ake
dir
+s
(res
|
0417707ab0dca78f0daa8aa3b9003913ba90bbac | Add length and highway attribute to the edges | osmABTS/network.py | osmABTS/network.py | """
Road network formation
======================
The primary purpose of this model is to abstract a road connectivity network
from the complicated OSM raw GIS data. The network is going to be stored as a
NetworkX graph.
The nodes are going to be just the traffic junctions and the dead ends of the
road traffic system. And each node has the original id in the raw OSM data as
their identity, and the coordinate stored in the attribute ``coord``.
Each edge is going to be an undirected edge connecting the nodes. They all have
got the attribute of ``name`` for the name of the road, and the attribute of
``travel_time`` for the time needed to traverse the edge by a common traveller.
"""
import networkx as nx
from geopy.distance import vincenty
#
# Constants controlling the behaviour of the code
# -----------------------------------------------
#
# If the ``highway`` tag of a node in the raw OSM data holds one of the
# following values, the node is considered a node in the network.
_NODES_TAGS = [
'traffic_signals',
'crossing',
'turning_circle',
'motorway_junction',
]
# The travel speed on each kind of highway
# In miles per hour
_HIGHWAY_SPEEDS = {
'residential': 20.0,
'primary': 40.0,
'primary_link': 40.0,
'secondary': 35.0,
'tertiary': 30.0,
'footway': 35.0,
'service': 35.0,
'motorway': 70.0,
}
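# Since distances are measured in miles and speeds in miles per hour, the
# ``travel_time`` attribute of an edge comes out in hours; for example a
# 2-mile residential stretch takes 2 / 20.0 = 0.1 h, i.e. six minutes.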
#
# Utility functions
# -----------------
#
def _test_if_node(node):
"""Tests if a node in the raw OSM data a node in the network"""
tags = node.tags
return 'highway' in tags and tags['highway'] in _NODES_TAGS
def _calc_distance(coord1, coord2):
"""Calculates the distance between two points
    A shallow wrapper of the geopy Vincenty distance calculator, returns the
distance in miles.
"""
return vincenty(coord1, coord2).miles
#
# The driver function
# -------------------
#
def form_network_from_osm(raw_osm):
"""Forms a road network from the raw OSM data
:param raw_osm: A :py:class:`readosm.RawOSM` instance for the raw data
:returns: A networkX graph for the road connectivity
"""
net = nx.Graph()
# nodes formation
nodes = raw_osm.nodes
for node_id, node in nodes.iteritems():
if _test_if_node(node):
net.add_node(node_id)
net.node[node_id]['coord'] = node.coord
continue
# edge formation
for way in raw_osm.ways.itervalues():
# test if it is actually a road
tags = way.tags
if 'highway' in tags:
highway = tags['highway']
else:
continue # building or something like that
# connect the nodes in the network
prev_node_id = None # The previous node in the network
# The coordinate of the previous raw node in the OSM data
prev_coord = way.nodes[0].coord
distance = 0.0
for node_id in way.nodes:
node = nodes[node_id]
# Update the distance
curr_coord = node.coord
distance += _calc_distance(curr_coord, prev_coord)
prev_coord = curr_coord
if _test_if_node(node):
# add edge if there is a previous node
if prev_node_id is not None:
# Add the new edge
try:
travel_time = distance / _HIGHWAY_SPEEDS[highway]
                    except KeyError:
                        raise KeyError(
                            'Unknown highway type %s' % highway
                            )
net.add_edge(
node_id, prev_node_id,
travel_time=travel_time,
name=tags.get('name', '')
)
                # Update the previous node regardless of whether one existed before
prev_node_id = node_id
distance = 0.0
return net
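# A minimal usage sketch (the file name is a placeholder, and the RawOSM
# constructor is assumed to work as in osmABTS.readosm):
#
#   from osmABTS.readosm import RawOSM
#   raw = RawOSM('city.osm')
#   net = form_network_from_osm(raw)
#   print(net.number_of_edges())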
| Python | 0 | @@ -679,16 +679,140 @@
aveller.
+%0AAlso there is an attribute %60%60length%60%60 for the length of the actual road and%0Aattribute %60%60highway%60%60 for the type of the road.
%0A%0A%22%22%22%0A%0Ai
@@ -3796,16 +3796,33 @@
el_time,
+ length=distance,
%0A
@@ -3837,16 +3837,33 @@
+ highway=highway,
name=ta
|
e5aaa0a050baf1aaa49b0400843047c1bbec76e1 | allow empty file names | ownpaste/models.py | ownpaste/models.py | # -*- coding: utf-8 -*-
"""
ownpaste.models
~~~~~~~~~~~~~~~
Module with SQL-Alchemy models.
:copyright: (c) 2012 by Rafael Goncalves Martins
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from flask import current_app
from flask.ext.sqlalchemy import SQLAlchemy
from jinja2 import Markup
from fnmatch import fnmatch
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import TextLexer, get_lexer_by_name, guess_lexer, \
guess_lexer_for_filename
from pytz import timezone, utc
import random
import string
import time
db = SQLAlchemy()
class Private(object):
def _random_id(self, length=20):
chars = string.ascii_letters + string.digits
return ''.join(random.choice(chars) for x in range(length))
def __get__(self, obj, cls):
return obj.private_id is not None
def __set__(self, obj, value):
if not value:
obj.private_id = None
return
# we should not override existing private_id
if obj.private_id is not None:
return
# loop until find an unique id
while 1:
obj.private_id = self._random_id()
paste = Paste.query.filter(db.and_(
Paste.private_id == obj.private_id,
Paste.paste_id != obj.paste_id)).first()
if paste is None:
break
class Blocked(object):
def __get__(self, obj, cls):
return obj.blocked_date is not None
def __set__(self, obj, value):
if not value:
obj.blocked_date = None
obj.hits = 0
return
# we should not override existing blocked_date
if obj.blocked_date is not None:
return
obj.blocked_date = datetime.utcnow()
class Paste(db.Model):
paste_id = db.Column(db.Integer, primary_key=True)
private_id = db.Column(db.String(40), unique=True, nullable=True)
language = db.Column(db.String(30))
file_name = db.Column(db.Text, nullable=True)
file_content = db.Column(db.Text)
pub_date = db.Column(db.DateTime)
private = Private()
def __init__(self, file_content, file_name=None, language=None,
private=False):
self.set_file_content(file_content)
self.file_name = file_name
self.language = language
self.private = private
self.pub_date = datetime.utcnow()
# fix file_name, we just want the basename
self.file_name = self.file_name.split('/')[-1]
self.file_name = self.file_name.split('\\')[-1]
# guess language, if needed
if self.language is None:
if self.file_name is None:
lexer = guess_lexer(self.file_content)
else:
try:
lexer = guess_lexer_for_filename(self.file_name,
self.file_content)
except:
lexer = guess_lexer(self.file_content)
# verify if lexer is ok for filename
found = False
for pattern in lexer.filenames:
if fnmatch(self.file_name, pattern):
found = True
break
else:
found = True
if not found:
lexer = TextLexer
self.language = lexer.aliases[0]
def set_file_content(self, fc):
if not isinstance(fc, unicode):
self.file_content = u'\n'.join(fc.decode('utf-8').splitlines())
else:
self.file_content = fc
@staticmethod
def get(paste_id):
if isinstance(paste_id, basestring) and not paste_id.isdigit():
return Paste.query.filter(
Paste.private_id == paste_id).first_or_404()
return Paste.query.filter(
Paste.paste_id == int(paste_id)).first_or_404()
@staticmethod
def all(hide_private=True):
if hide_private:
query = Paste.query.filter(Paste.private_id == None)
else:
query = Paste.query
return query.order_by(Paste.paste_id.desc())
@property
def pub_timestamp(self):
return int(time.mktime(self.pub_date.timetuple()))
@property
def pub_date_localized(self):
date_utc = utc.localize(self.pub_date)
try:
tz = current_app.config.get('TIMEZONE', 'UTC')
return date_utc.astimezone(timezone(tz))
except:
return date_utc
@property
def language_name(self):
return self.lexer.name
@property
def lexer(self):
try:
return get_lexer_by_name(self.language)
except:
return TextLexer
@property
def file_content_highlighted(self):
linenos = current_app.config['PYGMENTS_LINENOS']
style = current_app.config['PYGMENTS_STYLE']
formatter = HtmlFormatter(linenos=linenos, style=style,
cssclass='syntax')
return Markup('<div id="paste">%s</div>' % \
highlight(self.file_content, self.lexer, formatter))
def to_json(self, short=False):
rv = dict(paste_id=self.paste_id, language=self.language,
file_name=self.file_name, pub_timestamp=self.pub_timestamp,
private=self.private, private_id=self.private_id)
if short:
rv.update(file_content_preview=u'\n'.join(
self.file_content.splitlines()[:5]))
else:
rv.update(file_content=self.file_content)
return rv
def __repr__(self):
return '<%s %s: language=%s; private=%r>' % \
(self.__class__.__name__, self.file_name or 'unnamed',
self.language, self.private)
class Ip(db.Model):
ip_id = db.Column(db.Integer, primary_key=True)
ip = db.Column(db.String(45), unique=True)
hits = db.Column(db.Integer)
nonce = db.Column(db.String(16))
blocked_date = db.Column(db.DateTime, nullable=True)
blocked = Blocked()
def __init__(self, ip):
self.ip = ip
self.hits = 0
self.blocked = False
@classmethod
def get(cls, ip):
obj = cls.query.filter(cls.ip == ip).first()
if obj is None:
obj = cls(ip)
db.session.add(obj)
db.session.commit()
return obj
def __repr__(self):
rv = '<%s %s: ' % (self.__class__.__name__, self.ip)
if self.blocked:
rv += 'blocked>'
else:
rv += '%i hits>' % self.hits
return rv
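# A minimal usage sketch; it assumes a Flask application context with the
# tables created, and the paste contents below are placeholders:
#
#   paste = Paste(u'print "hello"', file_name='hello.py', private=True)
#   db.session.add(paste)
#   db.session.commit()
#   Paste.get(paste.private_id)  # private pastes resolve via their random id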
| Python | 0.000007 | @@ -2510,24 +2510,67 @@
he basename%0A
+ if self.file_name is not None:%0A
self
@@ -2612,16 +2612,20 @@
/')%5B-1%5D%0A
+
|
981715431ae2710fb0c19f1a7caa749bef1c1593 | add comments to build commands | packaging/build.py | packaging/build.py | import string
import tarfile
import scriptine
import stat
from scriptine import path, log
from scriptine.shell import call
from ConfigParser import ConfigParser
def load_build_conf():
parser = ConfigParser()
parser.readfp(open(path(__file__).dirname() + 'build.ini'))
config = {}
for key, value in parser.items('build'):
if value.startswith(('./', '/')) or value[1:3] == ':\\':
config[key.lower()] = path(value).abspath()
else:
config[key.lower()] = value
# modify version for console builds
if config['build_with_console'] == 'True':
config['version'] += '-console'
return config
config = load_build_conf()
log.debug(config)
def no_repo(members):
rw_mode = stat.S_IWUSR | stat.S_IRUSR | stat.S_IWGRP | stat.S_IRGRP | stat.S_IROTH
rw_dir_mode = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
for tar_info in members:
tar_info.mode = tar_info.mode | rw_mode
if tar_info.isdir():
tar_info.mode = tar_info.mode | rw_dir_mode
name = tar_info.name
if not '\\.git\\' in name and not '/.git/' in name:
yield tar_info
def unpack_command():
log.mark('unpacking packages')
config['build_dir'].ensure_dir()
with config['build_dir'].as_working_dir():
for fname in [config['geocouch_pkg'], config['couchdb_pkg'],
config['openssl_pkg'], config['gdal_pkg'], config['geos_pkg'],
config['proj4_pkg'], config['mapproxy_templates_pkg']]:
log.info('unpacking %s', fname)
tar = tarfile.open(fname)
tar.extractall(members=no_repo(tar))
tar.close()
keep_erl_libs = [
'common_test-*',
'compiler-*',
'couch-*',
'crypto-*',
'debugger-*',
'ejson-*',
'erlang-oauth*',
'erts-*',
'etap*',
'eunit*',
'hipe*',
'ibrowse-*',
'inets-*',
'kernel-*',
'mochiweb-*',
'os_mon-*',
'parsetools-*',
'pman-*',
'public_key-*',
'reltool-*',
'runtime_tools-*',
'sasl-*',
'snappy-*',
'ssl-*',
'stdlib-*',
'xmerl-*',
]
def prepare_command():
prepare_couchdb_command()
prepare_geocouch_command()
def prepare_couchdb_command():
log.mark('preparing couchdb')
log.info('removing unneeded erlang packages')
with config['couchdb_dir'].as_working_dir():
dest = path('lib_new')
dest.ensure_dir()
lib_dir = path('lib')
for lib_name in keep_erl_libs:
lib = lib_dir.dirs(lib_name)
if lib:
lib[0].move(dest)
else:
log.warn('could not find %s' % lib_name)
lib_dir.rmtree()
dest.move(lib_dir)
for rm in [
'erts-*/src', 'erts-*/include', 'erts-*/man', 'erts-*/doc',
'lib/*/src', 'lib/*/examples', 'lib/*/include',
'share/doc', 'share/man',
]:
for p in path('.').glob(rm):
p.rmtree(ignore_errors=True)
def prepare_geocouch_command():
log.mark('preparing geocouch')
lib_geocouch_dir = config['couchdb_dir'] / 'lib' / 'geocouch'
lib_geocouch_dir.ensure_dir()
(config['geocouch_dir'] / 'ebin').copytree(lib_geocouch_dir / 'ebin')
def clean_command():
config['build_dir'].rmtree()
def build_command():
build_app_command()
build_installer_command()
def all_command():
unpack_command()
prepare_command()
build_app_command()
build_installer_command()
def create_iss_config_command():
iss_tpl = open(path('installer.iss.tpl')).read()
template = string.Template(iss_tpl)
(config['build_dir'] / 'installer.iss').write_text(template.substitute(config))
def build_installer_command():
create_iss_config_command()
config['dist_dir'].ensure_dir()
call([config['inno_dir'] / 'iscc.exe', config['build_dir'] / 'installer.iss'])
def build_app_command():
"""Build GeoBox Python application as .exe"""
geobox_conf = config.get('appconfig')
if geobox_conf:
path(geobox_conf).copy(path('../app/geobox/appconfig.py'))
pyinstaller_spec_tpl = open(path('geobox.spec.tpl')).read()
template = string.Template(pyinstaller_spec_tpl)
pyinstaller_spec = path('geobox.spec')
pyinstaller_spec.write_text(template.substitute(config))
call(['python', config['pyinstaller_dir'] / 'pyinstaller.py', pyinstaller_spec, '-y'])
if __name__ == '__main__':
scriptine.run() | Python | 0 | @@ -3255,24 +3255,64 @@
_command():%0A
+ %22%22%22%0A Clean up build dir.%0A %22%22%22%0A
config%5B'
@@ -3350,24 +3350,84 @@
_command():%0A
+ %22%22%22%0A Build GeoBox application and installer.%0A %22%22%22%0A
build_ap
@@ -3484,24 +3484,82 @@
_command():%0A
+ %22%22%22%0A Unpack, prepare and build everything.%0A %22%22%22%0A
unpack_c
@@ -3882,24 +3882,58 @@
_command():%0A
+ %22%22%22Build GeoBox installer.%22%22%22%0A
create_i
@@ -4143,16 +4143,17 @@
as .exe
+.
%22%22%22%0A
|
7e9dd7469f88d676959141534809b0bc10fc9a66 | Print newline on de-initialization. | picotui/context.py | picotui/context.py | from .screen import Screen
class Context:
def __init__(self, cls=True, mouse=True):
self.cls = cls
self.mouse = mouse
def __enter__(self):
Screen.init_tty()
if self.mouse:
Screen.enable_mouse()
if self.cls:
Screen.cls()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.mouse:
Screen.disable_mouse()
Screen.goto(0, 50)
Screen.cursor(True)
Screen.deinit_tty()
| Python | 0 | @@ -494,16 +494,164 @@
en.deinit_tty()%0A
+ # This makes sure that entire screenful is scrolled up, and%0A # any further output happens on a normal terminal line.%0A print()%0A
|
1a8ab29c9f7a02730cababc077f196f9b21e26d4 | Use own repo slug by default for Bitbucket.deploy_key.all() . | bitbucket/deploy_key.py | bitbucket/deploy_key.py | # -*- coding: utf-8 -*-
URLS = {
# deploy keys
'GET_DEPLOY_KEYS': 'repositories/%(username)s/%(repo_slug)s/deploy-keys',
'SET_DEPLOY_KEY': 'repositories/%(username)s/%(repo_slug)s/deploy-keys',
'GET_DEPLOY_KEY': 'repositories/%(username)s/%(repo_slug)s/deploy-key/%(key_id)s',
'DELETE_DEPLOY_KEY': 'repositories/%(username)s/%(repo_slug)s/deploy-key/%(key_id)s',
}
class DeployKey(object):
""" This class provide services-related methods to Bitbucket objects."""
def __init__(self, bitbucket):
self.bitbucket = bitbucket
self.bitbucket.URLS.update(URLS)
def all(self, repo_slug=None):
""" Get all ssh keys associated with a repo
"""
url = self.bitbucket.url('GET_DEPLOY_KEYS',
username=self.bitbucket.username,
repo_slug=repo_slug)
return self.bitbucket.dispatch('GET', url, auth=self.bitbucket.auth)
def get(self, repo_slug=None, key_id=None):
""" Get one of the ssh keys associated with this repo
"""
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
url = self.bitbucket.url('GET_DEPLOY_KEY',
key_id=key_id,
username=self.bitbucket.username,
repo_slug=repo_slug)
return self.bitbucket.dispatch('GET', url, auth=self.bitbucket.auth)
def create(self, repo_slug=None, key=None, label=None):
""" Associate an ssh key with your repo and return it.
"""
key = '%s' % key
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
url = self.bitbucket.url('SET_DEPLOY_KEY',
username=self.bitbucket.username,
repo_slug=repo_slug)
return self.bitbucket.dispatch('POST',
url,
auth=self.bitbucket.auth,
key=key,
label=label)
def delete(self, repo_slug=None, key_id=None):
""" Delete one of the ssh keys associated with your repo.
Please use with caution as there is NO confimation and NO undo.
"""
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
url = self.bitbucket.url('DELETE_DEPLOY_KEY',
key_id=key_id,
username=self.bitbucket.username,
repo_slug=repo_slug)
return self.bitbucket.dispatch('DELETE', url, auth=self.bitbucket.auth)
| Python | 0 | @@ -686,32 +686,96 @@
epo%0A %22%22%22%0A
+ repo_slug = repo_slug or self.bitbucket.repo_slug or ''%0A
url = se
|
30843082f3847c3117db62cfb5015f7b993292d2 | Add PIP_UPGRADE=True to localrc of ucsm test job | neutron_ci/ci/tests/test_ml2_ucsm.py | neutron_ci/ci/tests/test_ml2_ucsm.py | # Copyright 2014 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Nikolay Fedotov, Cisco Systems, Inc.
import os
from ci import PARENT_FOLDER_PATH
from ci.lib.test_case import BaseTestCase
from fabric.operations import local
TEST_LIST_FILE = os.path.join(PARENT_FOLDER_PATH, 'cisco_plugin_tests.txt')
Q_PLUGIN_EXTRA_CONF_PATH = \
'/opt/stack/networking-cisco/etc/neutron/plugins/ml2'
Q_PLUGIN_EXTRA_CONF_FILES = 'ml2_conf_cisco.ini'
LOCAL_CONF = '''
[[local|localrc]]
NEUTRON_REPO={neutron_repo}
NEUTRON_BRANCH={neutron_branch}
MYSQL_PASSWORD=nova
RABBIT_PASSWORD=nova
SERVICE_TOKEN=nova
SERVICE_PASSWORD=nova
ADMIN_PASSWORD=nova
ENABLED_SERVICES=g-api,g-reg,key,n-api,n-crt,n-obj,n-cpu,n-cond,n-sch,n-novnc,n-xvnc,n-cauth,rabbit
enable_service mysql
disable_service n-net
enable_service q-svc
enable_service q-agt
enable_service q-l3
enable_service q-dhcp
enable_service q-meta
enable_service q-lbaas
enable_service neutron
enable_service tempest
enable_plugin networking-cisco {net_cisco_repo} {net_cisco_ref}
enable_service net-cisco
LIBVIRT_TYPE=qemu
NOVA_USE_QUANTUM_API=v2
VOLUME_BACKING_FILE_SIZE=2052M
Q_PLUGIN=ml2
Q_ML2_PLUGIN_MECHANISM_DRIVERS=openvswitch,cisco_ucsm
Q_ML2_PLUGIN_TYPE_DRIVERS=vlan
ENABLE_TENANT_TUNNELS=False
Q_ML2_TENANT_NETWORK_TYPE=local
Q_PLUGIN_EXTRA_CONF_PATH=({Q_PLUGIN_EXTRA_CONF_PATH})
Q_PLUGIN_EXTRA_CONF_FILES=({Q_PLUGIN_EXTRA_CONF_FILES})
ML2_VLAN_RANGES=physnet1:100:200
PHYSICAL_NETWORK=physnet1
OVS_PHYSICAL_BRIDGE=br-eth1
TENANT_VLAN_RANGE=100:200
ENABLE_TENANT_VLANS=True
IP_VERSION=4
API_RATE_LIMIT=False
VERBOSE=True
DEBUG=True
LOGFILE=/opt/stack/screen-logs/stack.sh.log
USE_SCREEN=True
SCREEN_LOGDIR=/opt/stack/screen-logs
RECLONE=True
[[post-config|{Q_PLUGIN_EXTRA_CONF_PATH}/{Q_PLUGIN_EXTRA_CONF_FILES}]]
[ml2_cisco_ucsm]
ucsm_ip=172.21.19.10
ucsm_username=admin
ucsm_password=Cisc0123
ucsm_host_list=neutron1:neutron1
'''
class ML2UCSMTest(BaseTestCase):
neutron_repo = os.environ.get('NEUTRON_REPO')
neutron_ref = os.environ.get('NEUTRON_REF')
net_cisco_repo = os.environ.get('NET_CISCO_REPO')
net_cisco_ref = os.environ.get('NET_CISCO_REF')
@classmethod
def setUpClass(cls):
BaseTestCase.setUpClass()
local_conf = LOCAL_CONF.format(
neutron_repo=cls.neutron_repo,
neutron_branch=cls.neutron_ref,
net_cisco_repo=cls.net_cisco_repo,
net_cisco_ref=cls.net_cisco_ref,
Q_PLUGIN_EXTRA_CONF_PATH=Q_PLUGIN_EXTRA_CONF_PATH,
Q_PLUGIN_EXTRA_CONF_FILES=Q_PLUGIN_EXTRA_CONF_FILES)
cls.devstack.local_conf = local_conf
cls.devstack.clone()
script = 'python ' + os.path.join(
PARENT_FOLDER_PATH,
'files/ucsm/ucsm_delete_admin_sessions.py')
local(script)
def test_tempest(self):
self.assertFalse(self.devstack.stack())
self.assertFalse(self.devstack.run_tempest(
test_list_path=TEST_LIST_FILE))
| Python | 0.000001 | @@ -2245,16 +2245,33 @@
ONE=True
+%0APIP_UPGRADE=True
%0A%0A%5B%5Bpost
|
85b1cf05b63da5e627f0f37276f80a43781aee4d | true AsyncConnectionContextManager | aioworkers_redis/base.py | aioworkers_redis/base.py | from typing import Optional, Union
import aioredis
from aioworkers.core.base import (
AbstractConnector, AbstractNestedEntity, LoggingEntity
)
from aioworkers.core.config import ValueExtractor
from aioworkers.core.formatter import FormattedEntity
class Connector(
AbstractNestedEntity,
AbstractConnector,
FormattedEntity,
LoggingEntity,
):
def __init__(self, *args, **kwargs):
self._joiner: str = ':'
self._prefix: str = ''
self._connector: Optional[Connector] = None
self._pool: Optional[aioredis.Redis] = None
super().__init__(*args, **kwargs)
def set_config(self, config):
self._joiner = config.get('joiner', ':')
self._prefix = config.get('prefix', '')
cfg = config.new_parent(logger='aioworkers_redis')
c = cfg.get('connection')
if not isinstance(c, str):
if cfg.get('dsn'):
cfg = cfg.new_child(connection=dict(dsn=cfg.get('dsn')))
elif c.startswith('redis://'):
cfg = cfg.new_child(connection=dict(dsn=c))
elif not c.startswith('.'):
raise ValueError('Connector link must be startswith point .%s' % c)
super().set_config(cfg)
@property
def pool(self) -> aioredis.Redis:
connector = self._connector or self._get_connector()
assert connector._pool is not None, 'Pool not ready'
return connector._pool
def _get_connector(self) -> 'Connector':
cfg = self.config.get('connection')
if isinstance(cfg, str):
self.logger.debug('Connect to %s', cfg)
self._connector = self.context.get_object(cfg)
assert self._connector is not None, 'Not found reference %s' % cfg
else:
self._connector = self
return self._connector
def get_child_config(
self, item: str, config: Optional[ValueExtractor] = None,
) -> Optional[ValueExtractor]:
if config is None:
result = ValueExtractor(dict(
name=f'{self.config.name}.{item}',
))
else:
result = super().get_child_config(item, config)
if self._connector is None:
connection = self.config.get('connection')
if not isinstance(connection, str):
connection = f'.{self.config.name}'
result = result.new_parent(
connection=connection,
)
else:
result = result.new_child(
connection=f'.{self._connector.config.name}',
)
return result.new_parent(
prefix=self.raw_key(item),
joiner=self._joiner,
format=self.config.get('format'),
)
def raw_key(self, key: str) -> str:
k = [i for i in (self._prefix, key) if i]
return self._joiner.join(k)
def clean_key(self, raw_key: Union[str, bytes]) -> str:
result = raw_key[len(self._prefix) + len(self._joiner):]
if isinstance(result, str):
return result
return result.decode()
def acquire(self):
return AsyncConnectionContextManager(self)
async def connect(self):
connector = self._connector or self._get_connector()
if connector is not self:
return
cfg = self.config.get('connection')
c = self
while True:
if c._connector is not c:
c = c._connector
else:
if self.config.get('connect'):
self._connector = self
cfg = c.config.get('connection')
break
if cfg:
cfg = dict(cfg)
else:
cfg = {}
self._pool = await self.pool_factory(cfg)
async def pool_factory(self, cfg: dict) -> Optional[aioredis.Redis]:
if cfg.get('dsn'):
address = cfg.pop('dsn')
elif cfg.get('address'):
address = cfg.pop('address')
else:
address = cfg.pop('host', 'localhost'), cfg.pop('port', 6379)
self.logger.debug('Create pool with address %s', address)
return await aioredis.create_redis_pool(
address, **cfg, loop=self.loop,
)
async def disconnect(self):
pool = self._pool
if pool is not None:
self.logger.debug('Close pool')
pool.close()
await pool.wait_closed()
def decode(self, b):
if b is not None:
return super().decode(b)
def encode(self, b):
if b is not None:
return super().encode(b)
async def __aenter__(self):
await self.connect()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.disconnect()
class AsyncConnectionContextManager:
__slots__ = ('_connector',)
def __init__(self, connector: Connector):
self._connector: Connector = connector
async def __aenter__(self):
return self._connector.pool
async def __aexit__(self, exc_type, exc_value, tb):
pass
| Python | 0.99889 | @@ -4838,16 +4838,30 @@
nector',
+ '_connection'
)%0A%0A d
@@ -4993,35 +4993,122 @@
-return self._connector.pool
+self._connection = await self._connector.pool%0A self._connection.__enter__()%0A return self._connection
%0A%0A
@@ -5173,9 +5173,55 @@
-pass
+self._connection.__exit__(exc_type, exc_value, tb)
%0A
|
ce53d45f48b8ad64dc97c2924f20d639292ecaed | Fix issues reported by flake8 in vm_manager | akanda/rug/vm_manager.py | akanda/rug/vm_manager.py | import netaddr
import time
from oslo.config import cfg
from akanda.rug.api import configuration
from akanda.rug.api import nova
from akanda.rug.api import quantum
from akanda.rug.api import akanda_client as router_api
DOWN = 'down'
UP = 'up'
CONFIGURED = 'configured'
RESTART = 'restart'
MAX_RETRIES = 3
BOOT_WAIT = 60
RETRY_DELAY = 1
class VmManager(object):
def __init__(self, router_id, log):
self.router_id = router_id
self.log = log
self.state = DOWN
self._logical_router = None
self.quantum = quantum.Quantum(cfg.CONF)
def update_state(self, silent=False):
self._ensure_cache()
addr = _get_management_address(router)
for i in xrange(MAX_RETRIES):
try:
if router_api.is_alive(addr, cfg.CONF.akanda_mgt_service_port):
break
except:
if not silent:
self.log.exception(
'Alive check failed. Attempt %d of %d',
i,
MAX_RETRIES
)
time.sleep(RETRY_DELAY)
else:
self.state = DOWN
if self.state == DOWN:
self.state = UP
return self.state
def boot(self):
self._logical_router = self.quantum.get_router_detail(router_id)
self.log.info('Booting router')
nova_client = nova.Nova(cfg.CONF)
nova_client.reboot_router_instance(self._logical_router)
self.state = DOWN
start = time.time()
while time.time() - start < BOOT_WAIT:
if self.update_state(silent=True) in (UP, CONFIGURED):
return
self.log.debug('Router has not finished booting. IP: %s', addr)
self.log.error('Router failed to boot within %d secs', BOOT_WAIT)
def stop(self):
self._ensure_cache()
self.log.info('Destroying router')
nova_client = nova.Nova(cfg.CONF)
nova_client.reboot_router_instance(self._logical_router)
def configure(self):
self.log.debug('Begin router config')
self.state = UP
self._logical_router = self.quantum.get_router_detail(router_id)
addr = _get_management_address(self._logical_router)
interfaces = router_api.get_interfaces(
addr,
cfg.CONF.akanda_mgt_service_port
)
if not self._verify_interfaces(self._logical_router, interfaces):
self.state = RESTART
return
config = configuration.build_config(
self.quantum,
self._logical_router,
interfaces
)
for i in xrange(MAX_RETRIES):
try:
router_api.update_config(
addr,
cfg.CONF.akanda_mgt_service_port,
config
)
except Exception as e:
self.log.exception('failed to update config')
time.sleep(i + 1)
else:
self.state = CONFIGURED
self.log.debug('Router config updated')
return
def _ensure_cache(self):
if self._logical_router:
return
self._logical_router = self.quantum.get_router_detail(router_id)
def _verify_interfaces(self, logical_config, interfaces):
router_macs = set((iface['lladdr'] for iface in interfaces))
self.log.debug('MACs found: %s', ', '.join(sorted(router_macs)))
expected_macs = set((p.mac_address for p in router.internal_ports))
expected_macs.add(router.management_port.mac_address)
expected_macs.add(router.external_port.mac_address)
self.log.debug('MACs expected: %s', ', '.join(sorted(expected_macs)))
return router_macs == expected_macs
def _get_management_address(router):
network = netaddr.IPNetwork(cfg.CONF.management_prefix)
tokens = ['%02x' % int(t, 16)
for t in router.management_port.mac_address.split(':')]
eui64 = int(''.join(tokens[0:3] + ['ff', 'fe'] + tokens[3:6]), 16)
# the bit inversion is required by the RFC
return str(netaddr.IPAddress(network.value + (eui64 ^ 0x0200000000000000)))
| Python | 0.000003 | @@ -681,22 +681,30 @@
address(
+self.
router
+_id
)%0A
@@ -1344,32 +1344,37 @@
t_router_detail(
+self.
router_id)%0A%0A
@@ -1771,23 +1771,59 @@
ting
-.
I
-P
+D
: %25s',
- addr
+%0A self.router_id
)%0A%0A
@@ -2247,32 +2247,37 @@
t_router_detail(
+self.
router_id)%0A%0A
@@ -2976,13 +2976,8 @@
tion
- as e
:%0A
@@ -3351,16 +3351,21 @@
_detail(
+self.
router_i
@@ -3601,17 +3601,16 @@
s = set(
-(
p.mac_ad
@@ -3618,18 +3618,60 @@
ress
- for p in
+%0A for p in self._logical_
rout
@@ -3688,17 +3688,16 @@
l_ports)
-)
%0A
@@ -3711,24 +3711,38 @@
ed_macs.add(
+self._logical_
router.manag
@@ -3791,16 +3791,30 @@
acs.add(
+self._logical_
router.e
|
6650e5898ca058d1dc8494dbc3d0ba2e2d8c1e4c | Compute the distance between two points on the globe and determine if air travel is possible between them in the time between when localities were recorded | alerts/geomodel/alert.py | alerts/geomodel/alert.py | from datetime import datetime
from operator import attrgetter
from typing import List, NamedTuple, Optional
import netaddr
from alerts.geomodel.config import Whitelist
from alerts.geomodel.locality import State, Locality
_DEFAULT_SUMMARY = 'Authenticated action taken by a user outside of any of '\
'their known localities.'
# TODO: Switch to dataclasses when we move to Python3.7+
class Origin(NamedTuple):
'''A description of a location.
'''
city: str
country: str
latitude: float
longitude: float
geopoint: str
class Alert(NamedTuple):
'''A container for the data the alerts output by GeoModel contain.
'''
source: str
category: str
type_: str
username: str
sourceipaddress: str
timestamp: datetime
origin: Origin
tags: List[str]
summary: str
def new(
username: str,
sourceip: str,
origin: Origin,
summary: str = _DEFAULT_SUMMARY
) -> 'Alert':
'''Produce a new `Alert` with default values filled.
'''
return Alert(
source='geomodel',
category='geomodel',
type_='geomodel',
username=username,
sourceipaddress=sourceip,
timestamp=datetime.now(),
origin=origin,
tags=['geomodel'],
summary=summary)
def _travel_possible(loc1: Locality, loc2: Locality) -> bool:
'''Given two localities, determine whether it would be possible for a user
to have travelled from the former to the latter in the time between when the
actions took place.
'''
return False
def alert(user_state: State, whitelist: Whitelist) -> Optional[Alert]:
'''Determine whether an alert should fired given a particular user's
locality state. If an alert should fire, an `Alert` is returned, otherwise
this function returns `None`.
'''
ignore_cidrs = [netaddr.IPSet([cidr]) for cidr in whitelist.cidrs]
if user_state.username in whitelist.users:
return None
locs_to_consider = []
for loc in sorted(user_state.localities, key=attrgetter('lastaction')):
ip = netaddr.IPAddress(loc.sourceipaddress)
if all([ip not in cidr for cidr in ignore_cidrs]):
locs_to_consider.append(loc)
if len(locs_to_consider) < 2:
return None
locations = locs_to_consider[-2:]
if _travel_possible(*locations):
return None
(ip, city, country, lat, lon) = (
locations[1].sourceipaddress,
locations[1].city,
locations[1].country,
locations[1].latitude,
locations[1].longitude
)
return Alert.new(
user_state.username,
ip,
Origin(city, country, lat, lon, ''))
| Python | 0.998967 | @@ -23,16 +23,28 @@
atetime%0A
+import math%0A
from ope
@@ -230,16 +230,94 @@
ality%0A%0A%0A
+_AIR_TRAVEL_SPEED = 1000.0 # km/h%0A%0A_EARTH_RADIUS = 6373.0 # km # approximate%0A%0A
_DEFAULT
@@ -1704,20 +1704,715 @@
-return False
+lat1 = math.radians(loc1.latitude)%0A lat2 = math.radians(loc2.latitude)%0A lon1 = math.radians(loc1.longitude)%0A lon2 = math.radians(loc2.longitude)%0A%0A dlat = lat2 - lat1%0A dlon = lon2 - lon1%0A%0A a = math.sin(dlat / 2.0) ** 2 +%5C%0A math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2.0) ** 2%0A c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))%0A%0A distance = c * _EARTH_RADIUS%0A%0A seconds_between = (loc2.lastaction - loc1.lastaction).total_seconds()%0A hours_between = math.ceil(seconds_between / 60.0 / 60.0)%0A%0A # We pad the time with an hour to account for things like planes being%0A # slowed, network delays, etc.%0A return (distance / _AIR_TRAVEL_SPEED) %3C= (hours_between - 1)
%0A%0Ade
|
451c821118eff98d7e92b3a3f46b1a76048abbb5 | add wiki canned response | androiddev_bot/config.py | androiddev_bot/config.py | import praw
# Put your vars here
suspect_title_strings = ['?', 'help', 'stuck', 'why', 'my', 'feedback']
subreddit = 'androiddev'
# Canned responses
cans = {
'questions_thread': "Removed because, per sub rules, this doesn't merit its own post. We have a questions thread every day, please use it for questions like this.",
'rules': 'Removed because posts like this are against the sub rules.'
}
# Specify the keyword and what days they should be removed
weekly_threads = {
'anything': {
'day': 'Saturday',
'name': 'Weekly \"anything goes\"'
},
'hiring': {
'day': 'Monday',
'name': 'Weekly \"who\'s hiring?\"'
}
}
flair_mapping = {
'Library': 'library',
'Discussion': 'discussion',
'News': 'news',
'Tech Talk': 'talk',
}
def post_is_suspicious(post_to_check: praw.objects.Submission) -> bool:
"""
A function that can be passed a submission to check against and return whether or not it's "suspicious" or otherwise
deserving of closer attention.
:type post_to_check: praw.objects.Submission
:rtype : bool
:param post_to_check: The Submission instance to check
:return: True if suspicious, False if now
"""
return \
any(word in post_to_check.title.lower() for word in suspect_title_strings) \
or post_to_check.domain == 'stackoverflow.com' \
or (post_to_check.selftext and 'stackoverflow' in post_to_check.selftext.lower()) \
or (post_to_check.selftext_html and any(block in post_to_check.selftext_html for block in ['<code', '%3Ccode'])) | Python | 0 | @@ -396,16 +396,157 @@
rules.'
+,%0A 'wiki': %22Removed because relevant information can be found in the /r/androiddev %5Bwiki%5D(https://www.reddit.com/r/androiddev/wiki/index)%22
%0A%7D%0A%0A# Sp
@@ -1717,8 +1717,9 @@
code'%5D))
+%0A
|
0d561690eb3a59a50b54cdc2ab36130114a5bea1 | Fix bugs | ansible-check-builder.py | ansible-check-builder.py | # Author - Sagi Yosef
# Creation Date 30.07.2017
#
# This script builds sensu check json file by user input.
#
# --- HOW TO RUN ---
# Copy the script to the ansible server and run the script
#
# python check-json-builder-standalone.py
#
# *** Notice you have to install python ***
import sys
import json
import os
import argparse
CHECK_PATH="/etc/ansible/roles/sagyos.advanced.monitoring/checks/"
SCRIPTS_PATH="/etc/ansible/roles/sagyos.advanced.monitoring/scripts/"
CHECK_EXTENSION=".json"
# Check if checks directory exists and creates it if not
if not os.path.exists(CHECK_PATH):
os.makedirs(CHECK_PATH)
# Check if checks directory exists and creates it if not
if not os.path.exists(SCRIPTS_PATH):
os.makedirs(SCRIPTS_PATH)
# Set arguments for the tool
def setArgParse():
# Setting the parser arguments
parser = argparse.ArgumentParser(prog="ansible-check-builder",
description="This script builds a check json file for the Sensu monitoring system." +
"\nIf using arguments only one check wil be created at a time" +
"\n***NOTICE*** If using arguments option all arguments except version are required")
parser.add_argument("-v", "--version", action='version', version='%(prog)s 2.0', help="Prints the tool version")
parser.add_argument("-g", "--group", help="Ansible group")
parser.add_argument("-n", "--name", help="The check's name")
parser.add_argument("-c", "--command", help="The check's command to execute on the monitored machine")
parser.add_argument("-i", "--interval", help="The check's interval time (seconds)")
return (parser.parse_args())
# Write a dictionary to file
def writeToFile(jsCheckJson, strGroup, strCheckName):
try:
# Creating a file and dumping the check's json
fCheck=open(CHECK_PATH + strGroup + "/" + strCheckName + CHECK_EXTENSION, 'w+')
json.dump(jsCheckJson, fCheck, indent=2, sort_keys=True)
fCheck.close()
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
# Print the check final json
print json.dumps(jsCheckJson, indent=2, sort_keys=True)
def argsProvided(args, jsCheckJson):
# Check if scripts directory for requested group exists and creates it if not
if not os.path.exists(SCRIPTS_PATH + args.group):
os.makedirs(SCRIPTS_PATH + args.group)
# Check if scripts directory for requested group exists and creates it if not
if not os.path.exists(CHECK_PATH + args.group):
os.makedirs(CHECK_PATH + args.group)
jsCheckJson['checks'][args.name]={}
jsCheckJson['checks'][args.name]['command']=args.command
jsCheckJson['checks'][args.name]['subscribers']=[]
# Setting the subscribers
jsCheckJson['checks'][strCheckName]['subscribers'].append(args.group)
# Check if the user wants to add more subscribers
bMoreSubscribers=raw_input("Do you want to add subscribers? [y/n] ")
if (bMoreSubscribers == "y" or bMoreSubscribers == "yes"):
# Get list of subscribers
print "Enter subscibers names. For exit enter exit"
strSubscribers=raw_input()
# While user didn't enter exit, subscribers will be added
while (strSubscribers != "exit"):
jsCheckJson['checks'][strCheckName]['subscribers'].append(strSubscribers)
strSubscribers=raw_input()
# Set the interval
jsCheckJson['checks'][args.name]['interval']=args.interval
# Set the check standalone attribute to true
jsCheckJson['checks'][strCheckName]['standalone']=bool(True)
# Write the json to file
writeToFile(jsCheckJson, strAnsibleGroup, strCheckName)
# This function creates a check json file based on execution time user input
def argsNotProvided(jsCheckJson):
# Get ansible monitored group
strAnsibleGroup=raw_input("Enter the ansible group name (must be the same): ")
# Check if scripts directory for requested group exists and creates it if not
if not os.path.exists(SCRIPTS_PATH + strAnsibleGroup):
os.makedirs(SCRIPTS_PATH + strAnsibleGroup)
# Check if scripts directory for requested group exists and creates it if not
if not os.path.exists(CHECK_PATH + strAnsibleGroup):
os.makedirs(CHECK_PATH + strAnsibleGroup)
# Get the checks uniqe name
strCheckName=raw_input("Enter check uniqe name: ")
jsCheckJson['checks'][strCheckName]={}
# Get checks command
print "Enter the check's command"
print "-------------------------"
jsCheckJson['checks'][strCheckName]['command']=raw_input()
jsCheckJson['checks'][strCheckName]['subscribers']=[]
# Setting the subscribers to the same as the ansible group
jsCheckJson['checks'][strCheckName]['subscribers'].append(strAnsibleGroup)
# Check if the user wants to add more subscribers
bMoreSubscribers=raw_input("Do you want to add subscribers? [y/n] ")
if (bMoreSubscribers == "y" or bMoreSubscribers == "yes"):
# Get list of subscribers
print "Enter subscibers names. For exit enter exit"
strSubscribers=raw_input()
# While user didn't enter exit, subscribers will be added
while (strSubscribers != "exit"):
jsCheckJson['checks'][strCheckName]['subscribers'].append(strSubscribers)
strSubscribers=raw_input()
# Getting the checks interval
while True:
try:
# Get checks interval time in seconds
jsCheckJson['checks'][strCheckName]['interval']=int(raw_input("Enter interval time (seconds): "))
except ValueError:
print("Please enter integer")
continue
else:
# Interval was successfully parsed!
break
# Set the check standalone attribute to true
jsCheckJson['checks'][strCheckName]['standalone']=bool(True)
# Write the json to file
writeToFile(jsCheckJson, strAnsibleGroup, strCheckName)
# Clear the dictonary
jsCheckJson['checks'].clear()
prArgs=setArgParse()
# Creating check definition
jsCheckJson={}
jsCheckJson['checks']={}
# Check if the user entered arguments
if not len(sys.argv) > 1:
# While user don't want to quit new checks will be created
while True:
argsNotProvided(jsCheckJson)
strUserChoice=raw_input('Continue to another check? y/n: ')
# Check if user want to exit
if (strUserChoice == "n"):
break
elif not len(sys.argv) == 9:
print "Please provide all the arguments except -v or --version"
else:
argsProvided(prArgs, jsCheckJson) | Python | 0.000004 | @@ -2775,33 +2775,30 @@
n%5B'checks'%5D%5B
-strCheckN
+args.n
ame%5D%5B'subscr
@@ -3255,33 +3255,30 @@
n%5B'checks'%5D%5B
-strCheckN
+args.n
ame%5D%5B'subscr
@@ -3412,16 +3412,20 @@
erval'%5D=
+int(
args.int
@@ -3429,16 +3429,17 @@
interval
+)
%0A %0A #
@@ -3501,33 +3501,30 @@
n%5B'checks'%5D%5B
-strCheckN
+args.n
ame%5D%5B'standa
@@ -3602,34 +3602,26 @@
on,
-strAnsibleGroup, strCheckN
+args.group, args.n
ame)
@@ -6343,24 +6343,25 @@
ded(prArgs, jsCheckJson)
+%0A
|
831e288d99cb978d18adba26049c91801b8c4473 | remove the code to process kwargs because these are done in the parent class' methods already in ini backend | anyconfig/backend/ini.py | anyconfig/backend/ini.py | #
# Copyright (C) 2011 - 2017 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
# pylint: disable=unused-argument
r"""INI backend:
- Format to support: INI or INI like ones
- Requirements: The following standard module which should be available always.
- ConfigParser in python 2 standard library:
https://docs.python.org/2.7/library/configparser.html
- configparser in python 3 standard library:
https://docs.python.org/3/library/configparser.html
- Development Status :: 4 - Beta
- Limitations: It cannot process nested configuration dicts correctly due to
  the limitation of the module and the format itself.
- Special options:
- Use 'ac_parse_value' boolean keyword option if you want to parse values by
custom parser, anyconfig.backend.ini._parse.
Changelog:
.. versionchanged:: 0.3
- Introduce 'ac_parse_value' keyword option to switch behaviors, same as
original configparser and rich backend parsing each parameter values.
"""
from __future__ import absolute_import
import anyconfig.backend.base
import anyconfig.parser as P
import anyconfig.utils
from anyconfig.compat import configparser, iteritems, OrderedDict
from anyconfig.backend.base import mk_opt_args
_SEP = ','
def _noop(val, *args, **kwargs):
"""
Parser does nothing.
"""
# It means nothing but can suppress 'Unused argument' pylint warns.
# (val, args, kwargs)[0]
return val
def _parse(val_s, sep=_SEP):
"""
FIXME: May be too naive implementation.
:param val_s: A string represents some value to parse
:param sep: separator between values
>>> _parse(r'"foo string"')
'foo string'
>>> _parse("a, b, c")
['a', 'b', 'c']
>>> _parse("aaa")
'aaa'
"""
if (val_s.startswith('"') and val_s.endswith('"')) or \
(val_s.startswith("'") and val_s.endswith("'")):
return val_s[1:-1]
elif sep in val_s:
return [P.parse(x) for x in P.parse_list(val_s)]
else:
return P.parse(val_s)
def _to_s(val, sep=", "):
"""Convert any to string.
:param val: An object
:param sep: separator between values
>>> _to_s([1, 2, 3])
'1, 2, 3'
>>> _to_s("aaa")
'aaa'
"""
if anyconfig.utils.is_iterable(val):
return sep.join(str(x) for x in val)
else:
return str(val)
def _make_parser(container, **kwargs):
"""
:param container: any callable to make container
:return: (container, keyword args to be used, parser object)
"""
if kwargs.get("ac_ordered", False) or kwargs.get("dict_type", False):
kwargs["dict_type"] = container = OrderedDict
if "dict_type" not in kwargs and not kwargs.get("ac_ordered", True):
kwargs["dict_type"] = container
    # Optional arguments for configparser.SafeConfigParser{,readfp}
kwargs_0 = mk_opt_args(("defaults", "dict_type", "allow_no_value"), kwargs)
kwargs_1 = mk_opt_args(("filename", ), kwargs)
try:
parser = configparser.SafeConfigParser(**kwargs_0)
except TypeError:
# .. note::
# It seems ConfigParser.*ConfigParser in python 2.6 does not support
# 'allow_no_value' option parameter, and TypeError will be thrown.
kwargs_0 = mk_opt_args(("defaults", "dict_type"), kwargs)
parser = configparser.SafeConfigParser(**kwargs_0)
return (container, kwargs_1, parser)
def _load(stream, container=dict, sep=_SEP, **kwargs):
"""
:param stream: File or file-like object provides ini-style conf
:param container: any callable to make container
    :param sep: Separator string
:return: Dict or dict-like object represents config values
"""
(container, kwargs_1, parser) = _make_parser(container, **kwargs)
parse = _parse if kwargs.get("ac_parse_value") else anyconfig.utils.noop
cnf = container()
parser.readfp(stream, **kwargs_1)
# .. note:: Process DEFAULT config parameters as special ones.
defaults = parser.defaults()
if defaults:
cnf["DEFAULT"] = container()
for key, val in iteritems(defaults):
cnf["DEFAULT"][key] = parse(val, sep)
for sect in parser.sections():
cnf[sect] = container()
for key, val in parser.items(sect):
cnf[sect][key] = parse(val, sep)
return cnf
def _dumps_itr(cnf):
"""
:param cnf: Configuration data to dump
"""
dkey = "DEFAULT"
for sect, params in iteritems(cnf):
yield "[%s]" % sect
for key, val in iteritems(params):
if sect != dkey and dkey in cnf and cnf[dkey].get(key) == val:
continue # It should be in [DEFAULT] section.
yield "%s = %s" % (key, _to_s(val))
yield '' # it will be a separator between each sections.
def _dumps(cnf, **kwargs):
"""
:param cnf: Configuration data to dump
:param kwargs: optional keyword parameters to be sanitized :: dict
:return: String representation of `cnf` object in INI format
"""
return '\n'.join(l for l in _dumps_itr(cnf))
class Parser(anyconfig.backend.base.FromStreamLoader,
anyconfig.backend.base.ToStringDumper):
"""
Ini config files parser.
"""
_type = "ini"
_extensions = ["ini"]
_load_opts = ["defaults", "dict_type", "allow_no_value", "filename",
"ac_parse_value"]
dump_to_string = anyconfig.backend.base.to_method(_dumps)
def load_from_stream(self, stream, container, **options):
"""
Load config from given file like object `stream`.
:param stream: Config file or file like object
        :param container: callable to make a container object
:param options: optional keyword arguments
:return: Dict-like object holding config parameters
"""
return _load(stream, container=container, **options)
# vim:sw=4:ts=4:et:
| Python | 0 | @@ -1145,22 +1145,9 @@
tems
-, OrderedDict
%0A
+
from
@@ -2394,33 +2394,48 @@
ble to make
-container
+dict or dict-like object
%0A :return
@@ -2500,250 +2500,8 @@
%22%22%22%0A
- if kwargs.get(%22ac_ordered%22, False) or kwargs.get(%22dict_type%22, False):%0A kwargs%5B%22dict_type%22%5D = container = OrderedDict%0A if %22dict_type%22 not in kwargs and not kwargs.get(%22ac_ordered%22, True):%0A kwargs%5B%22dict_type%22%5D = container%0A%0A
@@ -3165,13 +3165,8 @@
iner
-=dict
, se
@@ -5097,16 +5097,50 @@
_value%22%5D
+%0A _dict_options = %5B%22dict_type%22%5D
%0A%0A du
@@ -5562,32 +5562,32 @@
ers%0A %22%22%22%0A
+
return _
@@ -5603,18 +5603,8 @@
am,
-container=
cont
|
4f74b5e1d75b570229098531b172f2eb4877f78d | initialize 'dict_type' keyword argument for configparser.SafeConfigParser correctly | anyconfig/backend/ini.py | anyconfig/backend/ini.py | #
# Copyright (C) 2011 - 2015 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
# pylint: disable=unused-argument
"""INI or INI like config files backend.
.. versionchanged:: 0.3
Introduce 'ac_parse_value' keyword option to switch behaviors, same as
original configparser and rich backend parsing each parameter values.
- Format to support: INI or INI like ones
- Requirements: It should always be available.
- ConfigParser in python 2 standard library:
https://docs.python.org/2.7/library/configparser.html
- configparser in python 3 standard library:
https://docs.python.org/3/library/configparser.html
- Limitations: None obvious
- Special options:
- Use 'ac_parse_value' boolean keyword option if you want to parse values by
custom parser, anyconfig.backend.ini._parse.
"""
from __future__ import absolute_import
import anyconfig.backend.base
import anyconfig.parser as P
import anyconfig.utils
from anyconfig.compat import configparser, iteritems, OrderedDict
from anyconfig.backend.base import mk_opt_args
_SEP = ','
def _noop(val, *args, **kwargs):
"""
Parser does nothing.
"""
# It means nothing but can suppress 'Unused argument' pylint warns.
# (val, args, kwargs)[0]
return val
def _parse(val_s, sep=_SEP):
"""
FIXME: May be too naive implementation.
:param val_s: A string represents some value to parse
:param sep: separator between values
>>> _parse(r'"foo string"')
'foo string'
>>> _parse("a, b, c")
['a', 'b', 'c']
>>> _parse("aaa")
'aaa'
"""
if (val_s.startswith('"') and val_s.endswith('"')) or \
(val_s.startswith("'") and val_s.endswith("'")):
return val_s[1:-1]
elif sep in val_s:
return [P.parse(x) for x in P.parse_list(val_s)]
else:
return P.parse(val_s)
def _to_s(val, sep=", "):
"""Convert any to string.
:param val: An object
:param sep: separator between values
>>> _to_s([1, 2, 3])
'1, 2, 3'
>>> _to_s("aaa")
'aaa'
"""
if anyconfig.utils.is_iterable(val):
return sep.join(str(x) for x in val)
else:
return str(val)
def _load(stream, to_container=dict, sep=_SEP, **kwargs):
"""
:param stream: File or file-like object provides ini-style conf
:param to_container: any callable to make container
    :param sep: Separator string
:return: Dict or dict-like object represents config values
"""
_parse_val = _parse if kwargs.get("ac_parse_value", False) else _noop
if kwargs.get("ac_ordered", False):
dict_type = to_container = OrderedDict
    # Optional arguments for configparser.SafeConfigParser{,readfp}
kwargs_0 = mk_opt_args(("defaults", "dict_type", "allow_no_value"), kwargs)
kwargs_1 = mk_opt_args(("filename", ), kwargs)
try:
parser = configparser.SafeConfigParser(**kwargs_0)
except TypeError:
# .. note::
        #    It seems ConfigParser.*ConfigParser in python 2.6 does not support
# 'allow_no_value' option parameter, and TypeError will be thrown.
kwargs_0 = mk_opt_args(("defaults", "dict_type"), kwargs)
parser = configparser.SafeConfigParser(**kwargs_0)
cnf = to_container()
parser.readfp(stream, **kwargs_1)
# .. note:: Process DEFAULT config parameters as special ones.
defaults = parser.defaults()
if defaults:
cnf["DEFAULT"] = to_container()
for key, val in iteritems(defaults):
cnf["DEFAULT"][key] = _parse_val(val, sep)
for sect in parser.sections():
cnf[sect] = to_container()
for key, val in parser.items(sect):
cnf[sect][key] = _parse_val(val, sep)
return cnf
def _dumps_itr(cnf):
"""
:param cnf: Configuration data to dump
"""
dkey = "DEFAULT"
for sect, params in iteritems(cnf):
yield "[%s]" % sect
for key, val in iteritems(params):
if sect != dkey and dkey in cnf and cnf[dkey].get(key) == val:
continue # It should be in [DEFAULT] section.
yield "%s = %s" % (key, _to_s(val))
yield '' # it will be a separator between each sections.
def _dumps(cnf, **kwargs):
"""
:param cnf: Configuration data to dump
:param kwargs: optional keyword parameters to be sanitized :: dict
:return: String representation of `cnf` object in INI format
"""
return '\n'.join(l for l in _dumps_itr(cnf))
class Parser(anyconfig.backend.base.FromStreamLoader,
anyconfig.backend.base.ToStringDumper):
"""
Ini config files parser.
"""
_type = "ini"
_extensions = ["ini"]
_load_opts = ["defaults", "dict_type", "allow_no_value", "filename",
"ac_parse_value"]
dump_to_string = anyconfig.backend.base.to_method(_dumps)
def load_from_stream(self, stream, to_container, **options):
"""
Load config from given file like object `stream`.
:param stream: Config file or file like object
        :param to_container: callable to make a container object
:param options: optional keyword arguments
:return: Dict-like object holding config parameters
"""
return _load(stream, to_container=to_container, **options)
# vim:sw=4:ts=4:et:
| Python | 0.000023 | @@ -2574,16 +2574,24 @@
+kwargs%5B%22
dict_typ
@@ -2591,16 +2591,18 @@
ict_type
+%22%5D
= to_co
|
e5dcea13a27b90f89469518386a1748f3e141b5b | Improve docs of doQuery.py file. | app/lib/query/doQuery.py | app/lib/query/doQuery.py | # -*- coding: utf-8 -*-
"""
Receive SQL query in stdin, send to configured database file, then return
the query result rows.
Usage:
## methods of input:
# Pipe text to the script.
$ echo "SELECT * FROM Trend LIMIT 10" | python -m lib.query.doQuery
# Redirect text from .sql file to the script.
$ python -m lib.query.doQuery --csv < lib/query/sql/abc.sql \
> var/reporting/abc.csv
# Enter an ad hoc query in lines of stdin.
$ python -m lib.query.doQuery <enter>
SELECT *
FROM Trend LIMIT 10;
<ctrl+D>
    ## methods to output:
# Print to console
    $ python -m lib.query.doQuery < abc.sql
# Write to CSV
$ python -m lib.query.doQuery --csv < abc.sql > abc.csv
TODO
Test printing with u'\xed' character
"""
import sys
from lib import database as db
def CSVFormat(cell):
"""
Remove double-quotes from a string and if there is a comma then returns
value enclosed in double-quotes (ideal for outputting to CSV).
Null values are returned as an empty string.
    TODO: If the data required is more than just a trending topic
(e.g. user tweets) then it may be better to use the CSV module instead.
@param cell: any python object representing a cell value from a table row.
@return: stringified version of the input cell value, with CSV
formatting applied.
"""
if cell is None:
return ''
else:
phrase = str(cell)
# Remove double-quotes.
phrase = phrase.replace('"', "'")
# Add quotes if there is a comma.
phrase = '"{}"'.format(phrase) if ',' in phrase else phrase
return phrase
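# Example behaviour of CSVFormat (hypothetical inputs):
#   CSVFormat(None)       -> ''
#   CSVFormat('say "hi"') -> "say 'hi'"
#   CSVFormat('a, b')     -> '"a, b"'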
def main(args, query=None):
"""
Receive a SQL query as a string and execute then print results to stdout.
"""
if set(args) & set(('-h', '--help')):
        print 'Usage: python -m lib.query.doQuery [-c|--csv]'\
' [-s|--summary] [-h|--help]'
print ' A query is required in stdin.'
print 'Options and arguments:'
print '--help : show help.'
        print '--csv     : default behaviour is to print rows as tuples. The CSV'
        print '            flag makes results return in a format ideal for'
print ' writing out to a CSV file. i.e. comma separate'
print ' values without tuple brackets and quoting any'
print ' strings containing a comma. Headers are still'
print ' excluded.'
print '--summary : print only count of rows returned.'
else:
if not query:
query = sys.stdin.read()
if not query:
raise ValueError('Database query is required as stdin.')
results = db.conn.queryAll(query)
if set(args) & set(('-s', '--summary')):
print len(results)
elif set(args) & set(('-c', '--csv')):
for row in results:
# Any unicode characters will be lost (replaced with
# question marks) by converting to str.
rowStr = (CSVFormat(c) for c in row)
print ','.join(rowStr)
else:
for row in results:
print row
if __name__ == '__main__':
main(sys.argv[1:])
| Python | 0 | @@ -119,16 +119,200 @@
rows.%0A%0A
+Note that db queries don't have to done through python like this,%0Abut can be done in SQL directly. For example:%0A $ sqlite3 path/to/db -csv -header %3C path/to/query %3E path/to/report%0A%0A
Usage:%0A
@@ -742,16 +742,17 @@
trl+D%3E%0A%0A
+%0A
## m
@@ -797,18 +797,16 @@
le%0A $
- $
python
|
7a6d62f01e7b69c4f5ded3b7a0d8f7798601b0ff | Print matplotlib warning on stderr | nupic/research/monitor_mixin/plot.py | nupic/research/monitor_mixin/plot.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014-2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Plot class used in monitor mixin framework.
"""
import os
import traceback
try:
# We import in here to avoid creating a matplotlib dependency in nupic.
import matplotlib.pyplot as plt
import matplotlib.cm as cm
except ImportError:
print "Cannot import matplotlib. Plot class will not work."
print traceback.format_exc() + "\n"
class Plot(object):
def __init__(self, monitor, title, show=True):
"""
@param monitor (MonitorMixinBase) Monitor Mixin instance that generated
this plot
@param title (string) Plot title
"""
self._monitor = monitor
self._title = title
self._fig = self._initFigure()
self._show = show
if self._show:
plt.ion()
plt.show()
def _initFigure(self):
fig = plt.figure()
fig.suptitle(self._prettyPrintTitle())
return fig
def _prettyPrintTitle(self):
if self._monitor.mmName is not None:
return "[{0}] {1}".format(self._monitor.mmName, self._title)
return self._title
def addGraph(self, data, position=111, xlabel=None, ylabel=None):
""" Adds a graph to the plot's figure.
@param data See matplotlib.Axes.plot documentation.
@param position A 3-digit number. The first two digits define a 2D grid
where subplots may be added. The final digit specifies the nth grid
location for the added subplot
@param xlabel text to be displayed on the x-axis
@param ylabel text to be displayed on the y-axis
"""
ax = self._addBase(position, xlabel=xlabel, ylabel=ylabel)
ax.plot(data)
plt.draw()
def addHistogram(self, data, position=111, xlabel=None, ylabel=None,
bins=None):
""" Adds a histogram to the plot's figure.
@param data See matplotlib.Axes.hist documentation.
@param position A 3-digit number. The first two digits define a 2D grid
where subplots may be added. The final digit specifies the nth grid
location for the added subplot
@param xlabel text to be displayed on the x-axis
@param ylabel text to be displayed on the y-axis
"""
ax = self._addBase(position, xlabel=xlabel, ylabel=ylabel)
ax.hist(data, bins=bins, color="green", alpha=0.8)
plt.draw()
def add2DArray(self, data, position=111, xlabel=None, ylabel=None, cmap=None,
aspect="auto", interpolation="nearest", name=None):
""" Adds an image to the plot's figure.
@param data a 2D array. See matplotlib.Axes.imshow documentation.
@param position A 3-digit number. The first two digits define a 2D grid
where subplots may be added. The final digit specifies the nth grid
location for the added subplot
@param xlabel text to be displayed on the x-axis
@param ylabel text to be displayed on the y-axis
@param cmap color map used in the rendering
@param aspect how aspect ratio is handled during resize
@param interpolation interpolation method
"""
if cmap is None:
# The default colormodel is an ugly blue-red model.
cmap = cm.Greys
ax = self._addBase(position, xlabel=xlabel, ylabel=ylabel)
ax.imshow(data, cmap=cmap, aspect=aspect, interpolation=interpolation)
if self._show:
plt.draw()
if name is not None:
if not os.path.exists("log"):
os.mkdir("log")
plt.savefig("log/{name}.png".format(name=name), bbox_inches="tight",
figsize=(8, 6), dpi=400)
def _addBase(self, position, xlabel=None, ylabel=None):
""" Adds a subplot to the plot's figure at specified position.
@param position A 3-digit number. The first two digits define a 2D grid
where subplots may be added. The final digit specifies the nth grid
location for the added subplot
@param xlabel text to be displayed on the x-axis
@param ylabel text to be displayed on the y-axis
@returns (matplotlib.Axes) Axes instance
"""
ax = self._fig.add_subplot(position)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
return ax
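# A minimal usage sketch (assumes a monitor mixin instance `tm` and data arrays
# `counts` and `overlaps`, all hypothetical):
#
#   plot = Plot(tm, "Activity")
#   plot.addGraph(counts, position=211, xlabel="step", ylabel="# active")
#   plot.addHistogram(overlaps, position=212, xlabel="overlap", ylabel="count")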
| Python | 0.000002 | @@ -1054,16 +1054,27 @@
raceback
+%0Aimport sys
%0A%0Atry:%0A
@@ -1236,16 +1236,31 @@
%0A print
+ %3E%3E sys.stderr,
%22Cannot
@@ -1313,16 +1313,31 @@
%0A print
+ %3E%3E sys.stderr,
traceba
|
6d71726ab50a1fc81ff1ee03edbc77de3422bce8 | change back _ to unused name | openquake/hazardlib/shakemap/maps.py | openquake/hazardlib/shakemap/maps.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2018-2021 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import logging
import numpy
from openquake.baselib.general import CallableDict
from openquake.hazardlib import geo, site
from openquake.hazardlib.shakemap.parsers import get_array
F32 = numpy.float32
get_sitecol_shakemap = CallableDict()
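# Dispatch sketch: the first positional argument selects the registered
# implementation, e.g. the hypothetical call
# get_sitecol_shakemap('usgs_id', {'id': 'us1000abcd'}, ['PGA'])
# is routed to get_sitecol_usgs below.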
@get_sitecol_shakemap.add('shapefile')
def get_sitecol_shapefile(kind, uridict, required_imts, sitecol=None,
_=None, mode='filter'):
"""
:param uridict: a dictionary specifying the ShakeMap resource
    :param required_imts: required IMTs as a list of strings
:param sitecol: SiteCollection used to reduce the shakemap
:param _: assoc_dist, unused for shapefiles
:param mode: 'strict', 'warn' or 'filter'
:returns: filtered site collection, filtered shakemap, discarded
"""
polygons, shakemap = get_array(kind, **uridict)
available_imts = set(shakemap['val'].dtype.names)
bbox = (shakemap['bbox']['minx'].min(), shakemap['bbox']['miny'].min(),
shakemap['bbox']['maxx'].max(), shakemap['bbox']['maxy'].max())
check_required_imts(required_imts, available_imts)
# build a copy of the ShakeMap with only the relevant IMTs
shakemap = filter_unused_imts(
shakemap, required_imts, site_fields=['vs30'])
if sitecol is None:
# use centroids of polygons to generate sitecol
centroids = numpy.array([tuple(*p.centroid.coords)
for p in polygons],
dtype=[('lon', numpy.float32),
('lat', numpy.float32)])
for name in ('lon', 'lat'):
shakemap[name] = centroids[name]
return site.SiteCollection.from_usgs_shakemap(shakemap), shakemap, []
sitecol = apply_bounding_box(sitecol, bbox)
logging.info('Associating %d GMVs to %d sites',
len(shakemap), len(sitecol))
return geo.utils.assoc_to_polygons(polygons, shakemap, sitecol, mode)
@get_sitecol_shakemap.add('usgs_xml', 'usgs_id', 'file_npy')
def get_sitecol_usgs(kind, uridict, required_imts, sitecol=None,
assoc_dist=None, mode='warn'):
"""
:param uridict: a dictionary specifying the ShakeMap resource
    :param required_imts: required IMTs as a list of strings
:param sitecol: SiteCollection used to reduce the shakemap
:param assoc_dist: the maximum distance for association
:param mode: 'strict', 'warn' or 'filter'
:returns: filtered site collection, filtered shakemap, discarded
"""
shakemap = get_array(kind, **uridict)
available_imts = set(shakemap['val'].dtype.names)
bbox = (shakemap['lon'].min(), shakemap['lat'].min(),
shakemap['lon'].max(), shakemap['lat'].max())
check_required_imts(required_imts, available_imts)
# build a copy of the ShakeMap with only the relevant IMTs
shakemap = filter_unused_imts(shakemap, required_imts)
if sitecol is None:
return site.SiteCollection.from_usgs_shakemap(shakemap), shakemap, []
sitecol = apply_bounding_box(sitecol, bbox)
logging.info('Associating %d GMVs to %d sites',
len(shakemap), len(sitecol))
return geo.utils.assoc(shakemap, sitecol, assoc_dist, mode)
def filter_unused_imts(shakemap, required_imts,
site_fields=('lon', 'lat', 'vs30')):
"""
build a copy of the ShakeMap with only the relevant IMTs
:param shakemap: shakemap array which should be filtered
:param required_imts: imts to keep in shakemap array
:param site_fields: single columns which are copied over
"""
dt = [(imt, F32) for imt in sorted(required_imts)]
dtlist = [('lon', F32), ('lat', F32), ('vs30', F32),
('val', dt), ('std', dt)]
data = numpy.zeros(len(shakemap), dtlist)
for name in site_fields:
data[name] = shakemap[name]
for name in ('val', 'std'):
for im in required_imts:
data[name][im] = shakemap[name][im]
return data
def check_required_imts(required_imts, available_imts):
"""
Check if the list of required imts is present in the list of available imts
:param required_imts: list of strings of required imts
:param available_imts: set of available imts
:raises RuntimeError: if required imts are not present
"""
missing = set(required_imts) - available_imts
if missing:
msg = ('The IMT %s is required but not in the available set %s, '
'please change the risk model otherwise you will have '
'incorrect zero losses for the associated taxonomies' %
(missing.pop(), ', '.join(available_imts)))
raise RuntimeError(msg)
def apply_bounding_box(sitecol, bbox):
"""
Filter out sites which are not in the bounding box.
:param sitecol: SiteCollection of sites from exposed assets
:param bbox: Bounding Box (lon.min, lat.min, lon.max, lat.max)
:raises RuntimeError: if no sites are found within the Bounding Box
"""
indices = sitecol.within_bbox(bbox)
if len(indices) == 0:
raise RuntimeError('There are no sites within '
'the bounding box %s' % str(bbox))
return sitecol.filtered(indices)
| Python | 0.002502 | @@ -1131,17 +1131,26 @@
-_
+assoc_dist
=None, m
@@ -1367,11 +1367,8 @@
aram
- _:
ass
@@ -1374,17 +1374,17 @@
soc_dist
-,
+:
unused
|
fe3e09af2accaf6924fc42b9df6ae5c99a005056 | Remove moxstubout usage | oslo_i18n/tests/test_gettextutils.py | oslo_i18n/tests/test_gettextutils.py | # Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gettext
import logging
from babel import localedata
import mock
from oslotest import base as test_base
from oslotest import moxstubout
import six
from oslo_i18n import _factory
from oslo_i18n import _gettextutils
from oslo_i18n import _lazy
from oslo_i18n import _message
LOG = logging.getLogger(__name__)
class GettextTest(test_base.BaseTestCase):
def setUp(self):
super(GettextTest, self).setUp()
moxfixture = self.useFixture(moxstubout.MoxStubout())
self.stubs = moxfixture.stubs
self.mox = moxfixture.mox
# remember so we can reset to it later in case it changes
self._USE_LAZY = _lazy.USE_LAZY
self.t = _factory.TranslatorFactory('oslo_i18n.test')
def tearDown(self):
# reset to value before test
_lazy.USE_LAZY = self._USE_LAZY
super(GettextTest, self).tearDown()
def test_gettext_does_not_blow_up(self):
LOG.info(self.t.primary('test'))
def test__gettextutils_install(self):
_gettextutils.install('blaa')
_lazy.enable_lazy(False)
self.assertTrue(isinstance(self.t.primary('A String'),
six.text_type))
_gettextutils.install('blaa')
_lazy.enable_lazy(True)
self.assertTrue(isinstance(self.t.primary('A Message'),
_message.Message))
def test_gettext_install_looks_up_localedir(self):
with mock.patch('os.environ.get') as environ_get:
with mock.patch('gettext.install'):
environ_get.return_value = '/foo/bar'
_gettextutils.install('blaa')
environ_get.assert_has_calls([mock.call('BLAA_LOCALEDIR')])
def test_gettext_install_updates_builtins(self):
with mock.patch('os.environ.get') as environ_get:
with mock.patch('gettext.install'):
environ_get.return_value = '/foo/bar'
if '_' in six.moves.builtins.__dict__:
del six.moves.builtins.__dict__['_']
_gettextutils.install('blaa')
self.assertIn('_', six.moves.builtins.__dict__)
def test_get_available_languages(self):
# All the available languages for which locale data is available
def _mock_locale_identifiers():
# 'zh', 'zh_Hant'. 'zh_Hant_HK', 'fil' all have aliases
# missing from babel but we add them in _gettextutils, we
# test that here too
return ['zh', 'es', 'nl', 'fr', 'zh_Hant', 'zh_Hant_HK', 'fil']
self.stubs.Set(localedata,
'list' if hasattr(localedata, 'list')
else 'locale_identifiers',
_mock_locale_identifiers)
# Only the languages available for a specific translation domain
def _mock_gettext_find(domain, localedir=None, languages=None, all=0):
languages = languages or []
if domain == 'domain_1':
return 'translation-file' if any(x in ['zh', 'es', 'fil']
for x in languages) else None
elif domain == 'domain_2':
return 'translation-file' if any(x in ['fr', 'zh_Hant']
for x in languages) else None
return None
self.stubs.Set(gettext, 'find', _mock_gettext_find)
# Ensure that no domains are cached
_gettextutils._AVAILABLE_LANGUAGES = {}
# en_US should always be available no matter the domain
# and it should also always be the first element since order matters
domain_1_languages = _gettextutils.get_available_languages('domain_1')
domain_2_languages = _gettextutils.get_available_languages('domain_2')
self.assertEqual('en_US', domain_1_languages[0])
self.assertEqual('en_US', domain_2_languages[0])
# The domain languages should be included after en_US with
# with their respective aliases when it applies
self.assertEqual(6, len(domain_1_languages))
self.assertIn('zh', domain_1_languages)
self.assertIn('zh_CN', domain_1_languages)
self.assertIn('es', domain_1_languages)
self.assertIn('fil', domain_1_languages)
self.assertIn('tl_PH', domain_1_languages)
self.assertEqual(4, len(domain_2_languages))
self.assertIn('fr', domain_2_languages)
self.assertIn('zh_Hant', domain_2_languages)
self.assertIn('zh_TW', domain_2_languages)
self.assertEqual(2, len(_gettextutils._AVAILABLE_LANGUAGES))
# Now test an unknown domain, only en_US should be included
unknown_domain_languages = _gettextutils.get_available_languages('huh')
self.assertEqual(1, len(unknown_domain_languages))
self.assertIn('en_US', unknown_domain_languages)
| Python | 0 | @@ -3214,29 +3214,47 @@
-self.stubs.Se
+mock_patcher = mock.patch.objec
t(locale
@@ -3282,16 +3282,34 @@
+
+
'list' i
@@ -3361,16 +3361,34 @@
+
+
else 'lo
@@ -3429,16 +3429,34 @@
+
+
_mock_lo
@@ -3472,16 +3472,88 @@
tifiers)
+%0A mock_patcher.start()%0A self.addCleanup(mock_patcher.stop)
%0A%0A
@@ -4154,21 +4154,39 @@
-self.stubs.Se
+mock_patcher = mock.patch.objec
t(ge
@@ -4219,16 +4219,88 @@
xt_find)
+%0A mock_patcher.start()%0A self.addCleanup(mock_patcher.stop)
%0A%0A
|
c4907587ef2d14ad746baf79b4c52252026b711a | add the test plans list in the subject | app/utils/report/test.py | app/utils/report/test.py | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Create the tests email report."""
import json
import os
import models
import utils
import utils.db
import utils.report.common as rcommon
TEST_REPORT_FIELDS = [
models.ARCHITECTURE_KEY,
models.BOARD_INSTANCE_KEY,
models.BOARD_KEY,
models.BOOT_ID_KEY,
models.BOOT_LOG_HTML_KEY,
models.BOOT_LOG_KEY,
models.BUILD_ID_KEY,
models.CREATED_KEY,
models.DEFCONFIG_FULL_KEY,
models.DEFCONFIG_KEY,
models.DEFINITION_URI_KEY,
models.DEVICE_TYPE_KEY,
models.GIT_BRANCH_KEY,
models.GIT_COMMIT_KEY,
models.GIT_DESCRIBE_KEY,
models.GIT_URL_KEY,
models.ID_KEY,
models.INITRD_KEY,
models.INITRD_INFO_KEY,
models.JOB_ID_KEY,
models.JOB_KEY,
models.KERNEL_KEY,
models.LAB_NAME_KEY,
models.METADATA_KEY,
models.NAME_KEY,
models.STATUS_KEY,
models.TEST_CASES_KEY,
models.SUB_GROUPS_KEY,
models.TIME_KEY,
models.VCS_COMMIT_KEY,
models.VERSION_KEY,
]
def _add_test_group_data(group, database):
test_cases = []
for test_case_id in group[models.TEST_CASES_KEY]:
test_case = utils.db.find_one2(
database[models.TEST_CASE_COLLECTION],
{"_id": test_case_id})
test_cases.append(test_case)
sub_groups = []
for sub_group_id in group[models.SUB_GROUPS_KEY]:
sub_group = utils.db.find_one2(
database[models.TEST_GROUP_COLLECTION],
{"_id": sub_group_id})
_add_test_group_data(sub_group, database)
sub_groups.append(sub_group)
total = {status: 0 for status in ["PASS", "FAIL", "SKIP"]}
for test_case in test_cases:
total[test_case["status"]] += 1
for sub_group_total in (sg["total"] for sg in sub_groups):
for status, count in sub_group_total.iteritems():
total[status] += count
group.update({
"test_cases": test_cases,
"sub_groups": sub_groups,
"total_tests": sum(total.values()),
"total": total,
})
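# For instance (hypothetical counts): a group with test cases PASS, PASS, FAIL
# plus one sub-group totalling {"PASS": 1, "SKIP": 2} ends up with
# total == {"PASS": 3, "FAIL": 1, "SKIP": 2} and total_tests == 6.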
def create_test_report(data, email_format, db_options,
base_path=utils.BASE_PATH):
"""Create the tests report email to be sent.
:param data: The meta-data for the test job.
:type data: dictionary
:param email_format: The email format to send.
:type email_format: list
:param db_options: The mongodb database connection parameters.
:type db_options: dict
:param base_path: Path to the top-level storage directory.
:type base_path: string
    :return: A tuple with the TXT email body, the HTML email body and the
    headers as dictionary. If an error occurred, None.
"""
database = utils.db.get_db_connection(db_options)
job, branch, kernel, plans = (data[k] for k in [
models.JOB_KEY,
models.GIT_BRANCH_KEY,
models.KERNEL_KEY,
models.PLANS_KEY
])
# Avoid using the field "plans" when fetching the documents
# from mongodb
del data['plans']
specs = {x: data[x] for x in data.keys() if data[x]}
test_group_docs = list(utils.db.find(
database[models.TEST_GROUP_COLLECTION],
spec=specs,
fields=TEST_REPORT_FIELDS))
top_groups = []
sub_group_ids = []
for group in test_group_docs:
sub_group_ids.extend(group[models.SUB_GROUPS_KEY])
top_groups = []
for group in test_group_docs:
if group["_id"] not in sub_group_ids and \
group["name"] != "lava" and \
not plans:
top_groups.append(group)
elif plans and group["name"] in plans:
top_groups.append(group)
if not top_groups:
utils.LOG.warning("Failed to find test group documents")
return None
for group in top_groups:
_add_test_group_data(group, database)
subject_str = "Test results for {}/{} - {}".format(job, branch, kernel)
if not plans:
plans_string = "All the results are included"
else:
plans_string = ", ".join(plans)
git_url, git_commit = (top_groups[0][k] for k in [
models.GIT_URL_KEY, models.GIT_COMMIT_KEY])
headers = {
rcommon.X_REPORT: rcommon.TEST_REPORT_TYPE,
rcommon.X_BRANCH: branch,
rcommon.X_TREE: job,
rcommon.X_KERNEL: kernel,
}
template_data = {
"subject_str": subject_str,
"tree": job,
"branch": branch,
"git_url": git_url,
"kernel": kernel,
"git_commit": git_commit,
"plans_string": plans_string,
"boot_log": models.BOOT_LOG_KEY,
"boot_log_html": models.BOOT_LOG_HTML_KEY,
"storage_url": rcommon.DEFAULT_STORAGE_URL,
"test_groups": top_groups,
}
if models.EMAIL_TXT_FORMAT_KEY in email_format:
txt_body = rcommon.create_txt_email("test.txt", **template_data)
else:
txt_body = None
if models.EMAIL_HTML_FORMAT_KEY in email_format:
html_body = rcommon.create_html_email("test.html", **template_data)
else:
html_body = None
return txt_body, html_body, subject_str, headers
| Python | 0.000016 | @@ -4412,24 +4412,100 @@
database)%0A%0A
+ if not plans:%0A plans_string = %22All the results are included%22%0A
subject_
@@ -4568,33 +4568,24 @@
kernel)%0A
-%0A
-if not plans
+else
:%0A
@@ -4606,86 +4606,121 @@
= %22
-All the results are included%22%0A else:%0A plans_string = %22, %22.join(plans
+, %22.join(plans)%0A subject_str = %22Test results (%7B%7D) for %7B%7D/%7B%7D - %7B%7D%22.format(plans_string, job, branch, kernel
)%0A%0A
|
db2f6f4c2a70875aade3741fb57d0bc1b109ce3c | Add regexp to create_user form logic | app/views/create_user.py | app/views/create_user.py | from flask import request, flash, render_template
import bcrypt
from app import app, helpers
@app.route('/create_user', methods=['GET', 'POST'])
def create_user():
if request.method == 'POST':
username = request.form.get('username', None).strip()
password = request.form.get('password', None)
role = request.form.get('role', 'Guest')
if not username or username == '' or not password or password == '':
flash('Please enter a username and password.')
else:
# Form was completed
matching_user = "SELECT user_pk FROM users WHERE username = %s;"
user_does_exist = helpers.duplicate_check(matching_user, [username])
if user_does_exist:
flash('Username already exists')
else:
salt = bcrypt.gensalt(12)
password = bcrypt.hashpw(password.encode('utf-8'), bytes(salt))
new_user = ("INSERT INTO users (username, password, salt, role_fk) "
"VALUES (%s, %s, %s, %s);")
helpers.db_change(new_user, [username, password, salt, role])
flash('Your account was created!')
return render_template('create_user.html')
| Python | 0 | @@ -254,17 +254,36 @@
.strip()
+ # Aa09_.- allowed
%0A
-
@@ -393,107 +393,30 @@
if
-not username or username == '' or not password or password == '':%0A flash('Please enter a
+re.match(r'%5E%5B%5Cw.-%5D+$',
use
@@ -420,16 +420,17 @@
username
+)
and pas
@@ -438,25 +438,8 @@
word
-.')%0A%0A else
:%0A
@@ -468,16 +468,33 @@
ompleted
+ with valid input
%0A
@@ -1133,24 +1133,98 @@
created!')%0A%0A
+ else:%0A flash('Please enter a username and password.')%0A%0A
return r
|
e4097fc77139abde6311886c2a7792d675e5f805 | Update merge_intervals.py | array/merge_intervals.py | array/merge_intervals.py | """
Given a collection of intervals, merge all overlapping intervals.
"""
class Interval:
"""
In mathematics, a (real) interval is a set of real
numbers with the property that any number that lies
between two numbers in the set is also included in the set.
"""
def __init__(self, start=0, end=0):
self.start = start
self.end = end
def __repr__(self):
return f"Interval ({self.start}, {self.end})"
def __iter__(self):
return iter(range(self.start, self.end))
def __getitem__(self, index):
if index < 0:
return self.end + index
return self.start + index
def __len__(self):
return self.end - self.start
def __contains__(self, item):
        if self.start <= item <= self.end:
return True
return False
def __eq__(self, other):
if self.start == other.start and self.end == other.end:
return True
return False
def as_list(self):
""" Return interval as list. """
return list(self)
@staticmethod
def merge(intervals):
""" Merges two intervals into one. """
out = []
for i in sorted(intervals, key=lambda i: i.start):
if out and i.start <= out[-1].end:
out[-1].end = max(out[-1].end, i.end)
else:
out += i,
return out
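    # Worked example: merging [1, 3], [2, 6], [8, 10], [15, 18] yields
    # [1, 6], [8, 10], [15, 18]; [1, 3] and [2, 6] overlap and collapse into [1, 6].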
@staticmethod
def print_intervals(intervals):
"""
Prints out the intervals.
"""
res = []
for i in intervals:
res.append(repr(i))
print("".join(res))
@staticmethod
def merge_v2(intervals):
""" Merges intervals in the form of list. """
if intervals is None:
return None
intervals.sort(key=lambda i: i[0])
out = [intervals.pop(0)]
for i in intervals:
if out[-1][-1] >= i[0]:
out[-1][-1] = max(out[-1][-1], i[-1])
else:
out.append(i)
return out
import unittest
class TestMergeInterval(unittest.TestCase):
def test_merge(self):
interval_list = [[1, 3], [2, 6], [8, 10], [15, 18]]
intervals = [Interval(i[0], i[1]) for i in interval_list]
merged_intervals = Interval.merge(intervals)
self.assertEqual(
merged_intervals,
[Interval(1, 6), Interval(8, 10), Interval(15, 18)]
)
def test_merge_v2(self):
interval_list = [[1, 3], [2, 6], [8, 10], [15, 18]]
merged_intervals = Interval.merge_v2(interval_list)
self.assertEqual(
merged_intervals,
[[1, 6], [8, 10], [15, 18]]
)
if __name__ == "__main__":
unittest.main()
| Python | 0.000002 | @@ -1612,38 +1612,17 @@
(res))%0A%0A
- @staticmethod%0A
+%0A
def merg
@@ -1630,36 +1630,32 @@
_v2(intervals):%0A
-
%22%22%22 Merges i
@@ -1684,28 +1684,24 @@
f list. %22%22%22%0A
-
if inter
@@ -1714,20 +1714,16 @@
s None:%0A
-
@@ -1734,20 +1734,16 @@
rn None%0A
-
inte
@@ -1773,20 +1773,16 @@
: i%5B0%5D)%0A
-
out
@@ -1802,20 +1802,16 @@
pop(0)%5D%0A
-
for
@@ -1818,36 +1818,32 @@
i in intervals:%0A
-
if out%5B-
@@ -1866,28 +1866,24 @@
-
out%5B-1%5D%5B-1%5D
@@ -1908,20 +1908,16 @@
i%5B-1%5D)%0A
-
@@ -1934,20 +1934,16 @@
-
out.appe
@@ -1948,20 +1948,16 @@
pend(i)%0A
-
retu
@@ -2474,33 +2474,24 @@
intervals =
-Interval.
merge_v2(int
|
ef69106ceb6a494cb809bcda1883bfc31cf12ea5 | Move raise_for_status to BaseResponse. | asks/response_objects.py | asks/response_objects.py | import codecs
from types import SimpleNamespace
import json as _json
from async_generator import async_generator, yield_
import h11
from .http_utils import decompress, parse_content_encoding
from .utils import timeout_manager
from .errors import BadStatus
class BaseResponse:
'''
A response object supporting a range of methods and attribs
for accessing the status line, header, cookies, history and
body of a response.
'''
def __init__(self,
encoding,
http_version,
status_code,
reason_phrase,
headers,
body,
method,
url):
self.encoding = encoding
self.http_version = http_version
self.status_code = status_code
self.reason_phrase = reason_phrase
self.headers = headers
self.body = body
self.method = method
self.url = url
self.history = []
self.cookies = []
def __repr__(self):
return '<{} {} {}>'.format(self.__class__.__name__,
self.status_code,
self.reason_phrase)
def _guess_encoding(self):
try:
guess = self.headers['content-type'].split('=')[1]
codecs.lookup(guess)
self.encoding = guess
except LookupError: # IndexError/KeyError are LookupError subclasses
pass
def _decompress(self, encoding=None):
content_encoding = self.headers.get('Content-Encoding', None)
if content_encoding is not None:
decompressor = decompress(parse_content_encoding(content_encoding),
encoding)
r = decompressor.send(self.body)
return r
else:
if encoding is not None:
return self.body.decode(encoding, errors='replace')
else:
return self.body
async def __aenter__(self):
return self
async def __aexit__(self, *exc_info):
...
class Response(BaseResponse):
def json(self, **kwargs):
'''
If the response's body is valid json, we load it as a python dict
and return it.
'''
body = self._decompress(self.encoding)
return _json.loads(body, **kwargs)
def raise_for_status(self):
'''
Raise BadStatus if one occurred.
'''
if 400 <= self.status_code < 500:
raise BadStatus(
'{} Client Error: {} for url: {}'.format(
self.status_code, self.reason_phrase, self.url
),
self,
self.status_code
)
elif 500 <= self.status_code < 600:
raise BadStatus(
'{} Server Error: {} for url: {}'.format(
self.status_code, self.reason_phrase, self.url
),
self,
self.status_code
)
@property
def text(self):
'''
Returns the (maybe decompressed) decoded version of the body.
'''
return self._decompress(self.encoding)
@property
def content(self):
'''
Returns the content as-is after decompression, if any.
'''
return self._decompress()
@property
def raw(self):
'''
Returns the response body as received.
'''
return self.body
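# A hedged usage sketch, assuming an already-received Response `r`:
#   r.raise_for_status()  # raises BadStatus on 4xx/5xx
#   data = r.json()       # decompresses (if needed) and parses the body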
class StreamResponse(BaseResponse):
...
class StreamBody:
def __init__(self, h11_connection, sock, content_encoding=None, encoding=None):
self.h11_connection = h11_connection
self.sock = sock
self.content_encoding = content_encoding
self.encoding = encoding
# TODO: add decompress data to __call__ args
self.decompress_data = True
self.timeout = None
self.read_size = 10000
@async_generator
async def __aiter__(self):
if self.content_encoding is not None:
decompressor = decompress(parse_content_encoding(self.content_encoding))
while True:
event = await self._recv_event()
if isinstance(event, h11.Data):
if self.content_encoding is not None:
if self.decompress_data:
event.data = decompressor.send(event.data)
await yield_(event.data)
elif isinstance(event, h11.EndOfMessage):
break
async def _recv_event(self):
while True:
event = self.h11_connection.next_event()
if event is h11.NEED_DATA:
data = await timeout_manager(self.timeout, self.sock.receive_some, self.read_size)
self.h11_connection.receive_data(data)
continue
return event
def __call__(self, timeout=None):
self.timeout = timeout
return self
async def __aenter__(self):
return self
async def close(self):
await self.sock.close()
async def __aexit__(self, *exc_info):
await self.close()
class Cookie(SimpleNamespace):
'''
A simple cookie object, for storing cookie stuff :)
Needs to be made compatible with the API's cookies kw arg.
'''
def __init__(self, host, data):
self.name = None
self.value = None
self.domain = None
self.path = None
self.secure = False
self.expires = None
self.comment = None
self.__dict__.update(data)
super().__init__(**self.__dict__)
self.host = host
def __repr__(self):
if self.name is not None:
return '<Cookie {} from {}>'.format(self.name, self.host)
else:
return '<Cookie {} from {}>'.format(self.value, self.host)
def __iter__(self):
for k, v in self.__dict__.items():
yield k, v
| Python | 0 | @@ -998,16 +998,684 @@
s = %5B%5D%0A%0A
+ def raise_for_status(self):%0A '''%0A Raise BadStatus if one occurred.%0A '''%0A if 400 %3C= self.status_code %3C 500:%0A raise BadStatus(%0A '%7B%7D Client Error: %7B%7D for url: %7B%7D'.format(%0A self.status_code, self.reason_phrase, self.url%0A ),%0A self,%0A self.status_code%0A )%0A elif 500 %3C= self.status_code %3C 600:%0A raise BadStatus(%0A '%7B%7D Server Error: %7B%7D for url: %7B%7D'.format(%0A self.status_code, self.reason_phrase, self.url%0A ),%0A self,%0A self.status_code%0A )%0A%0A
def
@@ -3033,676 +3033,8 @@
s)%0A%0A
- def raise_for_status(self):%0A '''%0A Raise BadStatus if one occurred.%0A '''%0A if 400 %3C= self.status_code %3C 500:%0A raise BadStatus(%0A '%7B%7D Client Error: %7B%7D for url: %7B%7D'.format(%0A self.status_code, self.reason_phrase, self.url%0A ),%0A self,%0A self.status_code%0A )%0A elif 500 %3C= self.status_code %3C 600:%0A raise BadStatus(%0A '%7B%7D Server Error: %7B%7D for url: %7B%7D'.format(%0A self.status_code, self.reason_phrase, self.url%0A ),%0A self,%0A self.status_code%0A )%0A%0A
|
5e11d6766bea9098b89a8c5246518b4a09a163d5 | Add some paramters to the generic REST API class: - api_root - timeout - api_version | atlassian/rest_client.py | atlassian/rest_client.py | import json
import logging
from urllib.parse import urlencode, urljoin
import requests
log = logging.getLogger("atlassian")
class AtlassianRestAPI:
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
self._session = requests.Session()
if username and password:
self._session.auth = (username, password)
def log_curl_debug(self, method, path, data=None, headers={}, level=logging.DEBUG):
message = "curl --silent -X {method} -u '{username}':'{password}' -H {headers} {data} '{url}'".format(
method=method,
username=self.username,
password=self.password,
headers=' -H '.join(["'{0}: {1}'".format(key, value) for key, value in headers.items()]),
data='' if not data else "--data '{0}'".format(json.dumps(data)),
url='{0}{1}'.format(self.url, path))
log.log(level=level, msg=message)
def resource_url(self, resource, version='latest'):
return '/'.join(['rest', 'api', version, resource])
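    # e.g. resource_url('issue') -> 'rest/api/latest/issue' with the default version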
def request(self, method='GET', path='/', data=None, flags=None, params=None,
headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
self.log_curl_debug(method=method, path=path, headers=headers, data=data)
url = urljoin(self.url, path)
if params or flags:
url += '?'
if params:
url += urlencode(params or {})
if flags:
url += ('&' if params else '') + '&'.join(flags or [])
        response = self._session.request(
method=method,
url=url,
headers=headers,
data=json.dumps(data),
auth=(self.username, self.password),
timeout=60)
if response.status_code == 200:
log.debug('Received: {0}'.format(response.json()))
elif response.status_code == 204:
log.debug('Received "204 No Content" response')
else:
self.log_curl_debug(method=method, path=path, headers=headers, data=data, level=logging.DEBUG)
log.info(response.json())
response.raise_for_status()
return response
def get(self, path, data=None, flags=None, params=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
return self.request('GET', path=path, flags=flags, params=params, data=data, headers=headers).json()
def post(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
try:
return self.request('POST', path=path, data=data, headers=headers).json()
except ValueError:
log.debug('Received response with no content')
return None
def put(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
try:
return self.request('PUT', path=path, data=data, headers=headers).json()
except ValueError:
log.debug('Received response with no content')
return None
def delete(self, path, data=None, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}):
"""
Deletes resources at given paths.
:rtype: dict
:return: Empty dictionary to have consistent interface. Some of Atlassian rest resources don't return any content.
"""
self.request('DELETE', path=path, data=data, headers=headers) | Python | 0.999987 | @@ -157,138 +157,384 @@
def
- __init__(self, url, username, password):%0A self.url = url%0A self.username = username%0A self.password = password
+ault_headers=%7B'Content-Type': 'application/json', 'Accept': 'application/json'%7D%0A%0A def __init__(self, url, username, password, timeout=60, api_root='rest/api', api_version='latest'):%0A self.url = url%0A self.username = username%0A self.password = password%0A self.timeout = timeout%0A self.api_root = api_root%0A self.api_version = api_version
%0A
@@ -1271,26 +1271,8 @@
urce
-, version='latest'
):%0A
@@ -1299,23 +1299,32 @@
in(%5B
-'rest', 'api',
+self.api_root, self.api_
vers
@@ -2094,18 +2094,37 @@
timeout=
-60
+self.timeout%0A
)%0A
|
484c3caecb3cb5fa2a6d2a4c59071e531378aba3 | Output error if Google blocks ip | autoload/stackanswers.py | autoload/stackanswers.py | import re
import json
import requests
import vim
import sys
API_KEY = "vYizAQxn)7tmkShJZyHqWQ(("
def strip_html(html):
pattern = re.compile(r'<.*?>')
return pattern.sub('', html)
def query_google(query, domain):
search = "https://ajax.googleapis.com/ajax/services/search/web?v=1.0&q=%s"
url = search % (query + ":" + domain)
try:
response = requests.get(url)
except:
return None
data = json.loads(response.text)["responseData"]["results"]
urls = []
for result in data:
url = result["url"]
if is_valid_url(url):
urls.append(url)
return urls
def get_question_id(url):
try:
qid = int(url.split("/")[-2])
except:
qid = None
return qid
def is_valid_url(url):
return "stackoverflow" in url and "/tagged/" not in url
def get_question_data(qid):
response = requests.get("https://api.stackexchange.com/2.2/questions/%s/answers?order=&sort=votes&site=stackoverflow&key=%s&filter=!)Q2B_4mND07Uc*hKpm6.P0Q5" % (qid, API_KEY))
return json.loads(response.text)["items"]
def parse_question_data(data, _filter):
post = {
"question": "",
"answers": []
}
if len(data) == 0 or "title" not in data[0]:
return post
post["question"] = data[0]["title"]
for answer in data:
answer_data = parse_answer(answer)
if _filter == "accepted":
if answer_data[2]:
post["answers"].append(parse_answer(answer))
elif _filter == "top":
post["answers"].append(parse_answer(answer))
break
else:
post["answers"].append(parse_answer(answer))
return post
def parse_answer(answer):
author = answer["owner"]["display_name"]
content = answer["body"]
is_accepted = answer["is_accepted"]
upvotes = answer["score"]
url = answer["share_link"]
return [content, url, is_accepted, upvotes, author]
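# Index map for the list returned above: 0=content, 1=url, 2=is_accepted,
# 3=upvotes, 4=author (relied on by _generate_stack_answers_format below).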
def fetch_mass_data(query, _filter):
urls = query_google(query, "www.stackoverflow.com")
if urls is None:
return None
posts = []
for url in urls:
qid = get_question_id(url)
if not qid:
continue
data = get_question_data(qid)
posts.append(parse_question_data(data, _filter))
return posts
# StackAnswers output ---------------------------------------------------------
def _goto_window_for_buffer(b):
w = int(vim.eval('bufwinnr(%d)' % int(b)))
vim.command('%dwincmd w' % w)
def _goto_window_for_buffer_name(bn):
b = vim.eval('bufnr("%s")' % bn)
return _goto_window_for_buffer(b)
def _output_preview_text(lines):
_goto_window_for_buffer_name('__Answers__')
vim.command('setlocal modifiable')
lines = [line.encode('utf-8').replace("\n", "\r") for line in lines]
vim.current.buffer[:] = lines
vim.command('silent %s/\r\+/\n/ge')
vim.command('silent %s/\%x00/\r/ge')
vim.command('setlocal nomodifiable')
def _generate_stack_answers_format(posts):
response = []
for post in posts:
answers = len(post["answers"])
# If there is no question, pass
if post["question"] == "":
continue
question = "Q: " + post["question"]
response.append(question)
response.append("%d Answer(s)" % answers)
for answer in post["answers"]:
response.append("=" * 80)
response.append(answer[1] + " Upvotes: " + str(answer[3]))
response.append(strip_html(answer[0]) + "\r")
return response
def stackAnswers(query, _filter):
query = vim.eval("a:2")
_filter = vim.eval("g:stack_filter")
data = fetch_mass_data(query, _filter)
if data is None:
_output_preview_text(["Not connected to the Internet"])
else:
_output_preview_text(_generate_stack_answers_format(data))
| Python | 0.999909 | @@ -393,36 +393,8 @@
-except:%0A return None%0A
@@ -453,16 +453,48 @@
sults%22%5D%0A
+ except:%0A return None%0A
urls
@@ -3752,37 +3752,196 @@
t(%5B%22
-Not connected to the Internet
+Error fetching data...%22, %22There are a few possibilities:%22, %221) You are not connected to the Internet%22, %222) You've been sending too many requests, and Google has temporarily blocked your ip
%22%5D)%0A
|
304d7c75f68e999536f610e4e6eecfbfaa8c069e | make sure that nested calls succeed | awaitchannel/__init__.py | awaitchannel/__init__.py | """
Extends the synchronisation objects of asyncio (e.g. Lock, Event, Condition, Semaphore, Queue) with Channels like in Go.
Channels can be used for asynchronous or synchronous message exchange.
The select() can be used to react on finished await-calls and thus also on sending or receiving with channels.
The helpers go() and run() provide a simple way to setup an event loop for the concurrent functions.
"""
import asyncio
class Chan:
"""
Go-style channel with await send/recv
  can also be used as an iterator which calls recv() until a ChannelClosed exception occurs
"""
q = None # data channel
x = None # sync channel for size=0
size = None
is_closed = False
close = "{}{}".format(hash("Chan.closed"), "Chan.closed") # magic string as last element
def __init__(self, size=0):
"""size 0 or None indicates a blocking channel (handshake)
size -1 indicates an unlimited buffer size
otherwise send will block when buffer size is reached"""
if size == 0:
self.q = asyncio.Queue(1)
self.x = asyncio.Queue(1)
elif size == -1:
self.q = asyncio.Queue(0)
else:
self.q = asyncio.Queue(size)
self.size = size
@asyncio.coroutine
def close(self):
"""closes the channel which leads to a failure at the recv side and disallows further sending"""
self.is_closed = True
yield from self.q.put(self.close)
@asyncio.coroutine
def send(self, item):
"""blocks if size=0 until recv is called
blocks if send was used <size> times without a recv
blocks never for size=-1"""
if self.is_closed:
raise ChannelClosed
yield from self.q.put(item)
if self.size == 0:
yield from self.x.get()
def send_ready(self):
return not self.q.full()
def recv_ready(self):
return not self.q.empty()
@asyncio.coroutine
def recv(self):
"""blocks until something is available
fails if channel is closed"""
if self.is_closed and self.q.empty():
      self.q.put_nowait(self.close)
raise ChannelClosed
g = yield from self.q.get()
if self.is_closed and self.q.empty() and g == self.close:
self.q.put_nowait(self.close) # push back
raise ChannelClosed
if self.size == 0:
yield from self.x.put(True)
return g
async def __aiter__(self):
return self
async def __anext__(self):
try:
return await self.recv()
except ChannelClosed:
raise StopAsyncIteration
class ChannelClosed(Exception):
pass
### select on await events
async def wrap_future(e, f):
return e, await f
class SelectTasks:
"""helper class used for (pending) await-tasks monitored by select()"""
tasks = []
completed = []
def __init__(self, futures_list=None, already_running=False, completed=[]):
if futures_list and not already_running:
self.extend(futures_list)
elif futures_list and already_running:
self.tasks = list(futures_list)
else:
self.tasks = []
self.completed = completed
def append(self, a):
e, f = a
self.tasks.append(wrap_future(e, f))
def extend(self, futures_list):
self.tasks.extend([wrap_future(e, f) for e, f in futures_list])
def __bool__(self):
return bool(self.tasks) or bool(self.completed)
def __len__(self):
return len(self.tasks) + len(self.completed)
@asyncio.coroutine
def select(futures_list):
"""
  select on a list of identifier-await-tuples like [('r', c.recv()), (c, c.send(2))]
  returns a tuple consisting of an identifier-result-tuple like ('r', 7) or (c, None) and
  a special list object of pending tasks which can be directly used for the next select call or even expanded/appended on before
  be aware that results are internally buffered when several complete at the same time, so the logical ordering can differ
"""
if type(futures_list) is not SelectTasks:
futures_list = SelectTasks(futures_list)
if futures_list.completed:
result = futures_list.completed.pop()
return result, futures_list
done, running = yield from asyncio.wait(futures_list.tasks, return_when=asyncio.FIRST_COMPLETED)
result = done.pop().result()
results = [r.result() for r in done]
return result, SelectTasks(running, already_running=True, completed=results)
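# Call sketch: start with ``result, pending = yield from select([('r', c.recv())])``
# and keep polling the remaining tasks with ``select(pending)``.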
# short helper functions
go_tasks = []
def go(f, *args, **kwargs):
"""adds an async function to the asyncio event loop, must be called before run()"""
go_tasks.append(asyncio.ensure_future(f(*args, **kwargs)))
def run():
"""start the asyncio event loop with the tasks enqueued by go()"""
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(asyncio.wait(go_tasks))
finally:
loop.stop()
| Python | 0.000001 | @@ -4581,24 +4581,61 @@
)%0A try:%0A
+ while go_tasks:%0A done, others =
loop.run_un
@@ -4671,16 +4671,64 @@
tasks))%0A
+ for d in done:%0A go_tasks.remove(d)%0A
finall
|
8bcc09e4d3d0a14abd132e023bb4b4896aaac4f2 | make imports Python 3 friendly | barak/absorb/__init__.py | barak/absorb/__init__.py | from absorb import *
from equiv_width import *
from aod import *
| Python | 0.000012 | @@ -1,13 +1,14 @@
from
+.
absorb i
@@ -20,16 +20,17 @@
*%0Afrom
+.
equiv_wi
@@ -47,16 +47,17 @@
*%0Afrom
+.
aod impo
|
4b6bffdb048aa44b42cb80a54fca9a204ede833f | Update version to 0.0.3 | boto3facade/metadata.py | boto3facade/metadata.py | # -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'boto3facade'
project = "boto3facade"
project_no_spaces = project.replace(' ', '')
version = '0.0.2'
description = 'A simple facade for boto3'
authors = ['German Gomez-Herrero', 'Innovative Travel Ltd']
authors_string = ', '.join(authors)
emails = ['[email protected]']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://github.com/InnovativeTravel/boto3facade'
| Python | 0.000001 | @@ -254,17 +254,17 @@
= '0.0.
-2
+3
'%0Adescri
|
b9cd8a6656e3f271b4ca273981ca4a2315ced91a | Improve warning | bumpversion/__init__.py | bumpversion/__init__.py |
import ConfigParser
import argparse
import os.path
import warnings
import re
import sre_constants
def attempt_version_bump(args):
try:
regex = re.compile(args.parse)
except sre_constants.error:
warnings.warn("--patch '{}' is not a valid regex".format(args.parse))
return
if args.current_version:
match = regex.search(args.current_version)
else:
return
if not match:
warnings.warn("'{}' does not parse current version".format(args.parse))
return
parsed = match.groupdict()
parsed[args.bump] = int(parsed[args.bump]) + 1
try:
return args.serialize.format(**parsed)
except KeyError as e:
warnings.warn("Did not find {} in {} when serializing version number".format(e.message, repr(parsed)))
return
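# Worked example of the round trip above (illustrative): with the default
# --parse regex and --serialize format below, bumping part 'patch' of
# current_version '1.2.3' yields parsed == {'major': '1', 'minor': '2',
# 'patch': '3'}, the patch value becomes 4, and serializing gives '1.2.4'.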
def main(args=None):
parser1 = argparse.ArgumentParser(add_help=False)
parser1.add_argument('--config-file', default='.bumpversion.cfg', metavar='FILE',
help='Config file to read most of the variables from', required=False)
known_args, remaining_argv = parser1.parse_known_args(args)
defaults = {}
config = None
if os.path.exists(known_args.config_file):
config = ConfigParser.SafeConfigParser()
config.read([known_args.config_file])
defaults = dict(config.items("bumpversion"))
elif known_args.config_file != parser1.get_default('config_file'):
raise argparse.ArgumentTypeError("Could not read config file at {}".format(
known_args.config_file))
parser2 = argparse.ArgumentParser(add_help=False, parents=[parser1])
parser2.set_defaults(**defaults)
parser2.add_argument('--current-version', metavar='VERSION',
help='Version that needs to be updated', required=False)
parser2.add_argument('--bump', metavar='PART',
help='Part of the version to be bumped.',
default='patch')
parser2.add_argument('--parse', metavar='REGEX',
help='Regex parsing the version string',
default='(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)')
parser2.add_argument('--serialize', metavar='FORMAT',
help='How to format what is parsed back to a version',
default='{major}.{minor}.{patch}')
known_args, remaining_argv = parser2.parse_known_args(remaining_argv)
defaults.update(vars(known_args))
_attempted_new_version = attempt_version_bump(known_args)
    if 'new_version' not in defaults and _attempted_new_version is not None:
defaults['new_version'] = _attempted_new_version
parser3 = argparse.ArgumentParser(
description='Bumps version strings',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
conflict_handler='resolve',
parents=[parser2],
)
parser3.set_defaults(**defaults)
parser3.add_argument('--current-version', metavar='VERSION',
help='Version that needs to be updated',
                         required='current_version' not in defaults)
parser3.add_argument('--dry-run', '-n', action='store_true',
default=False, help="Don't write any files, just pretend.")
parser3.add_argument('--new-version', metavar='VERSION',
help='New version that should be in the files',
                         required='new_version' not in defaults)
files = []
if 'files' in defaults:
assert defaults['files'] != None
files = defaults['files'].split(' ')
parser3.add_argument('files', metavar='file',
nargs='+' if len(files) == 0 else '*',
help='Files to change', default=files)
args = parser3.parse_args(remaining_argv)
    if len(args.files) == 0:
warnings.warn("No files specified")
for path in args.files:
with open(path, 'r') as f:
before = f.read()
assert args.current_version in before, 'Did not find string {} in file {}'.format(
args.current_version, path)
after = before.replace(args.current_version, args.new_version)
if not args.dry_run:
with open(path, 'w') as f:
f.write(after)
if config:
config.remove_option('bumpversion', 'new_version')
config.set('bumpversion', 'current_version', args.new_version)
if not args.dry_run:
config.write(open(known_args.config_file, 'wb'))
| Python | 0.000007 | @@ -721,16 +721,20 @@
ot find
+key
%7B%7D in %7B%7D
@@ -774,16 +774,34 @@
.format(
+%0A repr(
e.messag
@@ -801,16 +801,17 @@
.message
+)
, repr(p
|
81df185279a8d46ca2e8ed9fbed4c3204522965e | Extend potential life of social media queue entries | bvspca/social/models.py | bvspca/social/models.py | import logging
from datetime import datetime, timedelta
from django.db import models
from wagtail.core.models import Page
logger = logging.getLogger('bvspca.social')
class SocialMediaPostable():
    def social_media_ready_to_post(self):
        raise NotImplementedError()
    def social_media_post_text(self):
        raise NotImplementedError()
    def social_media_post_image(self):
        raise NotImplementedError()
class Meta:
abstract = True
class SocialMediaQueueManager(models.Manager):
def delete_old_entries(self):
"""
Delete all entries from queue older than 7 days
:return:
"""
count, counts_by_object_type = self.filter(date__lt=datetime.now() - timedelta(7)).delete()
if count > 0:
for object_type, object_count in counts_by_object_type.items():
logger.info('Deleted {} objects of type {}'.format(object_count, object_type))
def next_postable_entry(self):
"""
Get the next queued entry that is ready to post
:return:
"""
        entries = self.order_by('priority', 'date')
        for entry in entries:
            if entry.ready():
                return entry
class SocialMediaQueue(models.Model):
"""
A queue of potential pages to post to social media
"""
PRIORITIES = ((1, 1), (2, 2), (3, 3), (4, 4), (5, 5))
date = models.DateTimeField(verbose_name='timestamp', auto_now_add=True)
priority = models.PositiveSmallIntegerField(choices=PRIORITIES)
page = models.OneToOneField(
Page,
on_delete=models.DO_NOTHING,
related_name='+',
)
objects = SocialMediaQueueManager()
class Meta:
pass
def ready(self):
return self.page.specific.social_media_ready_to_post()
def __str__(self):
return self.page.title
| Python | 0.000003 | @@ -591,17 +591,18 @@
er than
-7
+14
days%0A%0A
@@ -720,9 +720,10 @@
lta(
-7
+14
)).d
|
e1aafc85403366dba19963abb1c868bd328a4706 | fix unicode writes | bzETL/util/env/files.py | bzETL/util/env/files.py | # encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from datetime import datetime
import io
import os
import shutil
from ..maths import crypto
from ..struct import listwrap, nvl
from ..cnv import CNV
class File(object):
"""
ASSUMES ALL FILE CONTENT IS UTF8 ENCODED STRINGS
"""
def __init__(self, filename, buffering=2 ** 14, suffix=None):
"""
YOU MAY SET filename TO {"path":p, "key":k} FOR CRYPTO FILES
"""
if filename == None:
from ..env.logs import Log
Log.error("File must be given a filename")
elif isinstance(filename, basestring):
self.key = None
self._filename = "/".join(filename.split(os.sep)) # USE UNIX STANDARD
self.buffering = buffering
else:
self.key = CNV.base642bytearray(filename.key)
self._filename = "/".join(filename.path.split(os.sep)) # USE UNIX STANDARD
self.buffering = buffering
if suffix:
self._filename = File.add_suffix(self._filename, suffix)
@property
def filename(self):
return self._filename.replace("/", os.sep)
@property
def abspath(self):
return os.path.abspath(self._filename)
@staticmethod
def add_suffix(filename, suffix):
"""
ADD suffix TO THE filename (NOT INCLUDING THE FILE EXTENSION)
"""
path = filename.split("/")
parts = path[-1].split(".")
i = max(len(parts)-2, 0)
parts[i]=parts[i]+suffix
path[-1]=".".join(parts)
return "/".join(path)
def backup_name(self, timestamp=None):
"""
RETURN A FILENAME THAT CAN SERVE AS A BACKUP FOR THIS FILE
"""
suffix = CNV.datetime2string(nvl(timestamp, datetime.now()), "%Y%m%d_%H%M%S")
return File.add_suffix(self._filename, suffix)
def read(self, encoding="utf8"):
with open(self._filename, "rb") as f:
content = f.read().decode(encoding)
if self.key:
return crypto.decrypt(content, self.key)
else:
return content
def read_ascii(self):
if not self.parent.exists:
self.parent.create()
with open(self._filename, "r") as f:
return f.read()
def write_ascii(self, content):
if not self.parent.exists:
self.parent.create()
with open(self._filename, "w") as f:
f.write(content)
def write(self, data):
if not self.parent.exists:
self.parent.create()
with open(self._filename, "wb") as f:
if isinstance(data, list) and self.key:
from ..env.logs import Log
Log.error("list of data and keys are not supported, encrypt before sending to file")
for d in listwrap(data):
if not isinstance(d, unicode):
from ..env.logs import Log
Log.error("Expecting unicode data only")
if self.key:
f.write(crypto.encrypt(d, self.key).encode("utf8"))
else:
f.write(d.encode("utf8"))
def __iter__(self):
#NOT SURE HOW TO MAXIMIZE FILE READ SPEED
#http://stackoverflow.com/questions/8009882/how-to-read-large-file-line-by-line-in-python
#http://effbot.org/zone/wide-finder.htm
def output():
with io.open(self._filename, "rb") as f:
for line in f:
yield line.decode("utf8")
return output()
def append(self, content):
if not self.parent.exists:
self.parent.create()
with open(self._filename, "a") as output_file:
output_file.write(content)
def add(self, content):
return self.append(content)
def extend(self, content):
if not self.parent.exists:
self.parent.create()
with open(self._filename, "a") as output_file:
for c in content:
output_file.write(c)
def delete(self):
try:
if os.path.isdir(self._filename):
shutil.rmtree(self._filename)
elif os.path.isfile(self._filename):
os.remove(self._filename)
return self
except Exception, e:
if e.strerror == "The system cannot find the path specified":
return
from ..env.logs import Log
Log.error("Could not remove file", e)
def backup(self):
names = self._filename.split("/")[-1].split(".")
        if len(names) == 1:
            backup = File(self._filename + ".backup " + datetime.utcnow().strftime("%Y%m%d %H%M%S"))
            # sketch of the missing copy step (the original left 'backup' unused)
            shutil.copy(self.filename, backup.filename)
            return backup
def create(self):
try:
os.makedirs(self._filename)
except Exception, e:
from ..env.logs import Log
Log.error("Could not make directory {{dir_name}}", {"dir_name": self._filename}, e)
@property
def parent(self):
return File("/".join(self._filename.split("/")[:-1]))
@property
def exists(self):
if self._filename in ["", "."]:
return True
try:
return os.path.exists(self._filename)
except Exception, e:
return False
def __bool__(self):
return self.__nonzero__()
def __nonzero__(self):
"""
USED FOR FILE EXISTENCE TESTING
"""
if self._filename in ["", "."]:
return True
try:
return os.path.exists(self._filename)
except Exception, e:
return False
| Python | 0.005837 | @@ -4233,16 +4233,163 @@
ontent:%0A
+ if isinstance(c, str):%0A from .logs import Log%0A Log.error(%22expecting to write unicode only%22)%0A%0A
@@ -4415,16 +4415,32 @@
.write(c
+.encode(%22utf-8%22)
)%0A%0A%0A
|
77f4b5b1bc3c30fb454212d3c4d2aa62d8c06ca8 | Update exportyaml.py | canmatrix/exportyaml.py | canmatrix/exportyaml.py | #!/usr/bin/env python
from __future__ import absolute_import
from .canmatrix import *
import codecs
import yaml
from yaml.representer import SafeRepresenter
from builtins import *
import copy
#Copyright (c) 2013, Eduard Broecker
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
#WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
#PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
#PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
#OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
#DAMAGE.
#
# this script exports yaml files from a canmatrix object
# yaml files are just human-readable object dumps
# the export is complete, no information is lost
representers = False
try:
yaml.add_representer(int, SafeRepresenter.represent_int)
yaml.add_representer(long, SafeRepresenter.represent_long)
yaml.add_representer(unicode, SafeRepresenter.represent_unicode)
yaml.add_representer(str, SafeRepresenter.represent_unicode)
yaml.add_representer(list, SafeRepresenter.represent_list)
representers = True
except:
representers = False
# some error with representers ... continue anyway
def exportYaml(db, filename, **options):
newdb = copy.deepcopy(db)
for i,frame in enumerate(newdb._fl._list):
for j,signal in enumerate(frame._signals):
            if not signal._is_little_endian:
signal._startbit = signal.getStartbit(bitNumbering = 1, startLittle = True)
newdb._fl._list[i]._signals[j]._startbit = signal._startbit
f = open(filename,"w")
if representers:
f.write(unicode(yaml.dump(newdb)))
else:
f.write(yaml.dump(newdb))
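# Minimal usage sketch (illustrative; db is any canmatrix object, e.g. the
# result of one of the import* modules):
#
#     exportYaml(db, "example.yaml")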
| Python | 0.000001 | @@ -2534,16 +2534,17 @@
ename,%22w
+b
%22)%0A i
|
df679af352f156ad4846fbc53a8efc43814b897c | Update ce.transformer.utils | ce/transformer/utils.py | ce/transformer/utils.py | import itertools
from ce.expr import Expr
from ce.transformer.core import TreeTransformer
from ce.transformer.biop import associativity, distribute_for_distributivity, \
BiOpTreeTransformer
from ce.analysis import expr_frontier
import ce.logger as logger
def closure(tree, depth=None):
return BiOpTreeTransformer(tree, depth=depth).closure()
def transform(tree,
reduction_methods=None, transform_methods=None, depth=None):
t = TreeTransformer(tree)
t.reduction_methods = reduction_methods or []
t.transform_methods = transform_methods or []
return t.closure()
def expand(tree):
return transform(tree, [distribute_for_distributivity]).pop()
def reduce(tree):
try:
tree = Expr(tree)
except TypeError:
return {reduce(t) for t in tree}
s = set(transform(tree, BiOpTreeTransformer.reduction_methods))
if len(s) > 1:
s.remove(tree)
if len(s) == 1:
return s.pop()
    raise Exception('reduction did not converge to a single expression')
def parsings(tree):
return transform(tree, None, [associativity])
def martel_closure(tree, depth=None):
t = BiOpTreeTransformer(tree, depth=depth)
t.transform_methods.remove(distribute_for_distributivity)
return t.closure()
class MartelExpr(Expr):
def traces(self, var_env=None, depth=None):
def subtraces(a):
try:
                return MartelExpr(a).traces(var_env, depth)
except (ValueError, TypeError):
return {a}
stl = [subtraces(a) for a in self.args]
sts = set(Expr(self.op, args) for args in itertools.product(*stl))
logger.debug('Generating %s~=%d traces for tree: %s' %
('*'.join([str(len(s)) for s in stl]),
len(sts), str(self)))
cll = martel_closure(sts, depth=depth)
if var_env:
cll = expr_frontier(cll, var_env)
return cll
def __repr__(self):
return "MartelExpr(op='%s', a1=%s, a2=%s)" % \
(self.op, repr(self.a1), repr(self.a2))
def martel(tree, var_env=None, depth=2):
return reduce(MartelExpr(expand(tree)).traces(var_env, depth))
if __name__ == '__main__':
from matplotlib import pyplot as plt
import ce.logger as logger
from ce.common import timeit
from ce.semantics import cast_error
from ce.analysis import analyse, frontier, zip_result
logger.set_context(level=logger.levels.info)
Expr.__repr__ = Expr.__str__
logger.info('Expand', expand('(a + 3) * (a + 3)'))
logger.info('Parsings', parsings('a + b + c'))
logger.info('Reduction', reduce('a + 2 * 3 * 4 + 6 * b + 3'))
e = 'a * a * b * b + a * a * b + 2 * a * b + 3 * a + 4'
v = {
'a': cast_error('0.1', '0.2'),
'b': cast_error('100', '200'),
}
def closure_frontier(e, v):
c = closure(e)
return c, expr_frontier(c, v)
complete, complete_front = timeit(closure_frontier)(e, v)
martel_front = timeit(martel)(e, v)
logger.info('Closure', len(complete_front), complete_front)
logger.info('Martel', len(martel_front), martel_front)
plt.scatter(*zip_result(analyse(complete, v)))
plt.plot(*zip_result(frontier(complete, v)))
plt.plot(*zip_result(analyse(martel_front, v)))
plt.show()
| Python | 0.000001 | @@ -2127,49 +2127,8 @@
_':%0A
- from matplotlib import pyplot as plt%0A
@@ -2154,16 +2154,16 @@
logger%0A
+
from
@@ -2274,25 +2274,19 @@
ontier,
-zip_resul
+Plo
t%0A lo
@@ -2687,16 +2687,28 @@
,%0A %7D%0A
+ @timeit%0A
def
@@ -2823,23 +2823,16 @@
front =
-timeit(
closure_
@@ -2839,17 +2839,16 @@
frontier
-)
(e, v)%0A
@@ -3018,127 +3018,78 @@
p
-lt.scatter(*zip_result(analyse(complete, v)))%0A plt.plot(*zip_result(frontier(c
+ = Plot()%0A p.add(analyse(complete, v), legend='C
omplete
-, v))
+'
)%0A p
-lt.plot(*zip_result
+.add
(ana
@@ -3113,18 +3113,32 @@
, v)
-)
+, legend='Martel'
)%0A p
-lt
.sho
|
1a50aaf6be0f866046d88944607802a4e8661c61 | Revert "Test jenkins failure" | ceam_tests/test_util.py | ceam_tests/test_util.py | # ~/ceam/tests/test_util.py
from unittest import TestCase
from datetime import timedelta
from unittest.mock import Mock
import numpy as np
import pandas as pd
from ceam.engine import SimulationModule
from ceam.util import from_yearly, to_yearly, rate_to_probability, probability_to_rate
class TestRateConversions(TestCase):
"""
Simple regression tests for rate functions
"""
def test_from_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.01
new_rate = from_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.0008356164383561645)
def test_to_yearly(self):
one_month = timedelta(days=30.5)
rate = 0.0008356164383561645
new_rate = to_yearly(rate, one_month)
self.assertAlmostEqual(new_rate, 0.01)
def test_rate_to_probability(self):
rate = 0.001
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob, 0.00099950016662497809)
    def test_probability_to_rate(self):
prob = 0.00099950016662497809
rate = probability_to_rate(prob)
self.assertAlmostEqual(rate, 0.001)
def test_rate_to_probability_symmetry(self):
rate = 0.0001
for _ in range(100):
prob = rate_to_probability(rate)
self.assertAlmostEqual(rate, probability_to_rate(prob))
rate += (1-0.0001)/100.0
    def test_rate_to_probability_vectorizability(self):
rate = 0.001
rate = np.array([rate]*100)
prob = rate_to_probability(rate)
self.assertAlmostEqual(prob[10], 0.00099950016662497809)
self.assertAlmostEqual(np.sum(rate), np.sum(probability_to_rate(prob)))
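    # The regression values above are consistent with the exponential
    # waiting-time relations prob = 1 - exp(-rate) and rate = -log(1 - prob)
    # (e.g. 1 - exp(-0.001) = 0.0009995001666...), and with from_yearly scaling
    # a rate linearly by the time fraction (0.01 * 30.5/365 = 0.000835616...).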
# End.
| Python | 0 | @@ -1674,58 +1674,8 @@
))%0A%0A
- def test_failure(self):%0A assert False%0A%0A
%0A# E
|
219dccb4deb0abc255d80a35c6106ded9f89a315 | Fix typo | checks.d/supervisord.py | checks.d/supervisord.py | from collections import defaultdict
import errno
import socket
import time
import xmlrpclib
from checks import AgentCheck
import supervisor.xmlrpc
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = '9001'
DEFAULT_SOCKET_IP = 'http://127.0.0.1'
DD_STATUS = {
'STOPPED': AgentCheck.CRITICAL,
'STARTING': AgentCheck.UNKNOWN,
'RUNNING': AgentCheck.OK,
'BACKOFF': AgentCheck.CRITICAL,
'STOPPING': AgentCheck.CRITICAL,
'EXITED': AgentCheck.CRITICAL,
'FATAL': AgentCheck.CRITICAL,
'UNKNOWN': AgentCheck.UNKNOWN
}
PROCESS_STATUS = {
AgentCheck.CRITICAL: 'down',
AgentCheck.OK: 'up',
AgentCheck.UNKNOWN: 'unknown'
}
SERVER_TAG = 'supervisord_server'
PROCESS_TAG = 'supervisord_process'
FORMAT_TIME = lambda x: time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(x))
SERVER_SERVICE_CHECK = 'supervisord.can_connect'
PROCESS_SERVICE_CHECK = 'supervisord.process.status'
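# Illustrative instance entry this check reads (names are hypothetical; the
# keys match the instance.get() calls below):
#
#     instances:
#       - name: server0
#         host: localhost
#         port: 9001
#         proc_names:
#           - webapp
#
# 'socket', 'user' and 'pass' are read the same way for socket/authenticated
# setups.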
class SupervisordCheck(AgentCheck):
def check(self, instance):
server_name = instance.get('name')
if not server_name or not server_name.strip():
raise Exception("Supervisor server name not specified in yaml configuration.")
server_service_check_tags = ['%s:%s' % (SERVER_TAG, server_name)]
supe = self._connect(instance)
count_by_status = defaultdict(int)
# Grab process information
try:
proc_names = instance.get('proc_names')
if proc_names:
if not isinstance(proc_names, list) or not len(proc_names):
raise Exception("Empty or invalid proc_names.")
processes = []
for proc_name in proc_names:
try:
processes.append(supe.getProcessInfo(proc_name))
except xmlrpclib.Fault, e:
if e.faultCode == 10: # bad process name
self.warning('Process not found: %s' % proc_name)
else:
                        raise Exception('An error occurred while reading '
'process %s information: %s %s'
% (proc_name, e.faultCode, e.faultString))
else:
processes = supe.getAllProcessInfo()
except socket.error, e:
host = instance.get('host', DEFAULT_HOST)
port = instance.get('port', DEFAULT_PORT)
sock = instance.get('socket')
if sock is None:
msg = 'Cannot connect to http://%s:%s. ' \
'Make sure supervisor is running and XML-RPC ' \
'inet interface is enabled.' % (host, port)
else:
                msg = 'Cannot connect to %s. Make sure supervisor ' \
                      'is running and socket is enabled and the socket file' \
                      ' has the right permissions.' % sock
self.service_check(SERVER_SERVICE_CHECK, AgentCheck.CRITICAL,
tags=server_service_check_tags,
message=msg)
raise Exception(msg)
except xmlrpclib.ProtocolError, e:
if e.errcode == 401: # authorization error
msg = 'Username or password to %s are incorrect.' % server_name
else:
msg = "An error occurred while connecting to %s: "\
"%s %s " % (server_name, e.errcode, e.errmsg)
self.service_check(SERVER_SERVICE_CHECK, AgentCheck.CRITICAL,
tags=server_service_check_tags,
message=msg)
raise Exception(msg)
# If we're here, we were able to connect to the server
self.service_check(SERVER_SERVICE_CHECK, AgentCheck.OK,
tags=server_service_check_tags)
# Report service checks and uptime for each process
for proc in processes:
proc_name = proc['name']
tags = ['%s:%s' % (SERVER_TAG, server_name),
'%s:%s' % (PROCESS_TAG, proc_name)]
# Report Service Check
status = DD_STATUS[proc['statename']]
msg = self._build_message(proc)
count_by_status[status] += 1
self.service_check(PROCESS_SERVICE_CHECK,
status, tags=tags, message=msg)
# Report Uptime
uptime = self._extract_uptime(proc)
self.gauge('supervisord.process.uptime', uptime, tags=tags)
# Report counts by status
tags = ['%s:%s' % (SERVER_TAG, server_name)]
for status in PROCESS_STATUS:
self.gauge('supervisord.process.count', count_by_status[status],
tags=tags + ['status:%s' % PROCESS_STATUS[status]])
@staticmethod
def _connect(instance):
sock = instance.get('socket')
if sock is not None:
host = instance.get('host', DEFAULT_SOCKET_IP)
transport = supervisor.xmlrpc.SupervisorTransport(None, None, sock)
server = xmlrpclib.ServerProxy(host, transport=transport)
else:
host = instance.get('host', DEFAULT_HOST)
port = instance.get('port', DEFAULT_PORT)
user = instance.get('user')
password = instance.get('pass')
auth = '%s:%s@' % (user, password) if user and password else ''
server = xmlrpclib.Server('http://%s%s:%s/RPC2' % (auth, host, port))
return server.supervisor
@staticmethod
def _extract_uptime(proc):
start, now = int(proc['start']), int(proc['now'])
status = proc['statename']
active_state = status in ['BACKOFF', 'RUNNING', 'STOPPING']
return now - start if active_state else 0
@staticmethod
def _build_message(proc):
start, stop, now = int(proc['start']), int(proc['stop']), int(proc['now'])
proc['now_str'] = FORMAT_TIME(now)
proc['start_str'] = FORMAT_TIME(start)
proc['stop_str'] = '' if stop == 0 else FORMAT_TIME(stop)
return """Current time: %(now_str)s
Process name: %(name)s
Process group: %(group)s
Description: %(description)s
Error log file: %(stderr_logfile)s
Stdout log file: %(stdout_logfile)s
Log file: %(logfile)s
State: %(statename)s
Start time: %(start_str)s
Stop time: %(stop_str)s
Exit Status: %(exitstatus)s""" % proc
| Python | 0.999999 | @@ -2585,24 +2585,29 @@
'Make sure
+that
supervisor i
@@ -2769,20 +2769,20 @@
ke sure
-sure
+that
supervi
@@ -2829,29 +2829,16 @@
and
-socket is enabled and
+that the
soc
|
c00c7e6099269c66b64a15c15318093eadbf3851 | Fix excluded_extensions when ignore_hidden is False | checksumdir/__init__.py | checksumdir/__init__.py | """
Function for deterministically creating a single hash for a directory of files,
taking into account only file contents and not filenames.
Usage:
from checksumdir import dirhash
dirhash('/path/to/directory', 'md5')
"""
import os
import hashlib
import re
import pkg_resources
__version__ = pkg_resources.require("checksumdir")[0].version
HASH_FUNCS = {
'md5': hashlib.md5,
'sha1': hashlib.sha1,
'sha256': hashlib.sha256,
'sha512': hashlib.sha512
}
def dirhash(dirname, hashfunc='md5', excluded_files=None, ignore_hidden=False,
followlinks=False, excluded_extensions=None):
hash_func = HASH_FUNCS.get(hashfunc)
if not hash_func:
raise NotImplementedError('{} not implemented.'.format(hashfunc))
if not excluded_files:
excluded_files = []
if not excluded_extensions:
excluded_extensions = []
if not os.path.isdir(dirname):
raise TypeError('{} is not a directory.'.format(dirname))
hashvalues = []
for root, dirs, files in os.walk(dirname, topdown=True, followlinks=followlinks):
if ignore_hidden:
if not re.search(r'/\.', root):
hashvalues.extend(
[_filehash(os.path.join(root, f),
hash_func) for f in files if not
f.startswith('.') and not re.search(r'/\.', f)
and f not in excluded_files
and f.split('.')[-1:][0] not in excluded_extensions
]
)
else:
            hashvalues.extend(
                [
                    _filehash(os.path.join(root, f), hash_func)
                    for f in files
                    if f not in excluded_files
                    and f.split('.')[-1:][0] not in excluded_extensions
                ]
            )
return _reduce_hash(hashvalues, hash_func)
def _filehash(filepath, hashfunc):
hasher = hashfunc()
blocksize = 64 * 1024
with open(filepath, 'rb') as fp:
while True:
data = fp.read(blocksize)
if not data:
break
hasher.update(data)
return hasher.hexdigest()
def _reduce_hash(hashlist, hashfunc):
hasher = hashfunc()
for hashvalue in sorted(hashlist):
hasher.update(hashvalue.encode('utf-8'))
return hasher.hexdigest()
| Python | 0 | @@ -1571,17 +1571,55 @@
.extend(
-%5B
+%0A %5B%0A
_filehas
@@ -1634,32 +1634,44 @@
h.join(root, f),
+ hash_func)
%0A
@@ -1667,34 +1667,48 @@
-
+for f in files %0A
@@ -1714,61 +1714,138 @@
-hash_func) for f in files if f not in excluded_files%5D
+ if f not in excluded_files%0A and f.split('.')%5B-1:%5D%5B0%5D not in excluded_extensions%0A %5D%0A
)%0A
|
ee9a6d126237679114da3afd6120861474905402 | fix for messaging address | clearblade/Messaging.py | clearblade/Messaging.py | import paho.mqtt.client as mqtt
import thread
import time
import Client
import UserClient
import math
import random
import string
import auth
from urlparse import urlparse
class Messaging():
CB_MSG_ADDR = ""
response = 0
keep_alive = 30
subscribeDict = dict()
def __init__(self, clientType):
self.client = ""
self.rc = 0
self.clientType = clientType
        self.CB_MSG_ADDR = urlparse(clientType.platform).netloc.split(':')[0]
self.auth = auth.Auth()
def printValue(self):
if isinstance(self.clientType, Client.UserClient):
print self.clientType.email
if isinstance(self.clientType, Client.DevClient):
print self.clientType.email
def InitializeMQTT(self, **keyword_parameters):
print("Inside initialize")
if isinstance(self.clientType, Client.UserClient):
self.client = mqtt.Client(client_id=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(23)), clean_session=True, userdata=None, protocol=mqtt.MQTTv311)
self.client.username_pw_set(self.clientType.UserToken, self.clientType.systemKey)
print self.clientType.UserToken
if isinstance(self.clientType, Client.DevClient):
self.client = mqtt.Client(client_id=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(23)), protocol=mqtt.MQTTv311)
self.client.username_pw_set(self.clientType.DevToken, self.clientType.systemKey)
def on_connect(client, flag, userdata, rc):
self.response = 1
self.rc = rc
if self.rc == 0:
print flag
print userdata
print client
print "Connected successfully "
for topic, qos in self.subscribeDict.iteritems():
def onMessageCallback(client, obj, msg):
print "DSD"+msg.payload
self.subscribeNew(topic,qos,onMessageCallback)
print "SUBSCRIBING "+topic+", "+str(qos)
else:
print "Error in connection with code ", str(rc)+"... Trying to reconnect"
self.reconnectFunction()
def on_log(client, userdata, level, buf):
print "Inside log : ", buf
self.client.on_connect = on_connect
self.on_log = on_log
if ('keep_alive' in keyword_parameters):
print "Timeout is : ", keyword_parameters['keep_alive']
self.client.connect(self.CB_MSG_ADDR, 1883, keyword_parameters['keep_alive'])
self.client.loop_start()
else:
print "Attempting to connect now"
self.client.connect_async(self.CB_MSG_ADDR, 1883, keepalive=30)
self.client.loop_start()
while(self.response !=1):
continue
if self.rc > 0:
self.client.loop_stop()
return self.rc
def publishMessage(self, topic, data, qos):
if isinstance(self.clientType, Client.UserClient) or isinstance(self.clientType, Client.DevClient):
def on_publish(client, userdata, mid):
print "Published", client
self.client.on_publish = on_publish
self.client.publish(topic, data, qos)
def subscribe(self, topic, qos, onMessageCallback):
if isinstance(self.clientType, Client.UserClient) or isinstance(self.clientType, Client.DevClient):
if topic in self.subscribeDict:
print "Already subscribed to the topic"
else:
self.subscribeDict[topic] = qos
thread.start_new_thread(self.keepSubscribed, (topic,qos,onMessageCallback))
def subscribeNew(self, topic, qos, onMessageCallback):
if isinstance(self.clientType, Client.UserClient) or isinstance(self.clientType, Client.DevClient):
thread.start_new_thread(self.keepSubscribed, (topic,qos,onMessageCallback))
def keepSubscribed(self,topic,qos,onMessageCallback):
def on_subscribe(client, userdata, mid, gqos):
print "Subscribed"
# def on_message(client, obj,msg):
# print msg.payload
self.client.subscribe(topic, qos)
self.client.on_subscribe = on_subscribe
self.client.on_message = onMessageCallback
def unsubscribe(self, topic):
if isinstance(self.clientType, Client.UserClient) or isinstance(self.clientType, Client.DevClient):
print("Inside unsubscribed")
self.client.unsubscribe(topic)
self.subscribeDict.pop(topic, None)
def on_unsubscribe(client, userdata, mid):
print "Unsubscribed ",mid
self.client.on_unsubscribe = on_unsubscribe
def disconnect(self):
if isinstance(self.clientType, Client.UserClient) or isinstance(self.clientType, Client.DevClient):
def on_disconnect(client, userdata, rc):
print "Disconnected"
self.client.disconnect()
self.client.on_disconnect = on_disconnect
self.client.loop_stop()
def reconnectFunction(self):
print("Trying to reconnect")
#userClient = Client.UserClient("cecdeef40a98c1e1cb87c58dad58", "CECDEEF40A869A818AC6D9D4C21F", "[email protected]", "edge", "http://localhost")
userClient = self.clientType
self.auth.Authenticate(userClient)
self.clientType = userClient
if isinstance(self.clientType, Client.UserClient):
#self.client = mqtt.Client(client_id=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(23)), clean_session=True, userdata=None, protocol=mqtt.MQTTv311)
#self.client.reinitialise(client_id=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(23)), clean_session=True, userdata=None)
self.client.disconnect()
self.client.username_pw_set(self.clientType.UserToken, self.clientType.systemKey)
#print "###"+self.clientType.UserToken+"###"
#print self.clientType.systemKey
#self.client.connect(self.CB_MSG_ADDR, 1883, self.keep_alive)
self.client.reconnect()
self.client.loop_start()
print "reinitialised"
if isinstance(self.clientType, Client.DevClient):
self.client = mqtt.Client(client_id=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(23)), protocol=mqtt.MQTTv311)
self.client.username_pw_set(self.clientType.DevToken, self.clientType.systemKey)
self.client.disconnect()
self.client.username_pw_set(self.clientType.UserToken, self.clientType.systemKey)
#print "###"+self.clientType.UserToken+"###"
#print self.clientType.systemKey
#self.client.connect(self.CB_MSG_ADDR, 1883, self.keep_alive)
self.client.reconnect()
self.client.loop_start()
print "reinitialised"
| Python | 0.000001 | @@ -378,25 +378,8 @@
ADDR
-, seperator, port
= u
@@ -418,23 +418,59 @@
loc.
-rpartition(':')
+split(':')%0A%09%09self.CB_MSG_ADDR = self.CB_MSG_ADDR%5B0%5D
%0A%09%09s
|
093d905f6800b9e6b4c0beeeccde55a20f9fa3f3 | Add Colemak lesson #3 | colemaktutor/lessons.py | colemaktutor/lessons.py | # Copyright (c) 2014 Benjamin Althues <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
try:
from colors import green, cyan, bold, underline
except ImportError:
green = cyan = bold = underline = lambda x: x
class ColemakLessons:
titles = [
'Lesson 1 - letters ' + green('STNE'),
'Lesson 2 - letters ' + green('RI'),
]
def __init__(self, tutor, header_func):
self.tutor = tutor
self.header_func = header_func
def start(self, start=None):
if not start or start == 1:
self.lesson1()
self.lesson2()
def printHeader(self, lessonNum):
self.header_func()
print(self.titles[lessonNum - 1] + '\n')
def lesson1(self):
self.printHeader(1)
# print(' --ST--NE--- --DF--JK---')
print(
' '
'--' + green('S') + green('T', style='underline') + '-'
'-' + green('N', style='underline') + green('E') + '---'
' '
'-SD' + underline('F') + '--' + underline('J') + 'KL--'
)
print('\n (colemak) (qwerty)\n')
self.tutor.lines([
'sets tens ten tnt sestet tenet seen nene testee tenets',
'essen sent senses tenses teens stent sense tent nets',
'tenseness net tense nests tennessee teen nest tents',
'net tens teen tenets senses nests nest nets tenet',
'sent sense tenses tennessee essen tnt tent teens',
'tense nene stent seen'
])
def lesson2(self):
self.printHeader(2)
# print(' -RST--NEI-- -SDF--JKL--')
print(
' '
'-' + green('R') + cyan('S') + cyan('T', style='underline') + '-'
'-' + cyan('N', style='underline') + cyan('E') + green('I') + '--'
' '
'-SD' + underline('F') + '--' + underline('J') + 'KL--'
)
print('\n (colemak) (qwerty)\n')
self.tutor.lines([
'trite stress sire it entire terse tit sir tire sinner retire',
'rinse inn tree insist tier rite teeter resin stir siren enter',
'sitter insert site sneer intern tie inner series steer tin',
'riser its resent sin rise rent rein iris stern in titter resist',
'eerie inert street is renter sit nine risen sister serene',
'stint err snit intent entree nit inter rest tennis re tint'
])
| Python | 0 | @@ -1031,14 +1031,59 @@
'),%0A
+ 'Lesson 3 - letters ' + green('AO'),%0A
%5D%0A
-
%0A
@@ -1220,50 +1220,74 @@
art=
-None):%0A if not start or start =
+1):%0A n = 0%0A for i in self.titles:%0A n +
= 1
-:
%0A
@@ -1299,43 +1299,71 @@
-self.lesson1()%0A
+if start %3C= n:%0A getattr(
self
-.
+, '
lesson
-2
+' + str(n))
()%0A%0A
@@ -3258,20 +3258,1057 @@
re tint'%0A %5D)%0A
+%0A def lesson3(self):%0A self.printHeader(3)%0A # print(' ARST--NEIO- ASDF--JKL;-')%0A print(%0A ' '%0A + green('A') + cyan('RS') + cyan('T', style='underline') + '-'%0A '-' + cyan('N', style='underline') + cyan('EI') + green('O') + '-'%0A ' '%0A 'ASD' + underline('F') + '--' + underline('J') + 'KL;-'%0A )%0A print('%5Cn (colemak) (qwerty)%5Cn')%0A self.tutor.lines(%5B%0A 'retain roe rant ratio toast sort stat tore earn noose',%0A 'teat eater oat trio tear tone artist nor tattoo seat arise',%0A 'noise start toss tenant oasis one aria no arson sonata ',%0A 'soon rear to ass soot irate sane onset star root state',%0A 'oar errant resort tartan sonnet notes eat rotten stain',%0A 'ration arose reason noon sass retina iota torn stairs',%0A 'iron estate toe are season not attire tenor innate',%0A 'torso tease arisen note tar snort tarot',%0A %5D)%0A
|
f1dd824978ad8581113a088afe1d1bdf99a00802 | Move to dev. | command_line/griddex.py | command_line/griddex.py | from __future__ import absolute_import, division, print_function
import libtbx.phil
import libtbx.load_env
help_message = '''
Cross reference indexing solutions.
Examples::
%s expts0.json refl0.json
''' % libtbx.env.dispatcher_name
phil_scope = libtbx.phil.parse("""
d_min = None
.type = float(value_min=0.0)
""")
def test_index(experiment, reflections):
from dials.algorithms.indexing import indexer
# map reflections to reciprocal space from image space
refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
reflections, experiment.detector, experiment.scan)
indexer.indexer_base.map_centroids_to_reciprocal_space(
refl, experiment.detector, experiment.beam, experiment.goniometer)
# now compute fractional indices - in Python rather than trying to push
# everything to C++ for the moment
from scitbx import matrix
ub = matrix.sqr(experiment.crystal.get_A())
rub = ub.inverse()
from dials.array_family import flex
  hkl_real = flex.vec3_double(len(refl))
  for j, rlp in enumerate(refl['rlp']):
hkl_real[j] = rub * rlp
hkl = hkl_real.iround()
ms = 0.0
for (_h, _k, _l), (_hr, _kr, _lr) in zip(hkl, hkl_real):
ms += (_hr - _h) ** 2 + (_kr - _k) ** 2 + (_lr - _l) ** 2
import math
return math.sqrt(ms / len(reflections))
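# Note on the score above: ub is the crystal's A = UB setting matrix, so
# rub = (UB)^-1 maps each reciprocal-lattice point to fractional Miller
# indices h_frac = (UB)^-1 * rlp, and the return value is
#   sqrt( sum_j |h_frac_j - round(h_frac_j)|^2 / N )
# i.e. the RMS distance to the nearest integer triple -- small values mean the
# crystal model indexes these spots well.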
def run(args):
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from dials.util.options import flatten_reflections
import libtbx.load_env
usage = "%s [options] datablock.json reflections.pickle" % (
libtbx.env.dispatcher_name)
parser = OptionParser(
usage=usage,
phil=phil_scope,
read_experiments=True,
read_reflections=True,
check_format=False,
epilog=help_message)
params, options = parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
reflections = flatten_reflections(params.input.reflections)
assert len(experiments) == len(reflections)
nn = len(experiments)
# FIXME check that all the crystals are in the primitive setting...
# now compute grid of reciprocal RMSD's
result = { }
for j, expt in enumerate(experiments):
for k, refl in enumerate(reflections):
result[j, k] = test_index(expt, refl)
# print matrix of results
print(' ' + ''.join(['%7d' % j for j in range(nn)]))
for k in range(nn):
record = ''.join([' %6.3f' % result[j, k] for j in range(nn)])
print('%8d' % k + record)
if __name__ == '__main__':
import sys
run(sys.argv[1:])
| Python | 0 | @@ -1,12 +1,59 @@
+# LIBTBX_SET_DISPATCHER_NAME dev.dials.griddex%0A
from __futur
|
cab9666d25988d13ac4294d9bc88e46c632ce4d8 | change error log | commands/FBClassDump.py | commands/FBClassDump.py | #!/usr/bin/python
import os
import re
import string
import lldb
import fblldbbase as fb
import fblldbobjcruntimehelpers as runtimeHelpers
def lldbcommands():
return [
FBPrintClassInstanceMethods(),
FBPrintClassMethods()
]
class FBPrintClassInstanceMethods(fb.FBCommand):
def name(self):
return 'pclassinstancemethod'
def description(self):
return 'Print the class instance methods.'
def args(self):
return [ fb.FBCommandArgument(arg='class', type='Class', help='an OC Class.') ]
def run(self, arguments, options):
    ocarray = instanceMethodsOfClass(arguments[0])
    methodAddrs = convertOCArrayToPyArray(ocarray)
methods = []
for i in methodAddrs:
method = createMethodFromOCMethod(i)
if method is not None:
methods.append(method)
print "- " + method.prettyPrint()
class FBPrintClassMethods(fb.FBCommand):
def name(self):
return 'pclassmethod'
def description(self):
return 'Print the class`s class methods.'
def args(self):
return [ fb.FBCommandArgument(arg='class', type='Class', help='an OC Class.') ]
def run(self, arguments, options):
    ocarray = instanceMethodsOfClass(runtimeHelpers.object_getClass(arguments[0]))
if not ocarray:
print "-- have none method -- "
return
    methodAddrs = convertOCArrayToPyArray(ocarray)
methods = []
for i in methodAddrs:
method = createMethodFromOCMethod(i)
if method is not None:
methods.append(method)
print "+ " + method.prettyPrint()
# A method with variadic parameters cannot be handled by fb.evaluateExpression,
# so numberWithLongLong: is used here rather than -[NSString stringWithFormat:]
def instanceMethodsOfClass(klass):
tmpString = """
unsigned int outCount;
void **methods = (void **)class_copyMethodList((Class)$cls, &outCount);
NSMutableArray *result = [NSMutableArray array];
for (int i = 0; i < outCount; i++) {
NSNumber *num = (NSNumber *)[NSNumber numberWithLongLong:(long long)methods[i]];
[result addObject:num];
}
(void)free(methods);
id ret = result.count ? [result copy] : nil;
ret;
"""
command = string.Template(tmpString).substitute(cls=klass)
command = '({' + command + '})'
ret = fb.evaluateExpression(command)
if int(ret, 16) == 0: # return nil
ret = None
return ret
# OC array only can hold id,
# @return an array whose instance type is str of the oc object`s address
def convertOCArrayToPyArray(oc_array):
is_array = fb.evaluateBooleanExpression("[{} isKindOfClass:[NSArray class]]".format(oc_array))
if not is_array:
return None
result = []
count = fb.evaluateExpression("(int)[{} count]".format(oc_array))
for i in range(int(count)):
value = fb.evaluateExpression("(id)[{} objectAtIndex:{}]".format(oc_array, i))
value = fb.evaluateExpression("(long long)[{} longLongValue]".format(value))
result.append(value)
return result
class Method:
encodeMap = {
'c': 'char',
'i': 'int',
's': 'short',
'l': 'long',
'q': 'long long',
'C': 'unsigned char',
'I': 'unsigned int',
'S': 'unsigned short',
'L': 'unsigned long',
'Q': 'unsigned long long',
'f': 'float',
'd': 'double',
'B': 'bool',
'v': 'void',
'*': 'char *',
'@': 'id',
'#': 'Class',
':': 'SEL',
}
def __init__(self, name, type_encoding, imp, oc_method):
self.name = name
self.type = type_encoding
self.imp = imp
self.oc_method = self.toHex(oc_method)
def prettyPrint(self):
# mast be bigger then 2, 0-idx for self, 1-st for SEL
argnum = fb.evaluateIntegerExpression("method_getNumberOfArguments({})".format(self.oc_method))
names = self.name.split(':')
for i in range(2, argnum):
arg_type = fb.evaluateCStringExpression("(char *)method_copyArgumentType({}, {})".format(self.oc_method, i))
names[i-2] = names[i-2] + ":(" + self.decode(arg_type) + ")arg" + str(i-2)
string = " ".join(names)
ret_type = fb.evaluateCStringExpression("(char *)method_copyReturnType({})".format(self.oc_method))
return "({}){}".format(self.decode(ret_type), string)
def decode(self, type):
ret = type
if type in Method.encodeMap:
ret = Method.encodeMap[type]
return ret
def toHex(self, addr):
return addr
def __str__(self):
return "<Method:" + self.oc_method + "> " + self.name + " --- " + self.type + " --- " + self.imp
def createMethodFromOCMethod(method):
process = lldb.debugger.GetSelectedTarget().GetProcess()
error = lldb.SBError()
nameValue = fb.evaluateExpression("(char *)method_getName({})".format(method))
name = process.ReadCStringFromMemory(int(nameValue, 16), 256, error)
if not error.Success():
print "--error--"
return None
typeEncodingValue = fb.evaluateExpression("(char *)method_getTypeEncoding({})".format(method))
type_encoding = process.ReadCStringFromMemory(int(typeEncodingValue, 16), 256, error)
if not error.Success():
print "--error--"
return None
imp = fb.evaluateExpression("(void *)method_getImplementation({})".format(method))
return Method(name, type_encoding, imp, method)
| Python | 0.000001 | @@ -4753,35 +4753,29 @@
:%0A print
-%22--
error
---%22
%0A return
@@ -5006,19 +5006,13 @@
int
-%22--
error
---%22
%0A
|
9ebc7c3aee73f4a950d4975034f3c41417d59444 | clean up unused imports | common/util/__init__.py | common/util/__init__.py | import itertools
import sublime
from plistlib import readPlistFromBytes
from .parse_diff import parse_diff
syntax_file_map = {}
def move_cursor(view, line_no, char_no):
# Line numbers are one-based, rows are zero-based.
line_no -= 1
# Negative line index counts backwards from the last line.
if line_no < 0:
last_line, _ = view.rowcol(view.size())
line_no = last_line + line_no + 1
pt = view.text_point(line_no, char_no)
view.sel().clear()
view.sel().add(sublime.Region(pt))
view.show(pt)
def _region_within_regions(all_outer, inner):
for outer in all_outer:
if outer.begin() <= inner.begin() and outer.end() >= inner.end():
return True
return False
def get_lines_from_regions(view, regions, valid_ranges=None):
full_line_regions = (view.full_line(region) for region in regions)
valid_regions = ([region for region in full_line_regions if _region_within_regions(valid_ranges, region)]
if valid_ranges else
full_line_regions)
return [line for region in valid_regions for line in view.substr(region).split("\n")]
def determine_syntax_files():
syntax_files = sublime.find_resources("*.tmLanguage")
for syntax_file in syntax_files:
try:
# Use `sublime.load_resource`, in case Package is `*.sublime-package`.
resource = sublime.load_resource(syntax_file)
plist = readPlistFromBytes(bytearray(resource, encoding="utf-8"))
for extension in plist["fileTypes"]:
if extension not in syntax_file_map:
syntax_file_map[extension] = []
extension_list = syntax_file_map[extension]
extension_list.append(syntax_file)
        except Exception:
continue
def get_syntax_for_file(filename):
extension = get_file_extension(filename)
try:
# Return last syntax file applicable to this extension.
return syntax_file_map[extension][-1]
except KeyError:
pass
return "Packages/Text/Plain text.tmLanguage"
def get_file_extension(filename):
period_delimited_segments = filename.split(".")
return "" if len(period_delimited_segments) < 2 else period_delimited_segments[-1]
| Python | 0.000001 | @@ -1,22 +1,4 @@
-import itertools%0A%0A
impo
@@ -53,44 +53,8 @@
es%0A%0A
-from .parse_diff import parse_diff%0A%0A
synt
|
8c3b20f8aa655a7f8fe1ae485e493aa4a5f24abd | Remove __getattr__ method from CompositeField | composite_field/l10n.py | composite_field/l10n.py | from copy import deepcopy
from django.conf import settings
from django.db.models.fields import Field, CharField, TextField, FloatField
from django.utils.functional import lazy
from django.utils import six
from . import CompositeField
LANGUAGES = [lang[0] for lang in getattr(settings, 'LANGUAGES', ())]
class LocalizedField(CompositeField):
def __init__(self, field_class, *args, **kwargs):
self.languages = kwargs.pop('languages', LANGUAGES)
if not self.languages:
raise RuntimeError('Set LANGUAGES in your settings.py or pass a non empty "languages" argument before using LocalizedCharField')
super(LocalizedField, self).__init__()
self.verbose_name = kwargs.pop('verbose_name', None)
for language in self.languages:
self[language] = field_class(*args, **kwargs)
def __getattr__(self, name):
# Proxy all other getattr calls to the first language field. This makes
# it possible to access subfield specific details like 'max_length',
# 'blank', etc. without duplication.
return getattr(self[self.languages[0]], name)
def contribute_to_class(self, cls, field_name):
if self.verbose_name is None:
self.verbose_name = field_name.replace('_', ' ')
for language in self:
# verbose_name must be lazy in order for the admin to show the
# translated verbose_names of the fields
self[language].verbose_name = lazy(lambda language: self.verbose_name + ' (' + language + ')', six.text_type)(language)
super(LocalizedField, self).contribute_to_class(cls, field_name)
def get_proxy(self, model):
return LocalizedField.Proxy(self, model)
class Proxy(CompositeField.Proxy):
def __bool__(self):
return bool(six.text_type(self))
def __unicode__(self):
from django.utils.translation import get_language
language = get_language()
translation = None
# 1. complete language code
translation = getattr(self, language, None)
if translation is not None:
return translation
# 2. base of language code
if '-' in language:
base_lang = language.split('-')[0]
translation = getattr(self, base_lang, None)
if translation is not None:
return translation
            # 3. first available translation
            for language, _name in settings.LANGUAGES:
                translation = getattr(self, language, None)
                if translation is not None:
                    return translation
            return None
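            # e.g. with LANGUAGES = [('de', ...), ('en', ...)] and active
            # language 'de-at': step 1 tries the 'de-at' subfield, step 2 the
            # 'de' subfield, step 3 the first subfield holding any value.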
class LocalizedCharField(LocalizedField):
def __init__(self, *args, **kwargs):
super(LocalizedCharField, self).__init__(CharField, *args, **kwargs)
class LocalizedTextField(LocalizedField):
def __init__(self, *args, **kwargs):
super(LocalizedTextField, self).__init__(TextField, *args, **kwargs)
| Python | 0 | @@ -844,298 +844,8 @@
s)%0A%0A
- def __getattr__(self, name):%0A # Proxy all other getattr calls to the first language field. This makes%0A # it possible to access subfield specific details like 'max_length',%0A # 'blank', etc. without duplication.%0A return getattr(self%5Bself.languages%5B0%5D%5D, name)%0A%0A
|
5caf134eedc4ace933da8c2f21aacc5f5b1224ef | bump version | confetti/__version__.py | confetti/__version__.py | __version__ = "2.2.0"
| Python | 0 | @@ -16,7 +16,7 @@
2.2.
-0
+1
%22%0A
|
0a188bfdf15660161faac1dc90176be180728af6 | fix bugs related to logging | rplugin/python3/chromatica/chromatica.py | rplugin/python3/chromatica/chromatica.py | # ============================================================================
# FILE: chromatica.py
# AUTHOR: Yanfei Guo <yanf.guo at gmail.com>
# License: MIT license
# based on original version by BB Chung <afafaf4 at gmail.com>
# ============================================================================
from chromatica import logger
from chromatica import syntax
from chromatica.util import load_external_module
from chromatica.compile_args_database import CompileArgsDatabase
current = __file__
load_external_module(current, "")
from clang import cindex
import os
import re
class Chromatica(logger.LoggingMixin):
"""Chromatica Core """
def __init__(self, vim):
self.__vim = vim
self.__runtimepath = ""
self.name = "core"
self.mark = "[Chromatica Core]"
self.library_path = self.__vim.vars["chromatica#libclang_path"]
self.occurrence_pri = self.__vim.vars["chromatica#occurrence_priority"]
self.syntax_pri = self.__vim.vars["chromatica#syntax_priority"]
self.global_args = self.__vim.vars["chromatica#global_args"]
self.ctx = {}
if not cindex.Config.loaded:
if os.path.isdir(os.path.abspath(self.library_path)):
cindex.Config.set_library_path(self.library_path)
else:
cindex.Config.set_library_file(self.library_path)
cindex.Config.set_compatibility_check(False)
self.args_db = CompileArgsDatabase(self.__vim.current.buffer.name,\
self.global_args)
self.idx = cindex.Index.create()
def get_unsaved_buffer(self, filename):
buf_idx = self.ctx[filename]["bufnr"] - 1
return [(self.__vim.buffers[buf_idx].name, "\n".join(self.__vim.buffers[buf_idx]))]
def get_buf(self, filename):
buf_idx = self.ctx[filename]["bufnr"] - 1
return self.__vim.buffers[buf_idx]
def get_bufname(self, filename):
return self.get_buf(filename).name
def parse(self, context):
ret = False
self.debug("parse context: %s" % context)
# check if context is already in ctx db
filename = context["filename"]
if filename not in self.ctx:
self.ctx[filename] = context
            # check if context has the right filetype
            filetype = self.get_buf(filename).options["filetype"].split(".")[0]
if filetype not in ["c", "cpp"]:
del(self.ctx[filename])
return ret
self.ctx[filename]["args"] = \
self.args_db.get_args_filename(context["filename"])
self.debug("file: %s, args: %s" % (filename, self.ctx[filename]["args"]))
self.ctx[filename]["tu"] = self.idx.parse(self.get_bufname(filename), \
self.ctx[filename]["args"], \
self.get_unsaved_buffer(filename), \
options=cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
ret = True
elif self.ctx[filename]["changedtick"] != context["changedtick"]:
self.ctx[filename]["tu"].reparse(\
self.get_unsaved_buffer(filename), \
options=cindex.TranslationUnit.PARSE_DETAILED_PROCESSING_RECORD)
self.ctx[filename]["changedtick"] = context["changedtick"]
ret = True
if ret:
self.highlight(context)
return ret
def highlight(self, context):
"""backend of highlight event"""
self.debug("highlight context: %s" % context)
filename = context["filename"]
lbegin, lend = context["range"]
row, col = context["position"]
highlight_tick = context["highlight_tick"]
if highlight_tick != self.__vim.current.buffer.vars["highlight_tick"]:
return
        if filename not in self.ctx:
            if not self.parse(context):
                return
tu = self.ctx[filename]['tu']
symbol = syntax.get_symbol_from_loc(tu, self.get_bufname(filename), row, col)
syn_group, occurrence = syntax.get_highlight(tu, self.get_bufname(filename), \
lbegin, lend, symbol)
self.debug(syn_group)
self.debug(occurrence)
for hl_group in syn_group:
for pos in syn_group[hl_group]:
row = pos[0] - 1
col_start = pos[1] - 1
col_end = col_start + pos[2]
self.get_buf(filename).add_highlight(hl_group, row, col_start, col_end,\
self.syntax_pri)
# self.__vim.current.buffer.add_highlight()
| Python | 0 | @@ -2025,32 +2025,34 @@
= False%0A
+ #
self.debug(%22par
@@ -2611,24 +2611,26 @@
%0A
+ #
self.debug(
@@ -3476,32 +3476,34 @@
event%22%22%22%0A
+ #
self.debug(%22hig
@@ -3847,24 +3847,31 @@
+if not
self.parse(c
@@ -3869,32 +3869,56 @@
f.parse(context)
+:%0A return
%0A%0A tu = s
@@ -3939,12 +3939,12 @@
me%5D%5B
-'tu'
+%22tu%22
%5D%0A%0A
@@ -4152,32 +4152,34 @@
symbol)%0A
+ #
self.debug(syn_
@@ -4188,24 +4188,26 @@
oup)%0A
+ #
self.debug(
|
b48a29e1f940f6b9c0305dbcc15e98ea37057232 | Update moscow.py | russian_metro/parser/providers/moscow.py | russian_metro/parser/providers/moscow.py | # -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
from russian_metro.parser.base import BaseDataProvider
class DataProvider(BaseDataProvider):
metro_lines_src = u"http://ru.wikipedia.org/wiki/Модуль:MoscowMetro#ColorByNum"
metro_stations_src = u"http://ru.wikipedia.org/w/index.php?title=\
Список_станций_Московского_метрополитена"
def download_lines(self):
html = BeautifulSoup(requests.get(self.metro_lines_src).content)
table = html.find('table')
for i, row in enumerate(table.find_all('tr')):
if i == 0:
continue
number = 0
for j, cell in enumerate(row.find_all('td')):
value = cell.string
if j == 0:
if value and value.isdigit():
number = int(value)
elif j == 1:
title = value
elif j == 2:
color = value
if number > 0:
self.line_model.objects.get_or_create(
number=number,
defaults=dict(
title=title, color='#' + color
)
)
def download_stations(self):
html = BeautifulSoup(requests.get(self.metro_stations_src).content)
table = html.find('table', 'wikitable')
lines = self.line_model.get_all()
for i, row in enumerate(table.find_all('tr')):
if i == 0:
continue
for j, cell in enumerate(row.find_all('td')):
if j == 0:
line = 0
value = cell.find('span', 'sortkey').string
if value and value.isdigit():
line = int(value)
elif j == 1:
title = cell.find('span').string
try:
line_inst = lines[line]
except KeyError:
logger.warning(
u'MetroLine with number %d does not exist' % line
)
continue
self.station_model\
.objects\
.get_or_create(line=line_inst, title=title)
| Python | 0 | @@ -1981,128 +1981,8 @@
or:%0A
- logger.warning(%0A u'MetroLine with number %25d does not exist' %25 line%0A )%0A
|
3dae8f25cda4827397ab3812ea552ed27d37e757 | Remove contraints on dotted names | base_vat_optional_vies/models/res_partner.py | base_vat_optional_vies/models/res_partner.py | # Copyright 2015 Tecnativa - Antonio Espinosa
# Copyright 2017 Tecnativa - David Vidal
# Copyright 2019 FactorLibre - Rodrigo Bonilla
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
vies_passed = fields.Boolean(
string="VIES validation", readonly=True)
@api.model
def simple_vat_check(self, country_code, vat_number):
res = super(ResPartner, self).simple_vat_check(
country_code, vat_number,
)
partner = self.env.context.get('vat_partner')
if partner and self.vies_passed:
# Can not be sure that this VAT is signed up in VIES
partner.update({'vies_passed': False})
return res
@api.model
def vies_vat_check(self, country_code, vat_number):
partner = self.env.context.get('vat_partner')
if partner:
# If there's an exception checking VIES, the upstream method will
# call simple_vat_check and thus the flag will be removed
partner.update({'vies_passed': True})
res = super(ResPartner, self).vies_vat_check(country_code, vat_number)
if not res:
return self.simple_vat_check(country_code, vat_number)
return res
@api.constrains('vat', 'commercial_partner.country_id')
def check_vat(self):
for partner in self:
partner = partner.with_context(vat_partner=partner)
super(ResPartner, partner).check_vat()
| Python | 0.000001 | @@ -1347,41 +1347,8 @@
vat'
-, 'commercial_partner.country_id'
)%0A
|
32c2415a9daa4e2320b75e02648e9caaddcac58d | Revert "Raise exception in normalik, since it is buggy." | scikits/learn/machine/em2/likelihoods.py | scikits/learn/machine/em2/likelihoods.py | #! /usr/bin/python
#
# Copyrighted David Cournapeau
# Last Change: Sat Jan 24 07:00 PM 2009 J
"""This module implements various basic functions related to multivariate
gaussian, such as likelihood, confidence interval/ellipsoids, etc..."""
import numpy as np
from _lk import mquadform, logsumexp as _logsumexp
def normalik(data, mu, va, log=False, out=None):
raise ValueError("Does not work correctly")
if data.ndim == 1:
data = np.atleast_2d(data)
mu = np.atleast_1d(mu)
va = np.atleast_1d(va)
n, d = np.shape(data)
if out is not None:
out = np.asarray(out)
if out.ndim > 1:
raise ValueError("Expected rank 1 out array")
no = out.shape[0]
if not no == n:
raise ValueError("Argout not the right size %d vs %d" % (no, n))
else:
out = np.empty(n, dtype=data.dtype, order='C')
if mu.shape == va.shape:
inva = 1/va
fac = (2*np.pi) ** (-d/2.0) * np.prod(np.sqrt(inva))
inva *= -0.5
pquadform(data, mu, inva, np.log(fac), out)
if not log:
return np.exp(out)
else:
return out
else:
raise ValueError("Full covariance not yet supported")
def mnormalik(data, mu, va, log=False, out=None):
k = np.shape(mu)[0]
n, d = np.shape(data)
if out is not None:
out = np.asarray(out)
(no, ko) = out.shape
if not no == n or not ko == k:
raise ValueError("Argout not the right size (%d, %d), "
"expected (%d, %d)" % (no, ko, n, k))
else:
out = np.empty((n, k), dtype=data.dtype, order='C')
if mu.shape == va.shape:
inva = 1/va
fac = (2*np.pi) ** (-d/2.0) * np.prod(np.sqrt(inva), axis=-1)
inva *= -0.5
#mpquadform(data, mu, inva, np.log(fac), out)
mquadform(data, mu, inva, np.log(fac), out)
if not log:
return np.exp(out)
else:
return out
else:
raise ValueError("Full covariance not yet supported")
def pquadform(input, mu, sp, fac, out):
x = (input-mu) ** 2
out[:] = np.dot(x, sp.T)
out[:] += fac
def pmquadform(input, mu, sp, fac, out):
for c in range(k):
x = (input-mu[c]) ** 2
out[:, c] = np.dot(x, sp[c].T)
out[:,c] += fac[c]
def test(data, mu, va, log=False):
y = mnormalik(data, mu, va, log)
yr = multiple_gauss_den(data, mu, va, log=log)
np.testing.assert_array_almost_equal(y, yr)
def logsumexp(x, out=None):
if not out:
y = np.empty(x.shape[0], x.dtype)
_logsumexp(x, y)
return y
if __name__ == '__main__':
from scikits.learn.machine.em.densities import multiple_gauss_den
d = 20
k = 15
n = 1e4
log = True
type = np.float64
mu = np.random.randn(k, d).astype(type)
va = np.random.randn(k, d).astype(type)
va **= 2
x = np.random.randn(n, d).astype(type)
test(x[:1000, :], mu, va, log)
y = np.empty((n, k), dtype=x.dtype)
mnormalik(x, mu, va, out=y, log=log)
#mnormalik(x, mu, va, out=None, log=log)
x = np.array([[-1000., -1001], [-2000, -2500]])
print logsumexp(x)
print np.log(np.sum(np.exp(x), axis=-1))
| Python | 0.000059 | @@ -64,21 +64,21 @@
ge:
-Sat
+Fri
Jan 2
-4 07
+3 01
:00
@@ -360,56 +360,8 @@
e):%0A
- raise ValueError(%22Does not work correctly%22)%0A
|
6ed3f5d97fe8f8967df5624f62e69ce2a58a9413 | Add color to pytest tests on CI (#20723) | scripts/ci/images/ci_run_docker_tests.py | scripts/ci/images/ci_run_docker_tests.py | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import shlex
import subprocess
import sys
from pathlib import Path
from typing import List
AIRFLOW_SOURCE = Path(__file__).resolve().parent.parent.parent
BUILD_CACHE_DIR = AIRFLOW_SOURCE / ".build"
CBLUE = '\033[94m'
CEND = '\033[0m'
def get_parser():
parser = argparse.ArgumentParser(
prog="ci_run_docker_tests",
description="Running Docker tests using pytest",
epilog="Unknown arguments are passed unchanged to Pytest.",
)
parser.add_argument(
"--interactive",
"-i",
action='store_true',
help="Activates virtual environment ready to run tests and drops you in",
)
parser.add_argument("--initialize", action="store_true", help="Initialize virtual environment and exit")
parser.add_argument("pytestopts", nargs=argparse.REMAINDER, help="Tests to run")
return parser
def run_verbose(cmd: List[str], *, check=True, **kwargs):
print(f"{CBLUE}$ {' '.join(shlex.quote(c) for c in cmd)}{CEND}")
subprocess.run(cmd, check=check, **kwargs)
def create_virtualenv():
virtualenv_path = (
BUILD_CACHE_DIR / ".docker_venv" / f"host_python_{sys.version_info[0]}.{sys.version_info[1]}"
)
virtualenv_path.parent.mkdir(parents=True, exist_ok=True)
if not virtualenv_path.exists():
print("Creating virtualenv environment")
run_verbose([sys.executable, "-m", "venv", str(virtualenv_path)])
python_bin = virtualenv_path / "bin" / "python"
run_verbose([str(python_bin), "-m", "pip", "install", "pytest", "pytest-xdist", "requests"])
return python_bin
def main():
parser = get_parser()
args = parser.parse_args()
python_bin = create_virtualenv()
if args.initialize:
return
if args.interactive:
activate_bin = python_bin.parent / "activate"
bash_trampoline = f"source {shlex.quote(str(activate_bin))}"
print("To enter virtual environment, run:")
print(f" {bash_trampoline}")
return
extra_pytest_args = (
args.pytestopts[1:] if args.pytestopts and args.pytestopts[0] == "--" else args.pytestopts
)
if not extra_pytest_args:
raise SystemExit("You must select the tests to run.")
pytest_args = (
"-n",
"auto",
)
run_verbose([str(python_bin), "-m", "pytest", *pytest_args, *extra_pytest_args])
if __name__ == "__main__":
main()
| Python | 0 | @@ -3053,43 +3053,35 @@
= (
-%0A %22-n%22,%0A %22auto%22,%0A
+%22-n%22, %22auto%22, %22--color=yes%22
)%0A%0A
|
b5b31136ff716b423d78d307e107df4b8d8cfedc | Add images field on article model abstract, is many to many | opps/core/models/article.py | opps/core/models/article.py | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from opps.core.models.published import Published
from opps.core.models.date import Date
from opps.core.models.channel import Channel
from tagging.models import Tag
from tagging.fields import TagField
class Article(Published, Date):
title = models.CharField(_(u"Title"), max_length=140)
slug = models.SlugField(_(u"URL"), max_length=150, unique=True,
db_index=True)
short_title = models.CharField(_(u"Short title"), max_length=140,
blank=False, null=True)
headline = models.TextField(_(u"Headline"), blank=True)
channel = models.ForeignKey(Channel, verbose_name=_(u"Channel"))
content = models.TextField(_(u"Content"))
tags = TagField(null=True, verbose_name=_(u"Tags"))
class Meta:
abstract = True
def __unicode__(self):
return "{0}/{1}".format(self.site.name, self.slug)
class Post(Article):
credit = models.CharField(_("Credit"), blank=True, max_length=255)
class Meta:
app_label = 'core'
| Python | 0.000059 | @@ -306,16 +306,57 @@
Channel
+%0Afrom opps.core.models.image import Image
%0A%0Afrom t
@@ -884,24 +884,136 @@
Content%22))%0A%0A
+ images = models.ManyToManyField(Image, through='ArticleImage',%0A related_name='article_images')%0A%0A%0A
tags = T
|
5cf84d646796bf5d2f96c67b12a21dc557532c4f | move recv_threads checking loop to run() | orchard/cli/socketclient.py | orchard/cli/socketclient.py | # Adapted from https://github.com/benthor/remotty/blob/master/socketclient.py
from select import select
import sys
import tty
import fcntl
import os
import termios
import threading
import time
import errno
import logging
log = logging.getLogger(__name__)
class SocketClient:
def __init__(self,
socket_in=None,
socket_out=None,
socket_err=None,
raw=True,
):
self.socket_in = socket_in
self.socket_out = socket_out
self.socket_err = socket_err
self.raw = raw
self.stdin_fileno = sys.stdin.fileno()
self.recv_threads = []
def __enter__(self):
self.create()
return self
def __exit__(self, type, value, trace):
self.destroy()
def create(self):
if os.isatty(sys.stdin.fileno()):
self.settings = termios.tcgetattr(sys.stdin.fileno())
else:
self.settings = None
if self.socket_in is not None:
self.set_blocking(sys.stdin, False)
self.set_blocking(sys.stdout, True)
self.set_blocking(sys.stderr, True)
if self.raw:
tty.setraw(sys.stdin.fileno())
def set_blocking(self, file, blocking):
fd = file.fileno()
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
flags = (flags & ~os.O_NONBLOCK) if blocking else (flags | os.O_NONBLOCK)
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def run(self):
if self.socket_in is not None:
self.start_send_thread(self.socket_in, sys.stdin)
if self.socket_out is not None:
self.start_recv_thread(self.socket_out, sys.stdout)
if self.socket_err is not None:
self.start_recv_thread(self.socket_err, sys.stderr)
self.alive_check()
def start_send_thread(self, *args):
thread = threading.Thread(target=self.send_ws, args=args)
thread.daemon = True
thread.start()
def start_recv_thread(self, *args):
thread = threading.Thread(target=self.recv_ws, args=args)
thread.daemon = True
thread.start()
self.recv_threads.append(thread)
def recv_ws(self, socket, stream):
try:
while True:
chunk = socket.recv()
if chunk:
stream.write(chunk)
stream.flush()
else:
break
except Exception, e:
log.debug(e)
def send_ws(self, socket, stream):
while True:
r, w, e = select([stream.fileno()], [], [])
if r:
chunk = stream.read(1)
if chunk == '':
socket.send_close()
break
else:
try:
socket.send(chunk)
except Exception, e:
if hasattr(e, 'errno') and e.errno == errno.EPIPE:
break
else:
raise e
def alive_check(self):
while True:
time.sleep(1)
if not any(t.is_alive() for t in self.recv_threads):
break
def destroy(self):
if self.settings is not None:
termios.tcsetattr(self.stdin_fileno, termios.TCSADRAIN, self.settings)
sys.stdout.flush()
if __name__ == '__main__':
import websocket
if len(sys.argv) != 2:
sys.stderr.write("Usage: python socketclient.py WEBSOCKET_URL\n")
exit(1)
url = sys.argv[1]
socket = websocket.create_connection(url)
print "connected\r"
with SocketClient(socket, interactive=True) as client:
client.run()
| Python | 0 | @@ -1760,25 +1760,84 @@
-self.alive_check(
+while any(t.is_alive() for t in self.recv_threads):%0A time.sleep(1
)%0A%0A
@@ -3096,170 +3096,8 @@
e%0A%0A
- def alive_check(self):%0A while True:%0A time.sleep(1)%0A%0A if not any(t.is_alive() for t in self.recv_threads):%0A break%0A%0A
|
6aa92f13673ec49a67b5f9e2970c7751a852c19b | Fix typos in test_handler.py (#1953) | opentelemetry-sdk/tests/logs/test_handler.py | opentelemetry-sdk/tests/logs/test_handler.py | # Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import unittest
from unittest.mock import Mock
from opentelemetry.sdk import trace
from opentelemetry.sdk.logs import LogEmitter, OTLPHandler
from opentelemetry.sdk.logs.severity import SeverityNumber
from opentelemetry.trace import INVALID_SPAN_CONTEXT
def get_logger(level=logging.NOTSET, log_emitter=None):
logger = logging.getLogger(__name__)
handler = OTLPHandler(level=level, log_emitter=log_emitter)
logger.addHandler(handler)
return logger
class TestOTLPHandler(unittest.TestCase):
def test_handler_default_log_level(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
# Make sure debug messages are ignored by default
logger.debug("Debug message")
self.assertEqual(emitter_mock.emit.call_count, 0)
# Assert emit gets called for warning message
logger.warning("Wanrning message")
self.assertEqual(emitter_mock.emit.call_count, 1)
def test_handler_custom_log_level(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(level=logging.ERROR, log_emitter=emitter_mock)
logger.warning("Warning message test custom log level")
# Make sure any log with level < ERROR is ignored
self.assertEqual(emitter_mock.emit.call_count, 0)
logger.error("Mumbai, we have a major problem")
logger.critical("No Time For Caution")
self.assertEqual(emitter_mock.emit.call_count, 2)
def test_log_record_no_span_context(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
# Assert emit gets called for warning message
logger.warning("Wanrning message")
args, _ = emitter_mock.emit.call_args_list[0]
log_record = args[0]
self.assertIsNotNone(log_record)
self.assertEqual(log_record.trace_id, INVALID_SPAN_CONTEXT.trace_id)
self.assertEqual(log_record.span_id, INVALID_SPAN_CONTEXT.span_id)
self.assertEqual(
log_record.trace_flags, INVALID_SPAN_CONTEXT.trace_flags
)
def test_log_record_trace_correlation(self):
emitter_mock = Mock(spec=LogEmitter)
logger = get_logger(log_emitter=emitter_mock)
tracer = trace.TracerProvider().get_tracer(__name__)
with tracer.start_as_current_span("test") as span:
logger.critical("Critical message within span")
args, _ = emitter_mock.emit.call_args_list[0]
log_record = args[0]
self.assertEqual(log_record.body, "Critical message within span")
self.assertEqual(log_record.severity_text, "CRITICAL")
self.assertEqual(log_record.severity_number, SeverityNumber.FATAL)
span_context = span.get_span_context()
self.assertEqual(log_record.trace_id, span_context.trace_id)
self.assertEqual(log_record.span_id, span_context.span_id)
self.assertEqual(log_record.trace_flags, span_context.trace_flags)
| Python | 0.0006 | @@ -1475,33 +1475,32 @@
gger.warning(%22Wa
-n
rning message%22)%0A
@@ -2291,17 +2291,16 @@
ning(%22Wa
-n
rning me
|
d6811b34bbb7628307f82fbe7ef5284b1fa20172 | Remove orderer's user name, line item count from order database model's text representation | byceps/services/shop/order/dbmodels/order.py | byceps/services/shop/order/dbmodels/order.py | """
byceps.services.shop.order.dbmodels.order
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import datetime
from typing import Optional, TYPE_CHECKING
if TYPE_CHECKING:
hybrid_property = property
else:
from sqlalchemy.ext.hybrid import hybrid_property
from .....database import db, generate_uuid
from .....typing import UserID
from .....util.instances import ReprBuilder
from ....user.dbmodels.user import User
from ...shop.transfer.models import ShopID
from ..transfer.models import OrderNumber, PaymentState
class Order(db.Model):
"""An order for articles, placed by a user."""
__tablename__ = 'shop_orders'
id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
created_at = db.Column(db.DateTime, nullable=False)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), index=True, nullable=False)
order_number = db.Column(db.UnicodeText, unique=True, nullable=False)
placed_by_id = db.Column(db.Uuid, db.ForeignKey('users.id'), index=True, nullable=False)
placed_by = db.relationship(User, foreign_keys=[placed_by_id])
first_names = db.Column(db.UnicodeText, nullable=False)
last_name = db.Column(db.UnicodeText, nullable=False)
country = db.Column(db.UnicodeText, nullable=False)
zip_code = db.Column(db.UnicodeText, nullable=False)
city = db.Column(db.UnicodeText, nullable=False)
street = db.Column(db.UnicodeText, nullable=False)
total_amount = db.Column(db.Numeric(7, 2), nullable=False)
invoice_created_at = db.Column(db.DateTime, nullable=True)
payment_method = db.Column(db.UnicodeText, nullable=True)
_payment_state = db.Column('payment_state', db.UnicodeText, index=True, nullable=False)
payment_state_updated_at = db.Column(db.DateTime, nullable=True)
payment_state_updated_by_id = db.Column(db.Uuid, db.ForeignKey('users.id'), nullable=True)
payment_state_updated_by = db.relationship(User, foreign_keys=[payment_state_updated_by_id])
cancelation_reason = db.Column(db.UnicodeText, nullable=True)
processing_required = db.Column(db.Boolean, nullable=False)
processed_at = db.Column(db.DateTime, nullable=True)
def __init__(
self,
shop_id: ShopID,
order_number: OrderNumber,
placed_by_id: UserID,
first_names: str,
last_name: str,
country: str,
zip_code: str,
city: str,
street,
*,
created_at: Optional[datetime] = None,
) -> None:
if created_at is None:
created_at = datetime.utcnow()
self.created_at = created_at
self.shop_id = shop_id
self.order_number = order_number
self.placed_by_id = placed_by_id
self.first_names = first_names
self.last_name = last_name
self.country = country
self.zip_code = zip_code
self.city = city
self.street = street
self.payment_state = PaymentState.open
@hybrid_property
def payment_state(self) -> PaymentState:
return PaymentState[self._payment_state]
@payment_state.setter
def payment_state(self, state: PaymentState) -> None:
assert state is not None
self._payment_state = state.name
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add('shop', self.shop_id) \
.add_with_lookup('order_number') \
.add('placed_by', self.placed_by.screen_name) \
.add_custom(f'{len(self.line_items):d} line items') \
.add_custom(self.payment_state.name) \
.build()
| Python | 0.000001 | @@ -3523,134 +3523,8 @@
) %5C%0A
- .add('placed_by', self.placed_by.screen_name) %5C%0A .add_custom(f'%7Blen(self.line_items):d%7D line items') %5C%0A
|
a25141dca6ce6f8ead88c43fa7f5726afb2a9dba | Fix currency dialog to match model changes | cbpos/mod/currency/views/dialogs/currency.py | cbpos/mod/currency/views/dialogs/currency.py | from PySide import QtGui
from cbpos.mod.currency.models.currency import Currency
import cbpos
class CurrencyDialog(QtGui.QWidget):
def __init__(self):
super(CurrencyDialog, self).__init__()
self.name = QtGui.QLineEdit()
self.symbol = QtGui.QLineEdit()
self.value = QtGui.QSpinBox()
self.value.setMinimum(0)
self.value.setSingleStep(1)
self.decimalPlaces = QtGui.QSpinBox()
self.decimalPlaces.setRange(0, 10)
self.decimalPlaces.setSingleStep(1)
self.digitGrouping = QtGui.QCheckBox()
buttonBox = QtGui.QDialogButtonBox()
self.okBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Ok)
self.okBtn.pressed.connect(self.onOkButton)
self.cancelBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Cancel)
self.cancelBtn.pressed.connect(self.onCancelButton)
rows = [["Name", self.name],
["Symbol", self.symbol],
["Value", self.value],
["Decimal Places", self.decimalPlaces],
["Digit Grouping", self.digitGrouping],
[buttonBox]]
form = QtGui.QFormLayout()
form.setSpacing(10)
[form.addRow(*row) for row in rows]
self.setLayout(form)
def onOkButton(self):
currency = Currency(name=self.name.text(),
symbol=self.symbol.text(),
value=self.value.text(),
decimal_places=self.decimalPlaces.value(),
digit_grouping=self.digitGrouping.isChecked()
)
session = cbpos.database.session()
session.add(currency)
session.commit()
cbpos.config["mod.currency", "default"] = unicode(currency.id)
self.close()
cbpos.ui.show_default()
def onCancelButton(self):
self.close()
| Python | 0.000001 | @@ -21,16 +21,71 @@
tGui%0D%0A%0D%0A
+import cbpos%0D%0A%0D%0Alogger = cbpos.get_logger(__name__)%0D%0A%0D%0A
from cbp
@@ -106,25 +106,16 @@
y.models
-.currency
import
@@ -124,28 +124,69 @@
rrency%0D%0A
-import cbpos
+%0D%0Afrom cbpos.mod.currency.views import CurrenciesPage
%0D%0A%0D%0Aclas
@@ -312,190 +312,129 @@
-self.name = QtGui.QLineEdit()%0D%0A self.symbol = QtGui.QLineEdit()%0D%0A self.value = QtGui.QSpinBox()%0D%0A self.value.setMinimum(0)%0D%0A self.value.setSingleStep(1
+message = cbpos.tr.currency._(%22Set up the currencies you will be using. You will be able to change them later also.%22
)%0D%0A
+%0D%0A
@@ -446,175 +446,77 @@
elf.
-decimalPlaces = QtGui.QSpinBox()%0D%0A self.decimalPlaces.setRange(0, 10)%0D%0A self.decimalPlaces.setSingleStep(1)%0D%0A self.digitGrouping = QtGui.QCheckBox
+message = QtGui.QLabel(message)%0D%0A%0D%0A self.form = CurrenciesPage
()%0D%0A
@@ -594,18 +594,20 @@
self.
-ok
+done
Btn = bu
@@ -647,18 +647,21 @@
ttonBox.
-Ok
+Close
)%0D%0A
@@ -672,10 +672,12 @@
elf.
-ok
+done
Btn.
@@ -699,18 +699,20 @@
(self.on
-Ok
+Done
Button)%0D
@@ -734,195 +734,117 @@
-self.cancelBtn = buttonBox.addButton(QtGui.QDialogButtonBox.Cancel)%0D%0A self.cancelBtn.pressed.connect(self.onCancelButton)%0D%0A %0D%0A rows = %5B%5B%22Name%22, self.name%5D,%0D%0A
+layout = QtGui.QVBoxLayout()%0D%0A layout.setSpacing(10)%0D%0A%0D%0A layout.addWidget(self.message)
@@ -843,212 +843,72 @@
+%0D%0A
-%5B%22Symbol%22, self.symbol%5D,%0D%0A %5B%22Value%22, self.value%5D,%0D%0A %5B%22Decimal Places%22, self.decimalPlaces%5D,%0D%0A %5B%22Digit Grouping%22, self.digitGrouping%5D,%0D%0A %5B
+ layout.addWidget(self.form)%0D%0A layout.addWidget(
butt
@@ -916,10 +916,9 @@
nBox
-%5D%5D
+)
%0D%0A
@@ -937,243 +937,161 @@
-form = QtGui.QFormLayout()%0D%0A form.setSpacing(10
+self.setLayout(layout
)%0D%0A
-
-
%0D%0A
- %5Bform.addRow(*row) for row in rows%5D%0D%0A %0D%0A self.setLayout(form)
+def onDoneButton(self):
%0D%0A
-%0D%0A
-def onOkButton(self):%0D%0A currency = Currency(name=self.name.tex
+session = cbpos.database.session()%0D%0A currency = session.query(Currency).firs
t()
-,
%0D%0A
@@ -1092,32 +1092,34 @@
()%0D%0A
+%0D%0A
@@ -1110,247 +1110,220 @@
- symbol=self.symbol.text(),%0D%0A value=self.value.text(),%0D%0A decimal_places=self.decimalPlaces.value(),%0D%0A digit_grouping=self.digitGrouping.isChecked()
+if currency is None:%0D%0A QtGui.QMessageBox.warning(self, cbpos.tr.currency._(%22No currency%22),%0D%0A cbpos.tr.currency._(%22You have to sest up at least one currency%22),
%0D%0A
@@ -1348,27 +1348,24 @@
-)%0D%0A
session
@@ -1360,72 +1360,36 @@
-session = cbpos.database.session()%0D%0A session.add(currency
+ QtGui.QMessageBox.Ok
)%0D%0A
@@ -1399,24 +1399,18 @@
+
-session.commit()
+ return
%0D%0A
@@ -1558,63 +1558,4 @@
()%0D%0A
- %0D%0A def onCancelButton(self):%0D%0A self.close()%0D%0A
|
3af6de3097c864d0674245c5329e5887380f3b75 | fix to cooling network | cea/plots/supply_system/supply_system_map.py | cea/plots/supply_system/supply_system_map.py | """
Show a Pareto curve plot for individuals in a given generation.
"""
from __future__ import division
from __future__ import print_function
import pandas as pd
import geopandas
import json
import cea.inputlocator
import cea.plots.supply_system
from cea.technologies.network_layout.main import network_layout
from cea.utilities.standardize_coordinates import get_geographic_coordinate_system
__author__ = "Jimeno Fonseca"
__copyright__ = "Copyright 2019, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Jimeno Fonseca"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "[email protected]"
__status__ = "Production"
class SupplySystemMapPlot(cea.plots.supply_system.SupplySystemPlotBase):
"""Show a pareto curve for a single generation"""
name = "Supply system map"
expected_parameters = {
'generation': 'plots-supply-system:generation',
'individual': 'plots-supply-system:individual',
'scenario-name': 'general:scenario-name',
}
def __init__(self, project, parameters, cache):
super(SupplySystemMapPlot, self).__init__(project, parameters, cache)
self.generation = self.parameters['generation']
self.individual = self.parameters['individual']
self.scenario = self.parameters['scenario-name']
self.config = cea.config.Configuration()
@property
def title(self):
return "Supply system map for system #%s" % (self.individual)
@property
def output_path(self):
return self.locator.get_timeseries_plots_file(
'gen{generation}_ind{individual}supply_system_map'.format(individual=self.individual,
generation=self.generation),
self.category_name)
def _plot_div_producer(self):
import os
import hashlib
from jinja2 import Template
template = os.path.join(os.path.dirname(__file__), "map_div.html")
data = self.data_processing()
zone = geopandas.GeoDataFrame.from_file(self.locator.get_zone_geometry())\
.to_crs(get_geographic_coordinate_system()).to_json(show_bbox=True)
dc = self.get_newtork_json(data['path_output_edges_DC'], data['path_output_nodes_DC'])
dh = self.get_newtork_json(data['path_output_edges_DH'], data['path_output_nodes_DH'])
# Generate div id using hash of parameters
div = Template(open(template).read())\
.render(id=hashlib.md5(repr(sorted(data.items()))).hexdigest(), data=json.dumps(data), zone=zone, dc=dc, dh=dh)
return div
def get_newtork_json(self, edges, nodes):
if not edges or not nodes:
return {}
edges_df = geopandas.GeoDataFrame.from_file(edges)
nodes_df = geopandas.GeoDataFrame.from_file(nodes)
network_json = json.loads(edges_df.to_crs(get_geographic_coordinate_system()).to_json())
network_json['features']\
.extend(json.loads(nodes_df.to_crs(get_geographic_coordinate_system()).to_json())['features'])
return json.dumps(network_json)
def data_processing(self):
# get data from generation
building_connectivity = pd.read_csv(self.locator.get_optimization_slave_building_connectivity(self.individual,
self.generation))
# FOR DISTRICT HEATING NETWORK
if building_connectivity[
'DH_connectivity'].sum() > 1: # there are buildings connected and hence we can create the network
connected_buildings_DH = [x for x, y in zip(building_connectivity['Name'].values,
building_connectivity['DH_connectivity'].values) if y == 1]
disconnected_buildings_DH = [x for x in building_connectivity['Name'].values if
x not in connected_buildings_DH]
network_type = "DH"
network_name = "gen_" + str(self.generation) + "_ind_" + str(self.individual)
path_output_edges_DH, path_output_nodes_DH = self.create_network_layout(connected_buildings_DH,
network_type, network_name)
else:
connected_buildings_DH = []
disconnected_buildings_DH = building_connectivity['Name'].values.tolist()
path_output_edges_DH = None
path_output_nodes_DH = None
# FOR DISTRICT COOLING NETWORK
if building_connectivity[
'DC_connectivity'].sum() > 1: # there are buildings connected and hence we can create the network
connected_buildings_DC = [x for x in building_connectivity['Name'].values if
building_connectivity.loc[building_connectivity['Name'] == x][
'DC_connectivity'] == 1]
disconnected_buildings_DC = [x for x in building_connectivity['Name'].values if
x not in connected_buildings_DC]
network_type = "DC"
network_name = "gen_" + str(self.generation) + "_ind_" + str(self.individual)
path_output_edges_DC, path_output_nodes_DC = self.create_network_layout(connected_buildings_DC,
network_type, network_name)
else:
connected_buildings_DC = []
disconnected_buildings_DC = building_connectivity['Name'].values.tolist()
path_output_edges_DC = None
path_output_nodes_DC = None
data_processed = {
'connected_buildings_DH': connected_buildings_DH,
'disconnected_buildings_DH': disconnected_buildings_DH,
'path_output_edges_DH': path_output_edges_DH,
'path_output_nodes_DH': path_output_nodes_DH,
'connected_buildings_DC': connected_buildings_DC,
'disconnected_buildings_DC': disconnected_buildings_DC,
'path_output_edges_DC': path_output_edges_DC,
'path_output_nodes_DC': path_output_nodes_DC,
}
return data_processed
def create_network_layout(self, connected_buildings, network_type, network_name):
# Modify config inputs for this function
self.config.network_layout.network_type = network_type
self.config.network_layout.connected_buildings = connected_buildings
network_layout(self.config, self.locator, output_name_network=network_name)
# Output paths
path_output_edges = self.locator.get_network_layout_edges_shapefile(network_type, network_name)
path_output_nodes = self.locator.get_network_layout_nodes_shapefile(network_type, network_name)
return path_output_edges, path_output_nodes
def main():
"""Test this plot"""
import cea.config
import cea.plots.cache
config = cea.config.Configuration()
cache = cea.plots.cache.NullPlotCache()
SupplySystemMapPlot(config.project,
{'scenario-name': config.scenario_name,
'generation': config.plots_supply_system.generation,
'individual': config.plots_supply_system.individual},
cache).plot(auto_open=True)
if __name__ == '__main__':
main()
| Python | 0.000001 | @@ -4773,36 +4773,43 @@
gs_DC = %5Bx for x
+, y
in
+zip(
building_connect
@@ -4820,36 +4820,52 @@
y%5B'Name'%5D.values
- if%0A
+,%0A
@@ -4911,110 +4911,40 @@
vity
-.loc%5Bbuilding_connectivity%5B'Name'%5D == x%5D%5B%0A 'DC_connectivity'%5D
+%5B'DC_connectivity'%5D.values) if y
==
|
0dacb5382e3099d0b9faa65e207c3be407747eeb | Use .array | chainerrl/optimizers/nonbias_weight_decay.py | chainerrl/optimizers/nonbias_weight_decay.py | # This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from chainer import cuda
class NonbiasWeightDecay(object):
"""Weight decay only for non-bias parameters.
This hook can be used just like chainer.optimizer_hooks.WeightDecay except
that this hook does not apply weight decay to bias parameters.
This hook assumes that all the bias parameters have the name of "b". Any
parameter whose name is "b" is considered as a bias and excluded from
weight decay.
"""
name = 'NonbiasWeightDecay'
call_for_each_param = True
timing = 'pre'
def __init__(self, rate):
self.rate = rate
def __call__(self, rule, param):
if param.name == 'b':
return
p, g = param.data, param.grad
if p is None or g is None:
return
with cuda.get_device_from_array(p) as dev:
if int(dev) == -1:
g += self.rate * p
else:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
kernel(p, self.rate, g)
| Python | 0 | @@ -1007,12 +1007,13 @@
ram.
-data
+array
, pa
|
b1f894c128c62b02ce21c6ccd22376347c356059 | Bump version for a release | sh00t/settings.py | sh00t/settings.py | import os
BANNER = """
_ ___ ___ _
| | / _ \ / _ \| |
___| |__ | | | | | | | |_
/ __| '_ \| | | | | | | __|
\__ \ | | | |_| | |_| | |_
|___/_| |_|\___/ \___/ \__|
A Testing Environment for Manual Security Testers
"""
NAME = "sh00t!"
DESCRIPTION = "An integration testing framework"
VERSION = 0.1
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'f6l3gz5xxeg($j*15%_q8nvx0mvp*9h$$^^!4a_q3-dw*2n(&%'
# Set if production (for static files)
LIVE = False
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'app',
'configuration',
'rest_framework',
'django_tables2',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'sh00t.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request',
],
},
},
]
WSGI_APPLICATION = 'sh00t.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGIN_URL = '/admin/login/'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'app/static'),
)
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAdminUser',
],
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 20,
}
| Python | 0 | @@ -321,11 +321,11 @@
N =
-0.1
+1.0
%0A%0A#
|
83eb166addc3fc54ff97118770ac8cfd7685b10f | Fix service info | shaddock/model.py | shaddock/model.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Thibaut Lapierre <[email protected]>. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
from oslo_config import cfg
OPTS = [
cfg.StrOpt('template_dir',
default='/var/lib/shaddock',
help='Template directory to use.'),
cfg.StrOpt('user',
default='shaddock',
help='User used to build Docker images.'),
cfg.StrOpt('nocache',
default='False',
help='Build images w/o cache.')
]
CONF = cfg.CONF
CONF.register_opts(OPTS)
CONF.register_cli_opts(OPTS)
class Template(object):
def __init__(self):
self.template_dir = CONF.template_dir
self.user = CONF.user
services_dic = open('%s/etc/services.yml' % CONF.template_dir, "r")
services_dic = services_dic.read()
services_dic = yaml.load(services_dic)
self.services_keys = services_dic.keys()
config_dic = open('%s/etc/configuration.yml' % CONF.template_dir, "r")
config_dic = config_dic.read()
config_dic = yaml.load(config_dic)
self.template_vars = config_dic.get('template_vars')
class ContainerConfig(object):
def __init__(self, service_name):
self.name = service_name
self.dictionary = self.make_service_dictionary()
self.tag = self.dictionary.get('tag')
self.path = self.dictionary.get('path')
self.ports = self.dictionary.get('ports')
self.port_bindings = self.dictionary.get('port_bindings')
self.config = self.dictionary.get('confs')
self.volumes = self.dictionary.get('volumes')
self.binds = self.dictionary.get('binds')
self.privileged = self.dictionary.get('privileged')
self.network_mode = self.dictionary.get('network_mode')
def make_service_dictionary(self):
template = Template()
services_dic = open('%s/etc/services.yml' % CONF.template_dir,
"r")
services_dic = services_dic.read()
services_dic = yaml.load(services_dic)
ports = None
volumes = None
for service in template.services_keys:
if service.lower() == self.name:
service_info = services_dic.get(self.name, None)
ports = service_info.get('ports')
volumes = service_info.get('volumes')
privileged = service_info.get('privileged')
network_mode = service_info.get('network_mode')
service_dic = {}
service_dic['tag'] = '%s/%s' % (CONF.user, self.name)
service_dic['path'] = '%s/template/%s' % (CONF.template_dir,
self.name)
ports_list = []
ports_bind_dico = {}
if ports is not None:
for port in ports:
ports_list.append((port, 'tcp'))
ports_bind_dico[port] = ('0.0.0.0', port)
service_dic['ports'] = ports_list
service_dic['port_bindings'] = ports_bind_dico
volumes_list = []
binds_dico = {}
if volumes is not None:
for volume in volumes.keys():
volumes_list.append(volume)
bind = volumes.get(volume)
binds_dico[bind] = {'bind': volume, 'ro': False}
service_dic['volumes'] = volumes_list
service_dic['binds'] = binds_dico
service_dic['privileged'] = privileged
service_dic['network_mode'] = network_mode
return service_dic
# Dictionary should be like:
# 'glance': {
# 'tag': '%s/osglance' % (user),
# 'path': '%s/glance/' % (path),
# 'ports': [(9292, 'tcp')],
# 'port_bindings': {9292: ('0.0.0.0', 9292)},
# 'confs': {'HOST_NAME': host_name,
# 'MYSQL_DB': mysql_host,
# 'MYSQL_USER': mysql_user,
# 'MYSQL_PASSWORD': mysql_pass,
# 'RABBITMQ_HOST': rabbitmq_host,
# 'RABBITMQ_PASSWORD': rabbitmq_password,
# 'GLANCE_DBPASS': glance_pass
# },
# 'volumes': ['/var/log/supervisor'],
# 'binds': {'/var/log/shaddock/glance':
# {'bind': '/var/log/supervisor', 'ro': False}},
# 'privileged': False
# },
| Python | 0.000002 | @@ -2848,16 +2848,53 @@
, None)%0A
+ if service_info:%0A
@@ -2927,32 +2927,36 @@
fo.get('ports')%0A
+
@@ -3009,16 +3009,20 @@
+
+
privileg
@@ -3049,32 +3049,36 @@
t('privileged')%0A
+
@@ -3120,24 +3120,114 @@
twork_mode')
+%0A else:%0A ports, volumes, privileged, network_mode = None
%0A%0A se
|
65df9fd5c6820733b7ba0ec78152bfffb01301db | Fix #3323 | rest_framework/filters.py | rest_framework/filters.py | """
Provides generic filtering backends that can be used to filter the results
returned by list views.
"""
from __future__ import unicode_literals
import operator
from functools import reduce
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils import six
from rest_framework.compat import (
distinct, django_filters, get_model_name, guardian
)
from rest_framework.settings import api_settings
FilterSet = django_filters and django_filters.FilterSet or None
class BaseFilterBackend(object):
"""
A base class from which all filter backend classes should inherit.
"""
def filter_queryset(self, request, queryset, view):
"""
Return a filtered queryset.
"""
raise NotImplementedError(".filter_queryset() must be overridden.")
class DjangoFilterBackend(BaseFilterBackend):
"""
A filter backend that uses django-filter.
"""
default_filter_set = FilterSet
def __init__(self):
assert django_filters, 'Using DjangoFilterBackend, but django-filter is not installed'
def get_filter_class(self, view, queryset=None):
"""
Return the django-filters `FilterSet` used to filter the queryset.
"""
filter_class = getattr(view, 'filter_class', None)
filter_fields = getattr(view, 'filter_fields', None)
if filter_class:
filter_model = filter_class.Meta.model
assert issubclass(queryset.model, filter_model), \
'FilterSet model %s does not match queryset model %s' % \
(filter_model, queryset.model)
return filter_class
if filter_fields:
class AutoFilterSet(self.default_filter_set):
class Meta:
model = queryset.model
fields = filter_fields
return AutoFilterSet
return None
def filter_queryset(self, request, queryset, view):
filter_class = self.get_filter_class(view, queryset)
if filter_class:
return filter_class(request.query_params, queryset=queryset).qs
return queryset
class SearchFilter(BaseFilterBackend):
# The URL query parameter used for the search.
search_param = api_settings.SEARCH_PARAM
def get_search_terms(self, request):
"""
Search terms are set by a ?search=... query parameter,
and may be comma and/or whitespace delimited.
"""
params = request.query_params.get(self.search_param, '')
return params.replace(',', ' ').split()
def construct_search(self, field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
if field_name.startswith('$'):
return "%s__iregex" % field_name[1:]
else:
return "%s__icontains" % field_name
def filter_queryset(self, request, queryset, view):
search_fields = getattr(view, 'search_fields', None)
orm_lookups = [
self.construct_search(six.text_type(search_field))
for search_field in search_fields
]
search_terms = self.get_search_terms(request)
if not search_fields or not search_terms:
return queryset
base = queryset
for search_term in search_terms:
queries = [
models.Q(**{orm_lookup: search_term})
for orm_lookup in orm_lookups
]
queryset = queryset.filter(reduce(operator.or_, queries))
# Filtering against a many-to-many field requires us to
# call queryset.distinct() in order to avoid duplicate items
# in the resulting queryset.
return distinct(queryset, base)
class OrderingFilter(BaseFilterBackend):
# The URL query parameter used for the ordering.
ordering_param = api_settings.ORDERING_PARAM
ordering_fields = None
def get_ordering(self, request, queryset, view):
"""
Ordering is set by a comma delimited ?ordering=... query parameter.
The `ordering` query parameter can be overridden by setting
the `ordering_param` value on the OrderingFilter or by
specifying an `ORDERING_PARAM` value in the API settings.
"""
params = request.query_params.get(self.ordering_param)
if params:
fields = [param.strip() for param in params.split(',')]
ordering = self.remove_invalid_fields(queryset, fields, view)
if ordering:
return ordering
# No ordering was included, or all the ordering fields were invalid
return self.get_default_ordering(view)
def get_default_ordering(self, view):
ordering = getattr(view, 'ordering', None)
if isinstance(ordering, six.string_types):
return (ordering,)
return ordering
def remove_invalid_fields(self, queryset, fields, view):
valid_fields = getattr(view, 'ordering_fields', self.ordering_fields)
if valid_fields is None:
# Default to allowing filtering on serializer fields
serializer_class = getattr(view, 'serializer_class')
if serializer_class is None:
msg = ("Cannot use %s on a view which does not have either a "
"'serializer_class' or 'ordering_fields' attribute.")
raise ImproperlyConfigured(msg % self.__class__.__name__)
valid_fields = [
field.source or field_name
for field_name, field in serializer_class().fields.items()
if not getattr(field, 'write_only', False)
]
elif valid_fields == '__all__':
# View explicitly allows filtering on any model field
valid_fields = [field.name for field in queryset.model._meta.fields]
valid_fields += queryset.query.aggregates.keys()
return [term for term in fields if term.lstrip('-') in valid_fields]
def filter_queryset(self, request, queryset, view):
ordering = self.get_ordering(request, queryset, view)
if ordering:
return queryset.order_by(*ordering)
return queryset
class DjangoObjectPermissionsFilter(BaseFilterBackend):
"""
A filter backend that limits results to those where the requesting user
has read object level permissions.
"""
def __init__(self):
assert guardian, 'Using DjangoObjectPermissionsFilter, but django-guardian is not installed'
perm_format = '%(app_label)s.view_%(model_name)s'
def filter_queryset(self, request, queryset, view):
extra = {}
user = request.user
model_cls = queryset.model
kwargs = {
'app_label': model_cls._meta.app_label,
'model_name': get_model_name(model_cls)
}
permission = self.perm_format % kwargs
if guardian.VERSION >= (1, 3):
# Maintain behavior compatibility with versions prior to 1.3
extra = {'accept_global_perms': False}
else:
extra = {}
return guardian.shortcuts.get_objects_for_user(user, permission, queryset, **extra)
| Python | 0 | @@ -3168,32 +3168,166 @@
fields', None)%0A%0A
+ search_terms = self.get_search_terms(request)%0A%0A if not search_fields or not search_terms:%0A return queryset%0A%0A
orm_look
@@ -3456,141 +3456,8 @@
%5D
-%0A search_terms = self.get_search_terms(request)%0A%0A if not search_fields or not search_terms:%0A return queryset
%0A%0A
|
1006ac44b8ef9654976c1b57ccf20387877db1cb | Update results/title/forms100.py | results/title/forms100.py | results/title/forms100.py | from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
import os.path
from laboratory.settings import FONTS_FOLDER
from directions.models import Issledovaniya
from reportlab.platypus import Paragraph, Table, TableStyle, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import mm
from reportlab.lib.enums import TA_CENTER
def form_01(iss: Issledovaniya):
pdfmetrics.registerFont(TTFont('PTAstraSerifBold', os.path.join(FONTS_FOLDER, 'PTAstraSerif-Bold.ttf')))
styleSheet = getSampleStyleSheet()
style = styleSheet["Normal"]
style.fontName = "PTAstraSerifBold"
style.fontSize = 12
style.leading = 8
style.spaceAfter = 0 * mm
style.alignment = TA_CENTER
hospital = iss.doc_confirmation.hospital
hospital_short_title = hospital.safe_short_title
hospital_address = hospital.safe_address
hospital_ogrn = hospital.safe_ogrn
data = [
[Paragraph("Министерство здравоохранения Российской Федерации", style)],
[Paragraph(hospital_short_title, style)],
[Paragraph(hospital_address, style)],
[Paragraph(f"Код ОГРН {hospital_ogrn}", style)],
[Spacer(1, 1 * mm)],
[Paragraph("<u>ВЫПИСКА ИЗ АМБУЛАТОРНОЙ КАРТЫ</u>", style)],
]
t = Table(data, colWidths= 180 * mm)
t.setStyle(
TableStyle(
[
('ALIGN', (0, 0), (-1, -1), 'CENTER'),
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
]
)
)
return t
| Python | 0 | @@ -1311,17 +1311,16 @@
lWidths=
-
180 * mm
|
89714d70273bdeb386cedf810ea53ef540be49a4 | switch to failsafe_add to prevent race conditions (#204) | funnel/views/session.py | funnel/views/session.py | # -*- coding: utf-8 -*-
from baseframe import _
from flask import request, render_template, jsonify
from coaster.views import load_models
from .helpers import localize_date
from .. import app, lastuser
from ..models import db, Profile, Proposal, ProposalRedirect, ProposalSpace, ProposalSpaceRedirect, Session
from ..forms import SessionForm
def rooms_list(space):
return [(u"", _("Select Room"))] + [
(room.id, "{venue} - {room}".format(venue=room.venue.title, room=room.title)) for room in space.rooms]
def session_form(space, proposal=None, session=None):
if session:
form = SessionForm(obj=session, model=Session)
else:
form = SessionForm()
form.venue_room_id.choices = rooms_list(space)
if request.method == 'GET':
if not (session or proposal):
form.is_break.data = True
if proposal:
form.description.data = proposal.description
form.speaker_bio.data = proposal.bio
form.speaker.data = proposal.owner.fullname
form.title.data = proposal.title
return render_template('session_form.html', form=form, formid='session_form')
if form.validate_on_submit():
new = False
if not session:
new = True
session = Session()
if proposal:
session.proposal = proposal
form.populate_obj(session)
if new:
session.parent = space
session.make_id() # FIXME: This should not be required
session.make_name()
db.session.add(session)
db.session.commit()
data = dict(
id=session.url_id, title=session.title, room_scoped_name=session.venue_room.scoped_name if session.venue_room else None,
is_break=session.is_break, modal_url=session.url_for('edit'), delete_url=session.url_for('delete'),
proposal_id=session.proposal_id)
return jsonify(status=True, data=data)
return jsonify(
status=False,
form=render_template('session_form.html', form=form, formid='session_new'))
@app.route('/<space>/sessions/new', methods=['GET', 'POST'], subdomain='<profile>')
@lastuser.requires_login
@load_models(
(Profile, {'name': 'profile'}, 'g.profile'),
((ProposalSpace, ProposalSpaceRedirect), {'name': 'space', 'profile': 'profile'}, 'space'),
permission='new-session')
def session_new(profile, space):
return session_form(space)
@app.route('/<space>/<proposal>/schedule', methods=['GET', 'POST'], subdomain='<profile>')
@lastuser.requires_login
@load_models(
(Profile, {'name': 'profile'}, 'g.profile'),
((ProposalSpace, ProposalSpaceRedirect), {'name': 'space', 'profile': 'profile'}, 'space'),
((Proposal, ProposalRedirect), {'url_name': 'proposal', 'proposal_space': 'space'}, 'proposal'),
permission='new-session')
def proposal_schedule(profile, space, proposal):
return session_form(space, proposal=proposal)
@app.route('/<space>/<session>/viewsession-popup', methods=['GET'], subdomain='<profile>')
@load_models(
(Profile, {'name': 'profile'}, 'g.profile'),
((ProposalSpace, ProposalSpaceRedirect), {'name': 'space', 'profile': 'profile'}, 'space'),
(Session, {'url_name': 'session', 'proposal_space': 'space'}, 'session'),
permission='view')
def session_view_popup(profile, space, session):
return render_template('session_view_popup.html', session=session, timezone=space.timezone, localize_date=localize_date)
@app.route('/<space>/<session>/editsession', methods=['GET', 'POST'], subdomain='<profile>')
@lastuser.requires_login
@load_models(
(Profile, {'name': 'profile'}, 'g.profile'),
((ProposalSpace, ProposalSpaceRedirect), {'name': 'space', 'profile': 'profile'}, 'space'),
(Session, {'url_name': 'session', 'proposal_space': 'space'}, 'session'),
permission='edit-session')
def session_edit(profile, space, session):
return session_form(space, session=session)
@app.route('/<space>/<session>/deletesession', methods=['POST'], subdomain='<profile>')
@lastuser.requires_login
@load_models(
(Profile, {'name': 'profile'}, 'g.profile'),
((ProposalSpace, ProposalSpaceRedirect), {'name': 'space', 'profile': 'profile'}, 'space'),
(Session, {'url_name': 'session', 'proposal_space': 'space'}, 'session'),
permission='edit-session')
def session_delete(profile, space, session):
modal_url = session.proposal.url_for('schedule') if session.proposal else None
db.session.delete(session)
db.session.commit()
return jsonify(status=True, modal_url=modal_url)
| Python | 0 | @@ -131,16 +131,60 @@
d_models
+%0Afrom coaster.sqlalchemy import failsafe_add
%0A%0Afrom .
@@ -1593,30 +1593,101 @@
-db.session.add(session
+session = failsafe_add(db.session, session, proposal_space_id=space.id, url_id=session.url_id
)%0A
|
4ea9a0bc8b7ef47dd98c155b3f35440fe88b564a | Fix output order, add uri attribute | resync/capability_list.py | resync/capability_list.py | """ResourceSync Capability List object
An Capability List is a set of capabilitys with some metadata for
each capability. The Capability List object may also contain metadata
and links like other lists.
"""
import collections
from resource import Resource
from resource_set import ResourceSet
from list_base import ListBase
from sitemap import Sitemap
class CapabilitySet(ResourceSet):
"""Class for storage of resources in a Capability List
Extends the ResourceSet to add checks to ensure that there are
never two entries for the same resource, and that values are
returned in the canonical order.
"""
def __init__(self):
self.order = [ 'resourcelist', 'resourcedump',
'changelist', 'changedump',
'resourcelist-archive', 'resourcedump-archive',
'changelist-archive', 'changedump-archive' ]
def __iter__(self):
"""Iterator over all the resources in capability order
Deals with the case of unknown capabilities or duplicate entries
by using uri order for duplicates and adding any unknown ones
at the end
"""
self._iter_next_list = []
# look through all resources and build capability to uri index
uris = {}
for uri in self.keys():
cap = self[uri].capability
if (cap not in uris):
uris[cap]=[]
uris[cap].append(uri)
# now build list or uris in order for iterator
for cap in uris:
for uri in sorted(uris[cap]):
self._iter_next_list.append(uri)
self._iter_next_list.reverse()
return(iter(self._iter_next, None))
def _iter_next(self):
if (len(self._iter_next_list)>0):
return(self[self._iter_next_list.pop()])
else:
return(None)
class CapabilityList(ListBase):
"""Class representing a Capability List
An Capability List will admit only one resource with any given
URI. The iterator over resources is expected to return them in
canonical order of capability names as defined in main specification
section 9.2 and archives specification section 6.
"""
def __init__(self, resources=None, md=None, ln=None):
if (resources is None):
resources = CapabilitySet()
super(CapabilityList, self).__init__(resources=resources, md=md, ln=ln)
self.capability_name='capabilitylist'
self.capability_md='capabilitylist'
self.md['from']=None #usually don't want a from date
def add(self, resource, replace=False):
"""Add a resource or an iterable collection of resources
Will throw a ValueError if the resource (ie. same uri) already
exists in the capability_list, unless replace=True.
"""
if isinstance(resource, collections.Iterable):
for r in resource:
self.resources.add(r,replace)
else:
self.resources.add(resource,replace)
def add_capability(self,capability=None,uri=None,name=None):
"""Specific add function for capabilities
Takes either:
- a capability object (derived from ListBase) as the first argument
from which the capability name is extracted
- or a plain name string
and
- the URI of the capability
"""
if (capability is not None):
name = capability.capability_md
self.add( Resource(uri=uri,capability=name) )
def has_capability(self,name=None):
"""True if the Capability List includes the named capability"""
return( self.capability(name) is not None )
def capability(self,name=None):
"""Return information about the requested capability from this list
Will return None if there is no information about the requested capability
"""
for r in self.resources:
if (r.capability == name):
return(r)
return(None)
| Python | 0.000022 | @@ -1462,12 +1462,8 @@
#
- now
bui
@@ -1481,16 +1481,24 @@
uris in
+defined
order fo
@@ -1508,16 +1508,289 @@
terator%0A
+ for cap in self.order:%0A if (cap in uris):%0A for uri in sorted(uris%5Bcap%5D):%0A self._iter_next_list.append(uri)%0A del uris%5Bcap%5D%0A # add any left over capabilities we don't know about in alphabetical order%0A
@@ -2444,11 +2444,9 @@
ion
-9.2
+7
and
@@ -2540,24 +2540,34 @@
one, ln=None
+, uri=None
):%0A i
@@ -2707,16 +2707,25 @@
d, ln=ln
+, uri=uri
)%0A
@@ -2811,69 +2811,8 @@
ist'
-%0A self.md%5B'from'%5D=None #usually don't want a from date
%0A%0A
@@ -3534,16 +3534,38 @@
xtracted
+, and the URI if given
%0A
@@ -3731,16 +3731,96 @@
lity_md%0A
+ if (capability.uri is not None):%0A uri=capability.uri%0A
|